remove duplicate aidls and blobs

SaschaNes
2025-08-13 00:56:09 +02:00
parent fb2ac00da2
commit 9f87105940
2224 changed files with 7 additions and 568522 deletions

View File

@@ -1,6 +1,6 @@
cc_binary {
name: "audioadsprpcd",
enabled: false,
srcs: ["adsprpcd.c"],
header_libs: ["libfastrpc_vendor_headers"],

View File

@@ -1,2 +0,0 @@
include $(call all-subdir-makefiles)

View File

@@ -1,28 +0,0 @@
LOCAL_PATH := $(call my-dir)
CURRENT_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libaudioplatformconverter.qti
LOCAL_MODULE_OWNER := qti
LOCAL_MODULE_TAGS := optional
LOCAL_VENDOR_MODULE := true
LOCAL_C_INCLUDES += \
$(LOCAL_PATH)/include
# { SEC_AUDIO_COMMON
LOCAL_C_INCLUDES += $(TOP)/system/media/audio/include
# } SEC_AUDIO_COMMON
LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
LOCAL_SRC_FILES := \
PlatformConverter.cpp
LOCAL_SHARED_LIBRARIES := \
libbase \
libstagefright_foundation \
android.hardware.audio.core-V2-ndk \
android.media.audio.common.types-V3-ndk \
libar-pal
include $(BUILD_SHARED_LIBRARY)

View File

@@ -1,527 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_NDEBUG 0
#define LOG_TAG "AHAL_PlatformConverter_QTI"
#include <android-base/logging.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <qti-audio/PlatformConverter.h>
// { SEC_AUDIO_COMMON
#include <system/audio.h>
// } SEC_AUDIO_COMMON
using ::aidl::android::media::audio::common::AudioChannelLayout;
using ::aidl::android::media::audio::common::AudioDeviceAddress;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::aidl::android::media::audio::common::AudioDeviceType;
using ::aidl::android::media::audio::common::AudioFormatDescription;
using ::aidl::android::media::audio::common::AudioFormatType;
using ::aidl::android::media::audio::common::AudioOutputFlags;
using ::aidl::android::media::audio::common::PcmType;
// clang-format off
namespace {
__attribute__((no_sanitize("unsigned-integer-overflow")))
static void hash_combiner(std::size_t& seed, const std::size_t& v) {
// see boost::hash_combine
seed ^= v + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}
} // namespace
// clang-format on
namespace std {
template <>
struct hash<::aidl::android::media::audio::common::AudioDeviceDescription> {
std::size_t operator()(const ::aidl::android::media::audio::common::AudioDeviceDescription& add)
const noexcept {
std::size_t seed = 0;
hash_combiner(seed, std::hash<::aidl::android::media::audio::common::AudioDeviceType>{}(
add.type));
hash_combiner(seed, std::hash<std::string>{}(add.connection));
return seed;
}
};
template <>
struct hash<::aidl::android::media::audio::common::AudioFormatDescription> {
std::size_t operator()(const ::aidl::android::media::audio::common::AudioFormatDescription& aft)
const noexcept {
std::size_t seed = 0;
hash_combiner(seed, std::hash<::aidl::android::media::audio::common::AudioFormatType>{}(
aft.type));
hash_combiner(seed, std::hash<::aidl::android::media::audio::common::PcmType>{}(aft.pcm));
hash_combiner(seed, std::hash<std::string>{}(aft.encoding));
return seed;
}
};
} // namespace std
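// The std::hash specializations above allow AudioDeviceDescription and AudioFormatDescription
// to be used as keys in unordered containers.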
namespace qti::audio {
AudioDeviceDescription makeAudioDeviceDescription(AudioDeviceType type,
const std::string& connection = "") {
AudioDeviceDescription result;
result.type = type;
result.connection = connection;
return result;
}
using DevicePair = std::pair<AudioDeviceDescription, pal_device_id_t>;
using DevicePairs = std::vector<DevicePair>;
using outputFlagsStreamtypeMap =
std::unordered_map<int32_t, pal_stream_type_t>;
// conversions
DevicePairs getDevicePairs() {
// No duplicates on first entry
DevicePairs pairs = {
{AudioDeviceDescription{}, PAL_DEVICE_NONE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEFAULT), PAL_DEVICE_OUT_SPEAKER},
{makeAudioDeviceDescription(AudioDeviceType::OUT_SPEAKER_EARPIECE),
PAL_DEVICE_OUT_HANDSET},
{makeAudioDeviceDescription(AudioDeviceType::OUT_SPEAKER), PAL_DEVICE_OUT_SPEAKER},
{makeAudioDeviceDescription(AudioDeviceType::OUT_HEADPHONE,
AudioDeviceDescription::CONNECTION_ANALOG),
PAL_DEVICE_OUT_WIRED_HEADPHONE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_BT_SCO),
PAL_DEVICE_OUT_BLUETOOTH_SCO},
{makeAudioDeviceDescription(AudioDeviceType::OUT_CARKIT,
AudioDeviceDescription::CONNECTION_BT_SCO),
PAL_DEVICE_OUT_BLUETOOTH_SCO},
{makeAudioDeviceDescription(AudioDeviceType::OUT_TELEPHONY_TX), PAL_DEVICE_NONE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_LINE_AUX), PAL_DEVICE_OUT_AUX_LINE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_SPEAKER_SAFE), PAL_DEVICE_OUT_SPEAKER},
{makeAudioDeviceDescription(AudioDeviceType::OUT_SPEAKER,
AudioDeviceDescription::CONNECTION_BT_LE),
PAL_DEVICE_OUT_BLUETOOTH_BLE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_BROADCAST,
AudioDeviceDescription::CONNECTION_BT_LE),
PAL_DEVICE_OUT_BLUETOOTH_BLE_BROADCAST},
{makeAudioDeviceDescription(AudioDeviceType::IN_DEFAULT), PAL_DEVICE_IN_HANDSET_MIC},
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE), PAL_DEVICE_IN_HANDSET_MIC},
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE_BACK),
PAL_DEVICE_IN_SPEAKER_MIC},
#ifdef SEC_AUDIO_COMMON
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE_MULTI), // AUDIO_DEVICE_IN_2MIC
PAL_DEVICE_IN_SPEAKER_MIC},
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE_MULTI, // AUDIO_DEVICE_IN_MIC3
AudioDeviceDescription::CONNECTION_BUILTIN_MIC3),
PAL_DEVICE_IN_SPEAKER_MIC},
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE_MULTI, // AUDIO_DEVICE_IN_MIC4
AudioDeviceDescription::CONNECTION_BUILTIN_MIC4),
PAL_DEVICE_IN_SPEAKER_MIC},
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE_MULTI, // AUDIO_DEVICE_IN_MIC3_MIC4
AudioDeviceDescription::CONNECTION_BUILTIN_MIC3_MIC4),
PAL_DEVICE_IN_SPEAKER_MIC},
{makeAudioDeviceDescription(AudioDeviceType::IN_MICROPHONE_MULTI, // AUDIO_DEVICE_IN_MULTI_MIC
AudioDeviceDescription::CONNECTION_BUILTIN_MULTI_MIC),
PAL_DEVICE_IN_SPEAKER_MIC},
#endif
{makeAudioDeviceDescription(AudioDeviceType::IN_TELEPHONY_RX),
PAL_DEVICE_IN_TELEPHONY_RX},
{makeAudioDeviceDescription(AudioDeviceType::IN_ECHO_REFERENCE),
PAL_DEVICE_IN_ECHO_REF},
{makeAudioDeviceDescription(AudioDeviceType::IN_HEADSET,
AudioDeviceDescription::CONNECTION_ANALOG),
PAL_DEVICE_IN_WIRED_HEADSET},
{makeAudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
AudioDeviceDescription::CONNECTION_ANALOG),
PAL_DEVICE_OUT_WIRED_HEADSET},
{makeAudioDeviceDescription(AudioDeviceType::IN_HEADSET,
AudioDeviceDescription::CONNECTION_BT_SCO),
PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET},
{makeAudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
AudioDeviceDescription::CONNECTION_BT_SCO),
PAL_DEVICE_OUT_BLUETOOTH_SCO},
{makeAudioDeviceDescription(AudioDeviceType::IN_DEVICE,
AudioDeviceDescription::CONNECTION_HDMI),
PAL_DEVICE_IN_HDMI},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_HDMI),
PAL_DEVICE_OUT_AUX_DIGITAL},
{makeAudioDeviceDescription(AudioDeviceType::IN_ACCESSORY,
AudioDeviceDescription::CONNECTION_USB),
PAL_DEVICE_IN_USB_ACCESSORY},
#ifdef SEC_AUDIO_FMRADIO
{makeAudioDeviceDescription(AudioDeviceType::IN_FM_TUNER,
AudioDeviceDescription::VX_SEC_CONNECTION_FM),
PAL_DEVICE_IN_FM_TUNER},
{makeAudioDeviceDescription(AudioDeviceType::OUT_FM,
AudioDeviceDescription::VX_SEC_CONNECTION_FM),
PAL_DEVICE_OUT_FM},
#else
{makeAudioDeviceDescription(AudioDeviceType::IN_FM_TUNER), PAL_DEVICE_IN_FM_TUNER},
{makeAudioDeviceDescription(AudioDeviceType::OUT_FM), PAL_DEVICE_OUT_FM},
#endif
{makeAudioDeviceDescription(AudioDeviceType::IN_DEVICE,
AudioDeviceDescription::CONNECTION_ANALOG),
PAL_DEVICE_IN_LINE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_ANALOG),
PAL_DEVICE_OUT_WIRED_HEADPHONE},
{makeAudioDeviceDescription(AudioDeviceType::IN_DEVICE,
AudioDeviceDescription::CONNECTION_SPDIF),
PAL_DEVICE_IN_SPDIF},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_SPDIF),
PAL_DEVICE_OUT_SPDIF},
{makeAudioDeviceDescription(AudioDeviceType::IN_DEVICE,
AudioDeviceDescription::CONNECTION_BT_A2DP),
PAL_DEVICE_IN_BLUETOOTH_A2DP},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_BT_A2DP),
PAL_DEVICE_OUT_BLUETOOTH_A2DP},
{makeAudioDeviceDescription(AudioDeviceType::IN_AFE_PROXY,
AudioDeviceDescription::CONNECTION_VIRTUAL),
PAL_DEVICE_IN_PROXY},
{makeAudioDeviceDescription(AudioDeviceType::OUT_AFE_PROXY,
AudioDeviceDescription::CONNECTION_VIRTUAL),
PAL_DEVICE_OUT_PROXY},
{makeAudioDeviceDescription(AudioDeviceType::IN_DEVICE,
AudioDeviceDescription::CONNECTION_IP_V4),
PAL_DEVICE_IN_RECORD_PROXY},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_IP_V4),
PAL_DEVICE_OUT_RECORD_PROXY},
{makeAudioDeviceDescription(AudioDeviceType::OUT_DEVICE,
AudioDeviceDescription::CONNECTION_USB),
PAL_DEVICE_OUT_USB_DEVICE},
{makeAudioDeviceDescription(AudioDeviceType::IN_HEADSET,
AudioDeviceDescription::CONNECTION_USB),
PAL_DEVICE_IN_USB_HEADSET},
{makeAudioDeviceDescription(AudioDeviceType::IN_DEVICE,
AudioDeviceDescription::CONNECTION_USB),
PAL_DEVICE_IN_USB_HEADSET},
{makeAudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
AudioDeviceDescription::CONNECTION_USB),
PAL_DEVICE_OUT_USB_HEADSET},
{makeAudioDeviceDescription(AudioDeviceType::IN_HEADSET,
AudioDeviceDescription::CONNECTION_BT_LE),
PAL_DEVICE_IN_BLUETOOTH_BLE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_HEADSET,
AudioDeviceDescription::CONNECTION_BT_LE),
PAL_DEVICE_OUT_BLUETOOTH_BLE},
{makeAudioDeviceDescription(AudioDeviceType::OUT_HEARING_AID,
AudioDeviceDescription::CONNECTION_WIRELESS),
PAL_DEVICE_OUT_HEARING_AID}};
return pairs;
}
// AudioFormat Conversions
AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
AudioFormatDescription result;
result.type = type;
return result;
}
AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
auto result = make_AudioFormatDescription(AudioFormatType::PCM);
result.pcm = pcm;
return result;
}
AudioFormatDescription make_AudioFormatDescription(const std::string& encoding) {
AudioFormatDescription result;
result.type = ::aidl::android::media::audio::common::AudioFormatType::NON_PCM;
result.encoding = encoding;
return result;
}
AudioFormatDescription make_AudioFormatDescription(PcmType transport, const std::string& encoding) {
auto result = make_AudioFormatDescription(encoding);
result.pcm = transport;
return result;
}
using FormatPair = std::pair<pal_audio_fmt_t, AudioFormatDescription>;
using FormatPairs = std::vector<FormatPair>;
FormatPairs getFormatPairs() {
// No duplicates on second entry
FormatPairs pairs = {{
{PAL_AUDIO_FMT_PCM_S16_LE, make_AudioFormatDescription(PcmType::INT_16_BIT)},
{PAL_AUDIO_FMT_PCM_S8, make_AudioFormatDescription(PcmType::UINT_8_BIT)},
{PAL_AUDIO_FMT_PCM_S32_LE, make_AudioFormatDescription(PcmType::INT_32_BIT)},
{PAL_AUDIO_FMT_PCM_S24_LE, make_AudioFormatDescription(PcmType::FIXED_Q_8_24)},
{PAL_AUDIO_FMT_PCM_S32_LE, make_AudioFormatDescription(PcmType::FLOAT_32_BIT)},
{PAL_AUDIO_FMT_PCM_S24_3LE, make_AudioFormatDescription(PcmType::INT_24_BIT)},
{PAL_AUDIO_FMT_AAC,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_MP4)},
{PAL_AUDIO_FMT_AAC_LATM,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC)},
{PAL_AUDIO_FMT_AAC_ADTS,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS)},
{PAL_AUDIO_FMT_AAC_ADIF,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADIF)},
{PAL_AUDIO_FMT_AAC,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_LC)},
#ifdef SEC_AUDIO_OFFLOAD
{PAL_AUDIO_FMT_AAC_ADTS,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC)},
#else // qc orig.
{PAL_AUDIO_FMT_AAC, // assumed: the PAL format key is missing from this hunk as shown; PAL_AUDIO_FMT_AAC follows the neighbouring qc-orig entries
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC)},
#endif
{PAL_AUDIO_FMT_AAC,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1)},
{PAL_AUDIO_FMT_AAC,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2)},
{PAL_AUDIO_FMT_AAC,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_HE_V1)},
{PAL_AUDIO_FMT_AAC,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_AAC_HE_V2)},
{PAL_AUDIO_FMT_MP3, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_MPEG)},
{PAL_AUDIO_FMT_FLAC, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_FLAC)},
{PAL_AUDIO_FMT_VORBIS,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_VORBIS)},
{PAL_AUDIO_FMT_ALAC, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_ALAC)},
{PAL_AUDIO_FMT_WMA_STD,
make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_WMA)},
{PAL_AUDIO_FMT_APE, make_AudioFormatDescription("audio/x-ape")},
{PAL_AUDIO_FMT_WMA_PRO, make_AudioFormatDescription("audio/x-ms-wma.pro")},
{PAL_AUDIO_FMT_OPUS, make_AudioFormatDescription(::android::MEDIA_MIMETYPE_AUDIO_OPUS)},
}};
return pairs;
}
template <typename S, typename T>
std::map<S, T> make_DirectMap(const std::vector<std::pair<S, T>>& v) {
std::map<S, T> result(v.begin(), v.end());
if (result.size() != v.size()) {
LOG(FATAL) << __func__ << ": Duplicate key elements detected";
}
return result;
}
template <typename S, typename T>
std::map<T, S> make_ReverseMap(const std::vector<std::pair<S, T>>& v) {
std::map<T, S> result;
std::transform(v.begin(), v.end(), std::inserter(result, result.begin()),
[](const std::pair<S, T>& p) { return std::make_pair(p.second, p.first); });
if (result.size() != v.size()) {
LOG(FATAL) << __func__ << ": Duplicate key elements detected";
}
return result;
}
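// make_DirectMap keys on the first element of each pair and make_ReverseMap on the second;
// the pair lists fed to them must be duplicate-free in that position, otherwise the
// LOG(FATAL) above aborts during static initialization of the maps below.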
using AidlToPalDeviceMap =
std::map<::aidl::android::media::audio::common::AudioDeviceDescription, pal_device_id_t>;
using AidlToPalAudioFormatMap =
std::map<::aidl::android::media::audio::common::AudioFormatDescription, pal_audio_fmt_t>;
const static AidlToPalDeviceMap kAidlToPalDeviceMap =
make_DirectMap<AudioDeviceDescription, pal_device_id_t>(getDevicePairs());
const static AidlToPalAudioFormatMap kAidlToPalAudioFormatMap =
make_ReverseMap<pal_audio_fmt_t, AudioFormatDescription>(getFormatPairs());
// static
std::unique_ptr<pal_channel_info> PlatformConverter::getPalChannelInfoForChannelCount(
int count) noexcept {
auto channelInfo = std::make_unique<pal_channel_info>();
channelInfo->channels = count;
if (count == 1) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
} else if (count == 2) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
} else if (count == 3) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
channelInfo->ch_map[2] = PAL_CHMAP_CHANNEL_C;
} else if (count == 4) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
channelInfo->ch_map[2] = PAL_CHMAP_CHANNEL_C;
channelInfo->ch_map[3] = PAL_CHMAP_CHANNEL_LFE;
} else if (count == 5) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
channelInfo->ch_map[2] = PAL_CHMAP_CHANNEL_C;
channelInfo->ch_map[3] = PAL_CHMAP_CHANNEL_LFE;
channelInfo->ch_map[4] = PAL_CHMAP_CHANNEL_RC;
} else if (count == 6) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
channelInfo->ch_map[2] = PAL_CHMAP_CHANNEL_C;
channelInfo->ch_map[3] = PAL_CHMAP_CHANNEL_LFE;
channelInfo->ch_map[4] = PAL_CHMAP_CHANNEL_LB;
channelInfo->ch_map[5] = PAL_CHMAP_CHANNEL_RB;
} else if (count == 7) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
channelInfo->ch_map[2] = PAL_CHMAP_CHANNEL_C;
channelInfo->ch_map[3] = PAL_CHMAP_CHANNEL_LFE;
channelInfo->ch_map[4] = PAL_CHMAP_CHANNEL_LB;
channelInfo->ch_map[5] = PAL_CHMAP_CHANNEL_RB;
channelInfo->ch_map[6] = PAL_CHMAP_CHANNEL_LS;
} else if (count == 8) {
channelInfo->ch_map[0] = PAL_CHMAP_CHANNEL_FL;
channelInfo->ch_map[1] = PAL_CHMAP_CHANNEL_FR;
channelInfo->ch_map[2] = PAL_CHMAP_CHANNEL_C;
channelInfo->ch_map[3] = PAL_CHMAP_CHANNEL_LFE;
channelInfo->ch_map[4] = PAL_CHMAP_CHANNEL_LB;
channelInfo->ch_map[5] = PAL_CHMAP_CHANNEL_RB;
channelInfo->ch_map[6] = PAL_CHMAP_CHANNEL_LS;
channelInfo->ch_map[7] = PAL_CHMAP_CHANNEL_RS;
} else {
LOG(ERROR) << __func__ << "channel map not found for channels" << count;
}
return channelInfo;
}
// static
uint16_t PlatformConverter::getBitWidthForAidlPCM(
const AudioFormatDescription& aidlFormat) noexcept {
if (aidlFormat.type != AudioFormatType::PCM) {
return 0;
}
if (aidlFormat.pcm == PcmType::UINT_8_BIT) {
return 8;
} else if (aidlFormat.pcm == PcmType::INT_16_BIT) {
return 16;
} else if (aidlFormat.pcm == PcmType::INT_24_BIT) {
return 24;
} else if (aidlFormat.pcm == PcmType::INT_32_BIT) {
return 32;
} else if (aidlFormat.pcm == PcmType::FIXED_Q_8_24) {
return 32;
} else if (aidlFormat.pcm == PcmType::FLOAT_32_BIT) {
return 32;
}
return 0;
}
#ifdef SEC_AUDIO_SAMSUNGRECORD
uint32_t PlatformConverter::getAudioFormatForAidlPCM(
const AudioFormatDescription& aidlFormat) noexcept {
if (aidlFormat.type != AudioFormatType::PCM) {
return 0xFFFFFFFFu;
}
if (aidlFormat.pcm == PcmType::UINT_8_BIT) { // AUDIO_FORMAT_PCM_8_BIT
return 0x2u;
} else if (aidlFormat.pcm == PcmType::INT_16_BIT) { // AUDIO_FORMAT_PCM_16_BIT
return 0x1u;
} else if (aidlFormat.pcm == PcmType::INT_24_BIT) { // AUDIO_FORMAT_PCM_24_BIT_PACKED
return 0x6u;
} else if (aidlFormat.pcm == PcmType::INT_32_BIT) { // AUDIO_FORMAT_PCM_32_BIT
return 0x3u;
} else if (aidlFormat.pcm == PcmType::FIXED_Q_8_24) { // AUDIO_FORMAT_PCM_8_24_BIT
return 0x4u;
} else if (aidlFormat.pcm == PcmType::FLOAT_32_BIT) { // AUDIO_FORMAT_PCM_FLOAT
return 0x5u;
}
return 0xFFFFFFFFu;
}
#endif
// static
pal_audio_fmt_t PlatformConverter::getPalFormatId(
const ::aidl::android::media::audio::common::AudioFormatDescription&
formatDescription) noexcept {
auto element = kAidlToPalAudioFormatMap.find(formatDescription);
if (element == kAidlToPalAudioFormatMap.cend()) {
LOG(ERROR) << __func__ << " failed to find corressponding pal format for "
<< formatDescription.toString();
// no format found hence return range end;
// Todo have PAL_AUDIO_FMT_INVALID as 0
return PAL_AUDIO_FMT_COMPRESSED_RANGE_END;
}
return element->second;
}
// static
pal_device_id_t PlatformConverter::getPalDeviceId(
const ::aidl::android::media::audio::common::AudioDeviceDescription&
deviceDescription) noexcept {
auto element = kAidlToPalDeviceMap.find(deviceDescription);
if (element == kAidlToPalDeviceMap.cend()) {
LOG(ERROR) << __func__ << " failed to find corressponding pal device for "
<< deviceDescription.toString();
// no device found hence return 0;
return PAL_DEVICE_OUT_MIN;
}
return element->second;
}
outputFlagsStreamtypeMap populatemOutputFlagsStreamtypeMap() {
outputFlagsStreamtypeMap result;
constexpr auto flagCastToint = [](auto flag) { return static_cast<int32_t>(flag); };
constexpr auto PrimaryPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::PRIMARY));
#ifdef SEC_AUDIO_SUPPORT_MEDIA_OUTPUT
result[PrimaryPlaybackFlags] = PAL_STREAM_GENERIC;
#else // qc orig.
result[PrimaryPlaybackFlags] = PAL_STREAM_DEEP_BUFFER;
#endif
constexpr auto deepBufferPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::DEEP_BUFFER));
result[deepBufferPlaybackFlags] = PAL_STREAM_DEEP_BUFFER;
constexpr auto compressOffloadPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::DIRECT) |
1 << flagCastToint(AudioOutputFlags::COMPRESS_OFFLOAD) |
1 << flagCastToint(AudioOutputFlags::NON_BLOCKING) |
1 << flagCastToint(AudioOutputFlags::GAPLESS_OFFLOAD));
result[compressOffloadPlaybackFlags] = PAL_STREAM_COMPRESSED;
constexpr auto lowLatencyPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::PRIMARY) |
1 << flagCastToint(AudioOutputFlags::FAST));
result[lowLatencyPlaybackFlags] = PAL_STREAM_LOW_LATENCY;
constexpr auto pcmOffloadPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::DIRECT));
result[pcmOffloadPlaybackFlags] = PAL_STREAM_PCM_OFFLOAD;
constexpr auto voipPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::VOIP_RX));
result[voipPlaybackFlags] = PAL_STREAM_VOIP_RX;
constexpr auto spatialPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::SPATIALIZER));
result[spatialPlaybackFlags] = PAL_STREAM_SPATIAL_AUDIO;
constexpr auto ullPlaybackFlags = static_cast<int32_t>(
1 << flagCastToint(AudioOutputFlags::FAST) | 1 << flagCastToint(AudioOutputFlags::RAW));
result[ullPlaybackFlags] = PAL_STREAM_ULTRA_LOW_LATENCY;
constexpr auto mmapPlaybackFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::DIRECT) |
1 << flagCastToint(AudioOutputFlags::MMAP_NOIRQ));
result[mmapPlaybackFlags] = PAL_STREAM_ULTRA_LOW_LATENCY;
constexpr auto inCallMusicFlags =
static_cast<int32_t>(1 << flagCastToint(AudioOutputFlags::INCALL_MUSIC));
result[inCallMusicFlags] = PAL_STREAM_VOICE_CALL_MUSIC;
return result;
}
const static outputFlagsStreamtypeMap kOutputFlagsStreamtypeMap =
populatemOutputFlagsStreamtypeMap();
pal_stream_type_t PlatformConverter::getPalStreamTypeId(int32_t outputFlag) noexcept {
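// Note: std::map::at() throws for an unmapped flag combination; because this function is
// noexcept, that terminates the process, so callers must pass a flag mask present in
// kOutputFlagsStreamtypeMap.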
return kOutputFlagsStreamtypeMap.at(outputFlag);
}
// static
std::string PlatformConverter::toString() noexcept {
std::ostringstream os;
os << "### platform conversion start ###" << std::endl;
os << "devices: Aidl to PAL" << std::endl;
for (const auto& [key, value] : kAidlToPalDeviceMap) {
os << key.toString() << " => " << deviceNameLUT.at(value).c_str() << std::endl;
}
os << std::endl << "formats: Aidl to PAL " << std::endl;
for (const auto& [key, value] : kAidlToPalAudioFormatMap) {
os << key.toString() << " => "
<< "pal format: 0x" << std::hex << value << std::endl;
}
os << "### platform conversion end ###" << std::endl;
return os.str();
}
} // namespace qti::audio

View File

@@ -1,59 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <algorithm>
#include <sstream>
#include <unordered_map>
#include <vector>
/* AIDL types */
#include <aidl/android/media/audio/common/AudioChannelLayout.h>
#include <aidl/android/media/audio/common/AudioDeviceAddress.h>
#include <aidl/android/media/audio/common/AudioDeviceDescription.h>
#include <aidl/android/media/audio/common/AudioDeviceType.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <aidl/android/media/audio/common/AudioFormatType.h>
#include <aidl/android/media/audio/common/AudioOutputFlags.h>
#include <aidl/android/media/audio/common/PcmType.h>
/* PAL types */
#include <PalDefs.h>
namespace qti::audio {
/**
* monostate singleton class
* APIs are designed to convert any AIDL Audio type to PAL type or vice versa
**/
class PlatformConverter {
private:
PlatformConverter() = delete;
PlatformConverter(const PlatformConverter&) = delete;
PlatformConverter& operator=(const PlatformConverter& x) = delete;
PlatformConverter(PlatformConverter&& other) = delete;
PlatformConverter& operator=(PlatformConverter&& other) = delete;
public:
static pal_audio_fmt_t getPalFormatId(
const ::aidl::android::media::audio::common::AudioFormatDescription&
formatDescription) noexcept;
static pal_device_id_t getPalDeviceId(
const ::aidl::android::media::audio::common::AudioDeviceDescription&
deviceDescription) noexcept;
static pal_stream_type_t getPalStreamTypeId(int32_t outputFlag) noexcept;
static uint16_t getBitWidthForAidlPCM(
const ::aidl::android::media::audio::common::AudioFormatDescription&) noexcept;
#ifdef SEC_AUDIO_SAMSUNGRECORD
static uint32_t getAudioFormatForAidlPCM(
const ::aidl::android::media::audio::common::AudioFormatDescription&) noexcept;
#endif
static std::unique_ptr<pal_channel_info> getPalChannelInfoForChannelCount(int count) noexcept;
static std::string toString() noexcept;
};
} // namespace qti::audio
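A minimal, hypothetical usage sketch of the converter API declared above (not part of this commit; it assumes only the headers shown in these two files and the mappings from PlatformConverter.cpp):
#include <aidl/android/media/audio/common/AudioDeviceDescription.h>
#include <aidl/android/media/audio/common/AudioDeviceType.h>
#include <qti-audio/PlatformConverter.h>
// Convert an AIDL built-in speaker description to its PAL device id.
// getPalDeviceId() falls back to PAL_DEVICE_OUT_MIN when no mapping exists.
pal_device_id_t speakerPalId() {
    ::aidl::android::media::audio::common::AudioDeviceDescription speaker;
    speaker.type = ::aidl::android::media::audio::common::AudioDeviceType::OUT_SPEAKER;
    return ::qti::audio::PlatformConverter::getPalDeviceId(speaker);
}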

View File

@@ -1,118 +0,0 @@
ifneq ($(AUDIO_USE_STUB_HAL), true)
LOCAL_PATH := $(call my-dir)
CURRENT_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libaudiocorehal.qti
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_RELATIVE_PATH := hw
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_CFLAGS := \
-DBACKEND_NDK \
-Wall \
-Wextra \
-Werror \
-Wthread-safety
LOCAL_VINTF_FRAGMENTS := \
../../configs/common/manifest_non_qmaa.xml
LOCAL_VINTF_FRAGMENTS += \
../../configs/common/manifest_non_qmaa_extn.xml
LOCAL_SRC_FILES := \
CoreService.cpp \
Bluetooth.cpp \
Module.cpp \
ModulePrimary.cpp \
ModuleStub.cpp \
SoundDose.cpp \
Stream.cpp \
StreamStub.cpp \
Telephony.cpp \
StreamInPrimary.cpp \
StreamOutPrimary.cpp \
HalOffloadEffects.cpp
# { SEC_AUDIO_COMMON
SEC_COMMON_HAL_PATH := ../../../../../../samsung/variant/audio/sec_audioreach/hal
LOCAL_SRC_FILES += \
SecModulePrimary.cpp \
$(SEC_COMMON_HAL_PATH)/SecFTM.cpp \
$(SEC_COMMON_HAL_PATH)/AudioEffect.cpp \
$(SEC_COMMON_HAL_PATH)/AudioDump.cpp
# { SEC_AUDIO_SAMSUNGRECORD
LOCAL_SRC_FILES += \
$(SEC_COMMON_HAL_PATH)/AudioPreProcess.cpp
# } SEC_AUDIO_SAMSUNGRECORD
# } SEC_AUDIO_COMMON
LOCAL_HEADER_LIBRARIES := \
libxsdc-utils \
libaudioeffects \
liberror_headers \
libaudioclient_headers \
libaudio_system_headers \
libmedia_helper_headers
# defaults: [
# "latest_android_media_audio_common_types_ndk_shared",
# "latest_android_hardware_audio_core_ndk_shared",
# ],
# TODO: find an .mk way to express these Soong defaults; the versioned -ndk libraries are listed explicitly in LOCAL_SHARED_LIBRARIES instead
# android.media.audio.common.types-V2-ndk \
# android.hardware.audio.core-V1-ndk
LOCAL_STATIC_LIBRARIES := \
libaudiohalutils.qti \
libaudio_module_config.qti \
libaudiocore.extension
LOCAL_WHOLE_STATIC_LIBRARIES := \
libaudioplatform.qti
LOCAL_SHARED_LIBRARIES := \
libaudioaidlcommon \
libbase \
libbinder_ndk \
libcutils \
liblog \
libdl \
libhidlbase \
libhardware \
libfmq \
libmedia_helper \
libstagefright_foundation \
libutils \
libaudioutils \
libxml2 \
android.hardware.common-V2-ndk \
android.media.audio.common.types-V3-ndk \
android.hardware.audio.core-V2-ndk \
$(LATEST_ANDROID_HARDWARE_AUDIO_EFFECT) \
android.hardware.audio.core.sounddose-V1-ndk \
libar-pal \
libaudioserviceexampleimpl \
libaudioplatformconverter.qti \
qti-audio-types-aidl-V1-ndk
# { SEC_AUDIO_COMMON
SEC_AUDIO_VARS := vendor/samsung/variant/audio/sec_audioreach_vars.mk
include $(SEC_AUDIO_VARS)
LOCAL_SHARED_LIBRARIES += libsecaudiohalproxy_vendor
# } SEC_AUDIO_COMMON
ifneq (true,$(call spf_check,SEC_PRODUCT_FEATURE_AUDIO_CONFIG_SPEAKER_AMP,))
LOCAL_SHARED_LIBRARIES += libspeakercalibration
endif
include $(BUILD_SHARED_LIBRARY)
include $(CURRENT_PATH)/fuzzer/Android.mk
include $(CURRENT_PATH)/extensions/Android.mk
include $(CURRENT_PATH)/platform/Android.mk
include $(CURRENT_PATH)/utils/Android.mk
endif

View File

@@ -1,181 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_Bluetooth_QTI"
#include <android-base/logging.h>
#include <cutils/properties.h>
#include <qti-audio-core/Bluetooth.h>
#include <qti-audio-core/Telephony.h>
using aidl::android::hardware::audio::core::VendorParameter;
using aidl::android::media::audio::common::Boolean;
using aidl::android::media::audio::common::Float;
using aidl::android::media::audio::common::Int;
namespace qti::audio::core {
Bluetooth::Bluetooth() {
mScoConfig.isEnabled = Boolean{false};
mScoConfig.isNrecEnabled = Boolean{false};
mScoConfig.mode = ScoConfig::Mode::SCO;
mHfpConfig.isEnabled = Boolean{false};
mHfpConfig.sampleRate = Int{8000};
mHfpConfig.volume = Float{HfpConfig::VOLUME_MAX};
}
ndk::ScopedAStatus Bluetooth::setScoConfig(const ScoConfig& in_config, ScoConfig* _aidl_return) {
if (in_config.isEnabled.has_value()) {
mScoConfig.isEnabled = in_config.isEnabled;
mScoConfig.isEnabled.value().value == true ? mPlatform.setBluetoothParameters("BT_SCO=on")
: mPlatform.setBluetoothParameters("BT_SCO=off");
/* never call telephony with any lock held*/
if(auto telephony = mPlatform.getTelephony().lock()) {
// TODO: remove the unsafe cast; the ITelephony held by the platform is in fact a Telephony instance
auto tele = static_cast<Telephony*>(telephony.get());
tele->onBluetoothScoEvent(mScoConfig.isEnabled.value().value);
}
}
if (in_config.isNrecEnabled.has_value()) {
mScoConfig.isNrecEnabled = in_config.isNrecEnabled;
mScoConfig.isNrecEnabled.value().value == true
? mPlatform.setBluetoothParameters("bt_headset_nrec=on")
: mPlatform.setBluetoothParameters("bt_headset_nrec=off");
}
if (in_config.mode != ScoConfig::Mode::UNSPECIFIED) {
mScoConfig.mode = in_config.mode;
if (mScoConfig.mode == ScoConfig::Mode::SCO) {
mPlatform.setBluetoothParameters("bt_wbs=off");
}
if (mScoConfig.mode == ScoConfig::Mode::SCO_WB) {
mPlatform.setBluetoothParameters("bt_wbs=on");
} else if (mScoConfig.mode == ScoConfig::Mode::SCO_SWB) {
mPlatform.setBluetoothParameters("bt_swb=1");
}
}
if (in_config.debugName.has_value()) {
mScoConfig.debugName = in_config.debugName;
}
*_aidl_return = mScoConfig;
LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning "
<< _aidl_return->toString();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus Bluetooth::setHfpConfig(const HfpConfig& in_config, HfpConfig* _aidl_return) {
struct str_parms* parms = nullptr;
std::string kvpairs = "";
if (in_config.sampleRate.has_value() && in_config.sampleRate.value().value <= 0) {
LOG(ERROR) << __func__ << ": invalid sample rate: " << in_config.sampleRate.value().value;
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
if (in_config.volume.has_value() &&
(in_config.volume.value().value < static_cast<float>(HfpConfig::VOLUME_MIN) ||
in_config.volume.value().value > static_cast<float>(HfpConfig::VOLUME_MAX))) {
LOG(ERROR) << __func__ << ": invalid volume: " << in_config.volume.value().value;
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
if (in_config.isEnabled.has_value()) {
mHfpConfig.isEnabled = in_config.isEnabled;
std::string isEnabled = in_config.isEnabled.value().value?"true":"false";
kvpairs += "hfp_enable=" + isEnabled + ";";
}
if (in_config.sampleRate.has_value()) {
mHfpConfig.sampleRate = in_config.sampleRate;
kvpairs += "hfp_set_sampling_rate=" + std::to_string(in_config.sampleRate.value().value) + ";";
}
if (in_config.volume.has_value()) {
mHfpConfig.volume = in_config.volume;
kvpairs += "hfp_volume=" + std::to_string(in_config.volume.value().value) + ";";
}
if (!kvpairs.empty()) {
parms = str_parms_create_str(kvpairs.c_str());
mAudExt.audio_extn_set_parameters(parms);
#ifdef SEC_AUDIO_COMMON
if (parms)
str_parms_destroy(parms);
#endif
}
*_aidl_return = mHfpConfig;
LOG(DEBUG) << __func__ << ": received " << in_config.toString() << ", returning "
<< _aidl_return->toString();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothA2dp::isEnabled(bool* _aidl_return) {
*_aidl_return = mEnabled;
LOG(DEBUG) << __func__ << ": returning " << *_aidl_return;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothA2dp::setEnabled(bool in_enabled) {
mEnabled = in_enabled;
mEnabled == true ? mPlatform.setBluetoothParameters("A2dpSuspended=false")
: mPlatform.setBluetoothParameters("A2dpSuspended=true");
LOG(DEBUG) << __func__ << ": " << mEnabled;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothA2dp::supportsOffloadReconfiguration(bool* _aidl_return) {
bool supportReconfig = property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
!property_get_bool("persist.bluetooth.a2dp_offload.disabled", false);
*_aidl_return = supportReconfig;
LOG(DEBUG) << __func__ << ": returning " << *_aidl_return;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothA2dp::reconfigureOffload(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& in_parameters
__unused) {
LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(in_parameters);
mPlatform.setBluetoothParameters("reconfigA2dp=true");
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothLe::isEnabled(bool* _aidl_return) {
*_aidl_return = mEnabled;
LOG(DEBUG) << __func__ << ": returning " << *_aidl_return;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothLe::setEnabled(bool in_enabled) {
mEnabled = in_enabled;
mEnabled == true ? mPlatform.setBluetoothParameters("LeAudioSuspended=false")
: mPlatform.setBluetoothParameters("LeAudioSuspended=true");
LOG(DEBUG) << __func__ << ": " << mEnabled;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothLe::supportsOffloadReconfiguration(bool* _aidl_return) {
*_aidl_return = true;
LOG(DEBUG) << __func__ << ": returning " << *_aidl_return;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus BluetoothLe::reconfigureOffload(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& in_parameters
__unused) {
LOG(DEBUG) << __func__ << ": " << ::android::internal::ToString(in_parameters);
return ndk::ScopedAStatus::ok();
}
} // namespace qti::audio::core

View File

@@ -1,41 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_CoreService_QTI"
#include <android-base/logging.h>
#include <android-base/properties.h>
#include <android/binder_ibinder_platform.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <qti-audio-core/Module.h>
#include <qti-audio-core/ModulePrimary.h>
#include <cstdlib>
#include <ctime>
std::shared_ptr<::qti::audio::core::ModulePrimary> gModuleDefaultQti;
auto registerBinderAsService = [](auto &&binder, const std::string &serviceName) {
AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
binder_exception_t status = AServiceManager_addService(binder.get(), serviceName.c_str());
if (status != EX_NONE) {
LOG(ERROR) << __func__ << " failed to register " << serviceName << " ret:" << status;
} else {
LOG(INFO) << __func__ << " successfully registered " << serviceName << " ret:" << status;
}
};
void registerIModuleDefaultQti() {
gModuleDefaultQti = ndk::SharedRefBase::make<::qti::audio::core::ModulePrimary>();
const std::string kServiceName =
std::string(gModuleDefaultQti->descriptor).append("/").append("default");
registerBinderAsService(gModuleDefaultQti->asBinder(), kServiceName);
}
extern "C" __attribute__((visibility("default"))) int32_t registerServices() {
registerIModuleDefaultQti();
return STATUS_OK;
}

View File

@@ -1,79 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_HalOffloadEffects_QTI"
#include <android-base/logging.h>
#include <dlfcn.h>
#include <qti-audio-core/HalOffloadEffects.h>
namespace qti::audio::core {
HalOffloadEffects::HalOffloadEffects() {
loadLibrary(kOffloadPostProcBundlePath);
loadLibrary(kOffloadVisualizerPath);
// { SEC_AUDIO_SUPPORT_AIDL_EFFECT
loadLibrary(kSecOffloadEffectLibraryPath);
// } SEC_AUDIO_SUPPORT_AIDL_EFFECT
}
void HalOffloadEffects::loadLibrary(std::string path) {
// dlopen library and dlsym fptr.
std::function<void(void *)> dlClose = [](void *handle) -> void {
if (handle && dlclose(handle)) {
LOG(ERROR) << "dlclose failed " << dlerror();
}
};
auto libHandle =
std::unique_ptr<void, decltype(dlClose)>{dlopen(path.c_str(), RTLD_LAZY), dlClose};
if (!libHandle) {
LOG(ERROR) << __func__ << ": dlopen failed for " << path << " " << dlerror();
return;
}
// std::unique_ptr<struct OffloadEffectLibIntf> effectIntf;
auto effectIntf = new OffloadEffectLibIntf{nullptr, nullptr};
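// Note: effectIntf is a raw allocation; it is not freed on the early-return error paths
// below and its ownership only moves into mEffects on success.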
effectIntf->mStartEffect = (StartEffectFptr)dlsym(libHandle.get(), "startEffect");
if (!effectIntf->mStartEffect) {
LOG(ERROR) << "startEffect is missing in " << path << dlerror();
return;
}
effectIntf->mStopEffect = (StopEffectFptr)dlsym(libHandle.get(), "stopEffect");
if (!effectIntf->mStopEffect) {
LOG(ERROR) << "stopEffect is missing in " << path << dlerror();
return;
}
// { SEC_AUDIO_VOLUME_MONITOR
effectIntf->mUpdateEffect = (UpdateEffectFptr)dlsym(libHandle.get(), "updateEffect");
if (!effectIntf->mUpdateEffect) {
LOG(ERROR) << "updateEffect is missing in " << path << dlerror();
return;
}
// } SEC_AUDIO_VOLUME_MONITOR
LOG(DEBUG) << "found post proc library" << path;
mEffects.emplace_back(std::make_pair(std::move(libHandle),
std::unique_ptr<struct OffloadEffectLibIntf>(effectIntf)));
}
void HalOffloadEffects::startEffect(int ioHandle, pal_stream_handle_t *palHandle) {
for (const auto &effect : mEffects) {
effect.second->mStartEffect(ioHandle, palHandle);
}
}
void HalOffloadEffects::stopEffect(int ioHandle) {
for (const auto &effect : mEffects) {
effect.second->mStopEffect(ioHandle);
}
}
// { SEC_AUDIO_VOLUME_MONITOR
void HalOffloadEffects::updateEffect(pal_stream_handle_t * palHandle, int updateType) {
for (const auto &effect : mEffects) {
effect.second->mUpdateEffect(palHandle, updateType);
}
}
// } SEC_AUDIO_VOLUME_MONITOR
} // namespace qti::audio::core

File diff suppressed because it is too large

View File

@@ -1,102 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <vector>
#define LOG_TAG "AHAL_ModuleStub_QTI"
#include <Utils.h>
#include <android-base/logging.h>
#include <qti-audio-core/Bluetooth.h>
#include <qti-audio-core/ModuleStub.h>
#include <qti-audio-core/StreamStub.h>
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::MicrophoneInfo;
using ::aidl::android::hardware::audio::common::getFrameSizeInBytes;
using ::aidl::android::hardware::audio::common::isBitPositionFlagSet;
using ::aidl::android::hardware::audio::common::isValidAudioMode;
using ::aidl::android::hardware::audio::common::SinkMetadata;
using ::aidl::android::hardware::audio::common::SourceMetadata;
using ::aidl::android::hardware::audio::core::AudioPatch;
using ::aidl::android::hardware::audio::core::AudioRoute;
using ::aidl::android::hardware::audio::core::IBluetooth;
using ::aidl::android::hardware::audio::core::IBluetoothA2dp;
using ::aidl::android::hardware::audio::core::IBluetoothLe;
using ::aidl::android::hardware::audio::core::IStreamIn;
using ::aidl::android::hardware::audio::core::IStreamOut;
using ::aidl::android::hardware::audio::core::VendorParameter;
namespace qti::audio::core {
ndk::ScopedAStatus ModuleStub::getBluetooth(std::shared_ptr<IBluetooth>* _aidl_return) {
if (!mBluetooth) {
mBluetooth = ndk::SharedRefBase::make<Bluetooth>();
}
*_aidl_return = mBluetooth.getInstance();
LOG(DEBUG) << __func__
<< ": returning instance of IBluetooth: " << _aidl_return->get()->asBinder().get();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus ModuleStub::getBluetoothA2dp(std::shared_ptr<IBluetoothA2dp>* _aidl_return) {
if (!mBluetoothA2dp) {
mBluetoothA2dp = ndk::SharedRefBase::make<BluetoothA2dp>();
}
*_aidl_return = mBluetoothA2dp.getInstance();
LOG(DEBUG) << __func__ << ": returning instance of IBluetoothA2dp: "
<< _aidl_return->get()->asBinder().get();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus ModuleStub::getBluetoothLe(std::shared_ptr<IBluetoothLe>* _aidl_return) {
if (!mBluetoothLe) {
mBluetoothLe = ndk::SharedRefBase::make<BluetoothLe>();
}
*_aidl_return = mBluetoothLe.getInstance();
LOG(DEBUG) << __func__
<< ": returning instance of IBluetoothLe: " << _aidl_return->get()->asBinder().get();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus ModuleStub::createInputStream(StreamContext&& context,
const SinkMetadata& sinkMetadata,
const std::vector<MicrophoneInfo>& microphones,
std::shared_ptr<StreamIn>* result) {
return createStreamInstance<StreamInStub>(result, std::move(context), sinkMetadata,
microphones);
}
ndk::ScopedAStatus ModuleStub::createOutputStream(
StreamContext&& context, const SourceMetadata& sourceMetadata,
const std::optional<AudioOffloadInfo>& offloadInfo, std::shared_ptr<StreamOut>* result) {
return createStreamInstance<StreamOutStub>(result, std::move(context), sourceMetadata,
offloadInfo);
}
} // namespace qti::audio::core

View File

@@ -1,63 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_SoundDose_QTI"
#include <android-base/logging.h>
#include <qti-audio-core/SoundDose.h>
namespace qti::audio::core {
ndk::ScopedAStatus SoundDose::setOutputRs2UpperBound(float in_rs2ValueDbA) {
if (in_rs2ValueDbA < static_cast<float>(MIN_RS2) ||
in_rs2ValueDbA > static_cast<float>(DEFAULT_MAX_RS2)) {
LOG(ERROR) << __func__ << ": RS2 value is invalid: " << in_rs2ValueDbA;
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
mRs2Value = in_rs2ValueDbA;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus SoundDose::getOutputRs2UpperBound(float* _aidl_return) {
*_aidl_return = mRs2Value;
LOG(DEBUG) << __func__ << ": returning " << *_aidl_return;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus SoundDose::registerSoundDoseCallback(
const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& in_callback) {
if (in_callback.get() == nullptr) {
LOG(ERROR) << __func__ << ": Callback is nullptr";
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
if (mCallback != nullptr) {
LOG(ERROR) << __func__ << ": Sound dose callback was already registered";
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
mCallback = in_callback;
LOG(DEBUG) << __func__ << ": Registered sound dose callback ";
return ndk::ScopedAStatus::ok();
}
} // namespace qti::audio::core

File diff suppressed because it is too large

View File

@@ -1,159 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <cmath>
#define LOG_TAG "AHAL_StreamStub_QTI"
#include <android-base/logging.h>
#include <audio_utils/clock.h>
#include <qti-audio-core/Module.h>
#include <qti-audio-core/StreamStub.h>
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::media::audio::common::AudioDevice;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::MicrophoneInfo;
namespace qti::audio::core {
StreamStub::StreamStub(StreamContext* context, const Metadata& metadata)
: StreamCommonImpl(context, metadata),
mFrameSizeBytes(getContext().getFrameSize()),
mSampleRate(getContext().getSampleRate()),
mIsAsynchronous(!!getContext().getAsyncCallback()),
mIsInput(isInput(metadata)) {}
::android::status_t StreamStub::init() {
mIsInitialized = true;
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::drain(
::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode) {
if (!mIsInitialized) {
LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
}
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::flush() {
if (!mIsInitialized) {
LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
}
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::pause() {
if (!mIsInitialized) {
LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
}
usleep(500);
return ::android::OK;
}
::android::status_t StreamStub::standby() {
if (!mIsInitialized) {
LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
}
usleep(500);
mIsStandby = true;
return ::android::OK;
}
::android::status_t StreamStub::start() {
if (!mIsInitialized) {
LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
}
usleep(500);
mIsStandby = false;
return ::android::OK;
}
::android::status_t StreamStub::transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) {
if (!mIsInitialized) {
LOG(FATAL) << __func__ << ": must not happen for an uninitialized driver";
}
if (mIsStandby) {
LOG(FATAL) << __func__ << ": must not happen while in standby";
}
static constexpr float kMicrosPerSecond = MICROS_PER_SECOND;
static constexpr float kScaleFactor = .8f;
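// Emulate device pacing: the synchronous path below sleeps for roughly kScaleFactor (80%)
// of the buffer duration computed from frameCount and the sample rate.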
if (mIsAsynchronous) {
usleep(500);
} else {
const size_t delayUs = static_cast<size_t>(
std::roundf(kScaleFactor * frameCount * kMicrosPerSecond / mSampleRate));
usleep(delayUs);
}
if (mIsInput) {
uint8_t* byteBuffer = static_cast<uint8_t*>(buffer);
for (size_t i = 0; i < frameCount * mFrameSizeBytes; ++i) {
byteBuffer[i] = std::rand() % 255;
}
}
*actualFrameCount = frameCount;
return ::android::OK;
}
void StreamStub::shutdown() {
mIsInitialized = false;
}
StreamInStub::StreamInStub(StreamContext&& context, const SinkMetadata& sinkMetadata,
const std::vector<MicrophoneInfo>& microphones)
: StreamIn(std::move(context), microphones), StreamStub(&(StreamIn::mContext), sinkMetadata) {}
StreamInStub::~StreamInStub() {
LOG(DEBUG) << __func__ << ": destroy";
}
StreamOutStub::StreamOutStub(StreamContext&& context, const SourceMetadata& sourceMetadata,
const std::optional<AudioOffloadInfo>& offloadInfo)
: StreamOut(std::move(context), offloadInfo),
StreamStub(&(StreamOut::mContext), sourceMetadata) {}
StreamOutStub::~StreamOutStub() {
LOG(DEBUG) << __func__ << ": destroy";
}
int32_t StreamOutStub::setAggregateSourceMetadata(bool /*value*/) {
return 0;
}
int32_t StreamInStub::setAggregateSinkMetadata(bool /*value*/) {
return 0;
}
ndk::ScopedAStatus StreamInStub::reconfigureConnectedDevices() {
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus StreamOutStub::reconfigureConnectedDevices() {
return ndk::ScopedAStatus::ok();
}
} // namespace qti::audio::core

File diff suppressed because it is too large

View File

@@ -1,170 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libaudiocore.extension
LOCAL_VENDOR_MODULE := true
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include \
$(LOCAL_PATH)/../platform/include \
$(LOCAL_PATH)/../utils/include
LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
LOCAL_CFLAGS := -Wall -Wextra -Werror -Wthread-safety
LOCAL_SRC_FILES := \
AudioExtension.cpp
LOCAL_HEADER_LIBRARIES := \
libaudioclient_headers \
libmedia_helper_headers \
libexpectedutils_headers
LOCAL_SHARED_LIBRARIES := \
libaudioaidlcommon \
libbase \
libbinder_ndk \
libcutils \
libfmq \
liblog \
libmedia_helper \
libstagefright_foundation \
libutils \
libxml2 \
android.hardware.common-V2-ndk \
android.hardware.common.fmq-V1-ndk \
android.media.audio.common.types-V3-ndk \
android.hardware.audio.core-V2-ndk \
qti-audio-types-aidl-V1-ndk \
libar-pal
# { SEC_AUDIO_COMMON
SEC_AUDIO_VARS := vendor/samsung/variant/audio/sec_audioreach_vars.mk
include $(SEC_AUDIO_VARS)
# } SEC_AUDIO_COMMON
include $(BUILD_STATIC_LIBRARY)
#-------------------------------------------
# Build HFP LIB
#-------------------------------------------
include $(CLEAR_VARS)
LOCAL_MODULE := libhfp_pal
LOCAL_VENDOR_MODULE := true
ifeq ($(TARGET_BOARD_AUTO),true)
LOCAL_CFLAGS += -DPLATFORM_AUTO
endif
LOCAL_SRC_FILES:= Hfp.cpp
LOCAL_CFLAGS += \
-Wall \
-Werror \
-Wno-unused-function \
-Wno-unused-variable
LOCAL_CPPFLAGS += -fexceptions
LOCAL_SHARED_LIBRARIES := \
libaudioroute \
libbase \
liblog \
libaudioutils \
libcutils \
libdl \
libexpat \
liblog \
libar-pal
LOCAL_C_INCLUDES := \
$(TOP)/vendor/qcom/opensource/pal \
$(TOP)/vendor/qcom/opensource/audio-hal/primary-hal/hal \
$(TOP)/external/expat/lib \
$(TOP)/system/media/audio_utils/include \
$(call include-path-for, audio-route) \
LOCAL_HEADER_LIBRARIES += libhardware_headers
LOCAL_HEADER_LIBRARIES += libsystem_headers
include $(BUILD_SHARED_LIBRARY)
#-------------------------------------------
# Build FM LIB
#-------------------------------------------
include $(CLEAR_VARS)
LOCAL_MODULE := libfmpal
LOCAL_VENDOR_MODULE := true
LOCAL_SRC_FILES:= FM.cpp
LOCAL_CFLAGS += \
-Wall \
-Werror \
-Wno-unused-function \
-Wno-unused-variable
LOCAL_SHARED_LIBRARIES := \
libaudioroute \
libbase \
liblog \
libaudioutils \
libcutils \
libdl \
libexpat \
liblog \
libar-pal
LOCAL_C_INCLUDES := \
$(TOP)/vendor/qcom/opensource/pal \
$(TOP)/vendor/qcom/opensource/audio-hal/primary-hal/hal \
$(TOP)/vendor/qcom/opensource/audio-hal/primary-hal/hal/core/extensions/include \
$(TOP)/external/expat/lib \
$(TOP)/system/media/audio_utils/include \
$(call include-path-for, audio-route) \
# { SEC_AUDIO_COMMON
SEC_AUDIO_VARS := vendor/samsung/variant/audio/sec_audioreach_vars.mk
include $(SEC_AUDIO_VARS)
# } SEC_AUDIO_COMMON
LOCAL_HEADER_LIBRARIES += libhardware_headers
LOCAL_HEADER_LIBRARIES += libsystem_headers
include $(BUILD_SHARED_LIBRARY)
#-------------------------------------------
# Build BATTERY_LISTENER
#-------------------------------------------
include $(CLEAR_VARS)
LOCAL_MODULE := libbatterylistener
LOCAL_VENDOR_MODULE := true
LOCAL_SRC_FILES:= battery_listener.cpp
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_CFLAGS := \
-Wall \
-Werror \
-Wno-unused-function \
-Wno-unused-variable
LOCAL_SHARED_LIBRARIES := \
android.hardware.health@1.0 \
android.hardware.health@2.0 \
android.hardware.health@2.1 \
android.hardware.power@1.2 \
android.hardware.health-V1-ndk \
libbinder_ndk \
libaudioutils \
libbase \
libcutils \
libdl \
libhidlbase \
liblog \
libutils \
LOCAL_STATIC_LIBRARIES := libhealthhalutils
include $(BUILD_SHARED_LIBRARY)

View File

@@ -1,373 +0,0 @@
/*
* Copyright (c) 2012-2021, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Changes from Qualcomm Innovation Center are provided under the following license:
*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_FM_QTI"
#define LOG_NDDEBUG 0
#include <android-base/logging.h>
#include <cutils/properties.h>
#include <cutils/str_parms.h>
#include <errno.h>
#include <log/log.h>
#include <math.h>
#include <unistd.h>
#include "PalApi.h"
#ifdef DYNAMIC_LOG_ENABLED
#include <log_xml_parser.h>
#define LOG_MASK HAL_MOD_FILE_FM
#include <log_utils.h>
#endif
#ifdef SEC_AUDIO_FMRADIO
#include "SecPalDefs.h"
#endif
#ifdef __cplusplus
extern "C" {
#endif
#define AUDIO_PARAMETER_KEY_HANDLE_FM "handle_fm"
#define AUDIO_PARAMETER_KEY_FM_VOLUME "fm_volume"
#define AUDIO_PARAMETER_KEY_REC_PLAY_CONC "rec_play_conc_on"
#define AUDIO_PARAMETER_KEY_FM_MUTE "fm_mute"
#define AUDIO_PARAMETER_KEY_FM_RESTORE_VOLUME "fm_restore_volume"
#define AUDIO_PARAMETER_KEY_FM_ROUTING "fm_routing"
#define AUDIO_PARAMETER_KEY_FM_STATUS "fm_status"
#define FM_LOOPBACK_DRAIN_TIME_MS 2
#define CHANNELS 2
#define BIT_WIDTH 16
#define SAMPLE_RATE 48000
struct fm_module {
bool running;
bool muted;
bool restart;
float volume;
// audio_devices_t device;
pal_stream_handle_t *stream_handle;
};
#ifdef SEC_AUDIO_FMRADIO
static int usb_card_id = -1;
static int usb_device_num = -1;
#endif
static struct fm_module fm = {.running = 0,
.muted = 0,
.restart = 0,
.volume = 0,
//.device = (audio_devices_t)0,
.stream_handle = 0};
int32_t fm_set_volume(float value, bool persist = false) {
int32_t ret = 0;
struct pal_volume_data *pal_volume = NULL;
#if defined(SEC_AUDIO_FMRADIO) && \
(defined(SEC_AUDIO_SUPPORT_SOUNDBOOSTER_ON_DSP) || defined(SEC_AUDIO_PREVOLUME_SOUNDBOOSTER))
LOG(VERBOSE) << __func__ << " Enter: volume = " << value << " persist: " << persist;
return ret;
#else
LOG(DEBUG) << __func__ << " Enter: volume = " << value << " persist: " << persist;
#endif
if (value < 0.0) {
LOG(DEBUG) << __func__ << " Under 0.0, assuming 0.0, got " << value;
value = 0.0;
} else if (value > 1.0) {
LOG(DEBUG) << __func__ << " Over 1.0, assuming 1.0, got " << value;
value = 1.0;
}
if (persist) fm.volume = value;
if (fm.muted && value > 0) {
LOG(DEBUG) << __func__ << " fm is muted, applying '0' volume instead of " << value;
value = 0;
}
if (!fm.running) {
LOG(VERBOSE) << __func__ << " FM not active, ignoring set_volume call";
return -EIO;
}
LOG(DEBUG) << __func__ << " Setting FM volume to " << value;
pal_volume = (struct pal_volume_data *)malloc(sizeof(struct pal_volume_data) +
sizeof(struct pal_channel_vol_kv));
if (!pal_volume) return -ENOMEM;
pal_volume->no_of_volpair = 1;
pal_volume->volume_pair[0].channel_mask = 0x03;
pal_volume->volume_pair[0].vol = value;
ret = pal_stream_set_volume(fm.stream_handle, pal_volume);
if (ret) LOG(ERROR) << __func__ << " set volume failed: " << ret;
free(pal_volume);
LOG(DEBUG) << __func__ << " exit";
return ret;
}
int32_t fm_start(int device_id) {
int32_t ret = 0;
const int num_pal_devs = 2;
struct pal_stream_attributes stream_attr;
struct pal_channel_info ch_info;
struct pal_device pal_devs[num_pal_devs];
pal_device_id_t pal_device_id = PAL_DEVICE_OUT_SPEAKER;
LOG(DEBUG) << __func__ << " Enter";
#ifdef SEC_AUDIO_FMRADIO
fm.running = false;
if (device_id == AUDIO_DEVICE_OUT_USB_HEADSET) {
pal_param_fmradio_usb_gain_t param_fmradio_usb_gain;
param_fmradio_usb_gain.enable = true;
pal_set_param(PAL_PARAM_ID_FMRADIO_USB_GAIN, (void *)&param_fmradio_usb_gain,
sizeof(pal_param_fmradio_usb_gain_t));
fm.running = true;
LOG(DEBUG) << __func__ << " Exit";
return ret;
}
#endif
if (device_id == 2) // AUDIO_DEVICE_OUT_SPEAKER)
pal_device_id = PAL_DEVICE_OUT_SPEAKER;
else if (device_id == 4) // AUDIO_DEVICE_OUT_WIRED_HEADSET)
pal_device_id = PAL_DEVICE_OUT_WIRED_HEADSET;
else if (device_id == 8) // AUDIO_DEVICE_OUT_WIRED_HEADPHONE)
pal_device_id = PAL_DEVICE_OUT_WIRED_HEADPHONE;
else {
LOG(DEBUG) << __func__ << " Unsupported device_id " << device_id;
return -EINVAL;
}
ch_info.channels = CHANNELS;
ch_info.ch_map[0] = PAL_CHMAP_CHANNEL_FL;
ch_info.ch_map[1] = PAL_CHMAP_CHANNEL_FR;
stream_attr.type = PAL_STREAM_LOOPBACK;
stream_attr.info.opt_stream_info.loopback_type = PAL_STREAM_LOOPBACK_FM;
stream_attr.direction = PAL_AUDIO_INPUT_OUTPUT;
stream_attr.in_media_config.sample_rate = SAMPLE_RATE;
stream_attr.in_media_config.bit_width = BIT_WIDTH;
stream_attr.in_media_config.ch_info = ch_info;
stream_attr.in_media_config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
stream_attr.out_media_config.sample_rate = SAMPLE_RATE;
stream_attr.out_media_config.bit_width = BIT_WIDTH;
stream_attr.out_media_config.ch_info = ch_info;
stream_attr.out_media_config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
for (int i = 0; i < 2; ++i) {
// TODO: remove hardcoded device id & pass adev to getPalDeviceIds instead
pal_devs[i].id = i ? PAL_DEVICE_IN_FM_TUNER : pal_device_id;
pal_devs[i].config.sample_rate = SAMPLE_RATE;
pal_devs[i].config.bit_width = BIT_WIDTH;
pal_devs[i].config.ch_info = ch_info;
pal_devs[i].config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
}
#ifdef SEC_AUDIO_FMRADIO
if (pal_devs[0].id == PAL_DEVICE_OUT_SPEAKER) {
strcpy(pal_devs[0].custom_config.custom_key, ck_table[CUSTOM_KEY_SPEAKER_FM]);
pal_devs[1].id = PAL_DEVICE_IN_USB_HEADSET;
//Configure USB Digital Headset parameters
pal_param_device_capability_t *device_cap_query = (pal_param_device_capability_t *)
malloc(sizeof(pal_param_device_capability_t));
if (!device_cap_query) {
LOG(ERROR) << __func__ << "Failed to allocate mem for device_cap_query";
return -ENOMEM;
}
dynamic_media_config_t dynamic_media_config;
size_t payload_size = 0;
device_cap_query->id = PAL_DEVICE_IN_USB_HEADSET;
device_cap_query->is_playback = false;
device_cap_query->addr.card_id = usb_card_id;
device_cap_query->addr.device_num = usb_device_num;
device_cap_query->config = &dynamic_media_config;
pal_get_param(PAL_PARAM_ID_DEVICE_CAPABILITY,
(void **)&device_cap_query,
&payload_size, nullptr);
pal_devs[1].address.card_id = usb_card_id;
pal_devs[1].address.device_num = usb_device_num;
pal_devs[1].config.sample_rate = dynamic_media_config.sample_rate[0];
pal_devs[1].config.ch_info = ch_info;
pal_devs[1].config.aud_fmt_id = (pal_audio_fmt_t)dynamic_media_config.format[0];
strcpy(pal_devs[1].custom_config.custom_key, ck_table[CUSTOM_KEY_SPEAKER_FM]);
free(device_cap_query);
}
#endif
ret = pal_stream_open(&stream_attr, num_pal_devs, pal_devs, 0, NULL, NULL, 0,
&fm.stream_handle);
if (ret) {
LOG(ERROR) << __func__ << " stream open failed with: " << ret;
return ret;
}
ret = pal_stream_start(fm.stream_handle);
if (ret) {
LOG(ERROR) << __func__ << " stream start failed with: " << ret;
pal_stream_close(fm.stream_handle);
#ifdef SEC_AUDIO_FMRADIO
fm.stream_handle = NULL;
#endif
return ret;
}
fm.running = true;
fm_set_volume(fm.volume, true);
LOG(DEBUG) << __func__ << " Exit";
return ret;
}
int32_t fm_stop() {
LOG(DEBUG) << __func__ << " enter";
if (!fm.running) {
LOG(ERROR) << __func__ << " FM not in running state...";
return -EINVAL;
}
if (fm.stream_handle) {
pal_stream_stop(fm.stream_handle);
pal_stream_close(fm.stream_handle);
}
#ifdef SEC_AUDIO_FMRADIO
else { // if fm radio is playing via usb headset, fm.stream_handle is not created.
pal_param_fmradio_usb_gain_t param_fmradio_usb_gain;
param_fmradio_usb_gain.enable = false;
pal_set_param(PAL_PARAM_ID_FMRADIO_USB_GAIN, (void *)&param_fmradio_usb_gain,
sizeof(pal_param_fmradio_usb_gain_t));
}
#endif
fm.stream_handle = NULL;
fm.running = false;
LOG(DEBUG) << __func__ << " Exit";
return 0;
}
bool fm_get_running_status() {
LOG(DEBUG) << __func__ << " enter";
return fm.running;
}
void fm_set_parameters(struct str_parms *parms) {
int ret, val, num_pal_devs;
pal_device_id_t *pal_devs;
char value[32] = {0};
float vol = 0.0;
ret = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_HANDLE_FM, value, sizeof(value));
if (ret >= 0) {
val = atoi(value);
LOG(DEBUG) << __func__ << " FM usecase";
if (val) {
#ifdef SEC_AUDIO_FMRADIO
ret = str_parms_get_str(parms, "usb_card_id", value, sizeof(value));
if (ret >= 0) {
usb_card_id = atoi(value);
ret = str_parms_get_str(parms, "usb_device_num", value, sizeof(value));
if (ret >= 0) {
usb_device_num = atoi(value);
}
}
#endif
if (val & 0x00100000 /*AUDIO_DEVICE_OUT_FM*/ && !fm.running)
fm_start(val & ~(0x00100000) /*AUDIO_DEVICE_OUT_FM*/);
else if (!(val & 0x00100000 /*AUDIO_DEVICE_OUT_FM*/) && fm.running) {
fm_set_volume(0, false);
usleep(FM_LOOPBACK_DRAIN_TIME_MS * 1000);
fm_stop();
}
}
}
ret = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_FM_ROUTING, value, sizeof(value));
if (ret >= 0 && fm.running) {
val = atoi(value);
#ifdef SEC_AUDIO_ADD_FOR_DEBUG
LOG(DEBUG) << __func__ << " Param: routing";
#else
LOG(DEBUG) << __func__ << " FM usecase";
#endif
if (val && (val & 0x00100000 /*AUDIO_DEVICE_OUT_FM*/)) {
fm_set_volume(0, false);
fm_stop();
fm_start(val & ~(0x00100000) /*AUDIO_DEVICE_OUT_FM*/);
}
}
memset(value, 0, sizeof(value));
ret = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_FM_VOLUME, value, sizeof(value));
if (ret >= 0) {
LOG(DEBUG) << __func__ << " Param: set volume";
if (sscanf(value, "%f", &vol) != 1) {
LOG(ERROR) << __func__ << " error in retrieving fm volume";
return;
}
fm_set_volume(vol, true);
}
ret = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_FM_MUTE, value, sizeof(value));
if (ret >= 0) {
LOG(DEBUG) << __func__ << " Param: mute";
fm.muted = (value[0] == '1');
if (fm.muted)
fm_set_volume(0);
else
fm_set_volume(fm.volume);
}
ret = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_FM_RESTORE_VOLUME, value, sizeof(value));
if (ret >= 0) {
LOG(DEBUG) << __func__ << " Param: restore volume";
if (value[0] == '1') fm_set_volume(fm.volume);
}
}
#ifdef __cplusplus
}
#endif

View File

@@ -1,506 +0,0 @@
/*
* Copyright (c) 2012-2020, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Changes from Qualcomm Innovation Center are provided under the following license:
*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_HFP_QTI"
#define LOG_NDDEBUG 0
#include <android-base/logging.h>
#include <cutils/properties.h>
#include <cutils/str_parms.h>
#include <dirent.h>
#include <dlfcn.h>
#include <errno.h>
#include <fcntl.h>
#include <math.h>
#include <stdlib.h>
#include <sys/stat.h>
#include "PalApi.h"
#ifdef __cplusplus
extern "C" {
#endif
#define AUDIO_PARAMETER_HFP_ENABLE "hfp_enable"
#define AUDIO_PARAMETER_HFP_SET_SAMPLING_RATE "hfp_set_sampling_rate"
#define AUDIO_PARAMETER_KEY_HFP_VOLUME "hfp_volume"
#define AUDIO_PARAMETER_HFP_PCM_DEV_ID "hfp_pcm_dev_id"
#define AUDIO_PARAMETER_KEY_HFP_MIC_VOLUME "hfp_mic_volume"
struct hfp_module {
bool is_hfp_running;
float hfp_volume;
float mic_volume;
bool mic_mute;
uint32_t sample_rate;
pal_stream_handle_t *rx_stream_handle;
pal_stream_handle_t *tx_stream_handle;
};
#define PLAYBACK_VOLUME_MAX 0x2000
#define CAPTURE_VOLUME_DEFAULT (15.0)
static struct hfp_module hfpmod = {
.is_hfp_running = 0,
.hfp_volume = 0,
.mic_volume = CAPTURE_VOLUME_DEFAULT,
.mic_mute = 0,
.sample_rate = 16000,
};
static int32_t hfp_set_volume(float value) {
int32_t ret = 0;
struct pal_volume_data *pal_volume = NULL;
LOG(VERBOSE) << __func__ << " entry";
hfpmod.hfp_volume = value;
if (!hfpmod.is_hfp_running) {
LOG(VERBOSE) << __func__ << " HFP not active, ignoring set_hfp_volume call";
return -EIO;
}
LOG(DEBUG) << __func__ << " Setting HFP volume to " << value;
pal_volume = (struct pal_volume_data *)malloc(sizeof(struct pal_volume_data) +
sizeof(struct pal_channel_vol_kv));
if (!pal_volume) return -ENOMEM;
pal_volume->no_of_volpair = 1;
pal_volume->volume_pair[0].channel_mask = 0x03;
pal_volume->volume_pair[0].vol = value;
ret = pal_stream_set_volume(hfpmod.rx_stream_handle, pal_volume);
if (ret) LOG(ERROR) << __func__ << " set volume failed: " << ret;
free(pal_volume);
LOG(VERBOSE) << __func__ << " exit";
return ret;
}
/* Set mic volume to value.
 *
 * This interface is used for mic volume control; the volume value ranges from 0 to 15.
 */
static int hfp_set_mic_volume(float value) {
int volume, ret = 0;
struct pal_volume_data *pal_volume = NULL;
LOG(DEBUG) << __func__ << " enter value= " << value;
if (!hfpmod.is_hfp_running) {
LOG(ERROR) << __func__ << " HFP not active, ignoring set_hfp_mic_volume call";
return -EIO;
}
if (value < 0.0) {
LOG(DEBUG) << __func__ << " " << value << " Under 0.0, assuming 0.0";
value = 0.0;
} else if (value > CAPTURE_VOLUME_DEFAULT) {
value = CAPTURE_VOLUME_DEFAULT;
LOG(DEBUG) << __func__ << " Volume brought within range " << value;
}
value = value / CAPTURE_VOLUME_DEFAULT;
volume = (int)(value * PLAYBACK_VOLUME_MAX);
pal_volume = (struct pal_volume_data *)malloc(sizeof(struct pal_volume_data) +
sizeof(struct pal_channel_vol_kv));
if (!pal_volume) {
LOG(ERROR) << __func__ << " Failed to allocate memory for pal_volume";
return -ENOMEM;
}
pal_volume->no_of_volpair = 1;
pal_volume->volume_pair[0].channel_mask = 0x03;
pal_volume->volume_pair[0].vol = value;
if (pal_stream_set_volume(hfpmod.tx_stream_handle, pal_volume) < 0) {
LOG(ERROR) << __func__ << " Couldn't set HFP Volume " << volume;
free(pal_volume);
pal_volume = NULL;
return -EINVAL;
}
free(pal_volume);
pal_volume = NULL;
return ret;
}
static float hfp_get_mic_volume(void) {
return hfpmod.mic_volume;
}
static int32_t start_hfp(struct str_parms *parms __unused) {
int32_t ret = 0;
uint32_t no_of_devices = 2;
struct pal_stream_attributes stream_attr = {};
struct pal_stream_attributes stream_tx_attr = {};
struct pal_device devices[2] = {};
struct pal_channel_info ch_info;
LOG(DEBUG) << __func__ << " HFP start enter";
if (hfpmod.rx_stream_handle || hfpmod.tx_stream_handle) return 0; // hfp already running;
pal_param_device_connection_t param_device_connection;
param_device_connection.id = PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
param_device_connection.connection_state = true;
ret = pal_set_param(PAL_PARAM_ID_DEVICE_CONNECTION, (void *)&param_device_connection,
sizeof(pal_param_device_connection_t));
if (ret != 0) {
LOG(ERROR) << __func__ << " Set PAL_PARAM_ID_DEVICE_CONNECTION for "
<< param_device_connection.id << " failed";
return ret;
}
param_device_connection.id = PAL_DEVICE_OUT_BLUETOOTH_SCO;
param_device_connection.connection_state = true;
ret = pal_set_param(PAL_PARAM_ID_DEVICE_CONNECTION, (void *)&param_device_connection,
sizeof(pal_param_device_connection_t));
if (ret != 0) {
LOG(ERROR) << __func__ << " Set PAL_PARAM_ID_DEVICE_CONNECTION for "
<< param_device_connection.id << " failed";
return ret;
}
pal_param_btsco_t param_btsco;
param_btsco.is_bt_hfp = true;
param_btsco.bt_sco_on = true;
ret = pal_set_param(PAL_PARAM_ID_BT_SCO, (void *)&param_btsco, sizeof(pal_param_btsco_t));
if (ret != 0) {
LOG(ERROR) << __func__ << " Set PAL_PARAM_ID_BT_SCO failed";
return ret;
}
if (hfpmod.sample_rate == 16000) {
param_btsco.bt_wb_speech_enabled = true;
} else {
param_btsco.bt_wb_speech_enabled = false;
}
ret = pal_set_param(PAL_PARAM_ID_BT_SCO_WB, (void *)&param_btsco, sizeof(pal_param_btsco_t));
if (ret != 0) {
LOG(ERROR) << __func__ << " Set PAL_PARAM_ID_BT_SCO_WB failed";
return ret;
}
ch_info.channels = 1;
ch_info.ch_map[0] = PAL_CHMAP_CHANNEL_FL;
/* BT SCO -> Spkr */
stream_attr.type = PAL_STREAM_LOOPBACK;
stream_attr.info.opt_stream_info.loopback_type = PAL_STREAM_LOOPBACK_HFP_RX;
stream_attr.direction = PAL_AUDIO_INPUT_OUTPUT;
stream_attr.in_media_config.sample_rate = hfpmod.sample_rate;
stream_attr.in_media_config.bit_width = 16;
stream_attr.in_media_config.ch_info = ch_info;
stream_attr.in_media_config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
stream_attr.out_media_config.sample_rate = 48000;
stream_attr.out_media_config.bit_width = 16;
stream_attr.out_media_config.ch_info = ch_info;
stream_attr.out_media_config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
devices[0].id = PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
devices[0].config.sample_rate = hfpmod.sample_rate;
devices[0].config.bit_width = 16;
devices[0].config.ch_info = ch_info;
devices[0].config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
devices[1].id = PAL_DEVICE_OUT_SPEAKER;
ret = pal_stream_open(&stream_attr, no_of_devices, devices, 0, NULL, NULL, 0,
&hfpmod.rx_stream_handle);
if (ret != 0) {
LOG(ERROR) << __func__ << " HFP rx stream (BT SCO->Spkr) open failed, rc " << ret;
return ret;
}
ret = pal_stream_start(hfpmod.rx_stream_handle);
if (ret != 0) {
LOG(ERROR) << __func__ << " HFP rx stream (BT SCO->Spkr) open failed, rc " << ret;
pal_stream_close(hfpmod.rx_stream_handle);
return ret;
}
/* Mic -> BT SCO */
stream_tx_attr.type = PAL_STREAM_LOOPBACK;
stream_tx_attr.info.opt_stream_info.loopback_type = PAL_STREAM_LOOPBACK_HFP_TX;
stream_tx_attr.direction = PAL_AUDIO_INPUT_OUTPUT;
stream_tx_attr.in_media_config.sample_rate = hfpmod.sample_rate;
stream_tx_attr.in_media_config.bit_width = 16;
stream_tx_attr.in_media_config.ch_info = ch_info;
stream_tx_attr.in_media_config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
stream_tx_attr.out_media_config.sample_rate = 48000;
stream_tx_attr.out_media_config.bit_width = 16;
stream_tx_attr.out_media_config.ch_info = ch_info;
stream_tx_attr.out_media_config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
devices[0].id = PAL_DEVICE_OUT_BLUETOOTH_SCO;
devices[0].config.sample_rate = hfpmod.sample_rate;
devices[0].config.bit_width = 16;
devices[0].config.ch_info = ch_info;
devices[0].config.aud_fmt_id = PAL_AUDIO_FMT_PCM_S16_LE;
devices[1].id = PAL_DEVICE_IN_SPEAKER_MIC;
ret = pal_stream_open(&stream_tx_attr, no_of_devices, devices, 0, NULL, NULL, 0,
&hfpmod.tx_stream_handle);
if (ret != 0) {
LOG(ERROR) << __func__ << " HFP tx stream (Mic->BT SCO) open failed, rc " << ret;
pal_stream_stop(hfpmod.rx_stream_handle);
pal_stream_close(hfpmod.rx_stream_handle);
hfpmod.rx_stream_handle = NULL;
return ret;
}
ret = pal_stream_start(hfpmod.tx_stream_handle);
if (ret != 0) {
LOG(ERROR) << __func__ << " HFP tx stream (Mic->BT SCO) open failed, rc " << ret;
pal_stream_close(hfpmod.tx_stream_handle);
pal_stream_stop(hfpmod.rx_stream_handle);
pal_stream_close(hfpmod.rx_stream_handle);
hfpmod.rx_stream_handle = NULL;
hfpmod.tx_stream_handle = NULL;
return ret;
}
hfpmod.mic_mute = false;
hfpmod.is_hfp_running = true;
hfp_set_volume(hfpmod.hfp_volume);
LOG(DEBUG) << __func__ << " HFP start end";
return ret;
}
static int32_t stop_hfp() {
int32_t ret = 0;
LOG(DEBUG) << __func__ << " HFP stop enter";
hfpmod.is_hfp_running = false;
if (hfpmod.rx_stream_handle) {
pal_stream_stop(hfpmod.rx_stream_handle);
pal_stream_close(hfpmod.rx_stream_handle);
hfpmod.rx_stream_handle = NULL;
}
if (hfpmod.tx_stream_handle) {
pal_stream_stop(hfpmod.tx_stream_handle);
pal_stream_close(hfpmod.tx_stream_handle);
hfpmod.tx_stream_handle = NULL;
}
pal_param_btsco_t param_btsco;
param_btsco.is_bt_hfp = true;
param_btsco.bt_sco_on = true;
ret = pal_set_param(PAL_PARAM_ID_BT_SCO, (void *)&param_btsco, sizeof(pal_param_btsco_t));
if (ret != 0) {
LOG(DEBUG) << __func__ << " Set PAL_PARAM_ID_BT_SCO failed";
}
pal_param_device_connection_t param_device_connection;
param_device_connection.id = PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
param_device_connection.connection_state = false;
ret = pal_set_param(PAL_PARAM_ID_DEVICE_CONNECTION, (void *)&param_device_connection,
sizeof(pal_param_device_connection_t));
if (ret != 0) {
LOG(ERROR) << __func__ << " Set PAL_PARAM_ID_DEVICE_DISCONNECTION for "
<< param_device_connection.id << " failed";
}
param_device_connection.id = PAL_DEVICE_OUT_BLUETOOTH_SCO;
param_device_connection.connection_state = false;
ret = pal_set_param(PAL_PARAM_ID_DEVICE_CONNECTION, (void *)&param_device_connection,
sizeof(pal_param_device_connection_t));
if (ret != 0) {
LOG(ERROR) << __func__ << " Set PAL_PARAM_ID_DEVICE_DISCONNECTION for "
<< param_device_connection.id << " failed";
}
LOG(DEBUG) << __func__ << "HFP stop end";
return ret;
}
void hfp_init() {
return;
}
bool hfp_is_active() {
return hfpmod.is_hfp_running;
}
bool is_valid_out_device(pal_device_id_t id) {
switch (id) {
case PAL_DEVICE_OUT_HANDSET:
case PAL_DEVICE_OUT_SPEAKER:
case PAL_DEVICE_OUT_WIRED_HEADSET:
case PAL_DEVICE_OUT_WIRED_HEADPHONE:
case PAL_DEVICE_OUT_USB_DEVICE:
case PAL_DEVICE_OUT_USB_HEADSET:
return true;
default:
return false;
}
}
bool is_valid_in_device(pal_device_id_t id) {
switch (id) {
case PAL_DEVICE_IN_HANDSET_MIC:
case PAL_DEVICE_IN_SPEAKER_MIC:
case PAL_DEVICE_IN_WIRED_HEADSET:
case PAL_DEVICE_IN_USB_DEVICE:
case PAL_DEVICE_IN_USB_HEADSET:
return true;
default:
return false;
}
}
bool has_valid_stream_handle() {
return (hfpmod.rx_stream_handle && hfpmod.tx_stream_handle);
}
void hfp_set_device(struct pal_device *devices) {
int rc = 0;
if (hfpmod.is_hfp_running && has_valid_stream_handle() &&
is_valid_out_device(devices[0].id) && is_valid_in_device(devices[1].id)) {
rc = pal_stream_set_device(hfpmod.rx_stream_handle, 1, &devices[0]);
if (!rc) {
rc = pal_stream_set_device(hfpmod.tx_stream_handle, 1, &devices[1]);
}
}
if (rc) {
LOG(ERROR) << __func__ << ": failed to set devices for hfp";
}
return;
}
/* Set mic mute state.
 *
 * This interface is used for mic mute state control.
 */
int hfp_set_mic_mute(bool state) {
int rc = 0;
if (state == hfpmod.mic_mute) {
LOG(DEBUG) << __func__ << " mic mute already " << state;
return rc;
}
rc = hfp_set_mic_volume((state == true) ? 0.0 : hfpmod.mic_volume);
if (rc == 0) hfpmod.mic_mute = state;
LOG(DEBUG) << __func__ << " Setting mute state " << state << " rc " << rc;
return rc;
}
int hfp_set_mic_mute2(bool state __unused) {
LOG(DEBUG) << __func__ << " Unsupported";
return 0;
}
void hfp_set_parameters(bool adev_mute, struct str_parms *parms) {
int status = 0;
char value[32] = {0};
float vol;
int val;
int rate;
LOG(DEBUG) << __func__ << " enter";
status = str_parms_get_str(parms, AUDIO_PARAMETER_HFP_ENABLE, value, sizeof(value));
if (status >= 0) {
if (!strncmp(value, "true", sizeof(value)) && !hfpmod.is_hfp_running) {
status = start_hfp(parms);
/*
* Sync to adev mic mute state if hfpmod.mic_mute state is lost due
* to HFP session tear down during device switch on companion device.
*/
if (hfpmod.mic_mute != adev_mute) {
LOG(DEBUG) << __func__ << " update mic mute with latest mute state " << adev_mute;
hfp_set_mic_mute(adev_mute);
}
} else if (!strncmp(value, "false", sizeof(value)) && hfpmod.is_hfp_running) {
stop_hfp();
} else {
LOG(ERROR) << __func__ << " hfp_enable " << value << " is unsupported";
}
}
memset(value, 0, sizeof(value));
status = str_parms_get_str(parms, AUDIO_PARAMETER_HFP_SET_SAMPLING_RATE, value, sizeof(value));
if (status >= 0) {
rate = atoi(value);
if (rate == 8000) {
hfpmod.sample_rate = (uint32_t)rate;
} else if (rate == 16000) {
hfpmod.sample_rate = (uint32_t)rate;
} else
LOG(ERROR) << __func__ << " Unsupported rate.. " << rate;
}
memset(value, 0, sizeof(value));
status = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_HFP_VOLUME, value, sizeof(value));
if (status >= 0) {
if (sscanf(value, "%f", &vol) != 1) {
LOG(ERROR) << __func__ << " error in retrieving hfp volume";
status = -EIO;
goto exit;
}
LOG(DEBUG) << __func__ << " set_hfp_volume usecase, Vol: " << vol;
hfp_set_volume(vol);
}
memset(value, 0, sizeof(value));
status = str_parms_get_str(parms, AUDIO_PARAMETER_KEY_HFP_MIC_VOLUME, value, sizeof(value));
if (status >= 0) {
if (sscanf(value, "%f", &vol) != 1) {
LOG(ERROR) << __func__ << " error in retrieving hfp mic volume";
status = -EIO;
goto exit;
}
LOG(DEBUG) << __func__ << " set_hfp_mic_volume usecase, Vol: " << vol;
if (hfp_set_mic_volume(vol) == 0) hfpmod.mic_volume = vol;
}
exit:
LOG(DEBUG) << __func__ << " Exit";
}
#ifdef __cplusplus
}
#endif

View File

@@ -1,243 +0,0 @@
/*
* Copyright (c) 2019, 2021 The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Changes from Qualcomm Innovation Center are provided under the following license:
*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_BatteryListener_QTI"
#include "extensions/battery_listener.h"
#include <aidl/android/hardware/health/BnHealthInfoCallback.h>
#include <aidl/android/hardware/health/IHealth.h>
#include <aidl/android/hardware/health/IHealthInfoCallback.h>
#include <android/binder_manager.h>
#include <log/log.h>
#include <thread>
using aidl::android::hardware::health::BatteryStatus;
using aidl::android::hardware::health::HealthInfo;
using aidl::android::hardware::health::IHealthInfoCallback;
using aidl::android::hardware::health::BnHealthInfoCallback;
using aidl::android::hardware::health::IHealth;
using namespace std::literals::chrono_literals;
namespace android {
#define GET_HEALTH_SVC_RETRY_CNT 5
#define GET_HEALTH_SVC_WAIT_TIME_MS 500
struct BatteryListenerImpl : public BnHealthInfoCallback {
typedef std::function<void(bool)> cb_fn_t;
BatteryListenerImpl(cb_fn_t cb);
~BatteryListenerImpl();
ndk::ScopedAStatus healthInfoChanged(const HealthInfo &info) override;
static void serviceDied(void *cookie);
bool isCharging() {
std::lock_guard<std::mutex> _l(mLock);
return statusToBool(mStatus);
}
void reset();
status_t init();
private:
std::shared_ptr<IHealth> mHealth;
BatteryStatus mStatus;
cb_fn_t mCb;
std::mutex mLock;
std::condition_variable mCond;
std::unique_ptr<std::thread> mThread;
ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
bool mDone;
bool statusToBool(const BatteryStatus &s) const {
return (s == BatteryStatus::CHARGING) || (s == BatteryStatus::FULL);
}
};
static std::shared_ptr<BatteryListenerImpl> batteryListener;
status_t BatteryListenerImpl::init() {
int tries = 0;
auto service_name = std::string() + IHealth::descriptor + "/default";
if (mHealth != NULL) return INVALID_OPERATION;
do {
mHealth = IHealth::fromBinder(
ndk::SpAIBinder(AServiceManager_getService(service_name.c_str())));
if (mHealth != NULL) break;
usleep(GET_HEALTH_SVC_WAIT_TIME_MS * 1000);
tries++;
} while (tries < GET_HEALTH_SVC_RETRY_CNT);
if (mHealth == NULL) {
ALOGE("no health service found, retries %d", tries);
return NO_INIT;
} else {
ALOGI("Get health service in %d tries", tries);
}
mStatus = BatteryStatus::UNKNOWN;
auto ret = mHealth->getChargeStatus(&mStatus);
if (!ret.isOk()) ALOGE("batterylistener: get charge status transaction error");
if (mStatus == BatteryStatus::UNKNOWN) ALOGW("batterylistener: init: invalid battery status");
mDone = false;
mThread = std::make_unique<std::thread>([this]() {
std::unique_lock<std::mutex> l(mLock);
BatteryStatus local_status = mStatus;
while (!mDone) {
if (local_status == mStatus) {
mCond.wait(l);
continue;
}
local_status = mStatus;
switch (local_status) {
// NOT_CHARGING is a special event that indicates a battery is connected
// but not charging. It is seen for approximately a second after the
// charger is plugged in, and a charging event is eventually received.
// We must try to avoid an unnecessary cb to the HAL only to call it
// again shortly. One option for this transient event would be to ignore
// it; another is to process it with a slight delay (i.e. cancel the
// event if a different event comes in within a timeout).
case BatteryStatus::NOT_CHARGING: {
auto mStatusnot_ncharging = [this, local_status]() {
return mStatus != local_status;
};
mCond.wait_for(l, 3s, mStatusnot_ncharging);
if (mStatusnot_ncharging()) // i.e event changed
break;
}
[[fallthrough]];
default:
bool c = statusToBool(local_status);
ALOGI("healthInfo cb thread: cb %s", c ? "CHARGING" : "NOT CHARGING");
l.unlock();
mCb(c);
l.lock();
break;
}
}
});
mHealth->registerCallback(batteryListener);
binder_status_t binder_status =
AIBinder_linkToDeath(mHealth->asBinder().get(), mDeathRecipient.get(), this);
if (binder_status != STATUS_OK) {
ALOGE("Failed to link to death, status %d", static_cast<int>(binder_status));
return NO_INIT;
}
return NO_ERROR;
}
BatteryListenerImpl::BatteryListenerImpl(cb_fn_t cb)
: mCb(cb), mDeathRecipient(AIBinder_DeathRecipient_new(BatteryListenerImpl::serviceDied)) {}
BatteryListenerImpl::~BatteryListenerImpl() {
{
std::lock_guard<std::mutex> _l(mLock);
mDone = true;
mCond.notify_one();
}
mThread->join();
}
void BatteryListenerImpl::reset() {
std::lock_guard<std::mutex> _l(mLock);
if (mHealth != nullptr) {
mHealth->unregisterCallback(batteryListener);
binder_status_t status =
AIBinder_unlinkToDeath(mHealth->asBinder().get(), mDeathRecipient.get(), this);
if (status != STATUS_OK && status != STATUS_DEAD_OBJECT) ALOGE("Cannot unlink to death");
}
mStatus = BatteryStatus::UNKNOWN;
mDone = true;
mCond.notify_one();
}
void BatteryListenerImpl::serviceDied(void *cookie) {
BatteryListenerImpl *listener = reinterpret_cast<BatteryListenerImpl *>(cookie);
{
std::lock_guard<std::mutex> _l(listener->mLock);
if (listener->mHealth == NULL) {
ALOGE("health not initialized");
return;
}
ALOGI("health service died, reinit");
listener->mDone = true;
listener->mCond.notify_one();
}
listener->mThread->join();
std::lock_guard<std::mutex> _l(listener->mLock);
listener->mHealth = NULL;
listener->init();
}
// This callback appears to be a synchronous callback and so waits for the
// return before the next event is issued. Therefore we do not need a queue
// to process NOT_CHARGING and CHARGING concurrently.
// Replace the single var with a list if this assumption is broken.
ndk::ScopedAStatus BatteryListenerImpl::healthInfoChanged(const HealthInfo &info) {
ALOGV("healthInfoChanged: %d", info.batteryStatus);
std::unique_lock<std::mutex> l(mLock);
if (info.batteryStatus != mStatus) {
mStatus = info.batteryStatus;
mCond.notify_one();
}
return ndk::ScopedAStatus::ok();
}
status_t batteryPropertiesListenerInit(BatteryListenerImpl::cb_fn_t cb) {
batteryListener = ndk::SharedRefBase::make<BatteryListenerImpl>(cb);
return batteryListener->init();
}
status_t batteryPropertiesListenerDeinit() {
batteryListener->reset();
return OK;
}
bool batteryPropertiesListenerIsCharging() {
return batteryListener->isCharging();
}
} // namespace android
extern "C" {
void battery_properties_listener_init(battery_status_change_fn_t fn) {
android::batteryPropertiesListenerInit([=](bool charging) { fn(charging); });
}
void battery_properties_listener_deinit() {
android::batteryPropertiesListenerDeinit();
}
bool battery_properties_is_charging() {
return android::batteryPropertiesListenerIsCharging();
}
} // extern C

View File

@@ -1,335 +0,0 @@
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <PalApi.h>
#include <cutils/properties.h>
#include <cutils/str_parms.h>
#include <memory>
#include <mutex>
#include <string>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <qti-audio-core/Platform.h>
#include "extensions/battery_listener.h"
#ifdef SEC_AUDIO_DSM_AMP
#ifdef SEC_AUDIO_VI_FEEDBACK
#define CODEC_BACKEND_FEEDBACK_BIT_WIDTH 24
#else
#define CODEC_BACKEND_FEEDBACK_BIT_WIDTH 16
#endif
#endif
typedef enum {
SESSION_UNKNOWN,
/** A2DP legacy that AVDTP media is encoded by Bluetooth Stack */
A2DP_SOFTWARE_ENCODING_DATAPATH,
/** The encoding of AVDTP media is done by HW and there is control only */
A2DP_HARDWARE_OFFLOAD_DATAPATH,
/** Used when encoded by Bluetooth Stack and streaming to Hearing Aid */
HEARING_AID_SOFTWARE_ENCODING_DATAPATH,
/** Used when encoded by Bluetooth Stack and streaming to LE Audio device */
LE_AUDIO_SOFTWARE_ENCODING_DATAPATH,
/** Used when decoded by Bluetooth Stack and streaming to audio framework */
LE_AUDIO_SOFTWARE_DECODED_DATAPATH,
/** Encoding is done by HW and there is control only */
LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH,
/** Decoding is done by HW and there is control only */
LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH,
/** SW Encoding for LE Audio Broadcast */
LE_AUDIO_BROADCAST_SOFTWARE_ENCODING_DATAPATH,
/** HW Encoding for LE Audio Broadcast */
LE_AUDIO_BROADCAST_HARDWARE_OFFLOAD_ENCODING_DATAPATH,
MAX,
} tSESSION_TYPE;
namespace qti::audio::core {
// RAII based classes to dlopen/dlsym on init and dlclose on destruction.
#ifdef __LP64__
static std::string kBluetoothIpcLibrary = "/vendor/lib64/btaudio_offload_if.so";
#else
static std::string kBluetoothIpcLibrary = "/vendor/lib/btaudio_offload_if.so";
#endif
static std::string kBatteryListenerLibrary = std::string("libbatterylistener.so");
static std::string kHfpLibrary = "libhfp_pal.so";
static std::string kFmLibrary = "libfmpal.so";
static std::string kKarokeLibrary = "dummy.so"; // TODO
static std::string kGefLibrary = "libqtigefar.so";
#ifdef SEC_AUDIO_COMMON
static std::string kDummyLibrary = "dummy.so";
#endif
static std::string kBatteryListenerProperty = "vendor.audio.feature.battery_listener.enable";
static std::string kHfpProperty = "vendor.audio.feature.hfp.enable";
static std::string kBluetoothProperty = "vendor.audio.feature.a2dp_offload.enable";
const std::map<tSESSION_TYPE, pal_device_id_t> SessionTypePalDevMap{
{A2DP_HARDWARE_OFFLOAD_DATAPATH, PAL_DEVICE_OUT_BLUETOOTH_A2DP},
{LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH, PAL_DEVICE_OUT_BLUETOOTH_BLE},
{LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH, PAL_DEVICE_IN_BLUETOOTH_BLE},
{LE_AUDIO_BROADCAST_HARDWARE_OFFLOAD_ENCODING_DATAPATH,
PAL_DEVICE_OUT_BLUETOOTH_BLE_BROADCAST},
};
typedef enum {
/**If reconfiguration is in progress state */
SESSION_SUSPEND,
/**If reconfiguration is in complete state */
SESSION_RESUME,
/**To set Lc3 channel mode as Mono */
CHANNEL_MONO,
/**To set LC3 channel mode as Stereo */
CHANNEL_STEREO,
} tRECONFIG_STATE;
const std::map<int32_t, std::string> reconfigStateName{
{SESSION_SUSPEND, std::string{"SESSION_SUSPEND"}},
{SESSION_RESUME, std::string{"SESSION_RESUME"}},
{CHANNEL_MONO, std::string{"CHANNEL_MONO"}},
{CHANNEL_STEREO, std::string{"CHANNEL_STEREO"}},
};
typedef void (*batt_listener_init_t)(battery_status_change_fn_t);
typedef void (*batt_listener_deinit_t)();
typedef bool (*batt_prop_is_charging_t)();
typedef void (*set_parameters_t)(struct str_parms*);
typedef void (*hfp_set_parameters_t)(bool val, struct str_parms*);
typedef void (*get_parameters_t)(struct str_parms*, struct str_parms*);
typedef bool (*fm_running_status_t)();
typedef void (*hfp_init_t)();
typedef bool (*hfp_is_active_t)();
typedef int (*hfp_get_usecase_t)();
typedef int (*hfp_set_mic_mute_t)(bool state);
typedef int (*hfp_set_mic_mute2_t)(bool state);
typedef void (*hfp_set_device_t)(struct pal_device *devices);
typedef void (*a2dp_bt_audio_pre_init_t)(void);
typedef void (*register_reconfig_cb_t)(int (*reconfig_cb)(tSESSION_TYPE, int));
typedef void (*gef_init_t)(void);
typedef void (*gef_deinit_t)(void);
static bool isExtensionEnabled(std::string property) {
return property_get_bool(property.c_str(), false);
}
class AudioExtensionBase {
public:
AudioExtensionBase(std::string library, bool enabled = true);
~AudioExtensionBase();
#ifdef SEC_AUDIO_SUPPORT_USB_OFFLOAD
static void setUSBCardConfig(pal_usb_device_address addr) { mUsbAddr = addr; }
#endif
protected:
void* mHandle = nullptr;
bool mEnabled;
#ifdef SEC_AUDIO_SUPPORT_USB_OFFLOAD
static struct pal_usb_device_address mUsbAddr;
#endif
std::string mLibraryName;
Platform& mPlatform{Platform::getInstance()};
private:
void cleanUp();
};
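// A minimal sketch of the RAII dlopen/dlsym pattern described above; the constructor
// body shown here is an illustrative assumption, not the actual implementation
// (which lives in the corresponding .cpp), so it is kept as a comment.
//
//   AudioExtensionBase::AudioExtensionBase(std::string library, bool enabled)
//       : mEnabled(enabled), mLibraryName(std::move(library)) {
//       if (!mEnabled) return;
//       mHandle = dlopen(mLibraryName.c_str(), RTLD_NOW);    // open on init
//       if (mHandle == nullptr) {
//           LOG(ERROR) << "dlopen failed for " << mLibraryName << ": " << dlerror();
//       }
//   }
//   AudioExtensionBase::~AudioExtensionBase() {
//       if (mHandle != nullptr) dlclose(mHandle);             // close on destruction
//   }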
class BatteryListenerExtension : public AudioExtensionBase {
public:
BatteryListenerExtension();
~BatteryListenerExtension();
void battery_properties_listener_init();
void battery_properties_listener_deinit();
bool battery_properties_is_charging();
static void setChargingMode(bool is_charging);
static bool isCharging;
// void on_battery_status_changed(bool charging);
private:
batt_listener_init_t batt_listener_init;
batt_listener_deinit_t batt_listener_deinit;
batt_prop_is_charging_t batt_prop_is_charging;
};
class A2dpExtension : public AudioExtensionBase {
public:
A2dpExtension();
~A2dpExtension();
a2dp_bt_audio_pre_init_t a2dp_bt_audio_pre_init = nullptr;
register_reconfig_cb_t register_reconfig_cb = nullptr;
};
class HfpExtension : public AudioExtensionBase {
public:
HfpExtension();
~HfpExtension();
bool audio_extn_hfp_is_active();
int audio_extn_hfp_set_mic_mute(bool state);
int audio_extn_hfp_set_mic_mute2(bool state);
void audio_extn_hfp_set_parameters(struct str_parms* params);
void audio_extn_hfp_set_device(const std::vector<::aidl::android::media::audio::common::AudioDevice>&
devices, const bool updateRx);
::aidl::android::media::audio::common::AudioDevice audio_extn_hfp_get_matching_tx_device(
const ::aidl::android::media::audio::common::AudioDevice& rxDevice);
private:
hfp_init_t hfp_init;
hfp_is_active_t hfp_is_active;
hfp_get_usecase_t hfp_get_usecase;
hfp_set_mic_mute_t hfp_set_mic_mute;
hfp_set_parameters_t hfp_set_parameters;
hfp_set_mic_mute2_t hfp_set_mic_mute2;
hfp_set_device_t hfp_set_device;
bool micMute;
};
#ifdef SEC_AUDIO_CALL_SATELLITE
class ExtModemCallExtension : public AudioExtensionBase {
public:
ExtModemCallExtension();
~ExtModemCallExtension();
int32_t startCall(struct pal_device *callDevices);
void stopCall();
void setDevice(struct pal_device *devices);
bool isCallActive() { return isExtModemCallRunning; }
pal_stream_handle_t* getRxStreamHandle() { return rxStreamHandle; };
pal_stream_handle_t* getTxStreamHandle() { return txStreamHandle; };
private:
bool hasValidStreamHandle() { return (rxStreamHandle && txStreamHandle); }
bool isValidOutDevice(pal_device_id_t id);
bool isValidInDevice(pal_device_id_t id);
bool isUsbDevice(pal_device_id_t id);
void setCustomKey(pal_device& palDevice, const pal_device_id_t outDeviceId);
void configurePalDevices(struct pal_device *palDevices, const pal_device_id_t callRxDeviceId);
std::unique_ptr<pal_stream_attributes> getExtModemCallAttributes(
pal_stream_loopback_type_t type);
bool isExtModemCallRunning;
pal_stream_handle_t *rxStreamHandle;
pal_stream_handle_t *txStreamHandle;
};
#endif
class FmExtension : public AudioExtensionBase {
public:
FmExtension();
~FmExtension();
set_parameters_t fm_set_params;
fm_running_status_t fm_running_status;
void audio_extn_fm_set_parameters(struct str_parms* params);
bool audio_extn_fm_get_status();
};
class KarokeExtension : public AudioExtensionBase {
public:
KarokeExtension();
~KarokeExtension();
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
void init();
bool isKaraokeActive();
bool isVoiceRecognitionStreamCreated() { return mIsVoiceRecognitionStreamCreated; }
void setVoiceRecognitionStreamCreated(bool on) { mIsVoiceRecognitionStreamCreated = on; }
#endif
int karaoke_open(pal_device_id_t device_out, pal_stream_callback pal_callback,
pal_channel_info ch_info);
int karaoke_start();
int karaoke_stop();
int karaoke_close();
protected:
pal_stream_handle_t* karaoke_stream_handle;
struct pal_stream_attributes sattr;
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
bool mIsVoiceRecognitionStreamCreated = false;
#endif
};
#ifdef SEC_AUDIO_SUPPORT_REMOTE_MIC
class AasExtension : public AudioExtensionBase {
public:
AasExtension();
~AasExtension();
int startAasStream(const pal_device_id_t outDeviceId);
int stopAasStream();
int updateAasStream(const bool enable, const pal_device_id_t outDeviceId);
protected:
bool isAasActive() { return mAasStreamHandle != nullptr; }
bool isAasDeviceAvailable(const pal_device_id_t deviceId);
bool isValidStatusForAas(const pal_device_id_t outDeviceId);
void setAasCustomKey(pal_device& palDevice, const pal_device_id_t outDeviceId);
pal_device_id_t mAasOutDeviceId;
pal_stream_handle_t *mAasStreamHandle;
};
#endif
#ifdef SEC_AUDIO_DSM_AMP
class SpeakerFeedbackExtension : public AudioExtensionBase {
public:
SpeakerFeedbackExtension();
~SpeakerFeedbackExtension();
void init();
void start();
int setDevice(pal_stream_handle_t *stream_handle,
uint32_t no_of_devices, struct pal_device *devices);
void stop();
void close();
protected:
pal_stream_handle_t *mFeedbackStreamHandle;
bool mSupportViFeedback;
std::mutex mFeedbackMutex;
};
#endif
class GefExtension : public AudioExtensionBase {
public:
GefExtension();
~GefExtension();
void gef_interface_init();
void gef_interface_deinit();
private:
gef_init_t gef_init;
gef_deinit_t gef_deinit;
};
class AudioExtension {
public:
static AudioExtension& getInstance() {
static const auto kAudioExtension = []() {
std::unique_ptr<AudioExtension> audioExt{new AudioExtension()};
return std::move(audioExt);
}();
return *(kAudioExtension.get());
}
void audio_extn_set_parameters(struct str_parms* params);
void audio_extn_get_parameters(struct str_parms* params, struct str_parms* reply);
void audio_feature_stats_set_parameters(struct str_parms* params);
explicit AudioExtension() = default;
AudioExtension(const AudioExtension&) = delete;
AudioExtension& operator=(const AudioExtension& x) = delete;
AudioExtension(AudioExtension&& other) = delete;
AudioExtension& operator=(AudioExtension&& other) = delete;
std::unique_ptr<BatteryListenerExtension> mBatteryListenerExtension =
std::make_unique<BatteryListenerExtension>();
std::unique_ptr<A2dpExtension> mA2dpExtension = std::make_unique<A2dpExtension>();
std::unique_ptr<HfpExtension> mHfpExtension = std::make_unique<HfpExtension>();
#ifdef SEC_AUDIO_CALL_SATELLITE
std::unique_ptr<ExtModemCallExtension> mExtModemCallExtension = std::make_unique<ExtModemCallExtension>();
#endif
std::unique_ptr<FmExtension> mFmExtension = std::make_unique<FmExtension>();
std::unique_ptr<KarokeExtension> mKarokeExtension = std::make_unique<KarokeExtension>();
#ifdef SEC_AUDIO_SUPPORT_REMOTE_MIC
std::unique_ptr<AasExtension> mAasExtension = std::make_unique<AasExtension>();
#endif
#ifdef SEC_AUDIO_DSM_AMP
std::unique_ptr<SpeakerFeedbackExtension> mSpeakerFeedbackExtension
= std::make_unique<SpeakerFeedbackExtension>();
#endif
std::unique_ptr<GefExtension> mGefExtension = std::make_unique<GefExtension>();
static std::mutex reconfig_wait_mutex_;
};
} // namespace qti::audio::core

View File

@@ -1,43 +0,0 @@
/*
* Copyright (c) 2019, 2021 The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Changes from Qualcomm Innovation Center are provided under the following license:
*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#ifdef __cplusplus
extern "C" {
#endif
typedef void (*battery_status_change_fn_t)(bool);
void battery_properties_listener_init(battery_status_change_fn_t fn);
void battery_properties_listener_deinit(void);
bool battery_properties_is_charging(void);
#ifdef __cplusplus
}
#endif
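/*
 * Usage sketch (illustrative only): a client registers a callback at init time and
 * may poll the charging state later. The callback and client function names used
 * here are hypothetical.
 *
 *   static void on_battery_status_changed(bool charging) {
 *       // react to charger plug / unplug events
 *   }
 *
 *   void client_init(void) {
 *       battery_properties_listener_init(on_battery_status_changed);
 *       bool charging_now = battery_properties_is_charging();
 *       (void)charging_now;
 *   }
 *
 *   void client_deinit(void) {
 *       battery_properties_listener_deinit();
 *   }
 */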

View File

@@ -1,63 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := aidl_fuzzer_audio_core_hal
LOCAL_VENDOR_MODULE := true
LOCAL_C_INCLUDES := $(LOCAL_PATH)/../include \
$(TOP)/system/media/audio/include \
$(TOP)/hardware/libhardware/include
LOCAL_CFLAGS := -DBACKEND_NDK
LOCAL_SRC_FILES := \
fuzzer.cpp
LOCAL_HEADER_LIBRARIES := \
libxsdc-utils \
libaudioeffects \
liberror_headers \
libaudioclient_headers \
libaudio_system_headers \
libmedia_helper_headers
LOCAL_STATIC_LIBRARIES := \
libaudiohalutils.qti \
libaudio_module_config.qti \
libaudiocore.extension
LOCAL_WHOLE_STATIC_LIBRARIES := \
libaudioplatform.qti
LOCAL_SHARED_LIBRARIES := \
libaudioaidlcommon \
libbase \
libbinder_ndk \
libcutils \
liblog \
libdl \
libhidlbase \
libhardware \
libfmq \
libmedia_helper \
libstagefright_foundation \
libutils \
libaudioutils \
libxml2 \
android.hardware.common-V2-ndk \
android.media.audio.common.types-V3-ndk \
android.hardware.audio.core-V2-ndk \
$(LATEST_ANDROID_HARDWARE_AUDIO_EFFECT) \
android.hardware.audio.core.sounddose-V1-ndk \
libar-pal \
libaudioserviceexampleimpl \
libaudioplatformconverter.qti \
qti-audio-types-aidl-V1-ndk \
libbinder \
libaudiocorehal.qti \
libaudiocorehal.default \
libclang_rt.ubsan_standalone
LOCAL_STATIC_LIBRARIES += libbinder_random_parcel
include $(BUILD_FUZZ_TEST)

View File

@@ -1,45 +0,0 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <core-impl/AudioPolicyConfigXmlConverter.h>
#include <core-impl/ChildInterface.h>
#include <core-impl/Config.h>
#include <qti-audio-core/Module.h>
#include <qti-audio-core/ModulePrimary.h>
#include <fuzzbinder/libbinder_ndk_driver.h>
#include <fuzzer/FuzzedDataProvider.h>
#define LOG_TAG "AIDL_FUZZER_AUDIO_CORE_HAL"
using aidl::android::hardware::audio::core::internal::AudioPolicyConfigXmlConverter;
extern AudioPolicyConfigXmlConverter gAudioPolicyConverter;
extern std::shared_ptr<::aidl::android::hardware::audio::core::Config> gConfigDefaultAosp;
extern std::shared_ptr<::qti::audio::core::ModulePrimary> gModuleDefaultQti;
// init
extern "C" int LLVMFuzzerInitialize(int* argc, char*** argv) {
gConfigDefaultAosp = ndk::SharedRefBase::make<::aidl::android::hardware::audio::core::Config>(gAudioPolicyConverter);
gModuleDefaultQti = ndk::SharedRefBase::make<::qti::audio::core::ModulePrimary>();
return 0;
}
// one fuzzing test case
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
FuzzedDataProvider provider(data, size);
uint32_t index = provider.ConsumeIntegralInRange<uint32_t>(1, 2);
if (index == 2 && gModuleDefaultQti != nullptr) {
android::fuzzService(gModuleDefaultQti->asBinder().get(), std::move(provider));
}
if (index == 1 && gConfigDefaultAosp != nullptr) {
android::fuzzService(gConfigDefaultAosp->asBinder().get(), std::move(provider));
}
return 0;
}

View File

@@ -1,78 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <aidl/android/hardware/audio/core/BnBluetooth.h>
#include <aidl/android/hardware/audio/core/BnBluetoothA2dp.h>
#include <aidl/android/hardware/audio/core/BnBluetoothLe.h>
#include <qti-audio-core/Platform.h>
#include <extensions/AudioExtension.h>
namespace qti::audio::core {
class Bluetooth : public ::aidl::android::hardware::audio::core::BnBluetooth {
public:
Bluetooth();
private:
ndk::ScopedAStatus setScoConfig(const ScoConfig& in_config, ScoConfig* _aidl_return) override;
ndk::ScopedAStatus setHfpConfig(const HfpConfig& in_config, HfpConfig* _aidl_return) override;
ScoConfig mScoConfig;
HfpConfig mHfpConfig;
Platform& mPlatform{Platform::getInstance()};
AudioExtension& mAudExt{AudioExtension::getInstance()};
};
class BluetoothA2dp : public ::aidl::android::hardware::audio::core::BnBluetoothA2dp {
public:
BluetoothA2dp() = default;
private:
ndk::ScopedAStatus isEnabled(bool* _aidl_return) override;
ndk::ScopedAStatus setEnabled(bool in_enabled) override;
ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override;
ndk::ScopedAStatus reconfigureOffload(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters) override;
bool mEnabled = false;
Platform& mPlatform{Platform::getInstance()};
};
class BluetoothLe : public ::aidl::android::hardware::audio::core::BnBluetoothLe {
public:
BluetoothLe() = default;
private:
ndk::ScopedAStatus isEnabled(bool* _aidl_return) override;
ndk::ScopedAStatus setEnabled(bool in_enabled) override;
ndk::ScopedAStatus supportsOffloadReconfiguration(bool* _aidl_return) override;
ndk::ScopedAStatus reconfigureOffload(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters) override;
bool mEnabled = false;
Platform& mPlatform{Platform::getInstance()};
};
} // namespace qti::audio::core

View File

@@ -1,61 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <android/binder_auto_utils.h>
#include <android/binder_ibinder_platform.h>
#include <system/thread_defs.h>
#include <memory>
#include <utility>
namespace qti::audio::core {
// Helper used for interfaces that require a persistent instance. We hold them
// via a strong pointer. The binder token is retained for a call to
// 'setMinSchedulerPolicy'.
template <class C>
struct ChildInterface : private std::pair<std::shared_ptr<C>, ndk::SpAIBinder> {
ChildInterface() = default;
ChildInterface& operator=(const std::shared_ptr<C>& c) {
return operator=(std::shared_ptr<C>(c));
}
ChildInterface& operator=(std::shared_ptr<C>&& c) {
this->first = std::move(c);
return *this;
}
explicit operator bool() const { return !!this->first; }
C& operator*() const { return *(this->first); }
C* operator->() const { return this->first.get(); }
// Use 'getInstance' when returning the interface instance.
std::shared_ptr<C> getInstance() {
if (this->second.get() == nullptr) {
const auto binder = this->second = this->first->asBinder();
AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
AIBinder_setInheritRt(binder.get(), true);
}
return this->first;
}
};
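// Usage sketch (illustrative; member names are hypothetical): an owner caches a
// child interface and hands it out lazily through getInstance(), which applies the
// scheduler policy to the binder on first use.
//
//   ChildInterface<Telephony> mTelephony;
//   ...
//   if (!mTelephony) {
//       mTelephony = ndk::SharedRefBase::make<Telephony>();
//   }
//   *_aidl_return = mTelephony.getInstance();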
} // namespace qti::audio::core

View File

@@ -1,91 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <functional>
#include <memory>
#include <string>
#include <vector>
namespace qti::audio::core {
#ifdef __LP64__
#define OFFLOAD_EFFECTS_BUNDLE_LIBRARY_PATH "/vendor/lib64/soundfx/libqcompostprocbundle.so"
#define VISUALIZER_LIBRARY_PATH "/vendor/lib64/soundfx/libqcomvisualizer.so"
#else
#define OFFLOAD_EFFECTS_BUNDLE_LIBRARY_PATH "/vendor/lib/soundfx/libqcompostprocbundle.so"
#define VISUALIZER_LIBRARY_PATH "/vendor/lib/soundfx/libqcomvisualizer.so"
#endif
// { SEC_AUDIO_SUPPORT_AIDL_EFFECT
#ifdef __LP64__
#define OFFLOAD_EFFECT_LIBRARY_PATH "/vendor/lib64/soundfx/libaudioeffectoffload_aidl.so"
#else
#define OFFLOAD_EFFECT_LIBRARY_PATH "/vendor/lib/soundfx/libaudioeffectoffload_aidl.so"
#endif
// } SEC_AUDIO_SUPPORT_AIDL_EFFECT
/*
* All libraries implementing post processing effect should expose
* startEffect(int, pal_stream_handle_t*)
* stopEffect(int)
*/
using pal_stream_handle_t = uint64_t;
using StartEffectFptr = void (*)(int, uint64_t*);
using StopEffectFptr = void (*)(int);
// { SEC_AUDIO_VOLUME_MONITOR
using UpdateEffectFptr = void (*)(uint64_t*, int);
// } SEC_AUDIO_VOLUME_MONITOR
struct OffloadEffectLibIntf {
StartEffectFptr mStartEffect;
StopEffectFptr mStopEffect;
// { SEC_AUDIO_VOLUME_MONITOR
UpdateEffectFptr mUpdateEffect;
// } SEC_AUDIO_VOLUME_MONITOR
};
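// A minimal sketch (an assumption, not an existing library) of what a conforming
// post-processing effect library could export; only the symbol names and signatures
// follow the contract above, the bodies are placeholders, so the sketch stays a comment.
//
//   extern "C" void startEffect(int ioHandle, uint64_t* palHandle) {
//       // attach the offload effect chain to the PAL stream behind palHandle
//   }
//   extern "C" void stopEffect(int ioHandle) {
//       // detach the effect chain previously started for ioHandle
//   }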
/* Pair of lib handle and function pointers */
using EffectLibInfo = std::pair<std::unique_ptr<void, std::function<void(void*)>>,
std::unique_ptr<struct OffloadEffectLibIntf>>;
class HalOffloadEffects {
private:
#ifdef __LP64__
const std::string kOffloadVisualizerPath = "/vendor/lib64/soundfx/libqcomvisualizer.so";
const std::string kOffloadPostProcBundlePath = "/vendor/lib64/soundfx/libqcompostprocbundle.so";
#else
const std::string kOffloadVisualizerPath = "/vendor/lib/soundfx/libqcomvisualizer.so";
const std::string kOffloadPostProcBundlePath = "/vendor/lib/soundfx/libqcompostprocbundle.so";
#endif
// { SEC_AUDIO_SUPPORT_AIDL_EFFECT
#ifdef __LP64__
const std::string kSecOffloadEffectLibraryPath = "/vendor/lib64/soundfx/libaudioeffectoffload_aidl.so";
#else
const std::string kSecOffloadEffectLibraryPath = "/vendor/lib/soundfx/libaudioeffectoffload_aidl.so";
#endif
// } SEC_AUDIO_SUPPORT_AIDL_EFFECT
std::vector<EffectLibInfo> mEffects;
HalOffloadEffects();
void loadLibrary(std::string path);
public:
static HalOffloadEffects& getInstance() {
static HalOffloadEffects halEffects;
return halEffects;
}
void startEffect(int ioHandle, pal_stream_handle_t* palHandle);
void stopEffect(int ioHandle);
// { SEC_AUDIO_VOLUME_MONITOR
void updateEffect(pal_stream_handle_t* palHandle, int updateType);
// } SEC_AUDIO_VOLUME_MONITOR
};
} // namespace qti::audio::core

View File

@@ -1,274 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <iostream>
#include <map>
#include <memory>
#include <set>
#include <aidl/android/hardware/audio/core/BnModule.h>
#include <extensions/AudioExtension.h>
#include <qti-audio-core/ChildInterface.h>
#include <qti-audio-core/ModuleConfig.h>
#include <qti-audio-core/Stream.h>
#include <qti-audio-core/Telephony.h>
namespace qti::audio::core {
class Module : public ::aidl::android::hardware::audio::core::BnModule,
public std::enable_shared_from_this<Module> {
public:
enum Type : int { DEFAULT, R_SUBMIX, STUB, USB };
explicit Module(Type type);
// #################### start of overriding APIs from IModule ####################
ndk::ScopedAStatus setModuleDebug(
const ::aidl::android::hardware::audio::core::ModuleDebug& in_debug) override;
ndk::ScopedAStatus getTelephony(
std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony>* _aidl_return)
override;
ndk::ScopedAStatus getBluetooth(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth>* _aidl_return)
override;
ndk::ScopedAStatus getBluetoothA2dp(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp>* _aidl_return)
override;
ndk::ScopedAStatus getBluetoothLe(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe>* _aidl_return)
override;
ndk::ScopedAStatus prepareToDisconnectExternalDevice(int32_t in_portId) override;
ndk::ScopedAStatus connectExternalDevice(
const ::aidl::android::media::audio::common::AudioPort& in_templateIdAndAdditionalData,
::aidl::android::media::audio::common::AudioPort* _aidl_return) override;
ndk::ScopedAStatus disconnectExternalDevice(int32_t in_portId) override;
ndk::ScopedAStatus getAudioPatches(
std::vector<::aidl::android::hardware::audio::core::AudioPatch>* _aidl_return) override;
ndk::ScopedAStatus getAudioPort(
int32_t in_portId,
::aidl::android::media::audio::common::AudioPort* _aidl_return) override;
ndk::ScopedAStatus getAudioPortConfigs(
std::vector<::aidl::android::media::audio::common::AudioPortConfig>* _aidl_return)
override;
ndk::ScopedAStatus getAudioPorts(
std::vector<::aidl::android::media::audio::common::AudioPort>* _aidl_return) override;
ndk::ScopedAStatus getAudioRoutes(
std::vector<::aidl::android::hardware::audio::core::AudioRoute>* _aidl_return) override;
ndk::ScopedAStatus getAudioRoutesForAudioPort(
int32_t in_portId,
std::vector<::aidl::android::hardware::audio::core::AudioRoute>* _aidl_return) override;
ndk::ScopedAStatus openInputStream(
const ::aidl::android::hardware::audio::core::IModule::OpenInputStreamArguments&
in_args,
::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn* _aidl_return)
override;
ndk::ScopedAStatus openOutputStream(
const ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamArguments&
in_args,
::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn* _aidl_return)
override;
ndk::ScopedAStatus getSupportedPlaybackRateFactors(
SupportedPlaybackRateFactors* _aidl_return) override;
ndk::ScopedAStatus setAudioPatch(
const ::aidl::android::hardware::audio::core::AudioPatch& in_requested,
::aidl::android::hardware::audio::core::AudioPatch* _aidl_return) override;
ndk::ScopedAStatus setAudioPortConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& in_requested,
::aidl::android::media::audio::common::AudioPortConfig* out_suggested,
bool* _aidl_return) override;
ndk::ScopedAStatus resetAudioPatch(int32_t in_patchId) override;
ndk::ScopedAStatus resetAudioPortConfig(int32_t in_portConfigId) override;
ndk::ScopedAStatus getMasterMute(bool* _aidl_return) override;
ndk::ScopedAStatus setMasterMute(bool in_mute) override;
ndk::ScopedAStatus getMasterVolume(float* _aidl_return) override;
ndk::ScopedAStatus setMasterVolume(float in_volume) override;
ndk::ScopedAStatus getMicMute(bool* _aidl_return) override;
ndk::ScopedAStatus setMicMute(bool in_mute) override;
ndk::ScopedAStatus getMicrophones(
std::vector<::aidl::android::media::audio::common::MicrophoneInfo>* _aidl_return)
override;
ndk::ScopedAStatus updateAudioMode(
::aidl::android::media::audio::common::AudioMode in_mode) override;
ndk::ScopedAStatus updateScreenRotation(
::aidl::android::hardware::audio::core::IModule::ScreenRotation in_rotation) override;
ndk::ScopedAStatus updateScreenState(bool in_isTurnedOn) override;
ndk::ScopedAStatus getSoundDose(
std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose>*
_aidl_return) override;
ndk::ScopedAStatus generateHwAvSyncId(int32_t* _aidl_return) override;
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
override;
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) override;
ndk::ScopedAStatus addDeviceEffect(
int32_t in_portConfigId,
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus removeDeviceEffect(
int32_t in_portConfigId,
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus getMmapPolicyInfos(
::aidl::android::media::audio::common::AudioMMapPolicyType mmapPolicyType,
std::vector<::aidl::android::media::audio::common::AudioMMapPolicyInfo>* _aidl_return)
override;
ndk::ScopedAStatus supportsVariableLatency(bool* _aidl_return) override;
ndk::ScopedAStatus getAAudioMixerBurstCount(int32_t* _aidl_return) override;
ndk::ScopedAStatus getAAudioHardwareBurstMinUsec(int32_t* _aidl_return) override;
// #################### end of overriding APIs from IModule ####################
// Minimum amount of frames for the client buffer.
static constexpr int32_t kMinimumStreamBufferSizeFrames = 48;
// The maximum stream buffer size is 1 GiB = 2 ** 30 bytes;
static constexpr int32_t kMaximumStreamBufferSizeBytes = 1 << 30;
protected:
struct VendorDebug {
static const std::string kForceTransientBurstName;
static const std::string kForceSynchronousDrainName;
bool forceTransientBurst = false;
bool forceSynchronousDrain = false;
};
// ids of device ports created at runtime via 'connectExternalDevice'.
// Also stores a list of ids of mix ports with dynamic profiles that were populated from
// the connected port. This list can be empty, thus an int->int multimap can't be used.
using ConnectedDevicePorts = std::map<int32_t, std::set<int32_t>>;
// Maps port ids and port config ids to patch ids (a standalone sketch of this
// bookkeeping follows this header).
// Multimap because both ports and configs can be used by multiple patches.
using Patches = std::multimap<int32_t, int32_t>;
const Type mType;
std::unique_ptr<ModuleConfig> mConfig;
::aidl::android::hardware::audio::core::ModuleDebug mDebug;
VendorDebug mVendorDebug;
ConnectedDevicePorts mConnectedDevicePorts;
Streams mStreams;
Patches mPatches;
bool mMasterMute = false;
float mMasterVolume = 1.0f;
ChildInterface<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
std::optional<bool> mIsMmapSupported;
protected:
// #################### start of virtual APIs to be implemented by children ####################
virtual ndk::ScopedAStatus createInputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones,
std::shared_ptr<StreamIn>* result) = 0;
virtual ndk::ScopedAStatus createOutputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo,
std::shared_ptr<StreamOut>* result) = 0;
virtual std::vector<::aidl::android::media::audio::common::AudioProfile> getDynamicProfiles(
const ::aidl::android::media::audio::common::AudioPort& audioPort);
virtual void onNewPatchCreation(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks,
::aidl::android::hardware::audio::core::AudioPatch& newPatch);
virtual void onPrepareToDisconnectExternalDevice(
const ::aidl::android::media::audio::common::AudioPort& audioPort);
virtual void setAudioPatchTelephony(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks,
const ::aidl::android::hardware::audio::core::AudioPatch& patch);
virtual void resetAudioPatchTelephony(
const ::aidl::android::hardware::audio::core::AudioPatch&);
virtual std::string toStringInternal() { return std::string("No-op implementation"); }
/**
* Call this API only for debugging purposes
**/
virtual void dumpInternal(const std::string& identifier = "no_id"){};
// If the module is unable to populate the connected device port correctly,
// the returned error code must correspond to the errors of
// `IModule.connectExternalDevice` method.
virtual ndk::ScopedAStatus populateConnectedDevicePort(
::aidl::android::media::audio::common::AudioPort* connectedDevicePort,
const int32_t templateDevicePortId);
// If the module finds that the patch endpoints configurations are not
// matched, the returned error code must correspond to the errors of
// `IModule.setAudioPatch` method.
virtual ndk::ScopedAStatus checkAudioPatchEndpointsMatch(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks);
virtual int onExternalDeviceConnectionChanged(
const ::aidl::android::media::audio::common::AudioPort& audioPort, bool connected);
virtual ndk::ScopedAStatus onMasterMuteChanged(bool mute);
virtual ndk::ScopedAStatus onMasterVolumeChanged(float volume);
virtual std::unique_ptr<ModuleConfig> initializeConfig();
/* fetch the nominal latency for the given mix port config */
virtual int32_t getNominalLatencyMs(
const ::aidl::android::media::audio::common::AudioPortConfig&);
// #################### end of virtual APIs to be implemented by children ####################
// Utility and helper functions accessible to subclasses.
void cleanUpPatch(int32_t patchId);
ndk::ScopedAStatus createStreamContext(
int32_t in_portConfigId, int64_t in_bufferSizeFrames,
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCallback> asyncCallback,
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOutEventCallback>
outEventCallback,
StreamContext* out_context);
std::vector<::aidl::android::media::audio::common::AudioDevice> findConnectedDevices(
int32_t portConfigId);
std::set<int32_t> findConnectedPortConfigIds(int32_t portConfigId);
ndk::ScopedAStatus findPortIdForNewStream(
int32_t in_portConfigId, ::aidl::android::media::audio::common::AudioPort** port);
ModuleConfig& getConfig();
const ConnectedDevicePorts& getConnectedDevicePorts() const { return mConnectedDevicePorts; }
bool getMasterMute() const { return mMasterMute; }
float getMasterVolume() const { return mMasterVolume; }
const Patches& getPatches() const { return mPatches; }
const Streams& getStreams() const { return mStreams; }
Type getType() const { return mType; }
bool isMmapSupported();
void populateConnectedProfiles();
template <typename C>
std::set<int32_t> portIdsFromPortConfigIds(C portConfigIds);
// helper functions to print human-readable strings for port config names and patch details
std::string portNameFromPortConfigIds(int portConfigId);
std::string getPatchDetails(const ::aidl::android::hardware::audio::core::AudioPatch& patch);
void registerPatch(const ::aidl::android::hardware::audio::core::AudioPatch& patch);
ndk::ScopedAStatus updateStreamsConnectedState(
const ::aidl::android::hardware::audio::core::AudioPatch& oldPatch,
const ::aidl::android::hardware::audio::core::AudioPatch& newPatch);
ChildInterface<Telephony> mTelephony;
};
std::ostream& operator<<(std::ostream& os, Module::Type t);
} // namespace qti::audio::core
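As a side note, the Patches bookkeeping described in the comments above (port ids and port config ids mapped to patch ids, with a multimap because either side can belong to several patches) reduces to the standalone sketch below; the helper names are illustrative, not the module's own.
#include <cstdint>
#include <map>
#include <set>
#include <vector>

using Patches = std::multimap<int32_t, int32_t>;

// Record that `patchId` uses the given port config ids (both sources and sinks).
void registerPatchIds(Patches& patches, int32_t patchId,
                      const std::vector<int32_t>& portConfigIds) {
    for (int32_t portConfigId : portConfigIds) {
        patches.insert({portConfigId, patchId});
    }
}

// Collect every patch id that references `portConfigId`.
std::set<int32_t> patchIdsForPortConfig(const Patches& patches, int32_t portConfigId) {
    std::set<int32_t> result;
    auto range = patches.equal_range(portConfigId);
    for (auto it = range.first; it != range.second; ++it) {
        result.insert(it->second);
    }
    return result;
}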

View File

@@ -1,242 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <qti-audio-core/Bluetooth.h>
#include <qti-audio-core/Module.h>
#include <qti-audio-core/Platform.h>
#ifdef SEC_AUDIO_COMMON
#include <qti-audio-core/SecModulePrimary.h>
#include "SecFTM.h"
#include "AudioEffect.h"
#endif
namespace qti::audio::core {
class ModulePrimary final : public Module {
public:
ModulePrimary();
// #################### start of overriding APIs from IModule ####################
binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
ndk::ScopedAStatus getBluetooth(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth>* _aidl_return)
override;
ndk::ScopedAStatus getBluetoothA2dp(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp>* _aidl_return)
override;
ndk::ScopedAStatus getBluetoothLe(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe>* _aidl_return)
override;
ndk::ScopedAStatus getTelephony(
std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony>* _aidl_return)
override;
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) override;
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
override;
ndk::ScopedAStatus getMicMute(bool* _aidl_return) override;
ndk::ScopedAStatus setMicMute(bool in_mute) override;
ndk::ScopedAStatus getMicrophones(
std::vector<::aidl::android::media::audio::common::MicrophoneInfo>* _aidl_return)
override;
ndk::ScopedAStatus updateScreenState(bool in_isTurnedOn) override;
ndk::ScopedAStatus updateScreenRotation(
::aidl::android::hardware::audio::core::IModule::ScreenRotation in_rotation) override;
ndk::ScopedAStatus getSupportedPlaybackRateFactors(
SupportedPlaybackRateFactors* _aidl_return) override;
// #################### end of overriding APIs from IModule ####################
// Mutex for stream lists protection
static std::mutex outListMutex;
static std::mutex inListMutex;
static std::vector<std::weak_ptr<StreamOut>>& getOutStreams() { return mStreamsOut; }
static std::vector<std::weak_ptr<StreamIn>>& getInStreams() { return mStreamsIn; }
#ifdef SEC_AUDIO_COMMON
public:
SecModulePrimary& mSecModulePrimary{SecModulePrimary::getInstance()};
#endif
protected:
// #################### start of overriding APIs from Module ####################
std::string toStringInternal() override;
void dumpInternal(const std::string& identifier = "no_id") override;
ndk::ScopedAStatus createInputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones,
std::shared_ptr<StreamIn>* result) override;
ndk::ScopedAStatus createOutputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo,
std::shared_ptr<StreamOut>* result) override;
std::vector<::aidl::android::media::audio::common::AudioProfile> getDynamicProfiles(
const ::aidl::android::media::audio::common::AudioPort& audioPort) override;
void onNewPatchCreation(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks,
::aidl::android::hardware::audio::core::AudioPatch& newPatch) override;
void setAudioPatchTelephony(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks,
const ::aidl::android::hardware::audio::core::AudioPatch& newPatch) override;
void resetAudioPatchTelephony(
const ::aidl::android::hardware::audio::core::AudioPatch&) override;
int onExternalDeviceConnectionChanged(
const ::aidl::android::media::audio::common::AudioPort& audioPort,
bool connected) override;
int32_t getNominalLatencyMs(
const ::aidl::android::media::audio::common::AudioPortConfig&) override;
// #################### end of overriding APIs from Module ####################
// start of Module Parameters
/**
* Features to be provided by Set/Get Parameters.
* Each Feature can be associated with one or more semantically related parameter ids.
* Each Feature has at most one set handler, at most one get handler, or both.
* Such a group of parameters is assigned a Feature enum and is handled by its
* set handler, its get handler, or both.
* (A standalone sketch of this dispatch follows this header.)
*
* Example:
* {k1,k2,k3} => F1 => SH,GH
* {k3,k5} => F2 => SH
* {k7} => F3 => GH
*
* k* -> parameter ids,
* F* -> Feature enums,
* SH -> SetHandler
* GH -> GetHandler
**/
enum class Feature : uint16_t {
GENERIC = 0, // this enum groups miscellaneous generic parameters
TELEPHONY,
BLUETOOTH,
HDR,
WFD,
FTM, // Factory Test Mode
AUDIOEXTENSION,
HAPTICS
};
// For set parameters
using SetHandler = std::function<void(
ModulePrimary*,
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&)>;
using SetParameterToFeatureMap = std::map<std::string, Feature>;
using FeatureToSetHandlerMap = std::map<Feature, SetHandler>;
static SetParameterToFeatureMap fillSetParameterToFeatureMap();
static FeatureToSetHandlerMap fillFeatureToSetHandlerMap();
using FeatureToVendorParametersMap =
std::map<Feature, std::vector<::aidl::android::hardware::audio::core::VendorParameter>>;
// For get parameters
using GetHandler =
std::function<std::vector<::aidl::android::hardware::audio::core::VendorParameter>(
ModulePrimary*, const std::vector<std::string>&)>;
using GetParameterToFeatureMap = std::map<std::string, Feature>;
using FeatureToGetHandlerMap = std::map<Feature, GetHandler>;
static GetParameterToFeatureMap fillGetParameterToFeatureMap();
static FeatureToGetHandlerMap fillFeatureToGetHandlerMap();
using FeatureToStringMap = std::map<Feature, std::vector<std::string>>;
// end of Module Parameters
static std::vector<std::weak_ptr<::qti::audio::core::StreamOut>> mStreamsOut;
static std::vector<std::weak_ptr<::qti::audio::core::StreamIn>> mStreamsIn;
static void updateStreamOutList(const std::shared_ptr<StreamOut> streamOut) {
mStreamsOut.push_back(streamOut);
}
static void updateStreamInList(const std::shared_ptr<StreamIn> streamIn) {
mStreamsIn.push_back(streamIn);
}
// start of module parameters handling
bool processSetVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
// setHandler for Generic
void onSetGenericParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
// SetHandler For HDR
void onSetHDRParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
// SetHandler For Telephony
void onSetTelephonyParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
// SetHandler For WFD
void onSetWFDParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
// SetHandler For FTM
void onSetFTMParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
// SetHandler For Haptics
void onSetHapticsParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&);
std::vector<::aidl::android::hardware::audio::core::VendorParameter> processGetVendorParameters(
const std::vector<std::string>&);
// GetHandler for HDR
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetHDRParameters(
const std::vector<std::string>&);
// GetHandler for Telephony
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetTelephonyParameters(
const std::vector<std::string>&);
// GetHandler for WFD
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetWFDParameters(
const std::vector<std::string>&);
// GetHandler for FTM
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetFTMParameters(
const std::vector<std::string>&);
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetAudioExtnParams(
const std::vector<std::string>&);
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetBluetoothParams(
const std::vector<std::string>&);
std::vector<::aidl::android::hardware::audio::core::VendorParameter> onGetGenericParams(
const std::vector<std::string>&);
// end of module parameters handling
protected:
const SetParameterToFeatureMap mSetParameterToFeatureMap{fillSetParameterToFeatureMap()};
const FeatureToSetHandlerMap mFeatureToSetHandlerMap{fillFeatureToSetHandlerMap()};
const GetParameterToFeatureMap mGetParameterToFeatureMap{fillGetParameterToFeatureMap()};
const FeatureToGetHandlerMap mFeatureToGetHandlerMap{fillFeatureToGetHandlerMap()};
ChildInterface<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
ChildInterface<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
ChildInterface<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
Platform& mPlatform{Platform::getInstance()};
AudioExtension& mAudExt{AudioExtension::getInstance()};
private:
bool mOffloadSpeedSupported;
};
} // namespace qti::audio::core
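The id -> Feature -> handler routing documented inside the class above can be pictured with the standalone sketch below; the Parameter struct, keys, and handlers are illustrative stand-ins rather than the vendor types, which use AIDL VendorParameter and the fill*Map() factories.
#include <functional>
#include <map>
#include <string>
#include <vector>

enum class Feature { GENERIC, TELEPHONY, HDR };

struct Parameter {
    std::string id;
    std::string value;
};

using SetHandler = std::function<void(const std::vector<Parameter>&)>;

// Bucket incoming parameters per feature, then invoke at most one set handler per feature.
void dispatchSetParameters(const std::vector<Parameter>& params,
                           const std::map<std::string, Feature>& idToFeature,
                           const std::map<Feature, SetHandler>& featureToHandler) {
    std::map<Feature, std::vector<Parameter>> perFeature;
    for (const auto& p : params) {
        auto it = idToFeature.find(p.id);
        if (it != idToFeature.end()) {
            perFeature[it->second].push_back(p);
        }
    }
    for (const auto& [feature, bucket] : perFeature) {
        auto handlerIt = featureToHandler.find(feature);
        if (handlerIt != featureToHandler.end()) {
            handlerIt->second(bucket);
        }
    }
}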

View File

@@ -1,61 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <qti-audio-core/Module.h>
namespace qti::audio::core {
class ModuleStub final : public Module {
public:
ModuleStub() : Module(Type::STUB) {}
protected:
ndk::ScopedAStatus getBluetooth(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetooth>* _aidl_return)
override;
ndk::ScopedAStatus getBluetoothA2dp(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothA2dp>* _aidl_return)
override;
ndk::ScopedAStatus getBluetoothLe(
std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe>* _aidl_return)
override;
ndk::ScopedAStatus createInputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones,
std::shared_ptr<StreamIn>* result) override;
ndk::ScopedAStatus createOutputStream(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo,
std::shared_ptr<StreamOut>* result) override;
private:
ChildInterface<::aidl::android::hardware::audio::core::IBluetooth> mBluetooth;
ChildInterface<::aidl::android::hardware::audio::core::IBluetoothA2dp> mBluetoothA2dp;
ChildInterface<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
};
} // namespace qti::audio::core

View File

@@ -1,123 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <string>
#ifdef ENABLE_TAS_SPK_PROT
#include "TISpeakerProtDefs.h"
#endif
namespace qti::audio::core::Parameters {
/**
 * Since the parameters coming from the Android framework enable or disable features
 * whose impact ranges from small to large, it is highly recommended to write
 * verbose comments for each parameter. A parameter is the composition of an 'id'
 * and 'its possible values', so list all the values with a verbose explanation.
 **/
// HDR Recording
const static std::string kHdrRecord{"hdr_record_on"};
const static std::string kHdrChannelCount{"hdr_audio_channel_count"};
const static std::string kHdrSamplingRate{"hdr_audio_sampling_rate"};
const static std::string kWnr{"wnr_on"};
const static std::string kAns{"ans_on"};
const static std::string kOrientation{"orientation"};
const static std::string kInverted{"inverted"};
const static std::string kFacing{"facing"};
// voice
const static std::string kVoiceCallState{"call_state"};
const static std::string kVoiceCallType{"call_type"};
const static std::string kVoiceVSID{"vsid"};
const static std::string kVoiceDeviceMute{"device_mute"};
const static std::string kVolumeBoost{"volume_boost"};
const static std::string kVoiceDirection{"direction"};
const static std::string kVoiceSlowTalk{"st_enable"};
const static std::string kVoiceHDVoice{"hd_voice"};
const static std::string kVoiceIsCRsSupported{"isCRSsupported"};
const static std::string kVoiceCRSCall{"crs_call"};
const static std::string kVoiceCRSVolume{"CRS_volume"};
const static std::string kVoiceTranslationRxMute{"voice_translation_rx_mute"};
/** kVoiceTranslationRxMute : mutes the voice Rx volume when the
 * parameter is set to enabled while the voice call translation
 * usecase is running.
 **/
// WFD
const static std::string kCanOpenProxy{"can_open_proxy"};
const static std::string kWfdChannelMap{"wfd_channel_cap"};
const static std::string kWfdProxyRecordActive{"proxyRecordActive"};
/**
 * USE_IP_IN_DEVICE_FOR_PROXY_RECORD: Use this parameter to set/unset whether the IP-v4
 * input (IN) device is being used as a proxy device. Set it before making the device
 * available and unset it while making the device unavailable.
 **/
const static std::string kWfdIPAsProxyDevConnected{"USE_IP_IN_DEVICE_FOR_PROXY_RECORD"};
/**
 * Some clients need to hardcode a frame count requirement per read.
 * Ideally, a client should be able to read whatever AHAL provides.
 * Still, AHAL supports this module vendor parameter so that a client
 * can request a custom FMQ size.
 * Example:
 * When the session starts, the client sets
 * proxy_record_fmq_size = 480
 * When the session ends, the client unsets
 * proxy_record_fmq_size = 0
 * After the proxy record session finishes, the client is responsible
 * for unsetting the module vendor parameter.
 * (See the lifecycle sketch after this header.)
 * For upcoming requirements, this mechanism is deprecated.
 **/
const static std::string kProxyRecordFMQSize{"proxy_record_fmq_size"};
// Generic
const static std::string kInCallMusic{"icmd_playback"};
const static std::string kUHQA{"UHQA"};
const static std::string kOffloadPlaySpeedSupported{"offloadVariableRateSupported"};
const static std::string kSupportsHwSuspend{"supports_hw_suspend"};
const static std::string kIsDirectPCMTrack{"is_direct_pcm_track"};
const static std::string kTranslateRecord{"translate_record"};
#ifdef ENABLE_TAS_SPK_PROT
const static std::string kTiSmartPaKey{AUDIO_PARAM_TI_SMARTPA};
#endif
/**
 * translate_record : AUDIO_FLUENCE_FFECNS PCM_RECORD
 * Use this parameter for the Voice Translation usecase.
 * Setting the parameter lets the APK select FFECNS record, and a
 * custom key for FFECNS record is populated based on the setparam.
 **/
// FTM
const static std::string kFbspCfgWaitTime{"fbsp_cfg_wait_time"};
const static std::string kFbspFTMWaitTime{"fbsp_cfg_ftm_time"};
const static std::string kFbspValiWaitTime{"fbsp_v_vali_wait_time"};
const static std::string kFbspValiValiTime{"fbsp_v_vali_vali_time"};
const static std::string kTriggerSpeakerCall{"trigger_spkr_cal"};
const static std::string kFTMParam{"get_ftm_param"};
const static std::string kFTMSPKRParam{"get_spkr_cal"};
// Audio Extn
const static std::string kFMStatus{"fm_status"};
// Bluetooth
const static std::string kA2dpSuspended{"A2dpSuspended"};
// Haptics
const static std::string kHapticsVolume{"haptics_volume"};
const static std::string kHapticsIntensity{"haptics_intensity"};
}; // namespace qti::audio::core::Parameters
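To make the proxy_record_fmq_size lifecycle above concrete, a minimal client-side sketch follows; setModuleVendorParameter is a hypothetical stand-in for packing the id/value pair into a VendorParameter and calling IModule::setVendorParameters.
#include <iostream>
#include <string>

// Hypothetical stand-in (not the real Binder call): in an actual client, the id/value
// pair would be wrapped in a VendorParameter and sent via IModule::setVendorParameters.
static void setModuleVendorParameter(const std::string& id, const std::string& value) {
    std::cout << "setVendorParameters { " << id << " = " << value << " }\n";
}

int main() {
    // Session start: request a custom FMQ size of 480 frames per read.
    setModuleVendorParameter("proxy_record_fmq_size", "480");
    // ... proxy record session runs, reads use the requested FMQ size ...
    // Session end: the client is responsible for unsetting the parameter.
    setModuleVendorParameter("proxy_record_fmq_size", "0");
    return 0;
}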

View File

@@ -1,156 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <string>
#include <system/audio.h>
#include <hardware/audio.h>
#include "AudioEffect.h"
#include "SecFTM.h"
#include <qti-audio-core/SecParameters.h>
using ::aidl::android::hardware::audio::core::VendorParameter;
using ::aidl::qti::audio::core::VString;
namespace qti::audio::core {
class SecModulePrimary {
public:
SecModulePrimary();
static SecModulePrimary& getInstance();
// For set parameters
using SetHandler = std::function<void(
SecModulePrimary*,
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&)>;
using SetParameterToFeatureMap = std::map<std::string, SecParameters::Feature_SEC>;
using FeatureToSetHandlerMap = std::map<SecParameters::Feature_SEC, SetHandler>;
static SetParameterToFeatureMap fillSetParameterToFeatureMap();
static FeatureToSetHandlerMap fillFeatureToSetHandlerMap();
using FeatureToVendorParametersMap =
std::map<SecParameters::Feature_SEC, std::vector<::aidl::android::hardware::audio::core::VendorParameter>>;
// For get parameters
using GetHandler =
std::function<std::vector<::aidl::android::hardware::audio::core::VendorParameter>(
SecModulePrimary*, const std::vector<std::string>&)>;
using GetParameterToFeatureMap = std::map<std::string, SecParameters::Feature_SEC>;
using FeatureToGetHandlerMap = std::map<SecParameters::Feature_SEC, GetHandler>;
static GetParameterToFeatureMap fillGetParameterToFeatureMap();
static FeatureToGetHandlerMap fillFeatureToGetHandlerMap();
using FeatureToStringMap = std::map<SecParameters::Feature_SEC, std::vector<std::string>>;
#ifdef SEC_AUDIO_SPEAKER_CALIBRATION
using CalValueToGetParameterMap = std::map<std::string, std::string>;
static CalValueToGetParameterMap fillCalValueToGetParameterMap();
#endif
// start of module parameters handling
bool processSetVendorParameters(const std::vector<VendorParameter>& parameters);
#ifdef SEC_AUDIO_SPEAKER_CALIBRATION
void processCalibrationParameters(const std::vector<VendorParameter>& parameters);
std::string getCalibrationResults(std::string calValue, std::string ampType);
#endif
#ifdef SEC_AUDIO_RECORDALIVE_SUPPORT_PROVIDEO
void SetProVideoState(int mode);
#endif
#ifdef SEC_AUDIO_SUPPORT_REMOTE_MIC
pal_device_id_t getPrimaryOutPalDeviceId();
#endif
#ifdef SEC_AUDIO_COMMON
static bool CheckComboDevice();
#endif
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
static bool isListenbackDevice(pal_device_id_t deviceId);
static bool isListenbackUsecase(Usecase usecase);
static int getListenbackOutputCount();
static void updateStreamListenbackMode(bool enable);
#endif
#ifdef SEC_AUDIO_USB_GAIN_CONTROL
static void updateUsbAudioGain();
#endif
#ifdef SEC_AUDIO_KARAOKE
void setKaraokeDevice();
#endif
// setHandler for Samsung Generic
void onSetSECGenericParameters(const std::vector<VendorParameter>& parameters);
// SetHandler For Samsung Telephony
void onSetSECTelephonyParameters(const std::vector<VendorParameter>& parameters);
// SetHandler For Samsung Factory
void onSetSECFTMParameters(const std::vector<VendorParameter>& parameters);
// SetHandler For Samsung Parameters using Subkey
void onSetSECSubkeyParameters(const std::vector<VendorParameter>& parameters);
std::vector<VendorParameter> processGetVendorParameters(const std::vector<std::string>&);
// GetHandler for Samsung Generic
std::vector<VendorParameter> onGetSECGenericParameters(const std::vector<std::string>&);
// GetHandler For Samsung Telephony
std::vector<VendorParameter> onGetSECTelephonyParameters(const std::vector<std::string>&);
// GetHandler For Samsung Factory
std::vector<VendorParameter> onGetSECFTMParameters(const std::vector<std::string>&);
static ndk::ScopedAStatus ForceSetOutDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
bool force = false);
static ndk::ScopedAStatus ForceSetInDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices);
static std::shared_ptr<StreamOut> GetStreamOut(Usecase tag);
static std::shared_ptr<StreamIn> GetStreamIn(Usecase tag);
#ifdef SEC_AUDIO_SAMSUNGRECORD
static std::shared_ptr<StreamIn> getHighestPriorityStreamIn();
#endif
void setTelephony(ChildInterface<Telephony> tel) { mTelephony = tel; }
std::string toString() const {
std::ostringstream os;
os << std::endl << " --- SecModulePrimary ---" << std::endl;
os << mSecFTM.toString();
os << " --- SecModulePrimary end ---" << std::endl << std::endl;
return os.str();
}
protected:
ChildInterface<Telephony> mTelephony;
const SetParameterToFeatureMap mSetParameterToFeatureMap_SEC{fillSetParameterToFeatureMap()};
const FeatureToSetHandlerMap mFeatureToSetHandlerMap{fillFeatureToSetHandlerMap()};
const GetParameterToFeatureMap mGetParameterToFeatureMap{fillGetParameterToFeatureMap()};
const FeatureToGetHandlerMap mFeatureToGetHandlerMap{fillFeatureToGetHandlerMap()};
#ifdef SEC_AUDIO_SPEAKER_CALIBRATION
const CalValueToGetParameterMap mCalValueToGetParameterMap{fillCalValueToGetParameterMap()};
#endif
#ifdef SEC_AUDIO_CALL_VOIP
ndk::ScopedAStatus RerouteForVoip();
#endif
#ifdef SEC_AUDIO_SCREEN_MIRRORING // { SUPPORT_VOIP_VIA_SMART_VIEW
ndk::ScopedAStatus UpdateSmartViewState(bool newVoipViaSmartView);
#endif // } SUPPORT_VOIP_VIA_SMART_VIEW
#if defined(SEC_AUDIO_SUPPORT_FLIP_CALL) || defined(SEC_AUDIO_SUPPORT_SOUNDBOOSTER_FOLD_PARAM_ON_DSP)
void SetFolderState(int state);
#endif
#ifdef SEC_AUDIO_BLE_OFFLOAD
void UpdateSCOdeviceState();
#endif
Platform& mPlatform{Platform::getInstance()};
AudioExtension& mAudExt{AudioExtension::getInstance()};
AudioEffect& mAudioEffect{AudioEffect::getInstance()};
SecFTM& mSecFTM{SecFTM::getInstance()};
};
} // namespace qti::audio::core

View File

@@ -1,150 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <string>
#include <system/audio.h>
#include <hardware/audio.h>
namespace qti::audio::core::SecParameters {
enum class Feature_SEC : uint16_t {
SEC_GENERIC = 0,
SEC_TELEPHONY,
SEC_FTM,
SEC_SUBKEY,
};
// Voice
const static std::string kFactoryEchoRefMuteDetect{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_ECHOREF_MUTE_DETECT};
const static std::string kFactoryEchoRefMuteValue{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_ECHOREF_MUTE_VALUE};
const static std::string kTtyMode{AUDIO_PARAMETER_KEY_TTY_MODE};
const static std::string kVoiceCallBand{AUDIO_PARAMETER_SEC_GLOBAL_CALL_BAND};
const static std::string kVoiceCallForwardingEnable{AUDIO_PARAMETER_SEC_GLOBAL_CALL_FORWARDING_ENABLE};
const static std::string kVoiceCallMemoState{AUDIO_PARAMETER_SEC_GLOBAL_CALL_MEMO_STATE};
const static std::string kVoiceCallNBQualityEnable{AUDIO_PARAMETER_SEC_LOCAL_CALL_NB_QUALITY_ENABLE};
const static std::string kVoiceCallRingbacktoneState{AUDIO_PARAMETER_SEC_GLOBAL_CALL_RINGBACKTONE_STATE};
const static std::string kVoiceCallSatelliteEnable{AUDIO_PARAMETER_SEC_GLOBAL_CALL_SATELLITE_ENABLE};
const static std::string kVoiceCallState{AUDIO_PARAMETER_SEC_GLOBAL_CALL_STATE};
const static std::string kVoiceCallTranslationMode{AUDIO_PARAMETER_SEC_LOCAL_CALL_TRANSLATION_MODE};
const static std::string kVoiceEffectDVAdaptSound{AUDIO_PARAMETER_SEC_GLOBAL_EFFECT_DV_ADAPT_SOUND};
const static std::string kVoiceEffectDVAdaptSoundCallPatam{AUDIO_PARAMETER_SEC_LOCAL_EFFECT_DV_ADAPT_SOUND_CALL_PARAM};
const static std::string kVoiceFactoryEchoRefMuteCNGEnable{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_ECHOREF_MUTE_CNG_ENABLE};
const static std::string kVoiceHAC{AUDIO_PARAMETER_KEY_HAC};
const static std::string kVoiceHACMode{AUDIO_PARAMETER_SEC_LOCAL_CALL_HAC_MODE};
const static std::string kVoiceScreenCall{AUDIO_PARAMETER_SEC_LOCAL_SCREEN_CALL};
const static std::string kVoiceStreamEnforcedActiveInCall{AUDIO_PARAMETER_SEC_LOCAL_STREAM_ENFORCED_ACTIVE_IN_CALL};
const static std::string kVoiceTxControlMode{AUDIO_PARAMETER_SEC_LOCAL_VOICE_TX_CONTROL_MODE};
const static std::string kVoiceRxControlMode{AUDIO_PARAMETER_SEC_LOCAL_VOICE_RX_CONTROL_MODE};
const static std::string kVoiceMicInputControlMode{AUDIO_PARAMETER_SEC_LOCAL_MIC_INPUT_CONTROL_MODE};
const static std::string kVoiceMicInputControlModeCall{AUDIO_PARAMETER_SEC_LOCAL_MIC_INPUT_CONTROL_MODE_CALL};
const static std::string kVoiceVSID{AUDIO_PARAMETER_SEC_GLOBAL_CALL_SIM_SLOT};
// Factory Test
const static std::string kFactoryTestLoopback{AUDIO_PARAMETER_FACTORY_TEST_LOOPBACK};
const static std::string kFactoryTestMicPath{AUDIO_PARAMETER_KEY_FACTORY_RMS_TEST};
const static std::string kFactoryTestPath{AUDIO_PARAMETER_FACTORY_TEST_PATH};
const static std::string kFactoryTestRoute{AUDIO_PARAMETER_FACTORY_TEST_ROUTE};
const static std::string kFactoryTestType{AUDIO_PARAMETER_FACTORY_TEST_TYPE};
const static std::string kFactoryTestSpkPath{AUDIO_PARAMETER_FACTORY_TEST_SPKPATH};
const static std::string kFactoryTestCalibration{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_CALIBRATION_KEY};
const static std::string kFactoryTestCalAmpType{AUDIO_PARAMETER_SUBKEY_FACTORY_CALIBRATION_AMP};
const static std::string kFactoryTestCalTarget{AUDIO_PARAMETER_SUBKEY_FACTORY_CALIBRATION_TARGET};
const static std::string kFactoryTestCalStatus{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_CALIBRATION_STATUS};
const static std::string kFactoryTestCalOff{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_CALIBRATION_OFF};
const static std::string kFactoryTestCalRead{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_CALIBRATION_READ};
const static std::string kFactoryTestCalAmpTI{AUDIO_PARAMETER_VALUE_TAS_V2};
const static std::string kVoiceFactoryEchoRefStatus{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_ECHOREF_STATUS};
const static std::string kVoiceFactoryEchoRefValue{AUDIO_PARAMETER_SEC_GLOBAL_FACTORY_ECHOREF_VALUE};
// Effect
const static std::string kEffectOffloadVSPParam{AUDIO_PARAMETER_SEC_GLOBAL_EFFECT_OFFLOAD_VSP_PARAM};
const static std::string kEffectSoundBalanceValue{AUDIO_PARAMETER_SEC_GLOBAL_EFFECT_SOUND_BALANCE_VALUE};
const static std::string kEffectToMonoEnable{AUDIO_PARAMETER_SEC_GLOBAL_EFFECT_TO_MONO_ENABLE};
const static std::string kRecordConversationEnergyKey{AUDIO_PARAMETER_SEC_GLOBAL_RECORD_CONVERSATION_ENERGY_KEY};
// Generic
const static std::string kAllSoundMuteEnable{AUDIO_PARAMETER_SEC_LOCAL_ALL_SOUND_MUTE_ENABLE};
const static std::string kA2dpFormat{AUDIO_PARAMETER_SEC_LOCAL_A2DP_FORMAT};
const static std::string kA2dpSuspendForBle{AUDIO_PARAMETER_SEC_LOCAL_A2DP_SUSPEND_FOR_BLE};
const static std::string kBargeinMode{AUDIO_PARAMETER_SEC_GLOBAL_BARGEIN_MODE};
const static std::string kBtScoCodecType{AUDIO_PARAMETER_SEC_GLOBAL_BT_SCO_CODEC_TYPE};
const static std::string kEffectSoundBoosterDspSupport{AUDIO_PARAMETER_SEC_LOCAL_EFFECT_SOUNDBOOSTER_DSP_SUPPORT};
const static std::string kFMRadioMode{AUDIO_PARAMETER_SEC_LOCAL_FMRADIO_MODE};
const static std::string kFMRadioVolume{AUDIO_PARAMETER_SEC_LOCAL_FMRADIO_VOLUME};
const static std::string kFMRadioMute{AUDIO_PARAMETER_SEC_GLOBAL_FMRADIO_MUTE};
const static std::string kGameChatEnable{AUDIO_PARAMETER_SEC_LOCAL_GAME_CHAT_ENABLE};
const static std::string kHwDisplayRotation{AUDIO_PARAMETER_SEC_GLOBAL_HW_DISPLAY_ROTATION};
const static std::string kHwFlatMotionState{AUDIO_PARAMETER_SEC_LOCAL_HW_FLAT_MOTION_STATE};
const static std::string kHwFolderState{AUDIO_PARAMETER_SEC_LOCAL_HW_FOLDER_STATE};
const static std::string kHwInterfaceTestcase{AUDIO_PARAMETER_SEC_LOCAL_HW_INTERFACE_TESTCASE};
const static std::string kHwSpeakerAmpBigData{AUDIO_PARAMETER_SEC_LOCAL_HW_SPEAKER_AMP_BIGDATA};
const static std::string kHwSpeakerAmpBigDataSupport{AUDIO_PARAMETER_SEC_LOCAL_HW_SPEAKER_AMP_BIGDATA_SUPPORT};
const static std::string kHwSpeakerAmpMaxTemperature{AUDIO_PARAMETER_SEC_GLOBAL_HW_SPEAKER_AMP_MAX_TEMPERATURE};
const static std::string kHwSpeakerAmpTemperatureRCV{AUDIO_PARAMETER_SEC_GLOBAL_HW_SPEAKER_AMP_TEMPERATURE_RCV};
const static std::string kHwSpeakerAmpTemperatureSPK{AUDIO_PARAMETER_SEC_GLOBAL_HW_SPEAKER_AMP_TEMPERATURE_SPK};
const static std::string kInterpreterMode{AUDIO_PARAMETER_SEC_GLOBAL_INTERPRETER_MODE};
const static std::string kKaraokeEnable{AUDIO_PARAMETER_SEC_LOCAL_KARAOKE_ENABLE};
const static std::string kOffloadVariableRateSupported{"offloadVariableRateSupported"};
const static std::string kPcmDumpApCallState{AUDIO_PARAMETER_SEC_GLOBAL_PCM_DUMP_AP_CALL_STATE};
const static std::string kPcmDumpRecordState{AUDIO_PARAMETER_SEC_LOCAL_PCM_DUMP_RECORD_STATE};
const static std::string kPcmDumpState{AUDIO_PARAMETER_SEC_GLOBAL_PCM_DUMP_STATE};
const static std::string kRecordBeamformingMode{AUDIO_PARAMETER_SEC_GLOBAL_RECORD_BEAMFORMING_MODE};
const static std::string kRecordInputLatency{AUDIO_PARAMETER_SEC_GLOBAL_RECORD_INPUT_LATENCY};
const static std::string kRecordNSRISecurityEnable{AUDIO_PARAMETER_SEC_GLOBAL_RECORD_NSRI_SECURITY_ENABLE};
const static std::string kRecordSecVoiceRecorderEnable{AUDIO_PARAMETER_SEC_GLOBAL_RECORD_SEC_VOICE_RECORDER_ENABLE};
const static std::string kRecordTxInversion{AUDIO_PARAMETER_SEC_GLOBAL_RECORD_TX_INVERSION};
const static std::string kRemoteMicEnable{AUDIO_PARAMETER_SEC_LOCAL_REMOTE_MIC_ENABLE};
const static std::string kRemoteMicVolume{AUDIO_PARAMETER_SEC_LOCAL_REMOTE_MIC_VOLUME};
const static std::string kScoRvcSupport{AUDIO_PARAMETER_SEC_GLOBAL_SCO_RVC_SUPPORT};
const static std::string kSetupTestcase{AUDIO_PARAMETER_SEC_GLOBAL_SETUP_TESTCASE};
const static std::string kSupportSecAudioFeature{AUDIO_PARAMETER_SEC_LOCAL_SUPPORT_SEC_AUDIO_FEATURE};
const static std::string kVoiceWakeupRegisterVoiceKeyword{AUDIO_PARAMETER_SEC_GLOBAL_VOICE_WAKEUP_REGISTER_VOICE_KEYWORD};
const static std::string kVoiceWakeupSeamlessEnable{AUDIO_PARAMETER_SEC_GLOBAL_VOICE_WAKEUP_SEAMLESS_ENABLE};
const static std::string kVoipViaSmartView{AUDIO_PARAMETER_SEC_LOCAL_VOIP_VIA_SMART_VIEW};
const static std::string kMultiMicMode{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_MODE};
const static std::string kInputFlag{AUDIO_PARAMETER_SEC_LOCAL_RECORD_INPUT_FLAG};
// Stream Out
const static std::string kDualSpeakerAmpLeftPowerEnable{AUDIO_PARAMETER_SEC_LOCAL_DUAL_SPEAKER_AMP_LEFT_POWER_ENABLE};
const static std::string kEffectUpscalerMode{AUDIO_PARAMETER_SEC_LOCAL_EFFECT_UPSCALER_MODE};
const static std::string kHapticsSource{AUDIO_PARAMETER_SEC_LOCAL_HAPTIC_SOURCE};
const static std::string kUhqUpdateFormat{AUDIO_PARAMETER_SEC_LOCAL_UHQ_UPDATE_FORMAT};
const static std::string kVolumeVoice{AUDIO_PARAMETER_SEC_LOCAL_VOLUME_VOICE};
// Stream In
// Subkey
const static std::string kDexKey{AUDIO_PARAMETER_SEC_LOCAL_DEX_KEY};
const static std::string kSubkeyDexType{AUDIO_PARAMETER_SUBKEY_DEX_TYPE};
const static std::string kSubkeyDexConnected{AUDIO_PARAMETER_SUBKEY_DEX_CONNECTED};
const static std::string kEffectListenBackKey{AUDIO_PARAMETER_SEC_LOCAL_EFFECT_LISTENBACK_KEY};
const static std::string kSubkeyEffectListenBackState{AUDIO_PARAMETER_SUBKEY_EFFECT_LISTENBACK_STATE};
const static std::string kMultiMicKey{AUDIO_PARAMETER_SEC_LOCAL_MULTI_MIC_KEY};
const static std::string kSubkeyMultiMicAudioFocusEnable{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_AUDIO_FOCUS_ENABLE};
const static std::string kSubkeyMultiMicCameraDirection{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_CAMERA_DIRECTION};
const static std::string kSubkeyMultiMicFocusCoordinate{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_FOCUS_COORDINATE};
const static std::string kSubkeyMultiMicMode{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_MODE};
const static std::string kSubkeyMultiMicPhoneOrientation{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_PHONE_ORIENTATION};
const static std::string kSubkeyMultiMicSensitivityLevel{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_SENSITIVITY_LEVEL};
const static std::string kSubkeyMultiMicZoomLevel{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_ZOOM_LEVEL};
const static std::string kSubkeyMultiMicZoomMax{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_ZOOM_MAX};
const static std::string kSubkeyMultiMicZoomMin{AUDIO_PARAMETER_SUBKEY_MULTI_MIC_ZOOM_MIN};
const static std::string kSubkeyUhqWideResolution{AUDIO_PARAMETER_SEC_LOCAL_UHQ_WIDE_RESOLUTION_ENABLE};
const static std::string kSubkeyUhqForceRouting{AUDIO_PARAMETER_SEC_LOCAL_UHQ_FORCE_ROUTING};
}; // namespace qti::audio::core::SecParameters

View File

@@ -1,48 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <mutex>
using aidl::android::media::audio::common::AudioDevice;
namespace qti::audio::core {
class SoundDose : public ::aidl::android::hardware::audio::core::sounddose::BnSoundDose {
public:
SoundDose() : mRs2Value(DEFAULT_MAX_RS2){};
ndk::ScopedAStatus setOutputRs2UpperBound(float in_rs2ValueDbA) override;
ndk::ScopedAStatus getOutputRs2UpperBound(float* _aidl_return) override;
ndk::ScopedAStatus registerSoundDoseCallback(
const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>& in_callback) override;
private:
std::shared_ptr<ISoundDose::IHalSoundDoseCallback> mCallback;
float mRs2Value;
};
} // namespace qti::audio::core
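One plausible implementation of the two RS2 accessors declared above, shown as a sketch rather than the vendor code; the 80 dBA lower bound is an assumption taken from the ISoundDose documentation and should be verified against the AIDL.
// Sketch only: store the RS2 upper bound after a range check and hand it back on request.
ndk::ScopedAStatus qti::audio::core::SoundDose::setOutputRs2UpperBound(float in_rs2ValueDbA) {
    constexpr float kAssumedMinRs2DbA = 80.0f;  // assumption, not taken from this header
    if (in_rs2ValueDbA < kAssumedMinRs2DbA || in_rs2ValueDbA > DEFAULT_MAX_RS2) {
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
    }
    mRs2Value = in_rs2ValueDbA;
    return ndk::ScopedAStatus::ok();
}

ndk::ScopedAStatus qti::audio::core::SoundDose::getOutputRs2UpperBound(float* _aidl_return) {
    *_aidl_return = mRs2Value;
    return ndk::ScopedAStatus::ok();
}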

View File

@@ -1,846 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <StreamWorker.h>
#include <Utils.h>
#include <aidl/android/hardware/audio/common/SinkMetadata.h>
#include <aidl/android/hardware/audio/common/SourceMetadata.h>
#include <aidl/android/hardware/audio/core/BnStreamCommon.h>
#include <aidl/android/hardware/audio/core/BnStreamIn.h>
#include <aidl/android/hardware/audio/core/BnStreamOut.h>
#include <aidl/android/hardware/audio/core/IStreamCallback.h>
#include <aidl/android/hardware/audio/core/IStreamOutEventCallback.h>
#include <aidl/android/hardware/audio/core/StreamDescriptor.h>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <aidl/android/media/audio/common/AudioIoFlags.h>
#include <aidl/android/media/audio/common/AudioOffloadInfo.h>
#include <aidl/android/media/audio/common/MicrophoneInfo.h>
#include <extensions/AudioExtension.h>
#include <error/expected_utils.h>
#include <fmq/AidlMessageQueue.h>
#include <system/thread_defs.h>
#include <utils/Errors.h>
#include <atomic>
#include <chrono>
#include <cstdlib>
#include <map>
#include <memory>
#include <optional>
#include <variant>
#include <Utils.h>
#include <qti-audio-core/ChildInterface.h>
#include <qti-audio-core/Platform.h>
#include <qti-audio-core/Utils.h>
#ifdef SEC_AUDIO_SAMSUNGRECORD
#include "AudioPreProcess.h"
#endif
namespace qti::audio::core {
class Telephony;
// This class is similar to StreamDescriptor, but unlike
// the descriptor, it actually owns the objects implementing
// data exchange: FMQs etc, whereas StreamDescriptor only
// contains their descriptors.
class StreamContext {
public:
typedef ::android::AidlMessageQueue<
::aidl::android::hardware::audio::core::StreamDescriptor::Command,
::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
CommandMQ;
typedef ::android::AidlMessageQueue<
::aidl::android::hardware::audio::core::StreamDescriptor::Reply,
::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
ReplyMQ;
typedef ::android::AidlMessageQueue<
int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
DataMQ;
// Ensure that this value is not used by any of
// ::aidl::android::hardware::audio::core::StreamDescriptor.State enums
static constexpr int32_t STATE_CLOSED = -1;
struct DebugParameters {
// An extra delay for transient states, in ms.
int transientStateDelayMs = 0;
// Force the "burst" command to move the SM to the TRANSFERRING state.
bool forceTransientBurst = false;
// Force the "drain" command to be synchronous, going directly to the
// IDLE state.
bool forceSynchronousDrain = false;
};
StreamContext() = default;
StreamContext(
std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
const ::aidl::android::media::audio::common::AudioFormatDescription& format,
const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout,
int sampleRate, std::unique_ptr<DataMQ> dataMQ,
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCallback> asyncCallback,
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOutEventCallback>
outEventCallback,
::aidl::android::media::audio::common::AudioPortConfig mixPortConfig,
DebugParameters debugParameters, const int nominalLatency,
std::weak_ptr<Telephony> telephony)
: mCommandMQ(std::move(commandMQ)),
mInternalCommandCookie(std::rand()),
mReplyMQ(std::move(replyMQ)),
mFormat(format),
mChannelLayout(channelLayout),
mSampleRate(sampleRate),
mDataMQ(std::move(dataMQ)),
mAsyncCallback(asyncCallback),
mOutEventCallback(outEventCallback),
mMixPortConfig(mixPortConfig),
mNominalLatency(nominalLatency),
mDebugParameters(debugParameters),
mTelephony(telephony) {}
StreamContext(StreamContext&& other)
: mCommandMQ(std::move(other.mCommandMQ)),
mInternalCommandCookie(other.mInternalCommandCookie),
mReplyMQ(std::move(other.mReplyMQ)),
mFormat(other.mFormat),
mChannelLayout(other.mChannelLayout),
mSampleRate(other.mSampleRate),
mDataMQ(std::move(other.mDataMQ)),
mAsyncCallback(std::move(other.mAsyncCallback)),
mOutEventCallback(std::move(other.mOutEventCallback)),
mMixPortConfig(std::move(other.mMixPortConfig)),
mDebugParameters(std::move(other.mDebugParameters)),
mFrameCount(other.mFrameCount),
mNominalLatency(other.mNominalLatency),
mTelephony(other.mTelephony) {}
StreamContext& operator=(StreamContext&& other) {
mCommandMQ = std::move(other.mCommandMQ);
mInternalCommandCookie = other.mInternalCommandCookie;
mReplyMQ = std::move(other.mReplyMQ);
mFormat = std::move(other.mFormat);
mChannelLayout = std::move(other.mChannelLayout);
mSampleRate = other.mSampleRate;
mDataMQ = std::move(other.mDataMQ);
mAsyncCallback = std::move(other.mAsyncCallback);
mOutEventCallback = std::move(other.mOutEventCallback);
mMixPortConfig = std::move(other.mMixPortConfig);
mDebugParameters = std::move(other.mDebugParameters);
mFrameCount = other.mFrameCount;
mNominalLatency = other.mNominalLatency;
mTelephony = other.mTelephony;
return *this;
}
void fillDescriptor(::aidl::android::hardware::audio::core::StreamDescriptor* desc);
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCallback> getAsyncCallback()
const {
return mAsyncCallback;
}
size_t getBufferSizeInFrames() const;
::aidl::android::media::audio::common::AudioChannelLayout getChannelLayout() const {
return mChannelLayout;
}
CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
DataMQ* getDataMQ() const { return mDataMQ.get(); }
::aidl::android::media::audio::common::AudioFormatDescription getFormat() const {
return mFormat;
}
::aidl::android::media::audio::common::AudioIoFlags getFlags() const {
return mMixPortConfig.flags.value();
}
#ifdef SEC_AUDIO_SAMSUNGRECORD
void setFlags(::aidl::android::media::audio::common::AudioIoFlags flags) {
mMixPortConfig.flags.value() = flags;
}
#endif
bool getForceTransientBurst() const { return mDebugParameters.forceTransientBurst; }
bool getForceSynchronousDrain() const { return mDebugParameters.forceSynchronousDrain; }
size_t getFrameSize() const;
int getInternalCommandCookie() const { return mInternalCommandCookie; }
int32_t getMixPortHandle() const {
return mMixPortConfig.ext.get<::aidl::android::media::audio::common::AudioPortExt::mix>()
.handle;
}
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOutEventCallback>
getOutEventCallback() const {
return mOutEventCallback;
}
int getPortId() const { return mMixPortConfig.portId; }
ReplyMQ* getReplyMQ() const { return mReplyMQ.get(); }
int getTransientStateDelayMs() const { return mDebugParameters.transientStateDelayMs; }
int getSampleRate() const { return mSampleRate; }
bool isValid() const;
// 'reset' is called on a Binder thread when closing the stream. Does not use
// locking because it only cleans MQ pointers which were also set on the Binder thread.
void reset();
// 'advanceFrameCount' and 'getFrameCount' are only called on the worker thread.
long advanceFrameCount(size_t increase) { return mFrameCount += increase; }
long getFrameCount() const { return mFrameCount; }
const ::aidl::android::media::audio::common::AudioPortConfig& getMixPortConfig() const {
return mMixPortConfig;
}
int32_t getNominalLatencyMs() const { return mNominalLatency; }
std::weak_ptr<Telephony> getTelephony() { return mTelephony; }
private:
std::unique_ptr<CommandMQ> mCommandMQ;
int mInternalCommandCookie; // The value used to confirm that the command
// was posted internally
std::unique_ptr<ReplyMQ> mReplyMQ;
::aidl::android::media::audio::common::AudioFormatDescription mFormat;
::aidl::android::media::audio::common::AudioChannelLayout mChannelLayout;
int mSampleRate;
::aidl::android::media::audio::common::AudioPortConfig mMixPortConfig;
std::unique_ptr<DataMQ> mDataMQ;
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCallback> mAsyncCallback;
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOutEventCallback>
mOutEventCallback; // Only used by output streams
DebugParameters mDebugParameters;
long mFrameCount = 0;
int32_t mNominalLatency = 0;
std::weak_ptr<Telephony> mTelephony;
};
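// Illustrative only (not part of the original header): one plausible way the client buffer
// size could be derived from the data FMQ capacity, using StreamContext's public accessors.
// getQuantumCount()/getQuantumSize() are standard AidlMessageQueue accessors; treat this as
// a sketch, not the vendor implementation of getBufferSizeInFrames().
inline size_t exampleBufferSizeInFrames(const StreamContext& context) {
    auto* dataMQ = context.getDataMQ();
    if (dataMQ != nullptr && context.getFrameSize() != 0) {
        return dataMQ->getQuantumCount() * dataMQ->getQuantumSize() / context.getFrameSize();
    }
    return 0;
}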
// This interface provides operations of the stream which are executed on the worker thread.
struct DriverInterface {
virtual ~DriverInterface() = default;
// All the methods below are called on the worker thread.
// This function is only called once.
virtual ::android::status_t init() = 0;
virtual ::android::status_t drain(
::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode mode) = 0;
virtual ::android::status_t flush() = 0;
virtual ::android::status_t pause() = 0;
virtual ::android::status_t standby() = 0;
virtual ::android::status_t start() = 0;
virtual ::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) = 0;
// No need to implement 'refinePosition' unless the driver can provide more precise
// data than just total frame count. For example, the driver may correctly account
// for any intermediate buffers.
virtual ::android::status_t refinePosition(
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* /*reply*/) {
return ::android::OK;
}
// This function is only called once.
// Implementers must provide an implementation that shuts down the platform resources.
virtual void shutdown() { return; }
};
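// Illustrative only (not part of the original header): a minimal no-op driver satisfying
// DriverInterface, useful to visualize which operations the worker thread expects.
// The class name is hypothetical.
class NullDriver : public DriverInterface {
  public:
    ::android::status_t init() override { return ::android::OK; }
    ::android::status_t drain(
            ::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode) override {
        return ::android::OK;
    }
    ::android::status_t flush() override { return ::android::OK; }
    ::android::status_t pause() override { return ::android::OK; }
    ::android::status_t standby() override { return ::android::OK; }
    ::android::status_t start() override { return ::android::OK; }
    ::android::status_t transfer(void*, size_t frameCount, size_t* actualFrameCount,
                                 int32_t* latencyMs) override {
        // Pretend every requested frame was consumed/produced instantly.
        *actualFrameCount = frameCount;
        *latencyMs = 0;
        return ::android::OK;
    }
    void shutdown() override {}
};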
class StreamWorkerCommonLogic : public ::android::hardware::audio::common::StreamLogic {
public:
bool isClosed() const {
return static_cast<int32_t>(mState.load()) == StreamContext::STATE_CLOSED;
}
void setClosed() {
mState = static_cast<::aidl::android::hardware::audio::core::StreamDescriptor::State>(
StreamContext::STATE_CLOSED);
}
void setIsConnected(bool connected) { mIsConnected = connected; }
/**
* IStreamCallback equivalents for StreamWorker
**/
virtual void publishTransferReady() {}
virtual void publishDrainReady() {}
virtual void publishError() {}
protected:
using DataBufferElement = int8_t;
StreamWorkerCommonLogic(StreamContext* context, DriverInterface* driver)
: mContext(context),
mDriver(driver),
mTransientStateDelayMs(context->getTransientStateDelayMs()) {}
pid_t getTid() const;
std::string init() override;
void populateReply(::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
bool isConnected) const;
void populateReplyWrongState(
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command) const;
void switchToTransientState(
::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
mState = state;
mTransientStateStart = std::chrono::steady_clock::now();
}
// The context is only used for reading, except for updating the frame count,
// which happens on the worker thread only.
StreamContext* const mContext;
DriverInterface* const mDriver;
// Atomic fields are used both by the main and worker threads.
std::atomic<bool> mIsConnected = false;
static_assert(std::atomic<::aidl::android::hardware::audio::core::StreamDescriptor::State>::
is_always_lock_free);
std::atomic<::aidl::android::hardware::audio::core::StreamDescriptor::State> mState =
::aidl::android::hardware::audio::core::StreamDescriptor::State::STANDBY;
std::atomic<::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode>
mRecentDrainMode =
::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode::DRAIN_UNSPECIFIED;
// All fields below are used on the worker thread only.
const std::chrono::duration<int, std::milli> mTransientStateDelayMs;
std::chrono::time_point<std::chrono::steady_clock> mTransientStateStart;
// We use an array and the "size" field instead of a vector to be able to
// detect memory allocation issues.
std::unique_ptr<DataBufferElement[]> mDataBuffer;
size_t mDataBufferSize;
    /**
     * Only used in asynchronous Stream(In|Out) contexts, to synchronize the
     * callbacks from the hardware.
     * The hardware may send a callback at any time, irrespective of the stream
     * state, hence the synchronization.
     **/
std::mutex mAsyncMutex;
enum StreamCallbackType {
TR = 1, // TransferReady
DR = 2, // DrainReady
ER = 3, // Error
};
std::optional<StreamCallbackType> mPendingCallBack = std::nullopt;
std::condition_variable mPendingCV;
};
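// Illustrative sketch, not from the original header, of how a pending hardware callback
// could be handed to the worker through mAsyncMutex / mPendingCV; the real logic lives in
// the cycle() and publish*() implementations of the derived worker classes.
//
//   // Producer side (e.g. publishDrainReady() on an asynchronous stream):
//   {
//       std::lock_guard guard(mAsyncMutex);
//       mPendingCallBack = StreamCallbackType::DR;
//   }
//   mPendingCV.notify_one();
//
//   // Consumer side (inside the worker while waiting for the drain to finish):
//   std::unique_lock lock(mAsyncMutex);
//   mPendingCV.wait(lock, [this] { return mPendingCallBack.has_value(); });
//   const auto callback = std::exchange(mPendingCallBack, std::nullopt);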
// This interface is used to decouple stream implementations from a concrete
// StreamWorker implementation.
struct StreamWorkerInterface {
using CreateInstance =
std::function<StreamWorkerInterface*(StreamContext* context, DriverInterface* driver)>;
virtual ~StreamWorkerInterface() = default;
virtual bool isClosed() const = 0;
virtual void setIsConnected(bool isConnected) = 0;
virtual void setClosed() = 0;
virtual bool start() = 0;
virtual pid_t getTid() = 0;
virtual void join() = 0;
virtual void publishTransferReady() = 0;
virtual void publishDrainReady() = 0;
virtual void publishError() = 0;
};
template <class WorkerLogic>
class StreamWorkerImpl : public StreamWorkerInterface,
public ::android::hardware::audio::common::StreamWorker<WorkerLogic> {
using WorkerImpl = ::android::hardware::audio::common::StreamWorker<WorkerLogic>;
public:
StreamWorkerImpl(StreamContext* context, DriverInterface* driver)
: WorkerImpl(context, driver) {}
bool isClosed() const override { return WorkerImpl::isClosed(); }
void setIsConnected(bool isConnected) override { WorkerImpl::setIsConnected(isConnected); }
void setClosed() override { WorkerImpl::setClosed(); }
bool start() override {
        // This is an "audio service thread", so it must have elevated priority.
return WorkerImpl::start(WorkerImpl::kThreadName, ANDROID_PRIORITY_URGENT_AUDIO);
}
pid_t getTid() override { return WorkerImpl::getTid(); }
void join() override { return WorkerImpl::join(); }
    void publishTransferReady() override { return WorkerImpl::publishTransferReady(); }
void publishDrainReady() override { return WorkerImpl::publishDrainReady(); }
void publishError() override { return WorkerImpl::publishError(); }
};
class StreamInWorkerLogic : public StreamWorkerCommonLogic {
public:
static const std::string kThreadName;
StreamInWorkerLogic(StreamContext* context, DriverInterface* driver)
: StreamWorkerCommonLogic(context, driver) {}
protected:
Status cycle() override;
private:
bool read(size_t clientSize,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply);
};
using StreamInWorker = StreamWorkerImpl<StreamInWorkerLogic>;
class StreamOutWorkerLogic : public StreamWorkerCommonLogic {
public:
static const std::string kThreadName;
StreamOutWorkerLogic(StreamContext* context, DriverInterface* driver)
: StreamWorkerCommonLogic(context, driver),
mEventCallback(context->getOutEventCallback()) {}
void publishTransferReady() override;
void publishDrainReady() override;
void publishError() override;
protected:
Status cycle() override;
private:
bool write(size_t clientSize,
::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply);
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamOutEventCallback> mEventCallback;
};
using StreamOutWorker = StreamWorkerImpl<StreamOutWorkerLogic>;
// This interface provides operations of the stream which are executed on a Binder pool thread.
// These methods originate both from the AIDL interface and its implementation.
struct StreamCommonInterface {
using ConnectedDevices = std::vector<::aidl::android::media::audio::common::AudioDevice>;
using Metadata =
std::variant<::aidl::android::hardware::audio::common::SinkMetadata /*IStreamIn*/,
::aidl::android::hardware::audio::common::SourceMetadata /*IStreamOut*/>;
static constexpr bool isInput(const Metadata& metadata) { return metadata.index() == 0; }
virtual ~StreamCommonInterface() = default;
// Methods below originate from the 'IStreamCommon' interface.
// This is semantically equivalent to inheriting from 'IStreamCommon' with a benefit
// that concrete stream implementations can inherit both from this interface and IStreamIn/Out.
virtual ndk::ScopedAStatus close() = 0;
virtual ndk::ScopedAStatus prepareToClose() = 0;
virtual ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) = 0;
virtual ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return) = 0;
virtual ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) = 0;
virtual ndk::ScopedAStatus addEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&
in_effect) = 0;
virtual ndk::ScopedAStatus removeEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&
in_effect) = 0;
// Methods below are common for both 'IStreamIn' and 'IStreamOut'. Note that
// 'updateMetadata' in them uses an individual structure which is wrapped here.
// The 'Common' suffix is added to distinguish them from the methods from 'IStreamIn/Out'.
virtual ndk::ScopedAStatus getStreamCommonCommon(
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>*
_aidl_return) = 0;
virtual ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) = 0;
virtual Metadata getMetadataCommon() = 0;
// Methods below are called by implementation of 'IModule', 'IStreamIn' and 'IStreamOut'.
virtual ndk::ScopedAStatus initInstance(
const std::shared_ptr<StreamCommonInterface>& delegate) = 0;
virtual const StreamContext& getContext() const = 0;
virtual bool isClosed() const = 0;
virtual const ConnectedDevices& getConnectedDevices() const = 0;
virtual ndk::ScopedAStatus setConnectedDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) = 0;
/**
     * API to configure the connected devices based on the latest platform configuration.
     * Example: whenever HAC is enabled on the platform, the VOIP playback stream should be
     * reconfigured with the HAC-enabled handset speaker.
     */
virtual ndk::ScopedAStatus reconfigureConnectedDevices() = 0;
virtual ndk::ScopedAStatus configureMMapStream(int32_t* fd, int64_t* burstSizeFrames,
int32_t* flags, int32_t* bufferSizeFrames) = 0;
virtual void setStreamMicMute(const bool muted) = 0;
#ifdef SEC_AUDIO_COMMON
virtual ndk::ScopedAStatus ForceSetDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
bool force = false) = 0;
virtual bool isStreamUsecase(Usecase tag) = 0;
virtual bool isDeviceAvailable(pal_device_id_t pal_device) = 0;
virtual bool HasPalStreamHandle() = 0;
virtual void forceShutdown() = 0;
#endif
#ifdef SEC_AUDIO_INTERPRETER_MODE
virtual void RerouteForInterpreter() = 0;
#endif
};
// This is equivalent to automatically generated 'IStreamCommonDelegator' but uses
// a weak pointer to avoid creating a reference loop. The loop will occur because
// 'IStreamIn/Out.getStreamCommon' must return the same instance every time, thus
// the stream implementation must hold a strong pointer to an instance of 'IStreamCommon'.
// Also, we use 'StreamCommonInterface' here instead of 'IStreamCommon'.
class StreamCommonDelegator : public ::aidl::android::hardware::audio::core::BnStreamCommon {
public:
explicit StreamCommonDelegator(const std::shared_ptr<StreamCommonInterface>& delegate)
: mDelegate(delegate) {}
private:
ndk::ScopedAStatus close() override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->close()
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
ndk::ScopedAStatus prepareToClose() override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->prepareToClose()
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->updateHwAvSyncId(in_hwAvSyncId)
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->getVendorParameters(in_ids, _aidl_return)
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->setVendorParameters(in_parameters, in_async)
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
ndk::ScopedAStatus addEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->addEffect(in_effect)
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
ndk::ScopedAStatus removeEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override {
auto delegate = mDelegate.lock();
return delegate != nullptr ? delegate->removeEffect(in_effect)
: ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
// It is possible that on the client side the proxy for IStreamCommon will
// outlive the IStream* instance, and the server side IStream* instance will
// get destroyed while this IStreamCommon instance is still alive.
std::weak_ptr<StreamCommonInterface> mDelegate;
};
// The implementation of DriverInterface must be provided by each concrete stream implementation.
// Note that StreamCommonImpl does not own the context. This is to support swapping stream
// implementations on the fly while keeping the same IStreamIn/Out instance. It is that instance
// which must own the context.
class StreamCommonImpl : virtual public StreamCommonInterface, virtual public DriverInterface {
public:
StreamCommonImpl(StreamContext* context, const Metadata& metadata,
const StreamWorkerInterface::CreateInstance& createWorker)
: mContextRef(*context), mMetadata(metadata), mWorker(createWorker(context, this)) {}
StreamCommonImpl(StreamContext* context, const Metadata& metadata)
: StreamCommonImpl(context, metadata, isInput(metadata) ? getDefaultInWorkerCreator()
: getDefaultOutWorkerCreator()) {}
virtual ~StreamCommonImpl() override;
ndk::ScopedAStatus close() override;
ndk::ScopedAStatus prepareToClose() override;
ndk::ScopedAStatus updateHwAvSyncId(int32_t in_hwAvSyncId) override;
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
override;
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) override;
ndk::ScopedAStatus addEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus removeEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus getStreamCommonCommon(
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
override;
ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override;
Metadata getMetadataCommon() { return mMetadata; }
ndk::ScopedAStatus initInstance(
const std::shared_ptr<StreamCommonInterface>& delegate) override;
const StreamContext& getContext() const override { return mContextRef; }
bool isClosed() const override { return mWorker->isClosed(); }
const ConnectedDevices& getConnectedDevices() const override { return mConnectedDevices; }
ndk::ScopedAStatus setConnectedDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices)
override;
void setStreamMicMute(const bool muted) override;
ndk::ScopedAStatus configureMMapStream(int32_t* fd, int64_t* burstSizeFrames, int32_t* flags,
int32_t* bufferSizeFrames) override;
#ifdef SEC_AUDIO_COMMON
ndk::ScopedAStatus ForceSetDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
bool force = false) override;
bool isStreamUsecase(Usecase tag) override;
bool isDeviceAvailable(pal_device_id_t pal_device) override;
bool HasPalStreamHandle() override;
void forceShutdown() override;
bool isCustomKeyRouted(const int ck_id);
#endif
#ifdef SEC_AUDIO_INTERPRETER_MODE
void RerouteForInterpreter() override;
#endif
// start of Equivalent of IStreamCallbacks
void publishTransferReady() { mWorker->publishTransferReady(); }
void publishDrainReady() { mWorker->publishDrainReady(); }
void publishError() { mWorker->publishError(); }
// end of Equivalent of IStreamCallbacks
protected:
static StreamWorkerInterface::CreateInstance getDefaultInWorkerCreator() {
return [](StreamContext* ctx, DriverInterface* driver) -> StreamWorkerInterface* {
return new StreamInWorker(ctx, driver);
};
}
static StreamWorkerInterface::CreateInstance getDefaultOutWorkerCreator() {
return [](StreamContext* ctx, DriverInterface* driver) -> StreamWorkerInterface* {
return new StreamOutWorker(ctx, driver);
};
}
virtual void onClose() = 0;
void stopWorker();
const StreamContext& mContextRef;
Metadata mMetadata;
std::unique_ptr<StreamWorkerInterface> mWorker;
ChildInterface<StreamCommonDelegator> mCommon;
ConnectedDevices mConnectedDevices;
#ifdef SEC_AUDIO_COMMON
ConnectedDevices mPreviousDevices;
std::vector<pal_device> mPalDevices{};
#endif
};
// Note: 'StreamIn/Out' cannot be used on their own. Instead, they must be used for defining
// concrete input/output stream implementations.
class StreamIn : virtual public StreamCommonInterface,
public ::aidl::android::hardware::audio::core::BnStreamIn {
public:
virtual ~StreamIn() override = default;
ndk::ScopedAStatus getMetadata(
::aidl::android::hardware::audio::common::SinkMetadata& out_sinkMetadata) {
out_sinkMetadata = std::get<::aidl::android::hardware::audio::common::SinkMetadata>(
getMetadataCommon());
return ndk::ScopedAStatus::ok();
}
virtual int32_t setAggregateSinkMetadata(bool) { return 0; }
#ifdef SEC_AUDIO_SAMSUNGRECORD
std::shared_ptr<AudioPreProcess> PreProcessInit();
#endif
protected:
void defaultOnClose();
ndk::ScopedAStatus getStreamCommon(
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
override {
return getStreamCommonCommon(_aidl_return);
}
ndk::ScopedAStatus updateMetadata(const ::aidl::android::hardware::audio::common::SinkMetadata&
in_sinkMetadata) override {
return updateMetadataCommon(in_sinkMetadata);
}
ndk::ScopedAStatus getActiveMicrophones(
std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return)
override;
ndk::ScopedAStatus getMicrophoneDirection(MicrophoneDirection* _aidl_return) override;
ndk::ScopedAStatus setMicrophoneDirection(MicrophoneDirection in_direction) override;
ndk::ScopedAStatus getMicrophoneFieldDimension(float* _aidl_return) override;
ndk::ScopedAStatus setMicrophoneFieldDimension(float in_zoom) override;
ndk::ScopedAStatus getHwGain(std::vector<float>* _aidl_return) override;
ndk::ScopedAStatus setHwGain(const std::vector<float>& in_channelGains) override;
friend class ndk::SharedRefBase;
StreamIn(StreamContext&& context,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones);
StreamContext mContext;
const std::map<::aidl::android::media::audio::common::AudioDevice, std::string> mMicrophones;
#ifdef SEC_AUDIO_SAMSUNGRECORD
std::shared_ptr<AudioPreProcess> preprocess_;
friend class AudioPreProcess;
#endif
};
class StreamOut : virtual public StreamCommonInterface,
public ::aidl::android::hardware::audio::core::BnStreamOut {
public:
virtual ~StreamOut() override = default;
ndk::ScopedAStatus getMetadata(
::aidl::android::hardware::audio::common::SourceMetadata& out_sourceMetadata) {
out_sourceMetadata = std::get<::aidl::android::hardware::audio::common::SourceMetadata>(
getMetadataCommon());
return ndk::ScopedAStatus::ok();
}
virtual int32_t setAggregateSourceMetadata(bool) { return 0; }
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
virtual int updateListenback(bool on) { return 0; }
#endif
protected:
void defaultOnClose();
ndk::ScopedAStatus getStreamCommon(
std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
override {
return getStreamCommonCommon(_aidl_return);
}
ndk::ScopedAStatus updateMetadata(
const ::aidl::android::hardware::audio::common::SourceMetadata& in_sourceMetadata)
override {
return updateMetadataCommon(in_sourceMetadata);
}
ndk::ScopedAStatus updateOffloadMetadata(
const ::aidl::android::hardware::audio::common::AudioOffloadMetadata&
in_offloadMetadata) override;
ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
ndk::ScopedAStatus getAudioDescriptionMixLevel(float* _aidl_return) override;
ndk::ScopedAStatus setAudioDescriptionMixLevel(float in_leveldB) override;
ndk::ScopedAStatus getDualMonoMode(
::aidl::android::media::audio::common::AudioDualMonoMode* _aidl_return) override;
ndk::ScopedAStatus setDualMonoMode(
::aidl::android::media::audio::common::AudioDualMonoMode in_mode) override;
ndk::ScopedAStatus getRecommendedLatencyModes(
std::vector<::aidl::android::media::audio::common::AudioLatencyMode>* _aidl_return)
override;
ndk::ScopedAStatus setLatencyMode(
::aidl::android::media::audio::common::AudioLatencyMode in_mode) override;
ndk::ScopedAStatus getPlaybackRateParameters(
::aidl::android::media::audio::common::AudioPlaybackRate* _aidl_return) override;
ndk::ScopedAStatus setPlaybackRateParameters(
const ::aidl::android::media::audio::common::AudioPlaybackRate& in_playbackRate)
override;
ndk::ScopedAStatus selectPresentation(int32_t in_presentationId, int32_t in_programId) override;
friend class ndk::SharedRefBase;
StreamOut(StreamContext&& context,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo);
StreamContext mContext;
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo> mOffloadInfo;
std::optional<::aidl::android::hardware::audio::common::AudioOffloadMetadata> mOffloadMetadata;
};
// The recommended way to create a stream instance.
// 'StreamImpl' is the concrete stream implementation, 'StreamInOrOut' is either 'StreamIn' or
// 'StreamOut', the rest are the arguments forwarded to the constructor of 'StreamImpl'.
template <class StreamImpl, class StreamInOrOut, class... Args>
ndk::ScopedAStatus createStreamInstance(std::shared_ptr<StreamInOrOut>* result, Args&&... args) {
std::shared_ptr<StreamInOrOut> stream =
::ndk::SharedRefBase::make<StreamImpl>(std::forward<Args>(args)...);
RETURN_STATUS_IF_ERROR(stream->initInstance(stream));
*result = std::move(stream);
return ndk::ScopedAStatus::ok();
}
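// Hypothetical call site (the names 'context', 'sourceMetadata' and 'offloadInfo' are
// assumed to exist in the caller) showing the intended usage of createStreamInstance:
//
//   std::shared_ptr<StreamOut> streamOut;
//   RETURN_STATUS_IF_ERROR(createStreamInstance<StreamOutPrimary>(
//           &streamOut, std::move(context), sourceMetadata, offloadInfo));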
class StreamWrapper {
public:
explicit StreamWrapper(const std::shared_ptr<StreamIn>& streamIn)
: mStream(streamIn), mStreamBinder(streamIn->asBinder()) {}
explicit StreamWrapper(const std::shared_ptr<StreamOut>& streamOut)
: mStream(streamOut), mStreamBinder(streamOut->asBinder()) {}
ndk::SpAIBinder getBinder() const { return mStreamBinder; }
bool isStreamOpen() const {
auto s = mStream.lock();
return s && !s->isClosed();
}
ndk::ScopedAStatus setConnectedDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
auto s = mStream.lock();
if (s) return s->setConnectedDevices(devices);
return ndk::ScopedAStatus::ok();
}
void setStreamMicMute(const bool muted) {
auto s = mStream.lock();
if (s) return s->setStreamMicMute(muted);
return;
}
ndk::ScopedAStatus configureMMapStream(int32_t* fd, int64_t* burstSizeFrames, int32_t* flags,
int32_t* bufferSizeFrames) {
auto s = mStream.lock();
if (s) return s->configureMMapStream(fd, burstSizeFrames, flags, bufferSizeFrames);
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
#ifdef SEC_AUDIO_COMMON
ndk::ScopedAStatus ForceSetDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
bool force = false) {
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
}
    bool isStreamUsecase(Usecase tag) { return false; }
    bool isDeviceAvailable(pal_device_id_t pal_device) { return false; }
    bool HasPalStreamHandle() { return false; }
    void forceShutdown() { return; }
#endif
private:
std::weak_ptr<StreamCommonInterface> mStream;
ndk::SpAIBinder mStreamBinder;
};
class Streams {
public:
Streams() = default;
Streams(const Streams&) = delete;
Streams& operator=(const Streams&) = delete;
size_t count(int32_t id) {
// Streams do not remove themselves from the collection on close.
erase_if(mStreams, [](const auto& pair) { return !pair.second.isStreamOpen(); });
return mStreams.count(id);
}
void insert(int32_t portId, int32_t portConfigId, StreamWrapper sw) {
mStreams.insert(std::pair{portConfigId, sw});
mStreams.insert(std::pair{portId, std::move(sw)});
}
ndk::ScopedAStatus setStreamConnectedDevices(
int32_t portConfigId,
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
if (auto it = mStreams.find(portConfigId); it != mStreams.end()) {
return it->second.setConnectedDevices(devices);
}
return ndk::ScopedAStatus::ok();
}
void setStreamMicMute(int32_t portConfigId, const bool muted) {
if (auto it = mStreams.find(portConfigId); it != mStreams.end()) {
return it->second.setStreamMicMute(muted);
}
return;
}
std::string toString() const {
std::ostringstream os;
os << std::endl << " --- mStreams ---" << std::endl;
std::for_each(mStreams.cbegin(), mStreams.cend(), [&](const auto& pair) {
os << "PortConfigId/PortId:" << pair.first << std::endl;
});
os << std::endl << " --- mStreams end ---" << std::endl << std::endl;
return os.str();
}
private:
// Maps port ids and port config ids to streams. Multimap because a port
// (not port config) can have multiple streams opened on it.
std::multimap<int32_t, StreamWrapper> mStreams;
};
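// Illustrative note with hypothetical ids: a stream opened on mix port 42 with port config 7
// is registered under both keys, so either id resolves to the same wrapper:
//
//   streams.insert(/*portId=*/42, /*portConfigId=*/7, StreamWrapper(streamOut));
//   streams.count(42);                              // prunes closed streams, then counts
//   streams.setStreamConnectedDevices(7, devices);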
} // namespace qti::audio::core

View File

@@ -1,155 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <qti-audio-core/AudioUsecase.h>
#include <qti-audio-core/Stream.h>
#include <system/audio_effects/effect_uuid.h>
#ifdef SEC_AUDIO_COMMON
#include "SecFTM.h"
#endif
#ifdef SEC_AUDIO_DYNAMIC_NREC
#include "AudioEffect.h"
#endif
namespace qti::audio::core {
class StreamInPrimary : public StreamIn, public StreamCommonImpl {
public:
friend class ndk::SharedRefBase;
StreamInPrimary(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones);
virtual ~StreamInPrimary() override;
int32_t setAggregateSinkMetadata(bool voiceActive) override;
// Methods of 'DriverInterface'.
::android::status_t init() override;
::android::status_t drain(
::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode) override;
::android::status_t flush() override;
::android::status_t pause() override;
::android::status_t standby() override;
::android::status_t start() override;
::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) override;
::android::status_t refinePosition(
::aidl::android::hardware::audio::core::StreamDescriptor::Reply*
/*reply*/) override;
void shutdown() override;
// methods of StreamCommonInterface
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
override;
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) override;
ndk::ScopedAStatus addEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus removeEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override;
ndk::ScopedAStatus getActiveMicrophones(
std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>* _aidl_return)
override;
// Methods called IModule
ndk::ScopedAStatus setConnectedDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices)
override;
ndk::ScopedAStatus reconfigureConnectedDevices() override;
void setStreamMicMute(const bool muted) override;
ndk::ScopedAStatus configureMMapStream(int32_t* fd, int64_t* burstSizeFrames, int32_t* flags,
int32_t* bufferSizeFrames) override;
#ifdef SEC_AUDIO_COMMON
ndk::ScopedAStatus ForceSetDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
bool force = false) override;
bool isStreamUsecase(Usecase tag) override { return (mTag == tag); }
bool isDeviceAvailable(pal_device_id_t pal_device) override;
    bool HasPalStreamHandle() override { return mPalHandle != nullptr; }
void forceShutdown() override { return shutdown(); }
#endif
#ifdef SEC_AUDIO_INTERPRETER_MODE
void RerouteForInterpreter() override;
#endif
#ifdef SEC_AUDIO_CAMCORDER
bool isTxDataInvertable();
#endif
void onClose() override { defaultOnClose(); }
static std::mutex sinkMetadata_mutex_;
void checkHearingAidRoutingForVoice(const Metadata& metadata, bool voiceActive);
protected:
/*
     * This API opens, configures and starts the pal stream.
     * It is also responsible for the validity of the pal handle.
*/
void configure();
void resume();
void shutdown_I();
    /* burst zero indicates that a burst command with zero bytes was issued from the framework */
::android::status_t burstZero();
::android::status_t startMMAP();
::android::status_t stopMMAP();
size_t getPlatformDelay() const noexcept;
// API which are *_I are internal
ndk::ScopedAStatus configureConnectedDevices_I();
#ifdef SEC_AUDIO_BLE_OFFLOAD
void updateRecordMetadataForBLE();
#endif
const Usecase mTag;
const std::string mTagName;
const size_t mFrameSizeBytes;
    // All the public APIs must check the validity of this resource before using it.
pal_stream_handle_t* mPalHandle{nullptr};
std::variant<std::monostate, PcmRecord, CompressCapture, VoipRecord, MMapRecord,
VoiceCallRecord, FastRecord, UltraFastRecord, HotwordRecord> mExt;
// references
Platform& mPlatform{Platform::getInstance()};
const ::aidl::android::media::audio::common::AudioPortConfig& mMixPortConfig;
#ifdef SEC_AUDIO_SAMSUNGRECORD
unsigned int mDeviceChannels;
audio_format_t mDeviceFormat;
#endif
#ifdef SEC_AUDIO_COMMON
SecFTM& mSecFTM{SecFTM::getInstance()};
#endif
#ifdef SEC_AUDIO_DYNAMIC_NREC
AudioEffect& mAudioEffect{AudioEffect::getInstance()};
#endif
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
AudioExtension& mAudExt{AudioExtension::getInstance()};
#endif
private:
::android::status_t onReadError(const size_t sleepFrameCount);
struct BufferConfig getBufferConfig();
void applyEffects();
bool mAECEnabled = false;
bool mNSEnabled = false;
bool mEffectsApplied = true;
std::string mLogPrefix = "";
};
} // namespace qti::audio::core

View File

@@ -1,223 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <qti-audio-core/AudioUsecase.h>
#include <qti-audio-core/HalOffloadEffects.h>
#include <qti-audio-core/Stream.h>
#include <qti-audio-core/PlatformStreamCallback.h>
#ifdef SEC_AUDIO_COMMON
#include "SecFTM.h"
#endif
namespace qti::audio::core {
class StreamOutPrimary : public StreamOut, public StreamCommonImpl, public PlatformStreamCallback {
public:
friend class ndk::SharedRefBase;
StreamOutPrimary(StreamContext&& context,
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo);
virtual ~StreamOutPrimary() override;
int32_t setAggregateSourceMetadata(bool voiceActive) override;
// Methods of 'DriverInterface'.
::android::status_t init() override;
::android::status_t drain(
::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode) override;
::android::status_t flush() override;
::android::status_t pause() override;
::android::status_t standby() override;
::android::status_t start() override;
::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) override;
::android::status_t refinePosition(
::aidl::android::hardware::audio::core::StreamDescriptor::Reply*
/*reply*/) override;
void shutdown() override;
// methods of StreamCommonInterface
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return)
override;
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async) override;
ndk::ScopedAStatus addEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus removeEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_effect)
override;
ndk::ScopedAStatus updateMetadataCommon(const Metadata& metadata) override;
// Methods of IStreamOut
ndk::ScopedAStatus updateOffloadMetadata(
const ::aidl::android::hardware::audio::common::AudioOffloadMetadata&
in_offloadMetadata) override;
ndk::ScopedAStatus getHwVolume(std::vector<float>* _aidl_return) override;
ndk::ScopedAStatus setHwVolume(const std::vector<float>& in_channelVolumes) override;
ndk::ScopedAStatus getPlaybackRateParameters(
::aidl::android::media::audio::common::AudioPlaybackRate* _aidl_return) override;
ndk::ScopedAStatus setPlaybackRateParameters(
const ::aidl::android::media::audio::common::AudioPlaybackRate& in_playbackRate)
override;
// Methods called IModule
ndk::ScopedAStatus setConnectedDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices)
override;
ndk::ScopedAStatus reconfigureConnectedDevices() override;
#ifdef SEC_AUDIO_COMMON
ndk::ScopedAStatus ForceSetDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
bool force = false) override;
bool isStreamUsecase(Usecase tag) override { return (mTag == tag); }
bool isDeviceAvailable(pal_device_id_t pal_device) override;
    bool HasPalStreamHandle() override { return mPalHandle != nullptr; }
bool setSecVolume(const std::vector<float>& volumes);
void forceShutdown() override { return shutdown(); }
#endif
#ifdef SEC_AUDIO_HDMI // { SUPPORT_VOIP_VIA_SMART_MONITOR
void RerouteForVoipSmartMonitor(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& previousDevices = {});
#endif // } SUPPORT_VOIP_VIA_SMART_MONITOR
#ifdef SEC_AUDIO_CALL_VOIP
void RerouteForVoipHeadphone();
#endif
#ifdef SEC_AUDIO_FMRADIO
void RouteFMRadioStream();
#endif
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
void checkAndSwitchListenbackMode(bool on);
int updateListenback(bool on);
#endif
ndk::ScopedAStatus configureMMapStream(int32_t* fd, int64_t* burstSizeFrames, int32_t* flags,
int32_t* bufferSizeFrames) override;
void onClose() override { defaultOnClose(); }
ndk::ScopedAStatus setLatencyMode(
::aidl::android::media::audio::common::AudioLatencyMode in_mode) override;
ndk::ScopedAStatus getRecommendedLatencyModes(
std::vector<::aidl::android::media::audio::common::AudioLatencyMode>* _aidl_return) override;
    bool isStreamOutPrimary() { return mTag == Usecase::PRIMARY_PLAYBACK; }
static std::mutex sourceMetadata_mutex_;
// Methods from PlatformStreamCallback
void onTransferReady() override;
void onDrainReady() override;
void onError() override;
protected:
/*
     * Opens, configures and starts the pal stream; also validates the pal handle.
*/
void configure();
void resume();
void shutdown_I();
    /* burst zero indicates that a burst command with zero bytes was issued from the framework */
::android::status_t burstZero();
::android::status_t startMMAP();
::android::status_t stopMMAP();
size_t getPlatformDelay() const noexcept;
::android::status_t onWriteError(const size_t sleepFrameCount);
// This API calls startEffect/stopEffect only on offload/pcm offload outputs.
void enableOffloadEffects(const bool enable);
// API which are *_I are internal
ndk::ScopedAStatus configureConnectedDevices_I();
const Usecase mTag;
const std::string mTagName;
const size_t mFrameSizeBytes;
bool mIsPaused{false};
std::vector<float> mVolumes{};
bool mHwVolumeSupported = false;
bool mHwFlushSupported = false;
bool mHwPauseSupported = false;
    // check validity of mPalHandle before use
pal_stream_handle_t* mPalHandle{nullptr};
pal_stream_handle_t* mHapticsPalHandle{nullptr};
static constexpr ::aidl::android::media::audio::common::AudioPlaybackRate sDefaultPlaybackRate =
{.speed = 1.0f,
.pitch = 1.0f,
.fallbackMode = ::aidl::android::media::audio::common::AudioPlaybackRate::
TimestretchFallbackMode::FAIL};
::aidl::android::media::audio::common::AudioPlaybackRate mPlaybackRate;
    // Haptics Usecase
int mHapticsChannelCount = 1;
std::unique_ptr<uint8_t[]> mHapticsBuffer{nullptr};
size_t mHapticsBufSize{0};
::android::status_t convertBufferAndWrite(const void* buffer, size_t frameCount);
// This API splits and writes audio and haptics streams
::android::status_t hapticsWrite(const void *buffer, size_t frameCount);
#ifdef SEC_AUDIO_CALL_VOIP
uint32_t mVoIPInSamplerate{0};
#endif
std::variant<std::monostate, PrimaryPlayback, DeepBufferPlayback, CompressPlayback,
PcmOffloadPlayback, VoipPlayback, SpatialPlayback, MMapPlayback, UllPlayback,
InCallMusic, HapticsPlayback>
mExt;
// references
Platform& mPlatform{Platform::getInstance()};
const ::aidl::android::media::audio::common::AudioPortConfig& mMixPortConfig;
HalOffloadEffects& mHalEffects{HalOffloadEffects::getInstance()};
AudioExtension& mAudExt{AudioExtension::getInstance()};
#ifdef SEC_AUDIO_COMMON
SecFTM& mSecFTM{SecFTM::getInstance()};
AudioEffect& mAudioEffect{AudioEffect::getInstance()};
#endif
private:
std::string mLogPrefix = "";
bool isHwVolumeSupported();
bool isHwFlushSupported();
bool isHwPauseSupported();
struct BufferConfig getBufferConfig();
// optional buffer format converter, if stream input and output formats are different
std::optional<std::unique_ptr<BufferFormatConverter>> mBufferFormatConverter;
#if defined(SEC_AUDIO_DUAL_SPEAKER) || defined(SEC_AUDIO_MULTI_SPEAKER)
bool isSoundBoosterRotationSupported();
#endif
#if defined(SEC_AUDIO_SUPPORT_SOUNDBOOSTER_ON_DSP) || defined(SEC_AUDIO_PREVOLUME_SOUNDBOOSTER)
uint32_t getSoundBoosterVolumeMode();
#endif
#ifdef SEC_AUDIO_PREVOLUME_SOUNDBOOSTER
void sendPrevolumeSoundbooster();
#endif
#ifdef SEC_AUDIO_DUAL_SPEAKER
bool isCallMode();
bool isDualSpeakerRouting();
void SetUpperAmpControl();
#endif
#ifdef SEC_AUDIO_SUPPORT_UHQ
int32_t getSampleRate();
void updateUhqConfig(int format, bool wideRes);
std::optional<std::pair<::aidl::android::media::audio::common::PcmType, pal_uhqa_state>> mUhqConfig = std::nullopt;
bool mUpdateUhqAfterRoute = false;
#endif
};
} // namespace qti::audio::core

View File

@@ -1,83 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <qti-audio-core/Stream.h>
namespace qti::audio::core {
class StreamStub : public StreamCommonImpl {
public:
StreamStub(StreamContext* context, const Metadata& metadata);
// Methods of 'DriverInterface'.
::android::status_t init() override;
::android::status_t drain(
::aidl::android::hardware::audio::core::StreamDescriptor::DrainMode) override;
::android::status_t flush() override;
::android::status_t pause() override;
::android::status_t standby() override;
::android::status_t start() override;
::android::status_t transfer(void* buffer, size_t frameCount, size_t* actualFrameCount,
int32_t* latencyMs) override;
void shutdown() override;
private:
const size_t mFrameSizeBytes;
const int mSampleRate;
const bool mIsAsynchronous;
const bool mIsInput;
bool mIsInitialized = false; // Used for validating the state machine logic.
bool mIsStandby = true; // Used for validating the state machine logic.
};
class StreamInStub final : public StreamIn, public StreamStub {
public:
friend class ndk::SharedRefBase;
StreamInStub(
StreamContext&& context,
const ::aidl::android::hardware::audio::common::SinkMetadata& sinkMetadata,
const std::vector<::aidl::android::media::audio::common::MicrophoneInfo>& microphones);
~StreamInStub() override;
int32_t setAggregateSinkMetadata(bool) override;
ndk::ScopedAStatus reconfigureConnectedDevices() override;
private:
void onClose() override { defaultOnClose(); }
};
class StreamOutStub final : public StreamOut, public StreamStub {
public:
friend class ndk::SharedRefBase;
StreamOutStub(StreamContext&& context,
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetadata,
const std::optional<::aidl::android::media::audio::common::AudioOffloadInfo>&
offloadInfo);
~StreamOutStub() override;
int32_t setAggregateSourceMetadata(bool) override;
ndk::ScopedAStatus reconfigureConnectedDevices() override;
private:
void onClose() override { defaultOnClose(); }
};
} // namespace qti::audio::core

View File

@@ -1,263 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <aidl/android/hardware/audio/core/BnTelephony.h>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <extensions/AudioExtension.h>
#include <qti-audio-core/Platform.h>
#include <qti-audio-core/Stream.h>
#include <android/binder_enums.h>
#ifdef SEC_AUDIO_COMMON
#include "SecFTM.h"
#include "AudioEffect.h"
#endif
namespace qti::audio::core {
class Telephony : public ::aidl::android::hardware::audio::core::BnTelephony {
public:
Telephony();
virtual ~Telephony() override;
enum class CallState : uint8_t {
IN_ACTIVE = 1,
ACTIVE = 2,
#ifdef SEC_AUDIO_CALL_SATELLITE
EXTMODEM_ACTIVE = 3,
#endif
};
friend std::ostream& operator<<(std::ostream& os, const CallState& state);
enum class VSID : int64_t {
VSID_1 = 0x11C05000,
VSID_2 = 0x11DC5000,
#ifdef SEC_AUDIO_COMMON
VSID_LB_1 = 0x12006000,
VSID_LB_2 = 0x121C6000,
#endif
};
friend std::ostream& operator<<(std::ostream& os, const VSID& vsid);
using CallType = std::string;
struct SetUpdates {
/*
        The call state is the key of the set update; it decides the validity
        or relevance of the other parameters.
*/
CallState mCallState{CallState::IN_ACTIVE};
CallType mCallType{""};
bool mIsCrsCall{false};
VSID mVSID{VSID::VSID_1};
std::string toString() const {
std::ostringstream os;
os << "{ mCallState:" << mCallState << ", mVSID:" << mVSID
<< ", mIsCrsCall:" << mIsCrsCall << ", mCallType:" << mCallType << "}";
return os.str();
}
};
struct CallStatus {
CallState current_;
CallState new_;
};
float mCRSVolume = 0.4f; //default CRS call volume
bool mIsCRSStarted{false};
VSID mCRSVSID{VSID::VSID_1};
constexpr static size_t KCodecBackendDefaultBitWidth = 16;
const static ::aidl::android::media::audio::common::AudioDevice kDefaultRxDevice;
const static ::aidl::android::media::audio::common::AudioDevice kDefaultCRSRxDevice;
static constexpr int32_t VSID1_VOICE_SESSION = 0;
static constexpr int32_t VSID2_VOICE_SESSION = 1;
static constexpr int32_t MAX_VOICE_SESSIONS = 2;
static constexpr int32_t MIN_CRS_VOL_INDEX = 0;
static constexpr int32_t MAX_CRS_VOL_INDEX = 7;
struct SetUpdateSession {
CallStatus state;
SetUpdates CallUpdate;
};
struct VoiceSession {
SetUpdateSession session[MAX_VOICE_SESSIONS];
};
VoiceSession mVoiceSession;
#ifdef SEC_AUDIO_CALL
VSID mCurVSID{VSID::VSID_1};
bool mIsVolteVT{false};
bool mIsVoWiFi{false};
int mCallBand = WB;
#endif
    /* All the public APIs are guarded by mLock; hence never call a public
     * API from another public API */
public:
ndk::ScopedAStatus getSupportedAudioModes(
std::vector<::aidl::android::media::audio::common::AudioMode>* _aidl_return) override;
ndk::ScopedAStatus switchAudioMode(
::aidl::android::media::audio::common::AudioMode in_mode) override;
ndk::ScopedAStatus setTelecomConfig(const TelecomConfig& in_config,
TelecomConfig* _aidl_return) override;
    /* This API is called when there are "TELEPHONY"-related set parameters
       on the primary module */
void reconfigure(const SetUpdates& setUpdates);
void updateVolumeBoost(const bool enable);
void updateSlowTalk(const bool enable);
void updateHDVoice(const bool enable);
void updateDeviceMute(const bool isMute, const std::string& muteDirection);
#ifdef SEC_AUDIO_CALL
void updateSecCallState(const int state);
void updateSecVSID(const int vsid);
void updateSecCallBand(const int band);
void configureMicMode();
void setCallForwarding(bool enable);
#endif
#ifdef SEC_AUDIO_CALL_SATELLITE
void configureExtModemCall();
#endif
bool isCrsCallSupported();
void setCRSVolumeFromIndex(const int index);
void updateVoiceVolume();
#ifdef SEC_AUDIO_CALL_SATELLITE
void updateExtModemCallVolume();
void updateExtModemMicMute();
#endif
void setMicMute(const bool muted);
void updateCalls();
    // The two APIs below are both aimed at solving routing for telephony.
    /**
     * @brief Sets the Rx or Tx devices from a device-to-device patch.
     * @param devices devices obtained from the patch
     * @param updateRx whether the device update is for rx devices or tx devices;
     * true when the rx devices need updating, false otherwise.
     */
void setDevices(const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
const bool updateRx);
    /**
     * The following API resets the RX or TX device.
     * @param resetRx indicates the device to reset: true for RX, false for TX
     **/
void resetDevices(const bool resetRx);
#ifdef SEC_AUDIO_COMMON
void updateLoopback(const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
const bool loopbackon);
#endif
    // Telephony to decide its strategy when there is an external device connection change
void onExternalDeviceConnectionChanged(
const ::aidl::android::media::audio::common::AudioDevice& extDevice,
const bool& connect);
/* Telephony to act on primary stream devices change */
void onOutputPrimaryStreamDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>&);
/* Telephony to act upon bluetooth sco enabled or disabled */
void onBluetoothScoEvent(const bool& enable);
/* set the voip stream */
void setVoipPlaybackStream(std::weak_ptr<StreamCommonInterface> voipStream);
/* called on playback stream start/close */
void onPlaybackStart();
void onPlaybackClose();
void updateVoiceMetadataForBT(bool call_active);
std::weak_ptr<StreamOut> mStreamOutPrimary;
std::weak_ptr<StreamIn> mStreamInPrimary;
protected:
ndk::ScopedAStatus startCall();
ndk::ScopedAStatus stopCall();
void VoiceStop();
void configureVolumeBoost();
void configureSlowTalk();
void configureHDVoice();
void configureDeviceMute();
void updateDevices();
void updateTtyMode();
void updateCrsDevice();
void startCrsLoopback();
void stopCrsLoopback();
void triggerHACinVoipPlayback();
::aidl::android::media::audio::common::AudioDevice getMatchingTxDevice(
const ::aidl::android::media::audio::common::AudioDevice & rxDevice);
bool isAnyCallActive();
#ifdef SEC_AUDIO_RECOVERY
void initSecConfig();
#endif
protected:
    // Guards all the public APIs
std::mutex mLock;
TelecomConfig mTelecomConfig;
const std::vector<::aidl::android::media::audio::common::AudioMode> mSupportedAudioModes = {
::aidl::android::media::audio::common::AudioMode::NORMAL,
::aidl::android::media::audio::common::AudioMode::RINGTONE,
::aidl::android::media::audio::common::AudioMode::IN_CALL,
::aidl::android::media::audio::common::AudioMode::IN_COMMUNICATION,
::aidl::android::media::audio::common::AudioMode::CALL_SCREEN,
};
::aidl::android::media::audio::common::AudioMode mAudioMode{
::aidl::android::media::audio::common::AudioMode::NORMAL};
SetUpdates mSetUpdates{};
bool mIsVolumeBoostEnabled{false};
bool mIsSlowTalkEnabled{false};
bool mIsHDVoiceEnabled{false};
bool mIsDeviceMuted{false};
bool hasValidPlaybackStream{false};
bool mIsVoiceStarted{false};
std::string mMuteDirection{""};
using TtyMap = std::map<TelecomConfig::TtyMode, pal_tty_t>;
const TtyMap mTtyMap{
{TelecomConfig::TtyMode::OFF, PAL_TTY_OFF},
{TelecomConfig::TtyMode::FULL, PAL_TTY_FULL},
{TelecomConfig::TtyMode::HCO, PAL_TTY_HCO},
{TelecomConfig::TtyMode::VCO, PAL_TTY_VCO},
};
::aidl::android::media::audio::common::AudioDevice mRxDevice; // speaker, earpiece
::aidl::android::media::audio::common::AudioDevice mTxDevice; // mic, speaker mic
pal_stream_handle_t* mPalCrsHandle{nullptr};
pal_stream_handle_t* mPalHandle{nullptr};
// Stream Handle for VOIP Playback
std::weak_ptr<StreamCommonInterface> mVoipStreamWptr;
Platform& mPlatform{Platform::getInstance()};
#ifdef SEC_AUDIO_COMMON
SecFTM& mSecFTM{SecFTM::getInstance()};
AudioEffect& mAudioEffect{AudioEffect::getInstance()};
AudioExtension& mAudExt{AudioExtension::getInstance()};
#endif
};
} // namespace qti::audio::core

View File

@@ -1,26 +0,0 @@
<!--
Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
SPDX-License-Identifier: BSD-3-Clause-Clear
-->
<manifest version="1.0" type="device">
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IModule/default</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IModule/r_submix</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IModule/usb</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IConfig/default</fqname>
</hal>
</manifest>

View File

@@ -1,36 +0,0 @@
cc_library_static {
name: "libaudio_module_config.qti",
vendor: true,
export_include_dirs: ["include"],
srcs: [
"ModuleConfig.cpp",
],
shared_libs: [
"libaudioaidlcommon",
"libbase",
"libcutils",
"liblog",
"libstagefright_foundation",
"libutils",
"libxml2",
"android.media.audio.common.types-V3-ndk",
"android.hardware.audio.core-V2-ndk",
],
header_libs: [
"libxsdc-utils",
"libaudio_system_headers",
],
generated_headers: [
"audio_module_config_qti",
],
generated_sources: [
"audio_module_config_qti",
],
cflags: [
"-Wall",
"-Wextra",
"-Werror",
"-Wthread-safety",
],
}

View File

@@ -1,757 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <Utils.h>
#include <aidl/android/media/audio/common/AudioChannelLayout.h>
#include <aidl/android/media/audio/common/AudioDeviceType.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <aidl/android/media/audio/common/AudioFormatType.h>
#include <aidl/android/media/audio/common/AudioIoFlags.h>
#include <aidl/android/media/audio/common/AudioOutputFlags.h>
#include <aidl/android/media/audio/common/PcmType.h>
#include <android-base/logging.h>
#include <audio_module_config_qti.h>
#include <audio_module_config_qti_enums.h>
#include <libxml/parser.h>
#include <libxml/xinclude.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>
#include <qti-audio-core/ModuleConfig.h>
// { SEC_AUDIO_COMMON
#include <system/audio.h>
// } SEC_AUDIO_COMMON
using ::aidl::android::hardware::audio::common::makeBitPositionFlagMask;
using ::aidl::android::hardware::audio::core::AudioPatch;
using ::aidl::android::hardware::audio::core::AudioRoute;
using ::aidl::android::media::audio::common::AudioChannelLayout;
using ::aidl::android::media::audio::common::AudioDeviceAddress;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::aidl::android::media::audio::common::AudioDeviceType;
using ::aidl::android::media::audio::common::AudioEncapsulationType;
using ::aidl::android::media::audio::common::AudioFormatDescription;
using ::aidl::android::media::audio::common::AudioFormatType;
using ::aidl::android::media::audio::common::AudioGain;
using ::aidl::android::media::audio::common::AudioGainConfig;
using ::aidl::android::media::audio::common::AudioIoFlags;
using ::aidl::android::media::audio::common::AudioOutputFlags;
using ::aidl::android::media::audio::common::AudioPort;
using ::aidl::android::media::audio::common::AudioPortConfig;
using ::aidl::android::media::audio::common::AudioPortDeviceExt;
using ::aidl::android::media::audio::common::AudioPortExt;
using ::aidl::android::media::audio::common::AudioPortMixExt;
using ::aidl::android::media::audio::common::AudioProfile;
using ::aidl::android::media::audio::common::Int;
using ::aidl::android::media::audio::common::MicrophoneInfo;
using ::aidl::android::media::audio::common::PcmType;
namespace xsd = ::audio_module_config_qti;
namespace qti::audio::core {
const static char kRouteDelimiter = ',';
const static std::string kDefaultOutputDevice = "speaker";
const static std::unordered_map<xsd::AudioPcmType, PcmType> XsdToPcmType = {
{xsd::AudioPcmType::DEFAULT, PcmType::DEFAULT},
{xsd::AudioPcmType::UINT_8_BIT, PcmType::UINT_8_BIT},
{xsd::AudioPcmType::INT_16_BIT, PcmType::INT_16_BIT},
{xsd::AudioPcmType::INT_32_BIT, PcmType::INT_32_BIT},
{xsd::AudioPcmType::FIXED_Q_8_24, PcmType::FIXED_Q_8_24},
{xsd::AudioPcmType::FLOAT_32_BIT, PcmType::FLOAT_32_BIT},
{xsd::AudioPcmType::INT_24_BIT, PcmType::INT_24_BIT},
};
const static std::unordered_map<xsd::AudioDeviceType, AudioDeviceType> XsdToAudioDeviceType = {
{xsd::AudioDeviceType::NONE, AudioDeviceType::NONE},
{xsd::AudioDeviceType::IN_DEFAULT, AudioDeviceType::IN_DEFAULT},
{xsd::AudioDeviceType::IN_ACCESSORY, AudioDeviceType::IN_ACCESSORY},
{xsd::AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::IN_AFE_PROXY},
{xsd::AudioDeviceType::IN_DEVICE, AudioDeviceType::IN_DEVICE},
{xsd::AudioDeviceType::IN_ECHO_REFERENCE, AudioDeviceType::IN_ECHO_REFERENCE},
{xsd::AudioDeviceType::IN_FM_TUNER, AudioDeviceType::IN_FM_TUNER},
{xsd::AudioDeviceType::IN_HEADSET, AudioDeviceType::IN_HEADSET},
{xsd::AudioDeviceType::IN_LOOPBACK, AudioDeviceType::IN_LOOPBACK},
{xsd::AudioDeviceType::IN_MICROPHONE, AudioDeviceType::IN_MICROPHONE},
{xsd::AudioDeviceType::IN_MICROPHONE_BACK, AudioDeviceType::IN_MICROPHONE_BACK},
{xsd::AudioDeviceType::IN_SUBMIX, AudioDeviceType::IN_SUBMIX},
{xsd::AudioDeviceType::IN_TELEPHONY_RX, AudioDeviceType::IN_TELEPHONY_RX},
{xsd::AudioDeviceType::IN_TV_TUNER, AudioDeviceType::IN_TV_TUNER},
{xsd::AudioDeviceType::IN_DOCK, AudioDeviceType::IN_DOCK},
#ifdef SEC_AUDIO_COMMON
{xsd::AudioDeviceType::IN_MICROPHONE_MULTI, AudioDeviceType::IN_MICROPHONE_MULTI},
#endif
{xsd::AudioDeviceType::OUT_DEFAULT, AudioDeviceType::OUT_DEFAULT},
{xsd::AudioDeviceType::OUT_ACCESSORY, AudioDeviceType::OUT_ACCESSORY},
{xsd::AudioDeviceType::OUT_AFE_PROXY, AudioDeviceType::OUT_AFE_PROXY},
{xsd::AudioDeviceType::OUT_CARKIT, AudioDeviceType::OUT_CARKIT},
{xsd::AudioDeviceType::OUT_DEVICE, AudioDeviceType::OUT_DEVICE},
{xsd::AudioDeviceType::OUT_ECHO_CANCELLER, AudioDeviceType::OUT_ECHO_CANCELLER},
{xsd::AudioDeviceType::OUT_FM, AudioDeviceType::OUT_FM},
{xsd::AudioDeviceType::OUT_HEADPHONE, AudioDeviceType::OUT_HEADPHONE},
{xsd::AudioDeviceType::OUT_HEADSET, AudioDeviceType::OUT_HEADSET},
{xsd::AudioDeviceType::OUT_HEARING_AID, AudioDeviceType::OUT_HEARING_AID},
{xsd::AudioDeviceType::OUT_LINE_AUX, AudioDeviceType::OUT_LINE_AUX},
{xsd::AudioDeviceType::OUT_SPEAKER, AudioDeviceType::OUT_SPEAKER},
{xsd::AudioDeviceType::OUT_SPEAKER_EARPIECE, AudioDeviceType::OUT_SPEAKER_EARPIECE},
{xsd::AudioDeviceType::OUT_SPEAKER_SAFE, AudioDeviceType::OUT_SPEAKER_SAFE},
{xsd::AudioDeviceType::OUT_SUBMIX, AudioDeviceType::OUT_SUBMIX},
{xsd::AudioDeviceType::OUT_TELEPHONY_TX, AudioDeviceType::OUT_TELEPHONY_TX},
{xsd::AudioDeviceType::OUT_DOCK, AudioDeviceType::OUT_DOCK},
{xsd::AudioDeviceType::OUT_BROADCAST, AudioDeviceType::OUT_BROADCAST},
};
const static std::unordered_map<xsd::AudioChannelLayout, int32_t> XsdToAudioChannelLayout = {
{xsd::AudioChannelLayout::LAYOUT_MONO, AudioChannelLayout::LAYOUT_MONO},
{xsd::AudioChannelLayout::LAYOUT_STEREO, AudioChannelLayout::LAYOUT_STEREO},
{xsd::AudioChannelLayout::LAYOUT_2POINT1, AudioChannelLayout::LAYOUT_2POINT1},
{xsd::AudioChannelLayout::LAYOUT_TRI, AudioChannelLayout::LAYOUT_TRI},
{xsd::AudioChannelLayout::LAYOUT_TRI_BACK, AudioChannelLayout::LAYOUT_TRI_BACK},
{xsd::AudioChannelLayout::LAYOUT_3POINT1, AudioChannelLayout::LAYOUT_3POINT1},
{xsd::AudioChannelLayout::LAYOUT_2POINT0POINT2, AudioChannelLayout::LAYOUT_2POINT0POINT2},
{xsd::AudioChannelLayout::LAYOUT_2POINT1POINT2, AudioChannelLayout::LAYOUT_2POINT1POINT2},
{xsd::AudioChannelLayout::LAYOUT_3POINT0POINT2, AudioChannelLayout::LAYOUT_3POINT0POINT2},
{xsd::AudioChannelLayout::LAYOUT_3POINT1POINT2, AudioChannelLayout::LAYOUT_3POINT1POINT2},
{xsd::AudioChannelLayout::LAYOUT_QUAD, AudioChannelLayout::LAYOUT_QUAD},
{xsd::AudioChannelLayout::LAYOUT_QUAD_SIDE, AudioChannelLayout::LAYOUT_QUAD_SIDE},
{xsd::AudioChannelLayout::LAYOUT_SURROUND, AudioChannelLayout::LAYOUT_SURROUND},
{xsd::AudioChannelLayout::LAYOUT_PENTA, AudioChannelLayout::LAYOUT_PENTA},
{xsd::AudioChannelLayout::LAYOUT_5POINT1, AudioChannelLayout::LAYOUT_5POINT1},
{xsd::AudioChannelLayout::LAYOUT_5POINT1_SIDE, AudioChannelLayout::LAYOUT_5POINT1_SIDE},
{xsd::AudioChannelLayout::LAYOUT_5POINT1POINT2, AudioChannelLayout::LAYOUT_5POINT1POINT2},
{xsd::AudioChannelLayout::LAYOUT_5POINT1POINT4, AudioChannelLayout::LAYOUT_5POINT1POINT4},
{xsd::AudioChannelLayout::LAYOUT_6POINT1, AudioChannelLayout::LAYOUT_6POINT1},
{xsd::AudioChannelLayout::LAYOUT_7POINT1, AudioChannelLayout::LAYOUT_7POINT1},
{xsd::AudioChannelLayout::LAYOUT_7POINT1POINT2, AudioChannelLayout::LAYOUT_7POINT1POINT2},
{xsd::AudioChannelLayout::LAYOUT_7POINT1POINT4, AudioChannelLayout::LAYOUT_7POINT1POINT4},
{xsd::AudioChannelLayout::LAYOUT_9POINT1POINT4, AudioChannelLayout::LAYOUT_9POINT1POINT4},
{xsd::AudioChannelLayout::LAYOUT_9POINT1POINT6, AudioChannelLayout::LAYOUT_9POINT1POINT6},
{xsd::AudioChannelLayout::LAYOUT_13POINT_360RA, AudioChannelLayout::LAYOUT_13POINT_360RA},
{xsd::AudioChannelLayout::LAYOUT_22POINT2, AudioChannelLayout::LAYOUT_22POINT2},
{xsd::AudioChannelLayout::LAYOUT_MONO_HAPTIC_A, AudioChannelLayout::LAYOUT_MONO_HAPTIC_A},
{xsd::AudioChannelLayout::LAYOUT_STEREO_HAPTIC_A,
AudioChannelLayout::LAYOUT_STEREO_HAPTIC_A},
{xsd::AudioChannelLayout::LAYOUT_HAPTIC_AB, AudioChannelLayout::LAYOUT_HAPTIC_AB},
{xsd::AudioChannelLayout::LAYOUT_MONO_HAPTIC_AB, AudioChannelLayout::LAYOUT_MONO_HAPTIC_AB},
{xsd::AudioChannelLayout::LAYOUT_STEREO_HAPTIC_AB,
AudioChannelLayout::LAYOUT_STEREO_HAPTIC_AB},
{xsd::AudioChannelLayout::LAYOUT_FRONT_BACK, AudioChannelLayout::LAYOUT_FRONT_BACK},
};
template <class... Ts>
struct overloaded : Ts... {
using Ts::operator()...;
};
template <class... Ts>
overloaded(Ts...)->overloaded<Ts...>;
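// The 'overloaded' helper above is the standard C++17 visitor idiom: it merges several
// lambdas into a single callable so that std::visit can dispatch on whichever alternative
// a std::variant currently holds. A minimal, purely illustrative sketch (the variant and
// lambdas below are hypothetical, mirroring how this file later visits profile formats):
//
//     std::variant<PcmType, std::string> fmt = PcmType::INT_16_BIT;
//     std::visit(overloaded{[](const PcmType&) { /* PCM profile */ },
//                           [](const std::string&) { /* encoded profile */ }},
//                fmt);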
static inline bool maybeVendorExtension(const std::string& s) {
// Only checks whether the string starts with the "vendor prefix".
static const std::string vendorPrefix = "VX_";
return s.size() > vendorPrefix.size() && s.substr(0, vendorPrefix.size()) == vendorPrefix;
}
static auto findPortByTagName(const std::vector<AudioPort>& collection, std::string tagName) {
return std::find_if(collection.begin(), collection.end(),
[&](const auto& e) { return (e.name == tagName); });
}
static int32_t findPortIdByTagName(const std::vector<AudioPort>& ports, std::string tagName) {
auto portItr = findPortByTagName(ports, tagName);
if (portItr == ports.end()) {
return -EINVAL;
}
return (*portItr).id;
}
static std::vector<std::string> getAudioHalConfigurationPaths() {
static const std::vector<std::string> paths = []() {
return std::vector<std::string>({"/vendor/etc/audio"});
}();
return paths;
}
static std::string getReadAbleConfigurationFile(const char* fileName) {
for (const auto& path : getAudioHalConfigurationPaths()) {
std::string tryPath = path + "/" + fileName;
if (::access(tryPath.c_str(), R_OK) == 0) {
return tryPath;
}
}
return {};
}
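// Illustrative note: with the single search path above, a call such as
// getReadAbleConfigurationFile("audio_module_config_primary.xml") would be expected to
// return "/vendor/etc/audio/audio_module_config_primary.xml" when that file is readable,
// and an empty string otherwise (the file name shown here is only an example argument;
// it matches the one referenced later through kPrimaryModuleConfigFileName).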
static void fillProfile(AudioProfile* profile, const std::string& name,
const std::vector<int32_t>& channelLayouts,
const std::vector<int64_t>& sampleRates,
AudioEncapsulationType encapsulationType) {
profile->name = name;
for (auto layout : channelLayouts) {
profile->channelMasks.push_back(
AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout));
}
profile->sampleRates.insert(profile->sampleRates.end(), sampleRates.begin(), sampleRates.end());
profile->encapsulationType = encapsulationType;
}
static AudioProfile createProfile(
const std::string& name, PcmType pcmType, const std::vector<int32_t>& channelLayouts,
const std::vector<int64_t>& sampleRates,
AudioEncapsulationType encapsulationType = AudioEncapsulationType::NONE) {
AudioProfile profile;
profile.format.type = AudioFormatType::PCM;
profile.format.pcm = pcmType;
fillProfile(&profile, name, channelLayouts, sampleRates, encapsulationType);
return profile;
}
static AudioProfile createProfile(
const std::string& name, const std::string& encodingType,
const std::vector<int32_t>& channelLayouts, const std::vector<int64_t>& sampleRates,
AudioEncapsulationType encapsulationType = AudioEncapsulationType::NONE) {
AudioProfile profile;
profile.format.encoding = encodingType;
profile.format.type = ::aidl::android::media::audio::common::AudioFormatType::NON_PCM;
fillProfile(&profile, name, channelLayouts, sampleRates, encapsulationType);
return profile;
}
static AudioGain createGain(int32_t mode, int32_t channelMask,
std::pair<int32_t, int32_t> minMaxGain,
std::pair<int32_t, int32_t> minMaxRamp,
std::pair<int32_t, int32_t> stepAndDefault, bool useForVolume) {
AudioGain gain;
gain.mode = mode;
gain.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(channelMask);
gain.minValue = std::get<0>(minMaxGain);
gain.maxValue = std::get<1>(minMaxGain);
gain.minRampMs = std::get<0>(minMaxRamp);
gain.maxRampMs = std::get<1>(minMaxRamp);
gain.stepValue = std::get<0>(stepAndDefault);
gain.defaultValue = std::get<1>(stepAndDefault);
gain.useForVolume = useForVolume;
return gain;
}
static AudioPortExt createDeviceExt(AudioDeviceType devType, int32_t flags,
std::vector<AudioFormatDescription> formats,
AudioDeviceAddress address = "", std::string connection = "") {
AudioPortDeviceExt deviceExt;
deviceExt.device.type.type = devType;
deviceExt.device.type.connection = connection;
deviceExt.flags = flags;
deviceExt.device.address = address;
deviceExt.encodedFormats = formats;
return AudioPortExt::make<AudioPortExt::Tag::device>(deviceExt);
}
static AudioPortExt createPortMixExt(int32_t maxOpenStreamCount, int32_t maxActiveStreamCount,
int32_t recommendedMuteDurationMs = 0) {
AudioPortMixExt mixExt;
mixExt.maxOpenStreamCount = maxOpenStreamCount;
mixExt.maxActiveStreamCount = maxActiveStreamCount;
mixExt.recommendedMuteDurationMs = recommendedMuteDurationMs;
return AudioPortExt::make<AudioPortExt::Tag::mix>(mixExt);
}
static AudioPort createPort(int32_t id, const std::string& name, int32_t flags, bool isInput,
const AudioPortExt& ext) {
AudioPort port;
port.id = id;
port.name = name;
port.flags = isInput ? AudioIoFlags::make<AudioIoFlags::Tag::input>(flags)
: AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
port.ext = ext;
return port;
}
static AudioPortConfig createPortConfig(int32_t id, int32_t portId, PcmType pcmType, int32_t layout,
int32_t sampleRate, int32_t flags, bool isInput,
const AudioPortExt& ext) {
AudioPortConfig config;
config.id = id;
config.portId = portId;
config.sampleRate = Int{.value = sampleRate};
config.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(layout);
config.format = AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = pcmType};
config.gain = AudioGainConfig();
config.flags = isInput ? AudioIoFlags::make<AudioIoFlags::Tag::input>(flags)
: AudioIoFlags::make<AudioIoFlags::Tag::output>(flags);
config.ext = ext;
return config;
}
static AudioRoute createRoute(const std::vector<AudioPort>& sources, const AudioPort& sink) {
AudioRoute route;
route.sinkPortId = sink.id;
std::transform(sources.begin(), sources.end(), std::back_inserter(route.sourcePortIds),
[](const auto& port) { return port.id; });
return route;
}
static void sortAudioProfiles(std::vector<AudioProfile>& profiles) {
std::sort(profiles.begin(), profiles.end());
}
static void dumpProfiles(const AudioProfile& profile, int32_t portId) {
LOG(INFO) << " --------- PROFILE for Port ID = " << portId << " ----------";
LOG(INFO) << " Name: " << profile.name;
if (profile.format.type == AudioFormatType::PCM) {
LOG(INFO) << " Format: PCM, type: 0x" << std::hex
<< static_cast<int32_t>(profile.format.pcm);
} else {
LOG(INFO) << " Format: NON_PCM, encoding: " << profile.format.encoding;
}
std::string sampleRates;
std::for_each(profile.sampleRates.begin(), profile.sampleRates.end(),
[&](int32_t rate) { sampleRates += std::to_string(rate) + ", "; });
if (!sampleRates.empty()) {
sampleRates = sampleRates.substr(0, sampleRates.size() - 2);
}
LOG(INFO) << " Sample rates: " << sampleRates;
std::ostringstream os;
std::for_each(profile.channelMasks.begin(), profile.channelMasks.end(),
[&](const auto ele) { os << ele.toString(); });
LOG(INFO) << " Channel Masks: " << os.str();
// TODO: Print channel layouts
}
static void dumpRoute(const AudioRoute& route) {
LOG(DEBUG) << "\n---------ROUTE DUMP----------";
std::string sourcePorts;
std::for_each(route.sourcePortIds.begin(), route.sourcePortIds.end(),
[&](int32_t portId) { sourcePorts += std::to_string(portId) + ", "; });
if (!sourcePorts.empty()) {
sourcePorts = sourcePorts.substr(0, sourcePorts.size() - 2);
}
LOG(DEBUG) << "Source Port IDs: " << sourcePorts;
}
static void dumpMixExt(const AudioPortExt& ext) {
auto& mixExt = ext.get<AudioPortExt::Tag::mix>();
LOG(DEBUG) << "MixExt: maxOpenStreamCount: " << mixExt.maxOpenStreamCount
<< " maxActiveStreamCount: " << mixExt.maxActiveStreamCount;
if (mixExt.recommendedMuteDurationMs) {
LOG(DEBUG) << "MixExt: recommendedMuteDurationMs: " << mixExt.recommendedMuteDurationMs;
}
}
static void dumpDeviceExt(const AudioPortExt& ext) {
auto& deviceExt = ext.get<AudioPortExt::Tag::device>();
LOG(DEBUG) << "DeviceExt: type: 0x" << std::hex
<< static_cast<int32_t>(deviceExt.device.type.type)
<< ", connection: " << deviceExt.device.type.connection;
LOG(DEBUG) << "DeviceExt: flags: 0x" << std::hex << deviceExt.flags
<< " address: " << deviceExt.device.address.get<AudioDeviceAddress::Tag::id>();
std::for_each(deviceExt.encodedFormats.begin(), deviceExt.encodedFormats.end(),
[&](const auto& format) {
LOG(DEBUG) << "DeviceExt: encoding format: " << format.encoding;
});
}
static void dumpPort(const AudioPort& port, bool isInput, bool isMix) {
LOG(DEBUG) << "\n---------PORT DUMP----------";
LOG(DEBUG) << "Port ID: " << port.id;
LOG(DEBUG) << "Port Name: " << port.name;
if (isInput) {
LOG(DEBUG) << "Input flags: 0x" << std::hex << port.flags.get<AudioIoFlags::Tag::input>();
} else {
LOG(DEBUG) << "Output flags: 0x" << std::hex << port.flags.get<AudioIoFlags::Tag::output>();
}
auto dumpExtension = isMix ? dumpMixExt : dumpDeviceExt;
dumpExtension(port.ext);
std::for_each(port.profiles.begin(), port.profiles.end(),
[&](const auto& profile) { dumpProfiles(profile, port.id); });
}
static std::vector<AudioProfile> populateProfiles(
const std::variant<const xsd::MixPorts::MixPort, const xsd::DevicePorts::DevicePort>&
audioPort) {
std::vector<AudioProfile> audioProfiles;
auto isFormatInvalid = [](const xsd::Profile& profile) {
if (!profile.hasPcmType() && !profile.hasEncoding()) {
return true;
}
return false;
};
auto getName = [](const xsd::Profile& profile) {
if (!profile.hasName()) {
return "";
}
return profile.getName().c_str();
};
auto getFormat = [](const xsd::Profile& profile) -> std::variant<PcmType, const std::string> {
if (profile.hasEncoding()) {
return profile.getEncoding();
}
if (XsdToPcmType.find(profile.getPcmType()) == XsdToPcmType.end()) {
return PcmType::DEFAULT;
}
return XsdToPcmType.at(profile.getPcmType());
};
auto getEncapsulationType = [](const xsd::Profile& profile) {
if (!profile.hasEncapsulationType()) {
return AudioEncapsulationType::NONE;
}
return static_cast<AudioEncapsulationType>(profile.getEncapsulationType());
};
auto getChannels = [](const xsd::Profile& profile) {
std::vector<int32_t> channels;
if (!profile.hasChannelLayouts()) {
return channels;
}
std::for_each(profile.getChannelLayouts().begin(), profile.getChannelLayouts().end(),
[&](const auto& chLayout) {
channels.push_back(XsdToAudioChannelLayout.at(chLayout));
});
return channels;
};
std::visit(
[&](const auto& port) {
for (const auto& profile : port.getProfile()) {
// TODO: check whether profiles are required to have channel layouts
if (isFormatInvalid(profile) || !profile.hasSamplingRates() ||
!profile.hasChannelLayouts()) {
if (profile.hasName()) {
LOG(WARNING) << __func__ << ": Ignore invalid profile "
<< profile.getName();
}
continue;
}
auto format = getFormat(profile);
std::visit(
overloaded{[&](const std::string& formatStr) {
audioProfiles.push_back(createProfile(
getName(profile), formatStr,
getChannels(profile), profile.getSamplingRates(),
getEncapsulationType(profile)));
},
[&](const PcmType& formatPcm) {
audioProfiles.push_back(createProfile(
getName(profile), formatPcm,
getChannels(profile), profile.getSamplingRates(),
getEncapsulationType(profile)));
}},
format);
}
},
audioPort);
return audioProfiles;
}
static std::vector<AudioGain> populateGains(
const std::variant<const xsd::MixPorts::MixPort, const xsd::DevicePorts::DevicePort>&
audioPort) {
std::vector<AudioGain> audioGains;
auto getMode = [](const xsd::Gains::Gain& gain) {
if (!gain.hasMode()) {
return 0;
}
return static_cast<int32_t>(gain.getMode()[0]);
};
auto getChannelLayout = [](const xsd::Gains::Gain& gain) {
if (!gain.hasChannel_layout()) {
return xsd::AudioChannelLayout::LAYOUT_MONO;
}
return gain.getChannel_layout();
};
auto getMinMaxGain = [](const xsd::Gains::Gain& gain) {
int32_t min = gain.hasMinValueMB() ? gain.getMinValueMB() : 0;
int32_t max = gain.hasMaxValueMB() ? gain.getMaxValueMB() : 0;
return std::make_pair(min, max);
};
auto getMinMaxRamp = [](const xsd::Gains::Gain& gain) {
int32_t min = gain.hasMinRampMs() ? gain.getMinRampMs() : 0;
int32_t max = gain.hasMaxRampMs() ? gain.getMaxRampMs() : 0;
return std::make_pair(min, max);
};
auto getStepAndDefault = [](const xsd::Gains::Gain& gain) {
int32_t step = gain.hasStepValueMB() ? gain.getStepValueMB() : 0;
int32_t defaultVal = gain.hasDefaultValueMB() ? gain.getDefaultValueMB() : 0;
return std::make_pair(step, defaultVal);
};
auto getUseForVolume = [](const xsd::Gains::Gain& gain) {
if (!gain.hasUseForVolume()) {
return false;
}
return gain.getUseForVolume();
};
std::visit(
[&](const auto& port) {
for (const auto& gain : port.getFirstGains()->getGain()) {
auto g = createGain(getMode(gain), static_cast<int32_t>(getChannelLayout(gain)),
getMinMaxGain(gain), getMinMaxRamp(gain),
getStepAndDefault(gain), getUseForVolume(gain));
audioGains.push_back(g);
}
},
audioPort);
return audioGains;
}
static void populateMixPorts(const xsd::Modules::Module& module,
std::unique_ptr<ModuleConfig>& moduleConfig) {
auto getFlags = [](const xsd::MixPorts::MixPort& mixPort, bool isInput) {
if (!mixPort.hasFlags()) {
return 0;
}
int32_t flags = 0;
auto flagVec = mixPort.getFlags();
std::for_each(flagVec.begin(), flagVec.end(), [&](const auto& flagStr) {
LOG(VERBOSE) << " flag " << flagStr;
flags |= 1 << (isInput ? static_cast<int32_t>(xsd::stringToAudioInputFlag(flagStr))
: static_cast<int32_t>(xsd::stringToAudioOutputFlag(flagStr)));
});
return flags;
};
if (!module.getFirstMixPorts()) {
LOG(ERROR) << __func__ << " No mix ports";
return;
}
for (const auto& mixPort : module.getFirstMixPorts()->getMixPort()) {
std::string name = mixPort.hasName() ? mixPort.getName() : "";
auto role = mixPort.hasRole() ? mixPort.getRole() : xsd::Role::UNKNOWN;
bool isInput = role == xsd::Role::source ? false : true;
int32_t maxOpenCount = mixPort.hasMaxOpenCount() ? mixPort.getMaxOpenCount() : 1;
int32_t maxActiveCount = mixPort.hasMaxActiveCount() ? mixPort.getMaxActiveCount() : 1;
int32_t recommendedMuteDurationMs =
mixPort.hasRecommendedMuteDurationMs() ? mixPort.getRecommendedMuteDurationMs() : 0;
auto port = createPort(
moduleConfig->nextPortId++, name, getFlags(mixPort, isInput), isInput,
createPortMixExt(maxOpenCount, maxActiveCount, recommendedMuteDurationMs));
if (mixPort.hasProfile()) {
port.profiles = populateProfiles(mixPort);
}
if (mixPort.hasGains()) {
port.gains = populateGains(mixPort);
}
moduleConfig->ports.emplace_back(std::move(port));
// dumpPort(moduleConfig->ports.at(port.id), isInput, true);
}
}
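// Illustrative sketch of the flag packing above (symbolic only; the numeric shifts come
// from the ordinals returned by the xsd string-to-enum helpers): a mixPort declared with
// role="source" and flags="PRIMARY DEEP_BUFFER" is treated as an output mix port whose
// flags value is
//     (1 << PRIMARY) | (1 << DEEP_BUFFER)
// where PRIMARY and DEEP_BUFFER stand for the integers produced by
// xsd::stringToAudioOutputFlag(); createPort() then wraps that value via
// AudioIoFlags::make<AudioIoFlags::Tag::output>(flags).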
static void populateDevicePorts(const xsd::Modules::Module& module,
std::unique_ptr<ModuleConfig>& moduleConfig) {
auto getFlags = [](const xsd::DevicePorts::DevicePort& devPort) {
if (devPort.hasDefaultDevice() && devPort.getDefaultDevice() && !devPort.hasConnection()) {
return 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
}
return 0;
};
auto getFormats = [](const xsd::DevicePorts::DevicePort& devPort) {
std::vector<AudioFormatDescription> encodings;
if (!devPort.hasEncodings()) {
return encodings;
}
for (const auto& encodingType : devPort.getEncodings()) {
auto format = AudioFormatDescription{.type = AudioFormatType::NON_PCM,
.encoding = encodingType};
encodings.push_back(format);
}
return encodings;
};
if (!module.getFirstDevicePorts()) {
LOG(ERROR) << __func__ << " No device ports";
return;
}
for (const auto& devicePort : module.getFirstDevicePorts()->getDevicePort()) {
std::string name = devicePort.hasTagName() ? devicePort.getTagName() : "";
auto role = devicePort.hasRole() ? devicePort.getRole() : xsd::Role::UNKNOWN;
bool isInput = role == xsd::Role::source ? true : false;
AudioDeviceType devType = devicePort.hasDeviceType()
? XsdToAudioDeviceType.at(devicePort.getDeviceType())
: AudioDeviceType::NONE;
auto address = devicePort.hasAddress()
? AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>(
devicePort.getAddress())
: AudioDeviceAddress{};
std::string connections =
devicePort.hasConnection() ? toString(devicePort.getConnection()) : "";
auto deviceExt = createDeviceExt(devType, getFlags(devicePort), getFormats(devicePort),
address, connections);
auto port = createPort(moduleConfig->nextPortId++, name, 0, isInput, deviceExt);
if (devicePort.hasProfile()) {
port.profiles = populateProfiles(devicePort);
}
if (devicePort.hasGains()) {
port.gains = populateGains(devicePort);
}
// only external device ports
if (!devicePort.hasAttached() || (devicePort.hasAttached() && !devicePort.getAttached())) {
// not attached
moduleConfig->mExternalDevicePortProfiles[port.id] = port.profiles;
port.profiles.clear();
}
moduleConfig->ports.emplace_back(std::move(port));
// dumpPort(moduleConfig->ports.at(port.id), isInput, false);
}
}
static void populateRoutes(const xsd::Modules::Module& module,
std::unique_ptr<ModuleConfig>& moduleConfig) {
if (!module.getFirstRoutes()) {
LOG(ERROR) << __func__ << " No routes";
return;
}
for (const auto& route : module.getFirstRoutes()->getRoute()) {
if (!route.hasSources() || !route.hasSink()) {
if (route.hasSink()) {
LOG(WARNING) << __func__ << ": Invalid route for sink " << route.getSink();
}
continue;
}
std::string source = route.getSources();
std::vector<std::string> srcTags;
std::string::size_type pos = 0;
while ((pos = source.find(kRouteDelimiter)) != std::string::npos) {
std::string tag(source, 0, pos);
LOG(VERBOSE) << __func__ << ": source tag " << tag;
srcTags.push_back(tag);
if (pos > source.size()) {
break;
}
source = source.substr(pos + 1);
}
if (!source.empty()) {
LOG(VERBOSE) << __func__ << ": source tag " << source;
srcTags.push_back(source);
}
std::vector<AudioPort> sources;
for (const auto& tag : srcTags) {
auto srcItr = findPortByTagName(moduleConfig->ports, tag);
if (srcItr != moduleConfig->ports.end()) {
sources.push_back(*srcItr);
}
}
auto sink = findPortByTagName(moduleConfig->ports, route.getSink());
LOG(VERBOSE) << __func__ << ": sink tag " << route.getSink();
if (sink != moduleConfig->ports.end()) {
moduleConfig->routes.push_back(createRoute(sources, *sink));
// dumpRoute(moduleConfig->routes.back());
}
}
}
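// Illustrative example of the route parsing above (assuming kRouteDelimiter, defined
// earlier in this file, is the ',' character; the port names are hypothetical):
//
//     <route type="mix" sink="Speaker" sources="primary output,deep buffer" />
//
// would be split into srcTags = {"primary output", "deep buffer"}; each tag is looked up
// among the already-populated ports, the matching port ids become the sourcePortIds of the
// created AudioRoute, and sinkPortId is taken from the port named "Speaker".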
static std::unique_ptr<ModuleConfig> getModuleConfig(const xsd::Modules::Module& module) {
auto moduleConfig = std::make_unique<ModuleConfig>();
populateMixPorts(module, moduleConfig);
populateDevicePorts(module, moduleConfig);
populateRoutes(module, moduleConfig);
return std::move(moduleConfig);
}
// static
#ifdef SEC_AUDIO_COMMON
std::unique_ptr<ModuleConfig> ModuleConfig::getPrimaryConfiguration(bool secAudioFeatureEnabled) {
const std::string& filePath = secAudioFeatureEnabled ?
kPrimaryModuleConfigFileName : kGsiPrimaryModuleConfigFileName;
auto xsdConfig =
xsd::read(getReadAbleConfigurationFile(filePath.c_str()).c_str());
#else // QC
std::unique_ptr<ModuleConfig> ModuleConfig::getPrimaryConfiguration() {
auto xsdConfig =
xsd::read(getReadAbleConfigurationFile(kPrimaryModuleConfigFileName.c_str()).c_str());
#endif
if (!xsdConfig.has_value()) {
LOG(WARNING) << __func__ << ": primary config retrieval failed, setting defaults";
return nullptr;
}
auto modules = xsdConfig.value();
if (!modules.has_module()) {
LOG(WARNING) << __func__ << ": config has no modules at all, setting defaults";
return nullptr;
}
auto module =
std::find_if(modules.get_module().cbegin(), modules.get_module().cend(), [](auto& ele) {
if (ele.hasName() && ele.getName() == "default") {
return true;
}
return false;
});
if (module == modules.get_module().cend()) {
LOG(WARNING) << __func__ << ": config has no default module, setting defaults";
return nullptr;
}
    return getModuleConfig(*module);
}
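// Illustrative usage sketch (the caller below is hypothetical; the actual consumer of this
// configuration lives in the Module implementation):
//
//     auto config = ModuleConfig::getPrimaryConfiguration();
//     if (config == nullptr) {
//         // fall back to built-in defaults, as the warnings above indicate
//     } else {
//         LOG(VERBOSE) << config->toString();
//     }
//
// With SEC_AUDIO_COMMON defined, the same no-argument call remains valid because the added
// secAudioFeatureEnabled parameter defaults to false.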
std::string ModuleConfig::toString() const {
std::ostringstream os;
os << std::endl << "--ModuleConfig start--" << std::endl;
os << std::endl << "port Configs:" << std::endl;
std::for_each(portConfigs.cbegin(), portConfigs.cend(),
[&](const auto& ele) { os << ele.toString() << std::endl; });
os << std::endl << "initial PortConfigs:" << std::endl;
std::for_each(initialConfigs.cbegin(), initialConfigs.cend(),
[&](const auto& ele) { os << ele.toString() << std::endl; });
os << std::endl << "ports:" << std::endl;
std::for_each(ports.cbegin(), ports.cend(),
[&](const auto& ele) { os << ele.toString() << std::endl; });
os << std::endl << "mExternalDevicePortProfiles:" << std::endl;
for (const auto & [ portId, profilesList ] : mExternalDevicePortProfiles) {
os << "External device port id:" << portId << std::endl;
std::for_each(profilesList.cbegin(), profilesList.cend(),
[&](auto& ele) { os << ele.toString() << std::endl; });
os << std::endl;
}
os << std::endl << "routes:" << std::endl;
std::for_each(routes.cbegin(), routes.cend(),
[&](const auto& ele) { os << ele.toString() << std::endl; });
os << std::endl << "patches:" << std::endl;
std::for_each(patches.cbegin(), patches.cend(),
[&](const auto& ele) { os << ele.toString() << std::endl; });
os << std::endl << "--ModuleConfig end--" << std::endl;
return os.str();
}
} // namespace qti::audio::core

View File

@@ -1,72 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <aidl/android/hardware/audio/core/AudioPatch.h>
#include <aidl/android/hardware/audio/core/AudioRoute.h>
#include <aidl/android/media/audio/common/AudioPort.h>
#include <aidl/android/media/audio/common/AudioPortConfig.h>
#include <aidl/android/media/audio/common/MicrophoneInfo.h>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>
// { SEC_AUDIO_COMMON
#include <system/audio.h>
// } SEC_AUDIO_COMMON
namespace qti::audio::core {
#ifdef SEC_AUDIO_COMMON
static const std::string kGsiPrimaryModuleConfigFileName{"audio_module_config_primary_sec_on_gsi.xml"};
#endif
static const std::string kPrimaryModuleConfigFileName{"audio_module_config_primary.xml"};
class ModuleConfig {
public:
std::vector<::aidl::android::media::audio::common::MicrophoneInfo> microphones;
std::vector<::aidl::android::media::audio::common::AudioPort> ports;
// Exclusive for external device ports and their possible profiles
std::unordered_map<int32_t, std::vector<::aidl::android::media::audio::common::AudioProfile>>
mExternalDevicePortProfiles;
std::vector<::aidl::android::media::audio::common::AudioPortConfig> portConfigs;
std::vector<::aidl::android::media::audio::common::AudioPortConfig> initialConfigs;
// Port id -> List of profiles to use when the device port state is set to
// 'connected' in connection simulation mode.
std::map<int32_t, std::vector<::aidl::android::media::audio::common::AudioProfile>>
connectedProfiles;
std::vector<::aidl::android::hardware::audio::core::AudioRoute> routes;
std::vector<::aidl::android::hardware::audio::core::AudioPatch> patches;
std::string toString() const;
int32_t nextPortId = 1;
int32_t nextPatchId = 1;
#ifdef SEC_AUDIO_COMMON
static std::unique_ptr<ModuleConfig> getPrimaryConfiguration(bool secAudioFeatureEnabled = false);
#else // QC
static std::unique_ptr<ModuleConfig> getPrimaryConfiguration();
#endif
};
} // namespace qti::audio::core

View File

@@ -1,18 +0,0 @@
xsd_config {
name: "audio_module_config_qti",
srcs: ["audio_module_config_qti.xsd"],
package_name: "audio_module_config_qti",
nullability: true,
}
// Uncomment below to check the validity of XML with respect to XSD
/*
prebuilt_etc_xml {
name: "audio_module_config_primary.xml",
src: "audio_module_config_primary.xml",
schema:"audio_module_config_qti.xsd",
vendor: true,
relative_install_path : "audio"
}
*/

View File

@@ -1,374 +0,0 @@
// Signature format: 2.0
package audio_module_config_qti {
public enum AudioChannelIndexMask {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_1;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_10;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_11;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_12;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_13;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_14;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_15;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_16;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_17;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_18;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_19;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_2;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_20;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_21;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_22;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_23;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_24;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_3;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_4;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_5;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_6;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_7;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_8;
enum_constant public static final audio_module_config_qti.AudioChannelIndexMask INDEX_MASK_9;
}
public enum AudioChannelLayout {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_13POINT_360RA;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_22POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_2POINT0POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_2POINT1;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_2POINT1POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_3POINT0POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_3POINT1;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_3POINT1POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_5POINT1;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_5POINT1POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_5POINT1POINT4;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_5POINT1_SIDE;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_6POINT1;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_7POINT1;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_7POINT1POINT2;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_7POINT1POINT4;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_9POINT1POINT4;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_9POINT1POINT6;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_FRONT_BACK;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_HAPTIC_AB;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_MONO;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_MONO_HAPTIC_A;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_MONO_HAPTIC_AB;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_PENTA;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_QUAD;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_QUAD_SIDE;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_STEREO;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_STEREO_HAPTIC_A;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_STEREO_HAPTIC_AB;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_SURROUND;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_TRI;
enum_constant public static final audio_module_config_qti.AudioChannelLayout LAYOUT_TRI_BACK;
}
public enum AudioDeviceType {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_ACCESSORY;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_AFE_PROXY;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_DEFAULT;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_DEVICE;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_DOCK;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_ECHO_REFERENCE;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_FM_TUNER;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_HEADSET;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_LOOPBACK;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_MICROPHONE;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_MICROPHONE_BACK;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_SUBMIX;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_TELEPHONY_RX;
enum_constant public static final audio_module_config_qti.AudioDeviceType IN_TV_TUNER;
enum_constant public static final audio_module_config_qti.AudioDeviceType NONE;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_ACCESSORY;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_AFE_PROXY;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_BROADCAST;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_CARKIT;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_DEFAULT;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_DEVICE;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_DOCK;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_ECHO_CANCELLER;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_FM;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_HEADPHONE;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_HEADSET;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_HEARING_AID;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_LINE_AUX;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_SPEAKER;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_SPEAKER_EARPIECE;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_SPEAKER_SAFE;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_SUBMIX;
enum_constant public static final audio_module_config_qti.AudioDeviceType OUT_TELEPHONY_TX;
}
public enum AudioEncapsulationType {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioEncapsulationType IEC61937;
enum_constant public static final audio_module_config_qti.AudioEncapsulationType NONE;
}
public enum AudioFormatType {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioFormatType DEFAULT;
enum_constant public static final audio_module_config_qti.AudioFormatType NON_PCM;
enum_constant public static final audio_module_config_qti.AudioFormatType PCM;
enum_constant public static final audio_module_config_qti.AudioFormatType SYS_RESERVED_INVALID;
}
public enum AudioGainMode {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioGainMode CHANNELS;
enum_constant public static final audio_module_config_qti.AudioGainMode JOINT;
enum_constant public static final audio_module_config_qti.AudioGainMode RAMP;
}
public enum AudioInputFlag {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioInputFlag DIRECT;
enum_constant public static final audio_module_config_qti.AudioInputFlag FAST;
enum_constant public static final audio_module_config_qti.AudioInputFlag HW_AV_SYNC;
enum_constant public static final audio_module_config_qti.AudioInputFlag HW_HOTWORD;
enum_constant public static final audio_module_config_qti.AudioInputFlag MMAP_NOIRQ;
enum_constant public static final audio_module_config_qti.AudioInputFlag RAW;
enum_constant public static final audio_module_config_qti.AudioInputFlag SYNC;
enum_constant public static final audio_module_config_qti.AudioInputFlag ULTRASOUND;
enum_constant public static final audio_module_config_qti.AudioInputFlag VOIP_TX;
}
public enum AudioOutputFlag {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioOutputFlag COMPRESS_OFFLOAD;
enum_constant public static final audio_module_config_qti.AudioOutputFlag DEEP_BUFFER;
enum_constant public static final audio_module_config_qti.AudioOutputFlag DIRECT;
enum_constant public static final audio_module_config_qti.AudioOutputFlag DIRECT_PCM;
enum_constant public static final audio_module_config_qti.AudioOutputFlag FAST;
enum_constant public static final audio_module_config_qti.AudioOutputFlag GAPLESS_OFFLOAD;
enum_constant public static final audio_module_config_qti.AudioOutputFlag HW_AV_SYNC;
enum_constant public static final audio_module_config_qti.AudioOutputFlag IEC958_NONAUDIO;
enum_constant public static final audio_module_config_qti.AudioOutputFlag INCALL_MUSIC;
enum_constant public static final audio_module_config_qti.AudioOutputFlag MMAP_NOIRQ;
enum_constant public static final audio_module_config_qti.AudioOutputFlag NON_BLOCKING;
enum_constant public static final audio_module_config_qti.AudioOutputFlag PRIMARY;
enum_constant public static final audio_module_config_qti.AudioOutputFlag RAW;
enum_constant public static final audio_module_config_qti.AudioOutputFlag SPATIALIZER;
enum_constant public static final audio_module_config_qti.AudioOutputFlag SYNC;
enum_constant public static final audio_module_config_qti.AudioOutputFlag TTS;
enum_constant public static final audio_module_config_qti.AudioOutputFlag ULTRASOUND;
enum_constant public static final audio_module_config_qti.AudioOutputFlag VOIP_RX;
}
public enum AudioPcmType {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioPcmType DEFAULT;
enum_constant public static final audio_module_config_qti.AudioPcmType FIXED_Q_8_24;
enum_constant public static final audio_module_config_qti.AudioPcmType FLOAT_32_BIT;
enum_constant public static final audio_module_config_qti.AudioPcmType INT_16_BIT;
enum_constant public static final audio_module_config_qti.AudioPcmType INT_24_BIT;
enum_constant public static final audio_module_config_qti.AudioPcmType INT_32_BIT;
enum_constant public static final audio_module_config_qti.AudioPcmType UINT_8_BIT;
}
public enum AudioUsage {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_ALARM;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_ANNOUNCEMENT;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_ASSISTANCE_ACCESSIBILITY;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_ASSISTANCE_NAVIGATION_GUIDANCE;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_ASSISTANCE_SONIFICATION;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_ASSISTANT;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_CALL_ASSISTANT;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_EMERGENCY;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_GAME;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_INVALID;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_MEDIA;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_NOTIFICATION;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_NOTIFICATION_EVENT;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_NOTIFICATION_TELEPHONY_RINGTONE;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_SAFETY;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_UNKNOWN;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_VEHICLE_STATUS;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_VIRTUAL_SOURCE;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_VOICE_COMMUNICATION;
enum_constant public static final audio_module_config_qti.AudioUsage AUDIOUSAGE_VOICE_COMMUNICATION_SIGNALLING;
}
public enum ConnectionType {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.ConnectionType analog;
enum_constant public static final audio_module_config_qti.ConnectionType bta2dp;
enum_constant public static final audio_module_config_qti.ConnectionType btle;
enum_constant public static final audio_module_config_qti.ConnectionType btsco;
enum_constant public static final audio_module_config_qti.ConnectionType bus;
enum_constant public static final audio_module_config_qti.ConnectionType hdmi;
enum_constant public static final audio_module_config_qti.ConnectionType hdmiarc;
enum_constant public static final audio_module_config_qti.ConnectionType hdmiearc;
enum_constant public static final audio_module_config_qti.ConnectionType ipv4;
enum_constant public static final audio_module_config_qti.ConnectionType spdif;
enum_constant public static final audio_module_config_qti.ConnectionType usb;
enum_constant public static final audio_module_config_qti.ConnectionType virtual;
enum_constant public static final audio_module_config_qti.ConnectionType wireless;
}
public class DevicePorts {
ctor public DevicePorts();
method @Nullable public java.util.List<audio_module_config_qti.DevicePorts.DevicePort> getDevicePort();
}
public static class DevicePorts.DevicePort {
ctor public DevicePorts.DevicePort();
method @Nullable public String getAddress();
method @Nullable public boolean getAttached();
method @Nullable public audio_module_config_qti.ConnectionType getConnection();
method @Nullable public boolean getDefaultDevice();
method @Nullable public audio_module_config_qti.AudioDeviceType getDeviceType();
method @Nullable public java.util.List<java.lang.String> getEncodings();
method @Nullable public audio_module_config_qti.Gains getGains();
method @Nullable public java.util.List<audio_module_config_qti.Profile> getProfile();
method @Nullable public audio_module_config_qti.Role getRole();
method @Nullable public String getTagName();
method @Nullable public boolean get_default();
method public void setAddress(@Nullable String);
method public void setAttached(@Nullable boolean);
method public void setConnection(@Nullable audio_module_config_qti.ConnectionType);
method public void setDefaultDevice(@Nullable boolean);
method public void setDeviceType(@Nullable audio_module_config_qti.AudioDeviceType);
method public void setEncodings(@Nullable java.util.List<java.lang.String>);
method public void setGains(@Nullable audio_module_config_qti.Gains);
method public void setRole(@Nullable audio_module_config_qti.Role);
method public void setTagName(@Nullable String);
method public void set_default(@Nullable boolean);
}
public class Gains {
ctor public Gains();
method @Nullable public java.util.List<audio_module_config_qti.Gains.Gain> getGain();
}
public static class Gains.Gain {
ctor public Gains.Gain();
method @Nullable public audio_module_config_qti.AudioChannelLayout getChannel_layout();
method @Nullable public int getDefaultValueMB();
method @Nullable public int getMaxRampMs();
method @Nullable public int getMaxValueMB();
method @Nullable public int getMinRampMs();
method @Nullable public int getMinValueMB();
method @Nullable public java.util.List<audio_module_config_qti.AudioGainMode> getMode();
method @Nullable public String getName();
method @Nullable public int getStepValueMB();
method @Nullable public boolean getUseForVolume();
method public void setChannel_layout(@Nullable audio_module_config_qti.AudioChannelLayout);
method public void setDefaultValueMB(@Nullable int);
method public void setMaxRampMs(@Nullable int);
method public void setMaxValueMB(@Nullable int);
method public void setMinRampMs(@Nullable int);
method public void setMinValueMB(@Nullable int);
method public void setMode(@Nullable java.util.List<audio_module_config_qti.AudioGainMode>);
method public void setName(@Nullable String);
method public void setStepValueMB(@Nullable int);
method public void setUseForVolume(@Nullable boolean);
}
public class MixPorts {
ctor public MixPorts();
method @Nullable public java.util.List<audio_module_config_qti.MixPorts.MixPort> getMixPort();
}
public static class MixPorts.MixPort {
ctor public MixPorts.MixPort();
method @Nullable public java.util.List<java.lang.String> getFlags();
method @Nullable public audio_module_config_qti.Gains getGains();
method @Nullable public long getMaxActiveCount();
method @Nullable public long getMaxOpenCount();
method @Nullable public String getName();
method @Nullable public java.util.List<audio_module_config_qti.AudioUsage> getPreferredUsage();
method @Nullable public java.util.List<audio_module_config_qti.Profile> getProfile();
method @Nullable public long getRecommendedMuteDurationMs();
method @Nullable public audio_module_config_qti.Role getRole();
method public void setFlags(@Nullable java.util.List<java.lang.String>);
method public void setGains(@Nullable audio_module_config_qti.Gains);
method public void setMaxActiveCount(@Nullable long);
method public void setMaxOpenCount(@Nullable long);
method public void setName(@Nullable String);
method public void setPreferredUsage(@Nullable java.util.List<audio_module_config_qti.AudioUsage>);
method public void setRecommendedMuteDurationMs(@Nullable long);
method public void setRole(@Nullable audio_module_config_qti.Role);
}
public enum MixType {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.MixType mix;
enum_constant public static final audio_module_config_qti.MixType mux;
}
public class Modules {
ctor public Modules();
method @Nullable public audio_module_config_qti.Modules.Module getModule();
method public void setModule(@Nullable audio_module_config_qti.Modules.Module);
}
public static class Modules.Module {
ctor public Modules.Module();
method @Nullable public audio_module_config_qti.DevicePorts getDevicePorts();
method @Nullable public audio_module_config_qti.MixPorts getMixPorts();
method @Nullable public String getName();
method @Nullable public audio_module_config_qti.Routes getRoutes();
method public void setDevicePorts(@Nullable audio_module_config_qti.DevicePorts);
method public void setMixPorts(@Nullable audio_module_config_qti.MixPorts);
method public void setName(@Nullable String);
method public void setRoutes(@Nullable audio_module_config_qti.Routes);
}
public class Profile {
ctor public Profile();
method @Nullable public java.util.List<audio_module_config_qti.AudioChannelLayout> getChannelLayouts();
method @Nullable public java.util.List<audio_module_config_qti.AudioChannelIndexMask> getChannelMasks();
method @Nullable public audio_module_config_qti.AudioEncapsulationType getEncapsulationType();
method @Nullable public String getEncoding();
method @Nullable public audio_module_config_qti.AudioFormatType getFormatType();
method @Nullable public String getName();
method @Nullable public audio_module_config_qti.AudioPcmType getPcmType();
method @Nullable public java.util.List<java.math.BigInteger> getSamplingRates();
method public void setChannelLayouts(@Nullable java.util.List<audio_module_config_qti.AudioChannelLayout>);
method public void setChannelMasks(@Nullable java.util.List<audio_module_config_qti.AudioChannelIndexMask>);
method public void setEncapsulationType(@Nullable audio_module_config_qti.AudioEncapsulationType);
method public void setEncoding(@Nullable String);
method public void setFormatType(@Nullable audio_module_config_qti.AudioFormatType);
method public void setName(@Nullable String);
method public void setPcmType(@Nullable audio_module_config_qti.AudioPcmType);
method public void setSamplingRates(@Nullable java.util.List<java.math.BigInteger>);
}
public enum Role {
method @NonNull public String getRawName();
enum_constant public static final audio_module_config_qti.Role sink;
enum_constant public static final audio_module_config_qti.Role source;
}
public class Routes {
ctor public Routes();
method @Nullable public java.util.List<audio_module_config_qti.Routes.Route> getRoute();
}
public static class Routes.Route {
ctor public Routes.Route();
method @Nullable public String getSink();
method @Nullable public String getSources();
method @Nullable public audio_module_config_qti.MixType getType();
method public void setSink(@Nullable String);
method public void setSources(@Nullable String);
method public void setType(@Nullable audio_module_config_qti.MixType);
}
public class XmlParser {
ctor public XmlParser();
method @Nullable public static audio_module_config_qti.Modules read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
}
}

View File

@@ -1,425 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
SPDX-License-Identifier: BSD-3-Clause-Clear
-->
<xs:schema version="2.0" elementFormDefault="qualified" attributeFormDefault="unqualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:element name="modules">
<!-- TODO: a version attribute may be added -->
<xs:complexType>
<xs:sequence>
<xs:element name="module" minOccurs="1">
<xs:complexType>
<xs:sequence>
<xs:element name="mixPorts" type="mixPorts" minOccurs="0" />
<xs:element name="devicePorts" type="devicePorts" minOccurs="0" />
<xs:element name="routes" type="routes" minOccurs="0" />
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="required" />
</xs:complexType>
<xs:unique name="mixPortNameUniqueness">
<xs:selector xpath="mixPorts/mixPort" />
<xs:field xpath="@name" />
</xs:unique>
<xs:unique name="devicePortUniqueness">
<xs:selector xpath="devicePorts/devicePort" />
<xs:field xpath="@deviceType" />
<xs:field xpath="@connection" />
</xs:unique>
<xs:unique name="routeUniqueness">
<xs:selector xpath="routes/route" />
<xs:field xpath="@sink" />
</xs:unique>
<xs:key name="devicePortNameKey">
<xs:selector xpath="devicePorts/devicePort" />
<xs:field xpath="@tagName" />
</xs:key>
<xs:key name="routeSinkKey">
<!-- predicate [@type='sink'] does not work in xsd 1.0 -->
<xs:selector xpath="devicePorts/devicePort|mixPorts/mixPort" />
<xs:field xpath="@tagName|@name" />
</xs:key>
<xs:keyref name="routeSinkRef" refer="routeSinkKey">
<xs:selector xpath="routes/route" />
<xs:field xpath="@sink" />
</xs:keyref>
</xs:element>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:simpleType name="role">
<xs:restriction base="xs:string">
<xs:enumeration value="sink" />
<xs:enumeration value="source" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioInputFlag">
<xs:restriction base="xs:string">
<xs:enumeration value="FAST" />
<xs:enumeration value="HW_HOTWORD" />
<xs:enumeration value="RAW" />
<xs:enumeration value="SYNC" />
<xs:enumeration value="MMAP_NOIRQ" />
<xs:enumeration value="VOIP_TX" />
<xs:enumeration value="HW_AV_SYNC" />
<xs:enumeration value="DIRECT" />
<xs:enumeration value="ULTRASOUND" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioOutputFlag">
<xs:restriction base="xs:string">
<xs:enumeration value="DIRECT" />
<xs:enumeration value="PRIMARY" />
<xs:enumeration value="FAST" />
<xs:enumeration value="DEEP_BUFFER" />
<xs:enumeration value="COMPRESS_OFFLOAD" />
<xs:enumeration value="NON_BLOCKING" />
<xs:enumeration value="HW_AV_SYNC" />
<xs:enumeration value="TTS" />
<xs:enumeration value="RAW" />
<xs:enumeration value="SYNC" />
<xs:enumeration value="IEC958_NONAUDIO" />
<xs:enumeration value="DIRECT_PCM" />
<xs:enumeration value="MMAP_NOIRQ" />
<xs:enumeration value="VOIP_RX" />
<xs:enumeration value="INCALL_MUSIC" />
<xs:enumeration value="GAPLESS_OFFLOAD" />
<xs:enumeration value="SPATIALIZER" />
<xs:enumeration value="ULTRASOUND" />
<!-- SEC_AUDIO_HDMI -->
<xs:enumeration value="MULTI_CH" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioIOFlag">
<xs:union memberTypes="audioInputFlag audioOutputFlag" />
</xs:simpleType>
<xs:simpleType name="audioIOFlagList">
<xs:list itemType="audioIOFlag" />
</xs:simpleType>
<xs:simpleType name="audioUsage">
<xs:restriction base="xs:string">
<xs:enumeration value="AUDIOUSAGE_INVALID" />
<xs:enumeration value="AUDIOUSAGE_UNKNOWN" />
<xs:enumeration value="AUDIOUSAGE_MEDIA" />
<xs:enumeration value="AUDIOUSAGE_VOICE_COMMUNICATION" />
<xs:enumeration value="AUDIOUSAGE_VOICE_COMMUNICATION_SIGNALLING" />
<xs:enumeration value="AUDIOUSAGE_ALARM" />
<xs:enumeration value="AUDIOUSAGE_NOTIFICATION" />
<xs:enumeration value="AUDIOUSAGE_NOTIFICATION_TELEPHONY_RINGTONE" />
<xs:enumeration value="AUDIOUSAGE_SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST" />
<xs:enumeration value="AUDIOUSAGE_SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT" />
<xs:enumeration value="AUDIOUSAGE_SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED" />
<xs:enumeration value="AUDIOUSAGE_NOTIFICATION_EVENT" />
<xs:enumeration value="AUDIOUSAGE_ASSISTANCE_ACCESSIBILITY" />
<xs:enumeration value="AUDIOUSAGE_ASSISTANCE_NAVIGATION_GUIDANCE" />
<xs:enumeration value="AUDIOUSAGE_ASSISTANCE_SONIFICATION" />
<xs:enumeration value="AUDIOUSAGE_GAME" />
<xs:enumeration value="AUDIOUSAGE_VIRTUAL_SOURCE" />
<xs:enumeration value="AUDIOUSAGE_ASSISTANT" />
<xs:enumeration value="AUDIOUSAGE_CALL_ASSISTANT" />
<xs:enumeration value="AUDIOUSAGE_EMERGENCY" />
<xs:enumeration value="AUDIOUSAGE_SAFETY" />
<xs:enumeration value="AUDIOUSAGE_VEHICLE_STATUS" />
<xs:enumeration value="AUDIOUSAGE_ANNOUNCEMENT" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioUsageList">
<xs:list itemType="audioUsage" />
</xs:simpleType>
<xs:complexType name="mixPorts">
<xs:sequence>
<xs:element name="mixPort" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded" />
<xs:element name="gains" type="gains" minOccurs="0" />
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required" />
<xs:attribute name="role" type="role" use="required" />
<xs:attribute name="flags" type="audioIOFlagList" />
<xs:attribute name="maxOpenCount" type="xs:unsignedInt" />
<xs:attribute name="maxActiveCount" type="xs:unsignedInt" />
<xs:attribute name="preferredUsage" type="audioUsageList" />
<xs:attribute name="recommendedMuteDurationMs" type="xs:unsignedInt" />
</xs:complexType>
<xs:unique name="mixPortProfileUniqueness">
<xs:selector xpath="profile" />
<xs:field xpath="samplingRate" />
<xs:field xpath="channelLayouts" />
<xs:field xpath="encoding" />
<xs:field xpath="formatType" />
<xs:field xpath="pcmType" />
</xs:unique>
<xs:unique name="mixPortGainUniqueness">
<xs:selector xpath="gains/gain" />
<xs:field xpath="@name" />
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="audioDeviceType">
<xs:restriction base="xs:string">
<xs:enumeration value="NONE" />
<xs:enumeration value="IN_DEFAULT" />
<xs:enumeration value="IN_ACCESSORY" />
<xs:enumeration value="IN_AFE_PROXY" />
<xs:enumeration value="IN_DEVICE" />
<xs:enumeration value="IN_ECHO_REFERENCE" />
<xs:enumeration value="IN_FM_TUNER" />
<xs:enumeration value="IN_HEADSET" />
<xs:enumeration value="IN_LOOPBACK" />
<xs:enumeration value="IN_MICROPHONE" />
<xs:enumeration value="IN_MICROPHONE_BACK" />
<xs:enumeration value="IN_SUBMIX" />
<xs:enumeration value="IN_TELEPHONY_RX" />
<xs:enumeration value="IN_TV_TUNER" />
<xs:enumeration value="IN_DOCK" />
<!-- SEC_AUDIO_SAMSUNGRECORD -->
<xs:enumeration value="IN_MICROPHONE_MULTI" />
<xs:enumeration value="OUT_DEFAULT" />
<xs:enumeration value="OUT_ACCESSORY" />
<xs:enumeration value="OUT_AFE_PROXY" />
<xs:enumeration value="OUT_CARKIT" />
<xs:enumeration value="OUT_DEVICE" />
<xs:enumeration value="OUT_ECHO_CANCELLER" />
<xs:enumeration value="OUT_FM" />
<xs:enumeration value="OUT_HEADPHONE" />
<xs:enumeration value="OUT_HEADSET" />
<xs:enumeration value="OUT_HEARING_AID" />
<xs:enumeration value="OUT_LINE_AUX" />
<xs:enumeration value="OUT_SPEAKER" />
<xs:enumeration value="OUT_SPEAKER_EARPIECE" />
<xs:enumeration value="OUT_SPEAKER_SAFE" />
<xs:enumeration value="OUT_SUBMIX" />
<xs:enumeration value="OUT_TELEPHONY_TX" />
<xs:enumeration value="OUT_DOCK" />
<xs:enumeration value="OUT_BROADCAST" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="connectionType">
<xs:restriction base="xs:string">
<xs:enumeration value="analog" />
<xs:enumeration value="bt-a2dp" />
<xs:enumeration value="bt-le" />
<xs:enumeration value="bt-sco" />
<xs:enumeration value="bus" />
<xs:enumeration value="hdmi" />
<xs:enumeration value="hdmi-arc" />
<xs:enumeration value="hdmi-earc" />
<xs:enumeration value="ip-v4" />
<xs:enumeration value="spdif" />
<xs:enumeration value="wireless" />
<xs:enumeration value="usb" />
<xs:enumeration value="virtual" />
<!-- SEC_AUDIO_FMRADIO -->
<xs:enumeration value="fm" />
</xs:restriction>
</xs:simpleType>
<xs:complexType name="devicePorts">
<xs:sequence>
<xs:element name="devicePort" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:sequence>
<!-- Attached device ports should have at least one profile -->
<!-- External device ports exist only as templates -->
<!-- Even though template device ports do not strictly require profiles, they are required to carry at least one profile so that a default profile is always available -->
<xs:element name="profile" type="profile" minOccurs="1" maxOccurs="unbounded" />
<xs:element name="gains" type="gains" minOccurs="0" />
</xs:sequence>
<xs:attribute name="tagName" type="xs:token" use="required" />
<!-- Can be used to check whether a device port is permanent or not -->
<!-- Only permanent device ports must have static profiles loaded -->
<xs:attribute name="attached" type="xs:boolean" use="optional" default="false" />
<!-- This attribute indicates whether the device is treated as the default device by the framework -->
<!-- By convention, defaultDevice is also an attached device -->
<xs:attribute name="defaultDevice" type="xs:boolean" use="optional" default="false" />
<xs:attribute name="deviceType" type="audioDeviceType" use="required" />
<xs:attribute name="role" type="role" use="required" />
<!-- some devices need an address, e.g. the built-in mic with address "bottom" -->
<xs:attribute name="address" type="xs:string" use="optional" default="" />
<xs:attribute name="connection" type="connectionType" use="optional" />
<!-- encodings is expected to be a series of valid mime strings separated by spaces -->
<xs:attribute name="encodings" type="audioMimeList" use="optional" />
<!-- Note that XSD 1.0 can not check that a type only has one default. -->
<xs:attribute name="default" type="xs:boolean" use="optional" />
</xs:complexType>
<xs:unique name="devicePortProfileUniqueness">
<xs:selector xpath="profile" />
<xs:field xpath="samplingRate" />
<xs:field xpath="channelLayouts" />
<xs:field xpath="encoding" />
<xs:field xpath="formatType" />
<xs:field xpath="pcmType" />
</xs:unique>
<xs:unique name="devicePortGainUniqueness">
<xs:selector xpath="gains/gain" />
<xs:field xpath="@name" />
</xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="mixType">
<xs:restriction base="xs:string">
<xs:enumeration value="mix" />
<xs:enumeration value="mux" />
</xs:restriction>
</xs:simpleType>
<xs:complexType name="routes">
<xs:sequence>
<xs:element name="route" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:attribute name="type" type="mixType" use="required" />
<xs:attribute name="sink" type="xs:string" use="required" />
<xs:attribute name="sources" type="xs:string" use="required" />
<!-- TODO write validation for sources attribute-->
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="audioSamplingRates">
<xs:list itemType="xs:nonNegativeInteger" />
</xs:simpleType>
<xs:simpleType name="audioChannelIndexMask">
<xs:restriction base="xs:string">
<xs:enumeration value="INDEX_MASK_1" />
<xs:enumeration value="INDEX_MASK_2" />
<xs:enumeration value="INDEX_MASK_3" />
<xs:enumeration value="INDEX_MASK_4" />
<xs:enumeration value="INDEX_MASK_5" />
<xs:enumeration value="INDEX_MASK_6" />
<xs:enumeration value="INDEX_MASK_7" />
<xs:enumeration value="INDEX_MASK_8" />
<xs:enumeration value="INDEX_MASK_9" />
<xs:enumeration value="INDEX_MASK_10" />
<xs:enumeration value="INDEX_MASK_11" />
<xs:enumeration value="INDEX_MASK_12" />
<xs:enumeration value="INDEX_MASK_13" />
<xs:enumeration value="INDEX_MASK_14" />
<xs:enumeration value="INDEX_MASK_15" />
<xs:enumeration value="INDEX_MASK_16" />
<xs:enumeration value="INDEX_MASK_17" />
<xs:enumeration value="INDEX_MASK_18" />
<xs:enumeration value="INDEX_MASK_19" />
<xs:enumeration value="INDEX_MASK_20" />
<xs:enumeration value="INDEX_MASK_21" />
<xs:enumeration value="INDEX_MASK_22" />
<xs:enumeration value="INDEX_MASK_23" />
<xs:enumeration value="INDEX_MASK_24" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioChannelIndexMaskList">
<xs:list itemType="audioChannelIndexMask" />
</xs:simpleType>
<xs:simpleType name="audioChannelLayout">
<xs:restriction base="xs:string">
<xs:enumeration value="LAYOUT_MONO" />
<xs:enumeration value="LAYOUT_STEREO" />
<xs:enumeration value="LAYOUT_2POINT1" />
<xs:enumeration value="LAYOUT_TRI" />
<xs:enumeration value="LAYOUT_TRI_BACK" />
<xs:enumeration value="LAYOUT_3POINT1" />
<xs:enumeration value="LAYOUT_2POINT0POINT2" />
<xs:enumeration value="LAYOUT_2POINT1POINT2" />
<xs:enumeration value="LAYOUT_3POINT0POINT2" />
<xs:enumeration value="LAYOUT_3POINT1POINT2" />
<xs:enumeration value="LAYOUT_QUAD" />
<xs:enumeration value="LAYOUT_QUAD_SIDE" />
<xs:enumeration value="LAYOUT_SURROUND" />
<xs:enumeration value="LAYOUT_PENTA" />
<xs:enumeration value="LAYOUT_5POINT1" />
<xs:enumeration value="LAYOUT_5POINT1_SIDE" />
<xs:enumeration value="LAYOUT_5POINT1POINT2" />
<xs:enumeration value="LAYOUT_5POINT1POINT4" />
<xs:enumeration value="LAYOUT_6POINT1" />
<xs:enumeration value="LAYOUT_7POINT1" />
<xs:enumeration value="LAYOUT_7POINT1POINT2" />
<xs:enumeration value="LAYOUT_7POINT1POINT4" />
<xs:enumeration value="LAYOUT_9POINT1POINT4" />
<xs:enumeration value="LAYOUT_9POINT1POINT6" />
<xs:enumeration value="LAYOUT_13POINT_360RA" />
<xs:enumeration value="LAYOUT_22POINT2" />
<xs:enumeration value="LAYOUT_MONO_HAPTIC_A" />
<xs:enumeration value="LAYOUT_STEREO_HAPTIC_A" />
<xs:enumeration value="LAYOUT_HAPTIC_AB" />
<xs:enumeration value="LAYOUT_MONO_HAPTIC_AB" />
<xs:enumeration value="LAYOUT_STEREO_HAPTIC_AB" />
<xs:enumeration value="LAYOUT_FRONT_BACK" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioChannelLayoutList">
<xs:list itemType="audioChannelLayout" />
</xs:simpleType>
<xs:simpleType name="audioEncapsulationType">
<xs:restriction base="xs:string">
<xs:enumeration value="NONE" />
<xs:enumeration value="IEC61937" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioFormatType">
<xs:restriction base="xs:string">
<xs:enumeration value="DEFAULT" />
<xs:enumeration value="NON_PCM" />
<xs:enumeration value="PCM" />
<xs:enumeration value="SYS_RESERVED_INVALID" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioPcmType">
<xs:restriction base="xs:string">
<xs:enumeration value="DEFAULT" />
<xs:enumeration value="UINT_8_BIT" />
<xs:enumeration value="INT_16_BIT" />
<xs:enumeration value="INT_32_BIT" />
<xs:enumeration value="FIXED_Q_8_24" />
<xs:enumeration value="FLOAT_32_BIT" />
<xs:enumeration value="INT_24_BIT" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioMimeList">
<xs:list itemType="xs:string" />
</xs:simpleType>
<xs:complexType name="profile">
<xs:attribute name="name" type="xs:token" use="optional" />
<xs:attribute name="samplingRates" type="audioSamplingRates" use="required" />
<xs:attribute name="channelLayouts" type="audioChannelLayoutList" use="required" />
<xs:attribute name="channelMasks" type="audioChannelIndexMaskList" use="optional" />
<xs:attribute name="encapsulationType" type="audioEncapsulationType" use="optional" default="NONE" />
<xs:attribute name="formatType" type="audioFormatType" use="required" />
<xs:attribute name="pcmType" type="audioPcmType" use="optional" />
<!-- encoding value is expected to be a valid mime string such as "audio/flac" -->
<xs:attribute name="encoding" type="xs:string" use="optional" />
</xs:complexType>
<xs:simpleType name="audioGainMode">
<xs:restriction base="xs:string">
<xs:enumeration value="JOINT" />
<xs:enumeration value="CHANNELS" />
<xs:enumeration value="RAMP" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="audioGainModeListUnrestricted">
<xs:list itemType="audioGainMode" />
</xs:simpleType>
<xs:simpleType name='audioGainModeList'>
<xs:restriction base='audioGainModeListUnrestricted'>
<xs:minLength value='1' />
</xs:restriction>
</xs:simpleType>
<xs:complexType name="gains">
<xs:sequence>
<xs:element name="gain" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:attribute name="name" type="xs:token" use="required" />
<xs:attribute name="mode" type="audioGainModeList" use="required" />
<xs:attribute name="channel_layout" type="audioChannelLayout" use="optional" />
<xs:attribute name="minValueMB" type="xs:int" use="optional" />
<xs:attribute name="maxValueMB" type="xs:int" use="optional" />
<xs:attribute name="defaultValueMB" type="xs:int" use="optional" />
<xs:attribute name="stepValueMB" type="xs:int" use="optional" />
<xs:attribute name="minRampMs" type="xs:int" use="optional" />
<xs:attribute name="maxRampMs" type="xs:int" use="optional" />
<xs:attribute name="useForVolume" type="xs:boolean" use="optional" />
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
</xs:schema>

View File

@@ -1,43 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libaudioplatform.qti
LOCAL_MODULE_OWNER := qti
LOCAL_MODULE_TAGS := optional
LOCAL_VENDOR_MODULE := true
LOCAL_C_INCLUDES += \
$(LOCAL_PATH)/include \
$(LOCAL_PATH)/../extensions/include \
$(TOP)/system/media/audio/include \
$(TOP)/hardware/libhardware/include
LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
LOCAL_SRC_FILES := \
Platform.cpp \
AudioUsecase.cpp \
PlatformUtils.cpp
LOCAL_WHOLE_STATIC_LIBRARIES := libaudio_microphoneinfo_parser
LOCAL_STATIC_LIBRARIES := \
libaudiohalutils.qti
LOCAL_SHARED_LIBRARIES := \
libbinder_ndk \
libbase \
libstagefright_foundation \
android.hardware.audio.core-V2-ndk \
android.media.audio.common.types-V3-ndk \
libaudioaidlcommon \
qti-audio-types-aidl-V1-ndk \
libaudioplatformconverter.qti \
libar-pal
# { SEC_AUDIO_COMMON
SEC_AUDIO_VARS := vendor/samsung/variant/audio/sec_audioreach_vars.mk
include $(SEC_AUDIO_VARS)
# } SEC_AUDIO_COMMON
include $(BUILD_STATIC_LIBRARY)

View File

@@ -1,247 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_NDEBUG 0
#define LOG_TAG "AHAL_PlatformUtils_QTI"
#include <Utils.h>
#include <android-base/logging.h>
#include <qti-audio-core/Platform.h>
#include <qti-audio-core/PlatformUtils.h>
#include <qti-audio/PlatformConverter.h>
#include <system/audio.h>
#include <map>
#include <set>
#include <vector>
using ::aidl::android::hardware::audio::common::getChannelCount;
using ::aidl::android::media::audio::common::AudioChannelLayout;
using ::aidl::android::media::audio::common::AudioFormatDescription;
using ::aidl::android::media::audio::common::AudioFormatType;
using ::aidl::android::media::audio::common::AudioProfile;
using ::aidl::android::media::audio::common::PcmType;
using ::aidl::android::media::audio::common::AudioPlaybackRate;
using ::aidl::android::hardware::audio::core::VendorParameter;
using ::aidl::qti::audio::core::VString;
namespace qti::audio::core {
using AudioChannelCountToMaskMap = std::map<unsigned int, AudioChannelLayout>;
AudioChannelLayout getInvalidChannelLayout() {
static const AudioChannelLayout invalidChannelLayout =
AudioChannelLayout::make<AudioChannelLayout::Tag::invalid>(0);
return invalidChannelLayout;
}
static AudioChannelCountToMaskMap createChannelMaskMap(
const std::set<AudioChannelLayout>& channelMasks) {
AudioChannelCountToMaskMap channelMaskToCountMap;
for (const auto& channelMask : channelMasks) {
channelMaskToCountMap.emplace(getChannelCount(channelMask), channelMask);
}
return channelMaskToCountMap;
}
#define MAKE_LAYOUT_MASK(n) \
AudioChannelLayout::make<AudioChannelLayout::Tag::layoutMask>(AudioChannelLayout::LAYOUT_##n)
const AudioChannelCountToMaskMap& getSupportedChannelOutLayoutMap() {
static const std::set<AudioChannelLayout> supportedOutChannelLayouts = {
MAKE_LAYOUT_MASK(MONO), MAKE_LAYOUT_MASK(STEREO), MAKE_LAYOUT_MASK(2POINT1),
MAKE_LAYOUT_MASK(QUAD), MAKE_LAYOUT_MASK(PENTA), MAKE_LAYOUT_MASK(5POINT1),
MAKE_LAYOUT_MASK(6POINT1), MAKE_LAYOUT_MASK(7POINT1), MAKE_LAYOUT_MASK(7POINT1POINT4),
MAKE_LAYOUT_MASK(22POINT2),
};
static const AudioChannelCountToMaskMap outLayouts =
createChannelMaskMap(supportedOutChannelLayouts);
return outLayouts;
}
const AudioChannelCountToMaskMap& getSupportedChannelInLayoutMap() {
static const std::set<AudioChannelLayout> supportedInChannelLayouts = {
MAKE_LAYOUT_MASK(MONO), MAKE_LAYOUT_MASK(STEREO),
};
static const AudioChannelCountToMaskMap inLayouts =
createChannelMaskMap(supportedInChannelLayouts);
return inLayouts;
}
#undef MAKE_LAYOUT_MASK
#define MAKE_INDEX_MASK(n) \
AudioChannelLayout::make<AudioChannelLayout::Tag::indexMask>(AudioChannelLayout::INDEX_MASK_##n)
const AudioChannelCountToMaskMap& getSupportedChannelIndexLayoutMap() {
static const std::set<AudioChannelLayout> supportedIndexChannelLayouts = {
MAKE_INDEX_MASK(1), MAKE_INDEX_MASK(2), MAKE_INDEX_MASK(3), MAKE_INDEX_MASK(4),
MAKE_INDEX_MASK(5), MAKE_INDEX_MASK(6), MAKE_INDEX_MASK(7), MAKE_INDEX_MASK(8),
MAKE_INDEX_MASK(9), MAKE_INDEX_MASK(10), MAKE_INDEX_MASK(11), MAKE_INDEX_MASK(12),
MAKE_INDEX_MASK(13), MAKE_INDEX_MASK(14), MAKE_INDEX_MASK(15), MAKE_INDEX_MASK(16),
MAKE_INDEX_MASK(17), MAKE_INDEX_MASK(18), MAKE_INDEX_MASK(19), MAKE_INDEX_MASK(20),
MAKE_INDEX_MASK(21), MAKE_INDEX_MASK(22), MAKE_INDEX_MASK(23), MAKE_INDEX_MASK(24),
};
static const AudioChannelCountToMaskMap indexLayouts =
createChannelMaskMap(supportedIndexChannelLayouts);
return indexLayouts;
}
#undef MAKE_INDEX_MASK
// Assuming that M is a map whose keys' type is K and values' type is V,
// return the corresponding value of the given key from the map or default
// value if the key is not found.
template <typename M, typename K, typename V>
static auto findValueOrDefault(const M& m, const K& key, V defaultValue) {
auto it = m.find(key);
return it == m.end() ? defaultValue : it->second;
}
AudioChannelLayout getChannelLayoutMaskFromChannelCount(unsigned int channelCount, int isInput) {
return findValueOrDefault(
isInput ? getSupportedChannelInLayoutMap() : getSupportedChannelOutLayoutMap(),
channelCount, getInvalidChannelLayout());
}
AudioChannelLayout getChannelIndexMaskFromChannelCount(unsigned int channelCount) {
return findValueOrDefault(getSupportedChannelIndexLayoutMap(), channelCount,
getInvalidChannelLayout());
}
std::vector<AudioChannelLayout> getChannelMasksFromProfile(
pal_param_device_capability_t* capability) {
const bool isInput = !capability->is_playback;
std::vector<AudioChannelLayout> channels;
for (size_t i = 0; i < AUDIO_PORT_MAX_CHANNEL_MASKS && capability->config->mask[i] != 0; ++i) {
auto channelCount =
isInput ? audio_channel_count_from_in_mask(capability->config->mask[i])
: audio_channel_count_from_out_mask(capability->config->mask[i]);
auto layoutMask = getChannelLayoutMaskFromChannelCount(channelCount, isInput);
if (layoutMask.getTag() == AudioChannelLayout::Tag::layoutMask) {
channels.push_back(layoutMask);
}
auto indexMask = getChannelIndexMaskFromChannelCount(channelCount);
if (indexMask.getTag() == AudioChannelLayout::Tag::indexMask) {
channels.push_back(indexMask);
}
}
return channels;
}
std::vector<int> getSampleRatesFromProfile(pal_param_device_capability_t* capability) {
std::vector<int> sampleRates;
for (int i = 0; capability->config->sample_rate[i] != 0; i++) {
sampleRates.push_back(capability->config->sample_rate[i]);
}
return sampleRates;
}
static AudioFormatDescription make_AudioFormatDescription(AudioFormatType type) {
AudioFormatDescription result;
result.type = type;
return result;
}
static AudioFormatDescription make_AudioFormatDescription(PcmType pcm) {
auto result = make_AudioFormatDescription(AudioFormatType::PCM);
result.pcm = pcm;
return result;
}
static AudioFormatDescription getLegacyToAidlFormat(int palFormat) {
switch (palFormat) {
case PCM_16_BIT:
return make_AudioFormatDescription(PcmType::INT_16_BIT);
case PCM_32_BIT:
return make_AudioFormatDescription(PcmType::INT_32_BIT);
case PCM_24_BIT_PACKED:
return make_AudioFormatDescription(PcmType::INT_24_BIT);
default:
return AudioFormatDescription();
}
}
std::vector<AudioProfile> getSupportedAudioProfiles(pal_param_device_capability_t* capability,
std::string devName) {
std::vector<AudioProfile> supportedProfiles;
std::vector<AudioChannelLayout> channels = getChannelMasksFromProfile(capability);
std::vector<int> sampleRates = getSampleRatesFromProfile(capability);
std::string name = devName + "_" + (capability->is_playback ? "out" : "in");
for (size_t i = 0; i < MAX_SUPPORTED_FORMATS && capability->config->format[i] != 0; ++i) {
auto audioFormatDescription = getLegacyToAidlFormat(capability->config->format[i]);
if (audioFormatDescription.type == AudioFormatType::DEFAULT) {
LOG(WARNING) << __func__ << ": unknown pcm type= " << capability->config->format[i];
continue;
}
AudioProfile audioProfile = {.name = name,
.format = audioFormatDescription,
.channelMasks = channels,
.sampleRates = sampleRates};
LOG(VERBOSE) << __func__ << " found profile " << audioProfile.toString();
supportedProfiles.push_back(std::move(audioProfile));
}
return supportedProfiles;
}
bool isValidPlaybackRate(
const ::aidl::android::media::audio::common::AudioPlaybackRate& playbackRate) {
// For fallback mode MUTE, out of range values should not be rejected.
if (playbackRate.fallbackMode != AudioPlaybackRate::TimestretchFallbackMode::MUTE) {
if (playbackRate.speed < 0.1f || playbackRate.speed > 2.0f) {
LOG(ERROR) << __func__ << ": unsupported speed " << playbackRate.toString();
return false;
}
if (playbackRate.pitch != 1.0f) {
LOG(ERROR) << __func__ << ": unsupported pitch " << playbackRate.toString();
return false;
}
}
auto isValidStretchMode = [=](const auto& stretchMode) {
return (stretchMode == AudioPlaybackRate::TimestretchMode::DEFAULT ||
stretchMode == AudioPlaybackRate::TimestretchMode::VOICE);
};
if (!isValidStretchMode(playbackRate.timestretchMode)) {
LOG(ERROR) << __func__ << ": unsupported timstrecth mode " << playbackRate.toString();
return false;
}
auto isValidFallbackMode = [=](const auto& fallMode) {
return (fallMode == AudioPlaybackRate::TimestretchFallbackMode::MUTE ||
fallMode == AudioPlaybackRate::TimestretchFallbackMode::FAIL);
};
if (!isValidFallbackMode(playbackRate.fallbackMode)) {
LOG(ERROR) << __func__ << ": unsupported fallback mode " << playbackRate.toString();
return false;
}
return true;
}
void setPalDeviceCustomKey(pal_device& palDevice, const std::string& customKey) noexcept {
strlcpy(palDevice.custom_config.custom_key, customKey.c_str(), PAL_MAX_CUSTOM_KEY_SIZE);
}
std::vector<uint8_t> makePalVolumes(std::vector<float> const& volumes) noexcept {
if (volumes.empty()) {
return {};
}
auto channels = volumes.size();
auto palChannelInfo = PlatformConverter::getPalChannelInfoForChannelCount(channels);
const auto dataLength = sizeof(pal_volume_data) + sizeof(pal_channel_vol_kv) * channels;
auto data = std::vector<uint8_t>(dataLength);
auto palVolumeData = reinterpret_cast<pal_volume_data*>(data.data());
palVolumeData->no_of_volpair = channels;
for (unsigned long channel = 0; channel < channels; channel++) {
palVolumeData->volume_pair[channel].channel_mask = palChannelInfo->ch_map[channel];
palVolumeData->volume_pair[channel].vol = volumes[channel];
}
return data;
}
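// Illustrative usage (hypothetical caller): the returned byte vector is handed to PAL
// reinterpreted as pal_volume_data, e.g.
//   auto payload = makePalVolumes({0.5f, 0.5f});
//   pal_stream_set_volume(handle, reinterpret_cast<pal_volume_data*>(payload.data()));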
} // namespace qti::audio::core

View File

@@ -1,624 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <PalDefs.h>
#include <Utils.h>
#include <aidl/android/hardware/audio/common/AudioOffloadMetadata.h>
#include <aidl/android/hardware/audio/common/SinkMetadata.h>
#include <aidl/android/hardware/audio/common/SourceMetadata.h>
#include <aidl/android/hardware/audio/core/IStreamCallback.h>
#include <aidl/android/hardware/audio/core/IStreamOutEventCallback.h>
#include <aidl/android/hardware/audio/core/VendorParameter.h>
#include <aidl/android/media/audio/common/AudioChannelLayout.h>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <aidl/android/media/audio/common/AudioOffloadInfo.h>
#include <aidl/android/media/audio/common/AudioPortConfig.h>
#include <aidl/android/media/audio/common/Int.h>
#include <android/binder_auto_utils.h>
#include <algorithm>
#include <numeric>
#include <unordered_set>
#define DIV_ROUND_UP(x, y) (((x) + (y) - 1) / (y))
#define ALIGN(x, y) ((y) * DIV_ROUND_UP((x), (y)))
#define DEFAULT_SAMPLE_RATE 48000
namespace qti::audio::core {
// forward declaration
struct PlatformStreamCallback;
enum class Usecase : uint16_t {
INVALID = 0,
PRIMARY_PLAYBACK,
LOW_LATENCY_PLAYBACK,
DEEP_BUFFER_PLAYBACK,
ULL_PLAYBACK,
MMAP_PLAYBACK,
COMPRESS_OFFLOAD_PLAYBACK,
PCM_OFFLOAD_PLAYBACK,
VOIP_PLAYBACK,
HAPTICS_PLAYBACK,
SPATIAL_PLAYBACK,
IN_CALL_MUSIC,
PCM_RECORD, // Start of record usecases
FAST_RECORD,
ULTRA_FAST_RECORD,
MMAP_RECORD,
COMPRESS_CAPTURE,
VOIP_RECORD,
VOICE_CALL_RECORD,
HOTWORD_RECORD,
};
#ifdef SEC_AUDIO_COMMON
std::optional<aidl::android::media::audio::common::AudioSource> getMixPortAudioSource(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig) noexcept;
#endif
Usecase getUsecaseTag(const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
std::string getName(const Usecase tag);
/*
* Equivalent to getPeriodSize/getPeriodCount.
* where bufferSize = frameCount * frameSize;
* The single API getBufferConfig can be queried by streams to fetch info about the period size/count.
*/
struct BufferConfig {
size_t bufferSize;
size_t bufferCount;
};
/*
* Each usecase class should provide getLatency, getFrameCount, getBufferConfig which are
* used by Platform.cpp to map to respective usecase.
* getBufferConfig internally needs getBufferSize API.
* Earlier, in HIDL, getBufferSize was used both by the framework and to configure PAL streams.
* In AIDL the frame count is queried when a patch is created, so a frame count is expected
* to be returned instead of a buffer size; framework calls therefore use getFrameCount,
* and internal PAL setups use getBufferConfig.
*
* Ideally, for PCM formats totalBytes (bufferSizeInBytes) = frameCount * frameSize,
* where frameSize = (channelCount * sizeof(audio_format)).
* To summarize: getBufferSize = getFrameCount(portConfig) * getFrameSizeInBytes()
*
* However, for compress usecase, frameSize is 1, so bufferSize = getFrameSize.
*
* The UsecaseConfig template is helpful to declare getBufferConfig and getBufferSize for
* each usecase; each new usecase can extend UsecaseConfig.
* As stated before, frame sizes are different for PCM and compress types.
* UsecaseConfig uses the PCM config by default; to define a PCM usecase, extend like this:
* class PcmUsecase : public UsecaseConfig <PcmUsecase>
* To define a compress usecase, one can extend as below:
* class CompressUsecase : public UsecaseConfig <CompressUsecase, false >
*/
template <typename Usecase, bool IsPcm = true>
class UsecaseConfig {
public:
/*
* @brief Creates the getBufferConfig definition for the usecases.
* BufferConfig publishes the bufferCount and bufferSize needed to configure the PAL streams.
* To utilize this API, configure kPeriodCount and getBufferSize properly in the usecase.
*/
static BufferConfig getBufferConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig) {
BufferConfig config;
config.bufferCount = Usecase::kPeriodCount;
config.bufferSize = Usecase::getBufferSize(mixPortConfig);
return config;
}
/*
* @brief Creates the getBufferSize definition based on whether the usecase is PCM or non-PCM.
* For the PCM case, frameSize is calculated from the channel count and format.
* For compress usecases, frameSize is 1.
*/
static size_t getBufferSize(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig) {
size_t frameCount = Usecase::getFrameCount(mixPortConfig);
size_t frameSizeInBytes = 1;
if (IsPcm) {
frameSizeInBytes = ::aidl::android::hardware::audio::common::getFrameSizeInBytes(
mixPortConfig.format.value(), mixPortConfig.channelMask.value());
}
return frameCount * frameSizeInBytes;
}
};
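/*
* Illustrative sketch (hypothetical usecase, assuming a fixed period duration and a port
* config that carries a sample rate): a PCM usecase only needs to supply kPeriodCount and
* getFrameCount; the buffer math above then gives the PAL configuration.
*
*   class ExamplePcmPlayback : public UsecaseConfig<ExamplePcmPlayback> {
*     public:
*       constexpr static size_t kPeriodCount = 2;
*       constexpr static size_t kPeriodDurationMs = 40;
*       static size_t getFrameCount(
*               const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig) {
*           // e.g. 48000 Hz * 40 ms / 1000 = 1920 frames per period
*           return (mixPortConfig.sampleRate.value().value * kPeriodDurationMs) / 1000;
*       }
*   };
*
* For a 16-bit stereo port this yields getBufferSize = 1920 frames * 4 bytes = 7680 bytes per
* period, and getBufferConfig reports {7680, 2}.
*/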
/**
* This port is opened by default and receives routing, audio mode and volume
* controls related to voice calls
**/
class PrimaryPlayback : public UsecaseConfig<PrimaryPlayback> {
public:
#ifdef SEC_AUDIO_COMMON
constexpr static size_t kPeriodCount = 4;
constexpr static size_t kPeriodDurationMs = 20;
#else
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPeriodDurationMs = 40;
#endif
constexpr static size_t kPlatformDelayMs = 29;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
};
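// Worked example: with either set of defaults above, getLatency() = 20 ms * 4 + 29 ms = 40 ms * 2 + 29 ms = 109 ms.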
class DeepBufferPlayback : public UsecaseConfig<DeepBufferPlayback> {
public:
#ifdef SEC_AUDIO_COMMON
constexpr static size_t kPeriodCount = 4;
constexpr static size_t kPeriodDurationMs = 20;
#else
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPeriodDurationMs = 40;
#endif
constexpr static size_t kPlatformDelayMs = 49;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
};
class LowLatencyPlayback : public UsecaseConfig<LowLatencyPlayback> {
public:
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPlatformDelayMs = 13;
constexpr static size_t kPeriodDurationMs = 4;
static std::unordered_set<size_t> kSupportedFrameSizes;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
};
class UllPlayback : public UsecaseConfig<UllPlayback> {
public:
constexpr static size_t kPlatformDelayMs = 4;
constexpr static uint32_t kPeriodCount = 512;
constexpr static size_t kPeriodDurationMs = 3;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs + kPlatformDelayMs; }
};
class MmapUsecaseBase {
public:
virtual ~MmapUsecaseBase() {}
virtual void setPalHandle(pal_stream_handle_t* handle);
virtual int32_t createMMapBuffer(int64_t frameSize, int32_t* fd, int64_t* burstSizeFrames,
int32_t* flags, int32_t* bufferSizeFrames);
virtual int32_t getMMapPosition(int64_t* frames, int64_t* timeNs);
virtual int32_t start();
virtual int32_t stop();
protected:
pal_stream_handle_t* mPalHandle{nullptr};
bool mIsStarted = false;
};
class MMapPlayback : public MmapUsecaseBase, public UsecaseConfig<MMapPlayback> {
public:
constexpr static size_t kPeriodDurationMs = 1;
constexpr static size_t kPlatformDelayMs = 3;
constexpr static uint32_t kPeriodCount = 512;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs + kPlatformDelayMs; }
};
class CompressPlayback : public UsecaseConfig<CompressPlayback, false /*IsPcm*/> {
public:
static constexpr size_t kPeriodSize = 32 * 1024;
static constexpr size_t kPeriodCount = 4;
static constexpr int32_t kLatencyMs = 50;
static constexpr size_t kPlatformDelayMs = 30;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kLatencyMs; }
class Flac final {
public:
static constexpr size_t kPeriodSize = 256 * 1024;
inline const static std::string kMinBlockSize{"music_offload_flac_min_blk_size"};
inline const static std::string kMaxBlockSize{"music_offload_flac_max_blk_size"};
inline const static std::string kMinFrameSize{"music_offload_flac_min_frame_size"};
inline const static std::string kMaxFrameSize{"music_offload_flac_max_frame_size"};
};
class Alac final {
public:
inline const static std::string kFrameLength{"music_offload_alac_frame_length"};
inline const static std::string kCompatVer{"music_offload_alac_compatible_version"};
inline const static std::string kBitDepth{"music_offload_alac_bit_depth"};
inline const static std::string kPb{"music_offload_alac_pb"};
inline const static std::string kMb{"music_offload_alac_mb"};
inline const static std::string kKb{"music_offload_alac_kb"};
inline const static std::string kNumChannels{"music_offload_alac_num_channels"};
inline const static std::string kMaxRun{"music_offload_alac_max_run"};
inline const static std::string kMaxFrameBytes{"music_offload_alac_max_frame_bytes"};
inline const static std::string kBitRate{"music_offload_alac_avg_bit_rate"};
inline const static std::string kSamplingRate{"music_offload_alac_sampling_rate"};
inline const static std::string kChannelLayoutTag{"music_offload_alac_channel_layout_tag"};
};
class Vorbis final {
public:
inline const static std::string kBitStreamFormat{"music_offload_vorbis_bitstream_fmt"};
};
class Ape final {
public:
inline const static std::string kCompatibleVersion{"music_offload_ape_compatible_version"};
inline const static std::string kCompressionLevel{"music_offload_ape_compression_level"};
inline const static std::string kFormatFlags{"music_offload_ape_format_flags"};
inline const static std::string kBlocksPerFrame{"music_offload_ape_blocks_per_frame"};
inline const static std::string kFinalFrameBlocks{"music_offload_ape_final_frame_blocks"};
inline const static std::string kTotalFrames{"music_offload_ape_total_frames"};
inline const static std::string kBitsPerSample{"music_offload_ape_bits_per_sample"};
inline const static std::string kNumChannels{"music_offload_ape_num_channels"};
inline const static std::string kSampleRate{"music_offload_ape_sample_rate"};
inline const static std::string kSeekTablePresent{"music_offload_seek_table_present"};
};
class Wma final {
public:
inline const static std::string kFormatTag{"music_offload_wma_format_tag"};
inline const static std::string kBlockAlign{"music_offload_wma_block_align"};
inline const static std::string kBitPerSample{"music_offload_wma_bit_per_sample"};
inline const static std::string kChannelMask{"music_offload_wma_channel_mask"};
inline const static std::string kEncodeOption{"music_offload_wma_encode_option"};
inline const static std::string kEncodeOption1{"music_offload_wma_encode_option1"};
inline const static std::string kEncodeOption2{"music_offload_wma_encode_option2"};
};
class Opus final {
public:
inline const static std::string kBitStreamFormat{"music_offload_opus_bitstream_format"};
inline const static std::string kPayloadType{"music_offload_opus_payload_type"};
inline const static std::string kVersion{"music_offload_opus_version"};
inline const static std::string kNumChannels{"music_offload_opus_num_channels"};
inline const static std::string kPreSkip{"music_offload_opus_pre_skip"};
inline const static std::string kOutputGain{"music_offload_opus_output_gain"};
inline const static std::string kMappingFamily{"music_offload_opus_mapping_family"};
inline const static std::string kStreamCount{"music_offload_opus_stream_count"};
inline const static std::string kCoupledCount{"music_offload_opus_coupled_count"};
inline const static std::string kChannelMap0{"music_offload_opus_channel_map0"};
inline const static std::string kChannelMap1{"music_offload_opus_channel_map1"};
inline const static std::string kChannelMap2{"music_offload_opus_channel_map2"};
inline const static std::string kChannelMap3{"music_offload_opus_channel_map3"};
inline const static std::string kChannelMap4{"music_offload_opus_channel_map4"};
inline const static std::string kChannelMap5{"music_offload_opus_channel_map5"};
inline const static std::string kChannelMap6{"music_offload_opus_channel_map6"};
inline const static std::string kChannelMap7{"music_offload_opus_channel_map7"};
};
static int32_t palCallback(pal_stream_handle_t* palHandle, uint32_t eventId,
uint32_t* eventData, uint32_t eventSize, uint64_t cookie);
explicit CompressPlayback(
const ::aidl::android::media::audio::common::AudioOffloadInfo& offloadInfo,
PlatformStreamCallback* const platformStreamCallback,
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
/* To reconfigure the codec, gapless info */
void setAndConfigureCodecInfo(pal_stream_handle_t* handle);
void configureGapless(pal_stream_handle_t* handle);
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return);
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async);
void updateOffloadMetadata(
const ::aidl::android::hardware::audio::common::AudioOffloadMetadata& offloadMetaData);
void updateSourceMetadata(
const ::aidl::android::hardware::audio::common::SourceMetadata& sourceMetaData);
int64_t getPositionInFrames(pal_stream_handle_t* palHandle);
void onFlush();
bool isGaplessConfigured() const noexcept { return mIsGaplessConfigured; }
protected:
void configureDefault();
// configure the codec info which is cached already
bool configureCodecInfo() const;
// configure the gapless info which is cached already
bool configureGapLessMetadata();
protected:
// dynamic compress info
::aidl::android::hardware::audio::common::AudioOffloadMetadata mOffloadMetadata{};
const ::aidl::android::hardware::audio::common::SourceMetadata* mSourceMetadata{nullptr};
// this is static info at the stream creation, for dynamic info check AudioOffloadMetadata
const ::aidl::android::media::audio::common::AudioOffloadInfo& mOffloadInfo;
uint16_t mCompressBitWidth{0};
pal_stream_handle_t* mCompressPlaybackHandle{nullptr};
pal_snd_dec_t mPalSndDec{};
int32_t mSampleRate;
::aidl::android::media::audio::common::AudioFormatDescription mCompressFormat;
::aidl::android::media::audio::common::AudioChannelLayout mChannelLayout;
int32_t mBitWidth;
int64_t mTotalDSPFrames{0};
int64_t mPrevFrames{0};
const ::aidl::android::media::audio::common::AudioPortConfig& mMixPortConfig;
PlatformStreamCallback * const mPlatformStreamCallback;
std::atomic<bool> mIsGaplessConfigured = false;
};
class PcmOffloadPlayback : public UsecaseConfig<PcmOffloadPlayback> {
public:
explicit PcmOffloadPlayback(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig)
: mMixPortConfig(mixPortConfig) {}
constexpr static size_t kPeriodDurationMs = 80;
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPlatformDelayMs = 30;
constexpr static size_t kMinPeriodSize = 512;
constexpr static size_t kMaxPeriodSize = 240 * 1024;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
int64_t getPositionInFrames(pal_stream_handle_t* palHandle);
void onFlush();
private:
int64_t mTotalDSPFrames{0};
int64_t mPrevFrames{0};
const ::aidl::android::media::audio::common::AudioPortConfig& mMixPortConfig;
};
class VoipPlayback : public UsecaseConfig<VoipPlayback> {
public:
constexpr static size_t kPeriodDurationMs = 20;
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPlatformDelayMs = 30;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
};
class SpatialPlayback : public UsecaseConfig<SpatialPlayback> {
public:
constexpr static size_t kPeriodDurationMs = 10;
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPlatformDelayMs = 13;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
};
class InCallMusic : public UsecaseConfig<InCallMusic> {
public:
constexpr static size_t kPeriodCount = 4;
constexpr static size_t kPlatformDelayMs = 0;
constexpr static size_t kPeriodDurationMs = 20;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return PrimaryPlayback::getLatency(); }
};
class HapticsPlayback : public UsecaseConfig<HapticsPlayback> {
public:
#ifdef SEC_AUDIO_SUPPORT_HAPTIC_PLAYBACK // Set the same as Generic
constexpr static size_t kPeriodCount = 4;
constexpr static size_t kPlatformDelayMs = 30;
constexpr static size_t kPeriodDurationMs = 20;
#else
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPlatformDelayMs = 30;
constexpr static size_t kPeriodDurationMs = 4;
#endif
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPeriodDurationMs * kPeriodCount + kPlatformDelayMs; }
};
class PcmRecord : public UsecaseConfig<PcmRecord> {
public:
constexpr static uint32_t kCaptureDurationMs = 20;
constexpr static uint32_t kPeriodCount = 4;
constexpr static size_t kFMQMinFrameSize = 160;
constexpr static size_t kPlatformDelayMs = 20;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kCaptureDurationMs * kPeriodCount + kPlatformDelayMs; }
};
class FastRecord : public UsecaseConfig<FastRecord> {
public:
constexpr static uint32_t kCaptureDurationMs = 5;
constexpr static size_t kPeriodCount = 4;
constexpr static size_t kPlatformDelayMs = 5;
bool mIsWFDCapture{false};
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPlatformDelayMs; }
};
class UltraFastRecord : public UsecaseConfig<UltraFastRecord> {
public:
#ifdef SEC_AUDIO_SAMSUNGRECORD
constexpr static int32_t kSampleRate = 48000;
#endif
constexpr static uint32_t kCaptureDurationMs = 2;
// The below values at the moment are not generic, TODO make generic
constexpr static size_t kPeriodCount = 512;
constexpr static size_t kPlatformDelayMs = 2;
bool mIsWFDCapture{false};
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPlatformDelayMs; }
};
class MMapRecord : public MmapUsecaseBase, public UsecaseConfig<MMapRecord> {
public:
constexpr static uint32_t kCaptureDurationMs = 2;
constexpr static size_t kPeriodCount = 512;
constexpr static size_t kPlatformDelayMs = 4;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPlatformDelayMs; }
};
class HotwordRecord : public UsecaseConfig<HotwordRecord> {
public:
constexpr static uint32_t kPeriodCount = 4;
constexpr static size_t kPlatformDelayMs = 0;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
// use same as pcm record
static int32_t getLatency() { return PcmRecord::getLatency(); }
pal_stream_handle_t* getPalHandle(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
bool isStRecord() { return mIsStRecord; }
#ifdef SEC_AUDIO_SOUND_TRIGGER_TYPE
void enableSeamless(bool enable) { mIsSeamless = enable; }
#endif
private:
bool mIsStRecord{false};
#ifdef SEC_AUDIO_SOUND_TRIGGER_TYPE
bool mIsSeamless{false};
#endif
};
class VoipRecord : public UsecaseConfig<VoipRecord> {
public:
constexpr static uint32_t kCaptureDurationMs = 20;
constexpr static uint32_t kPeriodCount = 4;
constexpr static size_t kPlatformDelayMs = 0;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return PcmRecord::getLatency(); }
};
class VoiceCallRecord : public UsecaseConfig<VoiceCallRecord> {
public:
constexpr static size_t kCaptureDurationMs = 20;
constexpr static size_t kPeriodCount = 2;
constexpr static size_t kPlatformDelayMs = 0;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
pal_incall_record_direction getRecordDirection(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return PcmRecord::getLatency(); }
};
class CompressCapture : public UsecaseConfig<CompressCapture, false /*IsPcm*/> {
public:
constexpr static size_t kPlatformDelayMs = 20;
static size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
static int32_t getLatency() { return kPlatformDelayMs; }
class Aac final {
public:
inline static const std::string kDSPAacBitRate{"dsp_aac_audio_bitrate"};
inline static const std::string kDSPAacGlobalCutoffFrequency{
"dsp_aac_audio_global_cutoff_frequency"};
enum EncodingMode {
LC = 0x02,
SBR = 0x05,
PS = 0x1D,
};
enum EncodingFormat {
ADTS = 0x00,
LOAS = 0x01,
RAW = 0x03,
LATM = 0x04,
};
constexpr static uint32_t kAacLcPCMSamplesPerFrame = 1024;
constexpr static uint32_t kHeAacPCMSamplesPerFrame = 2048;
constexpr static int32_t kAacLcMonoMinSupportedBitRate = 8000;
constexpr static int32_t kAacLcStereoMinSupportedBitRate = 16000;
constexpr static int32_t kHeAacMonoMinSupportedBitRate1 = 10000;
constexpr static int32_t kHeAacMonoMinSupportedBitRate2 = 12000;
constexpr static int32_t kHeAacStereoMinSupportedBitRate1 = 18000;
constexpr static int32_t kHeAacStereoMinSupportedBitRate2 = 24000;
constexpr static int32_t kHeAacPsStereoMinSupportedBitRate1 = 10000;
constexpr static int32_t kHeAacPsStereoMinSupportedBitRate2 = 12000;
constexpr static int32_t kAacLcMonoMaxSupportedBitRate = 192000;
constexpr static int32_t kAacLcStereoMaxSupportedBitRate = 384000;
constexpr static int32_t kHeAacMonoMaxSupportedBitRate = 192000;
constexpr static int32_t kHeAacStereoMaxSupportedBitRate = 192000;
constexpr static int32_t kHeAacPstereoMaxSupportedBitRate = 192000;
static const uint32_t KAacMaxOutputSize = 2048; // bytes
static const int32_t kAacDefaultBitrate = 36000; // bps
};
constexpr static size_t kPeriodCount = 4;
explicit CompressCapture(
const ::aidl::android::media::audio::common::AudioFormatDescription& format,
int32_t sampleRate,
const ::aidl::android::media::audio::common::AudioChannelLayout& channelLayout);
void setPalHandle(pal_stream_handle_t* handle);
ndk::ScopedAStatus setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async);
size_t getLatencyMs();
ndk::ScopedAStatus getVendorParameters(
const std::vector<std::string>& in_ids,
std::vector<::aidl::android::hardware::audio::core::VendorParameter>* _aidl_return);
bool configureCodecInfo();
int32_t getAACMinBitrateValue();
int32_t getAACMaxBitrateValue();
uint32_t getAACMaxBufferSize();
void setAACDSPBitRate();
void advanceReadCount();
int64_t getPositionInFrames();
private:
const ::aidl::android::media::audio::common::AudioFormatDescription& mCompressFormat;
const ::aidl::android::media::audio::common::AudioChannelLayout& mChannelLayout;
int32_t mSampleRate{};
size_t mPCMSamplesPerFrame{0};
pal_stream_handle_t* mCompressHandle{nullptr};
size_t mNumReadCalls{0};
pal_snd_enc_t mPalSndEnc{};
};
} // namespace qti::audio::core

View File

@@ -1,762 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <PalApi.h>
#include <aidl/android/hardware/audio/core/IModule.h>
#include <aidl/android/hardware/audio/core/VendorParameter.h>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <aidl/android/media/audio/common/AudioFormatDescription.h>
#include <aidl/android/media/audio/common/AudioPlaybackRate.h>
#include <aidl/android/media/audio/common/AudioPort.h>
#include <aidl/android/media/audio/common/AudioPortConfig.h>
#include <aidl/android/hardware/audio/core/ITelephony.h>
#include <aidl/android/media/audio/common/MicrophoneDynamicInfo.h>
#include <aidl/android/media/audio/common/MicrophoneInfo.h>
#include <qti-audio-core/AudioUsecase.h>
#include <system/audio.h>
#include <unordered_map>
#ifdef SEC_AUDIO_COMMON
#include <qti-audio-core/SecPlatformDefs.h>
#include "SecPalDefs.h"
#endif
#ifdef ENABLE_TAS_SPK_PROT
#include "TISpeakerProtDefs.h"
#endif
#ifdef SEC_AUDIO_SAMSUNGRECORD
#include "PreProcess.h"
#endif
#ifdef SEC_AUDIO_SAMSUNGRECORD
#define AUDIO_CAPTURE_PERIOD_DURATION_MSEC 20
#endif
namespace qti::audio::core {
struct HdmiParameters {
int controller;
int stream;
pal_device_id_t deviceId;
};
enum class PlaybackRateStatus { SUCCESS, UNSUPPORTED, ILLEGAL_ARGUMENT };
using GetLatency = int32_t (*)();
using GetFrameCount =
size_t (*)(const ::aidl::android::media::audio::common::AudioPortConfig& portConfig);
using GetBufferConfig = struct BufferConfig (*)(
const ::aidl::android::media::audio::common::AudioPortConfig& portConfig);
/*
* Helper to map the getLatency, getFrameCount and getBufferConfig APIs
* from the platform to the audio usecase. When introducing a new usecase,
* always provide these APIs.
*/
struct UsecaseOps {
GetLatency getLatency;
GetFrameCount getFrameCount;
GetBufferConfig getBufferConfig;
};
template <typename UsecaseClass>
inline UsecaseOps makeUsecaseOps() {
UsecaseOps ops;
ops.getLatency = UsecaseClass::getLatency;
ops.getFrameCount = UsecaseClass::getFrameCount;
ops.getBufferConfig = UsecaseClass::getBufferConfig;
return ops;
}
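/*
* Illustrative usage (hypothetical lookup table, assuming the usecase classes declared in
* AudioUsecase.h):
*   static const std::unordered_map<Usecase, UsecaseOps> kUsecaseOps = {
*       {Usecase::DEEP_BUFFER_PLAYBACK, makeUsecaseOps<DeepBufferPlayback>()},
*       {Usecase::LOW_LATENCY_PLAYBACK, makeUsecaseOps<LowLatencyPlayback>()},
*   };
*/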
class Platform {
private:
explicit Platform();
Platform(const Platform&) = delete;
Platform& operator=(const Platform& x) = delete;
Platform(Platform&& other) = delete;
Platform& operator=(Platform&& other) = delete;
static int palGlobalCallback(uint32_t event_id, uint32_t* event_data, uint64_t cookie);
public:
// BT related params used across
bool bt_lc3_speech_enabled;
static btsco_lc3_cfg_t btsco_lc3_cfg;
#ifdef SEC_AUDIO_BLUETOOTH
bool bt_nrec{false};
bool bt_sco_on{false};
#endif
#ifdef SEC_AUDIO_SUPPORT_BT_RVC
bool bt_rvc_support{false};
#endif
#ifdef SEC_AUDIO_BT_OFFLOAD
audio_format_t bt_a2dp_format{AUDIO_FORMAT_DEFAULT};
#endif
#ifdef ENABLE_TAS_SPK_PROT
static pal_tispk_prot_param_t tiSpkProtParam;
#endif
mutable bool mUSBCapEnable;
int mCallState;
int mCallMode;
#ifdef SEC_AUDIO_CALL
float mVoiceVolume{-1.0f};
bool mIsVoWiFi{false};
#ifdef SEC_AUDIO_CALL_SATELLITE
bool mSatelliteCall{false};
#endif
bool mRingbacktone{false};
bool mVoiceMuteState[2]{false/*RX*/,false/*TX*/};
bool mNbQuality{false};
#ifdef SEC_AUDIO_WB_AMR
int mCallBand = WB;
#endif
#ifdef SEC_AUDIO_CALL_FORWARDING
int mCallMemo{CALLMEMO_OFF};
bool mCallForwarding{false};
#endif
int mDeviceInfo{VOICE_DEVICE_INVALID};
bool mDexConnected{false};
bool mDexPadConnected{false};
std::vector<::aidl::android::media::audio::common::AudioDevice> mTelephonyDevices{};
#endif
#ifdef SEC_AUDIO_ALL_SOUND_MUTE
bool mAllSoundMute{false};
#endif
#ifdef SEC_AUDIO_CALL_HAC
bool mHacIncall{false};
int mHacMode{HAC_MODE_MIC};
#endif
#ifdef SEC_AUDIO_CALL_TRANSLATION
bool mCallTranslation{false};
int mVoiceTxControl{TRANSLATION_UNMUTE};
int mVoiceRxControl{TRANSLATION_UNMUTE};
#endif
#ifdef SEC_AUDIO_CALL_VOIP
bool mCngEnable{false};
uint32_t mVoipIsolationMode{EFFECTS_MICMODE_STANDARD};
uint32_t mCallIsolationMode{EFFECTS_MICMODE_STANDARD};
std::vector<pal_device> mPalDevicesOnVoipRx{};
#endif
#ifdef SEC_AUDIO_INTERPRETER_MODE
std::vector<::aidl::android::media::audio::common::AudioDevice> mOutDeepDevices{};
#endif
#ifdef SEC_AUDIO_SCREEN_MIRRORING // { SUPPORT_VOIP_VIA_SMART_VIEW
bool mVoipViaSmartView{false};
#endif // } SUPPORT_VOIP_VIA_SMART_VIEW
#ifdef SEC_AUDIO_ENFORCED_AUDIBLE
int mEnforcePlaybackState{NOT_MUTE};
#endif
#ifdef SEC_AUDIO_DUAL_SPEAKER
bool mSpeakerLeftAmpOff{false};
#endif
#if defined(SEC_AUDIO_DUAL_SPEAKER) || defined(SEC_AUDIO_MULTI_SPEAKER)
int mRotationInfo{TOP_UP};
int mFlatmotionInfo{FLATMOTION_FLAT};
#endif
#ifdef SEC_AUDIO_VOICE_TX_FOR_INCALL_MUSIC
bool mScreenCall{false};
#endif
#ifdef SEC_AUDIO_FMRADIO
fmradio_config_t mFM{false,AUDIO_DEVICE_NONE,0.0f,false};
#endif
#if defined(SEC_AUDIO_SUPPORT_FLIP_CALL) || defined(SEC_AUDIO_SUPPORT_SOUNDBOOSTER_FOLD_PARAM_ON_DSP)
bool mFolderclosed{false};
bool mFlexmode{false};
#endif
#ifdef SEC_AUDIO_SUPPORT_GAMECHAT_SPK_AEC
bool mGamechatMode{false};
#endif
#ifdef SEC_AUDIO_SAMSUNGRECORD
uint32_t preprocess_eq_enables{S_REC};
bool multidevice_rec{false};
int32_t mUnconfiguredFlagsReceived{0};
#endif
#ifdef SEC_AUDIO_SUPPORT_REMOTE_MIC
bool mAasEnabled{false};
float mAasVolume{1.0f};
#endif
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
bool usb_input_dev_enabled{false};
pal_device_id_t listenback_device{PAL_DEVICE_NONE};
bool listenback_on{false};
std::mutex karaoke_mutex;
#endif
#ifdef SEC_AUDIO_CAMCORDER
bool tx_data_inversion{false};
#endif
#ifdef SEC_AUDIO_SUPPORT_NSRI
bool is_NSRI_secure{false};
#endif
#ifdef SEC_AUDIO_SOUND_TRIGGER_TYPE
int register_voice_keyword{0};
bool seamless_enabled{false};
#endif
#ifdef SEC_AUDIO_INTERPRETER_MODE
int interpreter_mode{INTERPRETER_OFF};
#endif
#ifdef SEC_AUDIO_USB_GAIN_CONTROL
bool mUSBGainForCombo{false};
#endif
#ifdef SEC_AUDIO_KARAOKE
bool is_karaoke_on{false};
#endif
static Platform& getInstance();
size_t getFrameCount(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
Usecase const& inTag = Usecase::INVALID);
struct BufferConfig getBufferConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
Usecase const& inTag = Usecase::INVALID);
#ifdef SEC_AUDIO_SUPPORT_UHQ
struct BufferConfig getBufferConfig(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
std::optional<std::pair<::aidl::android::media::audio::common::PcmType, pal_uhqa_state>>& uhqConfig,
Usecase const& inTag);
#endif
int32_t getLatencyMs(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
Usecase const& inTag = Usecase::INVALID);
std::vector<::aidl::android::media::audio::common::MicrophoneInfo> getMicrophoneInfo() {
return mMicrophoneInfo;
}
std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>
getMicrophoneDynamicInfo(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices);
bool setParameter(const std::string& key, const std::string& value);
bool setBluetoothParameters(const char* kvpairs);
#ifdef ENABLE_TAS_SPK_PROT
bool setSpeakerProtectionParameters(const char* kvpairs);
#endif
bool setVendorParameters(
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>&
in_parameters,
bool in_async);
std::string getParameter(const std::string& key) const;
std::string toString() const;
#ifdef SEC_AUDIO_ADD_FOR_DEBUG
std::string toStringSec() const;
void dump(int fd) const;
#endif
bool isSoundCardUp() const noexcept;
bool isSoundCardDown() const noexcept;
#ifdef SEC_AUDIO_BT_OFFLOAD
bool isBluetoothA2dpDevice(const ::aidl::android::media::audio::common::AudioDevice& d) const
noexcept;
#endif
#ifdef SEC_AUDIO_RECORDALIVE_SUPPORT_MULTIDEVICE_PROVIDEO
bool IsBtForMultiDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) const noexcept;
#endif
size_t getMinimumStreamSizeFrames(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sources,
const std::vector<::aidl::android::media::audio::common::AudioPortConfig*>& sinks);
std::unique_ptr<pal_stream_attributes> getPalStreamAttributes(
const ::aidl::android::media::audio::common::AudioPortConfig& portConfig,
const bool isInput
#ifdef SEC_AUDIO_SUPPORT_UHQ
, std::optional<std::pair<::aidl::android::media::audio::common::PcmType, pal_uhqa_state>> uhqConfig = std::nullopt
#endif
) const;
std::vector<pal_device> convertToPalDevices(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices)
const noexcept;
/*
* @brief Provides PAL devices for the given mix port and audio devices.
*
* @param mixPortConfig mix port config for which devices are requested
* @param tag usecase tag
* @param setDevices vector of devices for which pal devices are requested
* @param dummyDevice setDevices may be empty; in that case, set this to true if the client
* needs a dummy device in the form of PAL_DEVICE_[IN/OUT]_DUMMY
*/
std::vector<pal_device> configureAndFetchPalDevices(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
const Usecase& tag,
const std::vector<::aidl::android::media::audio::common::AudioDevice>& setDevices,
const bool dummyDevice = false) const;
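// Illustrative call (hypothetical stream-open path): an empty device list plus
// dummyDevice = true yields a PAL_DEVICE_[IN/OUT]_DUMMY entry so the stream can be
// set up before routing arrives, e.g.
//   auto palDevices = platform.configureAndFetchPalDevices(mixPortConfig, tag, {}, true);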
std::vector<pal_device> getDummyPalDevices(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig) const;
#ifdef SEC_AUDIO_COMMON
std::vector<pal_device> configureSecPalDevicesForTelephony(
const std::vector<::aidl::android::media::audio::common::AudioDevice>& setDevices) noexcept;
std::vector<pal_device> configureSecPalDevicesForPlayback(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
const Usecase& tag,
const std::vector<::aidl::android::media::audio::common::AudioDevice>& setDevices) noexcept;
std::vector<pal_device> configureSecPalDevicesForCapture(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
const Usecase& tag,
const std::vector<::aidl::android::media::audio::common::AudioDevice>& setDevices) const noexcept;
#endif
/*
* @brief Gets the stream position in the DSP pipeline.
*
* @param,
* Input Parameters:
* palHandle, a valid stream pal handle
* sampleRate, a valid stream sample rate
*
* Output Parameters:
* dspFrames, num of frames delivered by DSP
*/
void getPositionInFrames(pal_stream_handle_t* palHandle, int32_t const& sampleRate,
int64_t* const dspFrames) const;
/*
* @brief requiresBufferReformat is used to check whether a format converter is needed for
* a PCM format; it is not applicable to compressed formats.
* The framework may use a format that is not supported by the lower layers, so the HAL
* needs to convert the buffer to the desired format before writing.
*
* @param portConfig : mixport config of the stream.
* @return a pair of input and output audio_format_t in case a format converter
* is needed, otherwise nullopt.
* For example, a mix port using audio format FLOAT is not supported; INT_32, the closest
* to FLOAT, can be used as the target format, so a pair of
* <AUDIO_FORMAT_PCM_FLOAT, AUDIO_FORMAT_PCM_32_BIT> is returned.
* The caller can utilize this to create a converter based on the provided input and output formats.
*/
static std::optional<std::pair<audio_format_t, audio_format_t>> requiresBufferReformat(
const ::aidl::android::media::audio::common::AudioPortConfig& portConfig);
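/*
* Illustrative caller-side sketch (hypothetical stream code, assuming a helper
* createConverter(src, dst) exists):
*   if (auto formats = Platform::requiresBufferReformat(mixPortConfig)) {
*       const auto& [sourceFormat, targetFormat] = *formats;
*       mConverter = createConverter(sourceFormat, targetFormat); // convert each buffer before writing to PAL
*   }
*/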
/*
* @brief creates a pal payload for a pal volume and sets to PAL
* @param handle : valid pal stream handle
* @param volumes vector of volumes in floats
* return 0 in success, error code otherwise
*/
int setVolume(pal_stream_handle_t* handle, const std::vector<float>& volumes) const;
#ifdef SEC_AUDIO_COMMON
std::vector<uint8_t> getPalVolume(const std::vector<float>& volumes);
#endif
std::unique_ptr<pal_buffer_config_t> getPalBufferConfig(const size_t bufferSize,
const size_t bufferCount) const;
std::vector<::aidl::android::media::audio::common::AudioProfile> getDynamicProfiles(
const ::aidl::android::media::audio::common::AudioPort& dynamicDeviceAudioPort) const;
int handleDeviceConnectionChange(
const ::aidl::android::media::audio::common::AudioPort& deviceAudioPort,
const bool isConnect) const;
uint32_t getBluetoothLatencyMs(
const std::vector<::aidl::android::media::audio::common::AudioDevice>&
bluetoothDevices);
std::unique_ptr<pal_stream_attributes> getDefaultTelephonyAttributes() const;
std::unique_ptr<pal_stream_attributes> getDefaultCRSTelephonyAttributes() const;
void configurePalDevicesCustomKey(std::vector<pal_device>& palDevices,
const std::string& customKey) const;
bool setStreamMicMute(pal_stream_handle_t* streamHandlePtr, const bool muted);
bool getMicMuteStatus();
void setMicMuteStatus(bool mute);
bool updateScreenState(const bool isTurnedOn) noexcept;
bool isScreenTurnedOn() const noexcept;
#ifdef SEC_AUDIO_SPK_AMP_MUTE
bool getSpeakerMuteStatus() const noexcept;
void setSpeakerMute(const bool mute) noexcept;
#endif
bool isHDREnabled() const { return mHDREnabled; }
void setHDREnabled(bool const& enable) { mHDREnabled = enable; }
int32_t getHDRSampleRate() const { return mHDRSampleRate; }
void setHDRSampleRate(int32_t const& sampleRate) { mHDRSampleRate = sampleRate; }
int32_t getHDRChannelCount() const { return mHDRChannelCount; }
void setHDRChannelCount(int32_t const& channelCount) { mHDRChannelCount = channelCount; }
bool isWNREnabled() const { return mWNREnabled; }
void setWNREnabled(bool const& enable) { mWNREnabled = enable; }
bool isANREnabled() const { return mANREnabled; }
void setANREnabled(bool const& enable) { mANREnabled = enable; }
bool isInverted() const { return mInverted; }
void setInverted(bool const& enable) { mInverted = enable; }
std::string getOrientation() const { return mOrientation; }
void setOrientation(std::string const& value) { mOrientation = value; }
std::string getFacing() const { return mFacing; }
void setFacing(std::string const& value) { mFacing = value; }
void setTelephony(const std::weak_ptr<::aidl::android::hardware::audio::core::ITelephony> telephony) noexcept {
mTelephony = telephony;
}
std::weak_ptr<::aidl::android::hardware::audio::core::ITelephony> getTelephony() const noexcept {
return mTelephony;
}
/*
* @brief creates a pal payload for a speed factor and sets to PAL
* @param handle : pal stream handle
* @param tag usecase tag
* @param playbackRate playback rate to be set
* return PlaybackRateStatus::SUCCESS on success, or if stream handle is not set.
* return PlaybackRateStatus::UNSUPPORTED if the usecase does not support speed operations
* or the speed parameters are not in the supported range
* return PlaybackRateStatus::ILLEGAL_ARGUMENT in case of any other failure
*/
PlaybackRateStatus setPlaybackRate(
pal_stream_handle_t* handle, const Usecase& tag,
const ::aidl::android::media::audio::common::AudioPlaybackRate& playbackRate);
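/*
* Illustrative caller sketch (hypothetical stream code) mapping the status to a binder result:
*   switch (platform.setPlaybackRate(mPalHandle, mTag, playbackRate)) {
*       case PlaybackRateStatus::SUCCESS: return ndk::ScopedAStatus::ok();
*       case PlaybackRateStatus::UNSUPPORTED:
*           return ndk::ScopedAStatus::fromExceptionCode(EX_UNSUPPORTED_OPERATION);
*       default: return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
*   }
*/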
void setInCallMusicState(const bool state) noexcept { mInCallMusicEnabled = state; }
bool getInCallMusicState() noexcept { return mInCallMusicEnabled; }
// Set and Get Value Functions for Translate Record.
void setTranslationRecordState(const bool state) noexcept { mIsTranslationRecordEnabled = state; }
bool getTranslationRecordState() noexcept { return mIsTranslationRecordEnabled; }
// Set and Get Value Functions for Voice Call Volume mute during Translation Record Usecase.
void setTranslationRxMuteState(const bool state) noexcept { mIsTranslationRxMuteEnabled = state; }
bool getTranslationRxMuteState() noexcept { return mIsTranslationRxMuteEnabled; }
void setHACEnabled(const bool& enable) noexcept { mIsHACEnabled = enable; }
bool isHACEnabled() const noexcept { return mIsHACEnabled; }
void updateCallState(int callState) { mCallState = callState; }
void updateCallMode(int callMode) { mCallMode = callMode; }
int getCallState() { return mCallState; }
int getCallMode() { return mCallMode; }
#ifdef SEC_AUDIO_CALL
void setTelephonyDevices(const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
mTelephonyDevices = devices;
}
std::vector<::aidl::android::media::audio::common::AudioDevice> getTelephonyDevices()
const { return mTelephonyDevices; }
void setVoiceVolume(const float volume) { mVoiceVolume = volume; }
float getVoiceVolume() { return mVoiceVolume; }
void updateVoWiFiState(bool isVoWiFi) { mIsVoWiFi = isVoWiFi; }
bool getVoWiFiState() { return mIsVoWiFi; }
#ifdef SEC_AUDIO_CALL_SATELLITE
void setSatelliteCall(bool enable) { mSatelliteCall = enable; }
bool getSatelliteCall() { return mSatelliteCall; }
#endif
void setNbQuality(bool enable) { mNbQuality = enable; }
bool getNbQuality() { return mNbQuality; }
void setRingbacktone(bool enable) { mRingbacktone = enable; }
bool getRingbacktone() { return mRingbacktone; }
void setVoiceMuteState(int dir, bool mute) { mVoiceMuteState[dir] = mute; }
bool getVoiceMuteState(int dir) { return mVoiceMuteState[dir]; }
#ifdef SEC_AUDIO_WB_AMR
void updateSecCallBand(int callBand) { mCallBand = callBand; }
int getSecCallBand() { return mCallBand; }
#endif
#ifdef SEC_AUDIO_CALL_FORWARDING
bool isCallForwarding();
void setCallForwarding(bool enable) { mCallForwarding = enable; }
bool getCallForwarding() { return mCallForwarding; };
void setCallMemo(int mode) { mCallMemo = mode; }
int getCallMemo() { return mCallMemo; };
#endif
void setDexConnected(bool connected) { mDexConnected = connected; }
bool getDexConnected() { return mDexConnected; };
void setDexPadConnected(bool connected) { mDexPadConnected = connected; }
bool getDexPadConnected() { return mDexPadConnected; };
void setDeviceInfo(int deviceType) { mDeviceInfo = deviceType; }
int getDeviceInfo() { return mDeviceInfo;}
int GetDeviceType(pal_device_id_t rx_device_id);
#endif
#ifdef SEC_AUDIO_ALL_SOUND_MUTE
void setAllSoundMute(bool mute) { mAllSoundMute = mute; }
bool getAllSoundMute() { return mAllSoundMute; }
#endif
#ifdef SEC_AUDIO_CALL_HAC
void setHacIncall(bool mode) { mHacIncall = mode; }
bool getHacIncall() { return mHacIncall; }
void setHacMode(int mode) { mHacMode = mode; }
int getHacMode() { return mHacMode; }
int GetHacCustomKeyId();
int GetVoWifiHacCustomKeyId();
#endif
#ifdef SEC_AUDIO_BLUETOOTH
void setBtNrecState(bool isOn) { bt_nrec = isOn; }
bool isBtNrecOn() { return bt_nrec; }
void setBtScoState(bool isOn) { bt_sco_on = isOn; }
bool isBtScoOn() { return bt_sco_on; }
#endif
#ifdef SEC_AUDIO_SUPPORT_BT_RVC
void setBtRvcSupportState(bool isOn) { bt_rvc_support = isOn; }
bool isBtRvcSupportState() { return bt_rvc_support; }
#endif
#ifdef SEC_AUDIO_BT_OFFLOAD
void setBtA2dpFormat(audio_format_t format) { bt_a2dp_format = format; }
audio_format_t getBtA2dpFormat() { return bt_a2dp_format; }
#endif
#ifdef SEC_AUDIO_SCREEN_MIRRORING // { SUPPORT_VOIP_VIA_SMART_VIEW
void setVoipViaSmartView(bool enable) { mVoipViaSmartView = enable; }
bool getVoipViaSmartView() { return mVoipViaSmartView; }
#endif // } SUPPORT_VOIP_VIA_SMART_VIEW
#ifdef SEC_AUDIO_ENFORCED_AUDIBLE
void updateEnforcePlaybackState(int mode) { mEnforcePlaybackState = mode; }
int getEnforcePlaybackState() { return mEnforcePlaybackState; }
#endif
#ifdef SEC_AUDIO_DUAL_SPEAKER
void setSpeakerLeftAmpOff(bool isOff) { mSpeakerLeftAmpOff = isOff; }
bool isSpeakerLeftAmpOff() { return mSpeakerLeftAmpOff; }
#endif
#ifdef SEC_AUDIO_CALL_TRANSLATION
void setCallTranslation(bool mode) { mCallTranslation = mode; }
bool getCallTranslation() { return mCallTranslation; }
void setVoiceTxControl(int mode) { mVoiceTxControl = mode; }
int getVoiceTxControl() { return mVoiceTxControl; }
void setVoiceRxControl(int mode) { mVoiceRxControl = mode; }
int getVoiceRxControl() { return mVoiceRxControl; }
#endif
#ifdef SEC_AUDIO_CALL_VOIP
void setCngEnable(bool enable) { mCngEnable = enable; }
bool getCngEnable() { return mCngEnable; }
void setVoipIsolationMode(int mode) { mVoipIsolationMode = mode; }
uint32_t getVoipIsolationMode() { return mVoipIsolationMode; }
void setCallIsolationMode(int mode) { mCallIsolationMode = mode; }
uint32_t getCallIsolationMode() { return mCallIsolationMode; }
#endif
#if defined(SEC_AUDIO_DUAL_SPEAKER) || defined(SEC_AUDIO_MULTI_SPEAKER)
void updateRotationInfo(int mode) { mRotationInfo = mode; }
int getRotationInfo() { return mRotationInfo; }
void updateFlatmotionInfo(int mode) { mFlatmotionInfo = mode; }
int getFlatmotionInfo() { return mFlatmotionInfo; }
#endif
#ifdef SEC_AUDIO_VOICE_TX_FOR_INCALL_MUSIC
void setScreenCall(bool mode) { mScreenCall = mode; }
bool getScreenCall() { return mScreenCall; }
#endif
#ifdef SEC_AUDIO_FMRADIO
void setFMRadioOn(bool on) { mFM.on = on; }
bool getFMRadioOn() { return mFM.on; }
void setFMRadioDevice(audio_devices_t device) { mFM.device = device; }
audio_devices_t getFMRadioDevice() { return mFM.device; }
void setFMRadioVolume(float volume) { mFM.volume = volume; }
float getFMRadioVolume() { return mFM.volume; }
void setFMRadioMute(bool mute) { mFM.mute = mute; }
bool getFMRadioMute() { return mFM.mute; }
#endif
#if defined(SEC_AUDIO_SUPPORT_FLIP_CALL) || defined(SEC_AUDIO_SUPPORT_SOUNDBOOSTER_FOLD_PARAM_ON_DSP)
void setFolderclosed(bool closed){ mFolderclosed = closed; };
bool getFolderclosed(){ return mFolderclosed; };
void setFlexmode(bool mode){ mFlexmode = mode; };
bool getFlexmode(){ return mFlexmode; };
#endif
#ifdef SEC_AUDIO_SUPPORT_GAMECHAT_SPK_AEC
void setGamechatMode(bool mode) { mGamechatMode = mode; }
bool getGamechatMode() { return mGamechatMode; };
#endif
#ifdef SEC_AUDIO_SUPPORT_REMOTE_MIC
void setAasEnabled(const bool enable) { mAasEnabled = enable; }
bool isAasEnabled() { return mAasEnabled; }
void setAasVolume(const float volume) { mAasVolume = volume; }
float getAasVolume() { return mAasVolume; }
#endif
#ifdef SEC_AUDIO_SUPPORT_AFE_LISTENBACK
void setUsbInputEnabled(bool mode) { usb_input_dev_enabled = mode; }
bool isUsbInputEnabled() { return usb_input_dev_enabled; };
void setListenbackDevice(pal_device_id_t device) { listenback_device = device; }
pal_device_id_t getListenbackDevice() { return listenback_device; };
void setListenBackEnabled(bool mode) { listenback_on = mode; }
bool isListenBackEnabled() { return listenback_on; };
#endif
#ifdef SEC_AUDIO_CAMCORDER
void setTxDataInversion(bool on) { tx_data_inversion = on; }
bool isTxDataInversionEnabled() { return tx_data_inversion; }
#endif
#ifdef SEC_AUDIO_SUPPORT_NSRI
void setNSRISecureEnabled(bool on) { is_NSRI_secure = on; }
bool isNSRISecureEnabled() { return is_NSRI_secure; }
#endif
#ifdef SEC_AUDIO_SOUND_TRIGGER_TYPE
void setRegisterVoiceKeyword(int value) { register_voice_keyword = value; }
void setSeamlessEnabled(bool on) { seamless_enabled = on; }
bool isSeamlessEnabled() { return seamless_enabled; }
#endif
#ifdef SEC_AUDIO_INTERPRETER_MODE
void setInterpreterMode(int mode) { interpreter_mode = mode; }
int getInterpreterMode() { return interpreter_mode; }
#endif
#ifdef SEC_AUDIO_USB_GAIN_CONTROL
void setUSBGainForCombo(bool on) { mUSBGainForCombo = on; }
bool getUSBGainForCombo() { return mUSBGainForCombo; }
#endif
bool isA2dpSuspended();
void setWFDProxyChannels(const uint32_t numProxyChannels) noexcept;
void setProxyRecordFMQSize(const size_t& FMQSize) noexcept;
size_t getProxyRecordFMQSize() const noexcept;
uint32_t getWFDProxyChannels() const noexcept;
/* Check if proxy record session is active in PAL_DEVICE_IN_RECORD_PROXY */
std::string IsProxyRecordActive() const noexcept;
bool isIPAsProxyDeviceConnected() const noexcept { return mIsIPAsProxyConnected; };
void setIPAsProxyDeviceConnected(bool isIPAsProxy) noexcept { mIsIPAsProxyConnected = isIPAsProxy; };
void setHapticsVolume(const float hapticsVolume) const noexcept;
void setHapticsIntensity(const int hapticsIntensity) const noexcept;
#ifdef SEC_AUDIO_SUPPORT_HAPTIC_PLAYBACK
void setHapticsSource(const int hapticsSource) const noexcept;
#endif
void updateUHQA(const bool enable) noexcept;
#ifdef SEC_AUDIO_SUPPORT_UHQ
void updateUHQA(const pal_uhqa_state sample) noexcept;
#endif
bool isUHQAEnabled() const noexcept;
#ifdef SEC_AUDIO_SPEAKER_CALIBRATION
void triggerSpeakerCalibration() const noexcept;
#endif
void setFTMSpeakerProtectionMode(uint32_t const heatUpTime, uint32_t const runTime,
bool const isFactoryTest, bool const isValidationMode,
bool const isDynamicCalibration) const noexcept;
std::optional<std::string> getFTMResult() const noexcept;
std::optional<std::string> getSpeakerCalibrationResult() const noexcept;
#ifdef ENABLE_TAS_SPK_PROT
std::optional<std::string> getSpeakerProtectionResult() const noexcept;
#endif
void updateScreenRotation(const ::aidl::android::hardware::audio::core::IModule::ScreenRotation
in_rotation) noexcept;
::aidl::android::hardware::audio::core::IModule::ScreenRotation getCurrentScreenRotation() const
noexcept;
bool platformSupportsOffloadSpeed() { return mOffloadSpeedSupported; }
bool usecaseSupportsOffloadSpeed(const Usecase& tag) {
return platformSupportsOffloadSpeed() && isOffload(tag);
}
bool isOffload(const Usecase& tag) { return tag == Usecase::COMPRESS_OFFLOAD_PLAYBACK; }
int setLatencyMode(uint32_t mode);
int getRecommendedLatencyModes(
std::vector<::aidl::android::media::audio::common::AudioLatencyMode>* _aidl_return);
void configurePalDevices(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
std::vector<pal_device>& palDevices);
void setHdrOnPalDevice(pal_device* palDeviceIn);
bool isHDRARMenabled();
bool isHDRSPFEnabled();
bool getUSBCapEnable() { return mUSBCapEnable; }
#ifdef SEC_AUDIO_SAMSUNGRECORD
bool GetRecMultiMic(const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig, const std::vector<::aidl::android::media::audio::common::AudioDevice>& connectedDevices, Usecase tag) const noexcept;
int match_device_enums(const ::aidl::android::media::audio::common::AudioDevice& device) const noexcept;
int get_device_types(const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) const noexcept;
bool IsSupportPreprocess(const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig, Usecase tag) const noexcept;
uint32_t GetBufferSize(const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig);
int GetRecFormat(const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig, const std::vector<::aidl::android::media::audio::common::AudioDevice>& connectedDevices, Usecase tag);
uint32_t SelectPreProcessSolutions(const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig) const noexcept;
bool isLoopBackOff() { return mIsLoopBackOff; }
bool isRmsTestMode() { return mIsRmsTestMode; }
void setLoopBackOff(bool enable) { mIsLoopBackOff = enable; }
void setRmsTestMode(bool enable) { mIsRmsTestMode = enable; }
void setUnconfiguredFlagsReceived(int32_t flags) { mUnconfiguredFlagsReceived = flags; }
int32_t getUnconfiguredFlagsReceived() { return mUnconfiguredFlagsReceived; }
#endif
#ifdef SEC_AUDIO_KARAOKE
void setKaraokeEnabled(bool on) { is_karaoke_on = on; }
bool isKaraokeEnabled() { return is_karaoke_on; }
static bool isKaraokeUsecases(const Usecase& tag);
#endif
#ifdef SEC_AUDIO_COMMON
bool isSecAudioFeatureSupported() { return mIsSecAudioFeatureSupported; }
void setSecAudioFeatureSupported(bool supported) { mIsSecAudioFeatureSupported = supported; }
#endif
#if defined(SEC_AUDIO_OFFLOAD_COMPRESSED_OPUS) && defined(SEC_AUDIO_OFFLOAD_SOUNDSPEED)
PlaybackRateStatus setSecPlaybackRate(pal_stream_handle_t* handle, const Usecase& tag,
const ::aidl::android::media::audio::common::AudioPlaybackRate& playbackRate,
const std::optional< ::aidl::android::media::audio::common::AudioOffloadInfo>& offloadInfo,
std::function<void(const float&)> sendSpeed);
bool isSecSupportsOffloadSpeed(const std::optional< ::aidl::android::media::audio::common::AudioOffloadInfo>& offloadInfo);
#endif
private:
void customizePalDevices(
const ::aidl::android::media::audio::common::AudioPortConfig& mixPortConfig,
const Usecase& tag, std::vector<pal_device>& palDevices) const noexcept;
void configurePalDevicesForHIFIPCMFilter(std::vector<pal_device>&) const noexcept;
std::vector<::aidl::android::media::audio::common::AudioProfile> getUsbProfiles(
const ::aidl::android::media::audio::common::AudioPort& port) const;
std::optional<struct HdmiParameters> getHdmiParameters(
const ::aidl::android::media::audio::common::AudioDevice&) const;
void initUsecaseOpMap();
public:
constexpr static uint32_t kDefaultOutputSampleRate = 48000;
constexpr static uint32_t kDefaultPCMBidWidth = 16;
constexpr static pal_audio_fmt_t kDefaultPalPCMFormat = PAL_AUDIO_FMT_PCM_S16_LE;
constexpr static int32_t kDefaultLatencyMs = 51;
private:
std::vector<::aidl::android::media::audio::common::AudioDevice> mPrimaryPlaybackDevices{};
std::map<std::string, std::string> mParameters;
card_status_t mSndCardStatus{CARD_STATUS_OFFLINE};
bool mInCallMusicEnabled{false};
bool mIsTranslationRecordEnabled{false};
bool mIsTranslationRxMuteEnabled{false};
bool mIsScreenTurnedOn{false};
uint32_t mWFDProxyChannels{0};
bool mIsUHQAEnabled{false};
bool mIsIPAsProxyConnected{false};
::aidl::android::hardware::audio::core::IModule::ScreenRotation mCurrentScreenRotation{
::aidl::android::hardware::audio::core::IModule::ScreenRotation::DEG_0};
bool mOffloadSpeedSupported = false;
bool mMicMuted = false;
#ifdef SEC_AUDIO_SPK_AMP_MUTE
bool mSpeakerMuted = false;
#endif
#ifdef SEC_AUDIO_COMMON
bool mIsSecAudioFeatureSupported{false};
#endif
/* HDR */
bool mHDREnabled{false};
int32_t mHDRSampleRate{0};
int32_t mHDRChannelCount{0};
bool mWNREnabled{false};
bool mANREnabled{false};
bool mInverted{false};
std::string mOrientation{""};
std::string mFacing{""};
/* HAC enablement*/
bool mIsHACEnabled{false};
std::unordered_map<Usecase, UsecaseOps> mUsecaseOpMap;
std::vector<::aidl::android::media::audio::common::MicrophoneInfo> mMicrophoneInfo;
using PalDevToMicDynamicInfoMap = std::unordered_map<
pal_device_id_t,
std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>>;
PalDevToMicDynamicInfoMap mMicrophoneDynamicInfoMap;
// proxy related info
size_t mProxyRecordFMQSize{0};
std::weak_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
#ifdef SEC_AUDIO_SAMSUNGRECORD
bool mIsLoopBackOff{true};
bool mIsRmsTestMode{false};
#endif
};
} // namespace qti::audio::core

View File

@@ -1,17 +0,0 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
namespace qti::audio::core {
struct PlatformStreamCallback {
virtual ~PlatformStreamCallback() = default;
virtual void onTransferReady() = 0;
virtual void onDrainReady() = 0;
virtual void onError() = 0;
};
} // namespace qti::audio::core

View File

@@ -1,123 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <PalApi.h>
#include <aidl/android/hardware/audio/core/VendorParameter.h>
#include <aidl/android/media/audio/common/AudioPlaybackRate.h>
#include <aidl/qti/audio/core/VString.h>
namespace qti::audio::core {
using ::aidl::android::media::audio::common::AudioProfile;
using ::aidl::android::media::audio::common::AudioChannelLayout;
constexpr size_t getNearestMultiple(size_t num, size_t multiplier) {
size_t remainder = 0;
if (!multiplier) return num;
remainder = num % multiplier;
if (remainder) num += (multiplier - remainder);
return num;
}
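// Worked examples for the helper above: getNearestMultiple(100, 32) rounds up to 128,
// getNearestMultiple(96, 32) stays 96, and a multiplier of 0 returns num unchanged.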
// std::string getStringForVendorParameter(
auto getkvPairsForVendorParameter =
[](const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& param)
-> std::string {
std::string str = "";
std::optional<::aidl::qti::audio::core::VString> parcel;
for (const auto& p : param) {
if (p.ext.getParcelable(&parcel) == STATUS_OK && parcel.has_value()) {
std::string keyvalue = p.id + "=" + parcel.value().value + ";";
str.append(keyvalue);
}
}
return str;
};
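// Illustrative result (the parameter ids are assumptions): two parameters whose VString
// payloads are "true" and "48000" with ids "hdr_record_on" and "sample_rate" would be
// flattened to the string "hdr_record_on=true;sample_rate=48000;".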
auto getBoolValueFromVString = [](
const std::vector<::aidl::android::hardware::audio::core::VendorParameter>& parameters,
const std::string& searchKey) -> std::optional<bool> {
std::optional<::aidl::qti::audio::core::VString> parcel;
for (const auto& p : parameters) {
if (p.id == searchKey && p.ext.getParcelable(&parcel) == STATUS_OK && parcel.has_value()) {
return parcel.value().value == "true";
}
}
return std::nullopt;
};
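// Illustrative usage sketch (the key name and the Platform& named platform are assumptions):
//   if (auto enabled = getBoolValueFromVString(parameters, "hdr_record_on"); enabled.has_value()) {
//       platform.setHDREnabled(*enabled);
//   }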
std::vector<AudioProfile> getSupportedAudioProfiles(pal_param_device_capability_t* capability,
std::string devName);
std::vector<AudioChannelLayout> getChannelMasksFromProfile(
pal_param_device_capability_t* capability);
std::vector<int> getSampleRatesFromProfile(pal_param_device_capability_t* capability);
AudioChannelLayout getChannelIndexMaskFromChannelCount(unsigned int channelCount);
AudioChannelLayout getChannelLayoutMaskFromChannelCount(unsigned int channelCount, int isInput);
void setPalDeviceCustomKey(pal_device& palDevice, const std::string& customKey) noexcept;
std::vector<uint8_t> makePalVolumes(std::vector<float> const& volumes) noexcept;
/*
* Checks whether the given playback rate parameters are valid
*/
bool isValidPlaybackRate(
const ::aidl::android::media::audio::common::AudioPlaybackRate& playbackRate);
/**
* @brief Expects a std::unique_ptr and checks whether it holds an allocation.
* If memory was allocated, the unique_ptr is yielded to the caller;
* otherwise the enclosing function returns with the retValue passed by the caller.
*/
#define VALUE_OR_EXIT(ptr, retValue) \
({ \
auto temp = (ptr); \
if (temp.get() == nullptr) { \
LOG(ERROR) << __func__ << " could not allocate memory "; \
return retValue; \
} \
std::move(temp); \
})
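// Illustrative usage sketch (hypothetical function; -ENOMEM is the caller-chosen
// error value returned on allocation failure):
//   int fillPalParamPayload() {
//       auto payload = VALUE_OR_EXIT(allocate<pal_param_payload>(sizeof(pal_param_payload)), -ENOMEM);
//       // ... populate payload.get() and hand it to PAL ...
//       return 0; // memory released via free() when payload leaves scope
//   }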
/**
* @brief allocator with custom deleter
* Takes a type T and a size in bytes.
* Returns a unique_ptr to memory of the requested size allocated with calloc.
* When the pointer goes out of scope, the memory is deallocated with free.
* The client needs to check whether the returned pointer is null.
* Usage:
* with calloc and free:
* struct pal_param_payload *param_payload = (struct pal_param_payload*)calloc(1,
* sizeof(struct pal_param_payload));
* if (param_payload == NULL) {
* ALOGE("%s: Cannot allocate memory for param_payload\n", __func__);
* return -ENOMEM;
* }
* ....
* free(param_payload);
* Now:
* auto param_payload = VALUE_OR_EXIT(allocate<pal_param_payload>(sizeof(pal_param_payload)), -ENOMEM);
* allocate returns a unique_ptr holding memory of type pal_param_payload
* VALUE_OR_EXIT yields the unique_ptr if the allocation is successful,
* otherwise it exits the enclosing function.
* The custom deleter releases the memory with free when the pointer goes out of scope.
* @param size size to be allocated for type T
* @return unique_ptr of type T with size requested.
*/
using CustomDeletor = void (*)(void*);
template <typename T>
std::unique_ptr<T, CustomDeletor> allocate(int size) {
T* obj = reinterpret_cast<T*>(calloc(1, size));
return std::unique_ptr<T, CustomDeletor>{obj, free};
}
} // namespace qti::audio::core

View File

@@ -1,141 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
// pal device direction for voice
enum {
PAL_RX = 0,
PAL_TX,
PAL_TX_RX,
};
typedef enum {
VOICE_DEVICE_INVALID = -1,
VOICE_DEVICE_ETC = 0,
VOICE_DEVICE_SPEAKER,
VOICE_DEVICE_EARPHONE,
VOICE_DEVICE_BLUETOOTH,
VOICE_DEVICE_RECEIVER,
VOICE_DEVICE_MAX
} voice_device_type;
#ifdef SEC_AUDIO_CALL_HAC
enum {
HAC_MODE_MIC = 0,
HAC_MODE_TCOIL,
HAC_MODE_MAX
};
#endif
#ifdef SEC_AUDIO_CALL_TRANSLATION
enum {
TRANSLATION_MODE_OFF = 0,
TRANSLATION_MODE_HANDSET,
TRANSLATION_MODE_SPEAKER,
TRANSLATION_MODE_EARPHONE,
TRANSLATION_MODE_BLUETOOTH,
};
#endif
enum {
EFFECTS_MICMODE_STANDARD = 0, /* default */
EFFECTS_MICMODE_VOICE_FOCUS = 1,
EFFECTS_MICMODE_ALL_SOUND = 2,
EFFECTS_TRANSLATION = 3,
EFFECTS_MICMODE_DEFAULT = 100,
};
enum {
CALLMEMO_ON = 0x00000001, /* need to enable music device */
CALLMEMO_OFF = 0x00000002,
CALLMEMO_REC = 0x00000010, /* need to disable music mixer path, and keep music device */
CALLMEMO_INIT = 0x10000000,
};
#ifdef SEC_AUDIO_CALL_VOIP
// refer to vendor/qcom/proprietary/mm-audio/ar-acdb/acdbdata/inc/kvh2xml.h
const std::map<uint32_t, uint32_t> getVoipMicMode {
{EFFECTS_MICMODE_STANDARD, 1 /* STANDARD_MODE */},
{EFFECTS_MICMODE_VOICE_FOCUS, 2 /* VOICE_FOCUS_MODE */},
{EFFECTS_MICMODE_ALL_SOUND, 3 /* ALL_SOUND_MODE */},
{EFFECTS_TRANSLATION, 4 /* TRANSLATION_MODE */},
};
const std::map<uint32_t, uint32_t> getVoipSampleRate {
{8000, 0 /* VOIP_SR_NB */},
{16000, 1 /* VOIP_SR_WB */},
{32000, 2 /* VOIP_SR_SWB */},
{48000, 3 /* VOIP_SR_FB */},
};
#endif
#ifdef SEC_AUDIO_ENFORCED_AUDIBLE
enum {
NOT_MUTE = 0,
MUTE_CALL,
MUTE_CALL_AND_REC // For Camcording
};
#endif
#if defined(SEC_AUDIO_DUAL_SPEAKER) || defined(SEC_AUDIO_MULTI_SPEAKER)
// (TODO) check : need to move AudioEffect?
enum {
TOP_UP = 0,
RIGHT_UP,
BUTTOM_UP,
LEFT_UP
};
enum {
FLATMOTION_NOT_FLAT = 0,
FLATMOTION_FLAT
};
#endif
#ifdef SEC_AUDIO_ADAPT_SOUND
#define MAX_DHA_DATA_SIZE 40
enum {
DHA_RESET = 0, /* for CP call stop case, reset mixer as dha off */
DHA_SET, /* for CP call start case, set dha mixer */
DHA_UPDATE /* update dha param, and only call/wfc/vt case set dha mixer */
};
#endif
#ifdef SEC_AUDIO_FMRADIO
// defined in FM.cpp
const static std::string kHandleFM{"handle_fm"};
const static std::string kFMVolume{"fm_volume"};
const static std::string kFMMute{"fm_mute"};
const static std::string kFMRouting{"fm_routing"};
struct fmradio_config_t {
bool on;
audio_devices_t device;
float volume;
bool mute;
};
#endif
#ifdef SEC_AUDIO_SUPPORT_FLIP_CALL
enum {
FOLDER_CLOSE = 0,
FOLDER_OPEN = 1,
FOLDER_FLEX_ON, // fold open/close
FOLDER_FLEX_OFF // fold open
};
#endif

View File

@@ -1,31 +0,0 @@
cc_library_static {
name: "libaudio_microphoneinfo_parser",
vendor: true,
export_include_dirs: ["include"],
srcs: [
"MicrophoneInfoParser.cpp",
],
shared_libs: [
"libbase",
"libcutils",
"liblog",
"libutils",
"libxml2",
"android.media.audio.common.types-V3-ndk",
],
header_libs: [
"libxsdc-utils",
"libaudio_system_headers",
"libarpal_headers",
],
generated_headers: ["libaudio_microphone_info"],
generated_sources: ["libaudio_microphone_info"],
export_generated_headers: ["libaudio_microphone_info"],
cflags: [
"-Wall",
"-Wextra",
"-Werror",
"-Wthread-safety",
],
}

View File

@@ -1,206 +0,0 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_NDEBUG 0
#define LOG_TAG "AHAL_MicrophoneInfoParser_QTI"
#include <android-base/logging.h>
#include <libxml/parser.h>
#include <libxml/xinclude.h>
#include <qti-audio-core/MicrophoneInfoParser.h>
#include <qti-audio-core/MicrophoneInfoTypes.h>
#include <qti_audio_microphone_info.h>
#include <system/audio_config.h>
using ::aidl::android::media::audio::common::AudioDevice;
using ::aidl::android::media::audio::common::AudioDeviceAddress;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::MicrophoneDynamicInfo;
using aidl::android::media::audio::common::MicrophoneInfo;
namespace xsd = ::qti::audio::microphone_info;
using MicrophoneInfoType = xsd::MicrophoneInfoType;
using FrequencyResponseType = xsd::FrequencyResponseType;
using MicrophoneInfoAndDynamicInfo = qti::audio::microphone_info::MicrophoneInfoAndDynamicInfo;
namespace qti::audio::core {
std::vector<::aidl::android::media::audio::common::MicrophoneInfo::FrequencyResponsePoint>
MicrophoneInfoParser::getFrequencyResponse(const FrequencyResponseType *freqResponse) {
std::vector<float> freq;
std::vector<float> response;
if (freqResponse->hasFrequencyHz()) {
freq = freqResponse->getFrequencyHz();
}
if (freqResponse->hasLeveldB()) {
response = freqResponse->getLeveldB();
}
if (freq.size() != response.size()) {
LOG(ERROR) << __func__ << " frequency / response size mismatch " << freq.size() << " "
<< response.size();
return {};
}
std::vector<::aidl::android::media::audio::common::MicrophoneInfo::FrequencyResponsePoint> frps;
for (unsigned long i = 0; i < freq.size(); i++) {
frps.push_back({.frequencyHz = freq[i], .leveldB = response[i]});
}
return frps;
}
static std::optional<AudioDevice> populateDevice(const MicrophoneInfoType &xmlInfo) {
if (!xmlInfo.hasDevice()) {
LOG(ERROR) << __func__ << " device attribute is missing for microphone";
return std::nullopt;
}
auto xsdDevice = xmlInfo.getFirstDevice();
if (!xsdDevice->hasName()) {
LOG(ERROR) << __func__ << " name attribute is missing for device";
return std::nullopt;
}
auto devName = xsdDevice->getName();
if (XsdToAudioDeviceType.count(devName) == 0) {
LOG(ERROR) << __func__ << " unknown device " << toString(devName);
return std::nullopt;
}
AudioDeviceType fwkDevice = XsdToAudioDeviceType.at(devName);
auto address =
xsdDevice->hasAddress()
? AudioDeviceAddress::make<AudioDeviceAddress::Tag::id>(xsdDevice->getAddress())
: AudioDeviceAddress{};
std::string connection = xsdDevice->hasConnection() ? xsdDevice->getConnection() : "";
AudioDevice device = {.type = {.type = fwkDevice, .connection = connection},
.address = address};
return device;
}
void MicrophoneInfoParser::populateMicrophoneInfo(const MicrophoneInfoAndDynamicInfo &mXsdcConfig) {
for (const auto &xmlInfo : mXsdcConfig.getFirstMicrophoneInfoList()->getMicrophoneInfo()) {
::aidl::android::media::audio::common::MicrophoneInfo info;
if (!xmlInfo.hasId()) {
LOG(ERROR) << __func__ << " invalid id attribute";
return;
}
info.id = xmlInfo.getId();
auto dev = populateDevice(xmlInfo);
if (!dev.has_value()) {
LOG(ERROR) << __func__ << " invalid device attribute";
return;
}
info.device = dev.value();
if (xmlInfo.hasLocation()) info.location = XsdToLocationType.at(xmlInfo.getLocation());
if (xmlInfo.hasGroup()) info.group = xmlInfo.getGroup();
if (xmlInfo.hasIndexInTheGroup()) info.indexInTheGroup = xmlInfo.getIndexInTheGroup();
if (xmlInfo.hasSensitivity()) {
auto xsdSensitivity = xmlInfo.getFirstSensitivity();
::aidl::android::media::audio::common::MicrophoneInfo::Sensitivity ss;
ss.leveldBFS = xsdSensitivity->hasLeveldBFS() ? xsdSensitivity->getLeveldBFS() : 0.0f;
ss.maxSpldB = xsdSensitivity->hasMaxSpldB() ? xsdSensitivity->getMaxSpldB() : 0.0f;
ss.minSpldB = xsdSensitivity->hasMinSpldB() ? xsdSensitivity->getMinSpldB() : 0.0f;
info.sensitivity = ss;
}
if (xmlInfo.hasDirectionality()) {
info.directionality = XsdToDirectionalityType.at(xmlInfo.getDirectionality());
}
if (xmlInfo.hasFrequencyResponse()) {
info.frequencyResponse = getFrequencyResponse(xmlInfo.getFirstFrequencyResponse());
} else {
// a frequency response is expected here; missing data may indicate a configuration error, so leave the field unset
}
auto getCordinates = [](auto &xsdCord) {
MicrophoneInfo::Coordinate coordinates;
if (xsdCord->hasX() && xsdCord->hasY() && xsdCord->hasZ()) {
coordinates.x = xsdCord->getX();
coordinates.y = xsdCord->getY();
coordinates.z = xsdCord->getZ();
}
return coordinates;
};
if (auto coords = xmlInfo.getFirstPosition(); xmlInfo.hasPosition() && coords != nullptr) {
info.position = getCordinates(coords);
}
if (auto coords = xmlInfo.getFirstOrientation(); xmlInfo.hasOrientation() && coords != nullptr) {
info.orientation = getCordinates(coords);
}
mInfo.emplace_back(info);
}
}
void MicrophoneInfoParser::populateMicrophoneDynamicInfo(
const MicrophoneInfoAndDynamicInfo &mXsdcConfig) {
auto infoList = mXsdcConfig.getFirstMicrophoneDynamicInfoList()->getMicrophoneDynamicInfo();
for (const auto &xmlInfo : infoList) {
if (!xmlInfo.hasDevice() || !xmlInfo.hasMicInfo()) {
LOG(ERROR) << " invalid device or micInfo attributes";
return;
}
if (XsdToPalDeviceType.count(xmlInfo.getDevice()) == 0) {
LOG(ERROR) << __func__ << " unknown device " << toString(xmlInfo.getDevice());
return;
}
pal_device_id_t palDevice = XsdToPalDeviceType.at(xmlInfo.getDevice());
std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo> infos;
for (const auto &micInfo : xmlInfo.getMicInfo()) {
::aidl::android::media::audio::common::MicrophoneDynamicInfo info;
if (!micInfo.hasId()) {
LOG(ERROR) << " skip micinfo with invalid id";
continue;
}
info.id = micInfo.getId();
if (micInfo.hasChannelMapping()) {
for (const auto channel : micInfo.getChannelMapping()) {
info.channelMapping.push_back(XsdToChannelMap.at(channel));
}
}
infos.push_back(info);
}
mDynamicInfoMap[palDevice] = infos;
}
}
MicrophoneInfoParser::MicrophoneInfoParser(const std::string &fileName) {
auto configFile = android::audio_find_readable_configuration_file(fileName.c_str());
if (configFile == "") {
LOG(WARNING) << __func__ << " file " << fileName << " not found";
return;
}
auto xsdConfig = xsd::read(configFile.c_str());
if (!xsdConfig.has_value()) {
LOG(WARNING) << __func__ << ": could not read the xml";
return;
}
auto mXsdcConfig = xsdConfig.value();
if (!mXsdcConfig.hasMicrophoneInfoList() || !mXsdcConfig.hasMicrophoneDynamicInfoList()) {
LOG(ERROR) << " invalid micInfo or dynamic mic info ";
return;
}
populateMicrophoneInfo(mXsdcConfig);
populateMicrophoneDynamicInfo(mXsdcConfig);
}
} // namespace qti::audio::core

View File

@@ -1,49 +0,0 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <PalDefs.h>
#include <aidl/android/media/audio/common/MicrophoneDynamicInfo.h>
#include <aidl/android/media/audio/common/MicrophoneInfo.h>
#include <unordered_map>
#include <vector>
// Forward declare microphone_info classes
namespace qti::audio::microphone_info {
class FrequencyResponseType;
class MicrophoneInfoAndDynamicInfo;
}
using PalDevToMicDynamicInfoMap = std::unordered_map<
pal_device_id_t, std::vector<::aidl::android::media::audio::common::MicrophoneDynamicInfo>>;
namespace qti::audio::core {
static const std::string kDefaultConfigName{"microphone_characteristics.xml"};
class MicrophoneInfoParser {
public:
MicrophoneInfoParser(const std::string &fileName = kDefaultConfigName);
std::vector<::aidl::android::media::audio::common::MicrophoneInfo> getMicrophoneInfo() {
return mInfo;
}
PalDevToMicDynamicInfoMap getMicrophoneDynamicInfoMap() { return mDynamicInfoMap; }
private:
std::vector<::aidl::android::media::audio::common::MicrophoneInfo> mInfo;
PalDevToMicDynamicInfoMap mDynamicInfoMap;
void populateMicrophoneInfo(
const qti::audio::microphone_info::MicrophoneInfoAndDynamicInfo &xsdcConfig);
void populateMicrophoneDynamicInfo(
const qti::audio::microphone_info::MicrophoneInfoAndDynamicInfo &xsdcConfig);
// qti::audio::microphone_info::MicrophoneInfoAndDynamicInfo mXsdcConfig;
std::vector<::aidl::android::media::audio::common::MicrophoneInfo::FrequencyResponsePoint>
getFrequencyResponse(
const qti::audio::microphone_info::FrequencyResponseType *freqResponse);
};
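// Illustrative usage sketch (variable names are assumptions, not from the original header):
//   MicrophoneInfoParser parser; // parses microphone_characteristics.xml when available
//   const auto micInfos = parser.getMicrophoneInfo();
//   const auto dynamicInfoByPalDevice = parser.getMicrophoneDynamicInfoMap();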
} // namespace qti::audio::core

View File

@@ -1,95 +0,0 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <aidl/android/media/audio/common/AudioDeviceType.h>
#include <qti_audio_microphone_info.h>
#include <unordered_map>
using ::aidl::android::media::audio::common::AudioDeviceType;
using aidl::android::media::audio::common::MicrophoneDynamicInfo;
using aidl::android::media::audio::common::MicrophoneInfo;
namespace xsd = ::qti::audio::microphone_info;
using FrequencyResponseType = xsd::FrequencyResponseType;
using MicrophoneInfoAndDynamicInfo = xsd::MicrophoneInfoAndDynamicInfo;
namespace qti::audio::core {
const static std::unordered_map<xsd::AudioDeviceType, AudioDeviceType> XsdToAudioDeviceType = {
{xsd::AudioDeviceType::IN_AFE_PROXY, AudioDeviceType::IN_AFE_PROXY},
{xsd::AudioDeviceType::IN_DEVICE, AudioDeviceType::IN_DEVICE},
{xsd::AudioDeviceType::IN_ECHO_REFERENCE, AudioDeviceType::IN_ECHO_REFERENCE},
{xsd::AudioDeviceType::IN_FM_TUNER, AudioDeviceType::IN_FM_TUNER},
{xsd::AudioDeviceType::IN_HEADSET, AudioDeviceType::IN_HEADSET},
{xsd::AudioDeviceType::IN_LOOPBACK, AudioDeviceType::IN_LOOPBACK},
{xsd::AudioDeviceType::IN_MICROPHONE, AudioDeviceType::IN_MICROPHONE},
{xsd::AudioDeviceType::IN_MICROPHONE_BACK, AudioDeviceType::IN_MICROPHONE_BACK},
{xsd::AudioDeviceType::IN_SUBMIX, AudioDeviceType::IN_SUBMIX},
{xsd::AudioDeviceType::IN_TELEPHONY_RX, AudioDeviceType::IN_TELEPHONY_RX},
{xsd::AudioDeviceType::IN_TV_TUNER, AudioDeviceType::IN_TV_TUNER},
{xsd::AudioDeviceType::IN_DOCK, AudioDeviceType::IN_DOCK},
};
const static std::unordered_map<xsd::DirectionalityType, MicrophoneInfo::Directionality>
XsdToDirectionalityType = {
{xsd::DirectionalityType::OMNI, MicrophoneInfo::Directionality::OMNI},
{xsd::DirectionalityType::BI_DIRECTIONAL,
MicrophoneInfo::Directionality::BI_DIRECTIONAL},
{xsd::DirectionalityType::CARDIOID, MicrophoneInfo::Directionality::CARDIOID},
{xsd::DirectionalityType::HYPER_CARDIOID,
MicrophoneInfo::Directionality::HYPER_CARDIOID},
{xsd::DirectionalityType::SUPER_CARDIOID,
MicrophoneInfo::Directionality::SUPER_CARDIOID},
};
const static std::unordered_map<xsd::LocationType, MicrophoneInfo::Location> XsdToLocationType = {
{xsd::LocationType::UNKNOWN, MicrophoneInfo::Location::UNKNOWN},
{xsd::LocationType::MAINBODY, MicrophoneInfo::Location::MAINBODY},
{xsd::LocationType::MAINBODY_MOVABLE, MicrophoneInfo::Location::MAINBODY_MOVABLE},
{xsd::LocationType::PERIPHERAL, MicrophoneInfo::Location::PERIPHERAL},
};
const static std::unordered_map<xsd::ChannelMappingType, MicrophoneDynamicInfo::ChannelMapping>
XsdToChannelMap = {
{xsd::ChannelMappingType::UNUSED, MicrophoneDynamicInfo::ChannelMapping::UNUSED},
{xsd::ChannelMappingType::DIRECT, MicrophoneDynamicInfo::ChannelMapping::DIRECT},
{xsd::ChannelMappingType::PROCESSED,
MicrophoneDynamicInfo::ChannelMapping::PROCESSED},
};
const static std::unordered_map<xsd::PalInDevicesType, pal_device_id_t> XsdToPalDeviceType = {
{xsd::PalInDevicesType::PAL_DEVICE_IN_HANDSET_MIC, PAL_DEVICE_IN_HANDSET_MIC},
{xsd::PalInDevicesType::PAL_DEVICE_IN_SPEAKER_MIC, PAL_DEVICE_IN_SPEAKER_MIC},
{xsd::PalInDevicesType::PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET},
{xsd::PalInDevicesType::PAL_DEVICE_IN_WIRED_HEADSET, PAL_DEVICE_IN_WIRED_HEADSET},
{xsd::PalInDevicesType::PAL_DEVICE_IN_AUX_DIGITAL, PAL_DEVICE_IN_AUX_DIGITAL},
{xsd::PalInDevicesType::PAL_DEVICE_IN_HDMI, PAL_DEVICE_IN_HDMI},
{xsd::PalInDevicesType::PAL_DEVICE_IN_USB_ACCESSORY, PAL_DEVICE_IN_USB_ACCESSORY},
{xsd::PalInDevicesType::PAL_DEVICE_IN_USB_DEVICE, PAL_DEVICE_IN_USB_DEVICE},
{xsd::PalInDevicesType::PAL_DEVICE_IN_USB_HEADSET, PAL_DEVICE_IN_USB_HEADSET},
{xsd::PalInDevicesType::PAL_DEVICE_IN_FM_TUNER, PAL_DEVICE_IN_FM_TUNER},
{xsd::PalInDevicesType::PAL_DEVICE_IN_LINE, PAL_DEVICE_IN_LINE},
{xsd::PalInDevicesType::PAL_DEVICE_IN_SPDIF, PAL_DEVICE_IN_SPDIF},
{xsd::PalInDevicesType::PAL_DEVICE_IN_PROXY, PAL_DEVICE_IN_PROXY},
{xsd::PalInDevicesType::PAL_DEVICE_IN_HANDSET_VA_MIC, PAL_DEVICE_IN_HANDSET_VA_MIC},
{xsd::PalInDevicesType::PAL_DEVICE_IN_BLUETOOTH_A2DP, PAL_DEVICE_IN_BLUETOOTH_A2DP},
{xsd::PalInDevicesType::PAL_DEVICE_IN_HEADSET_VA_MIC, PAL_DEVICE_IN_HEADSET_VA_MIC},
{xsd::PalInDevicesType::PAL_DEVICE_IN_VI_FEEDBACK, PAL_DEVICE_IN_VI_FEEDBACK},
{xsd::PalInDevicesType::PAL_DEVICE_IN_TELEPHONY_RX, PAL_DEVICE_IN_TELEPHONY_RX},
{xsd::PalInDevicesType::PAL_DEVICE_IN_ULTRASOUND_MIC, PAL_DEVICE_IN_ULTRASOUND_MIC},
{xsd::PalInDevicesType::PAL_DEVICE_IN_EXT_EC_REF, PAL_DEVICE_IN_EXT_EC_REF},
{xsd::PalInDevicesType::PAL_DEVICE_IN_ECHO_REF, PAL_DEVICE_IN_ECHO_REF},
{xsd::PalInDevicesType::PAL_DEVICE_IN_HAPTICS_VI_FEEDBACK,
PAL_DEVICE_IN_HAPTICS_VI_FEEDBACK},
{xsd::PalInDevicesType::PAL_DEVICE_IN_BLUETOOTH_BLE, PAL_DEVICE_IN_BLUETOOTH_BLE},
{xsd::PalInDevicesType::PAL_DEVICE_IN_CPS_FEEDBACK, PAL_DEVICE_IN_CPS_FEEDBACK},
{xsd::PalInDevicesType::PAL_DEVICE_IN_DUMMY, PAL_DEVICE_IN_DUMMY},
{xsd::PalInDevicesType::PAL_DEVICE_IN_CPS2_FEEDBACK, PAL_DEVICE_IN_CPS2_FEEDBACK},
{xsd::PalInDevicesType::PAL_DEVICE_IN_RECORD_PROXY, PAL_DEVICE_IN_RECORD_PROXY},
};
} // namespace qti::audio::core

View File

@@ -1,7 +0,0 @@
xsd_config {
name: "libaudio_microphone_info",
srcs: ["microphone_info_schema.xsd"],
package_name: "qti.audio.microphone_info",
nullability: true,
}

View File

@@ -1,188 +0,0 @@
// Signature format: 2.0
package qti.audio.microphone_info {
public enum AudioDeviceType {
method @NonNull public String getRawName();
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_AFE_PROXY;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_DEVICE;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_DOCK;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_ECHO_REFERENCE;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_FM_TUNER;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_HEADSET;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_LOOPBACK;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_MICROPHONE;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_MICROPHONE_BACK;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_SUBMIX;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_TELEPHONY_RX;
enum_constant public static final qti.audio.microphone_info.AudioDeviceType IN_TV_TUNER;
}
public enum ChannelMappingType {
method @NonNull public String getRawName();
enum_constant public static final qti.audio.microphone_info.ChannelMappingType DIRECT;
enum_constant public static final qti.audio.microphone_info.ChannelMappingType PROCESSED;
enum_constant public static final qti.audio.microphone_info.ChannelMappingType UNUSED;
}
public class CoordinatesType {
ctor public CoordinatesType();
method @Nullable public float getX();
method @Nullable public float getY();
method @Nullable public float getZ();
method public void setX(@Nullable float);
method public void setY(@Nullable float);
method public void setZ(@Nullable float);
}
public enum DirectionalityType {
method @NonNull public String getRawName();
enum_constant public static final qti.audio.microphone_info.DirectionalityType BI_DIRECTIONAL;
enum_constant public static final qti.audio.microphone_info.DirectionalityType CARDIOID;
enum_constant public static final qti.audio.microphone_info.DirectionalityType HYPER_CARDIOID;
enum_constant public static final qti.audio.microphone_info.DirectionalityType OMNI;
enum_constant public static final qti.audio.microphone_info.DirectionalityType SUPER_CARDIOID;
}
public class FrequencyResponseType {
ctor public FrequencyResponseType();
method @Nullable public java.util.List<java.lang.Float> getFrequencyHz();
method @Nullable public java.util.List<java.lang.Float> getLeveldB();
method public void setFrequencyHz(@Nullable java.util.List<java.lang.Float>);
method public void setLeveldB(@Nullable java.util.List<java.lang.Float>);
}
public enum LocationType {
method @NonNull public String getRawName();
enum_constant public static final qti.audio.microphone_info.LocationType MAINBODY;
enum_constant public static final qti.audio.microphone_info.LocationType MAINBODY_MOVABLE;
enum_constant public static final qti.audio.microphone_info.LocationType PERIPHERAL;
}
public class MicInfoType {
ctor public MicInfoType();
method @Nullable public java.util.List<qti.audio.microphone_info.ChannelMappingType> getChannelMapping();
method @Nullable public String getId();
method public void setChannelMapping(@Nullable java.util.List<qti.audio.microphone_info.ChannelMappingType>);
method public void setId(@Nullable String);
}
public class MicrophoneDynamicInfoListType {
ctor public MicrophoneDynamicInfoListType();
method @Nullable public java.util.List<qti.audio.microphone_info.MicrophoneDynamicInfoType> getMicrophoneDynamicInfo();
}
public class MicrophoneDynamicInfoType {
ctor public MicrophoneDynamicInfoType();
method @Nullable public qti.audio.microphone_info.PalInDevicesType getDevice();
method @Nullable public qti.audio.microphone_info.MicInfoType getMicInfo();
method public void setDevice(@Nullable qti.audio.microphone_info.PalInDevicesType);
method public void setMicInfo(@Nullable qti.audio.microphone_info.MicInfoType);
}
public class MicrophoneInfoAndDynamicInfo {
ctor public MicrophoneInfoAndDynamicInfo();
method @Nullable public qti.audio.microphone_info.MicrophoneDynamicInfoListType getMicrophoneDynamicInfoList();
method @Nullable public qti.audio.microphone_info.MicrophoneInfoListType getMicrophoneInfoList();
method public void setMicrophoneDynamicInfoList(@Nullable qti.audio.microphone_info.MicrophoneDynamicInfoListType);
method public void setMicrophoneInfoList(@Nullable qti.audio.microphone_info.MicrophoneInfoListType);
}
public class MicrophoneInfoListType {
ctor public MicrophoneInfoListType();
method @Nullable public java.util.List<qti.audio.microphone_info.MicrophoneInfoType> getMicrophoneInfo();
}
public class MicrophoneInfoType {
ctor public MicrophoneInfoType();
method @Nullable public qti.audio.microphone_info.MicrophoneInfoType.Device getDevice();
method @Nullable public qti.audio.microphone_info.DirectionalityType getDirectionality();
method @Nullable public qti.audio.microphone_info.FrequencyResponseType getFrequencyResponse();
method @Nullable public int getGroup();
method @Nullable public String getId();
method @Nullable public int getIndexInTheGroup();
method @Nullable public qti.audio.microphone_info.LocationType getLocation();
method @Nullable public qti.audio.microphone_info.CoordinatesType getOrientation();
method @Nullable public qti.audio.microphone_info.CoordinatesType getPosition();
method @Nullable public qti.audio.microphone_info.MicrophoneInfoType.Sensitivity getSensitivity();
method public void setDevice(@Nullable qti.audio.microphone_info.MicrophoneInfoType.Device);
method public void setDirectionality(@Nullable qti.audio.microphone_info.DirectionalityType);
method public void setFrequencyResponse(@Nullable qti.audio.microphone_info.FrequencyResponseType);
method public void setGroup(@Nullable int);
method public void setId(@Nullable String);
method public void setIndexInTheGroup(@Nullable int);
method public void setLocation(@Nullable qti.audio.microphone_info.LocationType);
method public void setOrientation(@Nullable qti.audio.microphone_info.CoordinatesType);
method public void setPosition(@Nullable qti.audio.microphone_info.CoordinatesType);
method public void setSensitivity(@Nullable qti.audio.microphone_info.MicrophoneInfoType.Sensitivity);
}
public static class MicrophoneInfoType.Device {
ctor public MicrophoneInfoType.Device();
method @Nullable public String getAddress();
method @Nullable public String getConnection();
method @Nullable public qti.audio.microphone_info.AudioDeviceType getName();
method public void setAddress(@Nullable String);
method public void setConnection(@Nullable String);
method public void setName(@Nullable qti.audio.microphone_info.AudioDeviceType);
}
public static class MicrophoneInfoType.Sensitivity {
ctor public MicrophoneInfoType.Sensitivity();
method @Nullable public float getLeveldBFS();
method @Nullable public float getMaxSpldB();
method @Nullable public float getMinSpldB();
method public void setLeveldBFS(@Nullable float);
method public void setMaxSpldB(@Nullable float);
method public void setMinSpldB(@Nullable float);
}
public enum PalInDevicesType {
method @NonNull public String getRawName();
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_AUX_DIGITAL;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_BLUETOOTH_A2DP;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_BLUETOOTH_BLE;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_CPS2_FEEDBACK;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_CPS_FEEDBACK;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_DUMMY;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_ECHO_REF;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_EXT_EC_REF;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_FM_TUNER;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_HANDSET_MIC;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_HANDSET_VA_MIC;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_HAPTICS_VI_FEEDBACK;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_HDMI;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_HEADSET_VA_MIC;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_LINE;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_PROXY;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_RECORD_PROXY;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_SPDIF;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_SPEAKER_MIC;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_TELEPHONY_RX;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_ULTRASOUND_MIC;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_USB_ACCESSORY;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_USB_DEVICE;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_USB_HEADSET;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_VI_FEEDBACK;
enum_constant public static final qti.audio.microphone_info.PalInDevicesType PAL_DEVICE_IN_WIRED_HEADSET;
}
public class SensitivityType {
ctor public SensitivityType();
method @Nullable public float getLeveldBFS();
method @Nullable public float getMaxSpldB();
method @Nullable public float getMinSpldB();
method public void setLeveldBFS(@Nullable float);
method public void setMaxSpldB(@Nullable float);
method public void setMinSpldB(@Nullable float);
}
public class XmlParser {
ctor public XmlParser();
method @Nullable public static qti.audio.microphone_info.MicrophoneInfoAndDynamicInfo read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
}
}

View File

@@ -1,177 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
SPDX-License-Identifier: BSD-3-Clause-Clear
-->
<xs:schema version="2.0" elementFormDefault="qualified" attributeFormDefault="unqualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:element name="MicrophoneInfoAndDynamicInfo">
<xs:complexType>
<xs:sequence>
<xs:element name="MicrophoneInfoList" type="MicrophoneInfoListType"/>
<xs:element name="MicrophoneDynamicInfoList" type="MicrophoneDynamicInfoListType"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:complexType name="MicrophoneInfoListType">
<xs:sequence>
<xs:element name="MicrophoneInfo" type="MicrophoneInfoType" maxOccurs="unbounded" />
</xs:sequence>
</xs:complexType>
<xs:complexType name="MicrophoneDynamicInfoListType">
<xs:sequence>
<xs:element name="MicrophoneDynamicInfo" type="MicrophoneDynamicInfoType" maxOccurs="unbounded" />
</xs:sequence>
</xs:complexType>
<xs:complexType name="MicrophoneInfoType">
<xs:sequence>
<xs:element name="id" type="xs:token"/>
<xs:element name="device">
<xs:complexType>
<xs:attribute name="address" type="xs:token" use="required" />
<xs:attribute name="connection" type="xs:token" use="optional" default="" />
<xs:attribute name="name" type="AudioDeviceType" use="required" />
</xs:complexType>
</xs:element>
<xs:element name="location" type="LocationType" minOccurs="0"/>
<xs:element name="group" type="xs:int" minOccurs="0" />
<xs:element name="indexInTheGroup" type="xs:int" minOccurs="0"/>
<xs:element name="sensitivity">
<xs:complexType>
<xs:attribute name="leveldBFS" type="xs:float" use="required" />
<xs:attribute name="maxSpldB" type="xs:float" use="required" />
<xs:attribute name="minSpldB" type="xs:float" use="required" />
</xs:complexType>
</xs:element>
<xs:element name="directionality" type="DirectionalityType" />
<xs:element name="frequencyResponse" type="frequencyResponseType" maxOccurs="1"/>
<xs:element name="position" type="coordinatesType" minOccurs="0" maxOccurs="1" />
<xs:element name="orientation" type="coordinatesType" minOccurs="0" maxOccurs="1" />
</xs:sequence>
</xs:complexType>
<xs:complexType name="MicrophoneDynamicInfoType">
<xs:sequence>
<xs:element name="device" type="PalInDevicesType"/>
<xs:element name="MicInfo" type="MicInfoType"/>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="AudioDeviceType">
<xs:restriction base="xs:string">
<xs:enumeration value="IN_MICROPHONE" />
<xs:enumeration value="IN_MICROPHONE_BACK" />
<xs:enumeration value="IN_AFE_PROXY" />
<xs:enumeration value="IN_DEVICE" />
<xs:enumeration value="IN_ECHO_REFERENCE" />
<xs:enumeration value="IN_FM_TUNER" />
<xs:enumeration value="IN_HEADSET" />
<xs:enumeration value="IN_LOOPBACK" />
<xs:enumeration value="IN_SUBMIX" />
<xs:enumeration value="IN_TELEPHONY_RX" />
<xs:enumeration value="IN_TV_TUNER" />
<xs:enumeration value="IN_DOCK" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="PalInDevicesType">
<xs:restriction base="xs:string">
<xs:enumeration value="PAL_DEVICE_IN_HANDSET_MIC" />
<xs:enumeration value="PAL_DEVICE_IN_SPEAKER_MIC" />
<xs:enumeration value="PAL_DEVICE_IN_BLUETOOTH_SCO_HEADSET" />
<xs:enumeration value="PAL_DEVICE_IN_WIRED_HEADSET" />
<xs:enumeration value="PAL_DEVICE_IN_AUX_DIGITAL" />
<xs:enumeration value="PAL_DEVICE_IN_HDMI" />
<xs:enumeration value="PAL_DEVICE_IN_USB_ACCESSORY" />
<xs:enumeration value="PAL_DEVICE_IN_USB_DEVICE" />
<xs:enumeration value="PAL_DEVICE_IN_USB_HEADSET" />
<xs:enumeration value="PAL_DEVICE_IN_FM_TUNER" />
<xs:enumeration value="PAL_DEVICE_IN_LINE" />
<xs:enumeration value="PAL_DEVICE_IN_SPDIF" />
<xs:enumeration value="PAL_DEVICE_IN_PROXY" />
<xs:enumeration value="PAL_DEVICE_IN_HANDSET_VA_MIC" />
<xs:enumeration value="PAL_DEVICE_IN_BLUETOOTH_A2DP" />
<xs:enumeration value="PAL_DEVICE_IN_HEADSET_VA_MIC" />
<xs:enumeration value="PAL_DEVICE_IN_VI_FEEDBACK" />
<xs:enumeration value="PAL_DEVICE_IN_TELEPHONY_RX" />
<xs:enumeration value="PAL_DEVICE_IN_ULTRASOUND_MIC" />
<xs:enumeration value="PAL_DEVICE_IN_EXT_EC_REF" />
<xs:enumeration value="PAL_DEVICE_IN_ECHO_REF" />
<xs:enumeration value="PAL_DEVICE_IN_HAPTICS_VI_FEEDBACK" />
<xs:enumeration value="PAL_DEVICE_IN_BLUETOOTH_BLE" />
<xs:enumeration value="PAL_DEVICE_IN_CPS_FEEDBACK" />
<xs:enumeration value="PAL_DEVICE_IN_DUMMY" />
<xs:enumeration value="PAL_DEVICE_IN_CPS2_FEEDBACK" />
<xs:enumeration value="PAL_DEVICE_IN_RECORD_PROXY" />
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="LocationType">
<xs:restriction base="xs:string">
<xs:enumeration value="MAINBODY" />
<xs:enumeration value="MAINBODY_MOVABLE" />
<xs:enumeration value="PERIPHERAL" />
</xs:restriction>
</xs:simpleType>
<xs:complexType name="sensitivityType">
<xs:sequence>
<xs:element name="leveldBFS" type="xs:float"/>
<xs:element name="maxSpldB" type="xs:float"/>
<xs:element name="minSpldB" type="xs:float"/>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="DirectionalityType">
<xs:restriction base="xs:string">
<xs:enumeration value="OMNI" />
<xs:enumeration value="BI_DIRECTIONAL" />
<xs:enumeration value="CARDIOID" />
<xs:enumeration value="HYPER_CARDIOID" />
<xs:enumeration value="SUPER_CARDIOID" />
</xs:restriction>
</xs:simpleType>
<xs:complexType name="frequencyResponseType">
<xs:sequence>
<xs:element name="frequencyHz" type="frequencyHzListType"/>
<xs:element name="leveldB" type="leveldBListType"/>
</xs:sequence>
</xs:complexType>
<xs:simpleType name="frequencyHzListType">
<xs:list itemType="xs:float" />
</xs:simpleType>
<xs:simpleType name="leveldBListType">
<xs:list itemType="xs:float" />
</xs:simpleType>
<xs:complexType name="coordinatesType">
<xs:attribute name="x" type="xs:float" use="required" />
<xs:attribute name="y" type="xs:float" use="required" />
<xs:attribute name="z" type="xs:float" use="required" />
</xs:complexType>
<xs:complexType name="MicInfoType">
<xs:attribute name="id" type="xs:token" use="required"/>
<xs:attribute name="channelMapping" type="channelMappingListType" use="required"/>
</xs:complexType>
<xs:simpleType name="channelMappingListType">
<xs:list itemType="ChannelMappingType" />
</xs:simpleType>
<xs:simpleType name="ChannelMappingType">
<xs:restriction base="xs:string">
<xs:enumeration value="UNUSED" />
<xs:enumeration value="DIRECT" />
<xs:enumeration value="PROCESSED" />
</xs:restriction>
</xs:simpleType>
</xs:schema>

View File

@@ -1,40 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Recommended to contain only utilities for Audio types
# defined by AOSP
LOCAL_MODULE := libaudiohalutils.qti
LOCAL_MODULE_OWNER := qti
LOCAL_MODULE_TAGS := optional
LOCAL_VENDOR_MODULE := true
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
LOCAL_EXPORT_C_INCLUDE_DIRS := \
$(LOCAL_PATH)/include
LOCAL_SRC_FILES := \
Utils.cpp
LOCAL_HEADER_LIBRARIES := \
libaudio_system_headers
LOCAL_SHARED_LIBRARIES := \
libbase \
libutils \
libaudioutils \
android.media.audio.common.types-V3-ndk \
android.hardware.audio.core-V2-ndk \
qti-audio-types-aidl-V1-ndk
LOCAL_CFLAGS := \
-DBACKEND_NDK \
-Wall \
-Wextra \
-Werror \
-Wthread-safety
include $(BUILD_STATIC_LIBRARY)

View File

@@ -1,456 +0,0 @@
/*
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_Utils_QTI"
#include <aidl/android/media/audio/common/AudioInputFlags.h>
#include <aidl/android/media/audio/common/AudioOutputFlags.h>
#include <android-base/logging.h>
#include <audio_utils/format.h>
#include <qti-audio-core/Utils.h>
using ::aidl::android::media::audio::common::AudioDevice;
using ::aidl::android::media::audio::common::AudioDeviceDescription;
using ::aidl::android::media::audio::common::AudioDeviceType;
using ::aidl::android::media::audio::common::AudioDeviceAddress;
using ::aidl::android::media::audio::common::AudioPortConfig;
using ::aidl::android::media::audio::common::AudioPortExt;
using ::aidl::android::media::audio::common::AudioIoFlags;
using ::aidl::android::media::audio::common::AudioInputFlags;
using ::aidl::android::media::audio::common::AudioOutputFlags;
using ::aidl::android::media::audio::common::AudioPortExt;
using ::aidl::android::media::audio::common::AudioSource;
using ::aidl::android::media::audio::common::AudioPortMixExtUseCase;
using ::aidl::android::hardware::audio::core::VendorParameter;
using ::aidl::qti::audio::core::VString;
namespace qti::audio::core {
BufferFormatConverter::BufferFormatConverter(audio_format_t inFormat, audio_format_t outFormat,
size_t bufSize) {
mInFormat = inFormat;
mOutFormat = outFormat;
mAllocSize = bufSize;
mInBytesPerSample = audio_bytes_per_sample(inFormat);
mOutBytesPerSample = audio_bytes_per_sample(outFormat);
mBuffer = std::make_unique<uint8_t[]>(mAllocSize);
if (!mBuffer) {
LOG(ERROR) << __func__ << " failed to init convert buffer";
// set alloc size to 0 so convert() won't operate
mAllocSize = 0;
}
LOG(VERBOSE) << __func__ << "inFormat " << inFormat << " outFormat " << mOutFormat
<< " inBytesPerSample " << mInBytesPerSample << " outBytesPerSample "
<< mOutBytesPerSample << " size " << mAllocSize;
}
std::optional<std::pair<uint8_t*, size_t>> BufferFormatConverter::convert(const void* buffer,
size_t bytes) {
if (bytes > mAllocSize) {
LOG(ERROR) << " Error writing" << bytes << " to convertBuffer of capacity " << mAllocSize;
return std::nullopt;
}
size_t frames = bytes / mInBytesPerSample;
memcpy_by_audio_format(mBuffer.get(), mOutFormat, buffer, mInFormat, frames);
uint8_t* outBuffer = reinterpret_cast<uint8_t*>(mBuffer.get());
return std::make_pair(outBuffer, frames * mOutBytesPerSample);
}
bool isMixPortConfig(const AudioPortConfig& audioPortConfig) noexcept {
return audioPortConfig.ext.getTag() == AudioPortExt::Tag::mix;
};
bool isInputMixPortConfig(const AudioPortConfig& audioPortConfig) noexcept {
return isMixPortConfig(audioPortConfig) && audioPortConfig.flags &&
audioPortConfig.flags.value().getTag() == AudioIoFlags::Tag::input;
}
bool isDevicePortConfig(const AudioPortConfig& audioPortConfig) noexcept {
return audioPortConfig.ext.getTag() == AudioPortExt::Tag::device;
};
bool isOutputAudioDevice(const AudioDevice& device) noexcept {
if (device.type.type >= AudioDeviceType::OUT_DEFAULT) {
return true;
}
return false;
}
bool isTelephonyRXDevice(const AudioDevice& device) noexcept {
return device.type.type == AudioDeviceType::IN_TELEPHONY_RX;
};
bool isTelephonyTXDevice(const AudioDevice& device) noexcept {
return device.type.type == AudioDeviceType::OUT_TELEPHONY_TX;
};
bool isBluetoothSCODevice(const AudioDevice& device) noexcept {
return (device.type.connection == AudioDeviceDescription::CONNECTION_BT_SCO);
}
bool isBluetoothLEDevice(const AudioDevice& device) noexcept {
return (device.type.connection == AudioDeviceDescription::CONNECTION_BT_LE);
}
bool isBluetoothLETXDevice(const AudioDevice& device) noexcept {
return (device.type.type == AudioDeviceType::IN_HEADSET &&
device.type.connection == AudioDeviceDescription::CONNECTION_BT_LE);
}
bool isBluetoothDevice(const AudioDevice& device) noexcept {
return (device.type.connection == AudioDeviceDescription::CONNECTION_BT_A2DP ||
device.type.connection == AudioDeviceDescription::CONNECTION_BT_LE);
}
bool hasBluetoothDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isBluetoothDevice);
return itr != devices.cend();
}
bool hasBluetoothSCODevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isBluetoothSCODevice);
return itr != devices.cend();
}
bool isBluetoothA2dpDevice(const AudioDevice& device) noexcept {
return (device.type.connection == AudioDeviceDescription::CONNECTION_BT_A2DP);
}
bool isBluetoothA2dpTXDevice(const AudioDevice& device) noexcept {
return (device.type.type == AudioDeviceType::IN_DEVICE &&
device.type.connection == AudioDeviceDescription::CONNECTION_BT_A2DP);
}
bool hasBluetoothLEDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isBluetoothLEDevice);
return itr != devices.cend();
}
bool hasBluetoothA2dpDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isBluetoothA2dpDevice);
return itr != devices.cend();
}
bool hasInputMMapFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::input) {
constexpr auto inputMMapFlag = static_cast<int32_t>(
1 << static_cast<int32_t>(AudioInputFlags::MMAP_NOIRQ));
return ((inputMMapFlag & ioFlags.get<AudioIoFlags::Tag::input>()) != 0);
}
return false;
}
bool hasOutputMMapFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto outputMMapFlag = static_cast<int32_t>(
1 << static_cast<int32_t>(AudioOutputFlags::MMAP_NOIRQ));
return ((outputMMapFlag & ioFlags.get<AudioIoFlags::Tag::output>()) !=
0);
}
return false;
}
bool hasMMapFlagsEnabled(const AudioIoFlags& ioFlags) noexcept {
return (hasInputMMapFlag(ioFlags) || hasOutputMMapFlag(ioFlags));
}
bool isInputAFEProxyDevice(const AudioDevice& device) noexcept {
return device.type.type == AudioDeviceType::IN_AFE_PROXY;
}
bool isIPDevice(const AudioDevice& d) noexcept {
return isIPInDevice(d) || isIPOutDevice(d);
}
bool isIPInDevice(const AudioDevice& d) noexcept {
if(d.type.type == AudioDeviceType::IN_DEVICE &&
d.type.connection == AudioDeviceDescription::CONNECTION_IP_V4) {
return true;
}
return false;
}
bool isIPOutDevice(const AudioDevice& d) noexcept {
if(d.type.type == AudioDeviceType::OUT_DEVICE &&
d.type.connection == AudioDeviceDescription::CONNECTION_IP_V4) {
return true;
}
return false;
}
bool isOutputSpeakerEarpiece(const AudioDevice& d) noexcept {
if (d.type.type == AudioDeviceType::OUT_SPEAKER_EARPIECE) {
return true;
}
return false;
}
bool hasOutputSpeakerEarpiece(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isOutputSpeakerEarpiece);
return itr != devices.cend();
}
bool isHdmiDevice(const AudioDevice& d) noexcept {
if (d.type.connection == AudioDeviceDescription::CONNECTION_HDMI) {
return true;
}
return false;
}
bool isOutputDevice(const AudioDevice& d) noexcept {
if (d.type.type >= AudioDeviceType::OUT_DEFAULT) {
return true;
}
return false;
}
bool isInputDevice(const AudioDevice& d) noexcept {
if (d.type.type < AudioDeviceType::OUT_DEFAULT) {
return true;
}
return false;
}
bool isValidAlsaAddr(const std::vector<int>& alsaAddress) noexcept {
if (alsaAddress.size() != 2 || alsaAddress[0] < 0 || alsaAddress[1] < 0) {
LOG(ERROR) << __func__
<< ": malformed alsa address: "
<< ::android::internal::ToString(alsaAddress);
return false;
}
return true;
}
bool isUsbDevice(const AudioDevice& d) noexcept {
if (d.type.connection == AudioDeviceDescription::CONNECTION_USB) {
return true;
}
return false;
}
bool hasOutputDirectFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto directFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::DIRECT));
return ((directFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
bool hasInputRawFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::input) {
constexpr auto rawFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioInputFlags::RAW));
return ((rawFlag & ioFlags.get<AudioIoFlags::Tag::input>()) != 0);
}
return false;
}
bool hasOutputRawFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto rawFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::RAW));
return ((rawFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
bool hasOutputVoipRxFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto voipRxFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::VOIP_RX));
return ((voipRxFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
bool hasOutputDeepBufferFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto DeepBufferRxFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::DEEP_BUFFER));
return ((DeepBufferRxFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
bool hasOutputCompressOffloadFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto compressOffloadFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::COMPRESS_OFFLOAD));
return ((compressOffloadFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
#ifdef SEC_AUDIO_SAMSUNGRECORD
bool hasInputRemoteMicFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::input) {
constexpr auto remoteMicFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioInputFlags::REMOTE_MIC));
return ((remoteMicFlag & ioFlags.get<AudioIoFlags::Tag::input>()) != 0);
}
return false;
}
bool hasInputDirectFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::input) {
constexpr auto directFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioInputFlags::DIRECT));
return ((directFlag & ioFlags.get<AudioIoFlags::Tag::input>()) != 0);
}
return false;
}
bool hasInputFastFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::input) {
constexpr auto fastFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioInputFlags::FAST));
return ((fastFlag & ioFlags.get<AudioIoFlags::Tag::input>()) != 0);
}
return false;
}
#endif
#ifdef SEC_AUDIO_CALL
bool hasOutputPrimaryFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto primaryFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::PRIMARY));
return ((primaryFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
#endif
#ifdef SEC_AUDIO_SUPPORT_LOWLATENCY_MEDIA
bool hasOutputFastMediaFlag(const AudioIoFlags& ioFlags) noexcept {
if (ioFlags.getTag() == AudioIoFlags::Tag::output) {
constexpr auto fastmediaFlag =
static_cast<int32_t>(1 << static_cast<int32_t>(AudioOutputFlags::MEDIA));
return ((fastmediaFlag & ioFlags.get<AudioIoFlags::Tag::output>()) != 0);
}
return false;
}
#endif
#ifdef SEC_AUDIO_COMMON
bool isNoneDevice(const AudioDevice& device) noexcept {
return device.type.type == AudioDeviceType::NONE;
}
bool hasNoneDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isNoneDevice);
return itr != devices.cend();
}
bool isSpeakerDevice(const AudioDevice& device) noexcept {
return !isBluetoothDevice(device)
&& (device.type.type == AudioDeviceType::OUT_SPEAKER);
}
bool hasSpeakerDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isSpeakerDevice);
return itr != devices.cend();
}
bool isUsbHeadsetDevice(const AudioDevice& device) noexcept {
return (device.type.type == AudioDeviceType::OUT_HEADSET &&
device.type.connection == AudioDeviceDescription::CONNECTION_USB);
}
bool hasUsbHeadsetDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isUsbHeadsetDevice);
return itr != devices.cend();
}
bool isHdmiOutputDevice(const AudioDevice& device) noexcept {
return (device.type.connection == AudioDeviceDescription::CONNECTION_HDMI &&
device.type.type == AudioDeviceType::OUT_DEVICE);
}
bool hasHdmiOutputDevice(const std::vector<AudioDevice>& devices) noexcept {
auto itr = std::find_if(devices.cbegin(), devices.cend(), isHdmiOutputDevice);
return itr != devices.cend();
}
#endif
std::optional<AudioSource> getAudioSource(const AudioPortConfig& mixPortconfig) noexcept {
if (mixPortconfig.ext.getTag() != AudioPortExt::Tag::mix) {
LOG(ERROR) << __func__ << ": not a mix port, " << mixPortconfig.toString();
return std::nullopt;
}
if (mixPortconfig.ext.get<AudioPortExt::Tag::mix>().usecase.getTag() !=
AudioPortMixExtUseCase::Tag::source) {
LOG(ERROR) << __func__ << ": no source provided, " << mixPortconfig.toString();
return std::nullopt;
}
return mixPortconfig.ext.get<AudioPortExt::Tag::mix>()
.usecase.get<AudioPortMixExtUseCase::Tag::source>();
}
std::optional<int32_t> getSampleRate(const AudioPortConfig& portConfig) noexcept {
if (portConfig.sampleRate) {
return portConfig.sampleRate.value().value;
}
LOG(ERROR) << __func__ << ": no sample rate in port config " << portConfig.toString();
return std::nullopt;
}
std::vector<int32_t> getActiveInputMixPortConfigIds(
const std::vector<AudioPortConfig>& activePortConfigs) {
std::vector<int32_t> result;
for (const auto& activePortConfig : activePortConfigs) {
if (isInputMixPortConfig(activePortConfig)) {
result.emplace_back(activePortConfig.id);
}
}
return result;
}
int64_t getInt64FromString(const std::string& s) noexcept {
// TODO: handle the case where the string actually encodes 0 (strtol also returns 0 on failure)
return static_cast<int64_t>(strtol(s.c_str(), nullptr, 10));
}
float getFloatFromString(const std::string& s) noexcept {
// TODO: handle the case where the string actually encodes 0 (strtof also returns 0 on failure)
return strtof(s.c_str(), nullptr);
}
bool getBoolFromString(const std::string& s) noexcept {
#ifdef SEC_AUDIO_COMMON
return (s == "true" || s == "on");
#else
return (s == "true");
#endif
}
bool setParameter(const VString& parcel, VendorParameter& parameter) noexcept {
if (parameter.ext.setParcelable(parcel) != android::OK) {
LOG(ERROR) << __func__ << ": failed to set parcel for " << parameter.id;
return false;
}
return true;
}
VendorParameter makeVendorParameter(const std::string& id, const std::string& value) {
VString parcel;
parcel.value = value;
VendorParameter param;
param.id = id;
if (param.ext.setParcelable(parcel) != android::OK) {
LOG(ERROR) << __func__ << ": failed to set parcel for " << param.id;
}
return param;
}
std::string makeParamValue(bool const& isTrue) noexcept {
return isTrue ? "true" : "false";
}
} // namespace qti::audio::core

View File

@@ -1,319 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <aidl/android/hardware/audio/core/VendorParameter.h>
#include <aidl/android/media/audio/common/AudioDevice.h>
#include <aidl/android/media/audio/common/AudioPortConfig.h>
#include <aidl/qti/audio/core/VString.h>
#include <system/audio.h>
#include <algorithm>
#include <map>
#include <numeric>
#include <set>
#include <vector>
namespace qti::audio::core {
/*
 * Helper class used by streams when the target audio format differs
 * from the input audio format. This can happen if underlying layers don't support certain
 * formats; in that case, the unsupported format is converted to a supported format
 * using an auxiliary buffer.
 */
class BufferFormatConverter {
public:
BufferFormatConverter(audio_format_t inFormat, audio_format_t outFormat, size_t bufSize);
~BufferFormatConverter() = default;
/*
 * @brief Converts the input buffer from the input format to the output format.
 * @param buffer buffer to be converted.
 * @param bytes total number of bytes in the buffer.
 * @return pointer to the converted buffer and its size in bytes on success;
 *         std::nullopt when bytes exceeds the size allocated during setup.
 */
std::optional<std::pair<uint8_t*, size_t>> convert(const void* buffer, size_t bytes);
size_t getInputBytesPerSample() { return mInBytesPerSample; }
size_t getOutputBytesPerSample() { return mOutBytesPerSample; }
private:
audio_format_t mInFormat = AUDIO_FORMAT_PCM_16_BIT;
audio_format_t mOutFormat = AUDIO_FORMAT_PCM_16_BIT;
std::unique_ptr<uint8_t[]> mBuffer{nullptr};
size_t mAllocSize;
size_t mOutBytesPerSample;
size_t mInBytesPerSample;
// Disallow copy and move assignments / constructors.
BufferFormatConverter(const BufferFormatConverter&) = delete;
BufferFormatConverter& operator=(const BufferFormatConverter&) = delete;
BufferFormatConverter& operator=(BufferFormatConverter&& other) = delete;
BufferFormatConverter(BufferFormatConverter&& other) = delete;
};
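// Illustrative usage sketch (not part of the original header); the format pair and the
// kBurstSizeBytes name below are hypothetical, and bufSize must cover the converted data
// for the larger of the two formats:
//
//   BufferFormatConverter converter(AUDIO_FORMAT_PCM_FLOAT, AUDIO_FORMAT_PCM_16_BIT,
//                                   kBurstSizeBytes);
//   if (auto converted = converter.convert(floatBuffer, floatBytes)) {
//       auto [outPtr, outBytes] = *converted;  // outPtr holds 16-bit PCM, outBytes its size
//   }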
bool isMixPortConfig(const ::aidl::android::media::audio::common::AudioPortConfig&) noexcept;
bool isInputMixPortConfig(const ::aidl::android::media::audio::common::AudioPortConfig&) noexcept;
bool isDevicePortConfig(const ::aidl::android::media::audio::common::AudioPortConfig&) noexcept;
bool isOutputAudioDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isTelephonyRXDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isTelephonyTXDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isBluetoothSCODevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isBluetoothLEDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isBluetoothLETXDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isBluetoothDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasBluetoothDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool hasBluetoothSCODevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool isBluetoothA2dpDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isBluetoothA2dpTXDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasBluetoothLEDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool hasBluetoothA2dpDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool hasInputMMapFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasOutputMMapFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasMMapFlagsEnabled(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool isInputAFEProxyDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isIPDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isIPInDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isIPOutDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isOutputSpeakerEarpiece(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasOutputSpeakerEarpiece(
const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool isHdmiDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isUsbDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isValidAlsaAddr(const std::vector<int>& alsaAddress) noexcept;
bool isInputDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool isOutputDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasOutputDirectFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasOutputRawFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasInputRawFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasOutputVoipRxFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasOutputDeepBufferFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasOutputCompressOffloadFlag(
const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
#ifdef SEC_AUDIO_SAMSUNGRECORD
bool hasInputRemoteMicFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasInputDirectFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
bool hasInputFastFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
#endif
#ifdef SEC_AUDIO_CALL
bool hasOutputPrimaryFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
#endif
#ifdef SEC_AUDIO_SUPPORT_LOWLATENCY_MEDIA
bool hasOutputFastMediaFlag(const ::aidl::android::media::audio::common::AudioIoFlags&) noexcept;
#endif
#ifdef SEC_AUDIO_COMMON
bool isNoneDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasNoneDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool isSpeakerDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasSpeakerDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool isUsbHeadsetDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasUsbHeadsetDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
bool isHdmiOutputDevice(const ::aidl::android::media::audio::common::AudioDevice&) noexcept;
bool hasHdmiOutputDevice(const std::vector<::aidl::android::media::audio::common::AudioDevice>&) noexcept;
#endif
std::optional<aidl::android::media::audio::common::AudioSource> getAudioSource(
const ::aidl::android::media::audio::common::AudioPortConfig&) noexcept;
std::optional<int32_t> getSampleRate(
const ::aidl::android::media::audio::common::AudioPortConfig&) noexcept;
std::vector<int32_t> getActiveInputMixPortConfigIds(
const std::vector<::aidl::android::media::audio::common::AudioPortConfig>&
activePortConfigs);
template <class T>
std::ostream& operator<<(std::ostream& os, const std::vector<T>& list) noexcept {
os << std::accumulate(list.cbegin(), list.cend(), std::string(""),
[](auto&& prev, const auto& l) {
return std::move(prev.append(",").append(l.toString()));
});
return os;
}
template <class T>
bool operator==(const std::vector<T>& left, const std::vector<T>& right) noexcept {
if (left.size() != right.size()) {
return false;
}
return std::equal(left.cbegin(), left.cend(), right.cbegin());
}
int64_t getInt64FromString(const std::string& s) noexcept;
float getFloatFromString(const std::string& s) noexcept;
bool getBoolFromString(const std::string& s) noexcept;
bool setParameter(const ::aidl::qti::audio::core::VString& parcel,
::aidl::android::hardware::audio::core::VendorParameter& parameter) noexcept;
template <typename W>
bool extractParameter(const ::aidl::android::hardware::audio::core::VendorParameter& p,
decltype(W::value)* v) {
std::optional<W> value;
int32_t result = p.ext.getParcelable(&value);
if (result == 0 && value.has_value()) {
*v = value.value().value;
return true;
}
return false;
}
// Return whether all the elements in the vector are unique.
template <typename T>
bool all_unique(const std::vector<T>& v) {
return std::set<T>(v.begin(), v.end()).size() == v.size();
}
// Erase all the specified elements from a map.
template <typename C, typename V>
auto erase_all(C& c, const V& keys) {
auto oldSize = c.size();
for (auto& k : keys) {
c.erase(k);
}
return oldSize - c.size();
}
// Erase all the elements in the container that satisfy the provided predicate.
template <typename C, typename P>
auto erase_if(C& c, P pred) {
auto oldSize = c.size();
for (auto it = c.begin(); it != c.end();) {
if (pred(*it)) {
it = c.erase(it);
} else {
++it;
}
}
return oldSize - c.size();
}
// Erase all the elements in the map that have specified values.
template <typename C, typename V>
auto erase_all_values(C& c, const V& values) {
return erase_if(c, [values](const auto& pair) { return values.count(pair.second) != 0; });
}
// Return non-zero count of elements for any of the provided keys.
template <typename M, typename V>
size_t count_any(const M& m, const V& keys) {
for (auto& k : keys) {
if (size_t c = m.count(k); c != 0) return c;
}
return 0;
}
// Assuming that M is a map whose values have an 'id' field,
// find an element with the specified id.
template <typename M>
auto findById(M& m, int32_t id) {
return std::find_if(m.begin(), m.end(), [&](const auto& p) { return p.second.id == id; });
}
// Assuming that the vector contains elements with an 'id' field,
// find an element with the specified id.
template <typename T>
auto findById(std::vector<T>& v, int32_t id) {
return std::find_if(v.begin(), v.end(), [&](const auto& e) { return e.id == id; });
}
// Return elements from the vector that have specified ids, also
// optionally return which ids were not found.
template <typename T>
std::vector<T*> selectByIds(std::vector<T>& v, const std::vector<int32_t>& ids,
std::vector<int32_t>* missingIds = nullptr) {
std::vector<T*> result;
std::set<int32_t> idsSet(ids.begin(), ids.end());
for (size_t i = 0; i < v.size(); ++i) {
T& e = v[i];
if (idsSet.count(e.id) != 0) {
result.push_back(&v[i]);
idsSet.erase(e.id);
}
}
if (missingIds) {
*missingIds = std::vector(idsSet.begin(), idsSet.end());
}
return result;
}
// Assuming that M is a map whose keys' type is K and values' type is V,
// return the corresponding value of the given key from the map or default
// value if the key is not found.
template <typename M, typename K, typename V>
auto findValueOrDefault(const M& m, const K& key, V defaultValue) {
auto it = m.find(key);
return it == m.end() ? defaultValue : it->second;
}
// Assuming that M is a map whose keys' type is K, return the given key if it
// is found from the map or default value.
template <typename M, typename K>
auto findKeyOrDefault(const M& m, const K& key, K defaultValue) {
auto it = m.find(key);
return it == m.end() ? defaultValue : key;
}
/*
* create a VendorParameter from an id and value, primarily used with getVendorParameters.
*/
::aidl::android::hardware::audio::core::VendorParameter makeVendorParameter(const std::string& id,
const std::string& value);
/*
* convert bool value to the corresponding string value
* true -> "true"
* false -> "false"
*/
std::string makeParamValue(bool const&) noexcept;
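// Illustrative sketch (not part of the original header) of how these helpers combine when
// building a reply for getVendorParameters; the parameter id below is hypothetical:
//
//   std::vector<::aidl::android::hardware::audio::core::VendorParameter> reply;
//   reply.push_back(makeVendorParameter("vendor.hypothetical.flag", makeParamValue(true)));
//
//   // and on the receiving side, assuming a VString payload:
//   std::string value;
//   if (extractParameter<::aidl::qti::audio::core::VString>(reply.front(), &value)) {
//       // value == "true"
//   }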
} // namespace qti::audio::core

View File

@@ -1,47 +0,0 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := libaudiocorehal.default
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_RELATIVE_PATH := hw
LOCAL_CFLAGS := \
-DBACKEND_NDK \
-Wall \
-Wextra \
-Werror \
-Wthread-safety
LOCAL_VINTF_FRAGMENTS += manifest_audiocorehal_default.xml
LOCAL_SRC_FILES := \
DefaultServices.cpp
LOCAL_HEADER_LIBRARIES := \
libxsdc-utils \
liberror_headers
LOCAL_SHARED_LIBRARIES := \
libaudioaidlcommon \
libaudioserviceexampleimpl \
android.hardware.audio.core-V2-ndk \
libbase \
libbinder_ndk \
libcutils \
liblog \
libdl \
libxml2 \
libaudioutils \
libutils \
android.hardware.common-V2-ndk \
android.media.audio.common.types-V3-ndk \
libmedia_helper \
libstagefright_foundation \
libhidlbase \
libhardware \
libfmq
include $(BUILD_SHARED_LIBRARY)

View File

@@ -1,101 +0,0 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_DefaultService_QTI"
#include <android-base/logging.h>
#include <android-base/properties.h>
#include <android/binder_ibinder_platform.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <core-impl/AudioPolicyConfigXmlConverter.h>
#include <core-impl/ChildInterface.h>
#include <core-impl/Config.h>
#include <cstdlib>
#include <ctime>
using aidl::android::hardware::audio::core::ChildInterface;
using aidl::android::hardware::audio::core::internal::AudioPolicyConfigXmlConverter;
AudioPolicyConfigXmlConverter gAudioPolicyConverter{
::android::audio_get_audio_policy_config_file()};
using AospModule = ::aidl::android::hardware::audio::core::Module;
using AospModuleConfig = ::aidl::android::hardware::audio::core::Module::Configuration;
using AospModuleConfigurationPair = std::pair<std::string, std::unique_ptr<AospModuleConfig>>;
using AospModuleConfigs = std::vector<AospModuleConfigurationPair>;
std::unique_ptr<AospModuleConfigs> gModuleConfigs;
std::vector<ChildInterface<AospModule>> gModuleInstances;
std::shared_ptr<::aidl::android::hardware::audio::core::Config> gConfigDefaultAosp;
namespace {
ChildInterface<AospModule> createModule(const std::string &name,
std::unique_ptr<AospModuleConfig> &&config) {
ChildInterface<AospModule> result;
{
auto moduleType = AospModule::typeFromString(name);
if (!moduleType.has_value()) {
LOG(ERROR) << __func__ << ": module type \"" << name << "\" is not supported";
return result;
}
auto module = AospModule::createInstance(*moduleType, std::move(config));
if (module == nullptr) return result;
result = std::move(module);
}
const std::string moduleName =
std::string().append(AospModule::descriptor).append("/").append(name);
AIBinder_setMinSchedulerPolicy(result.getBinder(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
binder_status_t status = AServiceManager_addService(result.getBinder(), moduleName.c_str());
if (status != STATUS_OK) {
LOG(ERROR) << __func__ << ": failed to register service for \"" << moduleName << "\"";
return ChildInterface<AospModule>();
} else {
LOG(INFO) << __func__ << ": registered service for \"" << moduleName << "\"";
}
return result;
};
} // namespace
extern "C" __attribute__((visibility("default"))) int32_t registerServices() {
gConfigDefaultAosp = ndk::SharedRefBase::make<::aidl::android::hardware::audio::core::Config>(
gAudioPolicyConverter);
const std::string configIntfName =
std::string().append(gConfigDefaultAosp->descriptor).append("/default");
binder_status_t status = AServiceManager_addService(gConfigDefaultAosp->asBinder().get(),
configIntfName.c_str());
if (status != STATUS_OK) {
LOG(ERROR) << "failed to register service for \"" << configIntfName << "\"";
}
gModuleConfigs = gAudioPolicyConverter.releaseModuleConfigs();
// check if IModule/default is registered or not
const std::string serviceName = std::string(AospModule::descriptor).append("/").append("default");
AIBinder* binder = AServiceManager_checkService(serviceName.c_str());
bool registerStubAsDefault = false;
if (binder == nullptr) {
LOG(INFO) <<"IModule/default is not registered yet";
registerStubAsDefault = true;
}
for (AospModuleConfigurationPair &configPair : *gModuleConfigs) {
std::string name = configPair.first;
if (name == "default") {
registerStubAsDefault = false;
} else if (name == "stub") {
if (registerStubAsDefault) {
name = "default";
LOG(INFO) <<"register stub hal as default hal";
} else {
continue;
}
}
if (auto instance = createModule(name, std::move(configPair.second)); instance) {
gModuleInstances.push_back(std::move(instance));
}
}
return STATUS_OK;
}

View File

@@ -1,30 +0,0 @@
<!--
Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
SPDX-License-Identifier: BSD-3-Clause-Clear
-->
<manifest version="1.0" type="device">
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IConfig/default</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IModule/default</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IModule/r_submix</fqname>
</hal>
<hal format="aidl">
<name>android.hardware.audio.core</name>
<version>2</version>
<fqname>IModule/usb</fqname>
</hal>
</manifest>

View File

@@ -1,68 +0,0 @@
/*
// Commenting out compilation through Android.bp, as all vendor effects are
// generated using Android.mk; keeping both would require the version to be
// updated in different places. Base headers/shared lib compilation is moved
// to the makefile so all effects libraries can utilize common defaults
// introduced in target.mk, achieving behavior similar to the
// cc_defaults of Android.bp.
cc_defaults {
name: "aidlaudioeffectserviceqti_defaults",
vendor: true,
shared_libs: [
"libaudioaidlcommon",
"libbase",
"libbinder_ndk",
"libcutils",
"libfmq",
"liblog",
"libutils",
"android.hardware.common-V2-ndk",
"android.hardware.common.fmq-V1-ndk",
"android.hardware.audio.effect-V2-ndk",
],
header_libs: [
"libaudioeffectsaidlqti_headers",
"libaudio_system_headers",
"libsystem_headers",
],
cflags: [
"-Wall",
"-Wextra",
"-Werror",
"-Wthread-safety",
],
}
cc_library_static {
name: "libaudioeffecthal_base_impl_static",
srcs: [
"EffectThread.cpp",
"EffectImpl.cpp",
"EffectContext.cpp",
],
defaults: ["aidlaudioeffectserviceqti_defaults"],
}
cc_library_shared {
name: "libaudioeffecthal.qti",
relative_install_path: "hw",
vintf_fragments: ["audioeffectservice_qti.xml"],
defaults: ["aidlaudioeffectserviceqti_defaults"],
shared_libs: [
"libtinyxml2",
],
srcs: [
"EffectConfig.cpp",
"EffectFactory.cpp",
"EffectMain.cpp",
],
}
cc_library_headers {
name: "libaudioeffectsaidlqti_headers",
export_include_dirs: ["include"],
vendor_available: true,
host_supported: true,
}
*/

View File

@@ -1,59 +0,0 @@
CURRENT_PATH := $(call my-dir)
LOCAL_PATH:= $(call my-dir)
# Build Header library to expose effect headers
include $(CLEAR_VARS)
LOCAL_MODULE := libaudioeffectsaidlqti_headers
LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
LOCAL_VENDOR_MODULE := true
include $(BUILD_HEADER_LIBRARY)
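# The EFFECTS_DEFAULTS_* variables used below are expected to come from target.mk
# (see the commented-out cc_defaults block in the Android.bp listing above). A minimal
# sketch of what they presumably expand to, based on that cc_defaults block:
#   EFFECTS_DEFAULTS_SHARED_LIBRARIES := libaudioaidlcommon libbase libbinder_ndk \
#       libcutils libfmq liblog libutils android.hardware.common-V2-ndk \
#       android.hardware.common.fmq-V1-ndk android.hardware.audio.effect-V2-ndk
#   EFFECTS_DEFAULTS_HEADERS_LIBRARIES := libaudioeffectsaidlqti_headers \
#       libaudio_system_headers libsystem_headers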
# Build static library used by all effects
include $(CLEAR_VARS)
LOCAL_MODULE:= libaudioeffecthal_base_impl_static
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_OWNER := qti
LOCAL_CFLAGS += -Werror -Wall -Wextra -Wthread-safety
LOCAL_SRC_FILES:= \
EffectThread.cpp \
EffectImpl.cpp \
EffectContext.cpp
LOCAL_SHARED_LIBRARIES:= \
$(EFFECTS_DEFAULTS_SHARED_LIBRARIES)
LOCAL_HEADER_LIBRARIES:= $(EFFECTS_DEFAULTS_HEADERS_LIBRARIES)
include $(BUILD_STATIC_LIBRARY)
# build base effects library
include $(CLEAR_VARS)
LOCAL_MODULE:= libaudioeffecthal.qti
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_OWNER := qti
LOCAL_MODULE_RELATIVE_PATH := hw
LOCAL_CFLAGS += -Werror -Wall -Wextra
LOCAL_SRC_FILES:= \
EffectConfig.cpp \
EffectFactory.cpp \
EffectMain.cpp
LOCAL_STATIC_LIBRARIES := libaudioeffecthal_base_impl_static
LOCAL_VINTF_FRAGMENTS := audioeffectservice_qti.xml
LOCAL_SHARED_LIBRARIES:= \
$(EFFECTS_DEFAULTS_SHARED_LIBRARIES) \
libtinyxml2
LOCAL_HEADER_LIBRARIES:= $(EFFECTS_DEFAULTS_HEADERS_LIBRARIES)
include $(BUILD_SHARED_LIBRARY)
include $(CURRENT_PATH)/qcom-effects/Android.mk

View File

@@ -1,358 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_EffectConfigQti"
#include <android-base/logging.h>
#include <system/audio_effects/audio_effects_conf.h>
#include <system/audio_effects/effect_uuid.h>
#include <optional>
#include <string>
// { SEC_AUDIO_SUPPORT_AIDL_EFFECT
#include <system/audio.h>
// } SEC_AUDIO_SUPPORT_AIDL_EFFECT
#include "effectFactory-impl/EffectConfig.h"
using aidl::android::media::audio::common::AudioSource;
using aidl::android::media::audio::common::AudioStreamType;
using aidl::android::media::audio::common::AudioUuid;
using ::aidl::android::hardware::audio::effect::stringToUuid;
#ifdef SEC_AUDIO_SUPPORT_AIDL_EFFECT
using ::aidl::android::hardware::audio::effect::getEffectTypeUuidSoundAlive;
using ::aidl::android::hardware::audio::effect::getEffectTypeUuidMySpaceEffect;
using ::aidl::android::hardware::audio::effect::getEffectTypeUuidDolbyAudioEffect;
using ::aidl::android::hardware::audio::effect::getEffectTypeUuidVolumeMonitor;
#endif
namespace aidl::qti::effects {
EffectConfig::EffectConfig(const std::string& file) {
tinyxml2::XMLDocument doc;
doc.LoadFile(file.c_str());
LOG(VERBOSE) << __func__ << " loading " << file;
// parse the xml file into maps
if (doc.Error()) {
LOG(ERROR) << __func__ << " tinyxml2 failed to load " << file
<< " error: " << doc.ErrorStr();
return;
}
auto registerFailure = [&](bool result) { mSkippedElements += result ? 0 : 1; };
for (auto& xmlConfig : getChildren(doc, "audio_effects_conf")) {
// Parse library
for (auto& xmlLibraries : getChildren(xmlConfig, "libraries")) {
for (auto& xmlLibrary : getChildren(xmlLibraries, "library")) {
registerFailure(parseLibrary(xmlLibrary));
}
}
// Parse effects
for (auto& xmlEffects : getChildren(xmlConfig, "effects")) {
for (auto& xmlEffect : getChildren(xmlEffects)) {
registerFailure(parseEffect(xmlEffect));
}
}
// Parse pre processing chains
for (auto& xmlPreprocess : getChildren(xmlConfig, "preprocess")) {
for (auto& xmlStream : getChildren(xmlPreprocess, "stream")) {
// AudioSource
registerFailure(parseProcessing(Processing::Type::source, xmlStream));
}
}
// Parse post processing chains
for (auto& xmlPostprocess : getChildren(xmlConfig, "postprocess")) {
for (auto& xmlStream : getChildren(xmlPostprocess, "stream")) {
// AudioStreamType
registerFailure(parseProcessing(Processing::Type::streamType, xmlStream));
}
}
}
LOG(DEBUG) << __func__ << " successfully parsed " << file << ", skipping " << mSkippedElements
<< " element(s)";
}
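// Illustrative shape of the XML this constructor expects (all names and values below are
// hypothetical):
//
//   <audio_effects_conf>
//     <libraries>
//       <library name="vendorbundle" path="libvendorbundle.so"/>
//     </libraries>
//     <effects>
//       <effect name="vendor_fx" library="vendorbundle"
//               uuid="11111111-2222-3333-4444-555555555555" typeSameAsUuid="true"/>
//     </effects>
//     <postprocess>
//       <stream type="music">
//         <apply effect="vendor_fx"/>
//       </stream>
//     </postprocess>
//   </audio_effects_conf>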
std::vector<std::reference_wrapper<const tinyxml2::XMLElement>> EffectConfig::getChildren(
const tinyxml2::XMLNode& node, const char* childTag) {
std::vector<std::reference_wrapper<const tinyxml2::XMLElement>> children;
for (auto* child = node.FirstChildElement(childTag); child != nullptr;
child = child->NextSiblingElement(childTag)) {
children.emplace_back(*child);
}
return children;
}
bool EffectConfig::resolveLibrary(const std::string& path, std::string* resolvedPath) {
for (auto* libraryDirectory : kEffectLibPath) {
std::string candidatePath = std::string(libraryDirectory) + '/' + path;
if (access(candidatePath.c_str(), R_OK) == 0) {
*resolvedPath = std::move(candidatePath);
return true;
}
}
return false;
}
bool EffectConfig::parseLibrary(const tinyxml2::XMLElement& xml) {
const char* name = xml.Attribute("name");
RETURN_VALUE_IF(!name, false, "noNameAttribute");
const char* path = xml.Attribute("path");
RETURN_VALUE_IF(!path, false, "noPathAttribute");
std::string resolvedPath;
if (!resolveLibrary(path, &resolvedPath)) {
LOG(ERROR) << __func__ << " can't find " << path;
return false;
}
mLibraryMap[name] = resolvedPath;
LOG(VERBOSE) << __func__ << " " << name << " : " << resolvedPath;
return true;
}
bool EffectConfig::parseEffect(const tinyxml2::XMLElement& xml) {
struct EffectLibraries effectLibraries;
std::vector<Library> libraries;
std::string name = xml.Attribute("name");
RETURN_VALUE_IF(name == "", false, "effectsNoName");
struct Library library;
if (std::strcmp(xml.Name(), "effectProxy") == 0) {
// proxy lib and uuid
RETURN_VALUE_IF(!parseLibrary(xml, library, true), false, "parseProxyLibFailed");
effectLibraries.proxyLibrary = library;
// proxy effect libs and UUID
auto xmlProxyLib = xml.FirstChildElement();
RETURN_VALUE_IF(!xmlProxyLib, false, "noLibForProxy");
while (xmlProxyLib) {
struct Library tempLibrary;
RETURN_VALUE_IF(!parseLibrary(*xmlProxyLib, tempLibrary), false,
"parseEffectLibFailed");
libraries.push_back(std::move(tempLibrary));
xmlProxyLib = xmlProxyLib->NextSiblingElement();
}
} else {
// expect only one library if not proxy
RETURN_VALUE_IF(!parseLibrary(xml, library), false, "parseEffectLibFailed");
libraries.push_back(std::move(library));
}
effectLibraries.libraries = std::move(libraries);
mEffectsMap[name] = std::move(effectLibraries);
return true;
}
bool EffectConfig::parseLibrary(const tinyxml2::XMLElement& xml, struct Library& library,
bool isProxy) {
// Retrieve library name only if not effectProxy element
if (!isProxy) {
const char* name = xml.Attribute("library");
RETURN_VALUE_IF(!name, false, "noLibraryAttribute");
library.name = name;
}
const char* uuidStr = xml.Attribute("uuid");
RETURN_VALUE_IF(!uuidStr, false, "noUuidAttribute");
library.uuid = stringToUuid(uuidStr);
bool typeSameAsUuid = false;
if (const char* typeSameAsUuidStr = xml.Attribute("typeSameAsUuid")) {
typeSameAsUuid = (0 == strcmp(typeSameAsUuidStr, "true"));
}
if (const char* typeUuidStr = xml.Attribute("type")) {
library.type = stringToUuid(typeUuidStr);
LOG(VERBOSE) << " type specified for " << library.name;
} else if (typeSameAsUuid) {
// For vendor effects, type and uuid are generally the same, so instead of modifying
// the XML to add "type", add the "typeSameAsUuid" tag and the type will be mapped to the uuid.
// However, if type and uuid need to be different, then set "type" explicitly in
// the XML file.
library.type = stringToUuid(uuidStr);
}
RETURN_VALUE_IF((library.uuid == getEffectUuidZero()), false, "invalidUuidAttribute");
LOG(VERBOSE) << __func__ << (isProxy ? " proxy " : library.name) << " : uuid "
<< toString(library.uuid)
<< " : type: " << (library.type.has_value() ? toString(library.type.value()) : "");
return true;
}
std::optional<Processing::Type> EffectConfig::stringToProcessingType(Processing::Type::Tag typeTag,
const std::string& type) {
// see list of audio stream types in audio_stream_type_t:
// system/media/audio/include/system/audio_effects/audio_effects_conf.h
// AUDIO_STREAM_DEFAULT_TAG is not listed here because according to SYS_RESERVED_DEFAULT in
// AudioStreamType.aidl: "Value reserved for system use only. HALs must never return this value
// to the system or accept it from the system".
static const std::map<const std::string, AudioStreamType> sAudioStreamTypeTable = {
{AUDIO_STREAM_VOICE_CALL_TAG, AudioStreamType::VOICE_CALL},
{AUDIO_STREAM_SYSTEM_TAG, AudioStreamType::SYSTEM},
{AUDIO_STREAM_RING_TAG, AudioStreamType::RING},
{AUDIO_STREAM_MUSIC_TAG, AudioStreamType::MUSIC},
{AUDIO_STREAM_ALARM_TAG, AudioStreamType::ALARM},
{AUDIO_STREAM_NOTIFICATION_TAG, AudioStreamType::NOTIFICATION},
{AUDIO_STREAM_BLUETOOTH_SCO_TAG, AudioStreamType::BLUETOOTH_SCO},
{AUDIO_STREAM_ENFORCED_AUDIBLE_TAG, AudioStreamType::ENFORCED_AUDIBLE},
{AUDIO_STREAM_DTMF_TAG, AudioStreamType::DTMF},
{AUDIO_STREAM_TTS_TAG, AudioStreamType::TTS},
{AUDIO_STREAM_ASSISTANT_TAG, AudioStreamType::ASSISTANT}};
// see list of audio sources in audio_source_t:
// system/media/audio/include/system/audio_effects/audio_effects_conf.h
static const std::map<const std::string, AudioSource> sAudioSourceTable = {
{MIC_SRC_TAG, AudioSource::MIC},
{VOICE_UL_SRC_TAG, AudioSource::VOICE_UPLINK},
{VOICE_DL_SRC_TAG, AudioSource::VOICE_DOWNLINK},
{VOICE_CALL_SRC_TAG, AudioSource::VOICE_CALL},
{CAMCORDER_SRC_TAG, AudioSource::CAMCORDER},
{VOICE_REC_SRC_TAG, AudioSource::VOICE_RECOGNITION},
{VOICE_COMM_SRC_TAG, AudioSource::VOICE_COMMUNICATION},
{REMOTE_SUBMIX_SRC_TAG, AudioSource::REMOTE_SUBMIX},
{UNPROCESSED_SRC_TAG, AudioSource::UNPROCESSED},
{VOICE_PERFORMANCE_SRC_TAG, AudioSource::VOICE_PERFORMANCE}};
if (typeTag == Processing::Type::streamType) {
auto typeIter = sAudioStreamTypeTable.find(type);
if (typeIter != sAudioStreamTypeTable.end()) {
return typeIter->second;
}
} else if (typeTag == Processing::Type::source) {
auto typeIter = sAudioSourceTable.find(type);
if (typeIter != sAudioSourceTable.end()) {
return typeIter->second;
}
}
return std::nullopt;
}
bool EffectConfig::parseProcessing(Processing::Type::Tag typeTag, const tinyxml2::XMLElement& xml) {
const char* typeStr = xml.Attribute("type");
auto aidlType = stringToProcessingType(typeTag, typeStr);
RETURN_VALUE_IF(!aidlType.has_value(), false, "illegalStreamType");
RETURN_VALUE_IF(0 != mProcessingMap.count(aidlType.value()), false, "duplicateStreamType");
for (auto& apply : getChildren(xml, "apply")) {
const char* name = apply.get().Attribute("effect");
RETURN_VALUE_IF(!name, false, "noEffectAttribute");
if (mEffectsMap.find(name) == mEffectsMap.end()) {
LOG(ERROR) << __func__ << " effect " << name << " doesn't exist, skipping";
continue;
}
mProcessingMap[aidlType.value()].emplace_back(mEffectsMap[name]);
LOG(VERBOSE) << __func__ << " " << typeStr << " : " << name
<< " aidl: " << aidlType.value().toString();
}
return true;
}
const std::map<Processing::Type, std::vector<EffectConfig::EffectLibraries>>&
EffectConfig::getProcessingMap() const {
return mProcessingMap;
}
bool EffectConfig::findUuid(const std::pair<std::string, struct EffectLibraries>& effectElem,
AudioUuid* uuid) {
// Unlike EFFECT_TYPE_LIST_DEF, multiple names may map to the same effect type here.
#define EFFECT_XML_TYPE_LIST_DEF(V) \
V("acoustic_echo_canceler", AcousticEchoCanceler) \
V("automatic_gain_control_v1", AutomaticGainControlV1) \
V("automatic_gain_control_v2", AutomaticGainControlV2) \
V("bassboost", BassBoost) \
V("downmix", Downmix) \
V("dynamics_processing", DynamicsProcessing) \
V("equalizer", Equalizer) \
V("haptic_generator", HapticGenerator) \
V("loudness_enhancer", LoudnessEnhancer) \
V("env_reverb", EnvReverb) \
V("reverb_env_aux", EnvReverb) \
V("reverb_env_ins", EnvReverb) \
V("preset_reverb", PresetReverb) \
V("reverb_pre_aux", PresetReverb) \
V("reverb_pre_ins", PresetReverb) \
V("noise_suppression", NoiseSuppression) \
V("spatializer", Spatializer) \
V("virtualizer", Virtualizer) \
V("visualizer", Visualizer) \
V("volume", Volume)
#ifdef SEC_AUDIO_SUPPORT_AIDL_EFFECT
#define EFFECT_XML_TYPE_SEC_LIST_DEF(V) \
V("soundalive", SoundAlive) \
V("sa3d", MySpaceEffect) \
V("dap", DolbyAudioEffect) \
V("volumemonitor_hw", VolumeMonitor) \
V("sa3d_sw", MySpaceEffect) \
V("sa3d_hw", MySpaceEffect)
#endif
#define GENERATE_MAP_ENTRY_V(s, symbol) {s, &getEffectTypeUuid##symbol},
const std::string xmlEffectName = effectElem.first;
typedef const AudioUuid& (*UuidGetter)(void);
static const std::map<std::string, UuidGetter> uuidMap{
{EFFECT_XML_TYPE_LIST_DEF(GENERATE_MAP_ENTRY_V)}};
if (auto it = uuidMap.find(xmlEffectName); it != uuidMap.end()) {
*uuid = (*it->second)();
LOG(VERBOSE) << __func__ << " " << xmlEffectName << " found in standard effects";
return true;
}
const auto& libs = effectElem.second.libraries;
for (const auto& lib : libs) {
if (lib.type.has_value()) {
LOG(VERBOSE) << __func__ << " " << xmlEffectName << " found from XML type "
<< toString(lib.type.value());
*uuid = lib.type.value();
return true;
}
}
#ifdef SEC_AUDIO_SUPPORT_AIDL_EFFECT
static const std::map<std::string, UuidGetter> secUuidMap{
{EFFECT_XML_TYPE_SEC_LIST_DEF(GENERATE_MAP_ENTRY_V)}};
if (auto it = secUuidMap.find(xmlEffectName); it != secUuidMap.end()) {
*uuid = (*it->second)();
LOG(VERBOSE) << __func__ << " " << xmlEffectName << " found in sec effects";
return true;
}
#endif
// find in QTI specific effects
if (auto it = kUuidNameTypeMap.find(xmlEffectName); it != kUuidNameTypeMap.end()) {
*uuid = (it->second);
LOG(VERBOSE) << __func__ << " " << xmlEffectName << " found in QTI effects";
return true;
}
return false;
}
const char* EffectConfig::dump(const tinyxml2::XMLElement& element,
tinyxml2::XMLPrinter&& printer) const {
element.Accept(&printer);
return printer.CStr();
}
} // namespace aidl::qti::effects

View File

@@ -1,294 +0,0 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <memory>
#define LOG_TAG "AHAL_EffectContextQti"
#include "effect-impl/EffectContext.h"
#include "include/effect-impl/EffectTypes.h"
using aidl::android::hardware::audio::common::getChannelCount;
using aidl::android::hardware::audio::common::getFrameSizeInBytes;
using aidl::android::hardware::audio::effect::IEffect;
// using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
using aidl::android::media::audio::common::PcmType;
using ::android::hardware::EventFlag;
namespace aidl::qti::effects {
int kReopenSupportedVersion = 2;
EffectContext::EffectContext(const Parameter::Common& common, bool processData) {
if (RetCode::SUCCESS != setCommon(common)) {
LOG(ERROR) << __func__ << " illegal common parameters";
}
initMessageQueues(processData);
}
void EffectContext::initMessageQueues(bool processData) {
if (processData) {
// in/outBuffer size in float (FMQ data format defined for DataMQ)
size_t inBufferSizeInFloat = mCommon.input.frameCount * mInputFrameSize / sizeof(float);
size_t outBufferSizeInFloat = mCommon.output.frameCount * mOutputFrameSize / sizeof(float);
// only status FMQ use the EventFlag
mStatusMQ = std::make_shared<StatusMQ>(1 /*depth*/, true /*configureEventFlagWord*/);
mInputMQ = std::make_shared<DataMQ>(inBufferSizeInFloat);
mOutputMQ = std::make_shared<DataMQ>(outBufferSizeInFloat);
if (!mStatusMQ->isValid() || !mInputMQ->isValid() || !mOutputMQ->isValid()) {
LOG(ERROR) << __func__ << " created invalid FMQ, statusMQ: " << mStatusMQ->isValid()
<< " inputMQ: " << mInputMQ->isValid()
<< " outputMQ: " << mOutputMQ->isValid();
}
::android::status_t status =
EventFlag::createEventFlag(mStatusMQ->getEventFlagWord(), &mEfGroup);
if (status != ::android::OK || !mEfGroup) {
LOG(ERROR) << __func__ << " create EventFlagGroup failed ";
return;
}
mWorkBuffer.resize(std::max(inBufferSizeInFloat, outBufferSizeInFloat));
}
mProcessData = processData;
}
EffectContext::~EffectContext() {
if (mEfGroup) {
::android::hardware::EventFlag::deleteEventFlag(&mEfGroup);
}
}
// reset buffer status by abandoning input data in the FMQ
void EffectContext::resetBuffer() {
if (mProcessData) {
auto buffer = static_cast<float*>(mWorkBuffer.data());
if (mStatusMQ) {
std::vector<IEffect::Status> status(mStatusMQ->availableToRead());
}
if (mInputMQ) {
mInputMQ->read(buffer, mInputMQ->availableToRead());
}
}
}
void EffectContext::dupeFmqWithReopen(IEffect::OpenEffectReturn* effectRet) {
if (!mProcessData) return;
if (!mInputMQ) {
mInputMQ = std::make_shared<DataMQ>(mCommon.input.frameCount * mInputFrameSize /
sizeof(float));
}
if (!mOutputMQ) {
mOutputMQ = std::make_shared<DataMQ>(mCommon.output.frameCount * mOutputFrameSize /
sizeof(float));
}
dupeFmq(effectRet);
}
void EffectContext::dupeFmq(IEffect::OpenEffectReturn* effectRet) {
if (!mProcessData) return;
if (effectRet && mStatusMQ && mInputMQ && mOutputMQ) {
effectRet->statusMQ = mStatusMQ->dupeDesc();
effectRet->inputDataMQ = mInputMQ->dupeDesc();
effectRet->outputDataMQ = mOutputMQ->dupeDesc();
}
}
float* EffectContext::getWorkBuffer() {
return static_cast<float*>(mWorkBuffer.data());
}
size_t EffectContext::getWorkBufferSize() const {
return mWorkBuffer.size();
}
std::shared_ptr<EffectContext::StatusMQ> EffectContext::getStatusFmq() const {
return mStatusMQ;
}
std::shared_ptr<EffectContext::DataMQ> EffectContext::getInputDataFmq() const {
return mInputMQ;
}
std::shared_ptr<EffectContext::DataMQ> EffectContext::getOutputDataFmq() const {
return mOutputMQ;
}
size_t EffectContext::getInputFrameSize() const {
return mInputFrameSize;
}
size_t EffectContext::getOutputFrameSize() const {
return mOutputFrameSize;
}
int EffectContext::getSessionId() const {
return mCommon.session;
}
int EffectContext::getIoHandle() const {
return mCommon.ioHandle;
}
RetCode EffectContext::setOutputDevice(
const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& device) {
mOutputDevice = device;
return RetCode::SUCCESS;
}
std::vector<aidl::android::media::audio::common::AudioDeviceDescription>
EffectContext::getOutputDevice() {
return mOutputDevice;
}
RetCode EffectContext::setAudioMode(const aidl::android::media::audio::common::AudioMode& mode) {
mMode = mode;
return RetCode::SUCCESS;
}
aidl::android::media::audio::common::AudioMode EffectContext::getAudioMode() {
return mMode;
}
RetCode EffectContext::setAudioSource(
const aidl::android::media::audio::common::AudioSource& source) {
mSource = source;
return RetCode::SUCCESS;
}
aidl::android::media::audio::common::AudioSource EffectContext::getAudioSource() {
return mSource;
}
RetCode EffectContext::setVolumeStereo(const Parameter::VolumeStereo& volumeStereo) {
mVolumeStereo = volumeStereo;
return RetCode::SUCCESS;
}
Parameter::VolumeStereo EffectContext::getVolumeStereo() {
return mVolumeStereo;
}
RetCode EffectContext::setCommon(const Parameter::Common& common) {
if (mCommon != common) {
LOG(VERBOSE) << __func__ << " Param Change from " << mCommon.toString();
LOG(VERBOSE) << __func__ << " to " << common.toString();
}
LOG(VERBOSE) << __func__ << common.toString();
auto& input = common.input;
auto& output = common.output;
if (input.base.format.pcm != aidl::android::media::audio::common::PcmType::FLOAT_32_BIT ||
output.base.format.pcm != aidl::android::media::audio::common::PcmType::FLOAT_32_BIT) {
LOG(ERROR) << __func__ << " illegal IO, input "
<< ::android::internal::ToString(input.base.format) << ", output "
<< ::android::internal::ToString(output.base.format);
return RetCode::ERROR_ILLEGAL_PARAMETER;
}
if (auto ret = updateIOFrameSize(common); ret != RetCode::SUCCESS) {
return ret;
}
mInputChannelCount = getChannelCount(input.base.channelMask);
mOutputChannelCount = getChannelCount(output.base.channelMask);
if (mInputChannelCount == 0 || mOutputChannelCount == 0) {
LOG(ERROR) << __func__ << " illegal channel count input " << mInputChannelCount
<< ", output " << mOutputChannelCount;
return RetCode::ERROR_ILLEGAL_PARAMETER;
}
mCommon = common;
return RetCode::SUCCESS;
}
Parameter::Common EffectContext::getCommon() {
LOG(VERBOSE) << __func__ << mCommon.toString();
return mCommon;
}
EventFlag* EffectContext::getStatusEventFlag() {
return mEfGroup;
}
RetCode EffectContext::updateIOFrameSize(const Parameter::Common& common) {
const auto prevInputFrameSize = mInputFrameSize;
const auto prevOutputFrameSize = mOutputFrameSize;
mInputFrameSize = ::aidl::android::hardware::audio::common::getFrameSizeInBytes(
common.input.base.format, common.input.base.channelMask);
mOutputFrameSize = ::aidl::android::hardware::audio::common::getFrameSizeInBytes(
common.output.base.format, common.output.base.channelMask);
if (!mProcessData) {
LOG(VERBOSE) << __func__ << " effect does not process data";
return RetCode::SUCCESS;
}
// workBuffer and data MQ not allocated yet, no need to update
if (mWorkBuffer.size() == 0 || !mInputMQ || !mOutputMQ) {
return RetCode::SUCCESS;
}
// IEffect::reopen introduced in android.hardware.audio.effect-V2
if (mVersion < kReopenSupportedVersion) {
LOG(WARNING) << __func__ << " skipped for HAL version " << mVersion;
return RetCode::SUCCESS;
}
bool needUpdateMq = false;
if (mInputFrameSize != prevInputFrameSize ||
mCommon.input.frameCount != common.input.frameCount) {
mInputMQ.reset();
needUpdateMq = true;
}
if (mOutputFrameSize != prevOutputFrameSize ||
mCommon.output.frameCount != common.output.frameCount) {
mOutputMQ.reset();
needUpdateMq = true;
}
if (needUpdateMq) {
mWorkBuffer.resize(std::max(common.input.frameCount * mInputFrameSize / sizeof(float),
common.output.frameCount * mOutputFrameSize / sizeof(float)));
return notifyDataMqUpdate();
}
return RetCode::SUCCESS;
}
// this should only be called for software effects
RetCode EffectContext::notifyDataMqUpdate() {
if (!mEfGroup) {
LOG(ERROR) << __func__ << ": invalid EventFlag group";
return RetCode::ERROR_EVENT_FLAG_ERROR;
}
if (const auto ret = mEfGroup->wake(kEventFlagDataMqUpdate); ret != ::android::OK) {
LOG(ERROR) << __func__ << ": wake failure with ret " << ret;
return RetCode::ERROR_EVENT_FLAG_ERROR;
}
LOG(DEBUG) << __func__ << " : signal client for reopen";
return RetCode::SUCCESS;
}
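// Assumed client-side sequence for the notification above (a sketch, not verified against the
// framework implementation): the effect client observes kEventFlagDataMqUpdate on the status
// FMQ event flag, calls IEffect::reopen() to fetch the re-created data queues, and resumes
// processing with the updated frame sizes.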
RetCode EffectContext::setOffload(bool offload) {
mOffload = offload;
return RetCode::SUCCESS;
}
} // namespace aidl::qti::effects

View File

@@ -1,312 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <dlfcn.h>
#include <iterator>
#include <memory>
#include <tuple>
#include <unordered_set>
#define LOG_TAG "AHAL_EffectFactoryQti"
#include <android-base/logging.h>
#include <android/binder_ibinder_platform.h>
#include <system/audio_effects/effect_uuid.h>
#include <system/thread_defs.h>
#include "effect-impl/EffectTypes.h"
#include "effectFactory-impl/EffectFactory.h"
using aidl::android::media::audio::common::AudioUuid;
namespace aidl::qti::effects {
Factory::Factory(const std::string& file) : mConfig(EffectConfig(file)) {
LOG(VERBOSE) << __func__ << " with config file: " << file;
loadEffectLibs();
}
Factory::~Factory() {
if (auto count = mEffectMap.size()) {
LOG(ERROR) << __func__ << " remaining " << count
<< " effect instances not destroyed indicating resource leak!";
for (const auto& it : mEffectMap) {
if (auto spEffect = it.first.lock()) {
LOG(ERROR) << __func__ << " erase remaining instance UUID "
<< it.second.first.toString();
destroyEffectImpl_l(spEffect);
}
}
}
}
ndk::ScopedAStatus Factory::getDescriptorWithUuid_l(const AudioUuid& uuid, Descriptor* desc) {
RETURN_IF(!desc, EX_NULL_POINTER, "nullDescriptor");
if (mEffectLibMap.count(uuid)) {
auto& entry = mEffectLibMap[uuid];
getDlSyms_l(entry);
auto& libInterface = std::get<kMapEntryInterfaceIndex>(entry);
RETURN_IF(!libInterface || !libInterface->queryEffectFunc, EX_NULL_POINTER,
"dlNullQueryEffectFunc");
RETURN_IF_BINDER_EXCEPTION(libInterface->queryEffectFunc(&uuid, desc));
return ndk::ScopedAStatus::ok();
}
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
ndk::ScopedAStatus Factory::queryEffects(const std::optional<AudioUuid>& in_type_uuid,
const std::optional<AudioUuid>& in_impl_uuid,
const std::optional<AudioUuid>& in_proxy_uuid,
std::vector<Descriptor>* _aidl_return) {
std::lock_guard lg(mMutex);
// get the matching list
std::vector<Descriptor::Identity> idList;
std::copy_if(mIdentitySet.begin(), mIdentitySet.end(), std::back_inserter(idList),
[&](auto& id) {
return (!in_type_uuid.has_value() || in_type_uuid.value() == id.type) &&
(!in_impl_uuid.has_value() || in_impl_uuid.value() == id.uuid) &&
(!in_proxy_uuid.has_value() ||
(id.proxy.has_value() && in_proxy_uuid.value() == id.proxy.value()));
});
// query through the matching list
for (const auto& id : idList) {
if (mEffectLibMap.count(id.uuid)) {
Descriptor desc;
RETURN_IF_ASTATUS_NOT_OK(getDescriptorWithUuid_l(id.uuid, &desc),
"getDescriptorFailed");
// update proxy UUID with information from config xml
desc.common.id.proxy = id.proxy;
_aidl_return->emplace_back(std::move(desc));
}
}
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus Factory::queryProcessing(const std::optional<Processing::Type>& in_type,
std::vector<Processing>* _aidl_return) {
std::lock_guard lg(mMutex);
const auto& processings = mConfig.getProcessingMap();
// Processing stream type
for (const auto& procIter : processings) {
if (!in_type.has_value() || in_type.value() == procIter.first) {
Processing process = {.type = procIter.first /* Processing::Type */};
for (const auto& libs : procIter.second /* std::vector<struct EffectLibraries> */) {
// { SEC_AUDIO_EFFECT_COMMON
if (libs.proxyLibrary.has_value()) {
auto& proxyLibrary = libs.proxyLibrary.value();
Descriptor desc;
desc.common.name = proxyLibrary.name;
desc.common.id.uuid = proxyLibrary.uuid;
desc.common.id.proxy = proxyLibrary.uuid;
process.ids.emplace_back(desc);
continue;
}
// } SEC_AUDIO_EFFECT_COMMON
for (const auto& lib : libs.libraries /* std::vector<struct Library> */) {
Descriptor desc;
if (libs.proxyLibrary.has_value()) {
desc.common.id.proxy = libs.proxyLibrary.value().uuid;
}
RETURN_IF_ASTATUS_NOT_OK(getDescriptorWithUuid_l(lib.uuid, &desc),
"getDescriptorFailed");
process.ids.emplace_back(desc);
}
}
LOG(VERBOSE) << __func__ << " insert processing " << process.toString();
_aidl_return->emplace_back(process);
}
}
LOG(VERBOSE) << __func__ << " return " << _aidl_return->size();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus Factory::createEffect(const AudioUuid& in_impl_uuid,
std::shared_ptr<IEffect>* _aidl_return) {
LOG(VERBOSE) << __func__ << ": UUID " << toString(in_impl_uuid);
std::lock_guard lg(mMutex);
if (mEffectLibMap.count(in_impl_uuid)) {
auto& entry = mEffectLibMap[in_impl_uuid];
getDlSyms_l(entry);
auto& libInterface = std::get<kMapEntryInterfaceIndex>(entry);
RETURN_IF(!libInterface || !libInterface->createEffectFunc, EX_NULL_POINTER,
"dlNullcreateEffectFunc");
std::shared_ptr<IEffect> effectSp;
RETURN_IF_BINDER_EXCEPTION(libInterface->createEffectFunc(&in_impl_uuid, &effectSp));
if (!effectSp) {
LOG(ERROR) << __func__ << ": library created null instance without return error!";
return ndk::ScopedAStatus::fromExceptionCode(EX_TRANSACTION_FAILED);
}
*_aidl_return = effectSp;
ndk::SpAIBinder effectBinder = effectSp->asBinder();
AIBinder_setMinSchedulerPolicy(effectBinder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
AIBinder_setInheritRt(effectBinder.get(), true);
mEffectMap[std::weak_ptr<IEffect>(effectSp)] =
std::make_pair(in_impl_uuid, std::move(effectBinder));
LOG(DEBUG) << __func__ << ": UUID " << toString(in_impl_uuid) << ": instance "
<< effectSp.get() << " created successfully";
return ndk::ScopedAStatus::ok();
} else {
LOG(ERROR) << __func__ << ": library doesn't exist for uuid" << toString(in_impl_uuid);
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus Factory::destroyEffectImpl_l(const std::shared_ptr<IEffect>& in_handle) {
std::weak_ptr<IEffect> wpHandle(in_handle);
// find the effect entry with key (std::weak_ptr<IEffect>)
if (auto effectIt = mEffectMap.find(wpHandle); effectIt != mEffectMap.end()) {
auto& uuid = effectIt->second.first;
// find implementation library with UUID
if (auto libIt = mEffectLibMap.find(uuid); libIt != mEffectLibMap.end()) {
auto& interface = std::get<kMapEntryInterfaceIndex>(libIt->second);
RETURN_IF(!interface || !interface->destroyEffectFunc, EX_NULL_POINTER,
"dlNulldestroyEffectFunc");
RETURN_IF_BINDER_EXCEPTION(interface->destroyEffectFunc(in_handle));
} else {
LOG(ERROR) << __func__ << ": UUID " << uuid.toString() << " does not exist in libMap!";
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
mEffectMap.erase(effectIt);
return ndk::ScopedAStatus::ok();
} else {
LOG(ERROR) << __func__ << ": instance " << in_handle << " does not exist!";
return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
}
}
// go over the map and clean up all expired weak_ptrs.
void Factory::cleanupEffectMap_l() {
for (auto it = mEffectMap.begin(); it != mEffectMap.end();) {
if (nullptr == it->first.lock()) {
it = mEffectMap.erase(it);
} else {
++it;
}
}
}
ndk::ScopedAStatus Factory::destroyEffect(const std::shared_ptr<IEffect>& in_handle) {
LOG(DEBUG) << __func__ << ": instance " << in_handle.get();
std::lock_guard lg(mMutex);
ndk::ScopedAStatus status = destroyEffectImpl_l(in_handle);
// always do the cleanup
cleanupEffectMap_l();
return status;
}
bool Factory::openEffectLibrary(const AudioUuid& impl,
const std::string& path) NO_THREAD_SAFETY_ANALYSIS {
std::function<void(void*)> dlClose = [](void* handle) -> void {
if (handle && dlclose(handle)) {
LOG(ERROR) << " dlclose failed " << dlerror();
}
};
auto libHandle =
std::unique_ptr<void, decltype(dlClose)>{dlopen(path.c_str(), RTLD_LAZY), dlClose};
if (!libHandle) {
LOG(ERROR) << __func__ << ": dlopen failed, err: " << dlerror();
return false;
}
LOG(VERBOSE) << __func__ << " dlopen lib: " << path.c_str() << " Impl " << toString(impl)
<< " handle:" << libHandle;
auto interface = new effect_dl_interface_s{nullptr, nullptr, nullptr};
mEffectLibMap.insert(
{impl,
std::make_tuple(std::move(libHandle),
std::unique_ptr<struct effect_dl_interface_s>(interface), path)});
return true;
}
void Factory::createIdentityWithConfig(
const EffectConfig::Library& configLib, const AudioUuid& typeUuid,
const std::optional<AudioUuid> proxyUuid) NO_THREAD_SAFETY_ANALYSIS {
static const auto& libMap = mConfig.getLibraryMap();
const std::string& libName = configLib.name;
if (auto path = libMap.find(libName); path != libMap.end()) {
Descriptor::Identity id;
id.type = typeUuid;
id.uuid = configLib.uuid;
id.proxy = proxyUuid;
LOG(VERBOSE) << __func__ << " loading lib " << path->second << ": typeUuid "
<< toString(id.type) << " implUuid " << toString(id.uuid) << " proxyUuid "
<< (proxyUuid.has_value() ? toString(proxyUuid.value()) : "null");
if (openEffectLibrary(id.uuid, path->second)) {
mIdentitySet.insert(std::move(id));
}
} else {
LOG(ERROR) << __func__ << ": library " << libName << " does not exist!";
}
}
void Factory::loadEffectLibs() {
const auto& configEffectsMap = mConfig.getEffectsMap();
for (const auto& configEffects : configEffectsMap) {
if (AudioUuid type; EffectConfig::findUuid(configEffects /* xml effect */, &type)) {
const auto& configLibs = configEffects.second;
std::optional<AudioUuid> proxyUuid;
if (configLibs.proxyLibrary.has_value()) {
const auto& proxyLib = configLibs.proxyLibrary.value();
proxyUuid = proxyLib.uuid;
}
for (const auto& configLib : configLibs.libraries) {
createIdentityWithConfig(configLib, type, proxyUuid);
}
} else {
LOG(ERROR) << __func__ << ": can not find type UUID for effect " << configEffects.first
<< " skipping!";
}
}
}
void Factory::getDlSyms_l(DlEntry& entry) {
auto& dlHandle = std::get<kMapEntryHandleIndex>(entry);
RETURN_VALUE_IF(!dlHandle, void(), "dlNullHandle");
// Get the reference of the DL interfaces in library map tuple.
auto& dlInterface = std::get<kMapEntryInterfaceIndex>(entry);
// only resolve symbols that have not been looked up yet
if (!dlInterface->createEffectFunc) {
dlInterface->createEffectFunc = (EffectCreateFunctor)dlsym(dlHandle.get(), "createEffect");
}
if (!dlInterface->queryEffectFunc) {
dlInterface->queryEffectFunc = (EffectQueryFunctor)dlsym(dlHandle.get(), "queryEffect");
}
if (!dlInterface->destroyEffectFunc) {
dlInterface->destroyEffectFunc =
(EffectDestroyFunctor)dlsym(dlHandle.get(), "destroyEffect");
}
if (!dlInterface->createEffectFunc || !dlInterface->destroyEffectFunc ||
!dlInterface->queryEffectFunc) {
LOG(ERROR) << __func__ << ": create (" << dlInterface->createEffectFunc << "), query ("
<< dlInterface->queryEffectFunc << "), or destroy ("
<< dlInterface->destroyEffectFunc
<< ") not exist in library: " << std::get<kMapEntryLibNameIndex>(entry)
<< " handle: " << dlHandle << " with dlerror: " << dlerror();
}
}
} // namespace aidl::qti::effects

View File

@@ -1,395 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_EffectImplQti"
#include "effect-impl/EffectImpl.h"
#include <memory>
#include "effect-impl/EffectTypes.h"
#include "include/effect-impl/EffectTypes.h"
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
using aidl::android::hardware::audio::effect::kReopenSupportedVersion;
using aidl::android::hardware::audio::effect::State;
using aidl::android::media::audio::common::PcmType;
using ::android::hardware::EventFlag;
using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
extern "C" binder_exception_t destroyEffect(const std::shared_ptr<IEffect>& instanceSp) {
State state;
ndk::ScopedAStatus status = instanceSp->getState(&state);
if (!status.isOk() || State::INIT != state) {
LOG(ERROR) << __func__ << " instance " << instanceSp.get()
<< " in state: " << toString(state) << ", status: " << status.getDescription();
return EX_ILLEGAL_STATE;
}
LOG(VERBOSE) << __func__ << " instance " << instanceSp.get() << " destroyed";
return EX_NONE;
}
namespace aidl::qti::effects {
ndk::ScopedAStatus EffectImpl::open(const Parameter::Common& common,
const std::optional<Parameter::Specific>& specific,
OpenEffectReturn* ret) {
LOG(DEBUG) << getEffectName() << " " << __func__ << " sessionId " << common.session
<< " ioHandle " << common.ioHandle;
// effects only support 32-bit float
RETURN_IF(common.input.base.format.pcm != common.output.base.format.pcm ||
common.input.base.format.pcm != PcmType::FLOAT_32_BIT,
EX_ILLEGAL_ARGUMENT, "dataMustBe32BitsFloat");
std::lock_guard lg(mImplMutex);
RETURN_OK_IF(mState != State::INIT);
// check if the effect needs data processing or not; based on that, init the worker thread & FMQs
mProcessData = !isOffloadOrBypass();
mImplContext = createContext(common, mProcessData);
RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
"FailedToGetInterfaceVersion");
mImplContext->setVersion(mVersion);
mEventFlag = mImplContext->getStatusEventFlag();
mDataMqNotEmptyEf =
mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
if (specific.has_value()) {
RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
}
mState = State::IDLE;
if (mProcessData) {
mEventFlag = mImplContext->getStatusEventFlag();
mImplContext->dupeFmq(ret);
RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
"FailedToCreateWorker");
} else {
LOG(VERBOSE) << __func__ << " " << getEffectName() << " effect does not process data";
}
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::reopen(OpenEffectReturn* ret) {
std::lock_guard lg(mImplMutex);
RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "alreadyClosed");
LOG(DEBUG) << getEffectName() << " " << __func__;
// TODO: add reopen implementation
RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
mImplContext->dupeFmqWithReopen(ret);
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::close() {
{
std::lock_guard lg(mImplMutex);
RETURN_OK_IF(mState == State::INIT);
RETURN_IF(mState == State::PROCESSING, EX_ILLEGAL_STATE, "closeAtProcessing");
mState = State::INIT;
}
RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
"notifyEventFlagFailed");
// stop the worker thread, ignore the return code
RETURN_IF(destroyThread() != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
"FailedToDestroyWorker");
{
std::lock_guard lg(mImplMutex);
releaseContext();
mImplContext.reset();
}
LOG(DEBUG) << getEffectName() << " " << __func__;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::setParameter(const Parameter& param) {
std::lock_guard lg(mImplMutex);
LOG(VERBOSE) << getEffectName() << __func__ << " with: " << param.toString();
const auto& tag = param.getTag();
switch (tag) {
case Parameter::common:
case Parameter::deviceDescription:
case Parameter::mode:
case Parameter::source:
case Parameter::offload:
FALLTHROUGH_INTENDED;
case Parameter::volumeStereo:
return setParameterCommon(param);
case Parameter::specific: {
return setParameterSpecific(param.get<Parameter::specific>());
}
default: {
LOG(DEBUG) << getEffectName() << " " << __func__ << " unsupportedParameterTag "
<< toString(tag);
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
"ParameterNotSupported");
}
}
}
ndk::ScopedAStatus EffectImpl::getParameter(const Parameter::Id& id, Parameter* param) {
std::lock_guard lg(mImplMutex);
LOG(VERBOSE) << getEffectName() << " " << __func__ << id.toString();
auto tag = id.getTag();
switch (tag) {
case Parameter::Id::commonTag: {
RETURN_IF_ASTATUS_NOT_OK(getParameterCommon(id.get<Parameter::Id::commonTag>(), param),
"CommonParamNotSupported");
break;
}
case Parameter::Id::vendorEffectTag:
FALLTHROUGH_INTENDED;
default: {
Parameter::Specific specific;
RETURN_IF_ASTATUS_NOT_OK(getParameterSpecific(id, &specific), "SpecParamNotSupported");
param->set<Parameter::specific>(specific);
break;
}
}
LOG(VERBOSE) << getEffectName() << __func__ << id.toString() << param->toString();
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::setParameterCommon(const Parameter& param) {
RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
const auto& tag = param.getTag();
LOG(VERBOSE) << getEffectName() << __func__ << param.toString();
switch (tag) {
case Parameter::common:
RETURN_IF(mImplContext->setCommon(param.get<Parameter::common>()) != RetCode::SUCCESS,
EX_ILLEGAL_ARGUMENT, "setCommFailed");
break;
case Parameter::deviceDescription:
RETURN_IF(mImplContext->setOutputDevice(param.get<Parameter::deviceDescription>()) !=
RetCode::SUCCESS,
EX_ILLEGAL_ARGUMENT, "setDeviceFailed");
break;
case Parameter::mode:
RETURN_IF(mImplContext->setAudioMode(param.get<Parameter::mode>()) != RetCode::SUCCESS,
EX_ILLEGAL_ARGUMENT, "setModeFailed");
break;
case Parameter::source:
RETURN_IF(mImplContext->setAudioSource(param.get<Parameter::source>()) !=
RetCode::SUCCESS,
EX_ILLEGAL_ARGUMENT, "setSourceFailed");
break;
case Parameter::volumeStereo:
RETURN_IF(mImplContext->setVolumeStereo(param.get<Parameter::volumeStereo>()) !=
RetCode::SUCCESS,
EX_ILLEGAL_ARGUMENT, "setVolumeStereoFailed");
break;
case Parameter::offload:
RETURN_IF(mImplContext->setOffload(param.get<Parameter::offload>()) != RetCode::SUCCESS,
EX_ILLEGAL_ARGUMENT, "setOffloadError");
break;
default: {
LOG(DEBUG) << getEffectName() << " " << __func__ << " unsupportedParameterTag "
<< toString(tag);
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
"commonParamNotSupported");
}
}
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::getParameterCommon(const Parameter::Tag& tag, Parameter* param) {
RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
switch (tag) {
case Parameter::common: {
param->set<Parameter::common>(mImplContext->getCommon());
break;
}
case Parameter::deviceDescription: {
param->set<Parameter::deviceDescription>(mImplContext->getOutputDevice());
break;
}
case Parameter::mode: {
param->set<Parameter::mode>(mImplContext->getAudioMode());
break;
}
case Parameter::source: {
param->set<Parameter::source>(mImplContext->getAudioSource());
break;
}
case Parameter::volumeStereo: {
param->set<Parameter::volumeStereo>(mImplContext->getVolumeStereo());
break;
}
default: {
LOG(DEBUG) << getEffectName() << " " << __func__ << " unsupported tag "
<< toString(tag);
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
"tagNotSupported");
}
}
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::getState(State* state) {
*state = mState;
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::command(CommandId command) {
std::lock_guard lg(mImplMutex);
RETURN_IF(mState == State::INIT, EX_ILLEGAL_STATE, "instanceNotOpen");
LOG(DEBUG) << getEffectName() << " " << __func__ << ": receive command: " << toString(command)
<< " at state " << toString(mState);
switch (command) {
case CommandId::START:
RETURN_OK_IF(mState == State::PROCESSING);
RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed");
mState = State::PROCESSING;
RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
"notifyEventFlagFailed");
startThread();
break;
case CommandId::STOP:
case CommandId::RESET:
RETURN_OK_IF(mState == State::IDLE);
mState = State::IDLE;
RETURN_IF(notifyEventFlag(mDataMqNotEmptyEf) != RetCode::SUCCESS, EX_ILLEGAL_STATE,
"notifyEventFlagFailed");
stopThread();
RETURN_IF_ASTATUS_NOT_OK(commandImpl(command), "commandImplFailed");
break;
default:
LOG(DEBUG) << getEffectName() << " " << __func__ << " instance still processing";
return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
"CommandIdNotSupported");
}
LOG(DEBUG) << getEffectName() << " " << __func__ << " transfer to state: " << toString(mState);
return ndk::ScopedAStatus::ok();
}
ndk::ScopedAStatus EffectImpl::commandImpl(CommandId command) {
RETURN_IF(!mImplContext, EX_NULL_POINTER, "nullContext");
if (command == CommandId::RESET) {
mImplContext->resetBuffer();
}
return ndk::ScopedAStatus::ok();
}
std::shared_ptr<EffectContext> EffectImpl::createContext(const Parameter::Common& common,
bool processData) {
return std::make_shared<EffectContext>(common, processData);
}
RetCode EffectImpl::releaseContext() {
if (mImplContext) {
mImplContext.reset();
}
return RetCode::SUCCESS;
}
void EffectImpl::cleanUp() {
command(CommandId::STOP);
close();
}
RetCode EffectImpl::notifyEventFlag(uint32_t flag) {
if (!mProcessData) {
return RetCode::SUCCESS;
}
if (!mEventFlag) {
LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
return RetCode::ERROR_EVENT_FLAG_ERROR;
}
if (const auto ret = mEventFlag->wake(flag); ret != ::android::OK) {
LOG(ERROR) << getEffectName() << __func__ << ": wake failure with ret " << ret;
return RetCode::ERROR_EVENT_FLAG_ERROR;
}
return RetCode::SUCCESS;
}
IEffect::Status EffectImpl::status(binder_status_t status, size_t consumed, size_t produced) {
IEffect::Status ret;
ret.status = status;
ret.fmqConsumed = consumed;
ret.fmqProduced = produced;
return ret;
}
void EffectImpl::process() {
// ATRACE_CALL();
/**
* wait for the EventFlag without lock, it's ok because the mEfGroup pointer will not change
* in the life cycle of workerThread (threadLoop).
*/
uint32_t efState = 0;
if (!mEventFlag ||
::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
true /* retry */) ||
!(efState & mDataMqNotEmptyEf)) {
LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag - " << mEventFlag
<< " efState - " << std::hex << efState;
return;
}
{
std::lock_guard lg(mImplMutex);
if (mState != State::PROCESSING) {
LOG(DEBUG) << getEffectName() << " skip process in state: " << toString(mState);
return;
}
RETURN_VALUE_IF(!mImplContext, void(), "nullContext");
auto statusMQ = mImplContext->getStatusFmq();
auto inputMQ = mImplContext->getInputDataFmq();
auto outputMQ = mImplContext->getOutputDataFmq();
auto buffer = mImplContext->getWorkBuffer();
if (!inputMQ || !outputMQ) {
return;
}
assert(mImplContext->getWorkBufferSize() >=
std::max(inputMQ->availableToRead(), outputMQ->availableToWrite()));
auto processSamples = std::min(inputMQ->availableToRead(), outputMQ->availableToWrite());
if (processSamples) {
inputMQ->read(buffer, processSamples);
IEffect::Status status = effectProcessImpl(buffer, buffer, processSamples);
outputMQ->write(buffer, status.fmqProduced);
statusMQ->writeBlocking(&status, 1);
LOG(VERBOSE) << getEffectName() << __func__ << ": done processing, effect consumed "
<< status.fmqConsumed << " produced " << status.fmqProduced;
}
}
}
// A placeholder processing implementation to copy samples from input to output
IEffect::Status EffectImpl::effectProcessImpl(float* in, float* out, int samples) {
for (int i = 0; i < samples; i++) {
*out++ = *in++;
}
LOG(VERBOSE) << getEffectName() << __func__ << " done processing " << samples << " samples";
return {STATUS_OK, samples, samples};
}
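// Illustrative only (not part of the original source): a derived effect would
// typically override effectProcessImpl() with its own DSP; the class name and
// gain value below are made-up examples of that pattern.
#if 0
IEffect::Status MyGainEffect::effectProcessImpl(float* in, float* out, int samples) {
    constexpr float kGain = 0.5f;  // assumed fixed attenuation for the example
    for (int i = 0; i < samples; i++) {
        out[i] = in[i] * kGain;
    }
    return {STATUS_OK, samples, samples};
}
#endif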
} // namespace aidl::qti::effects

View File

@@ -1,70 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_EffectMainQti"
#include "effectFactory-impl/EffectFactory.h"
#include <android-base/logging.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
#include <android-base/properties.h>
#include <system/audio_config.h>
// { SEC_AUDIO_SUPPORT_AIDL_EFFECT
#include <system/audio.h>
// } SEC_AUDIO_SUPPORT_AIDL_EFFECT
#ifdef SEC_AUDIO_SUPPORT_AIDL_EFFECT
static const char* kDefaultConfigName = "audio_effects_config_sec.xml";
#else
/** Default name of effect configuration file. */
static const char* kDefaultConfigName = "audio_effects_config.xml";
#endif
static const char* kStubConfigName = "audio_effects_config_stub.xml";
static inline std::string getEffectConfig() {
auto stubmode = ::android::base::GetIntProperty<int8_t>("vendor.audio.hal.stubmode", 0);
if (stubmode) {
LOG(INFO) << __func__ << " using effects in stub mode";
return android::audio_find_readable_configuration_file(kStubConfigName);
}
return android::audio_find_readable_configuration_file(kDefaultConfigName);
}
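// For reference (not part of the original source): the property is read once at
// service start, so stub mode would typically be toggled with something like
// "adb shell setprop vendor.audio.hal.stubmode 1" followed by a restart of the
// effects HAL service.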
extern "C" __attribute__((visibility("default"))) binder_status_t registerService() {
auto configFile = getEffectConfig();
if (configFile == "") {
LOG(ERROR) << __func__ << ": config file " << kDefaultConfigName << " not found!";
return EXIT_FAILURE;
}
LOG(INFO) << __func__ << ": start factory with configFile:" << configFile;
auto effectFactory = ndk::SharedRefBase::make<aidl::qti::effects::Factory>(configFile);
int version = 0;
effectFactory->getInterfaceVersion(&version);
std::string serviceName = std::string() + effectFactory->descriptor + "/default";
binder_status_t status =
AServiceManager_addService(effectFactory->asBinder().get(), serviceName.c_str());
LOG(DEBUG) << __func__ << " " << serviceName << " version " << version << " status " << status;
return status;
}
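// Illustrative only (not part of the original source): a minimal sketch of how
// a vendor service binary could drive registerService(); the main() shown here
// and the thread-pool sizing are assumptions, not the actual service code.
#if 0
#include <android/binder_process.h>
#include <android/binder_status.h>
#include <cstdlib>

extern "C" binder_status_t registerService();

int main() {
    // Handle incoming binder calls on the joined main thread only.
    ABinderProcess_setThreadPoolMaxThreadCount(0);
    if (registerService() != STATUS_OK) {
        return EXIT_FAILURE;
    }
    ABinderProcess_joinThreadPool();
    return EXIT_SUCCESS;
}
#endif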

View File

@@ -1,134 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#include <cstddef>
#include <memory>
#define LOG_TAG "AHAL_EffectThreadQti"
#include <android-base/logging.h>
#include <pthread.h>
#include <sys/resource.h>
#include "effect-impl/EffectThread.h"
#include "effect-impl/EffectTypes.h"
using ::android::hardware::EventFlag;
using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
namespace aidl::qti::effects {
#define RET_SUCCESS_IF_THREAD_NOT_CREATED(threadCreated) \
{ \
if (!threadCreated) { \
LOG(VERBOSE) << __func__ << " no-op as thread wasn't created "; \
return RetCode::SUCCESS; \
} \
}
EffectThread::EffectThread() {
LOG(VERBOSE) << __func__ << this;
}
EffectThread::~EffectThread() {
destroyThread();
LOG(VERBOSE) << __func__ << " done" << this;
}
RetCode EffectThread::createThread(const std::string& name, int priority) {
if (mThread.joinable()) {
LOG(WARNING) << mName << __func__ << " thread already created, no-op";
return RetCode::SUCCESS;
}
mName = name;
mPriority = priority;
{
std::lock_guard lg(mThreadMutex);
mStop = true;
mExit = false;
}
mThread = std::thread(&EffectThread::threadLoop, this);
mThreadCreated = true;
LOG(VERBOSE) << mName << __func__ << " priority " << mPriority << " done";
return RetCode::SUCCESS;
}
RetCode EffectThread::destroyThread() {
RET_SUCCESS_IF_THREAD_NOT_CREATED(mThreadCreated);
{
std::lock_guard lg(mThreadMutex);
mStop = mExit = true;
}
mCv.notify_one();
if (mThread.joinable()) {
mThread.join();
}
LOG(DEBUG) << mName << __func__;
return RetCode::SUCCESS;
}
RetCode EffectThread::startThread() {
RET_SUCCESS_IF_THREAD_NOT_CREATED(mThreadCreated);
{
std::lock_guard lg(mThreadMutex);
mStop = false;
mCv.notify_one();
}
LOG(DEBUG) << mName << __func__;
return RetCode::SUCCESS;
}
RetCode EffectThread::stopThread() {
RET_SUCCESS_IF_THREAD_NOT_CREATED(mThreadCreated);
{
std::lock_guard lg(mThreadMutex);
mStop = true;
mCv.notify_one();
}
LOG(DEBUG) << mName << __func__;
return RetCode::SUCCESS;
}
void EffectThread::threadLoop() {
pthread_setname_np(pthread_self(), mName.substr(0, kMaxTaskNameLen - 1).c_str());
setpriority(PRIO_PROCESS, 0, mPriority);
LOG(VERBOSE) << mName << __func__ << "Enter: ";
while (true) {
{
std::unique_lock l(mThreadMutex);
::android::base::ScopedLockAssertion lock_assertion(mThreadMutex);
mCv.wait(l, [&]() REQUIRES(mThreadMutex) { return mExit || !mStop; });
if (mExit) {
LOG(VERBOSE) << " " << mName << __func__ << " EXIT!";
return;
}
}
process();
}
}
} // namespace aidl::qti::effects

View File

@@ -1,11 +0,0 @@
<!--
Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
SPDX-License-Identifier: BSD-3-Clause-Clear
-->
<manifest version="1.0" type="device">
<hal format="aidl">
<name>android.hardware.audio.effect</name>
<version>2</version>
<fqname>IFactory/default</fqname>
</hal>
</manifest>

View File

@@ -1,123 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <memory>
#include <vector>
#include <Utils.h>
#include <android-base/logging.h>
#include <fmq/AidlMessageQueue.h>
#include <fmq/EventFlag.h>
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include "EffectTypes.h"
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::Parameter;
namespace aidl::qti::effects {
class EffectContext {
public:
typedef ::android::AidlMessageQueue<
IEffect::Status, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
StatusMQ;
typedef ::android::AidlMessageQueue<
float, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
DataMQ;
EffectContext(const Parameter::Common& common, bool processData);
void initMessageQueues(bool processData);
virtual ~EffectContext();
void setVersion(int version) { mVersion = version; }
std::shared_ptr<StatusMQ> getStatusFmq() const;
std::shared_ptr<DataMQ> getInputDataFmq() const;
std::shared_ptr<DataMQ> getOutputDataFmq() const;
float* getWorkBuffer();
size_t getWorkBufferSize() const;
// reset buffer status by abandoning input data in FMQ
void resetBuffer();
void dupeFmq(IEffect::OpenEffectReturn* effectRet);
size_t getInputFrameSize() const;
size_t getOutputFrameSize() const;
int getSessionId() const;
int getIoHandle() const;
virtual void dupeFmqWithReopen(IEffect::OpenEffectReturn* effectRet);
virtual RetCode setOutputDevice(
const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& device);
virtual std::vector<aidl::android::media::audio::common::AudioDeviceDescription>
getOutputDevice();
virtual RetCode setAudioMode(const aidl::android::media::audio::common::AudioMode& mode);
virtual aidl::android::media::audio::common::AudioMode getAudioMode();
virtual RetCode setAudioSource(const aidl::android::media::audio::common::AudioSource& source);
virtual aidl::android::media::audio::common::AudioSource getAudioSource();
virtual RetCode setVolumeStereo(const Parameter::VolumeStereo& volumeStereo);
virtual Parameter::VolumeStereo getVolumeStereo();
virtual RetCode setCommon(const Parameter::Common& common);
virtual Parameter::Common getCommon();
virtual ::android::hardware::EventFlag* getStatusEventFlag();
virtual RetCode setOffload(bool offload);
protected:
int mVersion = 0;
size_t mInputFrameSize = 0;
size_t mOutputFrameSize = 0;
size_t mInputChannelCount = 0;
size_t mOutputChannelCount = 0;
Parameter::Common mCommon = {};
std::vector<aidl::android::media::audio::common::AudioDeviceDescription> mOutputDevice = {};
aidl::android::media::audio::common::AudioMode mMode =
aidl::android::media::audio::common::AudioMode::SYS_RESERVED_INVALID;
aidl::android::media::audio::common::AudioSource mSource =
aidl::android::media::audio::common::AudioSource::SYS_RESERVED_INVALID;
Parameter::VolumeStereo mVolumeStereo = {};
RetCode updateIOFrameSize(const Parameter::Common& common);
RetCode notifyDataMqUpdate();
bool mOffload = false;
private:
// fmq and buffers
bool mProcessData;
std::shared_ptr<StatusMQ> mStatusMQ = nullptr;
std::shared_ptr<DataMQ> mInputMQ = nullptr;
std::shared_ptr<DataMQ> mOutputMQ = nullptr;
// TODO handle effect process input and output
// work buffer set by effect instances; access and update happen on the same thread
std::vector<float> mWorkBuffer = {};
::android::hardware::EventFlag* mEfGroup = nullptr;
};
} // namespace aidl::qti::effects

View File

@@ -1,139 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <cstdlib>
#include <memory>
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <fmq/AidlMessageQueue.h>
#include "EffectContext.h"
#include "EffectThread.h"
#include "EffectTypes.h"
#include "effect-impl/EffectContext.h"
#include "effect-impl/EffectThread.h"
#include "effect-impl/EffectTypes.h"
extern "C" binder_exception_t destroyEffect(
const std::shared_ptr<aidl::android::hardware::audio::effect::IEffect>& instanceSp);
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::BnEffect;
using aidl::android::hardware::audio::effect::Parameter;
using aidl::android::hardware::audio::effect::CommandId;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::State;
using aidl::android::hardware::audio::effect::Flags;
namespace aidl::qti::effects {
class EffectImpl : public BnEffect, public EffectThread {
public:
EffectImpl() = default;
virtual ~EffectImpl() = default;
virtual ndk::ScopedAStatus open(const Parameter::Common& common,
const std::optional<Parameter::Specific>& specific,
OpenEffectReturn* ret) override;
virtual ndk::ScopedAStatus close() override;
virtual ndk::ScopedAStatus command(CommandId id) override;
virtual ndk::ScopedAStatus reopen(OpenEffectReturn* ret) override;
virtual ndk::ScopedAStatus getState(State* state) override;
virtual ndk::ScopedAStatus setParameter(const Parameter& param) override;
virtual ndk::ScopedAStatus getParameter(const Parameter::Id& id, Parameter* param) override;
virtual ndk::ScopedAStatus setParameterCommon(const Parameter& param);
virtual ndk::ScopedAStatus getParameterCommon(const Parameter::Tag& tag, Parameter* param);
/* Methods MUST be implemented by each effect instances */
virtual ndk::ScopedAStatus getDescriptor(Descriptor* desc) = 0;
virtual ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific) = 0;
virtual ndk::ScopedAStatus getParameterSpecific(const Parameter::Id& id,
Parameter::Specific* specific) = 0;
virtual std::string getEffectName() = 0;
virtual std::shared_ptr<EffectContext> createContext(const Parameter::Common& common,
bool processData);
virtual RetCode releaseContext() = 0;
/**
* @brief effectProcessImpl is running in worker thread which created in EffectThread.
*
* EffectThread ensures that effectProcessImpl is only called after startThread() has succeeded
* and before stopThread() completes.
*
* The effectProcessImpl implementation must not call any EffectThread interface, otherwise it
* will cause a deadlock.
*
* @param in address of input float buffer.
* @param out address of output float buffer.
* @param samples number of samples to process.
* @return IEffect::Status
*/
virtual IEffect::Status effectProcessImpl(float* in, float* out, int samples);
/**
* process() gets data from the data MQs and calls effectProcessImpl() for effect data processing.
* It is important for the implementation to use mImplMutex for context synchronization.
*/
void process() override;
protected:
// current Hal version
int mVersion = 0;
// Use kEventFlagNotEmpty for V1 HAL, kEventFlagDataMqNotEmpty for V2 and above
int mDataMqNotEmptyEf = aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
State mState = State::INIT;
const Descriptor* mDescriptor;
const std::string* mEffectName;
IEffect::Status status(binder_status_t status, size_t consumed, size_t produced);
void cleanUp();
std::mutex mImplMutex;
std::shared_ptr<EffectContext> mImplContext;
/**
* Optional CommandId handling methods for effects to override.
* For CommandId::START, EffectImpl calls commandImpl before starting the EffectThread
* processing.
* For CommandId::STOP and CommandId::RESET, EffectImpl calls commandImpl after stopping the
* EffectThread processing.
*/
virtual ndk::ScopedAStatus commandImpl(CommandId id);
RetCode notifyEventFlag(uint32_t flag);
// used with data processing.
::android::hardware::EventFlag* mEventFlag;
private:
bool mProcessData = false;
bool isOffloadOrBypass() {
return (mDescriptor->common.flags.hwAcceleratorMode == Flags::HardwareAccelerator::TUNNEL ||
mDescriptor->common.flags.bypass);
}
};
} // namespace aidl::qti::effects

View File

@@ -1,54 +0,0 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <algorithm>
#include <tuple>
#include <utility>
#include <vector>
namespace aidl::qti::effects {
template <typename T>
bool isInRange(const T& value, const T& low, const T& high) {
return (value >= low) && (value <= high);
}
template <typename T, std::size_t... Is>
bool isTupleInRange(const T& test, const T& min, const T& max, std::index_sequence<Is...>) {
return (isInRange(std::get<Is>(test), std::get<Is>(min), std::get<Is>(max)) && ...);
}
template <typename T, std::size_t TupSize = std::tuple_size_v<T>>
bool isTupleInRange(const T& test, const T& min, const T& max) {
return isTupleInRange(test, min, max, std::make_index_sequence<TupSize>{});
}
template <typename T, typename F>
bool isTupleInRange(const std::vector<T>& cfgs, const T& min, const T& max, const F& func) {
auto minT = func(min), maxT = func(max);
return std::all_of(cfgs.cbegin(), cfgs.cend(),
[&](const T& cfg) { return isTupleInRange(func(cfg), minT, maxT); });
}
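// Illustrative only (not part of the original header): a sketch of how the
// range helpers above might be used; the tuple layout and bounds are made up.
#if 0
inline bool rangeHelpersExample() {
    using Cfg = std::tuple<int, float>;
    const Cfg min{0, 0.0f};
    const Cfg max{10, 1.0f};
    // Element-wise check of a single tuple against [min, max].
    const bool inRange = isTupleInRange(Cfg{5, 0.5f}, min, max);      // true
    const bool outOfRange = isTupleInRange(Cfg{11, 0.5f}, min, max);  // false, 11 > 10
    // Vector overload: each element is projected through the functor before checking.
    const std::vector<Cfg> cfgs = {{1, 0.1f}, {2, 0.2f}};
    const bool allInRange = isTupleInRange(cfgs, min, max, [](const Cfg& c) { return c; });
    return inRange && !outOfRange && allInRange;
}
#endif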
} // namespace aidl::qti::effects

View File

@@ -1,72 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <atomic>
#include <memory>
#include <string>
#include <thread>
#include <android-base/thread_annotations.h>
#include <fmq/EventFlag.h>
#include <system/thread_defs.h>
#include "effect-impl/EffectContext.h"
#include "effect-impl/EffectTypes.h"
namespace aidl::qti::effects {
class EffectThread {
public:
// default priority is the same as HIDL: ANDROID_PRIORITY_URGENT_AUDIO
EffectThread();
virtual ~EffectThread();
// called by effect implementation.
RetCode createThread(const std::string& name, int priority = ANDROID_PRIORITY_URGENT_AUDIO);
RetCode destroyThread();
RetCode startThread();
RetCode stopThread();
// Will call process() in a loop if the thread is running.
void threadLoop();
/**
* process() calls effectProcessImpl() for effect data processing. The processing must run under
* the effect thread mutex mThreadMutex, to avoid the effect state changing before/during data
* processing and to keep the thread and effect state consistent.
*/
virtual void process() = 0;
private:
static constexpr int kMaxTaskNameLen = 15;
std::mutex mThreadMutex;
std::condition_variable mCv;
bool mStop GUARDED_BY(mThreadMutex) = true;
bool mExit GUARDED_BY(mThreadMutex) = false;
std::thread mThread;
int mPriority;
std::string mName;
bool mThreadCreated = false;
};
} // namespace aidl::qti::effects

View File

@@ -1,139 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023-2024 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <string>
#include <aidl/android/hardware/audio/effect/BnEffect.h>
#include <aidl/android/hardware/audio/effect/Range.h>
#include <android-base/logging.h>
#include <system/audio_effects/aidl_effects_utils.h>
typedef binder_exception_t (*EffectCreateFunctor)(
const ::aidl::android::media::audio::common::AudioUuid*,
std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>*);
typedef binder_exception_t (*EffectDestroyFunctor)(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&);
typedef binder_exception_t (*EffectQueryFunctor)(
const ::aidl::android::media::audio::common::AudioUuid*,
::aidl::android::hardware::audio::effect::Descriptor*);
struct effect_dl_interface_s {
EffectCreateFunctor createEffectFunc;
EffectDestroyFunctor destroyEffectFunc;
EffectQueryFunctor queryEffectFunc;
};
namespace aidl::qti::effects {
enum class RetCode {
SUCCESS,
ERROR_ILLEGAL_PARAMETER, /* Illegal parameter */
ERROR_THREAD, /* Effect thread error */
ERROR_NULL_POINTER, /* NULL pointer */
ERROR_ALIGNMENT_ERROR, /* Memory alignment error */
ERROR_BLOCK_SIZE_EXCEED, /* Maximum block size exceeded */
ERROR_EFFECT_LIB_ERROR, /* Effect implementation library error */
ERROR_EVENT_FLAG_ERROR /* Error with effect event flags */
};
static const int INVALID_AUDIO_SESSION_ID = -1;
inline std::ostream& operator<<(std::ostream& out, const RetCode& code) {
switch (code) {
case RetCode::SUCCESS:
return out << "SUCCESS";
case RetCode::ERROR_ILLEGAL_PARAMETER:
return out << "ERROR_ILLEGAL_PARAMETER";
case RetCode::ERROR_THREAD:
return out << "ERROR_THREAD";
case RetCode::ERROR_NULL_POINTER:
return out << "ERROR_NULL_POINTER";
case RetCode::ERROR_ALIGNMENT_ERROR:
return out << "ERROR_ALIGNMENT_ERROR";
case RetCode::ERROR_BLOCK_SIZE_EXCEED:
return out << "ERROR_BLOCK_SIZE_EXCEED";
case RetCode::ERROR_EFFECT_LIB_ERROR:
return out << "ERROR_EFFECT_LIB_ERROR";
case RetCode::ERROR_EVENT_FLAG_ERROR:
return out << "ERROR_EVENT_FLAG_ERROR";
}
return out << "EnumError: " << code;
}
#define RETURN_IF_ASTATUS_NOT_OK(status, message) \
do { \
const ::ndk::ScopedAStatus curr_status = (status); \
if (!curr_status.isOk()) { \
LOG(ERROR) << __func__ << ":" << __LINE__ \
<< "return with status: " << curr_status.getDescription() << (message); \
return ndk::ScopedAStatus::fromExceptionCodeWithMessage( \
curr_status.getExceptionCode(), (message)); \
} \
} while (0)
#define RETURN_IF(expr, exception, message) \
do { \
if (expr) { \
LOG(VERBOSE) << __func__ << ":" << __LINE__ << " return with expr " << #expr; \
return ndk::ScopedAStatus::fromExceptionCodeWithMessage((exception), (message)); \
} \
} while (0)
#define RETURN_OK_IF(expr) \
do { \
if (expr) { \
LOG(VERBOSE) << __func__ << ":" << __LINE__ << " return with expr " << #expr; \
return ndk::ScopedAStatus::ok(); \
} \
} while (0)
#define RETURN_VALUE_IF(expr, ret, log) \
do { \
if (expr) { \
LOG(ERROR) << __func__ << ":" << __LINE__ << " return with expr \"" << #expr \
<< "\":" << (log); \
return ret; \
} \
} while (0)
#define RETURN_IF_BINDER_EXCEPTION(functor) \
{ \
binder_exception_t exception = functor; \
if (EX_NONE != exception) { \
LOG(ERROR) << #functor << ": failed with error " << exception; \
return ndk::ScopedAStatus::fromExceptionCode(exception); \
} \
}
/**
* Make a Range::$EffectType$Range.
* T: The $EffectType$, Visualizer for example.
* Tag: The union tag name in $EffectType$ definition, latencyMs for example.
* l: The value of Range::$EffectType$Range.min.
* r: The value of Range::$EffectType$Range.max.
*/
#define MAKE_RANGE(T, Tag, l, r) \
{ .min = T::make<T::Tag>(l), .max = T::make<T::Tag>(r) }
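// Illustrative only (not part of the original header): following the comment
// above, a Visualizer latency range could be declared as
//     MAKE_RANGE(Visualizer, latencyMs, 0, 3000)
// which expands to
//     { .min = Visualizer::make<Visualizer::latencyMs>(0),
//       .max = Visualizer::make<Visualizer::latencyMs>(3000) }
// The 0..3000 ms bounds are assumed values for the example.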
} // namespace aidl::qti::effects

View File

@@ -1,464 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <map>
#include <aidl/android/media/audio/common/AudioUuid.h>
#include <android-base/stringprintf.h>
namespace aidl::qti::effects {
using ::aidl::android::media::audio::common::AudioUuid;
static inline std::string toString(const AudioUuid& uuid) {
return ::android::base::StringPrintf("%08x-%04x-%04x-%04x-%02x%02x%02x%02x%02x%02x",
uuid.timeLow, uuid.timeMid, uuid.timeHiAndVersion,
uuid.clockSeq, uuid.node[0], uuid.node[1], uuid.node[2],
uuid.node[3], uuid.node[4], uuid.node[5]);
}
// ec7178ec-e5e1-4432-a3f4-4657e6795210
static const AudioUuid kEffectNullUuid = {static_cast<int32_t>(0xec7178ec),
0xe5e1,
0x4432,
0xa3f4,
{0x46, 0x57, 0xe6, 0x79, 0x52, 0x10}};
// Zero UUID
static const AudioUuid kEffectZeroUuid = {
static_cast<int32_t>(0x0), 0x0, 0x0, 0x0, {0x0, 0x0, 0x0, 0x0, 0x0, 0x0}};
// 7b491460-8d4d-11e0-bd61-0002a5d5c51b.
static const AudioUuid kAcousticEchoCancelerTypeUUID = {static_cast<int32_t>(0x7b491460),
0x8d4d,
0x11e0,
0xbd61,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// bb392ec0-8d4d-11e0-a896-0002a5d5c51b
static const AudioUuid kAcousticEchoCancelerSwImplUUID = {static_cast<int32_t>(0xbb392ec0),
0x8d4d,
0x11e0,
0xa896,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 0a8abfe0-654c-11e0-ba26-0002a5d5c51b
static const AudioUuid kAutomaticGainControlV1TypeUUID = {static_cast<int32_t>(0x0a8abfe0),
0x654c,
0x11e0,
0xba26,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// aa8130e0-66fc-11e0-bad0-0002a5d5c51b
static const AudioUuid kAutomaticGainControlV1SwImplUUID = {static_cast<int32_t>(0xaa8130e0),
0x66fc,
0x11e0,
0xbad0,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// ae3c653b-be18-4ab8-8938-418f0a7f06ac
static const AudioUuid kAutomaticGainControlV2TypeUUID = {static_cast<int32_t>(0xae3c653b),
0xbe18,
0x4ab8,
0x8938,
{0x41, 0x8f, 0x0a, 0x7f, 0x06, 0xac}};
// 89f38e65-d4d2-4d64-ad0e-2b3e799ea886
static const AudioUuid kAutomaticGainControlV2SwImplUUID = {static_cast<int32_t>(0x89f38e65),
0xd4d2,
0x4d64,
0xad0e,
{0x2b, 0x3e, 0x79, 0x9e, 0xa8, 0x86}};
// 0634f220-ddd4-11db-a0fc-0002a5d5c51b
static const AudioUuid kBassBoostTypeUUID = {static_cast<int32_t>(0x0634f220),
0xddd4,
0x11db,
0xa0fc,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// fa8181f2-588b-11ed-9b6a-0242ac120002
static const AudioUuid kBassBoostSwImplUUID = {static_cast<int32_t>(0xfa8181f2),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 8631f300-72e2-11df-b57e-0002a5d5c51b
static const AudioUuid kBassBoostBundleImplUUID = {static_cast<int32_t>(0x8631f300),
0x72e2,
0x11df,
0xb57e,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 14804144-a5ee-4d24-aa88-0002a5d5c51b
static const AudioUuid kBassBoostProxyUUID = {static_cast<int32_t>(0x14804144),
0xa5ee,
0x4d24,
0xaa88,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 381e49cc-a858-4aa2-87f6-e8388e7601b2
static const AudioUuid kDownmixTypeUUID = {static_cast<int32_t>(0x381e49cc),
0xa858,
0x4aa2,
0x87f6,
{0xe8, 0x38, 0x8e, 0x76, 0x01, 0xb2}};
// fa8187ba-588b-11ed-9b6a-0242ac120002
static const AudioUuid kDownmixSwImplUUID = {static_cast<int32_t>(0xfa8187ba),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 93f04452-e4fe-41cc-91f9-e475b6d1d69f
static const AudioUuid kDownmixImplUUID = {static_cast<int32_t>(0x93f04452),
0xe4fe,
0x41cc,
0x91f9,
{0xe4, 0x75, 0xb6, 0xd1, 0xd6, 0x9f}};
// 0bed4300-ddd6-11db-8f34-0002a5d5c51b.
static const AudioUuid kEqualizerTypeUUID = {static_cast<int32_t>(0x0bed4300),
0xddd6,
0x11db,
0x8f34,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 0bed4300-847d-11df-bb17-0002a5d5c51b
static const AudioUuid kEqualizerSwImplUUID = {static_cast<int32_t>(0x0bed4300),
0x847d,
0x11df,
0xbb17,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// ce772f20-847d-11df-bb17-0002a5d5c51b
static const AudioUuid kEqualizerBundleImplUUID = {static_cast<int32_t>(0xce772f20),
0x847d,
0x11df,
0xbb17,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// c8e70ecd-48ca-456e-8a4f-0002a5d5c51b
static const AudioUuid kEqualizerProxyUUID = {static_cast<int32_t>(0xc8e70ecd),
0x48ca,
0x456e,
0x8a4f,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 7261676f-6d75-7369-6364-28e2fd3ac39e
static const AudioUuid kDynamicsProcessingTypeUUID = {static_cast<int32_t>(0x7261676f),
0x6d75,
0x7369,
0x6364,
{0x28, 0xe2, 0xfd, 0x3a, 0xc3, 0x9e}};
// fa818d78-588b-11ed-9b6a-0242ac120002
static const AudioUuid kDynamicsProcessingSwImplUUID = {static_cast<int32_t>(0xfa818d78),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// e0e6539b-1781-7261-676f-6d7573696340
static const AudioUuid kDynamicsProcessingImplUUID = {static_cast<int32_t>(0xe0e6539b),
0x1781,
0x7261,
0x676f,
{0x6d, 0x75, 0x73, 0x69, 0x63, 0x40}};
// 1411e6d6-aecd-4021-a1cf-a6aceb0d71e5
static const AudioUuid kHapticGeneratorTypeUUID = {static_cast<int32_t>(0x1411e6d6),
0xaecd,
0x4021,
0xa1cf,
{0xa6, 0xac, 0xeb, 0x0d, 0x71, 0xe5}};
// fa819110-588b-11ed-9b6a-0242ac120002
static const AudioUuid kHapticGeneratorSwImplUUID = {static_cast<int32_t>(0xfa819110),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 97c4acd1-8b82-4f2f-832e-c2fe5d7a9931
static const AudioUuid kHapticGeneratorImplUUID = {static_cast<int32_t>(0x97c4acd1),
0x8b82,
0x4f2f,
0x832e,
{0xc2, 0xfe, 0x5d, 0x7a, 0x99, 0x31}};
// fe3199be-aed0-413f-87bb-11260eb63cf1
static const AudioUuid kLoudnessEnhancerTypeUUID = {static_cast<int32_t>(0xfe3199be),
0xaed0,
0x413f,
0x87bb,
{0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}};
// fa819610-588b-11ed-9b6a-0242ac120002
static const AudioUuid kLoudnessEnhancerSwImplUUID = {static_cast<int32_t>(0xfa819610),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// fa415329-2034-4bea-b5dc-5b381c8d1e2c
static const AudioUuid kLoudnessEnhancerImplUUID = {static_cast<int32_t>(0xfa415329),
0x2034,
0x4bea,
0xb5dc,
{0x5b, 0x38, 0x1c, 0x8d, 0x1e, 0x2c}};
// c2e5d5f0-94bd-4763-9cac-4e234d06839e
static const AudioUuid kEnvReverbTypeUUID = {static_cast<int32_t>(0xc2e5d5f0),
0x94bd,
0x4763,
0x9cac,
{0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}};
// fa819886-588b-11ed-9b6a-0242ac120002
static const AudioUuid kEnvReverbSwImplUUID = {static_cast<int32_t>(0xfa819886),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 4a387fc0-8ab3-11df-8bad-0002a5d5c51b
static const AudioUuid kAuxEnvReverbImplUUID = {static_cast<int32_t>(0x4a387fc0),
0x8ab3,
0x11df,
0x8bad,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// c7a511a0-a3bb-11df-860e-0002a5d5c51b
static const AudioUuid kInsertEnvReverbImplUUID = {static_cast<int32_t>(0xc7a511a0),
0xa3bb,
0x11df,
0x860e,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 58b4b260-8e06-11e0-aa8e-0002a5d5c51b
static const AudioUuid kNoiseSuppressionTypeUUID = {static_cast<int32_t>(0x58b4b260),
0x8e06,
0x11e0,
0xaa8e,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// c06c8400-8e06-11e0-9cb6-0002a5d5c51b
static const AudioUuid kNoiseSuppressionSwImplUUID = {static_cast<int32_t>(0xc06c8400),
0x8e06,
0x11e0,
0x9cb6,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 47382d60-ddd8-11db-bf3a-0002a5d5c51b
static const AudioUuid kPresetReverbTypeUUID = {static_cast<int32_t>(0x47382d60),
0xddd8,
0x11db,
0xbf3a,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// fa8199c6-588b-11ed-9b6a-0242ac120002
static const AudioUuid kPresetReverbSwImplUUID = {static_cast<int32_t>(0xfa8199c6),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// f29a1400-a3bb-11df-8ddc-0002a5d5c51b
static const AudioUuid kAuxPresetReverbImplUUID = {static_cast<int32_t>(0xf29a1400),
0xa3bb,
0x11df,
0x8ddc,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 172cdf00-a3bc-11df-a72f-0002a5d5c51b
static const AudioUuid kInsertPresetReverbImplUUID = {static_cast<int32_t>(0x172cdf00),
0xa3bc,
0x11df,
0xa72f,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// 37cc2c00-dddd-11db-8577-0002a5d5c51b
static const AudioUuid kVirtualizerTypeUUID = {static_cast<int32_t>(0x37cc2c00),
0xdddd,
0x11db,
0x8577,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// fa819d86-588b-11ed-9b6a-0242ac120002
static const AudioUuid kVirtualizerSwImplUUID = {static_cast<int32_t>(0xfa819d86),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 1d4033c0-8557-11df-9f2d-0002a5d5c51b
static const AudioUuid kVirtualizerBundleImplUUID = {static_cast<int32_t>(0x1d4033c0),
0x8557,
0x11df,
0x9f2d,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// d3467faa-acc7-4d34-acaf-0002a5d5c51b
static const AudioUuid kVirtualizerProxyUUID = {static_cast<int32_t>(0xd3467faa),
0xacc7,
0x4d34,
0xacaf,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
// fa819f3e-588b-11ed-9b6a-0242ac120002
static const AudioUuid kVisualizerTypeUUID = {static_cast<int32_t>(0xfa819f3e),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// fa81a0f6-588b-11ed-9b6a-0242ac120002
static const AudioUuid kVisualizerSwImplUUID = {static_cast<int32_t>(0xfa81a0f6),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// fa81a2b8-588b-11ed-9b6a-0242ac120002
static const AudioUuid kVolumeTypeUUID = {static_cast<int32_t>(0xfa81a2b8),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// fa81a718-588b-11ed-9b6a-0242ac120002
static const AudioUuid kVolumeSwImplUUID = {static_cast<int32_t>(0xfa81a718),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 119341a0-8469-11df-81f9-0002a5d5c51b
static const AudioUuid kVolumeBundleImplUUID = {static_cast<int32_t>(0x119341a0),
0x8469,
0x11df,
0x81f9,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
static const AudioUuid kExtensionEffectTypeUUID = {static_cast<int32_t>(0xfa81dbde),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// fa81dd00-588b-11ed-9b6a-0242ac120002
static const AudioUuid kExtensionEffectImplUUID = {static_cast<int32_t>(0xfa81dd00),
0x588b,
0x11ed,
0x9b6a,
{0x02, 0x42, 0xac, 0x12, 0x00, 0x02}};
// 08b8b058-0590-11e5-ac71-0025b32654a0
static const AudioUuid kMusicVolumeListenerUUID = {static_cast<int32_t>(0x08b8b058),
0x0590,
0x11e5,
0xac71,
{0x00, 0x25, 0xb3, 0x26, 0x54, 0xa0}};
// 0956df94-0590-11e5-bdbe-0025b32654a0
static const AudioUuid kRingVolumeListenerUUID = {static_cast<int32_t>(0x0956df94),
0x0590,
0x11e5,
0xbdbe,
{0x00, 0x25, 0xb3, 0x26, 0x54, 0xa0}};
// 09f303e2-0590-11e5-8fdb-0025b32654a0
static const AudioUuid kAlarmVolumeListenerUUID = {static_cast<int32_t>(0x09f303e2),
0x0590,
0x11e5,
0x8fdb,
{0x00, 0x25, 0xb3, 0x26, 0x54, 0xa0}};
// 0ace5c08-0590-11e5-ae9e-0025b32654a0
static const AudioUuid kVoiceCallVolumeListenerUUID = {static_cast<int32_t>(0x0ace5c08),
0x0590,
0x11e5,
0xae9e,
{0x00, 0x25, 0xb3, 0x26, 0x54, 0xa0}};
// 0b776dde-0590-11e5-81ba-0025b32654a0
static const AudioUuid kNotificationVolumeListenerUUID = {static_cast<int32_t>(0x0b776dde),
0x0590,
0x11e5,
0x81ba,
{0x00, 0x25, 0xb3, 0x26, 0x54, 0xa0}};
// 0f8d0d2a-59e5-45fe-b6e4-248c8a799109
static const AudioUuid kAcousticEchoCancelerQtiUUID = {static_cast<int32_t>(0x0f8d0d2a),
0x59e5,
0x45fe,
0xb6e4,
{0x24, 0x8c, 0x8a, 0x79, 0x91, 0x09}};
// 1d97bb0b-9e2f-4403-9ae3-58c2554306f8
static const AudioUuid kNoiseSuppressionQtiUUID = {static_cast<int32_t>(0x1d97bb0b),
0x9e2f,
0x4403,
0x9ae3,
{0x58, 0xc2, 0x55, 0x43, 0x06, 0xf8}};
// 7a8044a0-1a71-11e3-a184-0002a5d5c51b
static const AudioUuid kVisualizerOffloadQtiUUID = {static_cast<int32_t>(0x7a8044a0),
0x1a71,
0x11e3,
0xa184,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload bassboost UUID: 2c4a8c24-1581-487f-94f6-0002a5d5c51b */
static const AudioUuid kBassBoostOffloadQtiUUID = {static_cast<int32_t>(0x2c4a8c24),
0x1581,
0x487f,
0x94f6,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload Equalizer UUID: a0dac280-401c-11e3-9379-0002a5d5c51b */
static const AudioUuid kEqualizerOffloadQtiUUID = {static_cast<int32_t>(0xa0dac280),
0x401c,
0x11e3,
0x9379,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload virtualizer UUID: 509a4498-561a-4bea-b3b1-0002a5d5c51b */
static const AudioUuid kVirtualizerOffloadQtiUUID = {static_cast<int32_t>(0x509a4498),
0x561a,
0x4bea,
0xb3b1,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload auxiliary environmental reverb UUID: 79a18026-18fd-4185-8233-0002a5d5c51b */
static const AudioUuid kAuxEnvReverbOffloadQtiUUID = {static_cast<int32_t>(0x79a18026),
0x18fd,
0x4185,
0x8233,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload insert environmental reverb UUID: eb64ea04-973b-43d2-8f5e-0002a5d5c51b */
static const AudioUuid kInsertEnvReverbOffloadQtiUUID = {static_cast<int32_t>(0xeb64ea04),
0x973b,
0x43d2,
0x8f5e,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload auxiliary preset reverb UUID: 6987be09-b142-4b41-9056-0002a5d5c51b */
static const AudioUuid kAuxPresetReverbOffloadQtiUUID = {static_cast<int32_t>(0x6987be09),
0xb142,
0x4b41,
0x9056,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Offload insert preset reverb UUID: aa2bebf6-47cf-4613-9bca-0002a5d5c51b */
static const AudioUuid kInsertPresetReverbOffloadQtiUUID = {static_cast<int32_t>(0xaa2bebf6),
0x47cf,
0x4613,
0x9bca,
{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}};
/* Quasar UUID: 71d0e2ee-e44d-483d-a809-09e75ee55ecd */
static const AudioUuid kQuasarEffectQtiUUID = {static_cast<int32_t>(0x71d0e2ee),
0xe44d,
0x483d,
0xa809,
{0x09, 0xe7, 0x5e, 0xe5, 0x5e, 0xcd}};
/**
* @brief A map between effect name and effect type UUID.
 * Every <name> attribute in an effect/effectProxy entry of audio_effects.xml should be listed in
 * this map. The map is needed because existing audio_effects.xml files do not define a type UUID;
 * an illustrative lookup sketch follows the map.
*/
static const std::map<const std::string /* effect type */, const AudioUuid&> kUuidNameTypeMap = {
{"aec", kAcousticEchoCancelerTypeUUID}, // TODO aec vs AcousticEcho
{"agc", kAutomaticGainControlV1TypeUUID}, // TODO agc vs Automatic Gain
{"bassboost", kBassBoostTypeUUID},
{"downmix", kDownmixTypeUUID},
{"dynamics_processing", kDynamicsProcessingTypeUUID},
{"equalizer", kEqualizerTypeUUID},
{"haptic_generator", kHapticGeneratorTypeUUID},
{"loudness_enhancer", kLoudnessEnhancerTypeUUID},
{"env_reverb", kEnvReverbTypeUUID},
{"ns", kNoiseSuppressionTypeUUID}, // TODO ns or noise_suppression
{"preset_reverb", kPresetReverbTypeUUID},
{"reverb_env_aux", kEnvReverbTypeUUID},
{"reverb_env_ins", kEnvReverbTypeUUID},
{"reverb_pre_aux", kPresetReverbTypeUUID},
{"reverb_pre_ins", kPresetReverbTypeUUID},
{"virtualizer", kVirtualizerTypeUUID},
{"visualizer", kVisualizerTypeUUID},
{"volume", kVolumeTypeUUID},
{"voice_helper", kVoiceCallVolumeListenerUUID},
{"music_helper", kMusicVolumeListenerUUID},
{"alarm_helper", kAlarmVolumeListenerUUID},
{"ring_helper", kRingVolumeListenerUUID},
{"notification_helper", kNotificationVolumeListenerUUID},
// {"audiosphere", kNotificationVolumeListenerUUID},
{"quasar", kQuasarEffectQtiUUID},
};
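// Illustrative sketch (not part of the original sources): a lookup against kUuidNameTypeMap as an
// audio_effects.xml parser might perform it; the helper name resolveTypeUuid is hypothetical.
//
//   std::optional<AudioUuid> resolveTypeUuid(const std::string& effectName) {
//       if (const auto it = kUuidNameTypeMap.find(effectName); it != kUuidNameTypeMap.end()) {
//           return it->second;
//       }
//       return std::nullopt; // unknown <name> attribute; the caller decides how to handle it
//   }
//
//   // e.g. resolveTypeUuid("bassboost") yields kBassBoostTypeUUID.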
} // namespace aidl::qti::effects

View File

@@ -1,143 +0,0 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <functional>
#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include <cutils/properties.h>
#include <system/audio_effects/effect_uuid.h>
#include <tinyxml2.h>
#include <aidl/android/hardware/audio/effect/Processing.h>
#include "effect-impl/EffectTypes.h"
#include "effect-impl/EffectUUID.h"
using aidl::android::hardware::audio::effect::getEffectTypeUuidAcousticEchoCanceler;
using aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV1;
using aidl::android::hardware::audio::effect::getEffectTypeUuidAutomaticGainControlV2;
using aidl::android::hardware::audio::effect::getEffectTypeUuidBassBoost;
using aidl::android::hardware::audio::effect::getEffectTypeUuidDownmix;
using aidl::android::hardware::audio::effect::getEffectTypeUuidDynamicsProcessing;
using aidl::android::hardware::audio::effect::getEffectTypeUuidEqualizer;
using aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator;
using aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer;
using aidl::android::hardware::audio::effect::getEffectTypeUuidEnvReverb;
using aidl::android::hardware::audio::effect::getEffectTypeUuidPresetReverb;
using aidl::android::hardware::audio::effect::getEffectTypeUuidNoiseSuppression;
using aidl::android::hardware::audio::effect::getEffectTypeUuidSpatializer;
using aidl::android::hardware::audio::effect::getEffectTypeUuidVirtualizer;
using aidl::android::hardware::audio::effect::getEffectTypeUuidVisualizer;
using aidl::android::hardware::audio::effect::getEffectTypeUuidVolume;
using aidl::android::hardware::audio::effect::getEffectUuidZero;
using aidl::android::hardware::audio::effect::Processing;
namespace aidl::qti::effects {
/**
* Library contains a mapping from library name to path.
* Effect contains a mapping from effect name to Libraries and implementation UUID.
* Pre/post processor contains a mapping from processing name to effect names.
*/
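/*
 * Illustrative sketch (not from the original sources) of how the parsed maps might relate for a
 * typical vendor audio_effects.xml; the library name, path, and UUIDs below are placeholders:
 *
 *   mLibraryMap:    "offload_bundle" -> "/vendor/lib64/soundfx/libqcompostprocbundle.so"
 *   mEffectsMap:    "equalizer"      -> EffectLibraries{ .libraries = { Library{
 *                                           .name = "offload_bundle",
 *                                           .uuid = <implementation UUID>,
 *                                           .type = <equalizer type UUID> } } }
 *   mProcessingMap: Processing::Type (stream type or audio source) -> list of EffectLibraries
 */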
class EffectConfig {
public:
explicit EffectConfig(const std::string& file);
struct Library {
std::string name; // library name
::aidl::android::media::audio::common::AudioUuid uuid; // implementation UUID
std::optional<::aidl::android::media::audio::common::AudioUuid> type; // optional type UUID
};
// <effects>
struct EffectLibraries {
std::optional<struct Library> proxyLibrary;
std::vector<struct Library> libraries;
};
int getSkippedElements() const { return mSkippedElements; }
const std::unordered_map<std::string, std::string> getLibraryMap() const { return mLibraryMap; }
const std::unordered_map<std::string, struct EffectLibraries> getEffectsMap() const {
return mEffectsMap;
}
static bool findUuid(const std::pair<std::string, struct EffectLibraries>& effectElem,
::aidl::android::media::audio::common::AudioUuid* uuid);
using ProcessingLibrariesMap = std::map<Processing::Type, std::vector<struct EffectLibraries>>;
const ProcessingLibrariesMap& getProcessingMap() const;
private:
static constexpr const char* kEffectLibPath[] =
#ifdef __LP64__
{"/odm/lib64/soundfx", "/vendor/lib64/soundfx", "/system/lib64/soundfx"};
#else
{"/odm/lib/soundfx", "/vendor/lib/soundfx", "/system/lib/soundfx"};
#endif
int mSkippedElements;
/* Parsed Libraries result */
std::unordered_map<std::string, std::string> mLibraryMap;
/* Parsed Effects result */
std::unordered_map<std::string, struct EffectLibraries> mEffectsMap;
/**
* For parsed pre/post processing result: {key: AudioStreamType/AudioSource, value:
* EffectLibraries}
*/
ProcessingLibrariesMap mProcessingMap;
/** @return all `node`s children that are elements and match the tag if provided. */
std::vector<std::reference_wrapper<const tinyxml2::XMLElement>> getChildren(
const tinyxml2::XMLNode& node, const char* childTag = nullptr);
/** Parse a library xml node and push the result into mLibraryMap, or return false on failure. */
bool parseLibrary(const tinyxml2::XMLElement& xml);
/** Parse an effect from an xml element describing it.
* @return true and pushes the effect in mEffectsMap on success, false on failure.
*/
bool parseEffect(const tinyxml2::XMLElement& xml);
bool parseProcessing(Processing::Type::Tag typeTag, const tinyxml2::XMLElement& xml);
// Function to parse effect.library name and effect.uuid from xml
bool parseLibrary(const tinyxml2::XMLElement& xml, struct Library& library,
bool isProxy = false);
const char* dump(const tinyxml2::XMLElement& element,
tinyxml2::XMLPrinter&& printer = {}) const;
bool resolveLibrary(const std::string& path, std::string* resolvedPath);
std::optional<Processing::Type> stringToProcessingType(Processing::Type::Tag typeTag,
const std::string& type);
};
} // namespace aidl::qti::effects

View File

@@ -1,135 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes from Qualcomm Innovation Center, Inc. are provided under the following license:
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <any>
#include <map>
#include <optional>
#include <set>
#include <tuple>
#include <vector>
#include <aidl/android/hardware/audio/effect/BnFactory.h>
#include <android-base/thread_annotations.h>
#include "EffectConfig.h"
using aidl::android::hardware::audio::effect::IEffect;
using aidl::android::hardware::audio::effect::BnFactory;
using aidl::android::hardware::audio::effect::Descriptor;
using aidl::android::hardware::audio::effect::Processing;
namespace aidl::qti::effects {
class Factory : public BnFactory {
public:
explicit Factory(const std::string& file);
/**
* @brief Get identity of all effects supported by the device, with the optional filter by type
* and/or by instance UUID.
*
* @param in_type Type UUID.
* @param in_instance Instance UUID.
* @param in_proxy Proxy UUID.
* @param out_descriptor List of Descriptors.
* @return ndk::ScopedAStatus
*/
ndk::ScopedAStatus queryEffects(
const std::optional<::aidl::android::media::audio::common::AudioUuid>& in_type,
const std::optional<::aidl::android::media::audio::common::AudioUuid>& in_instance,
const std::optional<::aidl::android::media::audio::common::AudioUuid>& in_proxy,
std::vector<Descriptor>* out_descriptor) override;
/**
* @brief Query list of defined processing, with the optional filter by AudioStreamType
*
* @param in_type Type of processing, could be AudioStreamType or AudioSource. Optional.
* @param _aidl_return List of processing filtered by in_type.
* @return ndk::ScopedAStatus
*/
ndk::ScopedAStatus queryProcessing(const std::optional<Processing::Type>& in_type,
std::vector<Processing>* _aidl_return) override;
/**
* @brief Create an effect instance for a certain implementation (identified by UUID).
*
* @param in_impl_uuid Effect implementation UUID.
* @param _aidl_return A pointer to created effect instance.
* @return ndk::ScopedAStatus
*/
ndk::ScopedAStatus createEffect(
const ::aidl::android::media::audio::common::AudioUuid& in_impl_uuid,
std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>* _aidl_return)
override;
/**
* @brief Destroy an effect instance.
*
* @param in_handle Effect instance handle.
* @return ndk::ScopedAStatus
*/
ndk::ScopedAStatus destroyEffect(
const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>& in_handle)
override;
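/*
 * Illustrative client-side sketch (not part of the original header), assuming a connected
 * IFactory binder named `factory`; error handling omitted:
 *
 *   std::vector<Descriptor> descriptors;
 *   factory->queryEffects(getEffectTypeUuidEqualizer(), std::nullopt, std::nullopt, &descriptors);
 *   std::shared_ptr<IEffect> effect;
 *   factory->createEffect(descriptors.front().common.id.uuid, &effect);
 *   // ... use the effect ...
 *   factory->destroyEffect(effect);
 */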
private:
const EffectConfig mConfig;
~Factory();
std::mutex mMutex;
// Set of effect descriptors supported by the devices.
std::set<Descriptor> mDescSet GUARDED_BY(mMutex);
std::set<Descriptor::Identity> mIdentitySet GUARDED_BY(mMutex);
static constexpr int kMapEntryHandleIndex = 0;
static constexpr int kMapEntryInterfaceIndex = 1;
static constexpr int kMapEntryLibNameIndex = 2;
typedef std::tuple<std::unique_ptr<void, std::function<void(void*)>> /* dlHandle */,
std::unique_ptr<struct effect_dl_interface_s> /* interfaces */,
std::string /* library name */>
DlEntry;
std::map<aidl::android::media::audio::common::AudioUuid /* implUUID */, DlEntry> mEffectLibMap
GUARDED_BY(mMutex);
typedef std::pair<aidl::android::media::audio::common::AudioUuid, ndk::SpAIBinder> EffectEntry;
std::map<std::weak_ptr<IEffect>, EffectEntry, std::owner_less<>> mEffectMap GUARDED_BY(mMutex);
ndk::ScopedAStatus destroyEffectImpl_l(const std::shared_ptr<IEffect>& in_handle)
REQUIRES(mMutex);
void cleanupEffectMap_l() REQUIRES(mMutex);
bool openEffectLibrary(const ::aidl::android::media::audio::common::AudioUuid& impl,
const std::string& path);
void createIdentityWithConfig(
const EffectConfig::Library& configLib,
const ::aidl::android::media::audio::common::AudioUuid& typeUuidStr,
const std::optional<::aidl::android::media::audio::common::AudioUuid> proxyUuid);
ndk::ScopedAStatus getDescriptorWithUuid_l(
const aidl::android::media::audio::common::AudioUuid& uuid, Descriptor* desc)
REQUIRES(mMutex);
void loadEffectLibs();
/* Get effect_dl_interface_s from library handle */
void getDlSyms_l(DlEntry& entry) REQUIRES(mMutex);
};
} // namespace aidl::qti::effects

View File

@@ -1,20 +0,0 @@
ifneq ($(AUDIO_USE_STUB_HAL), true)
CURRENT_PATH := $(call my-dir)
ifeq (0,1)
############################################
#[samsung audio feature - unused
include $(CURRENT_PATH)/offloadbundle/Android.mk
include $(CURRENT_PATH)/offloadvisualizer/Android.mk
include $(CURRENT_PATH)/voiceprocessing/Android.mk
include $(CURRENT_PATH)/volumelistener/Android.mk
#samsung audio feature - unused]
############################################
else
############################################
#[samsung audio feature - used
include $(CURRENT_PATH)/voiceprocessing/Android.mk
#samsung audio feature]
############################################
endif
#include $(call all-subdir-makefiles)
endif

View File

@@ -1,31 +0,0 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE:= libqcompostprocbundle
LOCAL_VENDOR_MODULE := true
LOCAL_MODULE_RELATIVE_PATH := soundfx
LOCAL_MODULE_OWNER := qti
LOCAL_CFLAGS += -Werror -Wall -Wextra
LOCAL_SRC_FILES:= \
OffloadBundleAidl.cpp \
OffloadBundleContext.cpp \
BassBoostContext.cpp \
EqualizerContext.cpp \
ReverbContext.cpp \
VirtualizerContext.cpp \
ParamDelegator.cpp
LOCAL_STATIC_LIBRARIES := libaudioeffecthal_base_impl_static
LOCAL_SHARED_LIBRARIES:= \
$(EFFECTS_DEFAULTS_SHARED_LIBRARIES) \
libar-pal
LOCAL_HEADER_LIBRARIES:= \
$(EFFECTS_DEFAULTS_HEADERS_LIBRARIES) \
libacdb_headers
include $(BUILD_SHARED_LIBRARY)

View File

@@ -1,148 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_Effect_BassBoostQti"
#include <Utils.h>
#include <cstddef>
#include "OffloadBundleContext.h"
#include "OffloadBundleTypes.h"
namespace aidl::qti::effects {
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
BassBoostContext::BassBoostContext(const Parameter::Common& common,
const OffloadBundleEffectType& type, bool processData)
: OffloadBundleContext(common, type, processData) {
LOG(DEBUG) << __func__ << type << " ioHandle " << common.ioHandle;
mState = EffectState::INITIALIZED;
}
BassBoostContext::~BassBoostContext() {
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
deInit();
}
void BassBoostContext::deInit() {
LOG(DEBUG) << __func__ << " ioHandle" << getIoHandle();
stop();
}
RetCode BassBoostContext::enable() {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
if (isEffectActive()) return RetCode::ERROR_ILLEGAL_PARAMETER;
mState = EffectState::ACTIVE;
mBassParams.mEnabled = 1;
setOffloadParameters(BASSBOOST_ENABLE_FLAG);
return RetCode::SUCCESS;
}
RetCode BassBoostContext::disable() {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
if (!isEffectActive()) return RetCode::ERROR_ILLEGAL_PARAMETER;
mState = EffectState::INITIALIZED;
mBassParams.mEnabled = 0;
setOffloadParameters(BASSBOOST_ENABLE_FLAG);
return RetCode::SUCCESS;
}
RetCode BassBoostContext::start(pal_stream_handle_t* palHandle) {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
mPalHandle = palHandle;
if (isEffectActive()) {
setOffloadParameters(BASSBOOST_ENABLE_FLAG | BASSBOOST_STRENGTH);
} else {
LOG(DEBUG) << "Not yet enabled";
}
return RetCode::SUCCESS;
}
RetCode BassBoostContext::stop() {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
struct BassBoostParams bassParams; // by default enable is 0
setOffloadParameters(&bassParams, BASSBOOST_ENABLE_FLAG);
mPalHandle = nullptr;
return RetCode::SUCCESS;
}
bool BassBoostContext::deviceSupportsEffect(const std::vector<AudioDeviceDescription>& devices) {
for (const auto& device : devices) {
if (device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
AudioDeviceDescription::CONNECTION_ANALOG} &&
device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
AudioDeviceDescription::CONNECTION_ANALOG} &&
device != AudioDeviceDescription{AudioDeviceType::OUT_HEADPHONE,
AudioDeviceDescription::CONNECTION_BT_A2DP} &&
device != AudioDeviceDescription{AudioDeviceType::OUT_HEADSET,
AudioDeviceDescription::CONNECTION_USB}) {
return false;
}
}
return true;
}
RetCode BassBoostContext::setOutputDevice(
const std::vector<aidl::android::media::audio::common::AudioDeviceDescription>& device) {
std::lock_guard lg(mMutex);
mOutputDevice = device;
if (deviceSupportsEffect(mOutputDevice)) {
if (mTempDisabled) {
if (isEffectActive()) {
mBassParams.mEnabled = 1;
setOffloadParameters(BASSBOOST_ENABLE_FLAG);
}
}
mTempDisabled = false;
} else if (!mTempDisabled) {
if (isEffectActive()) {
mBassParams.mEnabled = 0;
setOffloadParameters(BASSBOOST_ENABLE_FLAG);
}
mTempDisabled = true;
}
return RetCode::SUCCESS;
}
RetCode BassBoostContext::setBassBoostStrength(int strength) {
LOG(DEBUG) << __func__ << " strength " << strength;
mBassParams.mStrength = strength;
setOffloadParameters(BASSBOOST_ENABLE_FLAG | BASSBOOST_STRENGTH);
return RetCode::SUCCESS;
}
int BassBoostContext::getBassBoostStrength() {
LOG(DEBUG) << __func__ << " strength " << mBassParams.mStrength;
return mBassParams.mStrength;
}
int BassBoostContext::setOffloadParameters(uint64_t flags) {
if (mPalHandle) {
LOG(DEBUG) << " Strength " << mBassParams.mStrength << " enabled " << mBassParams.mEnabled;
ParamDelegator::updatePalParameters(mPalHandle, &mBassParams, flags);
} else {
LOG(VERBOSE) << " PalHandle not set";
}
return 0;
}
int BassBoostContext::setOffloadParameters(BassBoostParams* bassParams, uint64_t flags) {
if (mPalHandle) {
LOG(DEBUG) << " Strength " << bassParams->mStrength << " enabled " << bassParams->mEnabled;
ParamDelegator::updatePalParameters(mPalHandle, bassParams, flags);
} else {
LOG(VERBOSE) << " PalHandle not set";
}
return 0;
}
} // namespace aidl::qti::effects

View File

@@ -1,196 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#define LOG_TAG "AHAL_Effect_EqualizerQti"
#include <Utils.h>
#include <cstddef>
#include "OffloadBundleContext.h"
#include "OffloadBundleTypes.h"
namespace aidl::qti::effects {
using aidl::android::media::audio::common::AudioDeviceDescription;
using aidl::android::media::audio::common::AudioDeviceType;
EqualizerContext::EqualizerContext(const Parameter::Common& common,
const OffloadBundleEffectType& type, bool processData)
: OffloadBundleContext(common, type, processData) {
LOG(DEBUG) << __func__ << type << " ioHandle " << common.ioHandle;
init(); // init default state
mState = EffectState::INITIALIZED;
}
EqualizerContext::~EqualizerContext() {
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
deInit();
}
void EqualizerContext::init() {
// init with pre-defined preset NORMAL
for (std::size_t i = 0; i < MAX_NUM_BANDS; i++) {
mBandLevels[i] = kBandPresetLevels[0 /* normal */][i];
}
memset(&mEqParams, 0, sizeof(struct EqualizerParams));
mEqParams.config.presetId = PRESET_INVALID;
mEqParams.config.pregain = Q27_UNITY;
mEqParams.config.numBands = MAX_NUM_BANDS;
}
void EqualizerContext::deInit() {
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
stop();
}
RetCode EqualizerContext::enable() {
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
std::lock_guard lg(mMutex);
if (isEffectActive()) return RetCode::ERROR_ILLEGAL_PARAMETER;
mState = EffectState::ACTIVE;
mEqParams.enable = 1;
setOffloadParameters(EQ_ENABLE_FLAG | EQ_BANDS_LEVEL);
return RetCode::SUCCESS;
}
RetCode EqualizerContext::disable() {
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
std::lock_guard lg(mMutex);
if (!isEffectActive()) return RetCode::ERROR_ILLEGAL_PARAMETER;
mState = EffectState::INITIALIZED;
mEqParams.enable = 0;
setOffloadParameters(EQ_ENABLE_FLAG);
return RetCode::SUCCESS;
}
RetCode EqualizerContext::start(pal_stream_handle_t* palHandle) {
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
std::lock_guard lg(mMutex);
mPalHandle = palHandle;
if (isEffectActive()) {
setOffloadParameters(EQ_ENABLE_FLAG | EQ_BANDS_LEVEL);
} else {
LOG(DEBUG) << "Not yet enabled";
}
return RetCode::SUCCESS;
}
RetCode EqualizerContext::stop() {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << getIoHandle();
struct EqualizerParams eqParam = {0}; // by default enable bit is 0
setOffloadParameters(&eqParam, EQ_ENABLE_FLAG);
mPalHandle = nullptr;
return RetCode::SUCCESS;
}
RetCode EqualizerContext::setEqualizerPreset(const std::size_t presetIdx) {
std::lock_guard lg(mMutex);
if (presetIdx >= MAX_NUM_PRESETS) {
return RetCode::ERROR_ILLEGAL_PARAMETER;
}
// Mirrors the existing implementation: update the local state first, then send the config to PAL.
// Ideally the config would be sent to PAL first and the state updated only on success.
mCurrentPreset = presetIdx;
for (std::size_t i = 0; i < MAX_NUM_BANDS; i++) {
mBandLevels[i] = kBandPresetLevels[presetIdx][i];
}
updateOffloadParameters();
setOffloadParameters(EQ_ENABLE_FLAG | EQ_PRESET);
return RetCode::SUCCESS;
}
bool EqualizerContext::isBandLevelIndexInRange(
const std::vector<Equalizer::BandLevel>& bandLevels) const {
const auto[min, max] =
std::minmax_element(bandLevels.begin(), bandLevels.end(),
[](const auto& a, const auto& b) { return a.index < b.index; });
return min->index >= 0 && max->index < MAX_NUM_BANDS;
}
RetCode EqualizerContext::setEqualizerBandLevels(
const std::vector<Equalizer::BandLevel>& bandLevels) {
std::lock_guard lg(mMutex);
RETURN_VALUE_IF(bandLevels.size() > MAX_NUM_BANDS, RetCode::ERROR_ILLEGAL_PARAMETER,
"Exceeds Max Size");
RETURN_VALUE_IF(bandLevels.empty(), RetCode::ERROR_ILLEGAL_PARAMETER, "Empty Bands");
RETURN_VALUE_IF(!isBandLevelIndexInRange(bandLevels), RetCode::ERROR_ILLEGAL_PARAMETER,
"indexOutOfRange");
// Mirrors the existing implementation: update the local state first, then send the config to PAL.
// Ideally the config would be sent to PAL first and the state updated only on success.
for (auto& bandLevel : bandLevels) {
int level = bandLevel.levelMb;
if (level > 0) {
level = (int)((level + 50) / 100);
} else {
level = (int)((level - 50) / 100);
}
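// e.g. levelMb = +350 -> (350 + 50) / 100 = 4; levelMb = -350 -> (-350 - 50) / 100 = -4,
// i.e. the millibel value is rounded half away from zero to whole-dB band levels.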
LOG(VERBOSE) << __func__ << " band " << bandLevel.index << " levelMb " << bandLevel.levelMb
<< " refined level " << level;
mBandLevels[bandLevel.index] = level;
mCurrentPreset = PRESET_CUSTOM;
}
updateOffloadParameters();
setOffloadParameters(EQ_ENABLE_FLAG | EQ_BANDS_LEVEL);
return RetCode::SUCCESS;
}
std::vector<Equalizer::BandLevel> EqualizerContext::getEqualizerBandLevels() const {
std::vector<Equalizer::BandLevel> bandLevels;
bandLevels.reserve(MAX_NUM_BANDS);
for (std::size_t i = 0; i < MAX_NUM_BANDS; i++) {
bandLevels.emplace_back(
Equalizer::BandLevel{static_cast<int32_t>(i), mBandLevels[i] * 100});
}
return bandLevels;
}
std::vector<int32_t> EqualizerContext::getEqualizerCenterFreqs() {
std::vector<int32_t> result;
std::for_each(kBandFrequencies.begin(), kBandFrequencies.end(),
[&](const auto& band) { result.emplace_back((band.minMh + band.maxMh) / 2); });
return result;
}
void EqualizerContext::updateOffloadParameters() {
for (int i = 0; i < MAX_NUM_BANDS; i++) {
mEqParams.config.presetId = mCurrentPreset;
mEqParams.bandConfig[i].bandIndex = i;
mEqParams.bandConfig[i].filterType = EQ_BAND_BOOST;
mEqParams.bandConfig[i].frequencyMhz = kPresetsFrequencies[i] * 1000;
mEqParams.bandConfig[i].gainMb = mBandLevels[i] * 100;
mEqParams.bandConfig[i].qFactor = Q8_UNITY;
}
}
int EqualizerContext::setOffloadParameters(uint64_t flags) {
if (mPalHandle) {
ParamDelegator::updatePalParameters(mPalHandle, &mEqParams, flags);
} else {
LOG(VERBOSE) << " PalHandle not set";
}
return 0;
}
int EqualizerContext::setOffloadParameters(EqualizerParams* params, uint64_t flags) {
if (mPalHandle) {
ParamDelegator::updatePalParameters(mPalHandle, params, flags);
} else {
LOG(VERBOSE) << " PalHandle not set";
}
return 0;
}
} // namespace aidl::qti::effects

View File

@@ -1,174 +0,0 @@
/*
* Copyright (c) 2023 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause-Clear
*/
#pragma once
#include <algorithm>
#include <memory>
#include <unordered_map>
#include <android-base/logging.h>
#include <android-base/thread_annotations.h>
#include "OffloadBundleContext.h"
#include "OffloadBundleTypes.h"
namespace aidl::qti::effects {
/**
* @brief Maintain all effect offload bundle sessions.
*
*/
class GlobalOffloadSession {
public:
static GlobalOffloadSession& getGlobalSession() {
static GlobalOffloadSession instance;
return instance;
}
static bool findTypeInContextList(std::vector<std::shared_ptr<OffloadBundleContext>>& list,
const OffloadBundleEffectType& type, bool remove = false) {
auto itr = std::find_if(list.begin(), list.end(),
[type](const std::shared_ptr<OffloadBundleContext>& bundle) {
return bundle->getBundleType() == type;
});
if (itr == list.end()) {
return false;
}
if (remove) {
(*itr)->deInit(); // call release inside of it.
list.erase(itr);
}
return true;
}
std::shared_ptr<OffloadBundleContext> createContext(const OffloadBundleEffectType& type,
const Parameter::Common& common,
bool processData) {
switch (type) {
case OffloadBundleEffectType::BASS_BOOST:
return std::make_shared<BassBoostContext>(common, type, processData);
case OffloadBundleEffectType::EQUALIZER:
return std::make_shared<EqualizerContext>(common, type, processData);
case OffloadBundleEffectType::VIRTUALIZER:
return std::make_shared<VirtualizerContext>(common, type, processData);
case OffloadBundleEffectType::AUX_ENV_REVERB:
case OffloadBundleEffectType::INSERT_ENV_REVERB:
case OffloadBundleEffectType::AUX_PRESET_REVERB:
case OffloadBundleEffectType::INSERT_PRESET_REVERB:
return std::make_shared<ReverbContext>(common, type, processData);
}
return nullptr;
}
/**
* Create a BundleContext of the given type in a shared_ptr container; a session must not hold
* more than one context of each type.
*/
std::shared_ptr<OffloadBundleContext> createSession(const OffloadBundleEffectType& type,
const Parameter::Common& common,
bool processData) {
std::lock_guard lg(mMutex);
int ioHandle = common.ioHandle;
int sessionId = common.session;
LOG(DEBUG) << __func__ << " " << type << " with ioHandle " << ioHandle << " sessionId"
<< sessionId;
if (mSessionsMap.count(sessionId)) {
if (findTypeInContextList(mSessionsMap[sessionId], type)) {
LOG(ERROR) << __func__ << " " << type << " already exists in session " << sessionId;
return nullptr;
}
}
auto& list = mSessionsMap[sessionId];
LOG(DEBUG) << __func__ << type << " createContext ioHandle " << ioHandle << " sessionId"
<< sessionId;
auto context = createContext(type, common, processData);
RETURN_VALUE_IF(!context, nullptr, "failedToCreateContext");
list.push_back(context);
// find ioHandle in the mActiveIoHandles
for (const auto& pair : mActiveIoHandles) {
if (pair.first == ioHandle) {
LOG(DEBUG) << "IoHandle is active " << ioHandle << " session " << sessionId;
context->start(pair.second);
}
}
return context;
}
void releaseSession(const OffloadBundleEffectType& type, int sessionId) {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " Enter: " << type << " sessionId " << sessionId;
if (mSessionsMap.count(sessionId)) {
auto& list = mSessionsMap[sessionId];
if (!findTypeInContextList(list, type, true /* remove */)) {
LOG(ERROR) << __func__ << " can't find " << type << "in sessionId " << sessionId;
return;
}
if (list.empty()) {
mSessionsMap.erase(sessionId);
}
}
LOG(DEBUG) << __func__ << " Exit: " << type << " sessionId " << sessionId << " sessions "
<< mSessionsMap.size();
}
// Used by AudioHal to link effect with output.
void startEffect(int ioHandle, pal_stream_handle_t* palHandle) {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << ioHandle << " palHandle " << palHandle
<< " sessions " << mSessionsMap.size();
// start the context having same ioHandle
for (const auto& handles : mSessionsMap) {
auto& list = handles.second;
for (const auto& context : list) {
if (context->getIoHandle() == ioHandle) {
context->start(palHandle);
}
}
}
mActiveIoHandles[ioHandle] = palHandle;
}
// Used by AudioHal to link effect with output.
void stopEffect(int ioHandle) {
std::lock_guard lg(mMutex);
LOG(DEBUG) << __func__ << " ioHandle " << ioHandle << " sessions " << mSessionsMap.size()
<< "activeHandles " << mActiveIoHandles.count(ioHandle);
// stop the context having same ioHandle
for (const auto& handles : mSessionsMap) {
auto& list = handles.second;
for (const auto& context : list) {
if (context->getIoHandle() == ioHandle) {
context->stop();
}
}
}
if (mActiveIoHandles.count(ioHandle)) {
mActiveIoHandles.erase(ioHandle);
LOG(VERBOSE) << __func__ << " Removed ioHandle " << ioHandle << " sessions "
<< mSessionsMap.size() << " activeHandles "
<< mActiveIoHandles.count(ioHandle);
}
}
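/*
 * Illustrative lifecycle sketch (not part of the original header), assuming the HAL owns a
 * pal_stream_handle_t* named `palHandle` for the output identified by `common.ioHandle`:
 *
 *   auto& session = GlobalOffloadSession::getGlobalSession();
 *   auto context = session.createSession(OffloadBundleEffectType::EQUALIZER, common, true);
 *   session.startEffect(common.ioHandle, palHandle); // link the effect with the active output
 *   // ... stream runs; equalizer parameters are applied through `context` ...
 *   session.stopEffect(common.ioHandle);
 *   session.releaseSession(OffloadBundleEffectType::EQUALIZER, common.session);
 */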
private:
// Lock for mSessionsMap and mActiveIoHandles access.
std::mutex mMutex;
// map between sessionId and list of effect contexts for that session
std::unordered_map<int /* sessionId */, std::vector<std::shared_ptr<OffloadBundleContext>>>
mSessionsMap GUARDED_BY(mMutex);
// io Handle to palHandle mapping.
std::unordered_map<int, pal_stream_handle_t*> mActiveIoHandles GUARDED_BY(mMutex);
};
} // namespace aidl::qti::effects

Some files were not shown because too many files have changed in this diff.