Android Audio Code Analysis 4 - AudioSystem::getOutputSamplingRate
In the code we looked at earlier, the AudioSystem class functions getOutputSamplingRate(), getOutputFrameCount(), getOutputLatency(), and so on are called frequently. Their implementations are all basically similar, so today let's take a close look at AudioSystem::getOutputSamplingRate().
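Before reading the implementation, it is worth recalling how these queries are normally used. The fragment below is only a sketch of a caller in the spirit of AudioTrack's setup code, not a quote from the tree; it assumes the signatures quoted in this article (an int*/uint32_t* out-parameter plus a stream type) and is placed, conceptually, inside namespace android.

status_t queryMusicOutputConfig()   // hypothetical helper, for illustration only
{
    int afSampleRate;
    if (AudioSystem::getOutputSamplingRate(&afSampleRate, AudioSystem::MUSIC) != NO_ERROR) {
        return NO_INIT;
    }
    int afFrameCount;
    if (AudioSystem::getOutputFrameCount(&afFrameCount, AudioSystem::MUSIC) != NO_ERROR) {
        return NO_INIT;
    }
    uint32_t afLatency;
    if (AudioSystem::getOutputLatency(&afLatency, AudioSystem::MUSIC) != NO_ERROR) {
        return NO_INIT;
    }
    // All three values describe the hardware output that the MUSIC stream is routed to.
    LOGV("MUSIC output: %d Hz, %d frames, %u ms latency", afSampleRate, afFrameCount, afLatency);
    return NO_ERROR;
}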
***********************************************************************************************************
status_t AudioSystem::getOutputSamplingRate(int* samplingRate, int streamType)
{
OutputDescriptor *outputDesc;
audio_io_handle_t output;
if (streamType == DEFAULT) {
streamType = MUSIC;
}
output = getOutput((stream_type)streamType);
if (output == 0) {
return PERMISSION_DENIED;
}
gLock.lock();
outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == 0) {
LOGV("getOutputSamplingRate() no output descriptor for output %d in gOutputs", output);
gLock.unlock();
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*samplingRate = af->sampleRate(output);
} else {
LOGV("getOutputSamplingRate() reading from output desc");
*samplingRate = outputDesc->samplingRate;
gLock.unlock();
}
LOGV("getOutputSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, *samplingRate);
return NO_ERROR;
}
*************************************************************************************************************** Source path: frameworks\base\media\AudioSystem.cpp
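Two process-local caches appear over and over in the walkthrough that follows: gStreamOutputMap (stream type -> output handle) and gOutputs (output handle -> OutputDescriptor). Both follow the same pattern: answer from the cache if possible, otherwise make a Binder call (to AudioPolicyService or AudioFlinger) and remember the result. The following self-contained sketch illustrates that two-level lookup with hypothetical names and std::map stand-ins; it is not the actual AudioSystem code.

#include <cstdio>
#include <map>

typedef int audio_io_handle_t;                    // as in the real headers
struct OutputDescriptor { unsigned samplingRate; };

// Stand-ins for the Binder calls made in the real implementation.
static audio_io_handle_t remoteGetOutput(int stream)           { return 1; }      // ~ AudioPolicyService::getOutput()
static unsigned          remoteSampleRate(audio_io_handle_t o) { return 44100; }  // ~ AudioFlinger::sampleRate()

static std::map<int, audio_io_handle_t>              streamOutputMap;  // plays the role of gStreamOutputMap
static std::map<audio_io_handle_t, OutputDescriptor> outputs;          // plays the role of gOutputs

static unsigned getOutputSamplingRateSketch(int stream)
{
    // 1. stream type -> output handle (cached after the first call)
    audio_io_handle_t output;
    std::map<int, audio_io_handle_t>::iterator it = streamOutputMap.find(stream);
    if (it != streamOutputMap.end()) {
        output = it->second;                    // cache hit
    } else {
        output = remoteGetOutput(stream);       // Binder call
        streamOutputMap[stream] = output;       // remember the mapping
    }

    // 2. output handle -> configuration (cached if a descriptor was received)
    std::map<audio_io_handle_t, OutputDescriptor>::iterator od = outputs.find(output);
    if (od != outputs.end()) {
        return od->second.samplingRate;         // "reading from output desc"
    }
    return remoteSampleRate(output);            // fall back to AudioFlinger
}

int main()
{
    printf("sampling rate = %u\n", getOutputSamplingRateSketch(3 /* MUSIC */));
    return 0;
}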
######################################################################################
status_t AudioSystem::getOutputSamplingRate(int* samplingRate, int streamType)
{
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// audio output descriptor used to cache output configurations in client process to avoid frequent calls
// through IAudioFlinger
class OutputDescriptor {
public:
OutputDescriptor()
: samplingRate(0), format(0), channels(0), frameCount(0), latency(0) {}
uint32_t samplingRate;
int32_t format;
int32_t channels;
size_t frameCount;
uint32_t latency;
};
----------------------------------------------------------------
OutputDescriptor *outputDesc;
// typedef int audio_io_handle_t;
audio_io_handle_t output;
// The DEFAULT stream type is mapped to MUSIC
if (streamType == DEFAULT) {
streamType = MUSIC;
}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
audio_io_handle_t AudioSystem::getOutput(stream_type stream,
uint32_t samplingRate,
uint32_t format,
uint32_t channels,
output_flags flags)
{
audio_io_handle_t output = 0;
// If the direct output flag is set, or a direct output is likely to be used, skip the stream-to-output cache:
// Do not use stream to output map cache if the direct output
// flag is set or if we are likely to use a direct output
// (e.g voice call stream @ 8kHz could use BT SCO device and be routed to
// a direct output on some platforms).
// TODO: the output cache and stream to output mapping implementation needs to
// be reworked for proper operation with direct outputs. This code is too specific
// to the first use case we want to cover (Voice Recognition and Voice Dialer over
// Bluetooth SCO
if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) == 0 &&
((stream != AudioSystem::VOICE_CALL && stream != AudioSystem::BLUETOOTH_SCO) ||
channels != AudioSystem::CHANNEL_OUT_MONO ||
(samplingRate != 8000 && samplingRate != 16000))) {
Mutex::Autolock _l(gLock);
// gStreamOutputMap caches the stream type -> output handle mapping.
// If getOutput() has already been called for this stream, the cached output is returned here;
// otherwise AudioPolicyService::getOutput() is queried below and the stream/output pair
// is added to gStreamOutputMap (see the end of this function).
output = AudioSystem::gStreamOutputMap.valueFor(stream);
LOGV_IF((output != 0), "getOutput() read %d from cache for stream %d", output, stream);
}
// Cache miss: ask AudioPolicyService for an output, then cache the stream/output pair below.
if (output == 0) {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// establish binder interface to the AudioPolicyService
const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
{
gLock.lock();
// If gAudioPolicyService has not been obtained yet, connect to it now; otherwise return the cached one.
if (gAudioPolicyService.get() == 0) {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// Obtain the ServiceManager proxy used to look up system services.
sp<IServiceManager> defaultServiceManager()
{
// If gDefaultServiceManager has already been created, return it directly.
if (gDefaultServiceManager != NULL) return gDefaultServiceManager;
{
AutoMutex _l(gDefaultServiceManagerLock);
if (gDefaultServiceManager == NULL) {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
sp<ProcessState> ProcessState::self()
{
if (gProcess != NULL) return gProcess;
AutoMutex _l(gProcessMutex);
if (gProcess == NULL) gProcess = new ProcessState;
return gProcess;
}
----------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
sp<IBinder> ProcessState::getContextObject(const sp<IBinder>& caller)
{
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
bool ProcessState::supportsProcesses() const
{
return mDriverFD >= 0;
}
----------------------------------------------------------------
if (supportsProcesses()) {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
sp<IBinder> ProcessState::getStrongProxyForHandle(int32_t handle)
{
sp<IBinder> result;
AutoMutex _l(mLock);
handle_entry* e = lookupHandleLocked(handle);
if (e != NULL) {
// We need to create a new BpBinder if there isn't currently one, OR we
// are unable to acquire a weak reference on this current one. See comment
// in getWeakProxyForHandle() for more info about this.
IBinder* b = e->binder;
if (b == NULL || !e->refs->attemptIncWeak(this)) {
b = new BpBinder(handle);
e->binder = b;
if (b) e->refs = b->getWeakRefs();
result = b;
} else {
// This little bit of nastyness is to allow us to add a primary
// reference to the remote proxy when this team doesn't have one
// but another team is sending the handle to us.
result.force_set(b);
e->refs->decWeak(this);
}
}
return result;
}
----------------------------------------------------------------
return getStrongProxyForHandle(0);
} else {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
sp<IBinder> ProcessState::getContextObject(const String16& name, const sp<IBinder>& caller)
{
mLock.lock();
sp<IBinder> object(
mContexts.indexOfKey(name) >= 0 ? mContexts.valueFor(name) : NULL);
mLock.unlock();
//printf("Getting context object %s for %p
", String8(name).string(), caller.get());
if (object != NULL) return object;
// Don't attempt to retrieve contexts if we manage them
if (mManagesContexts) {
LOGE("getContextObject(%s) failed, but we manage the contexts!
",
String8(name).string());
return NULL;
}
IPCThreadState* ipc = IPCThreadState::self();
{
Parcel data, reply;
// no interface token on this magic transaction
data.writeString16(name);
data.writeStrongBinder(caller);
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
status_t IPCThreadState::transact(int32_t handle,
uint32_t code, const Parcel& data,
Parcel* reply, uint32_t flags)
{
status_t err = data.errorCheck();
flags |= TF_ACCEPT_FDS;
IF_LOG_TRANSACTIONS() {
TextOutput::Bundle _b(alog);
alog << "BC_TRANSACTION thr " << (void*)pthread_self() << " / hand "
<< handle << " / code " << TypeCode(code) << ": "
<< indent << data << dedent << endl;
}
if (err == NO_ERROR) {
LOG_ONEWAY(">>>> SEND from pid %d uid %d %s", getpid(), getuid(),
(flags & TF_ONE_WAY) == 0 ? "READ REPLY" : "ONE WAY");
err = writeTransactionData(BC_TRANSACTION, flags, handle, code, data, NULL);
}
if (err != NO_ERROR) {
if (reply) reply->setError(err);
return (mLastError = err);
}
if ((flags & TF_ONE_WAY) == 0) {
#if 0
if (code == 4) { // relayout
LOGI(">>>>>> CALLING transaction 4");
} else {
LOGI(">>>>>> CALLING transaction %d", code);
}
#endif
if (reply) {
err = waitForResponse(reply);
} else {
Parcel fakeReply;
err = waitForResponse(&fakeReply);
}
#if 0
if (code == 4) { // relayout
LOGI("<<<<<< RETURNING transaction 4");
} else {
LOGI("<<<<<< RETURNING transaction %d", code);
}
#endif
IF_LOG_TRANSACTIONS() {
TextOutput::Bundle _b(alog);
alog << "BR_REPLY thr " << (void*)pthread_self() << " / hand "
<< handle << ": ";
if (reply) alog << indent << *reply << dedent << endl;
else alog << "(none requested)" << endl;
}
} else {
err = waitForResponse(NULL, NULL);
}
return err;
}
----------------------------------------------------------------
status_t result = ipc->transact(0 /*magic*/, 0, data, &reply, 0);
if (result == NO_ERROR) {
object = reply.readStrongBinder();
}
}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
void IPCThreadState::flushCommands()
{
if (mProcess->mDriverFD <= 0)
return;
// talkWithDriver() performs the actual write to the binder driver:
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
status_t IPCThreadState::talkWithDriver(bool doReceive)
{
LOG_ASSERT(mProcess->mDriverFD >= 0, "Binder driver is not opened");
binder_write_read bwr;
// Is the read buffer empty?
const bool needRead = mIn.dataPosition() >= mIn.dataSize();
// We don't want to write anything if we are still reading
// from data left in the input buffer and the caller
// has requested to read the next data.
const size_t outAvail = (!doReceive || needRead) ? mOut.dataSize() : 0;
bwr.write_size = outAvail;
bwr.write_buffer = (long unsigned int)mOut.data();
// This is what we'll read.
if (doReceive && needRead) {
bwr.read_size = mIn.dataCapacity();
bwr.read_buffer = (long unsigned int)mIn.data();
} else {
bwr.read_size = 0;
}
IF_LOG_COMMANDS() {
TextOutput::Bundle _b(alog);
if (outAvail != 0) {
alog << "Sending commands to driver: " << indent;
const void* cmds = (const void*)bwr.write_buffer;
const void* end = ((const uint8_t*)cmds)+bwr.write_size;
alog << HexDump(cmds, bwr.write_size) << endl;
while (cmds < end) cmds = printCommand(alog, cmds);
alog << dedent;
}
alog << "Size of receive buffer: " << bwr.read_size
<< ", needRead: " << needRead << ", doReceive: " << doReceive << endl;
}
// Return immediately if there is nothing to do.
if ((bwr.write_size == 0) && (bwr.read_size == 0)) return NO_ERROR;
bwr.write_consumed = 0;
bwr.read_consumed = 0;
status_t err;
do {
IF_LOG_COMMANDS() {
alog << "About to read/write, write size = " << mOut.dataSize() << endl;
}
#if defined(HAVE_ANDROID_OS)
// Issue the BINDER_WRITE_READ ioctl to the binder driver
if (ioctl(mProcess->mDriverFD, BINDER_WRITE_READ, &bwr) >= 0)
err = NO_ERROR;
else
err = -errno;
#else
err = INVALID_OPERATION;
#endif
IF_LOG_COMMANDS() {
alog << "Finished read/write, write size = " << mOut.dataSize() << endl;
}
} while (err == -EINTR);
IF_LOG_COMMANDS() {
alog << "Our err: " << (void*)err << ", write consumed: "
<< bwr.write_consumed << " (of " << mOut.dataSize()
<< "), read consumed: " << bwr.read_consumed << endl;
}
if (err >= NO_ERROR) {
if (bwr.write_consumed > 0) {
if (bwr.write_consumed < (ssize_t)mOut.dataSize())
mOut.remove(0, bwr.write_consumed);
else
mOut.setDataSize(0);
}
if (bwr.read_consumed > 0) {
mIn.setDataSize(bwr.read_consumed);
mIn.setDataPosition(0);
}
IF_LOG_COMMANDS() {
TextOutput::Bundle _b(alog);
alog << "Remaining data size: " << mOut.dataSize() << endl;
alog << "Received commands from driver: " << indent;
const void* cmds = mIn.data();
const void* end = mIn.data() + mIn.dataSize();
alog << HexDump(cmds, mIn.dataSize()) << endl;
while (cmds < end) cmds = printReturnCommand(alog, cmds);
alog << dedent;
}
return NO_ERROR;
}
return err;
}
----------------------------------------------------------------
talkWithDriver(false);
}
----------------------------------------------------------------
ipc->flushCommands();
if (object != NULL) setContextObject(object, name);
return object;
}
----------------------------------------------------------------
return getContextObject(String16("default"), caller);
}
}
----------------------------------------------------------------
gDefaultServiceManager = interface_cast<IServiceManager>(
ProcessState::self()->getContextObject(NULL));
}
}
return gDefaultServiceManager;
}
----------------------------------------------------------------
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
do {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
public IBinder getService(String name) throws RemoteException {
Parcel data = Parcel.obtain();
Parcel reply = Parcel.obtain();
data.writeInterfaceToken(IServiceManager.descriptor);
data.writeString(name);
// transact() goes through the Binder to the Bn (server) side.
// The handler shown below is ServiceManagerNative.onTransact():
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
public boolean onTransact(int code, Parcel data, Parcel reply, int flags)
{
try {
switch (code) {
case IServiceManager.GET_SERVICE_TRANSACTION: {
data.enforceInterface(IServiceManager.descriptor);
String name = data.readString();
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
/**
* Returns a reference to a service with the given name.
*
* @param name the name of the service to get
* @return a reference to the service, or <code>null</code> if the service doesn't exist
*/
public static IBinder getService(String name) {
try {
IBinder service = sCache.get(name);
if (service != null) {
return service;
} else {
return getIServiceManager().getService(name);
}
} catch (RemoteException e) {
Log.e(TAG, "error in getService", e);
}
return null;
}
----------------------------------------------------------------
IBinder service = getService(name);
reply.writeStrongBinder(service);
return true;
}
case IServiceManager.CHECK_SERVICE_TRANSACTION: {
data.enforceInterface(IServiceManager.descriptor);
String name = data.readString();
IBinder service = checkService(name);
reply.writeStrongBinder(service);
return true;
}
case IServiceManager.ADD_SERVICE_TRANSACTION: {
data.enforceInterface(IServiceManager.descriptor);
String name = data.readString();
IBinder service = data.readStrongBinder();
addService(name, service);
return true;
}
case IServiceManager.LIST_SERVICES_TRANSACTION: {
data.enforceInterface(IServiceManager.descriptor);
String[] list = listServices();
reply.writeStringArray(list);
return true;
}
case IServiceManager.SET_PERMISSION_CONTROLLER_TRANSACTION: {
data.enforceInterface(IServiceManager.descriptor);
IPermissionController controller
= IPermissionController.Stub.asInterface(
data.readStrongBinder());
setPermissionController(controller);
return true;
}
}
} catch (RemoteException e) {
}
return false;
}
----------------------------------------------------------------
mRemote.transact(GET_SERVICE_TRANSACTION, data, reply, 0);
IBinder binder = reply.readStrongBinder();
reply.recycle();
data.recycle();
return binder;
}
----------------------------------------------------------------
binder = sm->getService(String16("media.audio_policy"));
if (binder != 0)
break;
LOGW("AudioPolicyService not published, waiting...");
usleep(500000); // 0.5 s
} while(true);
if (gAudioPolicyServiceClient == NULL) {
gAudioPolicyServiceClient = new AudioPolicyServiceClient();
}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
status_t BpBinder::linkToDeath(
const sp<DeathRecipient>& recipient, void* cookie, uint32_t flags)
{
Obituary ob;
ob.recipient = recipient;
ob.cookie = cookie;
ob.flags = flags;
LOG_ALWAYS_FATAL_IF(recipient == NULL,
"linkToDeath(): recipient must be non-NULL");
{
AutoMutex _l(mLock);
if (!mObitsSent) {
if (!mObituaries) {
mObituaries = new Vector<Obituary>;
if (!mObituaries) {
return NO_MEMORY;
}
LOGV("Requesting death notification: %p handle %d
", this, mHandle);
getWeakRefs()->incWeak(this);
IPCThreadState* self = IPCThreadState::self();
self->requestDeathNotification(mHandle, this);
self->flushCommands();
}
ssize_t res = mObituaries->add(ob);
return res >= (ssize_t)NO_ERROR ? (status_t)NO_ERROR : res;
}
}
return DEAD_OBJECT;
}
----------------------------------------------------------------
binder->linkToDeath(gAudioPolicyServiceClient);
gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
gLock.unlock();
} else {
gLock.unlock();
}
return gAudioPolicyService;
}
----------------------------------------------------------------
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return 0;
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// BpAudioPolicyService(class BpAudioPolicyService : public BpInterface<IAudioPolicyService>)
virtual audio_io_handle_t getOutput(
AudioSystem::stream_type stream,
uint32_t samplingRate,
uint32_t format,
uint32_t channels,
AudioSystem::output_flags flags)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(static_cast <uint32_t>(stream));
data.writeInt32(samplingRate);
data.writeInt32(static_cast <uint32_t>(format));
data.writeInt32(channels);
data.writeInt32(static_cast <uint32_t>(flags));
// The call crosses the Binder and is handled on the server side by AudioPolicyService
// (class AudioPolicyService : public BnAudioPolicyService,
//  public AudioPolicyClientInterface, public IBinder::DeathRecipient):
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
audio_io_handle_t AudioPolicyService::getOutput(AudioSystem::stream_type stream,
uint32_t samplingRate,
uint32_t format,
uint32_t channels,
AudioSystem::output_flags flags)
{
if (mpPolicyManager == NULL) {
return 0;
}
LOGV("getOutput() tid %d", gettid());
Mutex::Autolock _l(mLock);
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// The actual output selection is delegated to the policy manager:
audio_io_handle_t AudioPolicyManagerBase::getOutput(AudioSystem::stream_type stream,
uint32_t samplingRate,
uint32_t format,
uint32_t channels,
AudioSystem::output_flags flags)
{
// 'output' will receive the handle of the selected output; each open output is described by an AudioOutputDescriptor.
audio_io_handle_t output = 0;
uint32_t latency = 0;
routing_strategy strategy = getStrategy((AudioSystem::stream_type)stream);
uint32_t device = getDeviceForStrategy(strategy);
LOGV("getOutput() stream %d, samplingRate %d, format %d, channels %x, flags %x", stream, samplingRate, format, channels, flags);
#ifdef AUDIO_POLICY_TEST
if (mCurOutput != 0) {
LOGV("getOutput() test output mCurOutput %d, samplingRate %d, format %d, channels %x, mDirectOutput %d",
mCurOutput, mTestSamplingRate, mTestFormat, mTestChannels, mDirectOutput);
if (mTestOutputs[mCurOutput] == 0) {
LOGV("getOutput() opening test output");
AudioOutputDescriptor *outputDesc = new AudioOutputDescriptor();
outputDesc->mDevice = mTestDevice;
outputDesc->mSamplingRate = mTestSamplingRate;
outputDesc->mFormat = mTestFormat;
outputDesc->mChannels = mTestChannels;
outputDesc->mLatency = mTestLatencyMs;
outputDesc->mFlags = (AudioSystem::output_flags)(mDirectOutput ? AudioSystem::OUTPUT_FLAG_DIRECT : 0);
outputDesc->mRefCount[stream] = 0;
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
audio_io_handle_t AudioPolicyService::openOutput(uint32_t *pDevices,
uint32_t *pSamplingRate,
uint32_t *pFormat,
uint32_t *pChannels,
uint32_t *pLatencyMs,
AudioSystem::output_flags flags)
{
sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
if (af == 0) {
LOGW("openOutput() could not get AudioFlinger");
return 0;
}
// Delegate to AudioFlinger::openOutput():
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
int AudioFlinger::openOutput(uint32_t *pDevices,
uint32_t *pSamplingRate,
uint32_t *pFormat,
uint32_t *pChannels,
uint32_t *pLatencyMs,
uint32_t flags)
{
status_t status;
PlaybackThread *thread = NULL;
mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0;
uint32_t format = pFormat ? *pFormat : 0;
uint32_t channels = pChannels ? *pChannels : 0;
uint32_t latency = pLatencyMs ? *pLatencyMs : 0;
LOGV("openOutput(), Device %x, SamplingRate %d, Format %d, Channels %x, flags %x",
pDevices ? *pDevices : 0,
samplingRate,
format,
channels,
flags);
if (pDevices == NULL || *pDevices == 0) {
return 0;
}
Mutex::Autolock _l(mLock);
// Ask the audio HAL to open an output stream:
AudioStreamOut *output = mAudioHardware->openOutputStream(*pDevices,
(int *)&format,
&channels,
&samplingRate,
&status);
LOGV("openOutput() openOutputStream returned output %p, SamplingRate %d, Format %d, Channels %x, status %d",
output,
samplingRate,
format,
channels,
status);
mHardwareStatus = AUDIO_HW_IDLE;
if (output != 0) {
int id = nextUniqueId();
if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) ||
(format != AudioSystem::PCM_16_BIT) ||
(channels != AudioSystem::CHANNEL_OUT_STEREO)) {
thread = new DirectOutputThread(this, output, id, *pDevices);
LOGV("openOutput() created direct output: ID %d thread %p", id, thread);
} else {
thread = new MixerThread(this, output, id, *pDevices);
LOGV("openOutput() created mixer output: ID %d thread %p", id, thread);
#ifdef LVMX
unsigned bitsPerSample =
(format == AudioSystem::PCM_16_BIT) ? 16 :
((format == AudioSystem::PCM_8_BIT) ? 8 : 0);
unsigned channelCount = (channels == AudioSystem::CHANNEL_OUT_STEREO) ? 2 : 1;
int audioOutputType = LifeVibes::threadIdToAudioOutputType(thread->id());
LifeVibes::init_aot(audioOutputType, samplingRate, bitsPerSample, channelCount);
LifeVibes::setDevice(audioOutputType, *pDevices);
#endif
}
mPlaybackThreads.add(id, thread);
if (pSamplingRate) *pSamplingRate = samplingRate;
if (pFormat) *pFormat = format;
if (pChannels) *pChannels = channels;
if (pLatencyMs) *pLatencyMs = thread->latency();
// notify client processes of the new output creation
thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED);
return id;
}
return 0;
}
----------------------------------------------------------------
return af->openOutput(pDevices,
pSamplingRate,
(uint32_t *)pFormat,
pChannels,
pLatencyMs,
flags);
}
----------------------------------------------------------------
mTestOutputs[mCurOutput] = mpClientInterface->openOutput(&outputDesc->mDevice,
&outputDesc->mSamplingRate,
&outputDesc->mFormat,
&outputDesc->mChannels,
&outputDesc->mLatency,
outputDesc->mFlags);
if (mTestOutputs[mCurOutput]) {
AudioParameter outputCmd = AudioParameter();
outputCmd.addInt(String8("set_id"),mCurOutput);
mpClientInterface->setParameters(mTestOutputs[mCurOutput],outputCmd.toString());
addOutput(mTestOutputs[mCurOutput], outputDesc);
}
}
return mTestOutputs[mCurOutput];
}
#endif //AUDIO_POLICY_TEST
// open a direct output if required by specified parameters
if (needsDirectOuput(stream, samplingRate, format, channels, flags, device)) {
LOGV("getOutput() opening direct output device %x", device);
AudioOutputDescriptor *outputDesc = new AudioOutputDescriptor();
outputDesc->mDevice = device;
outputDesc->mSamplingRate = samplingRate;
outputDesc->mFormat = format;
outputDesc->mChannels = channels;
outputDesc->mLatency = 0;
outputDesc->mFlags = (AudioSystem::output_flags)(flags | AudioSystem::OUTPUT_FLAG_DIRECT);
outputDesc->mRefCount[stream] = 0;
output = mpClientInterface->openOutput(&outputDesc->mDevice,
&outputDesc->mSamplingRate,
&outputDesc->mFormat,
&outputDesc->mChannels,
&outputDesc->mLatency,
outputDesc->mFlags);
// only accept an output with the requested parameters
if (output == 0 ||
(samplingRate != 0 && samplingRate != outputDesc->mSamplingRate) ||
(format != 0 && format != outputDesc->mFormat) ||
(channels != 0 && channels != outputDesc->mChannels)) {
LOGV("getOutput() failed opening direct output: samplingRate %d, format %d, channels %d",
samplingRate, format, channels);
if (output != 0) {
mpClientInterface->closeOutput(output);
}
delete outputDesc;
return 0;
}
addOutput(output, outputDesc);
return output;
}
if (channels != 0 && channels != AudioSystem::CHANNEL_OUT_MONO &&
channels != AudioSystem::CHANNEL_OUT_STEREO) {
return 0;
}
// open a non direct output
// get which output is suitable for the specified stream. The actual routing change will happen
// when startOutput() will be called
uint32_t a2dpDevice = device & AudioSystem::DEVICE_OUT_ALL_A2DP;
if (AudioSystem::popCount((AudioSystem::audio_devices)device) == 2) {
#ifdef WITH_A2DP
if (a2dpUsedForSonification() && a2dpDevice != 0) {
// if playing on 2 devices among which one is A2DP, use duplicated output
LOGV("getOutput() using duplicated output");
LOGW_IF((mA2dpOutput == 0), "getOutput() A2DP device in multiple %x selected but A2DP output not opened", device);
output = mDuplicatedOutput;
} else
#endif
{
// if playing on 2 devices among which none is A2DP, use hardware output
output = mHardwareOutput;
}
LOGV("getOutput() using output %d for 2 devices %x", output, device);
} else {
#ifdef WITH_A2DP
if (a2dpDevice != 0) {
// if playing on A2DP device, use a2dp output
LOGW_IF((mA2dpOutput == 0), "getOutput() A2DP device %x selected but A2DP output not opened", device);
output = mA2dpOutput;
} else
#endif
{
// if playing on not A2DP device, use hardware output
output = mHardwareOutput;
}
}
LOGW_IF((output ==0), "getOutput() could not find output for stream %d, samplingRate %d, format %d, channels %x, flags %x",
stream, samplingRate, format, channels, flags);
return output;
}
----------------------------------------------------------------
return mpPolicyManager->getOutput(stream, samplingRate, format, channels, flags);
}
----------------------------------------------------------------
remote()->transact(GET_OUTPUT, data, &reply);
return static_cast <audio_io_handle_t> (reply.readInt32());
}
----------------------------------------------------------------
output = aps->getOutput(stream, samplingRate, format, channels, flags);
if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) == 0) {
Mutex::Autolock _l(gLock);
AudioSystem::gStreamOutputMap.add(stream, output);
}
}
return output;
}
----------------------------------------------------------------
output = getOutput((stream_type)streamType);
if (output == 0) {
return PERMISSION_DENIED;
}
// Back in getOutputSamplingRate(): we now have the output handle for this stream.
gLock.lock();
// Look for a cached descriptor for this output in AudioSystem::gOutputs.
outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == 0) {
// No cached descriptor, so query AudioFlinger directly.
LOGV("getOutputSamplingRate() no output descriptor for output %d in gOutputs", output);
gLock.unlock();
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
uint32_t AudioFlinger::sampleRate(int output) const
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
LOGW("sampleRate() unknown thread %d", output);
return 0;
}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
uint32_t AudioFlinger::ThreadBase::sampleRate() const
{
// mSampleRate is set in AudioFlinger::PlaybackThread::readOutputParameters()
// (and in AudioFlinger::RecordThread::readInputParameters() for record threads).
return mSampleRate;
}
----------------------------------------------------------------
return thread->sampleRate();
}
----------------------------------------------------------------
*samplingRate = af->sampleRate(output);
} else {
// A cached descriptor exists, so read the sampling rate from it.
LOGV("getOutputSamplingRate() reading from output desc");
*samplingRate = outputDesc->samplingRate;
gLock.unlock();
}
LOGV("getOutputSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, *samplingRate);
return NO_ERROR;
}
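A closing note on where the cached descriptors come from. The fast path above ("reading from output desc") only works once gOutputs contains a descriptor for the output. The hint is the last step of AudioFlinger::openOutput() quoted earlier: thread->audioConfigChanged_l(AudioSystem::OUTPUT_OPENED), i.e. "notify client processes of the new output creation". That notification is delivered to AudioSystem's IAudioFlinger client callback in each client process (ioConfigChanged()), which builds an OutputDescriptor for the new output and stores it in gOutputs. The snippet below only sketches that idea; the names gOutputsSketch and onOutputOpened are made up for illustration.

#include <cstddef>
#include <map>

typedef int audio_io_handle_t;

struct OutputDescriptor {
    OutputDescriptor() : samplingRate(0), format(0), channels(0), frameCount(0), latency(0) {}
    unsigned samplingRate;
    int      format;
    int      channels;
    size_t   frameCount;
    unsigned latency;
};

static std::map<audio_io_handle_t, OutputDescriptor*> gOutputsSketch;   // plays the role of AudioSystem::gOutputs

// Conceptually invoked when the OUTPUT_OPENED notification arrives with the
// new output's handle and its configuration.
static void onOutputOpened(audio_io_handle_t output, const OutputDescriptor& config)
{
    gOutputsSketch[output] = new OutputDescriptor(config);
    // Subsequent getOutputSamplingRate()/getOutputFrameCount()/getOutputLatency()
    // calls for this output can now be answered without a Binder round-trip.
}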