使用 AVAudioSession 列出可用的音频输出目标

问题描述 投票:27回答:4

我需要列出一个iOS应用程序可用的音频输出。我的问题和这个问题类似。如何在iOS上列出可用的音频输出路径

我试过这个代码。

// Put the shared session into Playback mode and activate it.
// Fix: the original captured both NSError out-params but never checked the
// BOOL return values, silently discarding activation/category failures.
NSError *setCategoryError = nil;
BOOL success = [[AVAudioSession sharedInstance] setCategory: AVAudioSessionCategoryPlayback
                                                      error: &setCategoryError];
if (!success) {
    // Check the BOOL return, not the error pointer (Cocoa convention).
    NSLog(@"setCategory failed: %@", setCategoryError);
}

NSError *activationError = nil;
if (![[AVAudioSession sharedInstance] setActive: YES error: &activationError]) {
    NSLog(@"setActive failed: %@", activationError);
}

…
// Log every output port of the route currently in use.
// NOTE: currentRoute reports only the active route -- it does not enumerate
// every reachable destination (e.g. an idle AirPlay speaker), which is why
// only one output normally appears here.
// Fix: -count returns NSUInteger; the original logged it with %d, a
// format-specifier mismatch on 64-bit devices. Use %lu with a cast.
NSLog(@"session.currentRoute.outputs count %lu",
      (unsigned long)[[[[AVAudioSession sharedInstance] currentRoute] outputs] count]);
for (AVAudioSessionPortDescription *portDesc in [[[AVAudioSession sharedInstance] currentRoute] outputs]) {
    NSLog(@"-----");
    NSLog(@"portDesc UID %@", portDesc.UID);
    NSLog(@"portDesc portName %@", portDesc.portName);
    NSLog(@"portDesc portType %@", portDesc.portType);
    NSLog(@"portDesc channels %@", portDesc.channels);
}

然而,我总是只看到一个输出端口(计数为 1),即使实际上有两个(一个 AirPlay 和一个内置扬声器)也是如此。如果我使用"音乐"应用程序,我能够看到两个端口,并在它们之间切换;而在我的应用程序中,我只看到当前选中的那一个。

还有什么需要我做的吗?

谢谢你了

编辑:

我也试过这个代码。

// Query the legacy C Audio Session API for the current route description.
// NOTE(review): AudioSessionGetProperty is deprecated since iOS 7 -- prefer
// [AVAudioSession sharedInstance].currentRoute; kept here for reference.
CFDictionaryRef asCFType = nil;
UInt32 dataSize = sizeof(asCFType);
// Fix: the original discarded the OSStatus result.
OSStatus status = AudioSessionGetProperty(kAudioSessionProperty_AudioRouteDescription,
                                          &dataSize, &asCFType);
if (status != kAudioSessionNoError) {
    NSLog(@"AudioSessionGetProperty failed: %d", (int)status);
}
// Fix: CFBridgingRelease transfers ownership of the returned CF dictionary
// to ARC; the original plain __bridge cast leaked it (the caller is
// responsible for releasing CF objects returned by this getter).
NSDictionary *audioRoutesDesc = CFBridgingRelease(asCFType);
NSLog(@"audioRoutesDesc %@", audioRoutesDesc);

但字典中只列出了一个输出目的地。此外,输入源数组是空的(我有一个iPhone 4s)。

EDIT2:

我得到了一些工作使用MPVolumeView 。这个组件有一个按钮,让你选择输出音频路线,就像在音乐应用中一样。

如果你想隐藏滑块(只保留按钮),你可以使用以下代码:

// Hide the volume slider so the MPVolumeView shows only the route button.
self.myMPVolumeView.showsVolumeSlider = NO;
ios objective-c airplay avaudiosession
4个回答
3
投票

试试这样的东西,它比你需要的多,但你可以把它缩小。

    /// Builds (and NSLogs) a human-readable report of the device's audio
/// hardware: every available input port with its data sources, the
/// input/output ports of the route currently in use, and the preferred
/// input if one has been set.
///
/// Side effects: switches the shared session to the PlayAndRecord
/// category and activates it.
///
/// @return A multi-line report string (never nil).
+ (NSString *)demonstrateInputSelection
{
    NSError *theError = nil;
    BOOL result = YES;
    NSMutableString *info = [[NSMutableString alloc] init];
    [info appendString: @"     Device Audio Input Hardware\n"];

    NSString *str = nil;
    // -availableInputs requires iOS 7+.
    // NOTE(review): iOSMajorVersion is defined elsewhere in this project.
    if( iOSMajorVersion < 7 ){
        str = @"No input device information available";
        NSLog(@"%@",str);
        [info appendFormat:@"%@\n",str];

        return info;
    }

    AVAudioSession* myAudioSession = [AVAudioSession sharedInstance];

    // PlayAndRecord is needed so both input and output ports are reported.
    result = [myAudioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&theError];
    if (!result)
    {
        NSLog(@"setCategory failed");
    }

    result = [myAudioSession setActive:YES error:&theError];
    if (!result)
    {
        NSLog(@"setActive failed");
    }

    // Get the set of available inputs. If there are no audio accessories
    // attached, there will be only one available input -- the built-in mic.
    NSArray* inputs = [myAudioSession availableInputs];
    // Fix: -count returns NSUInteger; log it with %lu + cast (the original
    // %d was a format-specifier mismatch on 64-bit devices).
    str = [NSString stringWithFormat:@"\n--- Ports available on %@: %lu ---",
           [UIDevice currentDevice].name, (unsigned long)[inputs count]];
    NSLog(@"%@",str);
    [info appendFormat:@"%@\n",str];

    for (AVAudioSessionPortDescription* port in inputs)
    {
        str = @"\n**********";
        NSLog(@"%@",str);
        [info appendFormat:@"%@\n",str];
        str = [NSString stringWithFormat:@"Port :\"%@\": UID:%@", port.portName, port.UID ];
        NSLog(@"%@",str);
        [info appendFormat:@"%@\n",str];
        if( [port.dataSources count] ){
            str = [NSString stringWithFormat:@"Port has %lu data sources",
                   (unsigned long)[port.dataSources count] ];
            NSLog(@"%@",str);
            [info appendFormat:@"%@\n",str];
        }

        // Raw dump of the data sources (log only; not added to the report).
        str = [NSString stringWithFormat:@">%@", port.dataSources];
        NSLog(@"%@",str);

        // Classify the port type for the report; each branch produces the
        // same log/append pair, so the string is chosen first.
        if( [port.portType isEqualToString:AVAudioSessionPortLineIn] ){
            str = @"Line Input found";
        }
        else if( [port.portType isEqualToString:AVAudioSessionPortUSBAudio] ){
            str = @"USB Audio found";
        }
        else if ([port.portType isEqualToString:AVAudioSessionPortBuiltInMic]){
            str = @"Built-in Mic found";
        }
        else if ([port.portType isEqualToString:AVAudioSessionPortHeadsetMic]){
            str = @"Headset Mic found";
        }
        else{
            str = @"Other input source found";
        }
        NSLog(@"%@",str);
        [info appendFormat:@"%@\n",str];

        // Describe each data source of the port (e.g. front/back mic).
        for (AVAudioSessionDataSourceDescription* source in port.dataSources)
        {
            str = [NSString stringWithFormat:@"\nName:%@ (%d) \nPolar:%@ \nType:%@ \nPatterns:%@",
                   source.dataSourceName, [source.dataSourceID intValue],
                   source.selectedPolarPattern, port.portType, source.supportedPolarPatterns];
            NSLog(@"%@",str);
            [info appendFormat:@"%@\n",str];
        } // end data source iteration
    }

    str = @"\n----  Current Selected Ports ----\n";
    NSLog(@"%@",str);
    [info appendFormat:@"%@",str];

    // Inputs of the route currently in use.
    NSArray *currentInputs = myAudioSession.currentRoute.inputs;
    for( AVAudioSessionPortDescription *port in currentInputs ){
        str = [NSString stringWithFormat:@"\nInput Port :\"%@\":", port.portName ];
        NSLog(@"%@",str);
        [info appendFormat:@"%@\n",str];
        if( [port.dataSources count] ){
            str = [NSString stringWithFormat:@"Port has %lu data sources",
                   (unsigned long)[port.dataSources count] ];
            NSLog(@"%@",str);
            [info appendFormat:@"%@\n",str];

            str = [NSString stringWithFormat:@"Selected data source:%@",  port.selectedDataSource.dataSourceName];
            NSLog(@"%@",str);
            [info appendFormat:@"%@\n",str];

            if( [port.selectedDataSource.supportedPolarPatterns count] > 0 ){
                str = [NSString stringWithFormat:@"Selected polar pattern:%@", port.selectedDataSource.selectedPolarPattern];
                NSLog(@"%@",str);
                [info appendFormat:@"%@\n",str];
            }
        }
    }

    // Outputs of the route currently in use.
    NSArray *currentOutputs = myAudioSession.currentRoute.outputs;
    for( AVAudioSessionPortDescription *port in currentOutputs ){
        str = [NSString stringWithFormat:@"\nOutput Port :\"%@\":", port.portName ];
        NSLog(@"%@",str);
        [info appendFormat:@"%@\n",str];
        if( [port.dataSources count] ){
            str = [NSString stringWithFormat:@"Port has %lu data sources",
                   (unsigned long)[port.dataSources count] ];
            NSLog(@"%@",str);
            [info appendFormat:@"%@\n",str];

            str = [NSString stringWithFormat:@"Selected data source:%@",  port.selectedDataSource.dataSourceName];
            NSLog(@"%@",str);
            [info appendFormat:@"%@\n",str];
        }
    }

    if( myAudioSession.preferredInput.portName ){
        str = [NSString stringWithFormat:@"\nPreferred Port: %@ Source:%@\n",
               myAudioSession.preferredInput.portName,
               myAudioSession.preferredInput.selectedDataSource.dataSourceName];
    } else {
        str = @"\nNo Preferred Port set";
    }
    NSLog(@"%@",str);
    [info appendFormat:@"%@\n",str];

    // Fix: the original continued past this return with unreachable code
    // that attempted to select a front data source. It could never execute
    // (and frontDataSource was never assigned -- its assignment was
    // commented out), so that dead code has been removed.
    return info;
}

3
投票

这将取决于你的AVAudioSession类别。

你可以放心地假设在iPhone上,你至少有一个麦克风作为输入,一个扬声器作为输出。如果你想获得蓝牙AirPlay的输出列表,首先你得确保你的会话类别正在向你报告它们。

// Swift 2-era syntax (this answer predates Swift 3 renaming).
// PlayAndRecord with .AllowBluetooth is required for Bluetooth HFP routes
// to be reported by the session at all.
// NOTE(review): `audioSession` is assumed to be AVAudioSession.sharedInstance(),
// obtained outside this fragment.
do 
{
    try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, withOptions: .AllowBluetooth)
    try audioSession.setActive(true)
} 
catch let e
{
    debugPrint("failed to initialize audio session: \(e)")
}

那么获取可用输出的一个非直观的方法是检查 AVAudioSession.availableInputs 因为通常蓝牙HFP设备也会有一个麦克风......。我现在可能假设的太多了......但这是持续获得你的availableOutputs的唯一方法。

一个更好的方法是使用 MultiRoute(AVAudioSessionCategoryMultiRoute)类别,它在访问 AVAudioSessionPort 时会给你更多的自由度。


2
投票
// Enumerate only the outputs of the route currently in use; this will not
// list alternative destinations such as an idle AirPlay device.
AVAudioSessionRouteDescription *currentRoute = [[AVAudioSession sharedInstance] currentRoute];
    for (AVAudioSessionPortDescription *output in currentRoute.outputs) {
        // Inspect output.portName / output.portType / output.UID here.
    }

-1
投票

请查看这个完整的工作代码。

完整的音频会话代码,输出设备手柄,并在动作表中显示。

在给定的链接中,有以下文件: 以下是每个文件的简要说明。

AVAudioSessionHandler.swift ->

所有所需的方法都是可用的,这些方法可以根据所选的输出设备覆盖路由。

AudioOutputDeviceHandler.swift->

所有需要的方法都是可用的,这些方法对于获取输入设备列表、当前输出设备以及显示所有可用输入设备的动作表都很有用。

SpeakerUIHandler.swift ->

所有所需的方法都是可用的,这些方法有助于根据所选的输出设备更新扬声器用户界面。

AudioSession.swift ->

所有的方法都是可用的,这些方法对于创建音频会话和设置音频会话所需的所有参数非常有用。

请查看下面的代码以获得可用的输入设备列表。

/// UI-level helpers that present an action sheet of the available audio
/// routes and switch the session's preferred input / output override to the
/// route the user picks.
///
/// NOTE(review): several methods set the undocumented "checked" / "image"
/// KVC keys on UIAlertAction via setValue(_:forKey:). These are private
/// UIKit internals and may break in a future iOS release -- confirm before
/// shipping.
extension AVAudioSession {

    /// Presents an action sheet of selectable routes, or -- when no external
    /// device warrants a list -- directly toggles between speaker and
    /// receiver.
    /// - Parameters:
    ///   - presenterViewController: the controller that presents the sheet.
    ///   - speakerButton: anchor view for the iPad popover presentation.
    @objc func ChangeAudioOutput(_ presenterViewController : UIViewController, _ speakerButton: UIButton) {

        let CHECKED_KEY = "checked"
        var deviceAction = UIAlertAction()
        var headphonesExist = false

        // Show the picker only when the handler says a device list is needed.
        if AudioOutputDeviceHandler.sharedInstance.isDeviceListRequired() {

            let optionMenu = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)

            // availableInputs is used as a proxy for the selectable routes.
            // NOTE(review): force-unwrap assumes an active session -- verify.
            for audioPort in self.availableInputs!{

                switch audioPort.portType {

                case AVAudioSession.Port.bluetoothA2DP, AVAudioSession.Port.bluetoothHFP, AVAudioSession.Port.bluetoothLE :

                    overrideBluetooth(audioPort, optionMenu)
                    break

                case AVAudioSession.Port.builtInMic, AVAudioSession.Port.builtInReceiver:

                    // Kept aside; only added later if no headphones exist.
                    deviceAction = overrideBuiltInReceiver(audioPort)
                    break

                case AVAudioSession.Port.headphones, AVAudioSession.Port.headsetMic:

                    headphonesExist = true
                    overrideheadphones(audioPort,optionMenu)
                    break

                case AVAudioSession.Port.carAudio:
                    overrideCarAudio(port: audioPort, optionMenu: optionMenu)
                    break

                default:
                    break
                }
            }

            // The "iPhone" (receiver) entry is suppressed while wired
            // headphones are plugged in.
            if !headphonesExist {

                if self.currentRoute.outputs.contains(where: {return $0.portType == AVAudioSession.Port.builtInReceiver}) || self.currentRoute.outputs.contains(where: {return $0.portType == AVAudioSession.Port.builtInMic}) {
                    deviceAction.setValue(true, forKey: CHECKED_KEY)
                }
                optionMenu.addAction(deviceAction)
            }

            overrideSpeaker(optionMenu)

            let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: {
                (alert: UIAlertAction!) -> Void in

            })

            optionMenu.addAction(cancelAction)

            alertViewSetupForIpad(optionMenu, speakerButton)
            presenterViewController.present(optionMenu, animated: false, completion: nil)

            // auto dismiss after 5 seconds
            DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
                optionMenu.dismiss(animated: true, completion: nil)
            }

        } else {
            // No list needed: just toggle speaker <-> receiver.
            if self.isBuiltInSpeaker {

                if AudioOutputDeviceHandler.sharedInstance.isSpeaker {
                    let port = self.currentRoute.inputs.first!
                    setPortToNone(port)
                    AudioOutputDeviceHandler.sharedInstance.isSpeaker = false
                }
            }
            else if self.isReceiver || self.isBuiltInMic  || self.isHeadphonesConnected {

                setPortToSpeaker()
                AudioOutputDeviceHandler.sharedInstance.isSpeaker = true
            }
        }
    }

    /// Adds an action-sheet entry that routes audio to a connected car
    /// audio system by setting it as the preferred input.
    func overrideCarAudio(port: AVAudioSessionPortDescription, optionMenu: UIAlertController) {

        let action = UIAlertAction(title: port.portName, style: .default) { (action) in
            do {
                // set new input
                try self.setPreferredInput(port)
            } catch let error as NSError {
                print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
            }
        }

        // Check-mark the entry when the current route already uses this port.
        if self.currentRoute.outputs.contains(where: {return $0.portType == port.portType}){
            action.setValue(true, forKey: "checked")
        }

        if let image = UIImage(named: "CarAudio") {
            action.setValue(image, forKey: "image")
        }
        optionMenu.addAction(action)
    }

    /// Adds a "Headphones" entry that routes audio to the wired headset.
    func overrideheadphones(_ port: AVAudioSessionPortDescription, _ optionMenu: UIAlertController) {

        let CHECKED_KEY = "checked"
        let HEADPHONES_TITLE = "Headphones"
        let action = UIAlertAction(title: HEADPHONES_TITLE, style: .default) { (action) in
            do {
                // set new input
                try self.setPreferredInput(port)
            } catch let error as NSError {
                print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
            }
        }

        // Check-mark when either the headphone or headset-mic port is active.
        if self.currentRoute.outputs.contains(where: {return $0.portType == AVAudioSession.Port.headphones}) || self.currentRoute.outputs.contains(where: {return $0.portType == AVAudioSession.Port.headsetMic}) {
            action.setValue(true, forKey: CHECKED_KEY)
        }

        if let image = UIImage(named: "Headphone") {
            action.setValue(image, forKey: "image")
        }

        optionMenu.addAction(action)
    }

    /// Adds a "Speaker" entry that forces output to the built-in speaker.
    /// NOTE(review): isSpeaker is set to true unconditionally here, before
    /// the user has tapped the action -- looks suspicious; confirm intent.
    func overrideSpeaker(_ optionMenu: UIAlertController) {

        let SPEAKER_TITLE = "Speaker"
        let CHECKED_KEY = "checked"
        let speakerOutput = UIAlertAction(title: SPEAKER_TITLE, style: .default, handler: {
            [weak self] (alert: UIAlertAction!) -> Void in
            self?.setPortToSpeaker()
        })
        AudioOutputDeviceHandler.sharedInstance.isSpeaker = true

        if self.currentRoute.outputs.contains(where: {return $0.portType == AVAudioSession.Port.builtInSpeaker}){

            speakerOutput.setValue(true, forKey: CHECKED_KEY)
        }

        if let image = UIImage(named: "Speaker") {
            speakerOutput.setValue(image, forKey: "image")
        }
        optionMenu.addAction(speakerOutput)
    }

    /// Adds an entry (titled with the device's name) that routes audio to a
    /// Bluetooth accessory by setting it as the preferred input.
    func overrideBluetooth(_ port: AVAudioSessionPortDescription, _ optionMenu: UIAlertController) {

        let CHECKED_KEY = "checked"
        let action = UIAlertAction(title: port.portName, style: .default) { (action) in
            do {
                // set new input
                try self.setPreferredInput(port)
            } catch let error as NSError {
                print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
            }
        }

        if self.currentRoute.outputs.contains(where: {return $0.portType == port.portType}){
            action.setValue(true, forKey: CHECKED_KEY)
        }
        if let image = UIImage(named: "Bluetooth") {
            action.setValue(image, forKey: "image")
        }
        optionMenu.addAction(action)
    }

    /// Builds (but does not add) an "iPhone" entry that restores the normal
    /// receiver route; the caller decides whether to add and check-mark it.
    func overrideBuiltInReceiver(_ port: AVAudioSessionPortDescription) -> UIAlertAction {

        let IPHONE_TITLE = "iPhone"
        let deviceAction = UIAlertAction(title: IPHONE_TITLE, style: .default) {[weak self] (action) in
            self?.setPortToNone(port)
        }

        if let image = UIImage(named: "Device") {
            deviceAction.setValue(image, forKey: "image")
        }
        return deviceAction
    }

    /// Forces output to the built-in speaker via the output override.
    func setPortToSpeaker() {

        do {
            try self.overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
        } catch let error as NSError {
            print("audioSession error turning on speaker: \(error.localizedDescription)")
        }
    }

    /// Clears the speaker override, then selects the given port as the
    /// preferred input (restores normal routing).
    func setPortToNone(_ port: AVAudioSessionPortDescription) {

        do {
            // remove speaker if needed
            try self.overrideOutputAudioPort(AVAudioSession.PortOverride.none)
            // set new input
            try self.setPreferredInput(port)
        } catch let error as NSError {
            print("audioSession error change to input: \(AVAudioSession.PortOverride.none.rawValue) with error: \(error.localizedDescription)")
        }
    }

    /// Anchors the action sheet to the speaker button so the popover form
    /// used on iPad has a valid source; harmless on iPhone.
    func alertViewSetupForIpad(_ optionMenu: UIAlertController, _ speakerButton: UIButton) {
        optionMenu.modalPresentationStyle = .popover
        if let presenter = optionMenu.popoverPresentationController {
            presenter.sourceView = speakerButton;
            presenter.sourceRect = speakerButton.bounds;
        }
    }
}

/// Route-state queries for the audio session: each flag answers whether the
/// route currently in use sends audio to a given kind of output port.
/// The static variants are shorthands that consult the shared instance.
extension AVAudioSession {

    // MARK: - Instance queries (current route)

    /// True when any output of the current route is a CarPlay/car system.
    var isCarAudioConnected: Bool {
        return currentRoute.outputs.contains { $0.isCarAudio }
    }

    /// True when any output is wired headphones or a headset mic.
    var isHeadphonesConnected: Bool {
        return currentRoute.outputs.contains { $0.isHeadphones }
    }

    /// True when any output is a Bluetooth transport (HFP/A2DP/LE).
    var isBluetoothConnected: Bool {
        return currentRoute.outputs.contains { $0.isBluetooth }
    }

    /// True when any output is the built-in loudspeaker.
    var isBuiltInSpeaker: Bool {
        return currentRoute.outputs.contains { $0.isSpeaker }
    }

    /// True when any output is the built-in (ear) receiver.
    var isReceiver: Bool {
        return currentRoute.outputs.contains { $0.isReceiver }
    }

    /// True when any output is the built-in microphone port.
    var isBuiltInMic: Bool {
        return currentRoute.outputs.contains { $0.isBuiltInMic }
    }

    // MARK: - Static shorthands on the shared session

    static var isHeadphonesConnected: Bool {
        return sharedInstance().isHeadphonesConnected
    }

    static var isBluetoothConnected: Bool {
        return sharedInstance().isBluetoothConnected
    }

    static var isCarAudioConnected: Bool {
        return sharedInstance().isCarAudioConnected
    }

    static var isBuiltInSpeaker: Bool {
        return sharedInstance().isBuiltInSpeaker
    }

    static var isReceiver: Bool {
        return sharedInstance().isReceiver
    }

    static var isBuiltInMic: Bool {
        return sharedInstance().isBuiltInMic
    }
}

/// Classification helpers that bucket a port by its `portType`.
extension AVAudioSessionPortDescription {

    /// Wired headphones or a wired headset microphone.
    var isHeadphones: Bool {
        switch portType {
        case AVAudioSession.Port.headphones, AVAudioSession.Port.headsetMic:
            return true
        default:
            return false
        }
    }

    /// Any Bluetooth transport: hands-free profile, A2DP, or LE.
    var isBluetooth: Bool {
        switch portType {
        case AVAudioSession.Port.bluetoothHFP,
             AVAudioSession.Port.bluetoothA2DP,
             AVAudioSession.Port.bluetoothLE:
            return true
        default:
            return false
        }
    }

    /// CarPlay / car audio system.
    var isCarAudio: Bool {
        return portType == AVAudioSession.Port.carAudio
    }

    /// The built-in loudspeaker.
    var isSpeaker: Bool {
        return portType == AVAudioSession.Port.builtInSpeaker
    }

    /// The built-in microphone.
    var isBuiltInMic: Bool {
        return portType == AVAudioSession.Port.builtInMic
    }

    /// The built-in (ear) receiver.
    var isReceiver: Bool {
        return portType == AVAudioSession.Port.builtInReceiver
    }
}
© www.soinside.com 2019 - 2024. All rights reserved.