iOS AVAudioEngine trouble


Not sure if anyone here has experience with AVAudioEngine, but I'm having a few difficulties with it. In the code below I create two sounds, sound1 and sound2. Each Sound holds a number of Sample objects so that the same sound can be played several times simultaneously. The problem is that once I create more than roughly 6 to 8 AVAudioPlayerNodes, each with its own AVAudioUnitTimePitch, the audio gets completely garbled, and if I raise the sample count high enough I can't play a single sound. I'm not sure whether my code is wrong or whether AVAudioEngine has a practical node limit; I'm testing on an iPhone 4S, and the documentation for this API is really sparse. If anyone can figure out what I'm doing wrong, feel free to have this code. (A stripped-down pooled alternative follows the full listing.)

import Foundation
import AVFoundation

class AudioManager{
    var audioEngine:AVAudioEngine!;
    var mixer:AVAudioMixerNode!;
    var sound1:Sound!;
    var sound2:Sound!;
    init(){
        audioEngine = AVAudioEngine();
        mixer = audioEngine.mainMixerNode; //accessing mainMixerNode creates the mixer and output nodes and connects them
        
        sound1 = Sound(aManager: self, path: "assets/sounds/waterRefill", ofType: "mp3", numOfSamples: 7);
        sound2 = Sound(aManager: self, path: "assets/sounds/balloonCreate", ofType: "mp3", numOfSamples: 2);
        
        //start the engine after the node graph has been wired up
        do{
            try audioEngine.start();
        }catch let e as NSError{
            print("Error starting AVAudioEngine: \(e)");
        }
    }
    
    func playSound(){
        sound1.play(1.0, pitch: 1.0);
    }
    
    func playSound2(){
       sound2.play(1.0, pitch: 1.0);
    }
    
    class Sound {
        var audioManager:AudioManager!;
        var audioFileBuffer:AVAudioPCMBuffer!;
        var numSamples:Int = 1;
        var audioIndex:Int = 0;
        var sampleList:[Sample] = [Sample]();
        
        init(aManager:AudioManager, path:String, ofType:String, numOfSamples:Int){
            audioManager = aManager;
            numSamples = max(1, numOfSamples); //always keep at least one sample
            audioFileBuffer = createAudioBuffer(path, ofType: ofType);
            for _ in 0..<numSamples {
                sampleList.append(Sample(sound: self));
            }
        }
        
        func createAudioBuffer(path:String, ofType:String)-> AVAudioPCMBuffer?{
            //guard against a missing resource instead of force-unwrapping
            guard let filePath = NSBundle.mainBundle().pathForResource(path, ofType: ofType) else{
                print("Audio file not found: \(path).\(ofType)");
                return nil;
            }
            let fileURL: NSURL = NSURL(fileURLWithPath: filePath)
            do{
                let audioFile = try AVAudioFile(forReading: fileURL)
                let audioFormat = audioFile.processingFormat
                let audioFrameCount = UInt32(audioFile.length)
                let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
                try audioFile.readIntoBuffer(audioFileBuffer)
                return audioFileBuffer;
            }catch let e as NSError{
                print("Error loading audio file into buffer: \(e)");
            }
            return nil;
        }
        
        private func runIndex(){
            //advance round-robin through the sample pool
            audioIndex = (audioIndex + 1) % numSamples;
        }
        
        func play(volume:Float, pitch:Float){
            //step through the pool looking for a sample that isn't already playing
            var count:Int = 0;
            while(count < numSamples){
                if(numSamples > 1){
                    runIndex();
                }
                if(!sampleList[audioIndex].pitchPlayer.playing){
                    sampleList[audioIndex].volume = volume;
                    sampleList[audioIndex].pitch = pitch;
                    sampleList[audioIndex].playSample();
                    break;
                }
                count += 1;
            }
        }
        
        class Sample{
            var parentSound:Sound!
            var pitchPlayer:AVAudioPlayerNode!;
            var timePitch:AVAudioUnitTimePitch!;
            var volume:Float = 1.0
            var pitch:Float = 1.0
            
            init(sound:Sound){
                parentSound = sound;
                pitchPlayer = AVAudioPlayerNode();
                timePitch = AVAudioUnitTimePitch();
                
                //each sample attaches its own player/time-pitch pair to the engine:
                //pitchPlayer -> timePitch -> mainMixer
                parentSound.audioManager.audioEngine.attachNode(pitchPlayer);
                parentSound.audioManager.audioEngine.attachNode(timePitch);
                
                parentSound.audioManager.audioEngine.connect(pitchPlayer, to: timePitch, format: parentSound.audioFileBuffer.format);
                parentSound.audioManager.audioEngine.connect(timePitch, to: parentSound.audioManager.mixer, format: parentSound.audioFileBuffer.format);
            }
            
            func playSample(){
                pitchPlayer.volume = volume;
                timePitch.pitch = pitch;
                print("Sample Play");
                
                //schedule the buffer first, then start the player;
                //.Interrupts cuts off anything still queued on this node
                pitchPlayer.scheduleBuffer(parentSound.audioFileBuffer, atTime: nil, options: .Interrupts, completionHandler: {[unowned self]() in
                    //called on a background thread once the buffer has been consumed,
                    //which can be slightly before the node finishes rendering it
                    print("Is Playing: \(self.pitchPlayer.playing)");
                    self.pitchPlayer.stop();
                    print("Is Playing: \(self.pitchPlayer.playing)");
                    });
                pitchPlayer.play();
            }
        }
    }
}
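
In case it helps narrow things down, below is the stripped-down pooled alternative I mentioned above: one small fixed pool of player/time-pitch pairs shared by every sound, instead of attaching a pair per sample. It's only a sketch (the pool size of 4 and the single shared format are assumptions on my part), not something I've verified on the 4S yet.

import AVFoundation

//a small shared pool of player/time-pitch pairs; every sound schedules
//onto the next pair in the pool instead of attaching its own nodes
class PlayerPool {
    let engine = AVAudioEngine();
    var players = [AVAudioPlayerNode]();
    var pitches = [AVAudioUnitTimePitch]();
    var nextIndex = 0;
    
    init(format:AVAudioFormat, poolSize:Int = 4){ //poolSize of 4 is a guess
        for _ in 0..<poolSize {
            let player = AVAudioPlayerNode();
            let pitch = AVAudioUnitTimePitch();
            engine.attachNode(player);
            engine.attachNode(pitch);
            engine.connect(player, to: pitch, format: format);
            engine.connect(pitch, to: engine.mainMixerNode, format: format);
            players.append(player);
            pitches.append(pitch);
        }
        do{
            try engine.start();
        }catch let e as NSError{
            print("Error starting AVAudioEngine: \(e)");
        }
    }
    
    func play(buffer:AVAudioPCMBuffer, volume:Float, pitch:Float){
        let player = players[nextIndex];
        let timePitch = pitches[nextIndex];
        nextIndex = (nextIndex + 1) % players.count;
        
        player.volume = volume;
        timePitch.pitch = pitch;
        //schedule first, then play; .Interrupts cuts off whatever
        //this pair was still playing
        player.scheduleBuffer(buffer, atTime: nil, options: .Interrupts, completionHandler: nil);
        player.play();
    }
}

The trade-off is that every buffer has to share one format for the pool's connections, and a sound can get cut off when all pairs are busy, but the total node count stays fixed no matter how many sounds exist.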