Mix two audio files in Swift with different start points
I'm trying to mix two audio files in Swift with AVFoundation. I have a solution that mixes two audio files and plays them simultaneously. What I really want is to start the second file after a delay: for example, audio 1 starts playing, and the second audio starts 10 seconds (a given time) later. Any help is appreciated. Thanks in advance.
private var audioFiles: Array<String>
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()

func Play() {
    // do work on a background thread
    DispatchQueue.global(qos: .background).async {
        self.audioEngine.attach(self.mixer)
        self.audioEngine.connect(self.mixer, to: self.audioEngine.outputNode, format: nil)
        // !important - start the engine *before* setting up the player nodes
        try! self.audioEngine.start()

        let fileManager = FileManager.default
        for audioFile in self.audioFiles {
            // Create and attach an audio player node for this file
            let audioPlayer = AVAudioPlayerNode()
            self.audioEngine.attach(audioPlayer)
            // Notice the output is the mixer in this case
            self.audioEngine.connect(audioPlayer, to: self.mixer, format: nil)

            let fileUrl = URL(fileURLWithPath: audioFile.removingPercentEncoding ?? audioFile)
            // Skip files that don't exist rather than crashing in AVAudioFile
            guard fileManager.fileExists(atPath: fileUrl.path) else { continue }
            let file = try! AVAudioFile(forReading: fileUrl)

            audioPlayer.scheduleFile(file, at: nil, completionHandler: nil)
            audioPlayer.play(at: nil)
        }
    }
}
Tags: ios, swift, cocoa, cocoa-touch, avfoundation
3 Answers
You should use scheduleSegment(_:startingFrame:frameCount:at:completionHandler:), which lets you schedule each file at an explicit sample time on its player:
func scheduleWithOffset(_ offset: TimeInterval) {
    let samplerate1 = file1.processingFormat.sampleRate
    player1.scheduleSegment(file1,
                            startingFrame: 0,
                            frameCount: AVAudioFrameCount(file1.length),
                            at: AVAudioTime(sampleTime: 0, atRate: samplerate1),
                            completionHandler: nil)

    let samplerate2 = file2.processingFormat.sampleRate
    player2.scheduleSegment(file2,
                            startingFrame: 0,
                            frameCount: AVAudioFrameCount(file2.length),
                            at: AVAudioTime(sampleTime: AVAudioFramePosition(offset * samplerate2), atRate: samplerate2),
                            completionHandler: nil)

    // This can take an indeterminate amount of time, so both files should be prepared before either starts.
    player1.prepare(withFrameCount: 8192)
    player2.prepare(withFrameCount: 8192)

    // Start the files at a common time slightly in the future to ensure a synchronous start.
    let hostTimeNow = mach_absolute_time()
    let hostTimeFuture = hostTimeNow + AVAudioTime.hostTime(forSeconds: 0.2)
    let startTime = AVAudioTime(hostTime: hostTimeFuture)

    player1.play(at: startTime)
    player2.play(at: startTime)
}
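The function above assumes player1, player2, file1, and file2 already exist and are attached to a running engine; the answer doesn't show that part. A minimal setup sketch under that assumption (url1 and url2 are placeholders for your own file URLs, not part of the original answer) might look like:

    import AVFoundation

    // Hypothetical setup assumed by scheduleWithOffset(_:) above.
    let engine = AVAudioEngine()
    let player1 = AVAudioPlayerNode()
    let player2 = AVAudioPlayerNode()

    let file1 = try! AVAudioFile(forReading: url1)
    let file2 = try! AVAudioFile(forReading: url2)

    engine.attach(player1)
    engine.attach(player2)
    // Connect each player with its file's processingFormat; the mixer handles any needed conversion.
    engine.connect(player1, to: engine.mainMixerNode, format: file1.processingFormat)
    engine.connect(player2, to: engine.mainMixerNode, format: file2.processingFormat)

    try! engine.start()
    scheduleWithOffset(10)   // file2 begins ten seconds after file1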
I don't personally have experience with AVAudioEngine or AVAudioMixerNode; however, you can do this with Timer. You currently have audioPlayer.play(at: nil), which starts playback immediately after the player is set up. What I would do, or try to do, is drive the delayed start with a timer:
let timeAfter: TimeInterval = 10
let timer = Timer.scheduledTimer(timeInterval: timeAfter, target: self, selector: #selector(playAudio(_:)), userInfo: audioPlayer, repeats: false)
which would replace audioPlayer.play(at: nil). Then add the method the timer calls to play the audio:
@objc func playAudio(_ timer: Timer) {
    guard let audioPlayer = timer.userInfo as? AVAudioPlayerNode else { return }
    audioPlayer.play()
}
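One caveat with this approach: a Timer scheduled this way needs a running run loop, and the background queue used in the question's Play() does not have one, so the timer would have to be created on the main thread. A minimal sketch of the same idea using DispatchQueue.main.asyncAfter instead (assuming audioPlayer is the second player node, already attached, connected, and scheduled) avoids both the run-loop requirement and the selector plumbing:

    // Assumption: `audioPlayer` is the second AVAudioPlayerNode, already scheduled on a running engine.
    let delaySeconds: TimeInterval = 10
    DispatchQueue.main.asyncAfter(deadline: .now() + delaySeconds) {
        audioPlayer.play()   // starts roughly delaySeconds later; not sample-accurate like scheduleSegment
    }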
You may add another player node to the mixer and then play it after a delay, like this:
let audioPlayer2 = AVAudioPlayerNode()
self.audioEngine.attach(audioPlayer2)
self.audioEngine.connect(audioPlayer2, to: self.mixer, format: nil)
let file2 = try! AVAudioFile(forReading: fileUrl2!)

func delayTime(_ delayTime: TimeInterval) -> AVAudioTime {
    let outputFormat = audioPlayer2.outputFormat(forBus: 0)
    // lastRenderTime is only meaningful once the engine is running
    let nowSampleTime = audioPlayer2.lastRenderTime?.sampleTime ?? 0
    let startSampleTime = nowSampleTime + AVAudioFramePosition(delayTime * outputFormat.sampleRate)
    return AVAudioTime(sampleTime: startSampleTime, atRate: outputFormat.sampleRate)
}

audioPlayer2.scheduleFile(file2, at: nil, completionHandler: nil)
audioPlayer2.play(at: delayTime(3.0))
You can also insert a sample-delay audio unit between the second player node and the mixer; that adds the offset without scheduling it manually.
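For completeness, here is one way this idea could be folded back into the question's Play() loop so that each file in audioFiles starts a fixed number of seconds after the previous one. secondsBetweenFiles is an illustrative name, not part of the original code, and the sample-time arithmetic mirrors the delayTime(_:) helper above:

    // Sketch only: one player per file, each offset 10 s after the previous one.
    let secondsBetweenFiles: TimeInterval = 10

    for (index, audioFile) in self.audioFiles.enumerated() {
        let player = AVAudioPlayerNode()
        self.audioEngine.attach(player)
        self.audioEngine.connect(player, to: self.mixer, format: nil)

        let url = URL(fileURLWithPath: audioFile)
        guard let file = try? AVAudioFile(forReading: url) else { continue }
        player.scheduleFile(file, at: nil, completionHandler: nil)

        let format = player.outputFormat(forBus: 0)
        let offsetSeconds = TimeInterval(index) * secondsBetweenFiles
        // Offset the start relative to the node's current render time, as in delayTime(_:) above.
        let startSample = (player.lastRenderTime?.sampleTime ?? 0)
            + AVAudioFramePosition(offsetSeconds * format.sampleRate)
        player.play(at: AVAudioTime(sampleTime: startSample, atRate: format.sampleRate))
    }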