Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
aure committed Apr 8, 2018
2 parents 245d285 + 6a429df commit 8ee681b
Show file tree
Hide file tree
Showing 69 changed files with 2,616 additions and 293 deletions.
4 changes: 2 additions & 2 deletions AudioKit.podspec.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "AudioKit",
"version": "4.2.1",
"version": "4.2.2",
"authors": {
"Aurelius Prochazka": "[email protected]"
},
Expand All @@ -13,7 +13,7 @@
"documentation_url": "http://audiokit.io/docs/",
"static_framework": true,
"source": {
"http": "https://github.com/audiokit/AudioKit/releases/download/v4.2.1/AudioKit.framework.zip"
"http": "https://files.audiokit.io/releases/4.2.2/AudioKit.framework.zip"
},
"summary": "Open-source audio synthesis, processing, & analysis platform.",
"platforms": {
Expand Down
172 changes: 151 additions & 21 deletions AudioKit/Common/Internals/Utilities/AVAudioBufferConvenience.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,14 @@

extension AVAudioPCMBuffer {

/// Describes the loudest point found in a buffer, as produced by `peak()`.
public struct Peak {
    public init() {}

    /// Sentinel "lowest possible" amplitude used to seed peak scans.
    /// NOTE(review): this is a mutable `static var`; consider `static let` —
    /// verify no caller reassigns it before tightening.
    public static var min: Float = -10_000.0

    /// Time of the peak, in seconds from the start of the buffer.
    public var time: Double = 0

    /// Sample-frame index at which the peak was found.
    public var framePosition: Int = 0

    /// Linear amplitude (absolute sample value) of the peak.
    public var amplitude: Float = 1
}

/**
Copies data from another PCM buffer. Will copy to the end of the buffer (frameLength), and
increment frameLength. Will not exceed frameCapacity.
Expand All @@ -32,12 +40,8 @@ extension AVAudioPCMBuffer {
return 0
}

let count = Int(
min(
min(frames == 0 ? buffer.frameLength : frames, remainingCapacity),
buffer.frameLength - readOffset
)
)
let count = Int(min(min(frames == 0 ? buffer.frameLength : frames, remainingCapacity),
buffer.frameLength - readOffset))

if count <= 0 {
print("AVAudioBuffer copy(from) - No frames to copy!")
Expand All @@ -47,17 +51,17 @@ extension AVAudioPCMBuffer {
let frameSize = Int(format.streamDescription.pointee.mBytesPerFrame)
if let src = buffer.floatChannelData,
let dst = floatChannelData {
for channel in 0..<Int(format.channelCount) {
for channel in 0 ..< Int(format.channelCount) {
memcpy(dst[channel] + Int(frameLength), src[channel] + Int(readOffset), count * frameSize)
}
} else if let src = buffer.int16ChannelData,
let dst = int16ChannelData {
for channel in 0..<Int(format.channelCount) {
for channel in 0 ..< Int(format.channelCount) {
memcpy(dst[channel] + Int(frameLength), src[channel] + Int(readOffset), count * frameSize)
}
} else if let src = buffer.int32ChannelData,
let dst = int32ChannelData {
for channel in 0..<Int(format.channelCount) {
for channel in 0 ..< Int(format.channelCount) {
memcpy(dst[channel] + Int(frameLength), src[channel] + Int(readOffset), count * frameSize)
}
} else {
Expand Down Expand Up @@ -88,12 +92,20 @@ extension AVAudioPCMBuffer {

/// - Returns: The time in seconds of the peak of the buffer, or 0 if it failed
open func peakTime() -> Double {
    // peak() already guards for an empty buffer and missing float data,
    // so the previous duplicate guards (and the unused `floatData` binding,
    // which produced a compiler warning) are unnecessary here.
    return peak()?.time ?? 0
}

/// - Returns: A Peak struct containing the time, frame position and peak amplitude
open func peak() -> Peak? {
guard self.frameLength > 0 else { return nil }
guard let floatData = self.floatChannelData else { return nil }

var framePosition = 0
var value = Peak()
var position = 0
var lastPeak: Float = -10_000.0
var peakValue: Float = Peak.min
let frameLength = 512
let channelCount = Int(self.format.channelCount)

Expand All @@ -112,24 +124,26 @@ extension AVAudioPCMBuffer {
block[i] = floatData[channel][i + position]
}
// scan the block
let peak = getPeak(from: block)
let blockPeak = getPeak(from: block)

if peak > lastPeak {
framePosition = position
lastPeak = peak
if blockPeak > peakValue {
value.framePosition = position
value.time = Double(position / self.format.sampleRate)
peakValue = blockPeak
}
position += block.count
}
}

let time = Double(framePosition / self.format.sampleRate)
return time
value.amplitude = peakValue
// AKLog(value)
return value
}

// return the highest level in the given array
// Returns the highest level in the given array
private func getPeak(from buffer: [Float]) -> Float {
// create variable with very small value to hold the peak value
var peak: Float = -10_000.0
var peak: Float = Peak.min

for i in 0 ..< buffer.count {
// store the absolute value of the sample
Expand All @@ -138,4 +152,120 @@ extension AVAudioPCMBuffer {
}
return peak
}

/// Returns a normalized copy of the buffer (scaled so its peak amplitude is 1.0),
/// or the original buffer if normalization isn't possible.
open func normalize() -> AVAudioPCMBuffer? {
    guard let floatData = self.floatChannelData else { return self }

    // Unwrap the new buffer up front instead of optional-chaining every write;
    // fall back to the original on allocation failure rather than returning nil.
    guard let normalizedBuffer = AVAudioPCMBuffer(pcmFormat: self.format,
                                                  frameCapacity: self.frameCapacity) else {
        AKLog("Failed creating output buffer, returning original buffer")
        return self
    }

    let length: AVAudioFrameCount = self.frameLength
    let channelCount = Int(self.format.channelCount)

    guard let peak: AVAudioPCMBuffer.Peak = peak() else {
        AKLog("Failed getting peak amplitude, returning original buffer")
        return self
    }

    // A silent buffer has zero peak amplitude; dividing by it would make
    // gainFactor infinite and fill the output with NaN/inf samples.
    guard peak.amplitude > 0 else {
        AKLog("Buffer is silent, returning original buffer")
        return self
    }

    let gainFactor: Float = 1 / peak.amplitude

    // i is the frame index in the buffer
    for i in 0 ..< Int(length) {
        // n is the channel
        for n in 0 ..< channelCount {
            normalizedBuffer.floatChannelData?[n][i] = floatData[n][i] * gainFactor
        }
    }
    normalizedBuffer.frameLength = length

    return normalizedBuffer
}

/// Returns a reversed buffer
open func reverse() -> AVAudioPCMBuffer? {
    let output = AVAudioPCMBuffer(pcmFormat: self.format,
                                  frameCapacity: self.frameCapacity)

    let frameCount = Int(self.frameLength)
    let channels = Int(self.format.channelCount)

    // Walk the source from its last frame while writing the destination
    // from its first, copying every channel at each step.
    var readIndex = frameCount - 1
    var writeIndex = 0
    while readIndex >= 0 {
        for channel in 0 ..< channels {
            output?.floatChannelData?[channel][writeIndex] = self.floatChannelData?[channel][readIndex] ?? 0.0
        }
        readIndex -= 1
        writeIndex += 1
    }
    output?.frameLength = self.frameLength
    return output
}

/// Returns a new buffer that has had fades applied to it. Pass 0 if you don't want either inTime or outTime.
///
/// - Parameters:
///   - inTime: Fade-in duration in seconds (0 = no fade in)
///   - outTime: Fade-out duration in seconds (0 = no fade out)
/// - Returns: A faded copy of the buffer, or the original buffer on failure
open func fade(inTime: Double, outTime: Double) -> AVAudioPCMBuffer? {
    guard let floatData = self.floatChannelData, inTime > 0 || outTime > 0 else {
        AKLog("Error fading buffer, returning original...")
        return self
    }

    guard let fadeBuffer = AVAudioPCMBuffer(pcmFormat: self.format,
                                            frameCapacity: self.frameCapacity) else {
        AKLog("Failed creating output buffer, returning original...")
        return self
    }

    let length: UInt32 = self.frameLength
    let sampleRate = self.format.sampleRate
    let channelCount = Int(self.format.channelCount)

    // initial starting point for the gain, if there is a fade in, start it at .01 otherwise at 1
    var gain: Double = inTime > 0 ? 0.01 : 1

    let sampleTime: Double = 1.0 / sampleRate

    // Per-sample multipliers. Guard the divisions: the original computed
    // exp(x / 0) = inf when one of the times was 0 (harmless only by accident).
    // from -20db?
    let fadeInPower: Double = inTime > 0 ? exp(log(10) * sampleTime / inTime) : 1
    // for decay to x% amplitude (-dB) over the given decay time
    let fadeOutPower: Double = outTime > 0 ? exp(-log(25) * sampleTime / outTime) : 1

    // where in the buffer to end the fade in
    let fadeInSamples = Int(sampleRate * inTime)
    // where in the buffer to start the fade out
    let fadeOutSamples = Int(Double(length) - (sampleRate * outTime))

    // i is the frame index in the buffer
    for i in 0 ..< Int(length) {
        // Advance the gain once per FRAME. The original advanced it inside the
        // channel loop, so multichannel buffers faded channelCount times too
        // fast and each channel received a slightly different gain.
        if i < fadeInSamples && inTime > 0 {
            gain *= fadeInPower
        } else if i > fadeOutSamples && outTime > 0 {
            gain *= fadeOutPower
        } else {
            gain = 1.0
        }

        // sanity check
        if gain > 1 {
            gain = 1
        }

        // apply the same gain to every channel of this frame
        for n in 0 ..< channelCount {
            fadeBuffer.floatChannelData?[n][i] = floatData[n][i] * Float(gain)
        }
    }
    // update this
    fadeBuffer.frameLength = length

    // set the buffer now to be the faded one
    return fadeBuffer
}

}
7 changes: 3 additions & 4 deletions AudioKit/Common/MIDI/AKCallbackInstrument.swift
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,9 @@ open class AKCallbackInstrument: AKMIDIInstrument {
/// - parameter midiInputName: Name of the instrument's MIDI input
/// - parameter callback: Initial callback
///
public init(midiInputName: String = "callback midi in", callback: AKMIDICallback? = nil) {
super.init()
let midi = AudioKit.midi
self.enableMIDI(midi.client, name: midiInputName)
public init(midiInputName: String = "AudioKit Callback Instrument", callback: AKMIDICallback? = nil) {
super.init(midiInputName: midiInputName)
self.name = midiInputName
self.callback = callback
avAudioNode = AVAudioMixerNode()
AudioKit.engine.attach(self.avAudioNode)
Expand Down
6 changes: 3 additions & 3 deletions AudioKit/Common/MIDI/AKMIDIEvent.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,9 @@ public struct AKMIDIEvent {

/// Internal data
public var internalData = [MIDIByte](zeros: 256)

/// Internal MIDIByte-sized packets
public var internalPackets: [[MIDIByte]] {
/// Internal MIDIByte-sized packets - in development / not used yet
public var internalPackets:[[MIDIByte]] {
var splitData = [[MIDIByte]]()
let byteLimit = Int(internalData.count / 256)
for i in 0...byteLimit {
Expand Down
19 changes: 14 additions & 5 deletions AudioKit/Common/MIDI/AKMIDIInstrument.swift
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,17 @@ open class AKMIDIInstrument: AKPolyphonicNode, AKMIDIListener {
open var midiIn = MIDIEndpointRef()

/// Name of the instrument
open var name = "AKMIDIInstrument"
open var name = "AudioKit MIDI Instrument"

/// Initialize the MIDI Instrument
///
/// - Parameter midiOutputName: Name of the instrument's MIDI output
/// - Parameter midiInputName: Name of the instrument's MIDI input
///
public init(midiOutputName: String? = nil) {
public init(midiInputName: String? = nil) {
super.init()
enableMIDI(name: midiOutputName ?? "Unnamed")
name = midiInputName ?? name
enableMIDI(name: midiInputName ?? name)
hideVirtualMIDIPort()
}

/// Enable MIDI input from a given MIDI client
Expand All @@ -37,7 +39,7 @@ open class AKMIDIInstrument: AKPolyphonicNode, AKMIDIListener {
/// - name: Name to connect with
///
open func enableMIDI(_ midiClient: MIDIClientRef = AudioKit.midi.client,
name: String = "Unnamed") {
name: String = "AudioKit MIDI Instrument") {
CheckError(MIDIDestinationCreateWithBlock(midiClient, name as CFString, &midiIn) { packetList, _ in
for e in packetList.pointee {
let event = AKMIDIEvent(packet: e)
Expand Down Expand Up @@ -121,4 +123,11 @@ open class AKMIDIInstrument: AKPolyphonicNode, AKMIDIListener {
stop(noteNumber: MIDINoteNumber(data2), channel: MIDIChannel(channel))
}
}

/// Make the instrument's virtual MIDI input visible to other apps
/// by clearing its private property flag.
func showVirtualMIDIPort() {
    MIDIObjectSetIntegerProperty(midiIn, kMIDIPropertyPrivate, 0)
}
/// Hide the instrument's virtual MIDI input from other apps
/// by setting its private property flag.
func hideVirtualMIDIPort() {
    MIDIObjectSetIntegerProperty(midiIn, kMIDIPropertyPrivate, 1)
}
}
11 changes: 9 additions & 2 deletions AudioKit/Common/MIDI/AKMIDISampler.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ open class AKMIDISampler: AKAppleSampler {
public init(midiOutputName: String? = nil) {
super.init()
enableMIDI(name: midiOutputName ?? name)
hideVirtualMIDIPort()
}

/// Enable MIDI input from a given MIDI client
Expand Down Expand Up @@ -127,13 +128,19 @@ open class AKMIDISampler: AKAppleSampler {
self.samplerUnit.stopNote(noteNumber, onChannel: channel)
}
}

/// Discard all virtual ports
open func destroyEndpoint() {
    // 0 is the "no endpoint" value here; nothing to dispose in that case.
    guard midiIn != 0 else { return }
    MIDIEndpointDispose(midiIn)
    midiIn = 0
}


/// Make the sampler's virtual MIDI input visible to other apps
/// by clearing its private property flag.
func showVirtualMIDIPort() {
    MIDIObjectSetIntegerProperty(midiIn, kMIDIPropertyPrivate, 0)
}
/// Hide the sampler's virtual MIDI input from other apps
/// by setting its private property flag.
func hideVirtualMIDIPort() {
    MIDIObjectSetIntegerProperty(midiIn, kMIDIPropertyPrivate, 1)
}
}
Loading

0 comments on commit 8ee681b

Please sign in to comment.