diff --git a/CHANGELOG.md b/CHANGELOG.md index ee3383d..42af548 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,13 +4,13 @@ All notable changes to this project will be documented in this file. --- -## [Master](https://github.com/fulldecent/FDWaveformView/compare/4.0.1...master) +## [Master](https://github.com/fulldecent/FDWaveformView/compare/5.0.1...master) #### Changed --- -## [4.0.1](https://github.com/fulldecent/FDWaveformView/releases/tag/4.0.1) +## [5.0.1](https://github.com/fulldecent/FDWaveformView/releases/tag/5.0.1) Released on 2020-02-12. #### Changed @@ -21,7 +21,7 @@ Released on 2020-02-12. --- -## [4.0.0](https://github.com/fulldecent/FDWaveformView/releases/tag/4.0.0) +## [5.0.0](https://github.com/fulldecent/FDWaveformView/releases/tag/5.0.0) Released on 2019-04-08. #### Changed diff --git a/FDWaveformView.podspec b/FDWaveformView.podspec index 2b2bc33..5056b74 100644 --- a/FDWaveformView.podspec +++ b/FDWaveformView.podspec @@ -1,6 +1,6 @@ Pod::Spec.new do |s| s.name = 'FDWaveformView' - s.version = '4.0.1' + s.version = '5.0.1' s.license = { :type => 'MIT', :file => 'LICENSE' } s.summary = 'Reads an audio file and displays the waveform' s.description = <<-DESC @@ -9,7 +9,7 @@ DESC s.homepage = 'https://github.com/fulldecent/FDWaveformView' s.screenshots = 'https://camo.githubusercontent.com/8c51361597e3c150cce6f60db5055663a7a7f8f1/68747470733a2f2f692e696d6775722e636f6d2f354e376f7a6f672e706e67', 'https://camo.githubusercontent.com/3c21c8437f922ba6cb1a44b0701c02c140221d84/68747470733a2f2f692e696d6775722e636f6d2f665272486952502e706e67', 'https://camo.githubusercontent.com/771973985f42a25931bfafba291f313ba8e46e32/68747470733a2f2f692e696d6775722e636f6d2f4a514f4b51336f2e706e67', 'https://camo.githubusercontent.com/21e361bff1e2351a8f54636881c4290e4818501a/68747470733a2f2f692e696d6775722e636f6d2f386f52376370712e676966', 'https://camo.githubusercontent.com/700a0eeb4bfbf5bab688dcb11ef60784b2074eef/68747470733a2f2f692e696d6775722e636f6d2f456778586143592e676966' s.author = { 'William Entriken' => 'github.com@phor.net' } - s.source = { :git => 'https://github.com/fulldecent/FDWaveformView.git', :tag => s.version.to_s } + s.source = { :git => 'https://github.com/fulldecent/FDWaveformView.git', :tag => "#{s.version}" } s.social_media_url = 'https://twitter.com/fulldecent' s.ios.deployment_target = '8.0' s.swift_version = '5.0' diff --git a/Package.swift b/Package.swift index 88effbb..3288b71 100644 --- a/Package.swift +++ b/Package.swift @@ -1,4 +1,4 @@ -// swift-tools-version:5.1 +// swift-tools-version:5.3 import PackageDescription diff --git a/Sources/FDWaveformView/FDAudioContext.swift b/Sources/FDWaveformView/FDAudioContext.swift deleted file mode 100644 index ec0345c..0000000 --- a/Sources/FDWaveformView/FDAudioContext.swift +++ /dev/null @@ -1,64 +0,0 @@ -// -// Copyright 2013 - 2017, William Entriken and the FDWaveformView contributors. 
-// -import UIKit -import AVFoundation - -/// Holds audio information used for building waveforms -final class FDAudioContext { - - /// The audio asset URL used to load the context - public let audioURL: URL - - /// Total number of samples in loaded asset - public let totalSamples: Int - - /// Loaded asset - public let asset: AVAsset - - // Loaded assetTrack - public let assetTrack: AVAssetTrack - - private init(audioURL: URL, totalSamples: Int, asset: AVAsset, assetTrack: AVAssetTrack) { - self.audioURL = audioURL - self.totalSamples = totalSamples - self.asset = asset - self.assetTrack = assetTrack - } - - public static func load(fromAudioURL audioURL: URL, completionHandler: @escaping (_ audioContext: FDAudioContext?) -> ()) { - let asset = AVURLAsset(url: audioURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)]) - - guard let assetTrack = asset.tracks(withMediaType: AVMediaType.audio).first else { - NSLog("FDWaveformView failed to load AVAssetTrack") - completionHandler(nil) - return - } - - asset.loadValuesAsynchronously(forKeys: ["duration"]) { - var error: NSError? - let status = asset.statusOfValue(forKey: "duration", error: &error) - switch status { - case .loaded: - guard - let formatDescriptions = assetTrack.formatDescriptions as? [CMAudioFormatDescription], - let audioFormatDesc = formatDescriptions.first, - let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDesc) - else { break } - - let totalSamples = Int((asbd.pointee.mSampleRate) * Float64(asset.duration.value) / Float64(asset.duration.timescale)) - let audioContext = FDAudioContext(audioURL: audioURL, totalSamples: totalSamples, asset: asset, assetTrack: assetTrack) - completionHandler(audioContext) - return - - case .failed, .cancelled, .loading, .unknown: - print("FDWaveformView could not load asset: \(error?.localizedDescription ?? "Unknown error")") - @unknown default: - print("FDWaveformView could not load asset: \(error?.localizedDescription ?? "Unknown error")") - } - - completionHandler(nil) - } - } -} - diff --git a/Sources/FDWaveformView/FDAudioSource.swift b/Sources/FDWaveformView/FDAudioSource.swift new file mode 100644 index 0000000..8d21d95 --- /dev/null +++ b/Sources/FDWaveformView/FDAudioSource.swift @@ -0,0 +1,115 @@ +// Copyright 2013–2020, William Entriken and the FDWaveformView contributors. +// Released under the MIT license as part of the FDWaveformView project. + +import Foundation +import AVFoundation + +enum FDAudioSourceError: Error { + case FailedToReadTrackSamples +} + +/// Reads samples from an audio file +final class FDAudioSource: FDWaveformViewDataSource { + + public let startIndex: Int = 0 + + public let endIndex: Int + + /// Number of samples available + public let count: Int + + /// The audio asset URL used to load the context + let audioURL: URL + + /// Loaded asset + let asset: AVAsset + + /// Loaded assetTrack + let assetTrack: AVAssetTrack + + // MARK: - Initialization + + // This is private because details are not known until audio is asynchronously loaded + private init(audioURL: URL, totalSamples: Int, asset: AVAsset, assetTrack: AVAssetTrack) { + self.endIndex = totalSamples + self.audioURL = audioURL + self.asset = asset + self.assetTrack = assetTrack + count = endIndex - startIndex + } + + /// Attempt to create a collection of samples from an audio track inside `audioURL`. This is a static function rather than a constructor because we run asynchronously.
+ /// - Parameters: + /// - audioURL: A media file to load + /// - completionHandler: The asynchronous callback (may be called on any thread, possibly synchronously) + /// - Returns: Void + public static func load(fromAudioURL audioURL: URL, completionHandler: @escaping (_ audioContext: FDAudioSource?) -> ()) { + let assetOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)] + let asset = AVURLAsset(url: audioURL, options: assetOptions) + guard let assetTrack = asset.tracks(withMediaType: .audio).first else + { + NSLog("FDWaveformView failed to load AVAssetTrack audio track") + completionHandler(nil) + return + } + + asset.loadValuesAsynchronously(forKeys: ["duration"]) { + var error: NSError? + if asset.statusOfValue(forKey: "duration", error: &error) == .loaded { + let totalSamples = Int(Float64(assetTrack.naturalTimeScale) * Float64(asset.duration.value) / Float64(asset.duration.timescale)) + completionHandler(Self.init(audioURL: audioURL, totalSamples: totalSamples, asset: asset, assetTrack: assetTrack)) + return + } + print("FDWaveformView could not load asset: \(error?.localizedDescription ?? "Unknown error")") + completionHandler(nil) + } + } + + // MARK: - Features + + /// Get audio sample data + /// - Parameter bounds: These are in units of the `assetTrack`'s natural timescale, as are startIndex and endIndex + /// - Throws: FDAudioSourceError + /// - Returns: 16-bit data values (2 bytes per sample) + func readSampleData(bounds: Range<Int>) throws -> Data { + //TODO: Consider outputting [Float] directly here. Then possibly this could conform to RandomAccessCollection + let assetReader = try AVAssetReader(asset: asset) // AVAssetReader is a one-shot reader so we cannot make it a class property + assetReader.timeRange = CMTimeRange(start: CMTime(value: Int64(bounds.lowerBound), timescale: assetTrack.naturalTimeScale), + duration: CMTime(value: Int64(bounds.count), timescale: assetTrack.naturalTimeScale)) + let outputSettingsDict: [String : Any] = [ + AVFormatIDKey: Int(kAudioFormatLinearPCM), + AVLinearPCMBitDepthKey: 16, + AVLinearPCMIsBigEndianKey: false, + AVLinearPCMIsFloatKey: false, // TODO: Maybe use float here because we convert using DSP later anyway. Need to profile performance of this change. + AVLinearPCMIsNonInterleaved: false + ] + + let readerOutput = AVAssetReaderTrackOutput(track: assetTrack, outputSettings: outputSettingsDict) + readerOutput.alwaysCopiesSampleData = false + assetReader.add(readerOutput) + + var sampleBuffer = Data() // 16-bit samples + assetReader.startReading() + defer { assetReader.cancelReading() } // Cancel reading if we exit early or if operation is cancelled + + while assetReader.status == .reading { + guard let readSampleBuffer = readerOutput.copyNextSampleBuffer(), + let readBuffer = CMSampleBufferGetDataBuffer(readSampleBuffer) else { + break + } + // Append audio sample buffer into our current sample buffer + var readBufferLength: Int = 0 + var readBufferPointer: UnsafeMutablePointer<Int8>? + CMBlockBufferGetDataPointer(readBuffer, atOffset: 0, lengthAtOffsetOut: &readBufferLength, totalLengthOut: nil, dataPointerOut: &readBufferPointer) + sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength)) + CMSampleBufferInvalidate(readSampleBuffer) + } + + // if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown) + // Something went wrong.
Handle it or do not, depending on if you can get above to work + if assetReader.status == .completed { + return sampleBuffer + } + throw FDAudioSourceError.FailedToReadTrackSamples + } +} diff --git a/Sources/FDWaveformView/FDWaveformRenderOperation.swift b/Sources/FDWaveformView/FDWaveformRenderOperation.swift index 447dfa8..6b61c1c 100644 --- a/Sources/FDWaveformView/FDWaveformRenderOperation.swift +++ b/Sources/FDWaveformView/FDWaveformRenderOperation.swift @@ -44,8 +44,8 @@ struct FDWaveformRenderFormat { /// Operation used for rendering waveform images final public class FDWaveformRenderOperation: Operation { - /// The audio context used to build the waveform - let audioContext: FDAudioContext + /// Where we get data from + let dataSource: FDAudioSource /// Size of waveform image to render public let imageSize: CGSize @@ -74,10 +74,10 @@ final public class FDWaveformRenderOperation: Operation { /// Final rendered image. Used to hold image for completionHandler. private var renderedImage: UIImage? - init(audioContext: FDAudioContext, imageSize: CGSize, sampleRange: CountableRange? = nil, format: FDWaveformRenderFormat = FDWaveformRenderFormat(), completionHandler: @escaping (_ image: UIImage?) -> ()) { - self.audioContext = audioContext + init(dataSource: FDAudioSource, imageSize: CGSize, sampleRange: CountableRange? = nil, format: FDWaveformRenderFormat = FDWaveformRenderFormat(), completionHandler: @escaping (_ image: UIImage?) -> ()) { + self.dataSource = dataSource self.imageSize = imageSize - self.sampleRange = sampleRange ?? 0.., andDownsampleTo targetSamples: Int) -> (samples: [CGFloat], sampleMax: CGFloat)? { - guard !isCancelled else { return nil } - - guard - !slice.isEmpty, - targetSamples > 0, - let reader = try? AVAssetReader(asset: audioContext.asset) - else { return nil } - - var channelCount = 1 - var sampleRate: CMTimeScale = 44100 - let formatDescriptions = audioContext.assetTrack.formatDescriptions as! 
[CMAudioFormatDescription] - for item in formatDescriptions { - guard let fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription(item) else { return nil } - channelCount = Int(fmtDesc.pointee.mChannelsPerFrame) - sampleRate = Int32(fmtDesc.pointee.mSampleRate) - } - - reader.timeRange = CMTimeRange(start: CMTime(value: Int64(slice.lowerBound), timescale: sampleRate), - duration: CMTime(value: Int64(slice.count), timescale: sampleRate)) - let outputSettingsDict: [String : Any] = [ - AVFormatIDKey: Int(kAudioFormatLinearPCM), - AVLinearPCMBitDepthKey: 16, - AVLinearPCMIsBigEndianKey: false, - AVLinearPCMIsFloatKey: false, - AVLinearPCMIsNonInterleaved: false - ] - - let readerOutput = AVAssetReaderTrackOutput(track: audioContext.assetTrack, outputSettings: outputSettingsDict) - readerOutput.alwaysCopiesSampleData = false - reader.add(readerOutput) - - var sampleMax = format.type.floorValue - let samplesPerPixel = max(1, channelCount * slice.count / targetSamples) - let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel) - - var outputSamples = [CGFloat]() - var sampleBuffer = Data() - - // 16-bit samples - reader.startReading() - defer { reader.cancelReading() } // Cancel reading if we exit early or if operation is cancelled - - while reader.status == .reading { - guard !isCancelled else { return nil } - - guard let readSampleBuffer = readerOutput.copyNextSampleBuffer(), - let readBuffer = CMSampleBufferGetDataBuffer(readSampleBuffer) else { - break - } - // Append audio sample buffer into our current sample buffer - var readBufferLength = 0 - var readBufferPointer: UnsafeMutablePointer? - CMBlockBufferGetDataPointer(readBuffer, atOffset: 0, lengthAtOffsetOut: &readBufferLength, totalLengthOut: nil, dataPointerOut: &readBufferPointer) - sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength)) - CMSampleBufferInvalidate(readSampleBuffer) - - let totalSamples = sampleBuffer.count / MemoryLayout.size - let downSampledLength = totalSamples / samplesPerPixel - let samplesToProcess = downSampledLength * samplesPerPixel - - guard samplesToProcess > 0 else { continue } - - processSamples(fromData: &sampleBuffer, - sampleMax: &sampleMax, - outputSamples: &outputSamples, - samplesToProcess: samplesToProcess, - downSampledLength: downSampledLength, - samplesPerPixel: samplesPerPixel, - filter: filter) - //print("Status: \(reader.status)") - } - - // Process the remaining samples that did not fit into samplesPerPixel at the end - let samplesToProcess = sampleBuffer.count / MemoryLayout.size - if samplesToProcess > 0 { - guard !isCancelled else { return nil } - - let downSampledLength = 1 - let samplesPerPixel = samplesToProcess - let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel) - - processSamples(fromData: &sampleBuffer, - sampleMax: &sampleMax, - outputSamples: &outputSamples, - samplesToProcess: samplesToProcess, - downSampledLength: downSampledLength, - samplesPerPixel: samplesPerPixel, - filter: filter) - //print("Status: \(reader.status)") - } - - // if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown) - // Something went wrong. Handle it or do not, depending on if you can get above to work - if reader.status == .completed || true{ - return (outputSamples, sampleMax) - } else { - print("FDWaveformRenderOperation failed to read audio: \(String(describing: reader.error))") - return nil - } - } - // TODO: report progress? 
(for issue #2) - func processSamples(fromData sampleBuffer: inout Data, sampleMax: inout CGFloat, outputSamples: inout [CGFloat], samplesToProcess: Int, downSampledLength: Int, samplesPerPixel: Int, filter: [Float]) { - sampleBuffer.withUnsafeBytes { bytes in - guard let samples = bytes.bindMemory(to: Int16.self).baseAddress else { + /// Get data and downsample to an approximate size + /// - Parameters: + /// - slice: Samples to get + /// - targetSamples: Requested minimum output size + /// - Returns: An array of samples at least as large as targetSamples + func sliceAsset(withRange slice: CountableRange<Int>, andDownsampleTo targetSamples: Int) -> (samples: [Float], sampleMax: Float)? { + guard !isCancelled, + !slice.isEmpty, + targetSamples != 0, + var inputSampleData = try? dataSource.readSampleData(bounds: slice) else { return nil } + + let inputSampleCount = inputSampleData.count / MemoryLayout<Int16>.size + let downsampleFactor = max(1, inputSampleCount / targetSamples) + let averagingFilter = [Float](repeating: 1.0 / Float(downsampleFactor), count: downsampleFactor) + let outputSampleCount = inputSampleCount / downsampleFactor + + var outputSamples = [Float](repeating: 0.0, count: outputSampleCount) + var outputSampleMax: Float = .nan + + inputSampleData.withUnsafeBytes { bytes in + guard let inputSampleInt16Data = bytes.bindMemory(to: Int16.self).baseAddress else { return } - var processingBuffer = [Float](repeating: 0.0, count: samplesToProcess) - - let sampleCount = vDSP_Length(samplesToProcess) + var inputSamples = [Float](repeating: 0.0, count: inputSampleCount) - //Convert 16bit int samples to floats - vDSP_vflt16(samples, 1, &processingBuffer, 1, sampleCount) + // Convert 16-bit Int samples to Floats + vDSP_vflt16(inputSampleInt16Data, 1, &inputSamples, 1, vDSP_Length(inputSampleCount)) - //Take the absolute values to get amplitude - vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, sampleCount) + // Take the absolute values to get amplitude + vDSP_vabs(inputSamples, 1, &inputSamples, 1, vDSP_Length(inputSampleCount)) - //Let current type further process the samples - format.type.process(normalizedSamples: &processingBuffer) + // Let current type further process the samples + format.type.process(normalizedSamples: &inputSamples) - //Downsample and average - var downSampledData = [Float](repeating: 0.0, count: downSampledLength) - vDSP_desamp(processingBuffer, - vDSP_Stride(samplesPerPixel), - filter, &downSampledData, - vDSP_Length(downSampledLength), - vDSP_Length(samplesPerPixel)) + // Downsample and average + vDSP_desamp(inputSamples, + vDSP_Stride(downsampleFactor), + averagingFilter, + &outputSamples, + vDSP_Length(outputSampleCount), + vDSP_Length(downsampleFactor)) - let downSampledDataCG = downSampledData.map { (value: Float) -> CGFloat in - let element = CGFloat(value) - if element > sampleMax { sampleMax = element } - return element - } - - // Remove processed samples - sampleBuffer.removeFirst(samplesToProcess * MemoryLayout<Int16>.size) - - outputSamples += downSampledDataCG + // Find maximum value + vDSP_maxv(outputSamples, 1, &outputSampleMax, vDSP_Length(outputSampleCount)) } + + return (outputSamples, outputSampleMax) } - // TODO: report progress? (for issue #2) - func plotWaveformGraph(_ samples: [CGFloat], maximumValue max: CGFloat, zeroValue min: CGFloat) -> UIImage? { + func plotWaveformGraph(_ samples: [Float], maximumValue max: Float, zeroValue min: Float) -> UIImage?
{ guard !isCancelled else { return nil } let imageSize = CGSize(width: CGFloat(samples.count) / format.scale, @@ -308,11 +215,11 @@ final public class FDWaveformRenderOperation: Operation { if max == min { sampleDrawingScale = 0 } else { - sampleDrawingScale = (imageSize.height * format.scale) / 2 / (max - min) + sampleDrawingScale = (imageSize.height * format.scale) / 2 / CGFloat(max - min) } let verticalMiddle = (imageSize.height * format.scale) / 2 for (x, sample) in samples.enumerated() { - let height = (sample - min) * sampleDrawingScale + let height = CGFloat(sample - min) * sampleDrawingScale context.move(to: CGPoint(x: CGFloat(x), y: verticalMiddle - height)) context.addLine(to: CGPoint(x: CGFloat(x), y: verticalMiddle + height)) context.strokePath(); diff --git a/Sources/FDWaveformView/FDWaveformView.swift b/Sources/FDWaveformView/FDWaveformView.swift index 28fbcad..fad2849 100644 --- a/Sources/FDWaveformView/FDWaveformView.swift +++ b/Sources/FDWaveformView/FDWaveformView.swift @@ -16,38 +16,47 @@ open class FDWaveformView: UIView { /// A delegate to accept progress reporting /*@IBInspectable*/ open weak var delegate: FDWaveformViewDelegate? - /// The audio file to render - /*@IBInspectable*/ open var audioURL: URL? { + /// The source for sample data + private var dataSource: FDAudioSource? { didSet { - guard let audioURL = audioURL else { - NSLog("FDWaveformView received nil audioURL") - audioContext = nil - return - } - - loadingInProgress = true - delegate?.waveformViewWillLoad?(self) - - FDAudioContext.load(fromAudioURL: audioURL) { audioContext in - DispatchQueue.main.async { - guard self.audioURL == audioContext?.audioURL else { return } - - if audioContext == nil { - NSLog("FDWaveformView failed to load URL: \(audioURL)") - } + waveformImage = nil + zoomSamples = 0 ..< self.totalSamples + highlightedSamples = nil + inProgressWaveformRenderOperation = nil + cachedWaveformRenderOperation = nil + renderForCurrentAssetFailed = false - self.audioContext = audioContext // This will reset the view and kick off a layout + setNeedsDisplay() + setNeedsLayout() + } + } + + public func setAudioURL(audioURL: URL?) { + guard let audioURL = audioURL else { + NSLog("FDWaveformView received nil audioURL") + dataSource = nil + return + } - self.loadingInProgress = false - self.delegate?.waveformViewDidLoad?(self) - } + loadingInProgress = true + delegate?.waveformViewWillLoad?(self) + + FDAudioSource.load(fromAudioURL: audioURL) { dataSource in + if dataSource == nil { + NSLog("FDWaveformView failed to load URL: \(audioURL)") + } + DispatchQueue.main.async { + self.dataSource = dataSource // This will reset the view and kick off a layout + self.loadingInProgress = false + self.delegate?.waveformViewDidLoad?(self) } } + } /// The total number of audio samples in the file open var totalSamples: Int { - return audioContext?.totalSamples ?? 0 + return dataSource?.count ?? 0 } /// The samples to be highlighted in a different color @@ -138,7 +147,7 @@ open class FDWaveformView: UIView { private var verticalOverdrawAllowed = 1.0 ... 3.0 /// The "zero" level (in dB) - fileprivate let noiseFloor: CGFloat = -50.0 + fileprivate let noiseFloor: Float = -50.0 @@ -146,22 +155,7 @@ open class FDWaveformView: UIView { /// Whether rendering for the current asset failed private var renderForCurrentAssetFailed = false - - /// Current audio context to be used for rendering - private var audioContext: FDAudioContext? 
{ - didSet { - waveformImage = nil - zoomSamples = 0 ..< self.totalSamples - highlightedSamples = nil - inProgressWaveformRenderOperation = nil - cachedWaveformRenderOperation = nil - renderForCurrentAssetFailed = false - - setNeedsDisplay() - setNeedsLayout() - } - } - + /// Currently running renderer private var inProgressWaveformRenderOperation: FDWaveformRenderOperation? { willSet { @@ -356,7 +350,8 @@ open class FDWaveformView: UIView { override open func layoutSubviews() { super.layoutSubviews() - guard audioContext != nil && !zoomSamples.isEmpty else { + guard dataSource != nil, + !zoomSamples.isEmpty else { return } @@ -410,8 +405,10 @@ open class FDWaveformView: UIView { } func renderWaveform() { - guard let audioContext = audioContext else { return } - guard !zoomSamples.isEmpty else { return } + guard let dataSource = dataSource, + !zoomSamples.isEmpty else { + return + } let renderSamples = zoomSamples.extended(byFactor: horizontalBleedTarget).clamped(to: 0 ..< totalSamples) let widthInPixels = floor(frame.width * CGFloat(horizontalOverdrawTarget)) @@ -419,7 +416,7 @@ open class FDWaveformView: UIView { let imageSize = CGSize(width: widthInPixels, height: heightInPixels) let renderFormat = FDWaveformRenderFormat(type: waveformRenderType, wavesColor: .black, scale: desiredImageScale) - let waveformRenderOperation = FDWaveformRenderOperation(audioContext: audioContext, imageSize: imageSize, sampleRange: renderSamples, format: renderFormat) { [weak self] image in + let waveformRenderOperation = FDWaveformRenderOperation(dataSource: dataSource, imageSize: imageSize, sampleRange: renderSamples, format: renderFormat) { [weak self] image in DispatchQueue.main.async { guard let strongSelf = self else { return } @@ -448,7 +445,7 @@ enum FDWaveformType: Equatable { /// Waveform is rendered using a logarithmic scale /// noiseFloor: The "zero" level (in dB) - case logarithmic(noiseFloor: CGFloat) + case logarithmic(noiseFloor: Float) // See http://stackoverflow.com/questions/24339807/how-to-test-equality-of-swift-enums-with-associated-values public static func ==(lhs: FDWaveformType, rhs: FDWaveformType) -> Bool { @@ -465,7 +462,7 @@ enum FDWaveformType: Equatable { return false } - public var floorValue: CGFloat { + public var floorValue: Float { switch self { case .linear: return 0 case .logarithmic(let noiseFloor): return noiseFloor @@ -482,7 +479,7 @@ enum FDWaveformType: Equatable { var zero: Float = 32768.0 vDSP_vdbcon(normalizedSamples, 1, &zero, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count), 1) - //Clip to [noiseFloor, 0] + // Clip to [noiseFloor, 0] var ceil: Float = 0.0 var noiseFloorFloat = Float(noiseFloor) vDSP_vclip(normalizedSamples, 1, &noiseFloorFloat, &ceil, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count)) @@ -606,6 +603,23 @@ extension FDWaveformView: UIGestureRecognizerDelegate { @objc optional func waveformDidEndScrubbing(_ waveformView: FDWaveformView) } +/// To connect to the data we want to plot +public protocol FDWaveformViewDataSource { + //TODO: Consider using RandomAccessCollection instead of FDWaveformViewDataSource + + /// The first data offset available (usually 0) + var startIndex: Int { get } + + /// One past the last data offset available + var endIndex: Int { get } + + /// Number of data samples available + var count: Int { get } + + /// Get samples + func readSampleData(bounds: Range<Int>) throws -> Data //TODO: make this return [Float] +} + //MARK - extension CountableRange where Bound: Strideable { diff --git a/iOS 
Example/Sources/ViewController.swift b/iOS Example/Sources/ViewController.swift index fb4f3de..f14a425 100644 --- a/iOS Example/Sources/ViewController.swift +++ b/iOS Example/Sources/ViewController.swift @@ -81,19 +81,19 @@ class ViewController: UIViewController { @IBAction func doLoadAAC() { let thisBundle = Bundle(for: type(of: self)) let url = thisBundle.url(forResource: "TchaikovskyExample2", withExtension: "m4a") - waveform.audioURL = url + waveform.setAudioURL(audioURL: url) } @IBAction func doLoadMP3() { let thisBundle = Bundle(for: type(of: self)) let url = thisBundle.url(forResource: "TchaikovskyExample2", withExtension: "mp3") - waveform.audioURL = url + waveform.setAudioURL(audioURL: url) } @IBAction func doLoadOGG() { let thisBundle = Bundle(for: type(of: self)) let url = thisBundle.url(forResource: "TchaikovskyExample2", withExtension: "ogg") - waveform.audioURL = url + waveform.setAudioURL(audioURL: url) } @IBAction func toggleScrub(_ sender: UISwitch) { @@ -139,7 +139,7 @@ class ViewController: UIViewController { // Animate the waveform view when it is rendered waveform.delegate = self waveform.alpha = 0.0 - waveform.audioURL = url + waveform.setAudioURL(audioURL: url) waveform.zoomSamples = 0 ..< waveform.totalSamples / 3 waveform.doesAllowScrubbing = true waveform.doesAllowStretch = true diff --git a/iOS Example/iOS Example.xcodeproj/project.pbxproj b/iOS Example/iOS Example.xcodeproj/project.pbxproj index a175c92..a97cb68 100644 --- a/iOS Example/iOS Example.xcodeproj/project.pbxproj +++ b/iOS Example/iOS Example.xcodeproj/project.pbxproj @@ -139,7 +139,7 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0730; - LastUpgradeCheck = 1020; + LastUpgradeCheck = 1230; ORGANIZATIONNAME = "William Entriken"; TargetAttributes = { D94BE1B81CCEBF2C0042282A = { @@ -247,6 +247,7 @@ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -271,7 +272,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 9.3; + IPHONEOS_DEPLOYMENT_TARGET = 10.3; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -305,6 +306,7 @@ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -323,7 +325,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 9.3; + IPHONEOS_DEPLOYMENT_TARGET = 10.3; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SWIFT_COMPILATION_MODE = wholemodule; @@ -342,7 +344,6 @@ DEVELOPMENT_TEAM = 8Q693ZG5RN; FRAMEWORK_SEARCH_PATHS = ""; INFOPLIST_FILE = "$(SRCROOT)/Sources/Info.plist"; - IPHONEOS_DEPLOYMENT_TARGET = 9.0; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", @@ -361,7 +362,6 @@ DEVELOPMENT_TEAM = 8Q693ZG5RN; FRAMEWORK_SEARCH_PATHS = ""; INFOPLIST_FILE = "$(SRCROOT)/Sources/Info.plist"; - IPHONEOS_DEPLOYMENT_TARGET = 9.0; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", diff --git a/iOS Example/iOS Example.xcodeproj/xcshareddata/xcschemes/iOS 
Example.xcscheme b/iOS Example/iOS Example.xcodeproj/xcshareddata/xcschemes/iOS Example.xcscheme new file mode 100644 index 0000000..2795fb8 --- /dev/null +++ b/iOS Example/iOS Example.xcodeproj/xcshareddata/xcschemes/iOS Example.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
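For reference, a minimal usage sketch of the API change above: FDWaveformView's `audioURL` property is replaced by the `setAudioURL(audioURL:)` method, and loading stays asynchronous with the existing delegate callbacks. The sketch is modeled on the iOS Example target and its bundled TchaikovskyExample2.m4a resource; the WaveformDemoViewController name, the outlet, and the module-style `import FDWaveformView` are illustrative assumptions, not part of the patch.

import UIKit
import FDWaveformView

class WaveformDemoViewController: UIViewController {
    // Hypothetical outlet; the example app wires FDWaveformView up in a storyboard
    @IBOutlet weak var waveform: FDWaveformView!

    override func viewDidLoad() {
        super.viewDidLoad()
        waveform.delegate = self
        waveform.doesAllowScrubbing = true

        // Before this patch: waveform.audioURL = url
        // After this patch: the URL is handed to a method, which kicks off the asynchronous load
        let url = Bundle(for: type(of: self)).url(forResource: "TchaikovskyExample2", withExtension: "m4a")
        waveform.setAudioURL(audioURL: url)
    }
}

extension WaveformDemoViewController: FDWaveformViewDelegate {
    func waveformViewDidLoad(_ waveformView: FDWaveformView) {
        // Called after FDAudioSource.load finishes, so totalSamples is now meaningful
        waveformView.zoomSamples = 0 ..< waveformView.totalSamples / 3
    }
}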