diff --git a/Package.swift b/Package.swift index 6c8dccb0..8467cb89 100644 --- a/Package.swift +++ b/Package.swift @@ -22,6 +22,7 @@ let package = Package( .library(name: "StdoutExporter", type: .static, targets: ["StdoutExporter"]), .library(name: "PrometheusExporter", type: .static, targets: ["PrometheusExporter"]), .library(name: "OpenTelemetryProtocolExporter", type: .static, targets: ["OpenTelemetryProtocolExporter"]), + .library(name: "PersistenceExporter", type: .static, targets: ["PersistenceExporter"]), .library(name: "InMemoryExporter", type: .static, targets: ["InMemoryExporter"]), .library(name: "DatadogExporter", type: .static, targets: ["DatadogExporter"]), .library(name: "NetworkStatus", type: .static, targets: ["NetworkStatus"]), @@ -96,6 +97,9 @@ let package = Package( dependencies: ["OpenTelemetrySdk"], path: "Sources/Exporters/DatadogExporter", exclude: ["NOTICE", "README.md"]), + .target(name: "PersistenceExporter", + dependencies: ["OpenTelemetrySdk"], + path: "Sources/Exporters/Persistence"), .testTarget(name: "NetworkStatusTests", dependencies: ["NetworkStatus"], path: "Tests/InstrumentationTests/NetworkStatusTests"), @@ -142,6 +146,9 @@ let package = Package( .product(name: "NIO", package: "swift-nio"), .product(name: "NIOHTTP1", package: "swift-nio")], path: "Tests/ExportersTests/DatadogExporter"), + .testTarget(name: "PersistenceExporterTests", + dependencies: ["PersistenceExporter"], + path: "Tests/ExportersTests/PersistenceExporter"), .target(name: "LoggingTracer", dependencies: ["OpenTelemetryApi"], path: "Examples/Logging Tracer"), diff --git a/Sources/Exporters/Persistence/Export/DataExportDelay.swift b/Sources/Exporters/Persistence/Export/DataExportDelay.swift new file mode 100644 index 00000000..d41d0a50 --- /dev/null +++ b/Sources/Exporters/Persistence/Export/DataExportDelay.swift @@ -0,0 +1,40 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +internal protocol Delay { + var current: TimeInterval { get } + mutating func decrease() + mutating func increase() +} + +/// Mutable interval used for periodic data exports. +internal struct DataExportDelay: Delay { + private let defaultDelay: TimeInterval + private let minDelay: TimeInterval + private let maxDelay: TimeInterval + private let changeRate: Double + + private var delay: TimeInterval + + init(performance: ExportPerformancePreset) { + self.defaultDelay = performance.defaultExportDelay + self.minDelay = performance.minExportDelay + self.maxDelay = performance.maxExportDelay + self.changeRate = performance.exportDelayChangeRate + self.delay = performance.initialExportDelay + } + + var current: TimeInterval { delay } + + mutating func decrease() { + delay = max(minDelay, delay * (1.0 - changeRate)) + } + + mutating func increase() { + delay = min(delay * (1.0 + changeRate), maxDelay) + } +} diff --git a/Sources/Exporters/Persistence/Export/DataExportStatus.swift b/Sources/Exporters/Persistence/Export/DataExportStatus.swift new file mode 100644 index 00000000..e0995fe8 --- /dev/null +++ b/Sources/Exporters/Persistence/Export/DataExportStatus.swift @@ -0,0 +1,15 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +/// The status of a single export attempt. +internal struct DataExportStatus { + /// If export needs to be retried (`true`) because its associated data was not delivered but it may succeed + /// in the next attempt (i.e. 
it failed due to device leaving signal range or a temporary server unavailability occured). + /// If set to `false` then data associated with the upload should be deleted as it does not need any more export + /// attempts (i.e. the upload succeeded or failed due to unrecoverable client error). + let needsRetry: Bool +} diff --git a/Sources/Exporters/Persistence/Export/DataExportWorker.swift b/Sources/Exporters/Persistence/Export/DataExportWorker.swift new file mode 100644 index 00000000..0e99d2c0 --- /dev/null +++ b/Sources/Exporters/Persistence/Export/DataExportWorker.swift @@ -0,0 +1,112 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +// a protocol for an exporter of `Data` to which a `DataExportWorker` can delegate persisted +// data export +internal protocol DataExporter { + func export(data: Data) -> DataExportStatus +} + +// a protocol needed for mocking `DataExportWorker` +internal protocol DataExportWorkerProtocol { + func flush() -> Bool +} + +internal class DataExportWorker: DataExportWorkerProtocol { + /// Queue to execute exports. + private let queue: DispatchQueue + /// File reader providing data to export. + private let fileReader: FileReader + /// Data exporter sending data to server. + private let dataExporter: DataExporter + /// Variable system conditions determining if export should be performed. + private let exportCondition: () -> Bool + + /// Delay used to schedule consecutive exports. + private var delay: Delay + + /// Export work scheduled by this worker. + private var exportWork: DispatchWorkItem? + + init( + queue: DispatchQueue, + fileReader: FileReader, + dataExporter: DataExporter, + exportCondition: @escaping () -> Bool, + delay: Delay + ) { + self.queue = queue + self.fileReader = fileReader + self.exportCondition = exportCondition + self.dataExporter = dataExporter + self.delay = delay + + let exportWork = DispatchWorkItem { [weak self] in + guard let self = self else { + return + } + + let isSystemReady = self.exportCondition() + let nextBatch = isSystemReady ? self.fileReader.readNextBatch() : nil + if let batch = nextBatch { + // Export batch + let exportStatus = self.dataExporter.export(data: batch.data) + + // Delete or keep batch depending on the export status + if exportStatus.needsRetry { + self.delay.increase() + } else { + self.fileReader.markBatchAsRead(batch) + self.delay.decrease() + } + } else { + self.delay.increase() + } + + self.scheduleNextExport(after: self.delay.current) + } + + self.exportWork = exportWork + + scheduleNextExport(after: self.delay.current) + } + + private func scheduleNextExport(after delay: TimeInterval) { + guard let work = exportWork else { + return + } + + queue.asyncAfter(deadline: .now() + delay, execute: work) + } + + /// This method gets remaining files at once, and exports them + /// It assures that periodic exporter cannot read or export the files while the flush is being processed + internal func flush() -> Bool { + let success = queue.sync { + self.fileReader.onRemainingBatches { + let exportStatus = self.dataExporter.export(data: $0.data) + if !exportStatus.needsRetry { + self.fileReader.markBatchAsRead($0) + } + } + } + return success + } + + /// Cancels scheduled exports and stops scheduling next ones. + /// - It does not affect the export that has already begun. + /// - It blocks the caller thread if called in the middle of export execution. 
+    internal func cancelSynchronously() {
+        queue.sync(flags: .barrier) {
+            // This cancellation must be performed on the `queue` to ensure that it is not called
+            // in the middle of a `DispatchWorkItem` execution - otherwise the pending block would run
+            // to completion and schedule another export by calling `scheduleNextExport(after:)` at the end.
+            self.exportWork?.cancel()
+            self.exportWork = nil
+        }
+    }
+}
diff --git a/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift
new file mode 100644
index 00000000..42881fbc
--- /dev/null
+++ b/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift
@@ -0,0 +1,131 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import Foundation
+import OpenTelemetrySdk
+
+// protocol for exporters that can be decorated with `PersistenceExporterDecorator`
+protocol DecoratedExporter {
+    associatedtype SignalType
+
+    func export(values: [SignalType]) -> DataExportStatus
+}
+
+// a generic decorator of `DecoratedExporter` adding filesystem persistence of batches of `[T.SignalType]`.
+// `T.SignalType` must conform to `Codable`.
+internal class PersistenceExporterDecorator<T> where T: DecoratedExporter, T.SignalType: Codable {
+
+    // a wrapper of `DecoratedExporter` (T) to add conformance to `DataExporter` that can be
+    // used with `DataExportWorker`.
+    private class DecoratedDataExporter: DataExporter {
+
+        private let decoratedExporter: T
+
+        init(decoratedExporter: T) {
+            self.decoratedExporter = decoratedExporter
+        }
+
+        func export(data: Data) -> DataExportStatus {
+
+            // decode batches of `[T.SignalType]` from the raw data.
+            // the data is made of batches of comma-suffixed JSON arrays, so in order to utilize
+            // `JSONDecoder`, add a "[" prefix and "null]" suffix making the data a valid
+            // JSON array of `[T.SignalType]`.
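+            // For example, persisted writes `[{"v":1}],` and `[{"v":2}],` appear on disk as
+            // `[{"v":1}],[{"v":2}],` and become `[[{"v":1}],[{"v":2}],null]` after framing,
+            // which decodes as `[[T.SignalType]?]` (payloads here are purely illustrative).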
+ var arrayData: Data = JSONDataConstants.arrayPrefix + arrayData.append(data) + arrayData.append(JSONDataConstants.arraySuffix) + + do { + let decoder = JSONDecoder() + let exportables = try decoder.decode( + [[T.SignalType]?].self, + from: arrayData).compactMap { $0 }.flatMap { $0 } + + return decoratedExporter.export(values: exportables) + } catch { + return DataExportStatus(needsRetry: false) + } + } + } + + private let performancePreset: PersistencePerformancePreset + + private let fileWriter: FileWriter + + private let worker: DataExportWorkerProtocol + + public convenience init(decoratedExporter: T, + storageURL: URL, + writerQueue: DispatchQueue, + readerQueue: DispatchQueue, + exportQueue: DispatchQueue, + exportCondition: @escaping () -> Bool, + performancePreset: PersistencePerformancePreset = .default) { + + // orchestrate writes and reads over the folder given by `storageURL` + let filesOrchestrator = FilesOrchestrator( + directory: Directory(url: storageURL), + performance: performancePreset, + dateProvider: SystemDateProvider() + ) + + let fileWriter = OrchestratedFileWriter( + orchestrator: filesOrchestrator, + queue: writerQueue + ) + + let fileReader = OrchestratedFileReader( + orchestrator: filesOrchestrator, + queue: readerQueue + ) + + self.init(decoratedExporter: decoratedExporter, + fileWriter: fileWriter, + workerFactory: { + return DataExportWorker( + queue: exportQueue, + fileReader: fileReader, + dataExporter: $0, + exportCondition: exportCondition, + delay: DataExportDelay(performance: performancePreset)) + }, + performancePreset: performancePreset) + } + + // internal initializer for testing that accepts a worker factory that allows mocking the worker + internal init(decoratedExporter: T, + fileWriter: FileWriter, + workerFactory createWorker: (DataExporter) -> DataExportWorkerProtocol, + performancePreset: PersistencePerformancePreset) { + self.performancePreset = performancePreset + + self.fileWriter = fileWriter + + self.worker = createWorker(DecoratedDataExporter(decoratedExporter: decoratedExporter)) + } + + public func export(values: [T.SignalType]) throws { + let encoder = JSONEncoder() + var data = try encoder.encode(values) + data.append(JSONDataConstants.arraySeparator) + + if (performancePreset.synchronousWrite) { + fileWriter.writeSync(data: data) + } else { + fileWriter.write(data: data) + } + } + + public func flush() { + fileWriter.flush() + _ = worker.flush() + } +} + +fileprivate struct JSONDataConstants { + static let arrayPrefix = "[".data(using: .utf8)! + static let arraySuffix = "null]".data(using: .utf8)! + static let arraySeparator = ",".data(using: .utf8)! +} diff --git a/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift new file mode 100644 index 00000000..b6b0a826 --- /dev/null +++ b/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift @@ -0,0 +1,58 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation +import OpenTelemetrySdk + +// a persistence exporter decorator for `Metric`. +// specialization of `PersistenceExporterDecorator` for `MetricExporter`. 
+public class PersistenceMetricExporterDecorator: MetricExporter { + + struct MetricDecoratedExporter: DecoratedExporter { + typealias SignalType = Metric + + private let metricExporter: MetricExporter + + init(metricExporter: MetricExporter) { + self.metricExporter = metricExporter + } + + func export(values: [Metric]) -> DataExportStatus { + let result = metricExporter.export(metrics: values, shouldCancel: nil) + return DataExportStatus(needsRetry: result == .failureRetryable) + } + } + + private let persistenceExporter: PersistenceExporterDecorator + + public init(metricExporter: MetricExporter, + storageURL: URL, + writerQueue: DispatchQueue, + readerQueue: DispatchQueue, + exportQueue: DispatchQueue, + exportCondition: @escaping () -> Bool, + performancePreset: PersistencePerformancePreset = .default) throws { + + self.persistenceExporter = + PersistenceExporterDecorator( + decoratedExporter: MetricDecoratedExporter(metricExporter: metricExporter), + storageURL: storageURL, + writerQueue: writerQueue, + readerQueue: readerQueue, + exportQueue: exportQueue, + exportCondition: exportCondition, + performancePreset: performancePreset) + } + + public func export(metrics: [Metric], shouldCancel: (() -> Bool)?) -> MetricExporterResultCode { + do { + try persistenceExporter.export(values: metrics) + + return .success + } catch { + return .failureNotRetryable + } + } +} diff --git a/Sources/Exporters/Persistence/PersistencePerformancePreset.swift b/Sources/Exporters/Persistence/PersistencePerformancePreset.swift new file mode 100644 index 00000000..c31077db --- /dev/null +++ b/Sources/Exporters/Persistence/PersistencePerformancePreset.swift @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +internal protocol StoragePerformancePreset { + /// Maximum size of a single file (in bytes). + /// Each feature (logging, tracing, ...) serializes its objects data to that file for later export. + /// If last written file is too big to append next data, new file is created. + var maxFileSize: UInt64 { get } + /// Maximum size of data directory (in bytes). + /// Each feature uses separate directory. + /// If this size is exceeded, the oldest files are deleted until this limit is met again. + var maxDirectorySize: UInt64 { get } + /// Maximum age qualifying given file for reuse (in seconds). + /// If recently used file is younger than this, it is reused - otherwise: new file is created. + var maxFileAgeForWrite: TimeInterval { get } + /// Minimum age qualifying given file for export (in seconds). + /// If the file is older than this, it is exported (and then deleted if export succeeded). + /// It has an arbitrary offset (~0.5s) over `maxFileAgeForWrite` to ensure that no export can start for the file being currently written. + var minFileAgeForRead: TimeInterval { get } + /// Maximum age qualifying given file for export (in seconds). + /// Files older than this are considered obsolete and get deleted without exporting. + var maxFileAgeForRead: TimeInterval { get } + /// Maximum number of serialized objects written to a single file. + /// If number of objects in recently used file reaches this limit, new file is created for new data. + var maxObjectsInFile: Int { get } + /// Maximum size of serialized object data (in bytes). + /// If serialized object data exceeds this limit, it is skipped (not written to file and not exported). 
+    var maxObjectSize: UInt64 { get }
+}
+
+internal protocol ExportPerformancePreset {
+    /// First export delay (in seconds).
+    /// It is used as a base value until no more files eligible for export are found - then `defaultExportDelay` is used as a new base.
+    var initialExportDelay: TimeInterval { get }
+    /// Default export interval (in seconds).
+    /// At runtime, the export interval ranges from `minExportDelay` to `maxExportDelay` depending
+    /// on delivery success or failure.
+    var defaultExportDelay: TimeInterval { get }
+    /// Minimum interval of data export (in seconds).
+    var minExportDelay: TimeInterval { get }
+    /// Maximum interval of data export (in seconds).
+    var maxExportDelay: TimeInterval { get }
+    /// If export succeeds or fails, the current interval is changed by this rate. Should be less than or equal to `1.0`.
+    /// E.g. if the rate is `0.1`, then `delay` can be increased or decreased by `delay * 0.1`.
+    var exportDelayChangeRate: Double { get }
+}
+
+public struct PersistencePerformancePreset: Equatable, StoragePerformancePreset, ExportPerformancePreset {
+    // MARK: - StoragePerformancePreset
+
+    let maxFileSize: UInt64
+    let maxDirectorySize: UInt64
+    let maxFileAgeForWrite: TimeInterval
+    let minFileAgeForRead: TimeInterval
+    let maxFileAgeForRead: TimeInterval
+    let maxObjectsInFile: Int
+    let maxObjectSize: UInt64
+    let synchronousWrite: Bool
+
+    // MARK: - ExportPerformancePreset
+
+    let initialExportDelay: TimeInterval
+    let defaultExportDelay: TimeInterval
+    let minExportDelay: TimeInterval
+    let maxExportDelay: TimeInterval
+    let exportDelayChangeRate: Double
+
+    // MARK: - Predefined presets
+
+    /// Default performance preset.
+    public static let `default` = lowRuntimeImpact
+
+    /// Performance preset optimized for low runtime impact.
+    /// Minimizes the number of data requests sent to the server.
+    public static let lowRuntimeImpact = PersistencePerformancePreset(
+        // persistence
+        maxFileSize: 4 * 1_024 * 1_024, // 4MB
+        maxDirectorySize: 512 * 1_024 * 1_024, // 512 MB
+        maxFileAgeForWrite: 4.75,
+        minFileAgeForRead: 4.75 + 0.5, // `maxFileAgeForWrite` + 0.5s margin
+        maxFileAgeForRead: 18 * 60 * 60, // 18h
+        maxObjectsInFile: 500,
+        maxObjectSize: 256 * 1_024, // 256KB
+        synchronousWrite: false,
+
+        // export
+        initialExportDelay: 5, // postpone to not impact app launch time
+        defaultExportDelay: 5,
+        minExportDelay: 1,
+        maxExportDelay: 20,
+        exportDelayChangeRate: 0.1
+    )
+
+    /// Performance preset optimized for instant data delivery.
+    /// Minimizes the time between receiving data from the user and delivering it to the server.
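+    /// Writes are synchronous and export intervals are kept short, which suits short-lived contexts such as app extensions.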
+ public static let instantDataDelivery = PersistencePerformancePreset( + // persistence + maxFileSize: `default`.maxFileSize, + maxDirectorySize: `default`.maxDirectorySize, + maxFileAgeForWrite: 2.75, + minFileAgeForRead: 2.75 + 0.5, // `maxFileAgeForWrite` + 0.5s margin + maxFileAgeForRead: `default`.maxFileAgeForRead, + maxObjectsInFile: `default`.maxObjectsInFile, + maxObjectSize: `default`.maxObjectSize, + synchronousWrite: true, + + // export + initialExportDelay: 0.5, // send quick to have a chance for export in short-lived app extensions + defaultExportDelay: 3, + minExportDelay: 1, + maxExportDelay: 5, + exportDelayChangeRate: 0.5 // reduce significantly for more exports in short-lived app extensions + ) +} diff --git a/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift new file mode 100644 index 00000000..e8624318 --- /dev/null +++ b/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift @@ -0,0 +1,71 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation +import OpenTelemetrySdk + +// a persistence exporter decorator for `SpanData`. +// specialization of `PersistenceExporterDecorator` for `SpanExporter`. +public class PersistenceSpanExporterDecorator: SpanExporter { + + struct SpanDecoratedExporter: DecoratedExporter { + typealias SignalType = SpanData + + private let spanExporter: SpanExporter + + init(spanExporter: SpanExporter) { + self.spanExporter = spanExporter + } + + func export(values: [SpanData]) -> DataExportStatus { + _ = spanExporter.export(spans: values) + return DataExportStatus(needsRetry: false) + } + } + + private let spanExporter: SpanExporter + private let persistenceExporter: PersistenceExporterDecorator + + public init(spanExporter: SpanExporter, + storageURL: URL, + writerQueue: DispatchQueue, + readerQueue: DispatchQueue, + exportQueue: DispatchQueue, + exportCondition: @escaping () -> Bool, + performancePreset: PersistencePerformancePreset = .default) throws { + + self.spanExporter = spanExporter + + self.persistenceExporter = + PersistenceExporterDecorator( + decoratedExporter: SpanDecoratedExporter(spanExporter: spanExporter), + storageURL: storageURL, + writerQueue: writerQueue, + readerQueue: readerQueue, + exportQueue: exportQueue, + exportCondition: exportCondition, + performancePreset: performancePreset) + } + + public func export(spans: [SpanData]) -> SpanExporterResultCode { + do { + try persistenceExporter.export(values: spans) + + return .success + } catch { + return .failure + } + } + + public func flush() -> SpanExporterResultCode { + persistenceExporter.flush() + return spanExporter.flush() + } + + public func shutdown() { + persistenceExporter.flush() + spanExporter.shutdown() + } +} diff --git a/Sources/Exporters/Persistence/README.md b/Sources/Exporters/Persistence/README.md new file mode 100644 index 00000000..51a02ce0 --- /dev/null +++ b/Sources/Exporters/Persistence/README.md @@ -0,0 +1,42 @@ +# Persistence Exporter + +The Persistence Exporter is not an actual exporter by itself, but an exporter decorator. It decorates a given exporter by persisting the exported data to the disk first, and then proceeds to forward it to the decorated exporter. The goal is to allow dealing with situations where telemetry data is generated in an environment that can't guarantee stable export. + +An example use case is mobile apps that operate while the device has no network connectivity. 
With the Persistence Exporter decorating the actual exporters used by the app, telemetry can be collected while the device is offline. Later - possibly after the app is terminated and relaunched - when network connectivity is back, the collected telemetry data can be picked up from the disk and exported.
+
+The Persistence Exporter provides decorators for MetricExporter and SpanExporter. The decorators handle exported data by:
+
+- Asynchronously serializing and writing the exported data to disk at a specified path. Writing to the disk is performed on a specified DispatchQueue.
+- Asynchronously picking up persisted data, deserializing it, and forwarding it to the decorated exporter.
+
+## Getting Started
+
+*This is a work in progress, currently in an alpha stage, and should not be used in production.*
+
+### Usage
+
+An example of decorating a `MetricExporter`:
+
+```swift
+let metricExporter = ... // create some MetricExporter
+let persistenceMetricExporter = try PersistenceMetricExporterDecorator(
+    metricExporter: metricExporter,
+    storageURL: metricsSubdirectoryURL,
+    writerQueue: DispatchQueue(label: "metricWriterQueue"),
+    readerQueue: DispatchQueue(label: "metricReaderQueue"),
+    exportQueue: DispatchQueue(label: "metricExportQueue"),
+    exportCondition: { return true })
+```
+
+An example of decorating a `SpanExporter`:
+
+```swift
+let spanExporter = ... // create some SpanExporter
+let persistenceTraceExporter = try PersistenceSpanExporterDecorator(
+    spanExporter: spanExporter,
+    storageURL: tracesSubdirectoryURL,
+    writerQueue: DispatchQueue(label: "spanWriterQueue"),
+    readerQueue: DispatchQueue(label: "spanReaderQueue"),
+    exportQueue: DispatchQueue(label: "spanExportQueue"),
+    exportCondition: { return true })
+```
diff --git a/Sources/Exporters/Persistence/Storage/Directory.swift b/Sources/Exporters/Persistence/Storage/Directory.swift
new file mode 100644
index 00000000..97316cda
--- /dev/null
+++ b/Sources/Exporters/Persistence/Storage/Directory.swift
@@ -0,0 +1,65 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import Foundation
+
+/// An abstraction over file system directory where SDK stores its files.
+internal struct Directory {
+    let url: URL
+
+    /// Creates subdirectory with given path under system caches directory.
+    init(withSubdirectoryPath path: String) throws {
+        self.init(url: try createCachesSubdirectoryIfNotExists(subdirectoryPath: path))
+    }
+
+    init(url: URL) {
+        self.url = url
+    }
+
+    /// Creates file with given name.
+    func createFile(named fileName: String) throws -> File {
+        let fileURL = url.appendingPathComponent(fileName, isDirectory: false)
+        guard FileManager.default.createFile(atPath: fileURL.path, contents: nil, attributes: nil) == true else {
+            throw StorageError.createFileError(path: fileURL)
+        }
+        return File(url: fileURL)
+    }
+
+    /// Returns file with given name.
+    func file(named fileName: String) -> File? {
+        let fileURL = url.appendingPathComponent(fileName, isDirectory: false)
+        if FileManager.default.fileExists(atPath: fileURL.path) {
+            return File(url: fileURL)
+        } else {
+            return nil
+        }
+    }
+
+    /// Returns all files of this directory.
+    func files() throws -> [File] {
+        return try FileManager.default
+            .contentsOfDirectory(at: url, includingPropertiesForKeys: [.isRegularFileKey, .canonicalPathKey])
+            .map { url in File(url: url) }
+    }
+}
+
+/// Creates subdirectory at given path in `/Library/Caches` if it does not exist. Might throw `StorageError` when it's not possible.
+/// * `/Library/Caches` is exclduded from iTunes and iCloud backups by default. +/// * System may delete data in `/Library/Cache` to free up disk space which reduces the impact on devices working under heavy space pressure. +private func createCachesSubdirectoryIfNotExists(subdirectoryPath: String) throws -> URL { + guard let cachesDirectoryURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first else { + throw StorageError.obtainCacheLibraryError + } + + let subdirectoryURL = cachesDirectoryURL.appendingPathComponent(subdirectoryPath, isDirectory: true) + + do { + try FileManager.default.createDirectory(at: subdirectoryURL, withIntermediateDirectories: true, attributes: nil) + } catch let error { + throw StorageError.createDirectoryError(path: subdirectoryURL, error: error) + } + + return subdirectoryURL +} diff --git a/Sources/Exporters/Persistence/Storage/File.swift b/Sources/Exporters/Persistence/Storage/File.swift new file mode 100644 index 00000000..4872ef50 --- /dev/null +++ b/Sources/Exporters/Persistence/Storage/File.swift @@ -0,0 +1,130 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +/// Provides convenient interface for reading metadata and appending data to the file. +internal protocol WritableFile { + /// Name of this file. + var name: String { get } + + /// Current size of this file. + func size() throws -> UInt64 + + /// Synchronously appends given data at the end of this file. + func append(data: Data, synchronized: Bool ) throws +} + +/// Provides convenient interface for reading contents and metadata of the file. +internal protocol ReadableFile { + /// Name of this file. + var name: String { get } + + /// Reads the available data in this file. + func read() throws -> Data + + /// Deletes this file. + func delete() throws +} + +/// An immutable `struct` designed to provide optimized and thread safe interface for file manipulation. +/// It doesn't own the file, which means the file presence is not guaranteed - the file can be deleted by OS at any time (e.g. due to memory pressure). +internal struct File: WritableFile, ReadableFile { + let url: URL + let name: String + + init(url: URL) { + self.url = url + self.name = url.lastPathComponent + } + + /// Appends given data at the end of this file. + func append(data: Data, synchronized: Bool = false) throws { + let fileHandle = try FileHandle(forWritingTo: url) + + // https://en.wikipedia.org/wiki/Xcode#11.x_series + // compiler version needs to have iOS 13.4+ as base SDK + #if compiler(>=5.2) + /** + Even though the `fileHandle.seekToEnd()` should be available since iOS 13.0: + ``` + @available(OSX 10.15, iOS 13.0, watchOS 6.0, tvOS 13.0, *) + public func seekToEnd() throws -> UInt64 + ``` + it crashes on iOS Simulators prior to iOS 13.4: + ``` + Symbol not found: _$sSo12NSFileHandleC10FoundationE9seekToEnds6UInt64VyKF + ``` + This is fixed in iOS 14/Xcode 12 + */ + if #available(OSX 10.15.4, iOS 13.4, watchOS 6.0, tvOS 13.4, *) { + defer { + if synchronized { + try? fileHandle.synchronize() + } + try? 
fileHandle.close() + } + try fileHandle.seekToEnd() + try fileHandle.write(contentsOf: data) + } else { + legacyAppend(data, to: fileHandle) + } + #else + try legacyAppend(data, to: fileHandle) + #endif + } + + private func legacyAppend(_ data: Data, to fileHandle: FileHandle) { + defer { + fileHandle.closeFile() + } + fileHandle.seekToEndOfFile() + fileHandle.write(data) + } + + func read() throws -> Data { + let fileHandle = try FileHandle(forReadingFrom: url) + + // https://en.wikipedia.org/wiki/Xcode#11.x_series + // compiler version needs to have iOS 13.4+ as base SDK + #if compiler(>=5.2) + /** + Even though the `fileHandle.seekToEnd()` should be available since iOS 13.0: + ``` + @available(OSX 10.15, iOS 13.0, watchOS 6.0, tvOS 13.0, *) + public func readToEnd() throws -> Data? + ``` + it crashes on iOS Simulators prior to iOS 13.4: + ``` + Symbol not found: _$sSo12NSFileHandleC10FoundationE9readToEndAC4DataVSgyKF + ``` + This is fixed in iOS 14/Xcode 12 + */ + if #available(OSX 10.15.4, iOS 13.4, watchOS 6.0, tvOS 13.4, *) { + defer { try? fileHandle.close() } + return try fileHandle.readToEnd() ?? Data() + } else { + return legacyRead(from: fileHandle) + } + #else + return legacyRead(from: fileHandle) + #endif + } + + private func legacyRead(from fileHandle: FileHandle) -> Data { + let data = fileHandle.readDataToEndOfFile() + fileHandle.closeFile() + return data + } + + func size() throws -> UInt64 { + let attributes = try FileManager.default.attributesOfItem(atPath: url.path) + return attributes[.size] as? UInt64 ?? 0 + } + + func delete() throws { + try FileManager.default.removeItem(at: url) + } +} diff --git a/Sources/Exporters/Persistence/Storage/FileReader.swift b/Sources/Exporters/Persistence/Storage/FileReader.swift new file mode 100644 index 00000000..9de877df --- /dev/null +++ b/Sources/Exporters/Persistence/Storage/FileReader.swift @@ -0,0 +1,72 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +internal struct Batch { + /// Data read from file + let data: Data + /// File from which `data` was read. + let file: ReadableFile +} + +internal protocol FileReader { + func readNextBatch() -> Batch? + + func onRemainingBatches(process: (Batch) -> ()) -> Bool + + func markBatchAsRead(_ batch: Batch) +} + +internal final class OrchestratedFileReader: FileReader { + /// Orchestrator producing reference to readable file. + private let orchestrator: FilesOrchestrator + /// Queue used to synchronize files access (read / write). + private let queue: DispatchQueue + + /// Files marked as read. + private var filesRead: [ReadableFile] = [] + + init(orchestrator: FilesOrchestrator, queue: DispatchQueue) { + self.orchestrator = orchestrator + self.queue = queue + } + + // MARK: - Reading batches + + func readNextBatch() -> Batch? { + if let file = orchestrator.getReadableFile(excludingFilesNamed: Set(filesRead.map { $0.name })) { + do { + let fileData = try file.read() + return Batch(data: fileData, file: file) + } catch { + return nil + } + } + + return nil + } + + /// This method gets remaining files at once, and process each file after with the block passed. 
+ /// Currently called from flush method + func onRemainingBatches(process: (Batch) -> ()) -> Bool { + do { + try orchestrator.getAllFiles(excludingFilesNamed: Set(filesRead.map { $0.name }))?.forEach { + let fileData = try $0.read() + process(Batch(data: fileData, file: $0)) + } + } catch { + return false + } + return true + } + + // MARK: - Accepting batches + + func markBatchAsRead(_ batch: Batch) { + orchestrator.delete(readableFile: batch.file) + filesRead.append(batch.file) + } +} diff --git a/Sources/Exporters/Persistence/Storage/FileWriter.swift b/Sources/Exporters/Persistence/Storage/FileWriter.swift new file mode 100644 index 00000000..80470257 --- /dev/null +++ b/Sources/Exporters/Persistence/Storage/FileWriter.swift @@ -0,0 +1,51 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +protocol FileWriter { + func write(data: Data) + + func writeSync(data: Data) + + func flush() +} + +internal final class OrchestratedFileWriter: FileWriter { + /// Orchestrator producing reference to writable file. + private let orchestrator: FilesOrchestrator + /// Queue used to synchronize files access (read / write) and perform decoding on background thread. + private let queue: DispatchQueue + + init(orchestrator: FilesOrchestrator, queue: DispatchQueue) { + self.orchestrator = orchestrator + self.queue = queue + } + + // MARK: - Writing data + func write(data: Data) { + queue.async { [weak self] in + self?.synchronizedWrite(data: data) + } + } + + func writeSync(data: Data) { + queue.sync { [weak self] in + self?.synchronizedWrite(data: data, syncOnEnd: true) + } + } + + private func synchronizedWrite(data: Data, syncOnEnd: Bool = false) { + do { + let file = try orchestrator.getWritableFile(writeSize: UInt64(data.count)) + try file.append(data: data, synchronized: syncOnEnd) + } catch { + } + } + + func flush() { + queue.sync(flags: .barrier) {} + } +} diff --git a/Sources/Exporters/Persistence/Storage/FilesOrchestrator.swift b/Sources/Exporters/Persistence/Storage/FilesOrchestrator.swift new file mode 100644 index 00000000..5c25050e --- /dev/null +++ b/Sources/Exporters/Persistence/Storage/FilesOrchestrator.swift @@ -0,0 +1,194 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +internal class FilesOrchestrator { + /// Directory where files are stored. + private let directory: Directory + /// Date provider. + private let dateProvider: DateProvider + /// Performance rules for writing and reading files. + private let performance: StoragePerformancePreset + /// Name of the last file returned by `getWritableFile()`. + private var lastWritableFileName: String? + /// Tracks number of times the file at `lastWritableFileURL` was returned from `getWritableFile()`. + /// This should correspond with number of objects stored in file, assuming that majority of writes succeed (the difference is negligible). 
+ private var lastWritableFileUsesCount: Int = 0 + + init( + directory: Directory, + performance: StoragePerformancePreset, + dateProvider: DateProvider + ) { + self.directory = directory + self.performance = performance + self.dateProvider = dateProvider + } + + // MARK: - `WritableFile` orchestration + + func getWritableFile(writeSize: UInt64) throws -> WritableFile { + if writeSize > performance.maxObjectSize { + throw StorageError.dataExceedsMaxSizeError(dataSize: writeSize, maxSize: performance.maxObjectSize) + } + + let lastWritableFileOrNil = reuseLastWritableFileIfPossible(writeSize: writeSize) + + if let lastWritableFile = lastWritableFileOrNil { // if last writable file can be reused + lastWritableFileUsesCount += 1 + return lastWritableFile + } else { + // NOTE: As purging files directory is a memory-expensive operation, do it only when we know + // that a new file will be created. With SDK's `PerformancePreset` this gives + // the process enough time to not over-allocate internal `_FileCache` and `_NSFastEnumerationEnumerator` + // objects, resulting with a flat allocations graph in a long term. + try purgeFilesDirectoryIfNeeded() + + let newFileName = fileNameFrom(fileCreationDate: dateProvider.currentDate()) + let newFile = try directory.createFile(named: newFileName) + lastWritableFileName = newFile.name + lastWritableFileUsesCount = 1 + return newFile + } + } + + private func reuseLastWritableFileIfPossible(writeSize: UInt64) -> WritableFile? { + if let lastFileName = lastWritableFileName { + do { + guard let lastFile = directory.file(named: lastFileName) else { + return nil + } + let lastFileCreationDate = fileCreationDateFrom(fileName: lastFile.name) + let lastFileAge = dateProvider.currentDate().timeIntervalSince(lastFileCreationDate) + + let fileIsRecentEnough = lastFileAge <= performance.maxFileAgeForWrite + let fileHasRoomForMore = (try lastFile.size() + writeSize) <= performance.maxFileSize + let fileCanBeUsedMoreTimes = (lastWritableFileUsesCount + 1) <= performance.maxObjectsInFile + + if fileIsRecentEnough, fileHasRoomForMore, fileCanBeUsedMoreTimes { + return lastFile + } + } catch { + return nil + } + } + + return nil + } + + // MARK: - `ReadableFile` orchestration + + func getReadableFile(excludingFilesNamed excludedFileNames: Set = []) -> ReadableFile? { + do { + let filesWithCreationDate = try directory.files() + .map { (file: $0, creationDate: fileCreationDateFrom(fileName: $0.name)) } + .compactMap { try deleteFileIfItsObsolete(file: $0.file, fileCreationDate: $0.creationDate) } + + guard let (oldestFile, creationDate) = filesWithCreationDate + .filter({ excludedFileNames.contains($0.file.name) == false }) + .sorted(by: { $0.creationDate < $1.creationDate }) + .first + else { + return nil + } + + let oldestFileAge = dateProvider.currentDate().timeIntervalSince(creationDate) + let fileIsOldEnough = oldestFileAge >= performance.minFileAgeForRead + + return fileIsOldEnough ? oldestFile : nil + } catch { + return nil + } + } + + func getAllFiles(excludingFilesNamed excludedFileNames: Set = []) -> [ReadableFile]? { + do { + return try directory.files() + .filter { excludedFileNames.contains($0.name) == false } + } catch { + return nil + } + } + + func delete(readableFile: ReadableFile) { + do { + try readableFile.delete() + } catch { + } + } + + // MARK: - Directory size management + + /// Removes oldest files from the directory if it becomes too big. 
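+    /// Files are deleted oldest-first until the directory's accumulated size falls back under `maxDirectorySize`.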
+ private func purgeFilesDirectoryIfNeeded() throws { + let filesSortedByCreationDate = try directory.files() + .map { (file: $0, creationDate: fileCreationDateFrom(fileName: $0.name)) } + .sorted { $0.creationDate < $1.creationDate } + + var filesWithSizeSortedByCreationDate = try filesSortedByCreationDate + .map { (file: $0.file, size: try $0.file.size()) } + + let accumulatedFilesSize = filesWithSizeSortedByCreationDate.map { $0.size }.reduce(0, +) + + if accumulatedFilesSize > performance.maxDirectorySize { + let sizeToFree = accumulatedFilesSize - performance.maxDirectorySize + var sizeFreed: UInt64 = 0 + + while sizeFreed < sizeToFree, !filesWithSizeSortedByCreationDate.isEmpty { + let fileWithSize = filesWithSizeSortedByCreationDate.removeFirst() + try fileWithSize.file.delete() + sizeFreed += fileWithSize.size + } + } + } + + private func deleteFileIfItsObsolete(file: File, fileCreationDate: Date) throws -> (file: File, creationDate: Date)? { + let fileAge = dateProvider.currentDate().timeIntervalSince(fileCreationDate) + + if fileAge > performance.maxFileAgeForRead { + try file.delete() + return nil + } else { + return (file: file, creationDate: fileCreationDate) + } + } +} + +/// File creation date is used as file name - timestamp in milliseconds is used for date representation. +/// This function converts file creation date into file name. +internal func fileNameFrom(fileCreationDate: Date) -> String { + let milliseconds = fileCreationDate.timeIntervalSinceReferenceDate * 1_000 + let converted = (try? UInt64(withReportingOverflow: milliseconds)) ?? 0 + return String(converted) +} + +/// File creation date is used as file name - timestamp in milliseconds is used for date representation. +/// This function converts file name into file creation date. +internal func fileCreationDateFrom(fileName: String) -> Date { + let millisecondsSinceReferenceDate = TimeInterval(UInt64(fileName) ?? 
0) / 1_000 + return Date(timeIntervalSinceReferenceDate: TimeInterval(millisecondsSinceReferenceDate)) +} + +private enum FixedWidthIntegerError: Error { + case overflow(overflowingValue: T) +} + +private extension FixedWidthInteger { + /* + Self(:) is commonly used for conversion, however it fatalError() in case of conversion failure + Self(exactly:) does the exact same thing internally yet it returns nil instead of fatalError() + It is not trivial to guess if the conversion would fail or succeed, therefore we use Self(exactly:) + so that we don't need to guess in order to save the app from crashing + + IMPORTANT: If you pass floatingPoint to Self(exactly:) without rounded(), it may return nil + */ + init(withReportingOverflow floatingPoint: T) throws { + guard let converted = Self(exactly: floatingPoint.rounded()) else { + throw FixedWidthIntegerError.overflow(overflowingValue: floatingPoint) + } + self = converted + } +} diff --git a/Sources/Exporters/Persistence/Storage/StorageError.swift b/Sources/Exporters/Persistence/Storage/StorageError.swift new file mode 100644 index 00000000..94341ec8 --- /dev/null +++ b/Sources/Exporters/Persistence/Storage/StorageError.swift @@ -0,0 +1,13 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +enum StorageError: Error { + case createFileError(path: URL) + case createDirectoryError(path: URL, error: Error) + case obtainCacheLibraryError + case dataExceedsMaxSizeError(dataSize: UInt64, maxSize: UInt64) +} diff --git a/Sources/Exporters/Persistence/Utils/DateProvider.swift b/Sources/Exporters/Persistence/Utils/DateProvider.swift new file mode 100644 index 00000000..6cccd8a0 --- /dev/null +++ b/Sources/Exporters/Persistence/Utils/DateProvider.swift @@ -0,0 +1,16 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +/// Interface for date provider used for files orchestration. 
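+/// Abstracted so tests can inject a deterministic clock (e.g. `RelativeDateProvider` in the test helpers).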
+internal protocol DateProvider { + func currentDate() -> Date +} + +internal struct SystemDateProvider: DateProvider { + @inlinable + func currentDate() -> Date { return Date() } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Export/DataExportDelayTests.swift b/Tests/ExportersTests/PersistenceExporter/Export/DataExportDelayTests.swift new file mode 100644 index 00000000..fccb5e12 --- /dev/null +++ b/Tests/ExportersTests/PersistenceExporter/Export/DataExportDelayTests.swift @@ -0,0 +1,60 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +@testable import PersistenceExporter +import XCTest + +class DataExportDelayTests: XCTestCase { + private let mockPerformance = ExportPerformanceMock( + initialExportDelay: 3, + defaultExportDelay: 5, + minExportDelay: 1, + maxExportDelay: 20, + exportDelayChangeRate: 0.1 + ) + + func testWhenNotModified_itReturnsInitialDelay() { + let delay = DataExportDelay(performance: mockPerformance) + XCTAssertEqual(delay.current, mockPerformance.initialExportDelay) + XCTAssertEqual(delay.current, mockPerformance.initialExportDelay) + } + + func testWhenDecreasing_itGoesDownToMinimumDelay() { + var delay = DataExportDelay(performance: mockPerformance) + var previousValue: TimeInterval = delay.current + + while previousValue > mockPerformance.minExportDelay { + delay.decrease() + + let nextValue = delay.current + XCTAssertEqual( + nextValue / previousValue, + 1.0 - mockPerformance.exportDelayChangeRate, + accuracy: 0.1 + ) + XCTAssertLessThanOrEqual(nextValue, max(previousValue, mockPerformance.minExportDelay)) + + previousValue = nextValue + } + } + + func testWhenIncreasing_itClampsToMaximumDelay() { + var delay = DataExportDelay(performance: mockPerformance) + var previousValue: TimeInterval = delay.current + + while previousValue < mockPerformance.maxExportDelay { + delay.increase() + + let nextValue = delay.current + XCTAssertEqual( + nextValue / previousValue, + 1.0 + mockPerformance.exportDelayChangeRate, + accuracy: 0.1 + ) + XCTAssertGreaterThanOrEqual(nextValue, min(previousValue, mockPerformance.maxExportDelay)) + previousValue = nextValue + } + } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Export/DataExportWorkerTests.swift b/Tests/ExportersTests/PersistenceExporter/Export/DataExportWorkerTests.swift new file mode 100644 index 00000000..9a9b0760 --- /dev/null +++ b/Tests/ExportersTests/PersistenceExporter/Export/DataExportWorkerTests.swift @@ -0,0 +1,295 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +@testable import PersistenceExporter +import XCTest + +class DataExportWorkerTests: XCTestCase { + private let fileReadQueue = DispatchQueue(label: "persistence-tests-read", attributes: .concurrent) + private let exporterQueue = DispatchQueue(label: "persistence-tests-exporter", attributes: .concurrent) + + lazy var dateProvider = RelativeDateProvider(advancingBySeconds: 1) + + override func setUp() { + super.setUp() + } + + override func tearDown() { + super.tearDown() + } + + // MARK: - Data Exports + + func testItExportsAllData() { + let v1ExportExpectation = self.expectation(description: "V1 exported") + let v2ExportExpectation = self.expectation(description: "V2 exported") + let v3ExportExpectation = self.expectation(description: "V3 exported") + + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: false)) + + mockDataExporter.onExport = { data in + switch data.utf8String { + case "v1": v1ExportExpectation.fulfill() + 
case "v2": v2ExportExpectation.fulfill() + case "v3": v3ExportExpectation.fulfill() + default: break + } + } + + // Given + let fileReader = FileReaderMock() + fileReader.addFile(name: "1", data: "v1".utf8Data) + fileReader.addFile(name: "2", data: "v2".utf8Data) + fileReader.addFile(name: "3", data: "v3".utf8Data) + + // When + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { true }, + delay: DataExportDelay(performance: ExportPerformanceMock.veryQuick) + ) + + // Then + waitForExpectations(timeout: 1, handler: nil) + + worker.cancelSynchronously() + + XCTAssertEqual(fileReader.files.count, 0) + } + + func testGivenDataToExport_whenExportFinishesAndDoesNotNeedToBeRetried_thenDataIsDeleted() { + let startExportExpectation = self.expectation(description: "Export has started") + + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: false)) + mockDataExporter.onExport = { _ in startExportExpectation.fulfill() } + + // Given + let fileReader = FileReaderMock() + fileReader.addFile(name: "file", data: "value".utf8Data) + + // When + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { true }, + delay: DataExportDelay(performance: ExportPerformanceMock.veryQuick) + ) + + wait(for: [startExportExpectation], timeout: 0.5) + + worker.cancelSynchronously() + + // Then + XCTAssertEqual(fileReader.files.count, 0, "When export finishes with `needsRetry: false`, data should be deleted") + } + + func testGivenDataToExport_whenExportFinishesAndNeedsToBeRetried_thenDataIsPreserved() { + let startExportExpectation = self.expectation(description: "Export has started") + + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: true)) + mockDataExporter.onExport = { _ in startExportExpectation.fulfill() } + + // Given + let fileReader = FileReaderMock() + fileReader.addFile(name: "file", data: "value".utf8Data) + + // When + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { true }, + delay: DataExportDelay(performance: ExportPerformanceMock.veryQuick) + ) + + wait(for: [startExportExpectation], timeout: 0.5) + worker.cancelSynchronously() + + // Then + XCTAssertEqual(fileReader.files.count, 1, "When export finishes with `needsRetry: true`, data should be preserved") + } + + // MARK: - Export Interval Changes + + func testWhenThereIsNoBatch_thenIntervalIncreases() { + let delayChangeExpectation = expectation(description: "Export delay is increased") + let mockDelay = MockDelay { command in + if case .increase = command { + delayChangeExpectation.fulfill() + } else { + XCTFail("Wrong command is sent!") + } + } + + // When + let fileReader = FileReaderMock() + let mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: false)) + + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { false }, + delay: mockDelay + ) + + // Then + waitForExpectations(timeout: 1, handler: nil) + worker.cancelSynchronously() + } + + func testWhenBatchFails_thenIntervalIncreases() { + let delayChangeExpectation = expectation(description: "Export delay is increased") + let mockDelay = MockDelay { command in + if case .increase = command { + delayChangeExpectation.fulfill() + } else { + XCTFail("Wrong command is sent!") + } + } + + let 
exportExpectation = self.expectation(description: "value exported") + + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: true)) + + mockDataExporter.onExport = { data in exportExpectation.fulfill() } + + // When + let fileReader = FileReaderMock() + fileReader.addFile(name: "file", data: "value".utf8Data) + + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { true }, + delay: mockDelay + ) + + // Then + waitForExpectations(timeout: 1, handler: nil) + worker.cancelSynchronously() + } + + func testWhenBatchSucceeds_thenIntervalDecreases() { + let delayChangeExpectation = expectation(description: "Export delay is decreased") + let mockDelay = MockDelay { command in + if case .decrease = command { + delayChangeExpectation.fulfill() + } else { + XCTFail("Wrong command is sent!") + } + } + + let exportExpectation = self.expectation(description: "value exported") + + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: false)) + + mockDataExporter.onExport = { data in exportExpectation.fulfill() } + + // When + let fileReader = FileReaderMock() + fileReader.addFile(name: "file", data: "value".utf8Data) + + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { true }, + delay: mockDelay + ) + + // Then + waitForExpectations(timeout: 1, handler: nil) + worker.cancelSynchronously() + } + + // MARK: - Tearing Down + + func testWhenCancelled_itPerformsNoMoreExports() { + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: false)) + + mockDataExporter.onExport = { _ in XCTFail("Expected no exports after cancel") } + + // When + let fileReader = FileReaderMock() + + // Given + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { false }, + delay: MockDelay() + ) + + worker.cancelSynchronously() + fileReader.addFile(name: "file", data: "value".utf8Data) + + // Then + exporterQueue.sync(flags: .barrier) { } + } + + func testItFlushesAllData() { + let v1ExportExpectation = self.expectation(description: "V1 exported") + let v2ExportExpectation = self.expectation(description: "V2 exported") + let v3ExportExpectation = self.expectation(description: "V3 exported") + + var mockDataExporter = DataExporterMock(exportStatus: .mockWith(needsRetry: false)) + + mockDataExporter.onExport = { data in + switch data.utf8String { + case "v1": v1ExportExpectation.fulfill() + case "v2": v2ExportExpectation.fulfill() + case "v3": v3ExportExpectation.fulfill() + default: break + } + } + + // Given + let fileReader = FileReaderMock() + fileReader.addFile(name: "1", data: "v1".utf8Data) + fileReader.addFile(name: "2", data: "v2".utf8Data) + fileReader.addFile(name: "3", data: "v3".utf8Data) + + // When + let worker = DataExportWorker( + queue: exporterQueue, + fileReader: fileReader, + dataExporter: mockDataExporter, + exportCondition: { true }, + delay: DataExportDelay(performance: ExportPerformanceMock.veryQuick) + ) + + // When + XCTAssertTrue(worker.flush()) + + // Then + waitForExpectations(timeout: 1, handler: nil) + XCTAssertEqual(fileReader.files.count, 0) + } +} + +struct MockDelay: Delay { + enum Command { + case increase, decrease + } + + var callback: ((Command) -> Void)? 
+ let current: TimeInterval = 0.1 + + mutating func decrease() { + callback?(.decrease) + callback = nil + } + + mutating func increase() { + callback?(.increase) + callback = nil + } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift b/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift new file mode 100644 index 00000000..c79fa1b8 --- /dev/null +++ b/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift @@ -0,0 +1,221 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +@testable import PersistenceExporter +import Foundation + +// MARK: - PerformancePreset Mocks + +struct StoragePerformanceMock: StoragePerformancePreset { + let maxFileSize: UInt64 + let maxDirectorySize: UInt64 + let maxFileAgeForWrite: TimeInterval + let minFileAgeForRead: TimeInterval + let maxFileAgeForRead: TimeInterval + let maxObjectsInFile: Int + let maxObjectSize: UInt64 + + static let readAllFiles = StoragePerformanceMock( + maxFileSize: .max, + maxDirectorySize: .max, + maxFileAgeForWrite: 0, + minFileAgeForRead: -1, // make all files eligible for read + maxFileAgeForRead: .distantFuture, // make all files eligible for read + maxObjectsInFile: .max, + maxObjectSize: .max + ) + + static let writeEachObjectToNewFileAndReadAllFiles = StoragePerformanceMock( + maxFileSize: .max, + maxDirectorySize: .max, + maxFileAgeForWrite: 0, // always return new file for writting + minFileAgeForRead: readAllFiles.minFileAgeForRead, + maxFileAgeForRead: readAllFiles.maxFileAgeForRead, + maxObjectsInFile: 1, // write each data to new file + maxObjectSize: .max + ) + + static let writeAllObjectsToTheSameFile = StoragePerformanceMock( + maxFileSize: .max, + maxDirectorySize: .max, + maxFileAgeForWrite: .distantFuture, + minFileAgeForRead: -1, // make all files eligible for read + maxFileAgeForRead: .distantFuture, // make all files eligible for read + maxObjectsInFile: .max, + maxObjectSize: .max + ) +} + +struct ExportPerformanceMock: ExportPerformancePreset { + let initialExportDelay: TimeInterval + let defaultExportDelay: TimeInterval + let minExportDelay: TimeInterval + let maxExportDelay: TimeInterval + let exportDelayChangeRate: Double + + static let veryQuick = ExportPerformanceMock( + initialExportDelay: 0.05, + defaultExportDelay: 0.05, + minExportDelay: 0.05, + maxExportDelay: 0.05, + exportDelayChangeRate: 0 + ) +} + +extension PersistencePerformancePreset { + + static func mockWith(storagePerformance: StoragePerformancePreset, + synchronousWrite: Bool, + exportPerformance: ExportPerformancePreset) -> PersistencePerformancePreset { + return PersistencePerformancePreset(maxFileSize: storagePerformance.maxFileSize, + maxDirectorySize: storagePerformance.maxDirectorySize, + maxFileAgeForWrite: storagePerformance.maxFileAgeForWrite, + minFileAgeForRead: storagePerformance.minFileAgeForRead, + maxFileAgeForRead: storagePerformance.maxFileAgeForRead, + maxObjectsInFile: storagePerformance.maxObjectsInFile, + maxObjectSize: storagePerformance.maxObjectSize, + synchronousWrite: synchronousWrite, + initialExportDelay: exportPerformance.initialExportDelay, + defaultExportDelay: exportPerformance.defaultExportDelay, + minExportDelay: exportPerformance.minExportDelay, + maxExportDelay: exportPerformance.maxExportDelay, + exportDelayChangeRate: exportPerformance.exportDelayChangeRate) + } +} + +/// `DateProvider` mock returning consecutive dates in custom intervals, starting from given reference date. 
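+/// Each `currentDate()` call returns the current value and then advances it by the configured interval.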
+class RelativeDateProvider: DateProvider { + private(set) var date: Date + internal let timeInterval: TimeInterval + private let queue = DispatchQueue(label: "queue-RelativeDateProvider-\(UUID().uuidString)") + + private init(date: Date, timeInterval: TimeInterval) { + self.date = date + self.timeInterval = timeInterval + } + + convenience init(using date: Date = Date()) { + self.init(date: date, timeInterval: 0) + } + + convenience init(startingFrom referenceDate: Date = Date(), advancingBySeconds timeInterval: TimeInterval = 0) { + self.init(date: referenceDate, timeInterval: timeInterval) + } + + /// Returns current date and advances next date by `timeInterval`. + func currentDate() -> Date { + defer { + queue.async { + self.date.addTimeInterval(self.timeInterval) + } + } + return queue.sync { + return date + } + } + + /// Pushes time forward by given number of seconds. + func advance(bySeconds seconds: TimeInterval) { + queue.async { + self.date = self.date.addingTimeInterval(seconds) + } + } +} + +struct DataExporterMock: DataExporter { + let exportStatus: DataExportStatus + + var onExport: ((Data) -> Void)? = nil + + func export(data: Data) -> DataExportStatus { + onExport?(data) + return exportStatus + } +} + +extension DataExportStatus { + static func mockWith(needsRetry: Bool) -> DataExportStatus { + return DataExportStatus(needsRetry: needsRetry) + } +} + +class FileWriterMock: FileWriter { + var onWrite: ((Bool, Data) -> Void)? = nil + + func write(data: Data) { + onWrite?(false, data) + } + + func writeSync(data: Data) { + onWrite?(true, data) + } + + var onFlush: (() -> Void)? = nil + + func flush() { + onFlush?() + } +} + +class FileReaderMock: FileReader { + + private class ReadableFileMock: ReadableFile { + private var deleted = false + private let data: Data + + private(set) var name: String + + init(name: String, data: Data) { + self.name = name + self.data = data + } + + func read() throws -> Data { + guard deleted == false else { + throw ErrorMock("read failed because delete was called") + } + return data + } + + func delete() throws { + deleted = true + } + } + + var files: [ReadableFile] = [] + + func addFile(name: String, data: Data) { + files.append(ReadableFileMock(name: name, data: data)) + } + + func readNextBatch() -> Batch? { + if let file = files.first, + let fileData = try? file.read() { + return Batch(data: fileData, file: file) + } + + return nil + } + + func onRemainingBatches(process: (Batch) -> ()) -> Bool { + do { + try files.forEach { + let fileData = try $0.read() + process(Batch(data: fileData, file: $0)) + } + + return true + } catch { + return false + } + } + + func markBatchAsRead(_ batch: Batch) { + try? batch.file.delete() + files.removeAll { file -> Bool in + return file.name == batch.file.name + } + } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Helpers/FoundationMocks.swift b/Tests/ExportersTests/PersistenceExporter/Helpers/FoundationMocks.swift new file mode 100644 index 00000000..93e7ea8c --- /dev/null +++ b/Tests/ExportersTests/PersistenceExporter/Helpers/FoundationMocks.swift @@ -0,0 +1,103 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +import Foundation + +/* + A collection of mocks for different `Foundation` types. The convention we use is to extend + types with static factory function prefixed with "mock". For example: + + ``` + extension URL { + static func mockAny() -> URL { + // ... 
+        }
+    }
+
+    extension URLSession {
+        static func mockDeliverySuccess(data: Data, response: HTTPURLResponse) -> URLSessionMock {
+            // ...
+        }
+    }
+    ```
+
+ Other conventions to follow:
+ * Use the name `mockAny()` to name functions that return any value of a given type.
+ * Use descriptive function and parameter names for functions that configure the object for a particular scenario.
+ * Always use the minimal set of parameters required by the given mock scenario.
+
+ */
+
+// MARK: - Basic types
+
+protocol AnyMockable {
+    static func mockAny() -> Self
+}
+
+extension Data: AnyMockable {
+    static func mockAny() -> Data {
+        return Data()
+    }
+
+    static func mockRepeating(byte: UInt8, times count: Int) -> Data {
+        return Data(repeating: byte, count: count)
+    }
+
+    static func mock(ofSize size: UInt64) -> Data {
+        return mockRepeating(byte: 0x41, times: Int(size))
+    }
+}
+
+extension Array where Element == Data {
+    /// Returns chunks of mocked data. The cumulative size of all chunks equals `totalSize`.
+    static func mockChunksOf(totalSize: UInt64, maxChunkSize: UInt64) -> [Data] {
+        var chunks: [Data] = []
+        var bytesWritten: UInt64 = 0
+
+        while bytesWritten < totalSize {
+            let bytesLeft = totalSize - bytesWritten
+            var nextChunkSize: UInt64 = bytesLeft > Int.max ? UInt64(Int.max) : bytesLeft // prevents `Int` overflow
+            nextChunkSize = nextChunkSize > maxChunkSize ? maxChunkSize : nextChunkSize // caps the next chunk to its max size
+            chunks.append(.mockRepeating(byte: 0x1, times: Int(nextChunkSize)))
+            bytesWritten += UInt64(nextChunkSize)
+        }
+
+        return chunks
+    }
+}
+
+extension Date: AnyMockable {
+    static func mockAny() -> Date {
+        return Date(timeIntervalSinceReferenceDate: 1)
+    }
+}
+
+extension TimeInterval: AnyMockable {
+    static func mockAny() -> TimeInterval {
+        return 0
+    }
+
+    static let distantFuture = TimeInterval(integerLiteral: .max)
+}
+
+struct ErrorMock: Error, CustomStringConvertible {
+    let description: String
+
+    init(_ description: String = "") {
+        self.description = description
+    }
+}
+
+struct FailingCodableMock: Codable {
+    init() {}
+
+    init(from decoder: Decoder) throws {
+        throw ErrorMock("Failing codable failed to decode")
+    }
+
+    func encode(to encoder: Encoder) throws {
+        throw ErrorMock("Failing codable failed to encode")
+    }
+}
diff --git a/Tests/ExportersTests/PersistenceExporter/Helpers/PersistenceExtensions.swift b/Tests/ExportersTests/PersistenceExporter/Helpers/PersistenceExtensions.swift
new file mode 100644
index 00000000..6c9ab9a5
--- /dev/null
+++ b/Tests/ExportersTests/PersistenceExporter/Helpers/PersistenceExtensions.swift
@@ -0,0 +1,29 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+@testable import PersistenceExporter
+import Foundation
+
+/*
+ Set of Persistence domain extensions over standard types for writing more readable tests.
+ Domain-agnostic extensions should be put in `SwiftExtensions.swift`.
+ */
+
+extension Date {
+    /// Returns the name of the file created at this date.
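+    /// Illustrative example (assuming the same `fileNameFrom(fileCreationDate:)` helper that the storage uses for writable files):
+    ///
+    ///     let expectedFile = temporaryDirectory.file(named: dateProvider.currentDate().toFileName)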
+    var toFileName: String {
+        return fileNameFrom(fileCreationDate: self)
+    }
+}
+
+extension File {
+    func makeReadonly() throws {
+        try FileManager.default.setAttributes([.immutable: true], ofItemAtPath: url.path)
+    }
+
+    func makeReadWrite() throws {
+        try FileManager.default.setAttributes([.immutable: false], ofItemAtPath: url.path)
+    }
+}
diff --git a/Tests/ExportersTests/PersistenceExporter/Helpers/SwiftExtensions.swift b/Tests/ExportersTests/PersistenceExporter/Helpers/SwiftExtensions.swift
new file mode 100644
index 00000000..853f5352
--- /dev/null
+++ b/Tests/ExportersTests/PersistenceExporter/Helpers/SwiftExtensions.swift
@@ -0,0 +1,46 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import Foundation
+import XCTest
+
+/*
+ Set of general extensions over standard types for writing more readable tests.
+ Extensions using Persistence domain objects should be put in `PersistenceExtensions.swift`.
+ */
+
+extension Optional {
+    struct UnwrappingException: Error {}
+
+    func unwrapOrThrow(file: StaticString = #file, line: UInt = #line) throws -> Wrapped {
+        switch self {
+        case .some(let unwrappedValue):
+            return unwrappedValue
+        case .none:
+            XCTFail("Expected value, got `nil`.", file: file, line: line)
+            throw UnwrappingException()
+        }
+    }
+}
+
+extension Date {
+    func secondsAgo(_ seconds: TimeInterval) -> Date {
+        return addingTimeInterval(-seconds)
+    }
+}
+
+extension TimeZone {
+    static var UTC: TimeZone { TimeZone(abbreviation: "UTC")! }
+    static var EET: TimeZone { TimeZone(abbreviation: "EET")! }
+    static func mockAny() -> TimeZone { .EET }
+}
+
+extension String {
+    var utf8Data: Data { data(using: .utf8)! }
+}
+
+extension Data {
+    var utf8String: String { String(decoding: self, as: UTF8.self) }
+}
diff --git a/Tests/ExportersTests/PersistenceExporter/Helpers/TestsDirectory.swift b/Tests/ExportersTests/PersistenceExporter/Helpers/TestsDirectory.swift
new file mode 100644
index 00000000..165dff41
--- /dev/null
+++ b/Tests/ExportersTests/PersistenceExporter/Helpers/TestsDirectory.swift
@@ -0,0 +1,47 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+@testable import PersistenceExporter
+import Foundation
+import XCTest
+
+/// Creates a `Directory` pointing to a unique subfolder in `/var/folders/`.
+/// Does not create the subfolder - it must be created later with `.create()`.
+func obtainUniqueTemporaryDirectory() -> Directory {
+    let subdirectoryName = "com.otel.persistence-tests-\(UUID().uuidString)"
+    let osTemporaryDirectoryURL = URL(fileURLWithPath: NSTemporaryDirectory(), isDirectory: true).appendingPathComponent(subdirectoryName, isDirectory: true)
+    print("💡 Obtained temporary directory URL: \(osTemporaryDirectoryURL)")
+    return Directory(url: osTemporaryDirectoryURL)
+}
+
+/// `Directory` pointing to a subfolder in `/var/folders/`.
+/// The subfolder does not exist and can be created and deleted by calling `.create()` and `.delete()`.
+let temporaryDirectory = obtainUniqueTemporaryDirectory()
+
+/// Extends `Directory` with a set of utilities for convenient work with files in tests.
+/// Provides handy methods to create / delete files and directories.
+extension Directory {
+    /// Creates an empty directory with the given attributes.
+    func create(attributes: [FileAttributeKey: Any]? = nil, file: StaticString = #file, line: UInt = #line) {
+        do {
+            try FileManager.default.createDirectory(at: url, withIntermediateDirectories: true, attributes: attributes)
+            let initialFilesCount = try files().count
+            XCTAssert(initialFilesCount == 0, "🔥 `TestsDirectory` is not empty: \(url)", file: file, line: line)
+        } catch {
+            XCTFail("🔥 Failed to create `TestsDirectory`: \(error)", file: file, line: line)
+        }
+    }
+
+    /// Deletes entire directory with its content.
+    func delete(file: StaticString = #file, line: UInt = #line) {
+        if FileManager.default.fileExists(atPath: url.path) {
+            do {
+                try FileManager.default.removeItem(at: url)
+            } catch {
+                XCTFail("🔥 Failed to delete `TestsDirectory`: \(error)", file: file, line: line)
+            }
+        }
+    }
+}
diff --git a/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift b/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift
new file mode 100644
index 00000000..3a6573bc
--- /dev/null
+++ b/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift
@@ -0,0 +1,215 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+@testable import PersistenceExporter
+import XCTest
+
+class PersistenceExporterDecoratorTests: XCTestCase {
+    class DecoratedExporterMock<T>: DecoratedExporter {
+        typealias SignalType = T
+
+        let exporter: ([T]) -> DataExportStatus
+
+        init(exporter: @escaping ([T]) -> DataExportStatus) {
+            self.exporter = exporter
+        }
+
+        func export(values: [T]) -> DataExportStatus {
+            return exporter(values)
+        }
+    }
+
+    class DataExportWorkerMock: DataExportWorkerProtocol {
+        var dataExporter: DataExporter? = nil
+        var onFlush: (() -> Bool)? = nil
+
+        func flush() -> Bool {
+            return onFlush?() ?? true
+        }
+    }
+
+    private typealias PersistenceExporter<T: Codable> = PersistenceExporterDecorator<DecoratedExporterMock<T>>
+
+    private func createPersistenceExporter<T: Codable>(
+        fileWriter: FileWriterMock = FileWriterMock(),
+        worker: inout DataExportWorkerMock,
+        decoratedExporter: DecoratedExporterMock<T> = DecoratedExporterMock<T>(exporter: { _ in
+            return DataExportStatus(needsRetry: false)
+        }),
+        storagePerformance: StoragePerformancePreset = StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles,
+        synchronousWrite: Bool = true,
+        exportPerformance: ExportPerformancePreset = ExportPerformanceMock.veryQuick
+    ) -> PersistenceExporter<T> {
+        return PersistenceExporterDecorator<DecoratedExporterMock<T>>(
+            decoratedExporter: decoratedExporter,
+            fileWriter: fileWriter,
+            workerFactory: {
+                worker.dataExporter = $0
+                return worker
+            },
+            performancePreset: PersistencePerformancePreset.mockWith(
+                storagePerformance: storagePerformance,
+                synchronousWrite: synchronousWrite,
+                exportPerformance: exportPerformance))
+    }
+
+    func testWhenSetupWithSynchronousWrite_thenWritesAreSynchronous() throws {
+        var worker = DataExportWorkerMock()
+        let fileWriter = FileWriterMock()
+
+        let exporter: PersistenceExporter<String> = createPersistenceExporter(
+            fileWriter: fileWriter,
+            worker: &worker)
+
+        fileWriter.onWrite = { writeSync, _ in
+            XCTAssertTrue(writeSync)
+        }
+
+        try exporter.export(values: ["value"])
+    }
+
+    func testWhenSetupWithAsynchronousWrite_thenWritesAreAsynchronous() throws {
+        var worker = DataExportWorkerMock()
+        let fileWriter = FileWriterMock()
+
+        let exporter: PersistenceExporter<String> = createPersistenceExporter(
+            fileWriter: fileWriter,
+            worker: &worker,
+            synchronousWrite: false)
+
+        fileWriter.onWrite = { writeSync, _ in
+            XCTAssertFalse(writeSync)
+        }
+
+        try exporter.export(values: ["value"])
+    }
+
+    func testWhenValueCannotBeEncoded_itThrowsAnError() {
+        // When
+        var worker = DataExportWorkerMock()
+
+        let exporter: PersistenceExporter<FailingCodableMock> = createPersistenceExporter(
+            worker: &worker)
+
+        XCTAssertThrowsError(try exporter.export(values: [FailingCodableMock()]))
+    }
+
+    func testWhenValueCannotBeDecoded_itReportsNoRetryIsNeeded() {
+        var worker = DataExportWorkerMock()
+
+        _ = createPersistenceExporter(worker: &worker) as PersistenceExporter<String>
+
+        let result = worker.dataExporter?.export(data: Data())
+
+        XCTAssertNotNil(result)
+        XCTAssertFalse(result!.needsRetry)
+    }
+
+    func testWhenItIsFlushed_thenItFlushesTheWriterAndWorker() {
+        let writerIsFlushedExpectation = self.expectation(description: "FileWriter was flushed")
+        let workerIsFlushedExpectation = self.expectation(description: "DataExportWorker was flushed")
+
+        var worker = DataExportWorkerMock()
+        let fileWriter = FileWriterMock()
+
+        let exporter: PersistenceExporter<String> = createPersistenceExporter(
+            fileWriter: fileWriter,
+            worker: &worker)
+
+        fileWriter.onFlush = {
+            writerIsFlushedExpectation.fulfill()
+        }
+
+        worker.onFlush = {
+            workerIsFlushedExpectation.fulfill()
+            return true
+        }
+
+        exporter.flush()
+
+        waitForExpectations(timeout: 1, handler: nil)
+    }
+
+    func testWhenObjectsDataIsExportedSeparately_thenObjectsAreExported() throws {
+        let v1ExportExpectation = self.expectation(description: "V1 exported")
+        let v2ExportExpectation = self.expectation(description: "V2 exported")
+        let v3ExportExpectation = self.expectation(description: "V3 exported")
+
+        let decoratedExporter = DecoratedExporterMock<String>(exporter: { values in
+            values.forEach { value in
+                switch value {
+                case "v1": v1ExportExpectation.fulfill()
+                case "v2": v2ExportExpectation.fulfill()
+                case "v3": v3ExportExpectation.fulfill()
+                default: break
+                }
+            }
+
+            return DataExportStatus(needsRetry: false)
+        })
+
+        var worker = DataExportWorkerMock()
+        let fileWriter = FileWriterMock()
+
+        let exporter: PersistenceExporter<String> = createPersistenceExporter(
+            fileWriter: fileWriter,
+            worker: &worker,
+            decoratedExporter: decoratedExporter)
+
+        fileWriter.onWrite = { _, data in
+            if let dataExporter = worker.dataExporter {
+                XCTAssertFalse(dataExporter.export(data: data).needsRetry)
+            }
+        }
+
+        try exporter.export(values: ["v1"])
+        try exporter.export(values: ["v2"])
+        try exporter.export(values: ["v3"])
+
+        waitForExpectations(timeout: 1, handler: nil)
+    }
+
+    func testWhenObjectsDataIsExportedConcatenated_thenObjectsAreExported() throws {
+        let v1ExportExpectation = self.expectation(description: "V1 exported")
+        let v2ExportExpectation = self.expectation(description: "V2 exported")
+        let v3ExportExpectation = self.expectation(description: "V3 exported")
+
+        let decoratedExporter = DecoratedExporterMock<String>(exporter: { values in
+            values.forEach { value in
+                switch value {
+                case "v1": v1ExportExpectation.fulfill()
+                case "v2": v2ExportExpectation.fulfill()
+                case "v3": v3ExportExpectation.fulfill()
+                default: break
+                }
+            }
+
+            return DataExportStatus(needsRetry: false)
+        })
+
+        var worker = DataExportWorkerMock()
+        let fileWriter = FileWriterMock()
+
+        let exporter: PersistenceExporter<String> = createPersistenceExporter(
+            fileWriter: fileWriter,
+            worker: &worker,
+            decoratedExporter: decoratedExporter)
+
+        var writtenData = Data()
+        fileWriter.onWrite = { _, data in
+            writtenData.append(data)
+        }
+
+        try exporter.export(values: ["v1"])
+        try exporter.export(values: ["v2"])
+        try exporter.export(values: ["v3"])
+
+        if let dataExporter = worker.dataExporter {
+            XCTAssertFalse(dataExporter.export(data: writtenData).needsRetry)
+        }
+
+        waitForExpectations(timeout: 1, handler: nil)
+    }
+}
diff --git a/Tests/ExportersTests/PersistenceExporter/PersistenceMetricExporterDecoratorTests.swift b/Tests/ExportersTests/PersistenceExporter/PersistenceMetricExporterDecoratorTests.swift
new file mode 100644
index 00000000..56a08ab0
--- /dev/null
+++ b/Tests/ExportersTests/PersistenceExporter/PersistenceMetricExporterDecoratorTests.swift
@@ -0,0 +1,82 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+@testable import PersistenceExporter
+import OpenTelemetryApi
+import OpenTelemetrySdk
+import XCTest
+
+class PersistenceMetricExporterDecoratorTests: XCTestCase {
+    class MetricExporterMock: MetricExporter {
+        let onExport: ([Metric]) -> MetricExporterResultCode
+
+        init(onExport: @escaping ([Metric]) -> MetricExporterResultCode) {
+            self.onExport = onExport
+        }
+
+        func export(metrics: [Metric], shouldCancel: (() -> Bool)?) -> MetricExporterResultCode {
+            return onExport(metrics)
+        }
+    }
+
+    override func setUp() {
+        super.setUp()
+        temporaryDirectory.create()
+    }
+
+    override func tearDown() {
+        temporaryDirectory.delete()
+        super.tearDown()
+    }
+
+    func testWhenExportMetricIsCalled_thenMetricsAreExported() throws {
+        let metricsExportExpectation = self.expectation(description: "metrics exported")
+
+        let mockMetricExporter = MetricExporterMock(onExport: { metrics in
+            metrics.forEach { metric in
+                if metric.name == "MyCounter" &&
+                    metric.namespace == "MyMeter" &&
+                    metric.data.count == 1 {
+                    if let metricData = metric.data[0] as? SumData<Int>,
+                        metricData.sum == 100,
+                        metricData.labels == ["labelKey": "labelValue"]
+                    {
+                        metricsExportExpectation.fulfill()
+                    }
+                }
+            }
+
+            return .success
+        })
+
+        let persistenceMetricExporter =
+            try PersistenceMetricExporterDecorator(
+                metricExporter: mockMetricExporter,
+                storageURL: temporaryDirectory.url,
+                writerQueue: DispatchQueue(label: "metricWriterQueue"),
+                readerQueue: DispatchQueue(label: "metricReaderQueue"),
+                exportQueue: DispatchQueue(label: "metricExportQueue"),
+                exportCondition: { return true },
+                performancePreset: PersistencePerformancePreset.mockWith(
+                    storagePerformance: StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles,
+                    synchronousWrite: true,
+                    exportPerformance: ExportPerformanceMock.veryQuick))
+
+        let provider = MeterProviderSdk(metricProcessor: MetricProcessorSdk(),
+                                        metricExporter: persistenceMetricExporter,
+                                        metricPushInterval: 0.1)
+
+        let meter = provider.get(instrumentationName: "MyMeter")
+
+        let myCounter = meter.createIntCounter(name: "MyCounter")
+        myCounter.add(value: 100, labels: ["labelKey": "labelValue"])
+
+        waitForExpectations(timeout: 10, handler: nil)
+    }
+}
diff --git a/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift b/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift
new file mode 100644
index 00000000..fecae909
--- /dev/null
+++ b/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift
@@ -0,0 +1,100 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+@testable import PersistenceExporter
+import OpenTelemetryApi
+import OpenTelemetrySdk
+import XCTest
+
+class PersistenceSpanExporterDecoratorTests: XCTestCase {
+    class SpanExporterMock: SpanExporter {
+        let onExport: ([SpanData]) -> SpanExporterResultCode
+        let onFlush: () -> SpanExporterResultCode
+        let onShutdown: () -> Void
+
+        init(onExport: @escaping ([SpanData]) -> SpanExporterResultCode,
+             onFlush: @escaping () -> SpanExporterResultCode = { return .success },
+             onShutdown: @escaping () -> Void = {}) {
+            self.onExport = onExport
+            self.onFlush = onFlush
+            self.onShutdown = onShutdown
+        }
+
+        @discardableResult func export(spans: [SpanData]) -> SpanExporterResultCode {
+            return onExport(spans)
+        }
+
+        func flush() -> SpanExporterResultCode {
+            return onFlush()
+        }
+
+        func shutdown() {
+            onShutdown()
+        }
+    }
+
+    override func setUp() {
+        super.setUp()
+        temporaryDirectory.create()
+    }
+
+    override func tearDown() {
+        temporaryDirectory.delete()
+        super.tearDown()
+    }
+
+    func testWhenExportSpansIsCalled_thenSpansAreExported() throws {
+        let spansExportExpectation = self.expectation(description: "spans exported")
+        let exporterShutdownExpectation = self.expectation(description: "exporter shut down")
+
+        let mockSpanExporter = SpanExporterMock(onExport: { spans in
+            spans.forEach { span in
+                if span.name == "SimpleSpan" &&
+                    span.events.count == 1 &&
+                    span.events.first!.name == "My event" {
+                    spansExportExpectation.fulfill()
+                }
+            }
+
+            return .success
+        }, onShutdown: {
+            exporterShutdownExpectation.fulfill()
+        })
+
+        let persistenceSpanExporter =
+            try PersistenceSpanExporterDecorator(
+                spanExporter: mockSpanExporter,
+                storageURL: temporaryDirectory.url,
+                writerQueue: DispatchQueue(label: "spanWriterQueue"),
+                readerQueue: DispatchQueue(label: "spanReaderQueue"),
+                exportQueue: DispatchQueue(label: "spanExportQueue"),
+                exportCondition: { return true },
+                performancePreset:
PersistencePerformancePreset.mockWith( + storagePerformance: StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles, + synchronousWrite: true, + exportPerformance: ExportPerformanceMock.veryQuick)) + + let instrumentationLibraryName = "SimpleExporter" + let instrumentationLibraryVersion = "semver:0.1.0" + + let tracer = OpenTelemetrySDK.instance.tracerProvider.get(instrumentationName: instrumentationLibraryName, instrumentationVersion: instrumentationLibraryVersion) as! TracerSdk + + let spanProcessor = SimpleSpanProcessor(spanExporter: persistenceSpanExporter) + OpenTelemetrySDK.instance.tracerProvider.addSpanProcessor(spanProcessor) + + simpleSpan(tracer: tracer) + spanProcessor.shutdown() + + waitForExpectations(timeout: 10, handler: nil) + } + + private func simpleSpan(tracer: TracerSdk) { + let span = tracer.spanBuilder(spanName: "SimpleSpan").setSpanKind(spanKind: .client).startSpan() + span.addEvent(name: "My event", timestamp: Date()) + span.end() + } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Storage/DirectoryTests.swift b/Tests/ExportersTests/PersistenceExporter/Storage/DirectoryTests.swift new file mode 100644 index 00000000..033a539a --- /dev/null +++ b/Tests/ExportersTests/PersistenceExporter/Storage/DirectoryTests.swift @@ -0,0 +1,78 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +@testable import PersistenceExporter +import XCTest + +class DirectoryTests: XCTestCase { + private let uniqueSubdirectoryName = UUID().uuidString + private let fileManager = FileManager.default + + // MARK: - Directory creation + + func testGivenSubdirectoryName_itCreatesIt() throws { + let directory = try Directory(withSubdirectoryPath: uniqueSubdirectoryName) + defer { directory.delete() } + + XCTAssertTrue(fileManager.fileExists(atPath: directory.url.path)) + } + + func testGivenSubdirectoryPath_itCreatesIt() throws { + let path = uniqueSubdirectoryName + "/subdirectory/another-subdirectory" + let directory = try Directory(withSubdirectoryPath: path) + defer { directory.delete() } + + XCTAssertTrue(fileManager.fileExists(atPath: directory.url.path)) + } + + func testWhenDirectoryExists_itDoesNothing() throws { + let path = uniqueSubdirectoryName + "/subdirectory/another-subdirectory" + let originalDirectory = try Directory(withSubdirectoryPath: path) + defer { originalDirectory.delete() } + _ = try originalDirectory.createFile(named: "abcd") + + // Try again when directory exists + let retrievedDirectory = try Directory(withSubdirectoryPath: path) + + XCTAssertEqual(retrievedDirectory.url, originalDirectory.url) + XCTAssertTrue(fileManager.fileExists(atPath: retrievedDirectory.url.appendingPathComponent("abcd").path)) + } + + // MARK: - Files manipulation + + func testItCreatesFile() throws { + let path = uniqueSubdirectoryName + "/subdirectory/another-subdirectory" + let directory = try Directory(withSubdirectoryPath: path) + defer { directory.delete() } + + let file = try directory.createFile(named: "abcd") + + XCTAssertEqual(file.url, directory.url.appendingPathComponent("abcd")) + XCTAssertTrue(fileManager.fileExists(atPath: file.url.path)) + } + + func testItRetrievesFile() throws { + let directory = try Directory(withSubdirectoryPath: uniqueSubdirectoryName) + defer { directory.delete() } + _ = try directory.createFile(named: "abcd") + + let file = directory.file(named: "abcd") + XCTAssertEqual(file?.url, directory.url.appendingPathComponent("abcd")) + XCTAssertTrue(fileManager.fileExists(atPath: file!.url.path)) + } + + 
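+    /// Verifies that `files()` returns every file that was created in the directory.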
func testItRetrievesAllFiles() throws { + let directory = try Directory(withSubdirectoryPath: uniqueSubdirectoryName) + defer { directory.delete() } + _ = try directory.createFile(named: "f1") + _ = try directory.createFile(named: "f2") + _ = try directory.createFile(named: "f3") + + let files = try directory.files() + XCTAssertEqual(files.count, 3) + files.forEach { file in XCTAssertTrue(file.url.relativePath.contains(directory.url.relativePath)) } + files.forEach { file in XCTAssertTrue(fileManager.fileExists(atPath: file.url.path)) } + } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Storage/FileTests.swift b/Tests/ExportersTests/PersistenceExporter/Storage/FileTests.swift new file mode 100644 index 00000000..d7e85efb --- /dev/null +++ b/Tests/ExportersTests/PersistenceExporter/Storage/FileTests.swift @@ -0,0 +1,91 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +@testable import PersistenceExporter +import XCTest + +class FileTests: XCTestCase { + private let fileManager = FileManager.default + + override func setUp() { + super.setUp() + temporaryDirectory.create() + } + + override func tearDown() { + temporaryDirectory.delete() + super.tearDown() + } + + func testItAppendsDataToFile() throws { + let file = try temporaryDirectory.createFile(named: "file") + + try file.append(data: Data([0x41, 0x41, 0x41, 0x41, 0x41])) // 5 bytes + + XCTAssertEqual( + try Data(contentsOf: file.url), + Data([0x41, 0x41, 0x41, 0x41, 0x41]) + ) + + try file.append(data: Data([0x42, 0x42, 0x42, 0x42, 0x42])) // 5 bytes + try file.append(data: Data([0x41, 0x41, 0x41, 0x41, 0x41])) // 5 bytes + + XCTAssertEqual( + try Data(contentsOf: file.url), + Data( + [ + 0x41, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x42, 0x42, 0x42, 0x42, + 0x41, 0x41, 0x41, 0x41, 0x41, + ] + ) + ) + } + + func testItReadsDataFromFile() throws { + let file = try temporaryDirectory.createFile(named: "file") + try file.append(data: "Hello 👋".utf8Data) + + XCTAssertEqual(try file.read().utf8String, "Hello 👋") + } + + func testItDeletesFile() throws { + let file = try temporaryDirectory.createFile(named: "file") + XCTAssertTrue(fileManager.fileExists(atPath: file.url.path)) + + try file.delete() + + XCTAssertFalse(fileManager.fileExists(atPath: file.url.path)) + } + + func testItReturnsFileSize() throws { + let file = try temporaryDirectory.createFile(named: "file") + + try file.append(data: .mock(ofSize: 5)) + XCTAssertEqual(try file.size(), 5) + + try file.append(data: .mock(ofSize: 10)) + XCTAssertEqual(try file.size(), 15) + } + + func testWhenIOExceptionHappens_itThrowsWhenWritting() throws { + let file = try temporaryDirectory.createFile(named: "file") + try file.delete() + + XCTAssertThrowsError(try file.append(data: .mock(ofSize: 5))) { error in + XCTAssertEqual((error as NSError).localizedDescription, "The file “file” doesn’t exist.") + } + } + + func testWhenIOExceptionHappens_itThrowsWhenReading() throws { + let file = try temporaryDirectory.createFile(named: "file") + try file.append(data: .mock(ofSize: 5)) + try file.delete() + + XCTAssertThrowsError(try file.read()) { error in + XCTAssertEqual((error as NSError).localizedDescription, "The file “file” doesn’t exist.") + } + } +} diff --git a/Tests/ExportersTests/PersistenceExporter/Storage/FilesOrchestratorTests.swift b/Tests/ExportersTests/PersistenceExporter/Storage/FilesOrchestratorTests.swift new file mode 100644 index 00000000..5b7d5bf0 --- /dev/null +++ 
b/Tests/ExportersTests/PersistenceExporter/Storage/FilesOrchestratorTests.swift @@ -0,0 +1,279 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 + */ + +@testable import PersistenceExporter +import XCTest + +class FilesOrchestratorTests: XCTestCase { + private let performance: PersistencePerformancePreset = .default + + override func setUp() { + super.setUp() + temporaryDirectory.create() + } + + override func tearDown() { + temporaryDirectory.delete() + super.tearDown() + } + + /// Configures `FilesOrchestrator` under tests. + private func configureOrchestrator(using dateProvider: DateProvider) -> FilesOrchestrator { + return FilesOrchestrator( + directory: temporaryDirectory, + performance: performance, + dateProvider: dateProvider + ) + } + + // MARK: - Writable file tests + + func testGivenDefaultWriteConditions_whenUsedFirstTime_itCreatesNewWritableFile() throws { + let dateProvider = RelativeDateProvider() + let orchestrator = configureOrchestrator(using: dateProvider) + _ = try orchestrator.getWritableFile(writeSize: 1) + + XCTAssertEqual(try temporaryDirectory.files().count, 1) + XCTAssertNotNil(temporaryDirectory.file(named: dateProvider.currentDate().toFileName)) + } + + func testGivenDefaultWriteConditions_whenUsedNextTime_itReusesWritableFile() throws { + let orchestrator = configureOrchestrator(using: RelativeDateProvider(advancingBySeconds: 1)) + let file1 = try orchestrator.getWritableFile(writeSize: 1) + let file2 = try orchestrator.getWritableFile(writeSize: 1) + + XCTAssertEqual(try temporaryDirectory.files().count, 1) + XCTAssertEqual(file1.name, file2.name) + } + + func testGivenDefaultWriteConditions_whenFileCanNotBeUsedMoreTimes_itCreatesNewFile() throws { + let orchestrator = configureOrchestrator(using: RelativeDateProvider(advancingBySeconds: 0.001)) + var previousFile: WritableFile = try orchestrator.getWritableFile(writeSize: 1) // first use + var nextFile: WritableFile + + // use file maximum number of times + for _ in (0..