From 4521fbe3b90af5b37aff51daf7040e31154e892d Mon Sep 17 00:00:00 2001 From: Bryce Buchanan Date: Thu, 21 Sep 2023 12:07:12 -0700 Subject: [PATCH] clean up --- .../DatadogExporter/DatadogExporter.swift | 6 +- .../Exporters/InMemory/InMemoryExporter.swift | 64 +-- .../Exporters/Jaeger/JaegerSpanExporter.swift | 44 +- .../logs/OtlpLogExporter.swift | 102 ++-- .../trace/OtlpTraceExporter.swift | 90 ++-- .../trace/OtlpTraceJsonExporter.swift | 82 +-- .../OtlpHttpExporterBase.swift | 59 +- .../logs/OtlpHttpLogExporter.swift | 114 ++-- .../metric/OltpHTTPMetricExporter.swift | 94 ++-- .../trace/OtlpHttpTraceExporter.swift | 96 ++-- .../Persistence/Export/DataExportWorker.swift | 172 +++--- .../PersistenceExporterDecorator.swift | 6 +- .../PersistenceLogExporterDecorator.swift | 10 +- .../PersistenceMetricExporterDecorator.swift | 2 +- .../PersistenceSpanExporterDecorator.swift | 6 +- Sources/Exporters/Stdout/StdoutExporter.swift | 6 +- .../Zipkin/ZipkinTraceExporter.swift | 4 +- .../SignPostIntegration.swift | 38 +- .../Export/InMemoryLogRecordExporter.swift | 44 +- .../Logs/Export/LogRecordExporter.swift | 28 +- .../Logs/Export/MultiLogRecordExporter.swift | 50 +- .../Logs/Export/NoopLogRecordExporter.swift | 20 +- .../Logs/LoggerSharedState.swift | 78 +-- .../Processors/BatchLogRecordProcessor.swift | 226 ++++---- .../Logs/Processors/LogRecordProcessor.swift | 44 +- .../Processors/MultiLogRecordProcessor.swift | 4 +- .../Trace/Export/MultiSpanExporter.swift | 6 +- .../Trace/Export/SpanExporter.swift | 46 +- .../Trace/SpanProcessor.swift | 52 +- .../SpanProcessors/BatchSpanProcessor.swift | 8 +- .../SpanProcessors/MultiSpanProcessor.swift | 82 +-- .../SpanProcessors/SimpleSpanProcessor.swift | 88 +-- .../OtlpHttpMetricsExporterTest.swift | 198 +++---- .../OtlpLogRecordExporterTests.swift | 280 +++++----- .../Helpers/CoreMocks.swift | 350 ++++++------ .../PersistenceExporterDecoratorTests.swift | 373 ++++++------- ...ersistenceSpanExporterDecoratorTests.swift | 162 +++--- .../Logs/BatchLogRecordProcessorTests.swift | 2 +- .../Exporters/InMemoryLogExporterTests.swift | 24 +- .../Logs/Mocks/LogRecordExporterMock.swift | 46 +- .../Logs/Mocks/LogRecordProcessorMock.swift | 50 +- .../Logs/MultiLogRecordProcessorTests.swift | 62 +-- .../Export/BatchSpansProcessorTests.swift | 502 +++++++++--------- .../Trace/Export/MultiSpanExporterTests.swift | 100 ++-- .../Trace/Mocks/ReadableSpanMock.swift | 110 ++-- .../Trace/Mocks/SpanExporterMock.swift | 42 +- .../Trace/Mocks/SpanProcessorMock.swift | 62 +-- 47 files changed, 2085 insertions(+), 2049 deletions(-) diff --git a/Sources/Exporters/DatadogExporter/DatadogExporter.swift b/Sources/Exporters/DatadogExporter/DatadogExporter.swift index 9b5264f4..35ece18e 100644 --- a/Sources/Exporters/DatadogExporter/DatadogExporter.swift +++ b/Sources/Exporters/DatadogExporter/DatadogExporter.swift @@ -19,7 +19,7 @@ public class DatadogExporter: SpanExporter, MetricExporter { metricsExporter = try MetricsExporter(config: configuration) } - public func export(spans: [SpanData]) -> SpanExporterResultCode { + public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { spans.forEach { if $0.traceFlags.sampled || configuration.exportUnsampledSpans { spansExporter?.exportSpan(span: $0) @@ -38,7 +38,7 @@ public class DatadogExporter: SpanExporter, MetricExporter { return .success } - public func flush() -> SpanExporterResultCode { + public func flush(explicitTimeout: TimeInterval?) 
-> SpanExporterResultCode { spansExporter?.tracesStorage.writer.queue.sync {} logsExporter?.logsStorage.writer.queue.sync {} metricsExporter?.metricsStorage.writer.queue.sync {} @@ -49,7 +49,7 @@ public class DatadogExporter: SpanExporter, MetricExporter { return .success } - public func shutdown() { + public func shutdown(explicitTimeout: TimeInterval?) { _ = self.flush() } diff --git a/Sources/Exporters/InMemory/InMemoryExporter.swift b/Sources/Exporters/InMemory/InMemoryExporter.swift index 1251cb13..e1ad010f 100644 --- a/Sources/Exporters/InMemory/InMemoryExporter.swift +++ b/Sources/Exporters/InMemory/InMemoryExporter.swift @@ -7,38 +7,38 @@ import Foundation import OpenTelemetrySdk public class InMemoryExporter: SpanExporter { - private var finishedSpanItems: [SpanData] = [] - private var isRunning: Bool = true - - public init() {} - - public func getFinishedSpanItems() -> [SpanData] { - return finishedSpanItems - } - - public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - guard isRunning else { - return .failure - } - - finishedSpanItems.append(contentsOf: spans) - return .success + private var finishedSpanItems: [SpanData] = [] + private var isRunning: Bool = true + + public init() {} + + public func getFinishedSpanItems() -> [SpanData] { + return finishedSpanItems + } + + public func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + guard isRunning else { + return .failure } - - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - guard isRunning else { - return .failure - } - - return .success - } - - public func reset() { - finishedSpanItems.removeAll() - } - - public func shutdown(explicitTimeout: TimeInterval?) { - finishedSpanItems.removeAll() - isRunning = false + + finishedSpanItems.append(contentsOf: spans) + return .success + } + + public func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + guard isRunning else { + return .failure } + + return .success + } + + public func reset() { + finishedSpanItems.removeAll() + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + finishedSpanItems.removeAll() + isRunning = false + } } diff --git a/Sources/Exporters/Jaeger/JaegerSpanExporter.swift b/Sources/Exporters/Jaeger/JaegerSpanExporter.swift index 6ec2ee1d..459d027f 100644 --- a/Sources/Exporters/Jaeger/JaegerSpanExporter.swift +++ b/Sources/Exporters/Jaeger/JaegerSpanExporter.swift @@ -10,29 +10,29 @@ import OpenTelemetrySdk import Thrift public class JaegerSpanExporter: SpanExporter { - let collectorAddress: String - let process: Process - - public init(serviceName: String, collectorAddress: String) { - process = Process(serviceName: serviceName, tags: TList()) - self.collectorAddress = collectorAddress - } - - public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - var spanList = TList() - spanList.append(contentsOf: Adapter.toJaeger(spans: spans)) - let batch = Batch(process: process, spans: spanList) + let collectorAddress: String + let process: Process + + public init(serviceName: String, collectorAddress: String) { + process = Process(serviceName: serviceName, tags: TList()) + self.collectorAddress = collectorAddress + } + + public func export(spans: [SpanData], explicitTimeout: TimeInterval? 
= nil) -> SpanExporterResultCode { + var spanList = TList() + spanList.append(contentsOf: Adapter.toJaeger(spans: spans)) + let batch = Batch(process: process, spans: spanList) let sender = Sender(host: collectorAddress) - let success = sender.sendBatch(batch: batch) - return success ? SpanExporterResultCode.success : SpanExporterResultCode.failure - } - - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - return .success - } - - public func shutdown(explicitTimeout: TimeInterval?) { - } + let success = sender.sendBatch(batch: batch) + return success ? SpanExporterResultCode.success : SpanExporterResultCode.failure + } + + public func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + return .success + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + } } #endif diff --git a/Sources/Exporters/OpenTelemetryProtocolGrpc/logs/OtlpLogExporter.swift b/Sources/Exporters/OpenTelemetryProtocolGrpc/logs/OtlpLogExporter.swift index 6e71a6d1..41b9c9f2 100644 --- a/Sources/Exporters/OpenTelemetryProtocolGrpc/logs/OtlpLogExporter.swift +++ b/Sources/Exporters/OpenTelemetryProtocolGrpc/logs/OtlpLogExporter.swift @@ -13,59 +13,59 @@ import OpenTelemetrySdk import OpenTelemetryProtocolExporterCommon public class OtlpLogExporter : LogRecordExporter { - let channel : GRPCChannel - var logClient : Opentelemetry_Proto_Collector_Logs_V1_LogsServiceNIOClient - let config : OtlpConfiguration - var callOptions : CallOptions - - public init(channel: GRPCChannel, - config: OtlpConfiguration = OtlpConfiguration(), - logger: Logging.Logger = Logging.Logger(label: "io.grpc", factory: { _ in SwiftLogNoOpLogHandler() }), - envVarHeaders: [(String, String)]? = EnvVarHeaders.attributes){ - self.channel = channel - logClient = Opentelemetry_Proto_Collector_Logs_V1_LogsServiceNIOClient(channel: channel) - self.config = config - let userAgentHeader = (Constants.HTTP.userAgent, Headers.getUserAgentHeader()) - if let headers = envVarHeaders { - var updatedHeaders = headers - updatedHeaders.append(userAgentHeader) - callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) - } else if let headers = config.headers { - var updatedHeaders = headers - updatedHeaders.append(userAgentHeader) - callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) - } - else { - var headers = [(String, String)]() - headers.append(userAgentHeader) - callOptions = CallOptions(customMetadata: HPACKHeaders(headers), logger: logger) - } + let channel : GRPCChannel + var logClient : Opentelemetry_Proto_Collector_Logs_V1_LogsServiceNIOClient + let config : OtlpConfiguration + var callOptions : CallOptions + + public init(channel: GRPCChannel, + config: OtlpConfiguration = OtlpConfiguration(), + logger: Logging.Logger = Logging.Logger(label: "io.grpc", factory: { _ in SwiftLogNoOpLogHandler() }), + envVarHeaders: [(String, String)]? 
= EnvVarHeaders.attributes){ + self.channel = channel + logClient = Opentelemetry_Proto_Collector_Logs_V1_LogsServiceNIOClient(channel: channel) + self.config = config + let userAgentHeader = (Constants.HTTP.userAgent, Headers.getUserAgentHeader()) + if let headers = envVarHeaders { + var updatedHeaders = headers + updatedHeaders.append(userAgentHeader) + callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) + } else if let headers = config.headers { + var updatedHeaders = headers + updatedHeaders.append(userAgentHeader) + callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) } - - public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval?) -> ExportResult { - let logRequest = Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest.with { request in - request.resourceLogs = LogRecordAdapter.toProtoResourceRecordLog(logRecordList: logRecords) - } - let timeout = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, config.timeout) - if timeout > 0 { - callOptions.timeLimit = TimeLimit.timeout(TimeAmount.nanoseconds(Int64(timeout.toNanoseconds))) - } - - - let export = logClient.export(logRequest, callOptions: callOptions) - do { - _ = try export.response.wait() - return .success - } catch { - return .failure - } + else { + var headers = [(String, String)]() + headers.append(userAgentHeader) + callOptions = CallOptions(customMetadata: HPACKHeaders(headers), logger: logger) } - - public func shutdown(explicitTimeout: TimeInterval?) { - _ = channel.close() + } + + public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> ExportResult { + let logRequest = Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest.with { request in + request.resourceLogs = LogRecordAdapter.toProtoResourceRecordLog(logRecordList: logRecords) } - - public func forceFlush(explicitTimeout: TimeInterval?) -> ExportResult { - .success + let timeout = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, config.timeout) + if timeout > 0 { + callOptions.timeLimit = TimeLimit.timeout(TimeAmount.nanoseconds(Int64(timeout.toNanoseconds))) + } + + + let export = logClient.export(logRequest, callOptions: callOptions) + do { + _ = try export.response.wait() + return .success + } catch { + return .failure } + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + _ = channel.close() + } + + public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { + .success + } } diff --git a/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceExporter.swift b/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceExporter.swift index c958b5fd..5078038f 100644 --- a/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceExporter.swift +++ b/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceExporter.swift @@ -14,57 +14,57 @@ import OpenTelemetryProtocolExporterCommon public class OtlpTraceExporter: SpanExporter { - let channel: GRPCChannel - var traceClient: Opentelemetry_Proto_Collector_Trace_V1_TraceServiceNIOClient - let config : OtlpConfiguration - var callOptions : CallOptions - - public init(channel: GRPCChannel, config: OtlpConfiguration = OtlpConfiguration(), logger: Logging.Logger = Logging.Logger(label: "io.grpc", factory: { _ in SwiftLogNoOpLogHandler() }), envVarHeaders: [(String,String)]? 
= EnvVarHeaders.attributes) { - self.channel = channel - traceClient = Opentelemetry_Proto_Collector_Trace_V1_TraceServiceNIOClient(channel: channel) - self.config = config - let userAgentHeader = (Constants.HTTP.userAgent, Headers.getUserAgentHeader()) - if let headers = envVarHeaders { - var updatedHeaders = headers - updatedHeaders.append(userAgentHeader) - callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) - } else if let headers = config.headers { - var updatedHeaders = headers - updatedHeaders.append(userAgentHeader) - callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) - } else { - var headers = [(String, String)]() - headers.append(userAgentHeader) - callOptions = CallOptions(customMetadata: HPACKHeaders(headers), logger: logger) - } + let channel: GRPCChannel + var traceClient: Opentelemetry_Proto_Collector_Trace_V1_TraceServiceNIOClient + let config : OtlpConfiguration + var callOptions : CallOptions + + public init(channel: GRPCChannel, config: OtlpConfiguration = OtlpConfiguration(), logger: Logging.Logger = Logging.Logger(label: "io.grpc", factory: { _ in SwiftLogNoOpLogHandler() }), envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes) { + self.channel = channel + traceClient = Opentelemetry_Proto_Collector_Trace_V1_TraceServiceNIOClient(channel: channel) + self.config = config + let userAgentHeader = (Constants.HTTP.userAgent, Headers.getUserAgentHeader()) + if let headers = envVarHeaders { + var updatedHeaders = headers + updatedHeaders.append(userAgentHeader) + callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) + } else if let headers = config.headers { + var updatedHeaders = headers + updatedHeaders.append(userAgentHeader) + callOptions = CallOptions(customMetadata: HPACKHeaders(updatedHeaders), logger: logger) + } else { + var headers = [(String, String)]() + headers.append(userAgentHeader) + callOptions = CallOptions(customMetadata: HPACKHeaders(headers), logger: logger) } - - + } + + public func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { let exportRequest = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: spans) } let timeout = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, config.timeout) - if timeout > 0 { - callOptions.timeLimit = TimeLimit.timeout(TimeAmount.nanoseconds(Int64(timeout.toNanoseconds))) - } - - let export = traceClient.export(exportRequest, callOptions: callOptions) - - do { - // wait() on the response to stop the program from exiting before the response is received. - _ = try export.response.wait() - return .success - } catch { - return .failure - } - } - - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - return .success + if timeout > 0 { + callOptions.timeLimit = TimeLimit.timeout(TimeAmount.nanoseconds(Int64(timeout.toNanoseconds))) } - - public func shutdown(explicitTimeout: TimeInterval?) { - _ = channel.close() + + let export = traceClient.export(exportRequest, callOptions: callOptions) + + do { + // wait() on the response to stop the program from exiting before the response is received. + _ = try export.response.wait() + return .success + } catch { + return .failure } + } + + public func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + return .success + } + + public func shutdown(explicitTimeout: TimeInterval? 
= nil) { + _ = channel.close() + } } diff --git a/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceJsonExporter.swift b/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceJsonExporter.swift index ad8f82a2..f0db0b61 100644 --- a/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceJsonExporter.swift +++ b/Sources/Exporters/OpenTelemetryProtocolGrpc/trace/OtlpTraceJsonExporter.swift @@ -8,49 +8,49 @@ import OpenTelemetrySdk import OpenTelemetryProtocolExporterCommon public class OtlpTraceJsonExporter: SpanExporter { - - - // MARK: - Variables declaration - private var exportedSpans = [OtlpSpan]() - private var isRunning: Bool = true - - // MARK: - Json Exporter helper methods - func getExportedSpans() -> [OtlpSpan] { - exportedSpans - } - - public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - guard isRunning else { return .failure } - - let exportRequest = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { - $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: spans) - } - - do { - let jsonData = try exportRequest.jsonUTF8Data() - do { - let span = try JSONDecoder().decode(OtlpSpan.self, from: jsonData) - exportedSpans.append(span) - } catch { - print("Decode Error: \(error)") - } - return .success - } catch { - return .failure - } - } - - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - guard isRunning else { return .failure } - return .success - } + + + // MARK: - Variables declaration + private var exportedSpans = [OtlpSpan]() + private var isRunning: Bool = true + + // MARK: - Json Exporter helper methods + func getExportedSpans() -> [OtlpSpan] { + exportedSpans + } + + public func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + guard isRunning else { return .failure } - public func reset() { - exportedSpans.removeAll() + let exportRequest = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { + $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: spans) } - public func shutdown(explicitTimeout: TimeInterval?) { - exportedSpans.removeAll() - isRunning = false + do { + let jsonData = try exportRequest.jsonUTF8Data() + do { + let span = try JSONDecoder().decode(OtlpSpan.self, from: jsonData) + exportedSpans.append(span) + } catch { + print("Decode Error: \(error)") + } + return .success + } catch { + return .failure } + } + + public func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + guard isRunning else { return .failure } + return .success + } + + public func reset() { + exportedSpans.removeAll() + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + exportedSpans.removeAll() + isRunning = false + } } diff --git a/Sources/Exporters/OpenTelemetryProtocolHttp/OtlpHttpExporterBase.swift b/Sources/Exporters/OpenTelemetryProtocolHttp/OtlpHttpExporterBase.swift index da42cf61..269f0f8d 100644 --- a/Sources/Exporters/OpenTelemetryProtocolHttp/OtlpHttpExporterBase.swift +++ b/Sources/Exporters/OpenTelemetryProtocolHttp/OtlpHttpExporterBase.swift @@ -1,45 +1,46 @@ // // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 -// +// import Foundation import SwiftProtobuf import OpenTelemetryProtocolExporterCommon public class OtlpHttpExporterBase { - let endpoint: URL - let httpClient: HTTPClient + let endpoint: URL + let httpClient: HTTPClient let envVarHeaders : [(String,String)]? 
- + let config : OtlpConfiguration - public init(endpoint: URL, config: OtlpConfiguration = OtlpConfiguration(), useSession: URLSession? = nil, envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes) { - self.envVarHeaders = envVarHeaders - - self.endpoint = endpoint - self.config = config - if let providedSession = useSession { - self.httpClient = HTTPClient(session: providedSession) - } else { - self.httpClient = HTTPClient() - } - } + public init(endpoint: URL, config: OtlpConfiguration = OtlpConfiguration(), useSession: URLSession? = nil, envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes) { + self.envVarHeaders = envVarHeaders - public func createRequest(body: Message, endpoint: URL) -> URLRequest { - var request = URLRequest(url: endpoint) - - do { - request.httpMethod = "POST" - request.httpBody = try body.serializedData() - request.setValue(Headers.getUserAgentHeader(), forHTTPHeaderField: Constants.HTTP.userAgent) - request.setValue("application/x-protobuf", forHTTPHeaderField: "Content-Type") - } catch { - print("Error serializing body: \(error)") - } - - return request + self.endpoint = endpoint + self.config = config + if let providedSession = useSession { + self.httpClient = HTTPClient(session: providedSession) + } else { + self.httpClient = HTTPClient() } + } + + public func createRequest(body: Message, endpoint: URL) -> URLRequest { + var request = URLRequest(url: endpoint) - public func shutdown(explicitTimeout: TimeInterval?) { + do { + request.httpMethod = "POST" + request.httpBody = try body.serializedData() + request.setValue(Headers.getUserAgentHeader(), forHTTPHeaderField: Constants.HTTP.userAgent) + request.setValue("application/x-protobuf", forHTTPHeaderField: "Content-Type") + } catch { + print("Error serializing body: \(error)") } + + return request + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + + } } diff --git a/Sources/Exporters/OpenTelemetryProtocolHttp/logs/OtlpHttpLogExporter.swift b/Sources/Exporters/OpenTelemetryProtocolHttp/logs/OtlpHttpLogExporter.swift index 49958a44..d3e9cb97 100644 --- a/Sources/Exporters/OpenTelemetryProtocolHttp/logs/OtlpHttpLogExporter.swift +++ b/Sources/Exporters/OpenTelemetryProtocolHttp/logs/OtlpHttpLogExporter.swift @@ -1,79 +1,79 @@ // // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 -// +// import Foundation import OpenTelemetrySdk import OpenTelemetryProtocolExporterCommon public func defaultOltpHttpLoggingEndpoint() -> URL { - URL(string: "http://localhost:4318/v1/logs")! + URL(string: "http://localhost:4318/v1/logs")! } public class OtlpHttpLogExporter : OtlpHttpExporterBase, LogRecordExporter { - - var pendingLogRecords: [ReadableLogRecord] = [] + + var pendingLogRecords: [ReadableLogRecord] = [] + + override public init(endpoint: URL = defaultOltpHttpLoggingEndpoint(), + config: OtlpConfiguration = OtlpConfiguration(), + useSession: URLSession? = nil, + envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes){ + super.init(endpoint: endpoint, config: config, useSession: useSession, envVarHeaders: envVarHeaders) + } + + public func export(logRecords: [OpenTelemetrySdk.ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> OpenTelemetrySdk.ExportResult { + pendingLogRecords.append(contentsOf: logRecords) + let sendingLogRecords = pendingLogRecords + pendingLogRecords = [] - override public init(endpoint: URL = defaultOltpHttpLoggingEndpoint(), - config: OtlpConfiguration = OtlpConfiguration(), - useSession: URLSession? 
= nil, - envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes){ - super.init(endpoint: endpoint, config: config, useSession: useSession, envVarHeaders: envVarHeaders) + let body = Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest.with { request in + request.resourceLogs = LogRecordAdapter.toProtoResourceRecordLog(logRecordList: sendingLogRecords) } - public func export(logRecords: [OpenTelemetrySdk.ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> OpenTelemetrySdk.ExportResult { - pendingLogRecords.append(contentsOf: logRecords) - let sendingLogRecords = pendingLogRecords - pendingLogRecords = [] - - let body = Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest.with { request in - request.resourceLogs = LogRecordAdapter.toProtoResourceRecordLog(logRecordList: sendingLogRecords) - } - - var request = createRequest(body: body, endpoint: endpoint) + var request = createRequest(body: body, endpoint: endpoint) request.timeoutInterval = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude , config.timeout) - httpClient.send(request: request) { [weak self] result in - switch result { - case .success(_): - break - case .failure(let error): - self?.pendingLogRecords.append(contentsOf: sendingLogRecords) - print(error) - } - } - - return .success - } - - public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { - self.flush(explicitTimeout: explicitTimeout) + httpClient.send(request: request) { [weak self] result in + switch result { + case .success(_): + break + case .failure(let error): + self?.pendingLogRecords.append(contentsOf: sendingLogRecords) + print(error) + } } + return .success + } + + public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { + self.flush(explicitTimeout: explicitTimeout) + } + public func flush(explicitTimeout: TimeInterval? = nil) -> ExportResult { - var exporterResult: ExportResult = .success - - if !pendingLogRecords.isEmpty { - let body = Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest.with { request in - request.resourceLogs = LogRecordAdapter.toProtoResourceRecordLog(logRecordList: pendingLogRecords) - } - let semaphore = DispatchSemaphore(value: 0) - var request = createRequest(body: body, endpoint: endpoint) - request.timeoutInterval = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude , config.timeout) - - httpClient.send(request: request) { result in - switch result { - case .success(_): - exporterResult = ExportResult.success - case .failure(let error): - print(error) - exporterResult = ExportResult.failure - } - semaphore.signal() - } - semaphore.wait() + var exporterResult: ExportResult = .success + + if !pendingLogRecords.isEmpty { + let body = Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest.with { request in + request.resourceLogs = LogRecordAdapter.toProtoResourceRecordLog(logRecordList: pendingLogRecords) + } + let semaphore = DispatchSemaphore(value: 0) + var request = createRequest(body: body, endpoint: endpoint) + request.timeoutInterval = min(explicitTimeout ?? 
TimeInterval.greatestFiniteMagnitude , config.timeout) + + httpClient.send(request: request) { result in + switch result { + case .success(_): + exporterResult = ExportResult.success + case .failure(let error): + print(error) + exporterResult = ExportResult.failure } - - return exporterResult + semaphore.signal() + } + semaphore.wait() } + + return exporterResult + } } diff --git a/Sources/Exporters/OpenTelemetryProtocolHttp/metric/OltpHTTPMetricExporter.swift b/Sources/Exporters/OpenTelemetryProtocolHttp/metric/OltpHTTPMetricExporter.swift index 0047bbe0..2ac04ab8 100644 --- a/Sources/Exporters/OpenTelemetryProtocolHttp/metric/OltpHTTPMetricExporter.swift +++ b/Sources/Exporters/OpenTelemetryProtocolHttp/metric/OltpHTTPMetricExporter.swift @@ -8,61 +8,61 @@ import OpenTelemetryProtocolExporterCommon import Foundation public func defaultOltpHTTPMetricsEndpoint() -> URL { - URL(string: "http://localhost:4318/v1/metrics")! + URL(string: "http://localhost:4318/v1/metrics")! } public class OtlpHttpMetricExporter: OtlpHttpExporterBase, MetricExporter { - var pendingMetrics: [Metric] = [] - - override + var pendingMetrics: [Metric] = [] + + override public init(endpoint: URL = defaultOltpHTTPMetricsEndpoint(), config : OtlpConfiguration = OtlpConfiguration(), useSession: URLSession? = nil, envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes) { super.init(endpoint: endpoint, config: config, useSession: useSession, envVarHeaders: envVarHeaders) + } + + public func export(metrics: [Metric], shouldCancel: (() -> Bool)?) -> MetricExporterResultCode { + pendingMetrics.append(contentsOf: metrics) + let sendingMetrics = pendingMetrics + pendingMetrics = [] + let body = Opentelemetry_Proto_Collector_Metrics_V1_ExportMetricsServiceRequest.with { + $0.resourceMetrics = MetricsAdapter.toProtoResourceMetrics(metricDataList: sendingMetrics) } - public func export(metrics: [Metric], shouldCancel: (() -> Bool)?) 
-> MetricExporterResultCode { - pendingMetrics.append(contentsOf: metrics) - let sendingMetrics = pendingMetrics - pendingMetrics = [] - let body = Opentelemetry_Proto_Collector_Metrics_V1_ExportMetricsServiceRequest.with { - $0.resourceMetrics = MetricsAdapter.toProtoResourceMetrics(metricDataList: sendingMetrics) - } - - let request = createRequest(body: body, endpoint: endpoint) - httpClient.send(request: request) { [weak self] result in - switch result { - case .success(_): - break - case .failure(let error): - self?.pendingMetrics.append(contentsOf: sendingMetrics) - print(error) - } - } - - return .success + let request = createRequest(body: body, endpoint: endpoint) + httpClient.send(request: request) { [weak self] result in + switch result { + case .success(_): + break + case .failure(let error): + self?.pendingMetrics.append(contentsOf: sendingMetrics) + print(error) + } } - - public func flush() -> MetricExporterResultCode { - var exporterResult: MetricExporterResultCode = .success - - if !pendingMetrics.isEmpty { - let body = Opentelemetry_Proto_Collector_Metrics_V1_ExportMetricsServiceRequest.with { - $0.resourceMetrics = MetricsAdapter.toProtoResourceMetrics(metricDataList: pendingMetrics) - } - - let semaphore = DispatchSemaphore(value: 0) - let request = createRequest(body: body, endpoint: endpoint) - httpClient.send(request: request) { result in - switch result { - case .success(_): - break - case .failure(let error): - print(error) - exporterResult = MetricExporterResultCode.failureNotRetryable - } - semaphore.signal() - } - semaphore.wait() + + return .success + } + + public func flush() -> MetricExporterResultCode { + var exporterResult: MetricExporterResultCode = .success + + if !pendingMetrics.isEmpty { + let body = Opentelemetry_Proto_Collector_Metrics_V1_ExportMetricsServiceRequest.with { + $0.resourceMetrics = MetricsAdapter.toProtoResourceMetrics(metricDataList: pendingMetrics) + } + + let semaphore = DispatchSemaphore(value: 0) + let request = createRequest(body: body, endpoint: endpoint) + httpClient.send(request: request) { result in + switch result { + case .success(_): + break + case .failure(let error): + print(error) + exporterResult = MetricExporterResultCode.failureNotRetryable } - return exporterResult + semaphore.signal() + } + semaphore.wait() } + return exporterResult + } } diff --git a/Sources/Exporters/OpenTelemetryProtocolHttp/trace/OtlpHttpTraceExporter.swift b/Sources/Exporters/OpenTelemetryProtocolHttp/trace/OtlpHttpTraceExporter.swift index 3faced8d..b0545865 100644 --- a/Sources/Exporters/OpenTelemetryProtocolHttp/trace/OtlpHttpTraceExporter.swift +++ b/Sources/Exporters/OpenTelemetryProtocolHttp/trace/OtlpHttpTraceExporter.swift @@ -8,71 +8,71 @@ import OpenTelemetrySdk import OpenTelemetryProtocolExporterCommon public func defaultOltpHttpTracesEndpoint() -> URL { - URL(string: "http://localhost:4318/v1/traces")! + URL(string: "http://localhost:4318/v1/traces")! } public class OtlpHttpTraceExporter: OtlpHttpExporterBase, SpanExporter { - - var pendingSpans: [SpanData] = [] - override + + var pendingSpans: [SpanData] = [] + override public init(endpoint: URL = defaultOltpHttpTracesEndpoint(), config: OtlpConfiguration = OtlpConfiguration(), - useSession: URLSession? = nil, envVarHeaders: [(String,String)]? = EnvVarHeaders.attributes) { + useSession: URLSession? = nil, envVarHeaders: [(String,String)]? 
= EnvVarHeaders.attributes) { super.init(endpoint: endpoint, config: config, useSession: useSession) + } + + public func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { + pendingSpans.append(contentsOf: spans) + let sendingSpans = pendingSpans + pendingSpans = [] + + let body = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { + $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: spans) } - - public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - pendingSpans.append(contentsOf: spans) - let sendingSpans = pendingSpans - pendingSpans = [] - - let body = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { - $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: spans) - } - var request = createRequest(body: body, endpoint: endpoint) + var request = createRequest(body: body, endpoint: endpoint) if let headers = envVarHeaders { headers.forEach { (key, value) in request.addValue(value, forHTTPHeaderField: key) } - + } else if let headers = config.headers { headers.forEach { (key, value) in request.addValue(value, forHTTPHeaderField: key) } } - httpClient.send(request: request) { [weak self] result in - switch result { - case .success: - break - case .failure(let error): - self?.pendingSpans.append(contentsOf: sendingSpans) - print(error) - } - } - return .success + httpClient.send(request: request) { [weak self] result in + switch result { + case .success: + break + case .failure(let error): + self?.pendingSpans.append(contentsOf: sendingSpans) + print(error) + } } - - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - var resultValue: SpanExporterResultCode = .success - if !pendingSpans.isEmpty { - let body = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { - $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: pendingSpans) - } - let semaphore = DispatchSemaphore(value: 0) - let request = createRequest(body: body, endpoint: endpoint) - - httpClient.send(request: request) { result in - switch result { - case .success: - break - case .failure(let error): - print(error) - resultValue = .failure - } - semaphore.signal() - } - semaphore.wait() + return .success + } + + public func flush(explicitTimeout: TimeInterval? 
= nil) -> SpanExporterResultCode { + var resultValue: SpanExporterResultCode = .success + if !pendingSpans.isEmpty { + let body = Opentelemetry_Proto_Collector_Trace_V1_ExportTraceServiceRequest.with { + $0.resourceSpans = SpanAdapter.toProtoResourceSpans(spanDataList: pendingSpans) + } + let semaphore = DispatchSemaphore(value: 0) + let request = createRequest(body: body, endpoint: endpoint) + + httpClient.send(request: request) { result in + switch result { + case .success: + break + case .failure(let error): + print(error) + resultValue = .failure } - return resultValue + semaphore.signal() + } + semaphore.wait() } + return resultValue + } } diff --git a/Sources/Exporters/Persistence/Export/DataExportWorker.swift b/Sources/Exporters/Persistence/Export/DataExportWorker.swift index d64b6664..bd078588 100644 --- a/Sources/Exporters/Persistence/Export/DataExportWorker.swift +++ b/Sources/Exporters/Persistence/Export/DataExportWorker.swift @@ -8,103 +8,103 @@ import Foundation // a protocol for an exporter of `Data` to which a `DataExportWorker` can delegate persisted // data export internal protocol DataExporter { - func export(data: Data, explicitTimeout: TimeInterval?) -> DataExportStatus + func export(data: Data) -> DataExportStatus } // a protocol needed for mocking `DataExportWorker` internal protocol DataExportWorkerProtocol { - func flush() -> Bool + func flush() -> Bool } internal class DataExportWorker: DataExportWorkerProtocol { - /// Queue to execute exports. - internal let queue = DispatchQueue(label: "com.otel.persistence.dataExportWorker", target: .global(qos: .utility)) - /// File reader providing data to export. - private let fileReader: FileReader - /// Data exporter sending data to server. - private let dataExporter: DataExporter - /// Variable system conditions determining if export should be performed. - private let exportCondition: () -> Bool - - /// Delay used to schedule consecutive exports. - private var delay: Delay - - /// Export work scheduled by this worker. - private var exportWork: DispatchWorkItem? - - init( - fileReader: FileReader, - dataExporter: DataExporter, - exportCondition: @escaping () -> Bool, - delay: Delay - ) { - self.fileReader = fileReader - self.exportCondition = exportCondition - self.dataExporter = dataExporter - self.delay = delay - - let exportWork = DispatchWorkItem { [weak self] in - guard let self = self else { - return - } - - let isSystemReady = self.exportCondition() - let nextBatch = isSystemReady ? self.fileReader.readNextBatch() : nil - if let batch = nextBatch { - // Export batch - let exportStatus = self.dataExporter.export(data: batch.data, explicitTimeout: nil) - - // Delete or keep batch depending on the export status - if exportStatus.needsRetry { - self.delay.increase() - } else { - self.fileReader.markBatchAsRead(batch) - self.delay.decrease() - } - } else { - self.delay.increase() - } - - self.scheduleNextExport(after: self.delay.current) + /// Queue to execute exports. + internal let queue = DispatchQueue(label: "com.otel.persistence.dataExportWorker", target: .global(qos: .utility)) + /// File reader providing data to export. + private let fileReader: FileReader + /// Data exporter sending data to server. + private let dataExporter: DataExporter + /// Variable system conditions determining if export should be performed. + private let exportCondition: () -> Bool + + /// Delay used to schedule consecutive exports. + private var delay: Delay + + /// Export work scheduled by this worker. 
+ private var exportWork: DispatchWorkItem? + + init( + fileReader: FileReader, + dataExporter: DataExporter, + exportCondition: @escaping () -> Bool, + delay: Delay + ) { + self.fileReader = fileReader + self.exportCondition = exportCondition + self.dataExporter = dataExporter + self.delay = delay + + let exportWork = DispatchWorkItem { [weak self] in + guard let self = self else { + return + } + + let isSystemReady = self.exportCondition() + let nextBatch = isSystemReady ? self.fileReader.readNextBatch() : nil + if let batch = nextBatch { + // Export batch + let exportStatus = self.dataExporter.export(data: batch.data) + + // Delete or keep batch depending on the export status + if exportStatus.needsRetry { + self.delay.increase() + } else { + self.fileReader.markBatchAsRead(batch) + self.delay.decrease() } - - self.exportWork = exportWork - - scheduleNextExport(after: self.delay.current) + } else { + self.delay.increase() + } + + self.scheduleNextExport(after: self.delay.current) } - - private func scheduleNextExport(after delay: TimeInterval) { - guard let work = exportWork else { - return - } - - queue.asyncAfter(deadline: .now() + delay, execute: work) + + self.exportWork = exportWork + + scheduleNextExport(after: self.delay.current) + } + + private func scheduleNextExport(after delay: TimeInterval) { + guard let work = exportWork else { + return } - - /// This method gets remaining files at once, and exports them - /// It assures that periodic exporter cannot read or export the files while the flush is being processed - internal func flush() -> Bool { - let success = queue.sync { - self.fileReader.onRemainingBatches { - let exportStatus = self.dataExporter.export(data: $0.data, explicitTimeout: nil) - if !exportStatus.needsRetry { - self.fileReader.markBatchAsRead($0) - } - } + + queue.asyncAfter(deadline: .now() + delay, execute: work) + } + + /// This method gets remaining files at once, and exports them + /// It assures that periodic exporter cannot read or export the files while the flush is being processed + internal func flush() -> Bool { + let success = queue.sync { + self.fileReader.onRemainingBatches { + let exportStatus = self.dataExporter.export(data: $0.data) + if !exportStatus.needsRetry { + self.fileReader.markBatchAsRead($0) } - return success + } } - - /// Cancels scheduled exports and stops scheduling next ones. - /// - It does not affect the export that has already begun. - /// - It blocks the caller thread if called in the middle of export execution. - internal func cancelSynchronously() { - queue.sync(flags: .barrier) { - // This cancellation must be performed on the `queue` to ensure that it is not called - // in the middle of a `DispatchWorkItem` execution - otherwise, as the pending block would be - // fully executed, it will schedule another export by calling `nextScheduledWork(after:)` at the end. - self.exportWork?.cancel() - self.exportWork = nil - } + return success + } + + /// Cancels scheduled exports and stops scheduling next ones. + /// - It does not affect the export that has already begun. + /// - It blocks the caller thread if called in the middle of export execution. + internal func cancelSynchronously() { + queue.sync(flags: .barrier) { + // This cancellation must be performed on the `queue` to ensure that it is not called + // in the middle of a `DispatchWorkItem` execution - otherwise, as the pending block would be + // fully executed, it will schedule another export by calling `nextScheduledWork(after:)` at the end. 
+ self.exportWork?.cancel() + self.exportWork = nil } + } } diff --git a/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift index eaf029c2..62e3fd62 100644 --- a/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift +++ b/Sources/Exporters/Persistence/PersistenceExporterDecorator.swift @@ -10,7 +10,7 @@ import OpenTelemetrySdk protocol DecoratedExporter { associatedtype SignalType - func export(values: [SignalType], explicitTimeout: TimeInterval?) -> DataExportStatus + func export(values: [SignalType]) -> DataExportStatus } // a generic decorator of `DecoratedExporter` adding filesystem persistence of batches of `[T.SignalType]`. @@ -25,7 +25,7 @@ internal class PersistenceExporterDecorator where T: DecoratedExporter, T.Sig self.decoratedExporter = decoratedExporter } - func export(data: Data, explicitTimeout: TimeInterval?) -> DataExportStatus { + func export(data: Data) -> DataExportStatus { // decode batches of `[T.SignalType]` from the raw data. // the data is made of batches of comma-suffixed JSON arrays, so in order to utilize // `JSONDecoder`, add a "[" prefix and "null]" suffix making the data a valid @@ -41,7 +41,7 @@ internal class PersistenceExporterDecorator where T: DecoratedExporter, T.Sig from: arrayData ).compactMap { $0 }.flatMap { $0 } - return decoratedExporter.export(values: exportables, explicitTimeout: explicitTimeout) + return decoratedExporter.export(values: exportables) } catch { return DataExportStatus(needsRetry: false) } diff --git a/Sources/Exporters/Persistence/PersistenceLogExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceLogExporterDecorator.swift index 281a4cec..9dd430fd 100644 --- a/Sources/Exporters/Persistence/PersistenceLogExporterDecorator.swift +++ b/Sources/Exporters/Persistence/PersistenceLogExporterDecorator.swift @@ -20,8 +20,8 @@ public class PersistenceLogExporterDecorator: LogRecordExporter { self.logRecordExporter = logRecordExporter } - func export(values: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> DataExportStatus { - let result = logRecordExporter.export(logRecords: values, explicitTimeout: explicitTimeout) + func export(values: [ReadableLogRecord]) -> DataExportStatus { + let result = logRecordExporter.export(logRecords: values) return DataExportStatus(needsRetry: result == .failure) } } @@ -43,7 +43,7 @@ public class PersistenceLogExporterDecorator: LogRecordExporter { self.logRecordExporter = logRecordExporter } - public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval?) -> ExportResult { + public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> ExportResult { do { try persistenceExporter.export(values: logRecords) return .success @@ -52,12 +52,12 @@ public class PersistenceLogExporterDecorator: LogRecordExporter { } } - public func shutdown(explicitTimeout: TimeInterval?) { + public func shutdown(explicitTimeout: TimeInterval? = nil) { persistenceExporter.flush() logRecordExporter.shutdown(explicitTimeout: explicitTimeout) } - public func forceFlush(explicitTimeout: TimeInterval?) -> ExportResult { + public func forceFlush(explicitTimeout: TimeInterval? 
= nil) -> ExportResult { persistenceExporter.flush() return logRecordExporter.forceFlush(explicitTimeout: explicitTimeout) diff --git a/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift index 830a6041..b0e4c463 100644 --- a/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift +++ b/Sources/Exporters/Persistence/PersistenceMetricExporterDecorator.swift @@ -18,7 +18,7 @@ public class PersistenceMetricExporterDecorator: MetricExporter { self.metricExporter = metricExporter } - func export(values: [Metric], explicitTimeout: TimeInterval?) -> DataExportStatus { + func export(values: [Metric]) -> DataExportStatus { let result = metricExporter.export(metrics: values, shouldCancel: nil) return DataExportStatus(needsRetry: result == .failureRetryable) } diff --git a/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift b/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift index eb45421e..ecae7007 100644 --- a/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift +++ b/Sources/Exporters/Persistence/PersistenceSpanExporterDecorator.swift @@ -10,7 +10,7 @@ import OpenTelemetrySdk // specialization of `PersistenceExporterDecorator` for `SpanExporter`. public class PersistenceSpanExporterDecorator: SpanExporter { - struct SpanDecoratedExporter: DecoratedExporter { + struct SpanDecoratedExporter: DecoratedExporter { typealias SignalType = SpanData private let spanExporter: SpanExporter @@ -19,8 +19,8 @@ public class PersistenceSpanExporterDecorator: SpanExporter { self.spanExporter = spanExporter } - func export(values: [SpanData], explicitTimeout: TimeInterval?) -> DataExportStatus { - _ = spanExporter.export(spans: values, explicitTimeout: explicitTimeout) + func export(values: [SpanData]) -> DataExportStatus { + _ = spanExporter.export(spans: values) return DataExportStatus(needsRetry: false) } } diff --git a/Sources/Exporters/Stdout/StdoutExporter.swift b/Sources/Exporters/Stdout/StdoutExporter.swift index 72a0b568..005cc09f 100644 --- a/Sources/Exporters/Stdout/StdoutExporter.swift +++ b/Sources/Exporters/Stdout/StdoutExporter.swift @@ -14,7 +14,7 @@ public class StdoutExporter: SpanExporter { self.isDebug = isDebug } - public func export(spans: [SpanData]) -> SpanExporterResultCode { + public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { let jsonEncoder = JSONEncoder() for span in spans { if isDebug { @@ -51,11 +51,11 @@ public class StdoutExporter: SpanExporter { return .success } - public func flush() -> SpanExporterResultCode { + public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { return .success } - public func shutdown() {} + public func shutdown(explicitTimeout: TimeInterval?) {} } private struct SpanExporterData { diff --git a/Sources/Exporters/Zipkin/ZipkinTraceExporter.swift b/Sources/Exporters/Zipkin/ZipkinTraceExporter.swift index e9ae3639..701b11bf 100644 --- a/Sources/Exporters/Zipkin/ZipkinTraceExporter.swift +++ b/Sources/Exporters/Zipkin/ZipkinTraceExporter.swift @@ -51,11 +51,11 @@ public class ZipkinTraceExporter: SpanExporter { return status } - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { + public func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { return .success } - public func shutdown(explicitTimeout: TimeInterval?) { + public func shutdown(explicitTimeout: TimeInterval? 
= nil) { } func encodeSpans(spans: [SpanData]) -> [ZipkinSpan] { diff --git a/Sources/Instrumentation/SignPostIntegration/SignPostIntegration.swift b/Sources/Instrumentation/SignPostIntegration/SignPostIntegration.swift index f20b330f..fde5bcaa 100644 --- a/Sources/Instrumentation/SignPostIntegration/SignPostIntegration.swift +++ b/Sources/Instrumentation/SignPostIntegration/SignPostIntegration.swift @@ -13,24 +13,26 @@ import OpenTelemetrySdk /// A span processor that decorates spans with the origin attribute @available(macOS 10.14, iOS 12.0, tvOS 12.0, *) public class SignPostIntegration: SpanProcessor { - public let isStartRequired = true - public let isEndRequired = true - public let osLog = OSLog(subsystem: "OpenTelemetry", category: .pointsOfInterest) - - public init() {} - - public func onStart(parentContext: SpanContext?, span: ReadableSpan) { - let signpostID = OSSignpostID(log: osLog, object: self) - os_signpost(.begin, log: osLog, name: "Span", signpostID: signpostID, "%{public}@", span.name) - } - - public func onEnd(span: ReadableSpan) { - let signpostID = OSSignpostID(log: osLog, object: self) - os_signpost(.end, log: osLog, name: "Span", signpostID: signpostID) - } - - public func shutdown() {} - public func forceFlush(timeout: TimeInterval? = nil) {} + + public let isStartRequired = true + public let isEndRequired = true + public let osLog = OSLog(subsystem: "OpenTelemetry", category: .pointsOfInterest) + + public init() {} + + public func onStart(parentContext: SpanContext?, span: ReadableSpan) { + let signpostID = OSSignpostID(log: osLog, object: self) + os_signpost(.begin, log: osLog, name: "Span", signpostID: signpostID, "%{public}@", span.name) + } + + public func onEnd(span: ReadableSpan) { + let signpostID = OSSignpostID(log: osLog, object: self) + os_signpost(.end, log: osLog, name: "Span", signpostID: signpostID) + } + + public func forceFlush(timeout: TimeInterval? = nil) {} + public func shutdown(explicitTimeout: TimeInterval?) { + } } #endif diff --git a/Sources/OpenTelemetrySdk/Logs/Export/InMemoryLogRecordExporter.swift b/Sources/OpenTelemetrySdk/Logs/Export/InMemoryLogRecordExporter.swift index d5fd6cde..e5488960 100644 --- a/Sources/OpenTelemetrySdk/Logs/Export/InMemoryLogRecordExporter.swift +++ b/Sources/OpenTelemetrySdk/Logs/Export/InMemoryLogRecordExporter.swift @@ -6,30 +6,30 @@ import Foundation public class InMemoryLogRecordExporter : LogRecordExporter { - private var finishedLogRecords = [ReadableLogRecord]() - private var isRunning = true - - public func getFinishedLogRecords() -> [ReadableLogRecord] { - return finishedLogRecords - } - + private var finishedLogRecords = [ReadableLogRecord]() + private var isRunning = true + + public func getFinishedLogRecords() -> [ReadableLogRecord] { + return finishedLogRecords + } + public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> ExportResult { - guard isRunning else { - return .failure - } - finishedLogRecords.append(contentsOf: logRecords) - return .success + guard isRunning else { + return .failure } - + finishedLogRecords.append(contentsOf: logRecords) + return .success + } + public func shutdown(explicitTimeout: TimeInterval? = nil) { - finishedLogRecords.removeAll() - isRunning = false - } - - public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { - guard isRunning else { - return .failure - } - return .success + finishedLogRecords.removeAll() + isRunning = false + } + + public func forceFlush(explicitTimeout: TimeInterval? 
= nil) -> ExportResult { + guard isRunning else { + return .failure } + return .success + } } diff --git a/Sources/OpenTelemetrySdk/Logs/Export/LogRecordExporter.swift b/Sources/OpenTelemetrySdk/Logs/Export/LogRecordExporter.swift index 0e7fcf53..9f29cb70 100644 --- a/Sources/OpenTelemetrySdk/Logs/Export/LogRecordExporter.swift +++ b/Sources/OpenTelemetrySdk/Logs/Export/LogRecordExporter.swift @@ -6,14 +6,28 @@ import Foundation public protocol LogRecordExporter { - + func export(logRecords: [ReadableLogRecord], explicitTimeout : TimeInterval?) -> ExportResult - - /// Shutdown the log exporter - /// + + /// Shutdown the log exporter + /// func shutdown(explicitTimeout: TimeInterval?) - - /// Processes all the log records that have not yet been processed - /// + + /// Processes all the log records that have not yet been processed + /// func forceFlush(explicitTimeout: TimeInterval?) -> ExportResult } + +public extension LogRecordExporter { + func export(logRecords: [ReadableLogRecord]) -> ExportResult { + return export(logRecords: logRecords, explicitTimeout: nil) + } + + func shutdown() { + shutdown(explicitTimeout: nil) + } + + func forceFlush() -> ExportResult { + return forceFlush(explicitTimeout: nil) + } +} diff --git a/Sources/OpenTelemetrySdk/Logs/Export/MultiLogRecordExporter.swift b/Sources/OpenTelemetrySdk/Logs/Export/MultiLogRecordExporter.swift index a26ef237..615aac33 100644 --- a/Sources/OpenTelemetrySdk/Logs/Export/MultiLogRecordExporter.swift +++ b/Sources/OpenTelemetrySdk/Logs/Export/MultiLogRecordExporter.swift @@ -1,36 +1,36 @@ // // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 -// +// import Foundation public class MultiLogRecordExporter : LogRecordExporter { - var logRecordExporters : [LogRecordExporter] - - public init(logRecordExporters: [LogRecordExporter]) { - self.logRecordExporters = logRecordExporters - } - - public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> ExportResult { - var result = ExportResult.success - logRecordExporters.forEach { - result.mergeResultCode(newResultCode: $0.export(logRecords: logRecords, explicitTimeout: explicitTimeout)) - } - return result + var logRecordExporters : [LogRecordExporter] + + public init(logRecordExporters: [LogRecordExporter]) { + self.logRecordExporters = logRecordExporters + } + + public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> ExportResult { + var result = ExportResult.success + logRecordExporters.forEach { + result.mergeResultCode(newResultCode: $0.export(logRecords: logRecords, explicitTimeout: explicitTimeout)) } - - public func shutdown(explicitTimeout: TimeInterval? = nil) { - logRecordExporters.forEach { - $0.shutdown(explicitTimeout: explicitTimeout) - } + return result + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + logRecordExporters.forEach { + $0.shutdown(explicitTimeout: explicitTimeout) } - - public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { - var result = ExportResult.success - logRecordExporters.forEach { - result.mergeResultCode(newResultCode: $0.forceFlush(explicitTimeout: explicitTimeout)) - } - return result + } + + public func forceFlush(explicitTimeout: TimeInterval? 
= nil) -> ExportResult { + var result = ExportResult.success + logRecordExporters.forEach { + result.mergeResultCode(newResultCode: $0.forceFlush(explicitTimeout: explicitTimeout)) } + return result + } } diff --git a/Sources/OpenTelemetrySdk/Logs/Export/NoopLogRecordExporter.swift b/Sources/OpenTelemetrySdk/Logs/Export/NoopLogRecordExporter.swift index fae809d3..59b22342 100644 --- a/Sources/OpenTelemetrySdk/Logs/Export/NoopLogRecordExporter.swift +++ b/Sources/OpenTelemetrySdk/Logs/Export/NoopLogRecordExporter.swift @@ -6,17 +6,17 @@ import Foundation public class NoopLogRecordExporter : LogRecordExporter { - public static let instance = NoopLogRecordExporter() - + public static let instance = NoopLogRecordExporter() + public func export(logRecords: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) -> ExportResult { - .success - } - + .success + } + public func shutdown(explicitTimeout: TimeInterval? = nil) { - - } - + + } + public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { - .success - } + .success + } } diff --git a/Sources/OpenTelemetrySdk/Logs/LoggerSharedState.swift b/Sources/OpenTelemetrySdk/Logs/LoggerSharedState.swift index f7c07bb4..216ff49a 100644 --- a/Sources/OpenTelemetrySdk/Logs/LoggerSharedState.swift +++ b/Sources/OpenTelemetrySdk/Logs/LoggerSharedState.swift @@ -7,46 +7,46 @@ import Foundation import OpenTelemetryApi class LoggerSharedState { - var resource : Resource - var logLimits : LogLimits - var activeLogRecordProcessor : LogRecordProcessor - var clock : Clock - var hasBeenShutdown = false - var registeredLogRecordProcessors = [LogRecordProcessor]() - - init(resource: Resource, logLimits: LogLimits, processors: [LogRecordProcessor], clock: Clock) { - self.resource = resource - self.logLimits = logLimits - self.clock = clock - if processors.count > 1 { - self.activeLogRecordProcessor = MultiLogRecordProcessor(logRecordProcessors: processors) - self.registeredLogRecordProcessors = processors - } else if processors.count == 1 { - self.activeLogRecordProcessor = processors[0] - self.registeredLogRecordProcessors = processors - } else { - self.activeLogRecordProcessor = NoopLogRecordProcessor() - } - } - - func addLogRecordProcessor(_ logRecordProcessor: LogRecordProcessor) { - registeredLogRecordProcessors.append(logRecordProcessor) - if registeredLogRecordProcessors.count > 1 { - activeLogRecordProcessor = MultiLogRecordProcessor(logRecordProcessors: registeredLogRecordProcessors) - } else { - activeLogRecordProcessor = registeredLogRecordProcessors[0] - } + var resource : Resource + var logLimits : LogLimits + var activeLogRecordProcessor : LogRecordProcessor + var clock : Clock + var hasBeenShutdown = false + var registeredLogRecordProcessors = [LogRecordProcessor]() + + init(resource: Resource, logLimits: LogLimits, processors: [LogRecordProcessor], clock: Clock) { + self.resource = resource + self.logLimits = logLimits + self.clock = clock + if processors.count > 1 { + self.activeLogRecordProcessor = MultiLogRecordProcessor(logRecordProcessors: processors) + self.registeredLogRecordProcessors = processors + } else if processors.count == 1 { + self.activeLogRecordProcessor = processors[0] + self.registeredLogRecordProcessors = processors + } else { + self.activeLogRecordProcessor = NoopLogRecordProcessor() } - - func stop() { - if hasBeenShutdown { - return - } - _ = activeLogRecordProcessor.shutdown(explicitTimeout: nil) - hasBeenShutdown = true + } + + func addLogRecordProcessor(_ logRecordProcessor: LogRecordProcessor) { + 
registeredLogRecordProcessors.append(logRecordProcessor) + if registeredLogRecordProcessors.count > 1 { + activeLogRecordProcessor = MultiLogRecordProcessor(logRecordProcessors: registeredLogRecordProcessors) + } else { + activeLogRecordProcessor = registeredLogRecordProcessors[0] } - - func setLogLimits(limits: LogLimits) { - self.logLimits = limits + } + + func stop() { + if hasBeenShutdown { + return } + _ = activeLogRecordProcessor.shutdown() + hasBeenShutdown = true + } + + func setLogLimits(limits: LogLimits) { + self.logLimits = limits + } } diff --git a/Sources/OpenTelemetrySdk/Logs/Processors/BatchLogRecordProcessor.swift b/Sources/OpenTelemetrySdk/Logs/Processors/BatchLogRecordProcessor.swift index b03ac375..d2d101a2 100644 --- a/Sources/OpenTelemetrySdk/Logs/Processors/BatchLogRecordProcessor.swift +++ b/Sources/OpenTelemetrySdk/Logs/Processors/BatchLogRecordProcessor.swift @@ -7,138 +7,138 @@ import Foundation import OpenTelemetryApi public class BatchLogRecordProcessor : LogRecordProcessor { - - - fileprivate var worker : BatchWorker - - public init(logRecordExporter: LogRecordExporter, scheduleDelay: TimeInterval = 5, exportTimeout: TimeInterval = 30, maxQueueSize: Int = 2048, maxExportBatchSize: Int = 512, willExportCallback: ((inout [ReadableLogRecord])->Void)? = nil) { - worker = BatchWorker(logRecordExporter: logRecordExporter, scheduleDelay: scheduleDelay, exportTimeout: exportTimeout, maxQueueSize: maxQueueSize, maxExportBatchSize: maxExportBatchSize, willExportCallback: willExportCallback) - - worker.start() - } - - public func onEmit(logRecord: ReadableLogRecord) { - worker.emit(logRecord: logRecord) - } + + + fileprivate var worker : BatchWorker + + public init(logRecordExporter: LogRecordExporter, scheduleDelay: TimeInterval = 5, exportTimeout: TimeInterval = 30, maxQueueSize: Int = 2048, maxExportBatchSize: Int = 512, willExportCallback: ((inout [ReadableLogRecord])->Void)? = nil) { + worker = BatchWorker(logRecordExporter: logRecordExporter, scheduleDelay: scheduleDelay, exportTimeout: exportTimeout, maxQueueSize: maxQueueSize, maxExportBatchSize: maxExportBatchSize, willExportCallback: willExportCallback) + worker.start() + } + + public func onEmit(logRecord: ReadableLogRecord) { + worker.emit(logRecord: logRecord) + } + public func forceFlush(explicitTimeout: TimeInterval?) -> ExportResult { - forceFlush(timeout: explicitTimeout) - return .success - } - - public func forceFlush(timeout: TimeInterval? = nil) { - worker.forceFlush(explicitTimeout: timeout) - } - - + forceFlush(timeout: explicitTimeout) + return .success + } + + public func forceFlush(timeout: TimeInterval? = nil) { + worker.forceFlush(explicitTimeout: timeout) + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) -> ExportResult { - worker.cancel() - worker.shutdown(explicitTimeout: explicitTimeout) - return .success - } + worker.cancel() + worker.shutdown(explicitTimeout: explicitTimeout) + return .success + } } private class BatchWorker : Thread { - let logRecordExporter : LogRecordExporter - let scheduleDelay : TimeInterval - let maxQueueSize : Int - let maxExportBatchSize : Int - let exportTimeout : TimeInterval - let willExportCallback: ((inout [ReadableLogRecord])->Void)? 
- let halfMaxQueueSize: Int - private let cond = NSCondition() - var logRecordList = [ReadableLogRecord]() - var queue : OperationQueue + let logRecordExporter : LogRecordExporter + let scheduleDelay : TimeInterval + let maxQueueSize : Int + let maxExportBatchSize : Int + let exportTimeout : TimeInterval + let willExportCallback: ((inout [ReadableLogRecord])->Void)? + let halfMaxQueueSize: Int + private let cond = NSCondition() + var logRecordList = [ReadableLogRecord]() + var queue : OperationQueue + + init(logRecordExporter: LogRecordExporter, + scheduleDelay: TimeInterval, + exportTimeout: TimeInterval, + maxQueueSize: Int, + maxExportBatchSize: Int, + willExportCallback: ((inout [ReadableLogRecord])->Void)?) { - init(logRecordExporter: LogRecordExporter, - scheduleDelay: TimeInterval, - exportTimeout: TimeInterval, - maxQueueSize: Int, - maxExportBatchSize: Int, - willExportCallback: ((inout [ReadableLogRecord])->Void)?) { - - self.logRecordExporter = logRecordExporter - self.scheduleDelay = scheduleDelay - self.exportTimeout = exportTimeout - self.maxExportBatchSize = maxExportBatchSize - self.maxQueueSize = maxQueueSize - self.willExportCallback = willExportCallback - self.halfMaxQueueSize = maxQueueSize >> 1 - queue = OperationQueue() - queue.name = "BatchWorker Queue" - queue.maxConcurrentOperationCount = 1 + self.logRecordExporter = logRecordExporter + self.scheduleDelay = scheduleDelay + self.exportTimeout = exportTimeout + self.maxExportBatchSize = maxExportBatchSize + self.maxQueueSize = maxQueueSize + self.willExportCallback = willExportCallback + self.halfMaxQueueSize = maxQueueSize >> 1 + queue = OperationQueue() + queue.name = "BatchWorker Queue" + queue.maxConcurrentOperationCount = 1 + } + + func emit(logRecord: ReadableLogRecord) { + cond.lock() + defer { cond.unlock()} + if logRecordList.count == maxQueueSize { + // TODO: record a counter for dropped logs + return } - func emit(logRecord: ReadableLogRecord) { - cond.lock() - defer { cond.unlock()} - if logRecordList.count == maxQueueSize { - // TODO: record a counter for dropped logs - return - } - - // TODO: record a gauge for referenced logs - logRecordList.append(logRecord) - if logRecordList.count >= halfMaxQueueSize { - cond.broadcast() - } - } - - override func main() { - repeat { - autoreleasepool { - var logRecordsCopy : [ReadableLogRecord] - cond.lock() - if logRecordList.count < maxExportBatchSize { - repeat { - cond.wait(until: Date().addingTimeInterval(scheduleDelay)) - } while logRecordList.isEmpty - } - logRecordsCopy = logRecordList - logRecordList.removeAll() - cond.unlock() - self.exportBatch(logRecordList: logRecordsCopy, explicitTimeout: exportTimeout) - } - } while true + // TODO: record a gauge for referenced logs + logRecordList.append(logRecord) + if logRecordList.count >= halfMaxQueueSize { + cond.broadcast() } - - public func forceFlush(explicitTimeout: TimeInterval?) { - var logRecordsCopy: [ReadableLogRecord] + } + + override func main() { + repeat { + autoreleasepool { + var logRecordsCopy : [ReadableLogRecord] cond.lock() + if logRecordList.count < maxExportBatchSize { + repeat { + cond.wait(until: Date().addingTimeInterval(scheduleDelay)) + } while logRecordList.isEmpty + } logRecordsCopy = logRecordList logRecordList.removeAll() cond.unlock() - - exportBatch(logRecordList: logRecordsCopy, explicitTimeout: explicitTimeout) - } - + self.exportBatch(logRecordList: logRecordsCopy, explicitTimeout: exportTimeout) + } + } while true + } + + public func forceFlush(explicitTimeout: TimeInterval? 
= nil) { + var logRecordsCopy: [ReadableLogRecord] + cond.lock() + logRecordsCopy = logRecordList + logRecordList.removeAll() + cond.unlock() + exportBatch(logRecordList: logRecordsCopy, explicitTimeout: explicitTimeout) + } + + public func shutdown(explicitTimeout: TimeInterval?) { let timeout = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, exportTimeout) forceFlush(explicitTimeout: timeout) _ = logRecordExporter.shutdown(explicitTimeout: timeout) + } + + private func exportBatch(logRecordList: [ReadableLogRecord], explicitTimeout: TimeInterval? = nil) { + let exportOperation = BlockOperation { [weak self] in + self?.exportAction(logRecordList : logRecordList, explicitTimeout: explicitTimeout) } - - private func exportBatch(logRecordList: [ReadableLogRecord], explicitTimeout: TimeInterval?) { - let exportOperation = BlockOperation { [weak self] in - self?.exportAction(logRecordList : logRecordList, explicitTimeout: explicitTimeout) - } - let timeoutTimer = DispatchSource.makeTimerSource(queue: DispatchQueue.global()) - timeoutTimer.setEventHandler { exportOperation.cancel() } - let maxTimeOut = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, exportTimeout) - timeoutTimer.schedule(deadline: .now() + .milliseconds(Int(maxTimeOut.toMilliseconds)), leeway: .milliseconds(1)) - timeoutTimer.activate() - queue.addOperation(exportOperation) - queue.waitUntilAllOperationsAreFinished() - timeoutTimer.cancel() - } - - private func exportAction(logRecordList: [ReadableLogRecord], explicitTimeout: TimeInterval?) { - stride(from: 0, to: logRecordList.endIndex, by: maxExportBatchSize).forEach { - var logRecordToExport = logRecordList[$0 ..< min($0 + maxExportBatchSize, logRecordList.count)].map {$0} - willExportCallback?(&logRecordToExport) - _ = logRecordExporter.export(logRecords: logRecordToExport, explicitTimeout: explicitTimeout) - } - } + let timeoutTimer = DispatchSource.makeTimerSource(queue: DispatchQueue.global()) + timeoutTimer.setEventHandler { exportOperation.cancel() } + let maxTimeOut = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, exportTimeout) + timeoutTimer.schedule(deadline: .now() + .milliseconds(Int(maxTimeOut.toMilliseconds)), leeway: .milliseconds(1)) + timeoutTimer.activate() + queue.addOperation(exportOperation) + queue.waitUntilAllOperationsAreFinished() + timeoutTimer.cancel() + } + + private func exportAction(logRecordList: [ReadableLogRecord], explicitTimeout: TimeInterval? 
= nil) { + stride(from: 0, to: logRecordList.endIndex, by: maxExportBatchSize).forEach { + var logRecordToExport = logRecordList[$0 ..< min($0 + maxExportBatchSize, logRecordList.count)].map {$0} + willExportCallback?(&logRecordToExport) + _ = logRecordExporter.export(logRecords: logRecordToExport, explicitTimeout: explicitTimeout) } + } +} diff --git a/Sources/OpenTelemetrySdk/Logs/Processors/LogRecordProcessor.swift b/Sources/OpenTelemetrySdk/Logs/Processors/LogRecordProcessor.swift index f1bd28b6..86cf7e57 100644 --- a/Sources/OpenTelemetrySdk/Logs/Processors/LogRecordProcessor.swift +++ b/Sources/OpenTelemetrySdk/Logs/Processors/LogRecordProcessor.swift @@ -6,19 +6,37 @@ import Foundation public protocol LogRecordProcessor { - - /// Called when a Logger's LogRecordBuilder emits a log record - /// - /// - Parameter logRecord: the log record emitted - func onEmit(logRecord: ReadableLogRecord) - - /// Processes all span events that have not yet been processes - /// - /// - returns whether the task was successful + + /// Called when a Logger's LogRecordBuilder emits a log record + /// + /// - Parameter logRecord: the log record emitted + func onEmit(logRecord: ReadableLogRecord) + + /// Processes all span events that have not yet been processes + /// + /// - returns whether the task was successful func forceFlush(explicitTimeout: TimeInterval?) -> ExportResult - - /// Processes all span events that have not yet been processes anc closes used resources - /// - /// - returns whether the task was successful + + /// Processes all span events that have not yet been processes anc closes used resources + /// + /// - returns whether the task was successful func shutdown(explicitTimeout: TimeInterval?) -> ExportResult } + +public extension LogRecordProcessor { + + + /// Processes all span events that have not yet been processes + /// + /// - returns whether the task was successful + func forceFlush() -> ExportResult { + forceFlush(explicitTimeout: nil) + } + + /// Processes all span events that have not yet been processes anc closes used resources + /// + /// - returns whether the task was successful + func shutdown() -> ExportResult { + shutdown(explicitTimeout: nil) + } +} diff --git a/Sources/OpenTelemetrySdk/Logs/Processors/MultiLogRecordProcessor.swift b/Sources/OpenTelemetrySdk/Logs/Processors/MultiLogRecordProcessor.swift index 8e9da99c..392f7976 100644 --- a/Sources/OpenTelemetrySdk/Logs/Processors/MultiLogRecordProcessor.swift +++ b/Sources/OpenTelemetrySdk/Logs/Processors/MultiLogRecordProcessor.swift @@ -9,7 +9,7 @@ import OpenTelemetryApi public class MultiLogRecordProcessor : LogRecordProcessor { var logRecordProcessors = [LogRecordProcessor]() - public func forceFlush(explicitTimeout: TimeInterval?) -> ExportResult { + public func forceFlush(explicitTimeout: TimeInterval? = nil) -> ExportResult { var result : ExportResult = .success logRecordProcessors.forEach { result.mergeResultCode(newResultCode: $0.forceFlush(explicitTimeout: explicitTimeout)) @@ -17,7 +17,7 @@ public class MultiLogRecordProcessor : LogRecordProcessor { return result } - public func shutdown(explicitTimeout: TimeInterval?) -> ExportResult { + public func shutdown(explicitTimeout: TimeInterval? 
= nil) -> ExportResult { var result : ExportResult = .success logRecordProcessors.forEach { result.mergeResultCode(newResultCode: $0.shutdown(explicitTimeout: explicitTimeout)) diff --git a/Sources/OpenTelemetrySdk/Trace/Export/MultiSpanExporter.swift b/Sources/OpenTelemetrySdk/Trace/Export/MultiSpanExporter.swift index b0daa949..94505553 100644 --- a/Sources/OpenTelemetrySdk/Trace/Export/MultiSpanExporter.swift +++ b/Sources/OpenTelemetrySdk/Trace/Export/MultiSpanExporter.swift @@ -16,7 +16,7 @@ public class MultiSpanExporter: SpanExporter { self.spanExporters = spanExporters } - public func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { + public func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { var currentResultCode = SpanExporterResultCode.success for exporter in spanExporters { currentResultCode.mergeResultCode(newResultCode: exporter.export(spans: spans, explicitTimeout: explicitTimeout)) @@ -24,7 +24,7 @@ public class MultiSpanExporter: SpanExporter { return currentResultCode } - public func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { + public func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { var currentResultCode = SpanExporterResultCode.success for exporter in spanExporters { currentResultCode.mergeResultCode(newResultCode: exporter.flush(explicitTimeout: explicitTimeout)) @@ -32,7 +32,7 @@ public class MultiSpanExporter: SpanExporter { return currentResultCode } - public func shutdown(explicitTimeout: TimeInterval?) { + public func shutdown(explicitTimeout: TimeInterval? = nil) { for exporter in spanExporters { exporter.shutdown(explicitTimeout:explicitTimeout) } diff --git a/Sources/OpenTelemetrySdk/Trace/Export/SpanExporter.swift b/Sources/OpenTelemetrySdk/Trace/Export/SpanExporter.swift index 7cf623dc..b295f062 100644 --- a/Sources/OpenTelemetrySdk/Trace/Export/SpanExporter.swift +++ b/Sources/OpenTelemetrySdk/Trace/Export/SpanExporter.swift @@ -10,16 +10,16 @@ import Foundation /// To export data this MUST be register to the TracerSdk using a SimpleSpansProcessor or /// a BatchSampledSpansProcessor. public protocol SpanExporter: AnyObject { - /// Called to export sampled Spans. - /// - Parameter spans: the list of sampled Spans to be exported. + /// Called to export sampled Spans. + /// - Parameter spans: the list of sampled Spans to be exported. @discardableResult func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode - - ///Exports the collection of sampled Spans that have not yet been exported. + + ///Exports the collection of sampled Spans that have not yet been exported. func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode - - /// Called when TracerSdkFactory.shutdown()} is called, if this SpanExporter is registered - /// to a TracerSdkFactory object. - func shutdown(explicitTimeout: TimeInterval?) + + /// Called when TracerSdkFactory.shutdown()} is called, if this SpanExporter is registered + /// to a TracerSdkFactory object. + func shutdown(explicitTimeout: TimeInterval?) } public extension SpanExporter { @@ -36,20 +36,20 @@ public extension SpanExporter { /// The possible results for the export method. public enum SpanExporterResultCode { - /// The export operation finished successfully. - case success - - /// The export operation finished with an error. 
- case failure - - /// Merges the current result code with other result code - /// - Parameter newResultCode: the result code to merge with - mutating func mergeResultCode(newResultCode: SpanExporterResultCode) { - // If both results are success then return success. - if self == .success && newResultCode == .success { - self = .success - return - } - self = .failure + /// The export operation finished successfully. + case success + + /// The export operation finished with an error. + case failure + + /// Merges the current result code with other result code + /// - Parameter newResultCode: the result code to merge with + mutating func mergeResultCode(newResultCode: SpanExporterResultCode) { + // If both results are success then return success. + if self == .success && newResultCode == .success { + self = .success + return } + self = .failure + } } diff --git a/Sources/OpenTelemetrySdk/Trace/SpanProcessor.swift b/Sources/OpenTelemetrySdk/Trace/SpanProcessor.swift index 2f00697a..f08764d5 100644 --- a/Sources/OpenTelemetrySdk/Trace/SpanProcessor.swift +++ b/Sources/OpenTelemetrySdk/Trace/SpanProcessor.swift @@ -9,33 +9,33 @@ import OpenTelemetryApi /// SpanProcessor is the interface TracerSdk uses to allow synchronous hooks for when a Span /// is started or when a Span is ended. public protocol SpanProcessor { - /// Is true if this SpanProcessor requires start events. - var isStartRequired: Bool { get } - - /// Returns true if this SpanProcessor requires end events. - var isEndRequired: Bool { get } - - /// Called when a Span is started, if the Span.isRecording is true. - /// This method is called synchronously on the execution thread, should not throw or block the - /// execution thread. - /// - Parameter parentContext: the context of the span parent, if exists - /// - Parameter span: the ReadableSpan that just started - func onStart(parentContext: SpanContext?, span: ReadableSpan) - - /// Called when a Span is ended, if the Span.isRecording() is true. - /// This method is called synchronously on the execution thread, should not throw or block the - /// execution thread. - /// - Parameter span: the ReadableSpan that just ended. - mutating func onEnd(span: ReadableSpan) - - /// Called when TracerSdk.shutdown() is called. - /// Implementations must ensure that all span events are processed before returning + /// Is true if this SpanProcessor requires start events. + var isStartRequired: Bool { get } + + /// Returns true if this SpanProcessor requires end events. + var isEndRequired: Bool { get } + + /// Called when a Span is started, if the Span.isRecording is true. + /// This method is called synchronously on the execution thread, should not throw or block the + /// execution thread. + /// - Parameter parentContext: the context of the span parent, if exists + /// - Parameter span: the ReadableSpan that just started + func onStart(parentContext: SpanContext?, span: ReadableSpan) + + /// Called when a Span is ended, if the Span.isRecording() is true. + /// This method is called synchronously on the execution thread, should not throw or block the + /// execution thread. + /// - Parameter span: the ReadableSpan that just ended. + mutating func onEnd(span: ReadableSpan) + + /// Called when TracerSdk.shutdown() is called. + /// Implementations must ensure that all span events are processed before returning mutating func shutdown(explicitTimeout: TimeInterval?) - - /// Processes all span events that have not yet been processed. 
- /// This method is executed synchronously on the calling thread - /// - Parameter timeout: Maximum time the flush complete or abort. If nil, it will wait indefinitely - func forceFlush(timeout: TimeInterval?) + + /// Processes all span events that have not yet been processed. + /// This method is executed synchronously on the calling thread + /// - Parameter timeout: Maximum time the flush complete or abort. If nil, it will wait indefinitely + func forceFlush(timeout: TimeInterval?) } extension SpanProcessor { diff --git a/Sources/OpenTelemetrySdk/Trace/SpanProcessors/BatchSpanProcessor.swift b/Sources/OpenTelemetrySdk/Trace/SpanProcessors/BatchSpanProcessor.swift index 4f881d98..1c2c6c8f 100644 --- a/Sources/OpenTelemetrySdk/Trace/SpanProcessors/BatchSpanProcessor.swift +++ b/Sources/OpenTelemetrySdk/Trace/SpanProcessors/BatchSpanProcessor.swift @@ -15,7 +15,7 @@ import OpenTelemetryApi /// exports the spans to wake up and start a new export cycle. /// This batchSpanProcessor can cause high contention in a very high traffic service. public struct BatchSpanProcessor: SpanProcessor { - + fileprivate var worker: BatchWorker @@ -121,7 +121,7 @@ private class BatchWorker: Thread { spanExporter.shutdown() } - public func forceFlush(explicitTimeout: TimeInterval?) { + public func forceFlush(explicitTimeout: TimeInterval? = nil) { var spansCopy: [ReadableSpan] cond.lock() spansCopy = spanList @@ -131,7 +131,7 @@ private class BatchWorker: Thread { exportBatch(spanList: spansCopy, explicitTimeout: explicitTimeout) } - private func exportBatch(spanList: [ReadableSpan], explicitTimeout: TimeInterval?) { + private func exportBatch(spanList: [ReadableSpan], explicitTimeout: TimeInterval? = nil) { let maxTimeOut = min(explicitTimeout ?? TimeInterval.greatestFiniteMagnitude, exportTimeout) let exportOperation = BlockOperation { [weak self] in self?.exportAction(spanList: spanList, explicitTimeout: maxTimeOut) @@ -148,7 +148,7 @@ private class BatchWorker: Thread { timeoutTimer.cancel() } - private func exportAction(spanList: [ReadableSpan], explicitTimeout: TimeInterval?) { + private func exportAction(spanList: [ReadableSpan], explicitTimeout: TimeInterval? = nil) { stride(from: 0, to: spanList.endIndex, by: maxExportBatchSize).forEach { var spansToExport = spanList[$0 ..< min($0 + maxExportBatchSize, spanList.count)].map { $0.toSpanData() } willExportCallback?(&spansToExport) diff --git a/Sources/OpenTelemetrySdk/Trace/SpanProcessors/MultiSpanProcessor.swift b/Sources/OpenTelemetrySdk/Trace/SpanProcessors/MultiSpanProcessor.swift index 1a713e68..f448f4f4 100644 --- a/Sources/OpenTelemetrySdk/Trace/SpanProcessors/MultiSpanProcessor.swift +++ b/Sources/OpenTelemetrySdk/Trace/SpanProcessors/MultiSpanProcessor.swift @@ -9,51 +9,51 @@ import OpenTelemetryApi /// Implementation of the SpanProcessor that simply forwards all received events to a list of /// SpanProcessors. 
public struct MultiSpanProcessor: SpanProcessor { - var spanProcessorsStart = [SpanProcessor]() - var spanProcessorsEnd = [SpanProcessor]() - var spanProcessorsAll = [SpanProcessor]() - - public init(spanProcessors: [SpanProcessor]) { - spanProcessorsAll = spanProcessors - spanProcessorsAll.forEach { - if $0.isStartRequired { - spanProcessorsStart.append($0) - } - if $0.isEndRequired { - spanProcessorsEnd.append($0) - } - } + var spanProcessorsStart = [SpanProcessor]() + var spanProcessorsEnd = [SpanProcessor]() + var spanProcessorsAll = [SpanProcessor]() + + public init(spanProcessors: [SpanProcessor]) { + spanProcessorsAll = spanProcessors + spanProcessorsAll.forEach { + if $0.isStartRequired { + spanProcessorsStart.append($0) + } + if $0.isEndRequired { + spanProcessorsEnd.append($0) + } } - - public var isStartRequired: Bool { - return spanProcessorsStart.count > 0 + } + + public var isStartRequired: Bool { + return spanProcessorsStart.count > 0 + } + + public var isEndRequired: Bool { + return spanProcessorsEnd.count > 0 + } + + public func onStart(parentContext: SpanContext?, span: ReadableSpan) { + spanProcessorsStart.forEach { + $0.onStart(parentContext: parentContext, span: span) } - - public var isEndRequired: Bool { - return spanProcessorsEnd.count > 0 - } - - public func onStart(parentContext: SpanContext?, span: ReadableSpan) { - spanProcessorsStart.forEach { - $0.onStart(parentContext: parentContext, span: span) - } + } + + public func onEnd(span: ReadableSpan) { + for var processor in spanProcessorsEnd { + processor.onEnd(span: span) } - - public func onEnd(span: ReadableSpan) { - for var processor in spanProcessorsEnd { - processor.onEnd(span: span) - } - } - + } + public func shutdown(explicitTimeout:TimeInterval? = nil) { - for var processor in spanProcessorsAll { - processor.shutdown(explicitTimeout: explicitTimeout) - } + for var processor in spanProcessorsAll { + processor.shutdown(explicitTimeout: explicitTimeout) } - - public func forceFlush(timeout: TimeInterval? = nil) { - spanProcessorsAll.forEach { - $0.forceFlush(timeout: timeout) - } + } + + public func forceFlush(timeout: TimeInterval? = nil) { + spanProcessorsAll.forEach { + $0.forceFlush(timeout: timeout) } + } } diff --git a/Sources/OpenTelemetrySdk/Trace/SpanProcessors/SimpleSpanProcessor.swift b/Sources/OpenTelemetrySdk/Trace/SpanProcessors/SimpleSpanProcessor.swift index abe58044..7b230d3f 100644 --- a/Sources/OpenTelemetrySdk/Trace/SpanProcessors/SimpleSpanProcessor.swift +++ b/Sources/OpenTelemetrySdk/Trace/SpanProcessors/SimpleSpanProcessor.swift @@ -10,52 +10,52 @@ import OpenTelemetryApi /// and passes it to the configured exporter. /// For production environment BatchSpanProcessor is configurable and is preferred. public struct SimpleSpanProcessor: SpanProcessor { - private let spanExporter: SpanExporter - private var sampled: Bool = true - private let processorQueue = DispatchQueue(label: "io.opentelemetry.simplespanprocessor") - - public let isStartRequired = false - public let isEndRequired = true - - public func onStart(parentContext: SpanContext?, span: ReadableSpan) {} - - public mutating func onEnd(span: ReadableSpan) { - if sampled, !span.context.traceFlags.sampled { - return - } - let span = span.toSpanData() - let spanExporterAux = self.spanExporter - processorQueue.async { - _ = spanExporterAux.export(spans: [span]) - } - } - - public func shutdown(explicitTimeout: TimeInterval? 
= nil) { - processorQueue.sync { - spanExporter.shutdown(explicitTimeout: explicitTimeout) - } + private let spanExporter: SpanExporter + private var sampled: Bool = true + private let processorQueue = DispatchQueue(label: "io.opentelemetry.simplespanprocessor") + + public let isStartRequired = false + public let isEndRequired = true + + public func onStart(parentContext: SpanContext?, span: ReadableSpan) {} + + public mutating func onEnd(span: ReadableSpan) { + if sampled, !span.context.traceFlags.sampled { + return } - - /// Forces the processing of the remaining spans - /// - Parameter timeout: unused in this processor - public func forceFlush(timeout: TimeInterval? = nil) { - processorQueue.sync { - _ = spanExporter.flush() - } + let span = span.toSpanData() + let spanExporterAux = self.spanExporter + processorQueue.async { + _ = spanExporterAux.export(spans: [span]) } - - /// Returns a new SimpleSpansProcessor that converts spans to proto and forwards them to - /// the given spanExporter. - /// - Parameter spanExporter: the SpanExporter to where the Spans are pushed. - public init(spanExporter: SpanExporter) { - self.spanExporter = spanExporter + } + + public func shutdown(explicitTimeout: TimeInterval? = nil) { + processorQueue.sync { + spanExporter.shutdown(explicitTimeout: explicitTimeout) } - - /// Set whether only sampled spans should be reported. - /// - Parameter sampled: report only sampled spans. - public func reportingOnlySampled(sampled: Bool) -> Self { - var processor = self - processor.sampled = sampled - return processor + } + + /// Forces the processing of the remaining spans + /// - Parameter timeout: unused in this processor + public func forceFlush(timeout: TimeInterval? = nil) { + processorQueue.sync { + _ = spanExporter.flush() } + } + + /// Returns a new SimpleSpansProcessor that converts spans to proto and forwards them to + /// the given spanExporter. + /// - Parameter spanExporter: the SpanExporter to where the Spans are pushed. + public init(spanExporter: SpanExporter) { + self.spanExporter = spanExporter + } + + /// Set whether only sampled spans should be reported. + /// - Parameter sampled: report only sampled spans. + public func reportingOnlySampled(sampled: Bool) -> Self { + var processor = self + processor.sampled = sampled + return processor + } } diff --git a/Tests/ExportersTests/OpenTelemetryProtocol/OtlpHttpMetricsExporterTest.swift b/Tests/ExportersTests/OpenTelemetryProtocol/OtlpHttpMetricsExporterTest.swift index 54f626cc..852fbd82 100644 --- a/Tests/ExportersTests/OpenTelemetryProtocol/OtlpHttpMetricsExporterTest.swift +++ b/Tests/ExportersTests/OpenTelemetryProtocol/OtlpHttpMetricsExporterTest.swift @@ -15,113 +15,113 @@ import OpenTelemetryProtocolExporterCommon import XCTest class OtlpHttpMetricsExporterTest: XCTestCase { - var exporter: OtlpHttpMetricExporter! - var testServer: NIOHTTP1TestServer! - var group: MultiThreadedEventLoopGroup! - - override func setUp() { - group = MultiThreadedEventLoopGroup(numberOfThreads: 1) - testServer = NIOHTTP1TestServer(group: group) + var exporter: OtlpHttpMetricExporter! + var testServer: NIOHTTP1TestServer! + var group: MultiThreadedEventLoopGroup! 
+ + override func setUp() { + group = MultiThreadedEventLoopGroup(numberOfThreads: 1) + testServer = NIOHTTP1TestServer(group: group) + } + + override func tearDown() { + XCTAssertNoThrow(try testServer.stop()) + XCTAssertNoThrow(try group.syncShutdownGracefully()) + } + + // The shutdown() function is a no-op, This test is just here to make codecov happy + func testShutdown() { + let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! + let exporter = OtlpHttpMetricExporter(endpoint: endpoint) + XCTAssertNoThrow(exporter.shutdown()) + } + + // This test and testGaugeExport() are somewhat hacky solutions to verifying that the metrics got across correctly. It + // simply looks for the metric description strings (which is why I made them unique) in the body returned by + // testServer.receiveBodyAndVerify(). It should ideally turn that body into [Metric] using protobuf and then confirm content + func testExport() { + let words = ["foo", "bar", "fizz", "buzz"] + var metrics: [Metric] = [] + var metricDescriptions: [String] = [] + for word in words { + let metricDescription = word + String(Int.random(in: 1...100)) + metricDescriptions.append(metricDescription) + metrics.append(generateSumMetric(description: metricDescription)) } - override func tearDown() { - XCTAssertNoThrow(try testServer.stop()) - XCTAssertNoThrow(try group.syncShutdownGracefully()) + let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! + let exporter = OtlpHttpMetricExporter(endpoint: endpoint) + let result = exporter.export(metrics: metrics) { () -> Bool in + false } + XCTAssertEqual(result, MetricExporterResultCode.success) - // The shutdown() function is a no-op, This test is just here to make codecov happy - func testShutdown() { - let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! - let exporter = OtlpHttpMetricExporter(endpoint: endpoint) - XCTAssertNoThrow(exporter.shutdown(explicitTimeout: nil)) - } + XCTAssertNoThrow(try testServer.receiveHeadAndVerify { head in + let otelVersion = Headers.getUserAgentHeader() + XCTAssertTrue(head.headers.contains(name: Constants.HTTP.userAgent)) + XCTAssertEqual(otelVersion, head.headers.first(name: Constants.HTTP.userAgent)) + }) + XCTAssertNoThrow(try testServer.receiveBodyAndVerify() { body in + var contentsBuffer = ByteBuffer(buffer: body) + let contents = contentsBuffer.readString(length: contentsBuffer.readableBytes)! + for metricDescription in metricDescriptions { + XCTAssertTrue(contents.contains(metricDescription)) + } + }) - // This test and testGaugeExport() are somewhat hacky solutions to verifying that the metrics got across correctly. It - // simply looks for the metric description strings (which is why I made them unique) in the body returned by - // testServer.receiveBodyAndVerify(). It should ideally turn that body into [Metric] using protobuf and then confirm content - func testExport() { - let words = ["foo", "bar", "fizz", "buzz"] - var metrics: [Metric] = [] - var metricDescriptions: [String] = [] - for word in words { - let metricDescription = word + String(Int.random(in: 1...100)) - metricDescriptions.append(metricDescription) - metrics.append(generateSumMetric(description: metricDescription)) - } - - let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! 
- let exporter = OtlpHttpMetricExporter(endpoint: endpoint) - let result = exporter.export(metrics: metrics) { () -> Bool in - false - } - XCTAssertEqual(result, MetricExporterResultCode.success) - - XCTAssertNoThrow(try testServer.receiveHeadAndVerify { head in - let otelVersion = Headers.getUserAgentHeader() - XCTAssertTrue(head.headers.contains(name: Constants.HTTP.userAgent)) - XCTAssertEqual(otelVersion, head.headers.first(name: Constants.HTTP.userAgent)) - }) - XCTAssertNoThrow(try testServer.receiveBodyAndVerify() { body in - var contentsBuffer = ByteBuffer(buffer: body) - let contents = contentsBuffer.readString(length: contentsBuffer.readableBytes)! - for metricDescription in metricDescriptions { - XCTAssertTrue(contents.contains(metricDescription)) - } - }) - - XCTAssertNoThrow(try testServer.receiveEnd()) + XCTAssertNoThrow(try testServer.receiveEnd()) + } + + func testGaugeExport() { + let words = ["foo", "bar", "fizz", "buzz"] + var metrics: [Metric] = [] + var metricDescriptions: [String] = [] + for word in words { + let metricDescription = word + String(Int.random(in: 1...100)) + metricDescriptions.append(metricDescription) + metrics.append(generateGaugeMetric(description: metricDescription)) } - func testGaugeExport() { - let words = ["foo", "bar", "fizz", "buzz"] - var metrics: [Metric] = [] - var metricDescriptions: [String] = [] - for word in words { - let metricDescription = word + String(Int.random(in: 1...100)) - metricDescriptions.append(metricDescription) - metrics.append(generateGaugeMetric(description: metricDescription)) - } - - let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! - let exporter = OtlpHttpMetricExporter(endpoint: endpoint) - - let result = exporter.export(metrics: metrics) { () -> Bool in - false - } - XCTAssertEqual(result, MetricExporterResultCode.success) - - XCTAssertNoThrow(try testServer.receiveHead()) - XCTAssertNoThrow(try testServer.receiveBodyAndVerify() { body in - var contentsBuffer = ByteBuffer(buffer: body) - let contents = contentsBuffer.readString(length: contentsBuffer.readableBytes)! - for metricDescription in metricDescriptions { - XCTAssertTrue(contents.contains(metricDescription)) - } - }) - XCTAssertNoThrow(try testServer.receiveEnd()) - - // TODO: if we can turn contents back into [Metric], look at OtlpMetricExporterTests for additional checks - } + let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! + let exporter = OtlpHttpMetricExporter(endpoint: endpoint) - func testFlush() { - let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! 
- let exporter = OtlpHttpMetricExporter(endpoint: endpoint) - XCTAssertEqual(MetricExporterResultCode.success, exporter.flush()) + let result = exporter.export(metrics: metrics) { () -> Bool in + false } + XCTAssertEqual(result, MetricExporterResultCode.success) - func generateSumMetric(description: String = "description") -> Metric { - let scope = InstrumentationScopeInfo(name: "lib", version: "semver:0.0.0") - var metric = Metric(namespace: "namespace", name: "metric", desc: description, type: .doubleSum, resource: Resource(), instrumentationScopeInfo: scope) - let data = SumData(startTimestamp: Date(), timestamp: Date(), labels: ["hello": "world"], sum: 1) - metric.data.append(data) - return metric - } - - func generateGaugeMetric(description: String = "description") -> Metric { - let scope = InstrumentationScopeInfo(name: "lib", version: "semver:0.0.0") - var metric = Metric(namespace: "namespace", name: "MyGauge", desc: description, type: .intGauge, resource: Resource(), instrumentationScopeInfo: scope) - let data = SumData(startTimestamp: Date(), timestamp: Date(), labels: ["hello": "world"], sum: 100) - metric.data.append(data) - return metric - } + XCTAssertNoThrow(try testServer.receiveHead()) + XCTAssertNoThrow(try testServer.receiveBodyAndVerify() { body in + var contentsBuffer = ByteBuffer(buffer: body) + let contents = contentsBuffer.readString(length: contentsBuffer.readableBytes)! + for metricDescription in metricDescriptions { + XCTAssertTrue(contents.contains(metricDescription)) + } + }) + XCTAssertNoThrow(try testServer.receiveEnd()) + + // TODO: if we can turn contents back into [Metric], look at OtlpMetricExporterTests for additional checks + } + + func testFlush() { + let endpoint = URL(string: "http://localhost:\(testServer.serverPort)")! + let exporter = OtlpHttpMetricExporter(endpoint: endpoint) + XCTAssertEqual(MetricExporterResultCode.success, exporter.flush()) + } + + func generateSumMetric(description: String = "description") -> Metric { + let scope = InstrumentationScopeInfo(name: "lib", version: "semver:0.0.0") + var metric = Metric(namespace: "namespace", name: "metric", desc: description, type: .doubleSum, resource: Resource(), instrumentationScopeInfo: scope) + let data = SumData(startTimestamp: Date(), timestamp: Date(), labels: ["hello": "world"], sum: 1) + metric.data.append(data) + return metric + } + + func generateGaugeMetric(description: String = "description") -> Metric { + let scope = InstrumentationScopeInfo(name: "lib", version: "semver:0.0.0") + var metric = Metric(namespace: "namespace", name: "MyGauge", desc: description, type: .intGauge, resource: Resource(), instrumentationScopeInfo: scope) + let data = SumData(startTimestamp: Date(), timestamp: Date(), labels: ["hello": "world"], sum: 100) + metric.data.append(data) + return metric + } } diff --git a/Tests/ExportersTests/OpenTelemetryProtocol/OtlpLogRecordExporterTests.swift b/Tests/ExportersTests/OpenTelemetryProtocol/OtlpLogRecordExporterTests.swift index 924e0fc6..42f751a8 100644 --- a/Tests/ExportersTests/OpenTelemetryProtocol/OtlpLogRecordExporterTests.swift +++ b/Tests/ExportersTests/OpenTelemetryProtocol/OtlpLogRecordExporterTests.swift @@ -15,154 +15,154 @@ import XCTest class OtlpLogRecordExporterTests: XCTestCase { - let traceIdBytes: [UInt8] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4] - let spanIdBytes: [UInt8] = [0, 0, 0, 0, 4, 3, 2, 1] - var traceId: TraceId! - var spanId: SpanId! - let tracestate = TraceState() - var spanContext: SpanContext! 
+ let traceIdBytes: [UInt8] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4] + let spanIdBytes: [UInt8] = [0, 0, 0, 0, 4, 3, 2, 1] + var traceId: TraceId! + var spanId: SpanId! + let tracestate = TraceState() + var spanContext: SpanContext! + + + + + var fakeCollector: FakeLogCollector! + var server: EventLoopFuture! + var channel: ClientConnection! + + let channelGroup = MultiThreadedEventLoopGroup(numberOfThreads: 1) + let serverGroup = MultiThreadedEventLoopGroup(numberOfThreads: 1) + + func startServer() -> EventLoopFuture { + // Start the server and print its address once it has started. + let server = Server.insecure(group: serverGroup) + .withServiceProviders([fakeCollector]) + .bind(host: "localhost", port: 4317) - - - - var fakeCollector: FakeLogCollector! - var server: EventLoopFuture! - var channel: ClientConnection! - - let channelGroup = MultiThreadedEventLoopGroup(numberOfThreads: 1) - let serverGroup = MultiThreadedEventLoopGroup(numberOfThreads: 1) - - func startServer() -> EventLoopFuture { - // Start the server and print its address once it has started. - let server = Server.insecure(group: serverGroup) - .withServiceProviders([fakeCollector]) - .bind(host: "localhost", port: 4317) - - server.map { - $0.channel.localAddress - }.whenSuccess { address in - print("server started on port \(address!.port!)") - } - return server - } - - func startChannel() -> ClientConnection { - let channel = ClientConnection.insecure(group: channelGroup) - .connect(host: "localhost", port: 4317) - return channel - } - - override func setUp() { - fakeCollector = FakeLogCollector() - server = startServer() - channel = startChannel() - traceId = TraceId(fromBytes: traceIdBytes) - spanId = SpanId(fromBytes: spanIdBytes) - spanContext = SpanContext.create(traceId: traceId, spanId: spanId, traceFlags: TraceFlags(), traceState: tracestate) - - } - - override func tearDown() { - try! serverGroup.syncShutdownGracefully() - try! 
channelGroup.syncShutdownGracefully() - } - - func testExport() { - let logRecord = ReadableLogRecord(resource: Resource(), - instrumentationScopeInfo: InstrumentationScopeInfo(name: "scope"), - timestamp: Date(), - observedTimestamp: Date.distantPast, - spanContext: spanContext, - severity: .fatal, - body: "Hello, world", - attributes: ["event.name":AttributeValue.string("name"), "event.domain": AttributeValue.string("domain")]) - - let exporter = OtlpLogExporter(channel: channel) - let result = exporter.export(logRecords: [logRecord], explicitTimeout: nil) - XCTAssertEqual(result, ExportResult.success) - XCTAssertEqual(fakeCollector.receivedLogs, LogRecordAdapter.toProtoResourceRecordLog(logRecordList: [logRecord])) - exporter.shutdown(explicitTimeout: nil) + server.map { + $0.channel.localAddress + }.whenSuccess { address in + print("server started on port \(address!.port!)") } + return server + } + + func startChannel() -> ClientConnection { + let channel = ClientConnection.insecure(group: channelGroup) + .connect(host: "localhost", port: 4317) + return channel + } + + override func setUp() { + fakeCollector = FakeLogCollector() + server = startServer() + channel = startChannel() + traceId = TraceId(fromBytes: traceIdBytes) + spanId = SpanId(fromBytes: spanIdBytes) + spanContext = SpanContext.create(traceId: traceId, spanId: spanId, traceFlags: TraceFlags(), traceState: tracestate) - func testImplicitGrpcLoggingConfig() throws { - let exporter = OtlpLogExporter(channel: channel) - let logger = exporter.callOptions.logger - XCTAssertEqual(logger.label, "io.grpc") - } - func testExplicitGrpcLoggingConfig() throws { - let exporter = OtlpLogExporter(channel: channel, logger: Logger(label: "my.grpc.logger")) - let logger = exporter.callOptions.logger - XCTAssertEqual(logger.label, "my.grpc.logger") - } - - func testConfigHeadersIsNil_whenDefaultInitCalled() throws { - let exporter = OtlpLogExporter(channel: channel) - XCTAssertNil(exporter.config.headers) - } - - func testConfigHeadersAreSet_whenInitCalledWithCustomConfig() throws { - let config: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10), headers: [("FOO", "BAR")]) - let exporter = OtlpLogExporter(channel: channel, config: config) - XCTAssertNotNil(exporter.config.headers) - XCTAssertEqual(exporter.config.headers?[0].0, "FOO") - XCTAssertEqual(exporter.config.headers?[0].1, "BAR") - XCTAssertEqual("BAR", exporter.callOptions.customMetadata.first(name: "FOO")) - } - - func testConfigHeadersAreSet_whenInitCalledWithExplicitHeaders() throws { - let exporter = OtlpLogExporter(channel: channel, envVarHeaders: [("FOO", "BAR")]) - XCTAssertNil(exporter.config.headers) - XCTAssertEqual("BAR", exporter.callOptions.customMetadata.first(name: "FOO")) - } - - func testExportAfterShutdown() { - let logRecord = ReadableLogRecord(resource: Resource(), - instrumentationScopeInfo: InstrumentationScopeInfo(name: "scope"), - timestamp: Date(), - observedTimestamp: Date.distantPast, - spanContext: spanContext, - severity: .fatal, - body: "Hello, world", - attributes: ["event.name":AttributeValue.string("name"), "event.domain": AttributeValue.string("domain")]) - let exporter = OtlpLogExporter(channel: channel) - exporter.shutdown(explicitTimeout: nil) - let result = exporter.export(logRecords: [logRecord], explicitTimeout: nil) - XCTAssertEqual(result, ExportResult.failure) - } - - func testExportCancelled() { - fakeCollector.returnedStatus = GRPCStatus(code: .cancelled, message: nil) - let exporter = OtlpLogExporter(channel: channel) - 
let logRecord = ReadableLogRecord(resource: Resource(), - instrumentationScopeInfo: InstrumentationScopeInfo(name: "scope"), - timestamp: Date(), - observedTimestamp: Date.distantPast, - spanContext: spanContext, - severity: .fatal, - body: "Hello, world", - attributes: ["event.name":AttributeValue.string("name"), - "event.domain": AttributeValue.string("domain")]) - let result = exporter.export(logRecords: [logRecord], explicitTimeout: nil) - XCTAssertEqual(result, ExportResult.failure) - exporter.shutdown(explicitTimeout: nil) - } + } + + override func tearDown() { + try! serverGroup.syncShutdownGracefully() + try! channelGroup.syncShutdownGracefully() + } + + func testExport() { + let logRecord = ReadableLogRecord(resource: Resource(), + instrumentationScopeInfo: InstrumentationScopeInfo(name: "scope"), + timestamp: Date(), + observedTimestamp: Date.distantPast, + spanContext: spanContext, + severity: .fatal, + body: "Hello, world", + attributes: ["event.name":AttributeValue.string("name"), "event.domain": AttributeValue.string("domain")]) + let exporter = OtlpLogExporter(channel: channel) + let result = exporter.export(logRecords: [logRecord]) + XCTAssertEqual(result, ExportResult.success) + XCTAssertEqual(fakeCollector.receivedLogs, LogRecordAdapter.toProtoResourceRecordLog(logRecordList: [logRecord])) + exporter.shutdown() + } + + func testImplicitGrpcLoggingConfig() throws { + let exporter = OtlpLogExporter(channel: channel) + let logger = exporter.callOptions.logger + XCTAssertEqual(logger.label, "io.grpc") + } + func testExplicitGrpcLoggingConfig() throws { + let exporter = OtlpLogExporter(channel: channel, logger: Logger(label: "my.grpc.logger")) + let logger = exporter.callOptions.logger + XCTAssertEqual(logger.label, "my.grpc.logger") + } + + func testConfigHeadersIsNil_whenDefaultInitCalled() throws { + let exporter = OtlpLogExporter(channel: channel) + XCTAssertNil(exporter.config.headers) + } + + func testConfigHeadersAreSet_whenInitCalledWithCustomConfig() throws { + let config: OtlpConfiguration = OtlpConfiguration(timeout: TimeInterval(10), headers: [("FOO", "BAR")]) + let exporter = OtlpLogExporter(channel: channel, config: config) + XCTAssertNotNil(exporter.config.headers) + XCTAssertEqual(exporter.config.headers?[0].0, "FOO") + XCTAssertEqual(exporter.config.headers?[0].1, "BAR") + XCTAssertEqual("BAR", exporter.callOptions.customMetadata.first(name: "FOO")) + } + + func testConfigHeadersAreSet_whenInitCalledWithExplicitHeaders() throws { + let exporter = OtlpLogExporter(channel: channel, envVarHeaders: [("FOO", "BAR")]) + XCTAssertNil(exporter.config.headers) + XCTAssertEqual("BAR", exporter.callOptions.customMetadata.first(name: "FOO")) + } + + func testExportAfterShutdown() { + let logRecord = ReadableLogRecord(resource: Resource(), + instrumentationScopeInfo: InstrumentationScopeInfo(name: "scope"), + timestamp: Date(), + observedTimestamp: Date.distantPast, + spanContext: spanContext, + severity: .fatal, + body: "Hello, world", + attributes: ["event.name":AttributeValue.string("name"), "event.domain": AttributeValue.string("domain")]) + let exporter = OtlpLogExporter(channel: channel) + exporter.shutdown() + let result = exporter.export(logRecords: [logRecord]) + XCTAssertEqual(result, ExportResult.failure) + } + + func testExportCancelled() { + fakeCollector.returnedStatus = GRPCStatus(code: .cancelled, message: nil) + let exporter = OtlpLogExporter(channel: channel) + let logRecord = ReadableLogRecord(resource: Resource(), + instrumentationScopeInfo: 
InstrumentationScopeInfo(name: "scope"), + timestamp: Date(), + observedTimestamp: Date.distantPast, + spanContext: spanContext, + severity: .fatal, + body: "Hello, world", + attributes: ["event.name":AttributeValue.string("name"), + "event.domain": AttributeValue.string("domain")]) + let result = exporter.export(logRecords: [logRecord]) + XCTAssertEqual(result, ExportResult.failure) + exporter.shutdown() + } + } class FakeLogCollector: Opentelemetry_Proto_Collector_Logs_V1_LogsServiceProvider { - var interceptors: Opentelemetry_Proto_Collector_Logs_V1_LogsServiceServerInterceptorFactoryProtocol? - - func export(request: Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest, context: GRPC.StatusOnlyCallContext) -> NIOCore.EventLoopFuture { - receivedLogs.append(contentsOf: request.resourceLogs) - if returnedStatus != GRPCStatus.ok { - return context.eventLoop.makeFailedFuture(returnedStatus) - } - return context.eventLoop.makeSucceededFuture(Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceResponse()) + var interceptors: Opentelemetry_Proto_Collector_Logs_V1_LogsServiceServerInterceptorFactoryProtocol? + + func export(request: Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceRequest, context: GRPC.StatusOnlyCallContext) -> NIOCore.EventLoopFuture { + receivedLogs.append(contentsOf: request.resourceLogs) + if returnedStatus != GRPCStatus.ok { + return context.eventLoop.makeFailedFuture(returnedStatus) } - - var receivedLogs = [Opentelemetry_Proto_Logs_V1_ResourceLogs]() - var returnedStatus = GRPCStatus.ok - + return context.eventLoop.makeSucceededFuture(Opentelemetry_Proto_Collector_Logs_V1_ExportLogsServiceResponse()) + } + + var receivedLogs = [Opentelemetry_Proto_Logs_V1_ResourceLogs]() + var returnedStatus = GRPCStatus.ok + } diff --git a/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift b/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift index 5ff40fc5..606f4a47 100644 --- a/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift +++ b/Tests/ExportersTests/PersistenceExporter/Helpers/CoreMocks.swift @@ -9,214 +9,214 @@ import Foundation // MARK: - PerformancePreset Mocks struct StoragePerformanceMock: StoragePerformancePreset { - let maxFileSize: UInt64 - let maxDirectorySize: UInt64 - let maxFileAgeForWrite: TimeInterval - let minFileAgeForRead: TimeInterval - let maxFileAgeForRead: TimeInterval - let maxObjectsInFile: Int - let maxObjectSize: UInt64 - - static let readAllFiles = StoragePerformanceMock( - maxFileSize: .max, - maxDirectorySize: .max, - maxFileAgeForWrite: 0, - minFileAgeForRead: -1, // make all files eligible for read - maxFileAgeForRead: .distantFuture, // make all files eligible for read - maxObjectsInFile: .max, - maxObjectSize: .max - ) - - static let writeEachObjectToNewFileAndReadAllFiles = StoragePerformanceMock( - maxFileSize: .max, - maxDirectorySize: .max, - maxFileAgeForWrite: 0, // always return new file for writting - minFileAgeForRead: readAllFiles.minFileAgeForRead, - maxFileAgeForRead: readAllFiles.maxFileAgeForRead, - maxObjectsInFile: 1, // write each data to new file - maxObjectSize: .max - ) - - static let writeAllObjectsToTheSameFile = StoragePerformanceMock( - maxFileSize: .max, - maxDirectorySize: .max, - maxFileAgeForWrite: .distantFuture, - minFileAgeForRead: -1, // make all files eligible for read - maxFileAgeForRead: .distantFuture, // make all files eligible for read - maxObjectsInFile: .max, - maxObjectSize: .max - ) + let maxFileSize: UInt64 + let maxDirectorySize: UInt64 + let 
maxFileAgeForWrite: TimeInterval + let minFileAgeForRead: TimeInterval + let maxFileAgeForRead: TimeInterval + let maxObjectsInFile: Int + let maxObjectSize: UInt64 + + static let readAllFiles = StoragePerformanceMock( + maxFileSize: .max, + maxDirectorySize: .max, + maxFileAgeForWrite: 0, + minFileAgeForRead: -1, // make all files eligible for read + maxFileAgeForRead: .distantFuture, // make all files eligible for read + maxObjectsInFile: .max, + maxObjectSize: .max + ) + + static let writeEachObjectToNewFileAndReadAllFiles = StoragePerformanceMock( + maxFileSize: .max, + maxDirectorySize: .max, + maxFileAgeForWrite: 0, // always return new file for writting + minFileAgeForRead: readAllFiles.minFileAgeForRead, + maxFileAgeForRead: readAllFiles.maxFileAgeForRead, + maxObjectsInFile: 1, // write each data to new file + maxObjectSize: .max + ) + + static let writeAllObjectsToTheSameFile = StoragePerformanceMock( + maxFileSize: .max, + maxDirectorySize: .max, + maxFileAgeForWrite: .distantFuture, + minFileAgeForRead: -1, // make all files eligible for read + maxFileAgeForRead: .distantFuture, // make all files eligible for read + maxObjectsInFile: .max, + maxObjectSize: .max + ) } struct ExportPerformanceMock: ExportPerformancePreset { - let initialExportDelay: TimeInterval - let defaultExportDelay: TimeInterval - let minExportDelay: TimeInterval - let maxExportDelay: TimeInterval - let exportDelayChangeRate: Double - - static let veryQuick = ExportPerformanceMock( - initialExportDelay: 0.05, - defaultExportDelay: 0.05, - minExportDelay: 0.05, - maxExportDelay: 0.05, - exportDelayChangeRate: 0 - ) + let initialExportDelay: TimeInterval + let defaultExportDelay: TimeInterval + let minExportDelay: TimeInterval + let maxExportDelay: TimeInterval + let exportDelayChangeRate: Double + + static let veryQuick = ExportPerformanceMock( + initialExportDelay: 0.05, + defaultExportDelay: 0.05, + minExportDelay: 0.05, + maxExportDelay: 0.05, + exportDelayChangeRate: 0 + ) } extension PersistencePerformancePreset { - - static func mockWith(storagePerformance: StoragePerformancePreset, - synchronousWrite: Bool, - exportPerformance: ExportPerformancePreset) -> PersistencePerformancePreset { - return PersistencePerformancePreset(maxFileSize: storagePerformance.maxFileSize, - maxDirectorySize: storagePerformance.maxDirectorySize, - maxFileAgeForWrite: storagePerformance.maxFileAgeForWrite, - minFileAgeForRead: storagePerformance.minFileAgeForRead, - maxFileAgeForRead: storagePerformance.maxFileAgeForRead, - maxObjectsInFile: storagePerformance.maxObjectsInFile, - maxObjectSize: storagePerformance.maxObjectSize, - synchronousWrite: synchronousWrite, - initialExportDelay: exportPerformance.initialExportDelay, - defaultExportDelay: exportPerformance.defaultExportDelay, - minExportDelay: exportPerformance.minExportDelay, - maxExportDelay: exportPerformance.maxExportDelay, - exportDelayChangeRate: exportPerformance.exportDelayChangeRate) - } + + static func mockWith(storagePerformance: StoragePerformancePreset, + synchronousWrite: Bool, + exportPerformance: ExportPerformancePreset) -> PersistencePerformancePreset { + return PersistencePerformancePreset(maxFileSize: storagePerformance.maxFileSize, + maxDirectorySize: storagePerformance.maxDirectorySize, + maxFileAgeForWrite: storagePerformance.maxFileAgeForWrite, + minFileAgeForRead: storagePerformance.minFileAgeForRead, + maxFileAgeForRead: storagePerformance.maxFileAgeForRead, + maxObjectsInFile: storagePerformance.maxObjectsInFile, + maxObjectSize: 
storagePerformance.maxObjectSize, + synchronousWrite: synchronousWrite, + initialExportDelay: exportPerformance.initialExportDelay, + defaultExportDelay: exportPerformance.defaultExportDelay, + minExportDelay: exportPerformance.minExportDelay, + maxExportDelay: exportPerformance.maxExportDelay, + exportDelayChangeRate: exportPerformance.exportDelayChangeRate) + } } /// `DateProvider` mock returning consecutive dates in custom intervals, starting from given reference date. class RelativeDateProvider: DateProvider { - private(set) var date: Date - internal let timeInterval: TimeInterval - private let queue = DispatchQueue(label: "queue-RelativeDateProvider-\(UUID().uuidString)") - - private init(date: Date, timeInterval: TimeInterval) { - self.date = date - self.timeInterval = timeInterval - } - - convenience init(using date: Date = Date()) { - self.init(date: date, timeInterval: 0) - } - - convenience init(startingFrom referenceDate: Date = Date(), advancingBySeconds timeInterval: TimeInterval = 0) { - self.init(date: referenceDate, timeInterval: timeInterval) + private(set) var date: Date + internal let timeInterval: TimeInterval + private let queue = DispatchQueue(label: "queue-RelativeDateProvider-\(UUID().uuidString)") + + private init(date: Date, timeInterval: TimeInterval) { + self.date = date + self.timeInterval = timeInterval + } + + convenience init(using date: Date = Date()) { + self.init(date: date, timeInterval: 0) + } + + convenience init(startingFrom referenceDate: Date = Date(), advancingBySeconds timeInterval: TimeInterval = 0) { + self.init(date: referenceDate, timeInterval: timeInterval) + } + + /// Returns current date and advances next date by `timeInterval`. + func currentDate() -> Date { + defer { + queue.async { + self.date.addTimeInterval(self.timeInterval) + } } - - /// Returns current date and advances next date by `timeInterval`. - func currentDate() -> Date { - defer { - queue.async { - self.date.addTimeInterval(self.timeInterval) - } - } - return queue.sync { - return date - } + return queue.sync { + return date } - - /// Pushes time forward by given number of seconds. - func advance(bySeconds seconds: TimeInterval) { - queue.async { - self.date = self.date.addingTimeInterval(seconds) - } + } + + /// Pushes time forward by given number of seconds. + func advance(bySeconds seconds: TimeInterval) { + queue.async { + self.date = self.date.addingTimeInterval(seconds) } + } } struct DataExporterMock: DataExporter { - let exportStatus: DataExportStatus - - var onExport: ((Data) -> Void)? = nil - - func export(data: Data, explicitTimeout: TimeInterval?) -> DataExportStatus { - onExport?(data) - return exportStatus - } + let exportStatus: DataExportStatus + + var onExport: ((Data) -> Void)? = nil + + func export(data: Data) -> DataExportStatus { + onExport?(data) + return exportStatus + } } extension DataExportStatus { - static func mockWith(needsRetry: Bool) -> DataExportStatus { - return DataExportStatus(needsRetry: needsRetry) - } + static func mockWith(needsRetry: Bool) -> DataExportStatus { + return DataExportStatus(needsRetry: needsRetry) + } } class FileWriterMock: FileWriter { - var onWrite: ((Bool, Data) -> Void)? = nil - - func write(data: Data) { - onWrite?(false, data) - } - - func writeSync(data: Data) { - onWrite?(true, data) - } - - var onFlush: (() -> Void)? = nil - - func flush() { - onFlush?() - } + var onWrite: ((Bool, Data) -> Void)? 
= nil + + func write(data: Data) { + onWrite?(false, data) + } + + func writeSync(data: Data) { + onWrite?(true, data) + } + + var onFlush: (() -> Void)? = nil + + func flush() { + onFlush?() + } } class FileReaderMock: FileReader { + + private class ReadableFileMock: ReadableFile { + private var deleted = false + private let data: Data - private class ReadableFileMock: ReadableFile { - private var deleted = false - private let data: Data - - private(set) var name: String - - init(name: String, data: Data) { - self.name = name - self.data = data - } - - func read() throws -> Data { - guard deleted == false else { - throw ErrorMock("read failed because delete was called") - } - return data - } - - func delete() throws { - deleted = true - } - } - - var files: [ReadableFile] = [] + private(set) var name: String - func addFile(name: String, data: Data) { - files.append(ReadableFileMock(name: name, data: data)) + init(name: String, data: Data) { + self.name = name + self.data = data } - func readNextBatch() -> Batch? { - if let file = files.first, - let fileData = try? file.read() { - return Batch(data: fileData, file: file) - } - - return nil + func read() throws -> Data { + guard deleted == false else { + throw ErrorMock("read failed because delete was called") + } + return data } - func onRemainingBatches(process: (Batch) -> ()) -> Bool { - do { - try files.forEach { - let fileData = try $0.read() - process(Batch(data: fileData, file: $0)) - } - - return true - } catch { - return false - } + func delete() throws { + deleted = true + } + } + + var files: [ReadableFile] = [] + + func addFile(name: String, data: Data) { + files.append(ReadableFileMock(name: name, data: data)) + } + + func readNextBatch() -> Batch? { + if let file = files.first, + let fileData = try? file.read() { + return Batch(data: fileData, file: file) } - func markBatchAsRead(_ batch: Batch) { - try? batch.file.delete() - files.removeAll { file -> Bool in - return file.name == batch.file.name - } + return nil + } + + func onRemainingBatches(process: (Batch) -> ()) -> Bool { + do { + try files.forEach { + let fileData = try $0.read() + process(Batch(data: fileData, file: $0)) + } + + return true + } catch { + return false + } + } + + func markBatchAsRead(_ batch: Batch) { + try? batch.file.delete() + files.removeAll { file -> Bool in + return file.name == batch.file.name } + } } diff --git a/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift b/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift index 8985c82d..0af15417 100644 --- a/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift +++ b/Tests/ExportersTests/PersistenceExporter/PersistenceExporterDecoratorTests.swift @@ -7,210 +7,211 @@ import XCTest class PersistenceExporterDecoratorTests: XCTestCase { - + class DecoratedExporterMock: DecoratedExporter { - - typealias SignalType = T - let exporter: ([T]) -> DataExportStatus - init(exporter: @escaping ([T]) -> DataExportStatus) { - self.exporter = exporter - } - - func export(values: [T], explicitTimeout: TimeInterval?) -> DataExportStatus { - return exporter(values) - } + + typealias SignalType = T + let exporter: ([T]) -> DataExportStatus + init(exporter: @escaping ([T]) -> DataExportStatus) { + self.exporter = exporter } - class DataExportWorkerMock: DataExportWorkerProtocol { - var dataExporter: DataExporter? = nil - var onFlush: (() -> Bool)? = nil - - func flush() -> Bool { - return onFlush?() ?? 
true - } + func export(values: [T]) -> DataExportStatus { + return exporter(values) } + } + + class DataExportWorkerMock: DataExportWorkerProtocol { + + var dataExporter: DataExporter? = nil + var onFlush: (() -> Bool)? = nil - private typealias PersistenceExporter = PersistenceExporterDecorator> - - private func createPersistenceExporter( - fileWriter: FileWriterMock = FileWriterMock(), - worker: inout DataExportWorkerMock, - decoratedExporter: DecoratedExporterMock = DecoratedExporterMock(exporter: { _ in - return DataExportStatus(needsRetry: false) - }), - storagePerformance: StoragePerformancePreset = StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles, - synchronousWrite: Bool = true, - exportPerformance: ExportPerformancePreset = ExportPerformanceMock.veryQuick - ) -> PersistenceExporter { - - return PersistenceExporterDecorator>( - decoratedExporter: decoratedExporter, - fileWriter: fileWriter, - workerFactory: { - worker.dataExporter = $0 - return worker - }, - performancePreset: PersistencePerformancePreset.mockWith( - storagePerformance: storagePerformance, - synchronousWrite: synchronousWrite, - exportPerformance: exportPerformance)) + func flush() -> Bool { + return onFlush?() ?? true } + } + + private typealias PersistenceExporter = PersistenceExporterDecorator> + + private func createPersistenceExporter( + fileWriter: FileWriterMock = FileWriterMock(), + worker: inout DataExportWorkerMock, + decoratedExporter: DecoratedExporterMock = DecoratedExporterMock(exporter: { _ in + return DataExportStatus(needsRetry: false) + }), + storagePerformance: StoragePerformancePreset = StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles, + synchronousWrite: Bool = true, + exportPerformance: ExportPerformancePreset = ExportPerformanceMock.veryQuick + ) -> PersistenceExporter { - func testWhenSetupWithSynchronousWrite_thenWritesAreSynchronous() throws { - var worker = DataExportWorkerMock() - let fileWriter = FileWriterMock() - - let exporter: PersistenceExporter = createPersistenceExporter( - fileWriter: fileWriter, - worker: &worker) - - fileWriter.onWrite = { writeSync, _ in - XCTAssertTrue(writeSync) - } - - try exporter.export(values: ["value"]) + return PersistenceExporterDecorator>( + decoratedExporter: decoratedExporter, + fileWriter: fileWriter, + workerFactory: { + worker.dataExporter = $0 + return worker + }, + performancePreset: PersistencePerformancePreset.mockWith( + storagePerformance: storagePerformance, + synchronousWrite: synchronousWrite, + exportPerformance: exportPerformance)) + } + + func testWhenSetupWithSynchronousWrite_thenWritesAreSynchronous() throws { + var worker = DataExportWorkerMock() + let fileWriter = FileWriterMock() + + let exporter: PersistenceExporter = createPersistenceExporter( + fileWriter: fileWriter, + worker: &worker) + + fileWriter.onWrite = { writeSync, _ in + XCTAssertTrue(writeSync) } - func testWhenSetupWithAsynchronousWrite_thenWritesAreAsynchronous() throws { - var worker = DataExportWorkerMock() - let fileWriter = FileWriterMock() - - let exporter: PersistenceExporter = createPersistenceExporter( - fileWriter: fileWriter, - worker: &worker, - synchronousWrite: false) - - fileWriter.onWrite = { writeSync, _ in - XCTAssertFalse(writeSync) - } - - try exporter.export(values: ["value"]) + try exporter.export(values: ["value"]) + } + + func testWhenSetupWithAsynchronousWrite_thenWritesAreAsynchronous() throws { + var worker = DataExportWorkerMock() + let fileWriter = FileWriterMock() + + let exporter: PersistenceExporter = 
createPersistenceExporter( + fileWriter: fileWriter, + worker: &worker, + synchronousWrite: false) + + fileWriter.onWrite = { writeSync, _ in + XCTAssertFalse(writeSync) } - func testWhenValueCannotBeEncoded_itThrowsAnError() { - // When - var worker = DataExportWorkerMock() - - let exporter: PersistenceExporter = createPersistenceExporter( - worker: &worker) - - XCTAssertThrowsError(try exporter.export(values: [FailingCodableMock()])) + try exporter.export(values: ["value"]) + } + + func testWhenValueCannotBeEncoded_itThrowsAnError() { + // When + var worker = DataExportWorkerMock() + + let exporter: PersistenceExporter = createPersistenceExporter( + worker: &worker) + + XCTAssertThrowsError(try exporter.export(values: [FailingCodableMock()])) + } + + func testWhenValueCannotBeDecoded_itReportsNoRetryIsNeeded() { + var worker = DataExportWorkerMock() + + _ = createPersistenceExporter(worker: &worker) as PersistenceExporter + + let result = worker.dataExporter?.export(data: Data()) + + XCTAssertNotNil(result) + XCTAssertFalse(result!.needsRetry) + } + + func testWhenItIsFlushed_thenItFlushesTheWriterAndWorker() { + let writerIsFlushedExpectation = self.expectation(description: "FileWriter was flushed") + let workerIsFlushedExpectation = self.expectation(description: "DataExportWorker was flushed") + + var worker = DataExportWorkerMock() + let fileWriter = FileWriterMock() + + let exporter: PersistenceExporter = createPersistenceExporter( + fileWriter: fileWriter, + worker: &worker) + + fileWriter.onFlush = { + writerIsFlushedExpectation.fulfill() } - func testWhenValueCannotBeDecoded_itReportsNoRetryIsNeeded() { - var worker = DataExportWorkerMock() - - _ = createPersistenceExporter(worker: &worker) as PersistenceExporter - - let result = worker.dataExporter?.export(data: Data(),explicitTimeout: nil) - - XCTAssertNotNil(result) - XCTAssertFalse(result!.needsRetry) + worker.onFlush = { + workerIsFlushedExpectation.fulfill() + return true } - func testWhenItIsFlushed_thenItFlushesTheWriterAndWorker() { - let writerIsFlushedExpectation = self.expectation(description: "FileWriter was flushed") - let workerIsFlushedExpectation = self.expectation(description: "DataExportWorker was flushed") - - var worker = DataExportWorkerMock() - let fileWriter = FileWriterMock() - - let exporter: PersistenceExporter = createPersistenceExporter( - fileWriter: fileWriter, - worker: &worker) - - fileWriter.onFlush = { - writerIsFlushedExpectation.fulfill() - } - - worker.onFlush = { - workerIsFlushedExpectation.fulfill() - return true + exporter.flush() + + waitForExpectations(timeout: 1, handler: nil) + } + + func testWhenObjectsDataIsExportedSeparately_thenObjectsAreExported() throws { + let v1ExportExpectation = self.expectation(description: "V1 exported") + let v2ExportExpectation = self.expectation(description: "V2 exported") + let v3ExportExpectation = self.expectation(description: "V3 exported") + + let decoratedExporter = DecoratedExporterMock(exporter: { values in + values.forEach { value in + switch value { + case "v1": v1ExportExpectation.fulfill() + case "v2": v2ExportExpectation.fulfill() + case "v3": v3ExportExpectation.fulfill() + default: break } - - exporter.flush() - - waitForExpectations(timeout: 1, handler: nil) + } + + return DataExportStatus(needsRetry: false) + }) + + var worker = DataExportWorkerMock() + let fileWriter = FileWriterMock() + + let exporter: PersistenceExporter = createPersistenceExporter( + fileWriter: fileWriter, + worker: &worker, + decoratedExporter: decoratedExporter) + 
+ fileWriter.onWrite = { _, data in + if let dataExporter = worker.dataExporter { + XCTAssertFalse(dataExporter.export(data: data).needsRetry) + } } - - func testWhenObjectsDataIsExportedSeparately_thenObjectsAreExported() throws { - let v1ExportExpectation = self.expectation(description: "V1 exported") - let v2ExportExpectation = self.expectation(description: "V2 exported") - let v3ExportExpectation = self.expectation(description: "V3 exported") - - let decoratedExporter = DecoratedExporterMock(exporter: { values in - values.forEach { value in - switch value { - case "v1": v1ExportExpectation.fulfill() - case "v2": v2ExportExpectation.fulfill() - case "v3": v3ExportExpectation.fulfill() - default: break - } - } - - return DataExportStatus(needsRetry: false) - }) - - var worker = DataExportWorkerMock() - let fileWriter = FileWriterMock() - - let exporter: PersistenceExporter = createPersistenceExporter( - fileWriter: fileWriter, - worker: &worker, - decoratedExporter: decoratedExporter) - - fileWriter.onWrite = { _, data in - if let dataExporter = worker.dataExporter { - XCTAssertFalse(dataExporter.export(data: data, explicitTimeout: nil).needsRetry) - } + + try exporter.export(values: ["v1"]) + try exporter.export(values: ["v2"]) + try exporter.export(values: ["v3"]) + + waitForExpectations(timeout: 1, handler: nil) + } + + func testWhenObjectsDataIsExportedConcatenated_thenObjectsAreExported() throws { + let v1ExportExpectation = self.expectation(description: "V1 exported") + let v2ExportExpectation = self.expectation(description: "V2 exported") + let v3ExportExpectation = self.expectation(description: "V3 exported") + + let decoratedExporter = DecoratedExporterMock(exporter: { values in + values.forEach { value in + switch value { + case "v1": v1ExportExpectation.fulfill() + case "v2": v2ExportExpectation.fulfill() + case "v3": v3ExportExpectation.fulfill() + default: break } - - try exporter.export(values: ["v1"]) - try exporter.export(values: ["v2"]) - try exporter.export(values: ["v3"]) - - waitForExpectations(timeout: 1, handler: nil) + } + + return DataExportStatus(needsRetry: false) + }) + + var worker = DataExportWorkerMock() + let fileWriter = FileWriterMock() + + let exporter: PersistenceExporter = createPersistenceExporter( + fileWriter: fileWriter, + worker: &worker, + decoratedExporter: decoratedExporter) + + var writtenData = Data() + fileWriter.onWrite = { _, data in + writtenData.append(data) } - func testWhenObjectsDataIsExportedConcatenated_thenObjectsAreExported() throws { - let v1ExportExpectation = self.expectation(description: "V1 exported") - let v2ExportExpectation = self.expectation(description: "V2 exported") - let v3ExportExpectation = self.expectation(description: "V3 exported") - - let decoratedExporter = DecoratedExporterMock(exporter: { values in - values.forEach { value in - switch value { - case "v1": v1ExportExpectation.fulfill() - case "v2": v2ExportExpectation.fulfill() - case "v3": v3ExportExpectation.fulfill() - default: break - } - } - - return DataExportStatus(needsRetry: false) - }) - - var worker = DataExportWorkerMock() - let fileWriter = FileWriterMock() - - let exporter: PersistenceExporter = createPersistenceExporter( - fileWriter: fileWriter, - worker: &worker, - decoratedExporter: decoratedExporter) - - var writtenData = Data() - fileWriter.onWrite = { _, data in - writtenData.append(data) - } - - try exporter.export(values: ["v1"]) - try exporter.export(values: ["v2"]) - try exporter.export(values: ["v3"]) - - if let dataExporter = 
worker.dataExporter { - XCTAssertFalse(dataExporter.export(data: writtenData, explicitTimeout: nil).needsRetry) - } - - waitForExpectations(timeout: 1, handler: nil) - } + try exporter.export(values: ["v1"]) + try exporter.export(values: ["v2"]) + try exporter.export(values: ["v3"]) + + if let dataExporter = worker.dataExporter { + XCTAssertFalse(dataExporter.export(data: writtenData).needsRetry) + } + + waitForExpectations(timeout: 1, handler: nil) + } } diff --git a/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift b/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift index 2a28c527..6f732f3d 100644 --- a/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift +++ b/Tests/ExportersTests/PersistenceExporter/PersistenceSpanExporterDecoratorTests.swift @@ -9,92 +9,92 @@ import OpenTelemetrySdk import XCTest class PersistenceSpanExporterDecoratorTests: XCTestCase { - private let temporaryDirectory = obtainUniqueTemporaryDirectory() - - class SpanExporterMock: SpanExporter { - let onExport: ([SpanData], TimeInterval?) -> SpanExporterResultCode - let onFlush: (TimeInterval?) -> SpanExporterResultCode - let onShutdown: (TimeInterval?) -> Void - - init(onExport: @escaping ([SpanData], TimeInterval?) -> SpanExporterResultCode, - onFlush: @escaping (TimeInterval?) -> SpanExporterResultCode = {_ in .success }, - onShutdown: @escaping (TimeInterval?) -> Void = {_ in}) - { - self.onExport = onExport - self.onFlush = onFlush - self.onShutdown = onShutdown - } - - @discardableResult func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - return onExport(spans, explicitTimeout) - } - - func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - return onFlush(explicitTimeout) - } - - func shutdown(explicitTimeout: TimeInterval?) { - onShutdown(explicitTimeout) - } - } + private let temporaryDirectory = obtainUniqueTemporaryDirectory() + + class SpanExporterMock: SpanExporter { + let onExport: ([SpanData], TimeInterval?) -> SpanExporterResultCode + let onFlush: (TimeInterval?) -> SpanExporterResultCode + let onShutdown: (TimeInterval?) -> Void - override func setUp() { - super.setUp() - temporaryDirectory.create() + init(onExport: @escaping ([SpanData], TimeInterval?) -> SpanExporterResultCode, + onFlush: @escaping (TimeInterval?) -> SpanExporterResultCode = {_ in .success }, + onShutdown: @escaping (TimeInterval?) -> Void = {_ in}) + { + self.onExport = onExport + self.onFlush = onFlush + self.onShutdown = onShutdown } - - override func tearDown() { - temporaryDirectory.delete() - super.tearDown() + + @discardableResult func export(spans: [SpanData], explicitTimeout: TimeInterval?) 
-> SpanExporterResultCode { + return onExport(spans, explicitTimeout) } - func testWhenExportMetricIsCalled_thenSpansAreExported() throws { - let spansExportExpectation = self.expectation(description: "spans exported") - let exporterShutdownExpectation = self.expectation(description: "exporter shut down") - - let mockSpanExporter = SpanExporterMock(onExport: { spans, _ in - spans.forEach { span in - if span.name == "SimpleSpan", - span.events.count == 1, - span.events.first!.name == "My event" - { - spansExportExpectation.fulfill() - } - } - - return .success - }, onShutdown: { _ in - exporterShutdownExpectation.fulfill() - }) - - let persistenceSpanExporter = - try PersistenceSpanExporterDecorator( - spanExporter: mockSpanExporter, - storageURL: temporaryDirectory.url, - exportCondition: { true }, - performancePreset: PersistencePerformancePreset.mockWith( - storagePerformance: StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles, - synchronousWrite: true, - exportPerformance: ExportPerformanceMock.veryQuick)) - - let instrumentationScopeName = "SimpleExporter" - let instrumentationScopeVersion = "semver:0.1.0" - let tracerProviderSDK = TracerProviderSdk() - OpenTelemetry.registerTracerProvider(tracerProvider: tracerProviderSDK) - let tracer = tracerProviderSDK.get(instrumentationName: instrumentationScopeName, instrumentationVersion: instrumentationScopeVersion) as! TracerSdk - - let spanProcessor = SimpleSpanProcessor(spanExporter: persistenceSpanExporter) - tracerProviderSDK.addSpanProcessor(spanProcessor) - - simpleSpan(tracer: tracer) - spanProcessor.shutdown() - - waitForExpectations(timeout: 10, handler: nil) + func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { + return onFlush(explicitTimeout) } - private func simpleSpan(tracer: TracerSdk) { - let span = tracer.spanBuilder(spanName: "SimpleSpan").setSpanKind(spanKind: .client).startSpan() - span.addEvent(name: "My event", timestamp: Date()) - span.end() + func shutdown(explicitTimeout: TimeInterval?) { + onShutdown(explicitTimeout) } + } + + override func setUp() { + super.setUp() + temporaryDirectory.create() + } + + override func tearDown() { + temporaryDirectory.delete() + super.tearDown() + } + + func testWhenExportMetricIsCalled_thenSpansAreExported() throws { + let spansExportExpectation = self.expectation(description: "spans exported") + let exporterShutdownExpectation = self.expectation(description: "exporter shut down") + + let mockSpanExporter = SpanExporterMock(onExport: { spans, _ in + spans.forEach { span in + if span.name == "SimpleSpan", + span.events.count == 1, + span.events.first!.name == "My event" + { + spansExportExpectation.fulfill() + } + } + + return .success + }, onShutdown: { _ in + exporterShutdownExpectation.fulfill() + }) + + let persistenceSpanExporter = + try PersistenceSpanExporterDecorator( + spanExporter: mockSpanExporter, + storageURL: temporaryDirectory.url, + exportCondition: { true }, + performancePreset: PersistencePerformancePreset.mockWith( + storagePerformance: StoragePerformanceMock.writeEachObjectToNewFileAndReadAllFiles, + synchronousWrite: true, + exportPerformance: ExportPerformanceMock.veryQuick)) + + let instrumentationScopeName = "SimpleExporter" + let instrumentationScopeVersion = "semver:0.1.0" + let tracerProviderSDK = TracerProviderSdk() + OpenTelemetry.registerTracerProvider(tracerProvider: tracerProviderSDK) + let tracer = tracerProviderSDK.get(instrumentationName: instrumentationScopeName, instrumentationVersion: instrumentationScopeVersion) as! 
TracerSdk + + let spanProcessor = SimpleSpanProcessor(spanExporter: persistenceSpanExporter) + tracerProviderSDK.addSpanProcessor(spanProcessor) + + simpleSpan(tracer: tracer) + spanProcessor.shutdown() + + waitForExpectations(timeout: 10, handler: nil) + } + + private func simpleSpan(tracer: TracerSdk) { + let span = tracer.spanBuilder(spanName: "SimpleSpan").setSpanKind(spanKind: .client).startSpan() + span.addEvent(name: "My event", timestamp: Date()) + span.end() + } } diff --git a/Tests/OpenTelemetrySdkTests/Logs/BatchLogRecordProcessorTests.swift b/Tests/OpenTelemetrySdkTests/Logs/BatchLogRecordProcessorTests.swift index f8214574..03c10b50 100644 --- a/Tests/OpenTelemetrySdkTests/Logs/BatchLogRecordProcessorTests.swift +++ b/Tests/OpenTelemetrySdkTests/Logs/BatchLogRecordProcessorTests.swift @@ -35,7 +35,7 @@ class BatchLogRecordProcessorTests : XCTestCase { logger.logRecordBuilder().emit() logger.logRecordBuilder().emit() let exported = waitingExporter.waitForExport() - waitingExporter.shutdown(explicitTimeout: nil) + waitingExporter.shutdown() XCTAssertEqual(exported?.count, 6) XCTAssertGreaterThanOrEqual(waitingExporter.exporter.exportCalledTimes, 3) } diff --git a/Tests/OpenTelemetrySdkTests/Logs/Exporters/InMemoryLogExporterTests.swift b/Tests/OpenTelemetrySdkTests/Logs/Exporters/InMemoryLogExporterTests.swift index 096904d0..6bad7d7e 100644 --- a/Tests/OpenTelemetrySdkTests/Logs/Exporters/InMemoryLogExporterTests.swift +++ b/Tests/OpenTelemetrySdkTests/Logs/Exporters/InMemoryLogExporterTests.swift @@ -10,17 +10,17 @@ import XCTest class InMemoryLogExporterTests : XCTestCase { - let exporter = InMemoryLogRecordExporter() + let exporter = InMemoryLogRecordExporter() + + func testInMemoryExporter() { + XCTAssertEqual(exporter.export(logRecords: [ReadableLogRecord(resource: Resource(), instrumentationScopeInfo: InstrumentationScopeInfo(name:"default"), timestamp: Date(), attributes: [String:AttributeValue]())]), .success) + XCTAssertEqual(exporter.getFinishedLogRecords().count, 1) + XCTAssertEqual(exporter.forceFlush(), .success) - func testInMemoryExporter() { - XCTAssertEqual(exporter.export(logRecords: [ReadableLogRecord(resource: Resource(), instrumentationScopeInfo: InstrumentationScopeInfo(name:"default"), timestamp: Date(), attributes: [String:AttributeValue]())]), .success) - XCTAssertEqual(exporter.getFinishedLogRecords().count, 1) - XCTAssertEqual(exporter.forceFlush(), .success) - - exporter.shutdown() - - XCTAssertEqual(exporter.getFinishedLogRecords().count, 0) - XCTAssertEqual(exporter.export(logRecords: [ReadableLogRecord(resource: Resource(), instrumentationScopeInfo: InstrumentationScopeInfo(name:"default"), timestamp: Date(), attributes:[String:AttributeValue]())]), .failure) - XCTAssertEqual(exporter.forceFlush(), .failure) - } + exporter.shutdown() + + XCTAssertEqual(exporter.getFinishedLogRecords().count, 0) + XCTAssertEqual(exporter.export(logRecords: [ReadableLogRecord(resource: Resource(), instrumentationScopeInfo: InstrumentationScopeInfo(name:"default"), timestamp: Date(), attributes:[String:AttributeValue]())]), .failure) + XCTAssertEqual(exporter.forceFlush(), .failure) + } } diff --git a/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordExporterMock.swift b/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordExporterMock.swift index 11ec7c01..bb66cdaf 100644 --- a/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordExporterMock.swift +++ b/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordExporterMock.swift @@ -7,28 +7,28 @@ import Foundation import 
OpenTelemetrySdk class LogRecordExporterMock : LogRecordExporter { - var exportCalledTimes : Int = 0 - var exportCalledData : [ReadableLogRecord]? - - var shutdownCalledTimes : Int = 0 - - var forceFlushCalledTimes : Int = 0 - var returnValue: ExportResult = .success - - func export(logRecords: [OpenTelemetrySdk.ReadableLogRecord], explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { - exportCalledTimes += 1 - exportCalledData = logRecords - return returnValue - } - + var exportCalledTimes : Int = 0 + var exportCalledData : [ReadableLogRecord]? + + var shutdownCalledTimes : Int = 0 + + var forceFlushCalledTimes : Int = 0 + var returnValue: ExportResult = .success + + func export(logRecords: [OpenTelemetrySdk.ReadableLogRecord], explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { + exportCalledTimes += 1 + exportCalledData = logRecords + return returnValue + } + func shutdown(explicitTimeout: TimeInterval?) { - shutdownCalledTimes += 1 - } - - func forceFlush(explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { - forceFlushCalledTimes += 1 - return returnValue - } - - + shutdownCalledTimes += 1 + } + + func forceFlush(explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { + forceFlushCalledTimes += 1 + return returnValue + } + + } diff --git a/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordProcessorMock.swift b/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordProcessorMock.swift index 3119af14..c325836b 100644 --- a/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordProcessorMock.swift +++ b/Tests/OpenTelemetrySdkTests/Logs/Mocks/LogRecordProcessorMock.swift @@ -7,29 +7,29 @@ import Foundation import OpenTelemetrySdk class LogRecordProcessorMock : LogRecordProcessor { - var onEmitCalledTimes = 0 - lazy var onEmitCalled : Bool = { self.onEmitCalledTimes > 0 }() - var onEmitCalledLogRecord: ReadableLogRecord? - - var forceFlushCalledTimes = 0 - lazy var forceFlushCalled : Bool = {self.forceFlushCalledTimes > 0}() - - var shutdownCalledTimes = 0 - lazy var shutdownCalled : Bool = {self.shutdownCalledTimes > 0}() - - - func onEmit(logRecord: OpenTelemetrySdk.ReadableLogRecord) { - onEmitCalledTimes += 1 - onEmitCalledLogRecord = logRecord - } - - func forceFlush(explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { - forceFlushCalledTimes += 1 - return .success - } - - func shutdown(explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { - shutdownCalledTimes += 1 - return .success - } + var onEmitCalledTimes = 0 + lazy var onEmitCalled : Bool = { self.onEmitCalledTimes > 0 }() + var onEmitCalledLogRecord: ReadableLogRecord? + + var forceFlushCalledTimes = 0 + lazy var forceFlushCalled : Bool = {self.forceFlushCalledTimes > 0}() + + var shutdownCalledTimes = 0 + lazy var shutdownCalled : Bool = {self.shutdownCalledTimes > 0}() + + + func onEmit(logRecord: OpenTelemetrySdk.ReadableLogRecord) { + onEmitCalledTimes += 1 + onEmitCalledLogRecord = logRecord + } + + func forceFlush(explicitTimeout: TimeInterval?) -> OpenTelemetrySdk.ExportResult { + forceFlushCalledTimes += 1 + return .success + } + + func shutdown(explicitTimeout: TimeInterval?) 
-> OpenTelemetrySdk.ExportResult { + shutdownCalledTimes += 1 + return .success + } } diff --git a/Tests/OpenTelemetrySdkTests/Logs/MultiLogRecordProcessorTests.swift b/Tests/OpenTelemetrySdkTests/Logs/MultiLogRecordProcessorTests.swift index f52d09da..008008db 100644 --- a/Tests/OpenTelemetrySdkTests/Logs/MultiLogRecordProcessorTests.swift +++ b/Tests/OpenTelemetrySdkTests/Logs/MultiLogRecordProcessorTests.swift @@ -1,7 +1,7 @@ // // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 -// +// import Foundation import OpenTelemetryApi @@ -9,34 +9,34 @@ import OpenTelemetryApi import XCTest class MultiLogRecordProcessorTest : XCTestCase { - var processor1 = LogRecordProcessorMock() - var processor2 = LogRecordProcessorMock() - var readableLogRecord = ReadableLogRecord(resource: Resource(), instrumentationScopeInfo: InstrumentationScopeInfo(name: "Test"), timestamp: Date(), attributes: [String : AttributeValue]()) - - func testEmpty() { - let multiLog = MultiLogRecordProcessor(logRecordProcessors: [LogRecordProcessor]()) - multiLog.onEmit(logRecord: readableLogRecord) - _ = multiLog.shutdown(explicitTimeout: nil) - } - - func testMultiProcessor() { - let multiLog = MultiLogRecordProcessor(logRecordProcessors: [processor1, processor2]) - multiLog.onEmit(logRecord: readableLogRecord) - - XCTAssertTrue(processor1.onEmitCalled) - XCTAssertTrue(processor2.onEmitCalled) - - _ = multiLog.forceFlush(explicitTimeout: nil) - - XCTAssertTrue(processor1.forceFlushCalled) - XCTAssertTrue(processor2.forceFlushCalled) - - _ = multiLog.shutdown(explicitTimeout: nil) - - XCTAssertTrue(processor1.shutdownCalled) - XCTAssertTrue(processor2.shutdownCalled) - - - - } + var processor1 = LogRecordProcessorMock() + var processor2 = LogRecordProcessorMock() + var readableLogRecord = ReadableLogRecord(resource: Resource(), instrumentationScopeInfo: InstrumentationScopeInfo(name: "Test"), timestamp: Date(), attributes: [String : AttributeValue]()) + + func testEmpty() { + let multiLog = MultiLogRecordProcessor(logRecordProcessors: [LogRecordProcessor]()) + multiLog.onEmit(logRecord: readableLogRecord) + _ = multiLog.shutdown(explicitTimeout: nil) + } + + func testMultiProcessor() { + let multiLog = MultiLogRecordProcessor(logRecordProcessors: [processor1, processor2]) + multiLog.onEmit(logRecord: readableLogRecord) + + XCTAssertTrue(processor1.onEmitCalled) + XCTAssertTrue(processor2.onEmitCalled) + + _ = multiLog.forceFlush(explicitTimeout: nil) + + XCTAssertTrue(processor1.forceFlushCalled) + XCTAssertTrue(processor2.forceFlushCalled) + + _ = multiLog.shutdown(explicitTimeout: nil) + + XCTAssertTrue(processor1.shutdownCalled) + XCTAssertTrue(processor2.shutdownCalled) + + + + } } diff --git a/Tests/OpenTelemetrySdkTests/Trace/Export/BatchSpansProcessorTests.swift b/Tests/OpenTelemetrySdkTests/Trace/Export/BatchSpansProcessorTests.swift index 7f63ce05..fbbdb010 100644 --- a/Tests/OpenTelemetrySdkTests/Trace/Export/BatchSpansProcessorTests.swift +++ b/Tests/OpenTelemetrySdkTests/Trace/Export/BatchSpansProcessorTests.swift @@ -8,274 +8,274 @@ import OpenTelemetryApi import XCTest class BatchSpansProcessorTests: XCTestCase { - let spanName1 = "MySpanName/1" - let spanName2 = "MySpanName/2" - let maxScheduleDelay = 0.5 - let tracerSdkFactory = TracerProviderSdk() - var tracer: Tracer! 
- let blockingSpanExporter = BlockingSpanExporter() - var mockServiceHandler = SpanExporterMock() - - override func setUp() { - tracer = tracerSdkFactory.get(instrumentationName: "BatchSpansProcessorTest") + let spanName1 = "MySpanName/1" + let spanName2 = "MySpanName/2" + let maxScheduleDelay = 0.5 + let tracerSdkFactory = TracerProviderSdk() + var tracer: Tracer! + let blockingSpanExporter = BlockingSpanExporter() + var mockServiceHandler = SpanExporterMock() + + override func setUp() { + tracer = tracerSdkFactory.get(instrumentationName: "BatchSpansProcessorTest") + } + + override func tearDown() { + tracerSdkFactory.shutdown() + } + + @discardableResult private func createSampledEndedSpan(spanName: String) -> ReadableSpan { + let span = TestUtils.createSpanWithSampler(tracerSdkFactory: tracerSdkFactory, + tracer: tracer, + spanName: spanName, + sampler: Samplers.alwaysOn) + .startSpan() as! ReadableSpan + span.end() + return span + } + + private func createNotSampledEndedSpan(spanName: String) { + TestUtils.createSpanWithSampler(tracerSdkFactory: tracerSdkFactory, + tracer: tracer, + spanName: spanName, + sampler: Samplers.alwaysOff) + .startSpan() + .end() + } + + func testStartEndRequirements() { + let spansProcessor = BatchSpanProcessor(spanExporter: WaitingSpanExporter(numberToWaitFor: 0)) + XCTAssertFalse(spansProcessor.isStartRequired) + XCTAssertTrue(spansProcessor.isEndRequired) + } + + func testExportDifferentSampledSpans() { + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 2) + + tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: maxScheduleDelay)) + let span1 = createSampledEndedSpan(spanName: spanName1) + let span2 = createSampledEndedSpan(spanName: spanName2) + let exported = waitingSpanExporter.waitForExport() + + XCTAssertEqual(exported, [span1.toSpanData(), span2.toSpanData()]) + } + + func testExportMoreSpansThanTheBufferSize() { + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 6) + + tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: maxScheduleDelay, maxQueueSize: 6, maxExportBatchSize: 2)) + + let span1 = createSampledEndedSpan(spanName: spanName1) + let span2 = createSampledEndedSpan(spanName: spanName1) + let span3 = createSampledEndedSpan(spanName: spanName1) + let span4 = createSampledEndedSpan(spanName: spanName1) + let span5 = createSampledEndedSpan(spanName: spanName1) + let span6 = createSampledEndedSpan(spanName: spanName1) + let exported = waitingSpanExporter.waitForExport() + XCTAssertEqual(exported, [span1.toSpanData(), + span2.toSpanData(), + span3.toSpanData(), + span4.toSpanData(), + span5.toSpanData(), + span6.toSpanData()]) + } + + func testForceExport() { + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 1) + let batchSpansProcessor = BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: 10, maxQueueSize: 10000, maxExportBatchSize: 2000) + tracerSdkFactory.addSpanProcessor(batchSpansProcessor) + + for _ in 0 ..< 100 { + createSampledEndedSpan(spanName: "notExported") } - - override func tearDown() { - tracerSdkFactory.shutdown() + batchSpansProcessor.forceFlush() + let exported = waitingSpanExporter.waitForExport() + XCTAssertEqual(exported?.count, 100) + } + + func testExportSpansToMultipleServices() { + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 2) + let waitingSpanExporter2 = WaitingSpanExporter(numberToWaitFor: 2) + 
tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: MultiSpanExporter(spanExporters: [waitingSpanExporter, waitingSpanExporter2]), scheduleDelay: maxScheduleDelay)) + + let span1 = createSampledEndedSpan(spanName: spanName1) + let span2 = createSampledEndedSpan(spanName: spanName2) + let exported1 = waitingSpanExporter.waitForExport() + let exported2 = waitingSpanExporter2.waitForExport() + XCTAssertEqual(exported1, [span1.toSpanData(), span2.toSpanData()]) + XCTAssertEqual(exported2, [span1.toSpanData(), span2.toSpanData()]) + } + + func testExportMoreSpansThanTheMaximumLimit() { + let maxQueuedSpans = 8 + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: maxQueuedSpans) + + tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: MultiSpanExporter(spanExporters: [waitingSpanExporter, blockingSpanExporter]), scheduleDelay: maxScheduleDelay, maxQueueSize: maxQueuedSpans, maxExportBatchSize: maxQueuedSpans / 2)) + + var spansToExport = [SpanData]() + // Wait to block the worker thread in the BatchSampledSpansProcessor. This ensures that no items + // can be removed from the queue. Need to add a span to trigger the export otherwise the + // pipeline is never called. + spansToExport.append(createSampledEndedSpan(spanName: "blocking_span").toSpanData()) + blockingSpanExporter.waitUntilIsBlocked() + + for i in 0 ..< maxQueuedSpans { + // First export maxQueuedSpans, the worker thread is blocked so all items should be queued. + spansToExport.append(createSampledEndedSpan(spanName: "span_1_\(i)").toSpanData()) } - - @discardableResult private func createSampledEndedSpan(spanName: String) -> ReadableSpan { - let span = TestUtils.createSpanWithSampler(tracerSdkFactory: tracerSdkFactory, - tracer: tracer, - spanName: spanName, - sampler: Samplers.alwaysOn) - .startSpan() as! ReadableSpan - span.end() - return span - } - - private func createNotSampledEndedSpan(spanName: String) { - TestUtils.createSpanWithSampler(tracerSdkFactory: tracerSdkFactory, - tracer: tracer, - spanName: spanName, - sampler: Samplers.alwaysOff) - .startSpan() - .end() + + // TODO: assertThat(spanExporter.getReferencedSpans()).isEqualTo(maxQueuedSpans); + + // Now we should start dropping. 
+ for i in 0 ..< 7 { + createSampledEndedSpan(spanName: "span_2_\(i)") + // TODO: assertThat(getDroppedSpans()).isEqualTo(i + 1); } - - func testStartEndRequirements() { - let spansProcessor = BatchSpanProcessor(spanExporter: WaitingSpanExporter(numberToWaitFor: 0)) - XCTAssertFalse(spansProcessor.isStartRequired) - XCTAssertTrue(spansProcessor.isEndRequired) - } - - func testExportDifferentSampledSpans() { - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 2) - - tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: maxScheduleDelay)) - let span1 = createSampledEndedSpan(spanName: spanName1) - let span2 = createSampledEndedSpan(spanName: spanName2) - let exported = waitingSpanExporter.waitForExport() - - XCTAssertEqual(exported, [span1.toSpanData(), span2.toSpanData()]) - } - - func testExportMoreSpansThanTheBufferSize() { - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 6) - - tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: maxScheduleDelay, maxQueueSize: 6, maxExportBatchSize: 2)) - - let span1 = createSampledEndedSpan(spanName: spanName1) - let span2 = createSampledEndedSpan(spanName: spanName1) - let span3 = createSampledEndedSpan(spanName: spanName1) - let span4 = createSampledEndedSpan(spanName: spanName1) - let span5 = createSampledEndedSpan(spanName: spanName1) - let span6 = createSampledEndedSpan(spanName: spanName1) - let exported = waitingSpanExporter.waitForExport() - XCTAssertEqual(exported, [span1.toSpanData(), - span2.toSpanData(), - span3.toSpanData(), - span4.toSpanData(), - span5.toSpanData(), - span6.toSpanData()]) - } - - func testForceExport() { - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 1) - let batchSpansProcessor = BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: 10, maxQueueSize: 10000, maxExportBatchSize: 2000) - tracerSdkFactory.addSpanProcessor(batchSpansProcessor) - - for _ in 0 ..< 100 { - createSampledEndedSpan(spanName: "notExported") - } - batchSpansProcessor.forceFlush() - let exported = waitingSpanExporter.waitForExport() - XCTAssertEqual(exported?.count, 100) - } - - func testExportSpansToMultipleServices() { - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 2) - let waitingSpanExporter2 = WaitingSpanExporter(numberToWaitFor: 2) - tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: MultiSpanExporter(spanExporters: [waitingSpanExporter, waitingSpanExporter2]), scheduleDelay: maxScheduleDelay)) - - let span1 = createSampledEndedSpan(spanName: spanName1) - let span2 = createSampledEndedSpan(spanName: spanName2) - let exported1 = waitingSpanExporter.waitForExport() - let exported2 = waitingSpanExporter2.waitForExport() - XCTAssertEqual(exported1, [span1.toSpanData(), span2.toSpanData()]) - XCTAssertEqual(exported2, [span1.toSpanData(), span2.toSpanData()]) - } - - func testExportMoreSpansThanTheMaximumLimit() { - let maxQueuedSpans = 8 - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: maxQueuedSpans) - - tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: MultiSpanExporter(spanExporters: [waitingSpanExporter, blockingSpanExporter]), scheduleDelay: maxScheduleDelay, maxQueueSize: maxQueuedSpans, maxExportBatchSize: maxQueuedSpans / 2)) - - var spansToExport = [SpanData]() - // Wait to block the worker thread in the BatchSampledSpansProcessor. This ensures that no items - // can be removed from the queue. 
Need to add a span to trigger the export otherwise the - // pipeline is never called. - spansToExport.append(createSampledEndedSpan(spanName: "blocking_span").toSpanData()) - blockingSpanExporter.waitUntilIsBlocked() - - for i in 0 ..< maxQueuedSpans { - // First export maxQueuedSpans, the worker thread is blocked so all items should be queued. - spansToExport.append(createSampledEndedSpan(spanName: "span_1_\(i)").toSpanData()) - } - - // TODO: assertThat(spanExporter.getReferencedSpans()).isEqualTo(maxQueuedSpans); - - // Now we should start dropping. - for i in 0 ..< 7 { - createSampledEndedSpan(spanName: "span_2_\(i)") - // TODO: assertThat(getDroppedSpans()).isEqualTo(i + 1); - } - - // TODO: assertThat(getReferencedSpans()).isEqualTo(maxQueuedSpans); - - // Release the blocking exporter - blockingSpanExporter.unblock() - - // While we wait for maxQueuedSpans we ensure that the queue is also empty after this. - var exported = waitingSpanExporter.waitForExport() - XCTAssertEqual(exported, spansToExport) - - exported?.removeAll() - spansToExport.removeAll() - - // We cannot compare with maxReferencedSpans here because the worker thread may get - // unscheduled immediately after exporting, but before updating the pushed spans, if that is - // the case at most bufferSize spans will miss. - // TODO: assertThat(getPushedSpans()).isAtLeast((long) maxQueuedSpans - maxBatchSize); - - for i in 0 ..< maxQueuedSpans { - spansToExport.append(createSampledEndedSpan(spanName: "span_3_\(i)").toSpanData()) - // No more dropped spans. - // TODO: assertThat(getDroppedSpans()).isEqualTo(7); - } - - exported = waitingSpanExporter.waitForExport() - XCTAssertEqual(exported, spansToExport) - } - - func testExportNotSampledSpans() { - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 1) - - tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: maxScheduleDelay)) - - createNotSampledEndedSpan(spanName: spanName1) - createNotSampledEndedSpan(spanName: spanName2) - let span2 = createSampledEndedSpan(spanName: spanName2) - // Spans are recorded and exported in the same order as they are ended, we test that a non - // sampled span is not exported by creating and ending a sampled span after a non sampled span - // and checking that the first exported span is the sampled span (the non sampled did not get - // exported). - let exported = waitingSpanExporter.waitForExport() - // Need to check this because otherwise the variable span1 is unused, other option is to not - // have a span1 variable. - XCTAssertEqual(exported, [span2.toSpanData()]) - } - - func testShutdownFlushes() { - let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 1) - - // Set the export delay to zero, for no timeout, in order to confirm the #flush() below works - tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: 0)) - - let span2 = createSampledEndedSpan(spanName: spanName2) - - // Force a shutdown, without this, the waitForExport() call below would block indefinitely. - tracerSdkFactory.shutdown() - - let exported = waitingSpanExporter.waitForExport() - XCTAssertEqual(exported, [span2.toSpanData()]) - XCTAssertTrue(waitingSpanExporter.shutdownCalled) + + // TODO: assertThat(getReferencedSpans()).isEqualTo(maxQueuedSpans); + + // Release the blocking exporter + blockingSpanExporter.unblock() + + // While we wait for maxQueuedSpans we ensure that the queue is also empty after this. 
+ var exported = waitingSpanExporter.waitForExport() + XCTAssertEqual(exported, spansToExport) + + exported?.removeAll() + spansToExport.removeAll() + + // We cannot compare with maxReferencedSpans here because the worker thread may get + // unscheduled immediately after exporting, but before updating the pushed spans, if that is + // the case at most bufferSize spans will miss. + // TODO: assertThat(getPushedSpans()).isAtLeast((long) maxQueuedSpans - maxBatchSize); + + for i in 0 ..< maxQueuedSpans { + spansToExport.append(createSampledEndedSpan(spanName: "span_3_\(i)").toSpanData()) + // No more dropped spans. + // TODO: assertThat(getDroppedSpans()).isEqualTo(7); } + + exported = waitingSpanExporter.waitForExport() + XCTAssertEqual(exported, spansToExport) + } + + func testExportNotSampledSpans() { + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 1) + + tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: maxScheduleDelay)) + + createNotSampledEndedSpan(spanName: spanName1) + createNotSampledEndedSpan(spanName: spanName2) + let span2 = createSampledEndedSpan(spanName: spanName2) + // Spans are recorded and exported in the same order as they are ended, we test that a non + // sampled span is not exported by creating and ending a sampled span after a non sampled span + // and checking that the first exported span is the sampled span (the non sampled did not get + // exported). + let exported = waitingSpanExporter.waitForExport() + // Need to check this because otherwise the variable span1 is unused, other option is to not + // have a span1 variable. + XCTAssertEqual(exported, [span2.toSpanData()]) + } + + func testShutdownFlushes() { + let waitingSpanExporter = WaitingSpanExporter(numberToWaitFor: 1) + + // Set the export delay to zero, for no timeout, in order to confirm the #flush() below works + tracerSdkFactory.addSpanProcessor(BatchSpanProcessor(spanExporter: waitingSpanExporter, scheduleDelay: 0)) + + let span2 = createSampledEndedSpan(spanName: spanName2) + + // Force a shutdown, without this, the waitForExport() call below would block indefinitely. + tracerSdkFactory.shutdown() + + let exported = waitingSpanExporter.waitForExport() + XCTAssertEqual(exported, [span2.toSpanData()]) + XCTAssertTrue(waitingSpanExporter.shutdownCalled) + } } class BlockingSpanExporter: SpanExporter { - let cond = NSCondition() - - enum State { - case waitToBlock - case blocked - case unblocked - } - - var state: State = .waitToBlock - + let cond = NSCondition() + + enum State { + case waitToBlock + case blocked + case unblocked + } + + var state: State = .waitToBlock + func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - cond.lock() - while state != .unblocked { - state = .blocked - // Some threads may wait for Blocked State. - cond.broadcast() - cond.wait() - } - cond.unlock() - return .success + cond.lock() + while state != .unblocked { + state = .blocked + // Some threads may wait for Blocked State. + cond.broadcast() + cond.wait() } - - func waitUntilIsBlocked() { - cond.lock() - while state != .blocked { - cond.wait() - } - cond.unlock() + cond.unlock() + return .success + } + + func waitUntilIsBlocked() { + cond.lock() + while state != .blocked { + cond.wait() } - + cond.unlock() + } + func flush(explicitTimeout:TimeInterval?) -> SpanExporterResultCode { - return .success - } - + return .success + } + func shutdown(explicitTimeout: TimeInterval?) 
{} - - fileprivate func unblock() { - cond.lock() - state = .unblocked - cond.unlock() - cond.broadcast() - } + + fileprivate func unblock() { + cond.lock() + state = .unblocked + cond.unlock() + cond.broadcast() + } } class WaitingSpanExporter: SpanExporter { - var spanDataList = [SpanData]() - let cond = NSCondition() - let numberToWaitFor: Int - var shutdownCalled = false - - init(numberToWaitFor: Int) { - self.numberToWaitFor = numberToWaitFor - } - - func waitForExport() -> [SpanData]? { - var ret: [SpanData] - cond.lock() - defer { cond.unlock() } - - while spanDataList.count < numberToWaitFor { - cond.wait() - } - ret = spanDataList - spanDataList.removeAll() - - return ret + var spanDataList = [SpanData]() + let cond = NSCondition() + let numberToWaitFor: Int + var shutdownCalled = false + + init(numberToWaitFor: Int) { + self.numberToWaitFor = numberToWaitFor + } + + func waitForExport() -> [SpanData]? { + var ret: [SpanData] + cond.lock() + defer { cond.unlock() } + + while spanDataList.count < numberToWaitFor { + cond.wait() } - + ret = spanDataList + spanDataList.removeAll() + + return ret + } + func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode { - cond.lock() - spanDataList.append(contentsOf: spans) - cond.unlock() - cond.broadcast() - return .success - } - + cond.lock() + spanDataList.append(contentsOf: spans) + cond.unlock() + cond.broadcast() + return .success + } + func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - return .success - } - + return .success + } + func shutdown(explicitTimeout: TimeInterval?) { - shutdownCalled = true - } + shutdownCalled = true + } } diff --git a/Tests/OpenTelemetrySdkTests/Trace/Export/MultiSpanExporterTests.swift b/Tests/OpenTelemetrySdkTests/Trace/Export/MultiSpanExporterTests.swift index fdede148..04779831 100644 --- a/Tests/OpenTelemetrySdkTests/Trace/Export/MultiSpanExporterTests.swift +++ b/Tests/OpenTelemetrySdkTests/Trace/Export/MultiSpanExporterTests.swift @@ -7,54 +7,54 @@ import OpenTelemetrySdk import XCTest class MultiSpanExporterTests: XCTestCase { - var spanExporter1: SpanExporterMock! - var spanExporter2: SpanExporterMock! - var spanList: [SpanData]! 
- - override func setUp() { - spanExporter1 = SpanExporterMock() - spanExporter2 = SpanExporterMock() - spanList = [TestUtils.makeBasicSpan()] - } - - func testEmpty() { - let multiSpanExporter = MultiSpanExporter(spanExporters: [SpanExporter]()) - _ = multiSpanExporter.export(spans: spanList) - multiSpanExporter.shutdown() - } - - func testOneSpanExporter() { - let multiSpanExporter = MultiSpanExporter(spanExporters: [spanExporter1]) - spanExporter1.returnValue = .success - XCTAssertEqual(multiSpanExporter.export(spans: spanList), SpanExporterResultCode.success) - XCTAssertEqual(spanExporter1.exportCalledTimes, 1) - XCTAssertEqual(spanExporter1.exportCalledData, spanList) - multiSpanExporter.shutdown() - XCTAssertEqual(spanExporter1.shutdownCalledTimes, 1) - } - - func testTwoSpanExporter() { - let multiSpanExporter = MultiSpanExporter(spanExporters: [spanExporter1, spanExporter2]) - spanExporter1.returnValue = .success - spanExporter2.returnValue = .success - XCTAssertEqual(multiSpanExporter.export(spans: spanList), SpanExporterResultCode.success) - XCTAssertEqual(spanExporter1.exportCalledTimes, 1) - XCTAssertEqual(spanExporter1.exportCalledData, spanList) - XCTAssertEqual(spanExporter2.exportCalledTimes, 1) - XCTAssertEqual(spanExporter2.exportCalledData, spanList) - multiSpanExporter.shutdown() - XCTAssertEqual(spanExporter1.shutdownCalledTimes, 1) - XCTAssertEqual(spanExporter2.shutdownCalledTimes, 1) - } - - func testTwoSpanExporter_OneReturnFailure() { - let multiSpanExporter = MultiSpanExporter(spanExporters: [spanExporter1, spanExporter2]) - spanExporter1.returnValue = .success - spanExporter2.returnValue = .failure - XCTAssertEqual(multiSpanExporter.export(spans: spanList), SpanExporterResultCode.failure) - XCTAssertEqual(spanExporter1.exportCalledTimes, 1) - XCTAssertEqual(spanExporter1.exportCalledData, spanList) - XCTAssertEqual(spanExporter2.exportCalledTimes, 1) - XCTAssertEqual(spanExporter2.exportCalledData, spanList) - } + var spanExporter1: SpanExporterMock! + var spanExporter2: SpanExporterMock! + var spanList: [SpanData]! 
+ + override func setUp() { + spanExporter1 = SpanExporterMock() + spanExporter2 = SpanExporterMock() + spanList = [TestUtils.makeBasicSpan()] + } + + func testEmpty() { + let multiSpanExporter = MultiSpanExporter(spanExporters: [SpanExporter]()) + _ = multiSpanExporter.export(spans: spanList) + multiSpanExporter.shutdown() + } + + func testOneSpanExporter() { + let multiSpanExporter = MultiSpanExporter(spanExporters: [spanExporter1]) + spanExporter1.returnValue = .success + XCTAssertEqual(multiSpanExporter.export(spans: spanList), SpanExporterResultCode.success) + XCTAssertEqual(spanExporter1.exportCalledTimes, 1) + XCTAssertEqual(spanExporter1.exportCalledData, spanList) + multiSpanExporter.shutdown() + XCTAssertEqual(spanExporter1.shutdownCalledTimes, 1) + } + + func testTwoSpanExporter() { + let multiSpanExporter = MultiSpanExporter(spanExporters: [spanExporter1, spanExporter2]) + spanExporter1.returnValue = .success + spanExporter2.returnValue = .success + XCTAssertEqual(multiSpanExporter.export(spans: spanList), SpanExporterResultCode.success) + XCTAssertEqual(spanExporter1.exportCalledTimes, 1) + XCTAssertEqual(spanExporter1.exportCalledData, spanList) + XCTAssertEqual(spanExporter2.exportCalledTimes, 1) + XCTAssertEqual(spanExporter2.exportCalledData, spanList) + multiSpanExporter.shutdown() + XCTAssertEqual(spanExporter1.shutdownCalledTimes, 1) + XCTAssertEqual(spanExporter2.shutdownCalledTimes, 1) + } + + func testTwoSpanExporter_OneReturnFailure() { + let multiSpanExporter = MultiSpanExporter(spanExporters: [spanExporter1, spanExporter2]) + spanExporter1.returnValue = .success + spanExporter2.returnValue = .failure + XCTAssertEqual(multiSpanExporter.export(spans: spanList), SpanExporterResultCode.failure) + XCTAssertEqual(spanExporter1.exportCalledTimes, 1) + XCTAssertEqual(spanExporter1.exportCalledData, spanList) + XCTAssertEqual(spanExporter2.exportCalledTimes, 1) + XCTAssertEqual(spanExporter2.exportCalledData, spanList) + } } diff --git a/Tests/OpenTelemetrySdkTests/Trace/Mocks/ReadableSpanMock.swift b/Tests/OpenTelemetrySdkTests/Trace/Mocks/ReadableSpanMock.swift index 8915446e..70aac329 100644 --- a/Tests/OpenTelemetrySdkTests/Trace/Mocks/ReadableSpanMock.swift +++ b/Tests/OpenTelemetrySdkTests/Trace/Mocks/ReadableSpanMock.swift @@ -8,59 +8,59 @@ import OpenTelemetryApi @testable import OpenTelemetrySdk class ReadableSpanMock: ReadableSpan { - var hasEnded: Bool = false - var latency: TimeInterval = 0 - - var kind: SpanKind { - return .client - } - - var instrumentationScopeInfo = InstrumentationScopeInfo() - - var name: String = "ReadableSpanMock" - - var forcedReturnSpanContext: SpanContext? - var forcedReturnSpanData: SpanData? - - func end() { - OpenTelemetry.instance.contextProvider.removeContextForSpan(self) - } - - func end(time: Date) { end() } - - func toSpanData() -> SpanData { - return forcedReturnSpanData ?? SpanData(traceId: context.traceId, - spanId: context.spanId, - traceFlags: context.traceFlags, - traceState: TraceState(), - resource: Resource(attributes: [String: AttributeValue]()), - instrumentationScope: InstrumentationScopeInfo(), - name: "ReadableSpanMock", - kind: .client, - startTime: Date(timeIntervalSinceReferenceDate: 0), - endTime: Date(timeIntervalSinceReferenceDate: 0), - hasRemoteParent: false) - } - - var context: SpanContext { - forcedReturnSpanContext ?? 
SpanContext.create(traceId: TraceId.random(), spanId: SpanId.random(), traceFlags: TraceFlags(), traceState: TraceState()) - } - - var isRecording: Bool = false - - var status: Status = .unset - - func updateName(name: String) {} - - func setAttribute(key: String, value: AttributeValue?) {} - - func addEvent(name: String) {} - - func addEvent(name: String, attributes: [String: AttributeValue]) {} - - func addEvent(name: String, timestamp: Date) {} - - func addEvent(name: String, attributes: [String: AttributeValue], timestamp: Date) {} - - var description: String = "ReadableSpanMock" + var hasEnded: Bool = false + var latency: TimeInterval = 0 + + var kind: SpanKind { + return .client + } + + var instrumentationScopeInfo = InstrumentationScopeInfo() + + var name: String = "ReadableSpanMock" + + var forcedReturnSpanContext: SpanContext? + var forcedReturnSpanData: SpanData? + + func end() { + OpenTelemetry.instance.contextProvider.removeContextForSpan(self) + } + + func end(time: Date) { end() } + + func toSpanData() -> SpanData { + return forcedReturnSpanData ?? SpanData(traceId: context.traceId, + spanId: context.spanId, + traceFlags: context.traceFlags, + traceState: TraceState(), + resource: Resource(attributes: [String: AttributeValue]()), + instrumentationScope: InstrumentationScopeInfo(), + name: "ReadableSpanMock", + kind: .client, + startTime: Date(timeIntervalSinceReferenceDate: 0), + endTime: Date(timeIntervalSinceReferenceDate: 0), + hasRemoteParent: false) + } + + var context: SpanContext { + forcedReturnSpanContext ?? SpanContext.create(traceId: TraceId.random(), spanId: SpanId.random(), traceFlags: TraceFlags(), traceState: TraceState()) + } + + var isRecording: Bool = false + + var status: Status = .unset + + func updateName(name: String) {} + + func setAttribute(key: String, value: AttributeValue?) {} + + func addEvent(name: String) {} + + func addEvent(name: String, attributes: [String: AttributeValue]) {} + + func addEvent(name: String, timestamp: Date) {} + + func addEvent(name: String, attributes: [String: AttributeValue], timestamp: Date) {} + + var description: String = "ReadableSpanMock" } diff --git a/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanExporterMock.swift b/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanExporterMock.swift index 88c8f8af..cd58a1b2 100644 --- a/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanExporterMock.swift +++ b/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanExporterMock.swift @@ -7,25 +7,25 @@ import Foundation import OpenTelemetrySdk class SpanExporterMock: SpanExporter { - - var exportCalledTimes: Int = 0 - var exportCalledData: [SpanData]? - var shutdownCalledTimes: Int = 0 - var flushCalled: Bool = false - var returnValue: SpanExporterResultCode = .success - - func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - exportCalledTimes += 1 - exportCalledData = spans - return returnValue - } - - func flush(explicitTimeout: TimeInterval?) -> SpanExporterResultCode { - flushCalled = true - return returnValue - } - - func shutdown(explicitTimeout: TimeInterval?) { - shutdownCalledTimes += 1 - } + + var exportCalledTimes: Int = 0 + var exportCalledData: [SpanData]? + var shutdownCalledTimes: Int = 0 + var flushCalled: Bool = false + var returnValue: SpanExporterResultCode = .success + + func export(spans: [SpanData], explicitTimeout: TimeInterval?) -> SpanExporterResultCode { + exportCalledTimes += 1 + exportCalledData = spans + return returnValue + } + + func flush(explicitTimeout: TimeInterval?) 
-> SpanExporterResultCode { + flushCalled = true + return returnValue + } + + func shutdown(explicitTimeout: TimeInterval?) { + shutdownCalledTimes += 1 + } } diff --git a/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanProcessorMock.swift b/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanProcessorMock.swift index 74c11ee4..ecdd44c7 100644 --- a/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanProcessorMock.swift +++ b/Tests/OpenTelemetrySdkTests/Trace/Mocks/SpanProcessorMock.swift @@ -8,35 +8,35 @@ import OpenTelemetrySdk import OpenTelemetryApi class SpanProcessorMock: SpanProcessor { - var onStartCalledTimes = 0 - lazy var onStartCalled: Bool = { self.onStartCalledTimes > 0 }() - var onStartCalledSpan: ReadableSpan? - var onEndCalledTimes = 0 - lazy var onEndCalled: Bool = { self.onEndCalledTimes > 0 }() - var onEndCalledSpan: ReadableSpan? - var shutdownCalledTimes = 0 - lazy var shutdownCalled: Bool = { self.shutdownCalledTimes > 0 }() - var forceFlushCalledTimes = 0 - lazy var forceFlushCalled: Bool = { self.forceFlushCalledTimes > 0 }() - - var isStartRequired = true - var isEndRequired = true - - func onStart(parentContext: SpanContext?, span: ReadableSpan) { - onStartCalledTimes += 1 - onStartCalledSpan = span - } - - func onEnd(span: ReadableSpan) { - onEndCalledTimes += 1 - onEndCalledSpan = span - } - - func shutdown(explicitTimeout: TimeInterval?) { - shutdownCalledTimes += 1 - } - - func forceFlush(timeout: TimeInterval? = nil) { - forceFlushCalledTimes += 1 - } + var onStartCalledTimes = 0 + lazy var onStartCalled: Bool = { self.onStartCalledTimes > 0 }() + var onStartCalledSpan: ReadableSpan? + var onEndCalledTimes = 0 + lazy var onEndCalled: Bool = { self.onEndCalledTimes > 0 }() + var onEndCalledSpan: ReadableSpan? + var shutdownCalledTimes = 0 + lazy var shutdownCalled: Bool = { self.shutdownCalledTimes > 0 }() + var forceFlushCalledTimes = 0 + lazy var forceFlushCalled: Bool = { self.forceFlushCalledTimes > 0 }() + + var isStartRequired = true + var isEndRequired = true + + func onStart(parentContext: SpanContext?, span: ReadableSpan) { + onStartCalledTimes += 1 + onStartCalledSpan = span + } + + func onEnd(span: ReadableSpan) { + onEndCalledTimes += 1 + onEndCalledSpan = span + } + + func shutdown(explicitTimeout: TimeInterval?) { + shutdownCalledTimes += 1 + } + + func forceFlush(timeout: TimeInterval? = nil) { + forceFlushCalledTimes += 1 + } }
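
Note (illustrative, not part of the diff): the mocks and waiting exporters above all adopt the timeout-aware exporter surface, in which export, flush, and shutdown each take an optional explicitTimeout. A minimal sketch of a custom exporter written against that shape, under the assumption that those three requirements are the whole protocol, might look like the following; the ConsoleSpanExporter name and its print-based body are invented here purely for illustration.

import Foundation
import OpenTelemetrySdk

// Sketch of a conforming exporter; not shipped by this patch.
final class ConsoleSpanExporter: SpanExporter {
  private var isShutdown = false

  func export(spans: [SpanData], explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode {
    guard !isShutdown else { return .failure }
    // The timeout is advisory here; a real exporter would bound its work with it.
    spans.forEach { print("exported span: \($0.name)") }
    return .success
  }

  func flush(explicitTimeout: TimeInterval? = nil) -> SpanExporterResultCode {
    return isShutdown ? .failure : .success
  }

  func shutdown(explicitTimeout: TimeInterval? = nil) {
    isShutdown = true
  }
}

Because the new parameters are optional and conforming types may supply a default of nil, existing call sites such as exporter.shutdown() keep compiling; that is the same pattern the waiting exporter in the batch-processor tests above relies on.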