diff --git a/benchmark/benchmark_memory.py b/benchmark/benchmark_memory.py index d335029..d813d72 100644 --- a/benchmark/benchmark_memory.py +++ b/benchmark/benchmark_memory.py @@ -1,15 +1,6 @@ -import unittest -from typing import Sequence - +from benchmark.util import get_logs_data from snowflake.telemetry._internal.exporter.otlp.proto.logs import encode_logs -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.util.instrumentation import InstrumentationScope - -from opentelemetry._logs import SeverityNumber -from opentelemetry.sdk._logs import LogData, LogRecord - -from opentelemetry.trace import TraceFlags """ v0.5.0 @@ -22,103 +13,22 @@ - _encode_resource_logs:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py:88 -> 5.7KiB - _encode_resource_logs:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py:82 -> 3.5KiB - __setitem__:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/opentelemetry/attributes/__init__.py:173 -> 804.0B - """ - """ v0.6.0.dev -📦 Total memory allocated: 6.8KiB +📦 Total memory allocated: 4.9KiB 📏 Total allocations: 18 -📊 Histogram of allocation sizes: |█ ▄ | +📊 Histogram of allocation sizes: |█ ▄| 🥇 Biggest allocating functions: - - __bytes__:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/snowflake/telemetry/serialize/__init__.py:23 -> 1.4KiB - - __setitem__:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/opentelemetry/attributes/__init__.py:173 -> 804.0B - - serialize_bytes:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/snowflake/telemetry/serialize/__init__.py:86 -> 774.0B - - 
serialize_message:/Users/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/snowflake/telemetry/serialize/__init__.py:103 -> 690.0B + - __setitem__:/home/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/opentelemetry/attributes/__init__.py:173 -> 804.0B + - encode_logs:/home/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py:38 -> 723.0B + - serialize_message:/home/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/snowflake/telemetry/_internal/serialize/__init__.py:95 -> 691.0B + - _encode_log:/home/jopel/workspace/snowflake-telemetry-python/.venv/lib/python3.11/site-packages/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py:58 -> 646.0B """ # Run with pytest --memray tests/benchmark_memory.py def test_serialize_logs(): logs_data = get_logs_data() bytes(encode_logs(logs_data)) - -def get_logs_data() -> Sequence[LogData]: - log1 = LogData( - log_record=LogRecord( - timestamp=1644650195189786880, - observed_timestamp=1644660000000000000, - trace_id=89564621134313219400156819398935297684, - span_id=1312458408527513268, - trace_flags=TraceFlags(0x01), - severity_text="WARN", - severity_number=SeverityNumber.WARN, - body="Do not go gentle into that good night. 
Rage, rage against the dying of the light", - resource=Resource( - {"first_resource": "value"}, - "resource_schema_url", - ), - attributes={"a": 1, "b": "c"}, - ), - instrumentation_scope=InstrumentationScope( - "first_name", "first_version" - ), - ) - - log2 = LogData( - log_record=LogRecord( - timestamp=1644650249738562048, - observed_timestamp=1644660000000000000, - trace_id=0, - span_id=0, - trace_flags=TraceFlags.DEFAULT, - severity_text="WARN", - severity_number=SeverityNumber.WARN, - body="Cooper, this is no time for caution!", - resource=Resource({"second_resource": "CASE"}), - attributes={}, - ), - instrumentation_scope=InstrumentationScope( - "second_name", "second_version" - ), - ) - - log3 = LogData( - log_record=LogRecord( - timestamp=1644650427658989056, - observed_timestamp=1644660000000000000, - trace_id=271615924622795969659406376515024083555, - span_id=4242561578944770265, - trace_flags=TraceFlags(0x01), - severity_text="DEBUG", - severity_number=SeverityNumber.DEBUG, - body="To our galaxy", - resource=Resource({"second_resource": "CASE"}), - attributes={"a": 1, "b": "c"}, - ), - instrumentation_scope=None, - ) - - log4 = LogData( - log_record=LogRecord( - timestamp=1644650584292683008, - observed_timestamp=1644660000000000000, - trace_id=212592107417388365804938480559624925555, - span_id=6077757853989569223, - trace_flags=TraceFlags(0x01), - severity_text="INFO", - severity_number=SeverityNumber.INFO, - body="Love is the one thing that transcends time and space", - resource=Resource( - {"first_resource": "value"}, - "resource_schema_url", - ), - attributes={"filename": "model.py", "func_name": "run_method"}, - ), - instrumentation_scope=InstrumentationScope( - "another_name", "another_version" - ), - ) - - return [log1, log2, log3, log4] diff --git a/benchmark/benchmark_serialize.py b/benchmark/benchmark_serialize.py index 032f786..8312cf7 100644 --- a/benchmark/benchmark_serialize.py +++ b/benchmark/benchmark_serialize.py @@ -1,8 +1,6 @@ -from 
typing import Sequence - import google_benchmark as benchmark -from snowflake.telemetry.test.metrictestutil import _generate_gauge, _generate_sum +from benchmark.util import get_logs_data, get_metrics_data, get_traces_data from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common._log_encoder import encode_logs from snowflake.telemetry._internal.opentelemetry.exporter.otlp.proto.common.metrics_encoder import encode_metrics @@ -12,292 +10,32 @@ from opentelemetry.exporter.otlp.proto.common.metrics_encoder import encode_metrics as pb2_encode_metrics from opentelemetry.exporter.otlp.proto.common.trace_encoder import encode_spans as pb2_encode_spans -from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.util.instrumentation import InstrumentationScope - -from opentelemetry._logs import SeverityNumber -from opentelemetry.sdk._logs import LogData, LogLimits, LogRecord - -from opentelemetry.sdk.metrics.export import ( - AggregationTemporality, - Buckets, - ExponentialHistogram, - Histogram, - ExponentialHistogramDataPoint, - HistogramDataPoint, - Metric, - MetricsData, - ResourceMetrics, - ScopeMetrics, -) - -from opentelemetry.sdk.trace import Event, SpanContext, _Span -from opentelemetry.trace import SpanKind, Link, TraceFlags -from opentelemetry.trace.status import Status, StatusCode - -def get_logs_data() -> Sequence[LogData]: - log1 = LogData( - log_record=LogRecord( - timestamp=1644650195189786880, - observed_timestamp=1644660000000000000, - trace_id=89564621134313219400156819398935297684, - span_id=1312458408527513268, - trace_flags=TraceFlags(0x01), - severity_text="WARN", - severity_number=SeverityNumber.WARN, - body="Do not go gentle into that good night. 
Rage, rage against the dying of the light", - resource=Resource( - {"first_resource": "value"}, - "resource_schema_url", - ), - attributes={"a": 1, "b": "c"}, - ), - instrumentation_scope=InstrumentationScope( - "first_name", "first_version" - ), - ) - - log2 = LogData( - log_record=LogRecord( - timestamp=1644650249738562048, - observed_timestamp=1644660000000000000, - trace_id=0, - span_id=0, - trace_flags=TraceFlags.DEFAULT, - severity_text="WARN", - severity_number=SeverityNumber.WARN, - body="Cooper, this is no time for caution!", - resource=Resource({"second_resource": "CASE"}), - attributes={}, - ), - instrumentation_scope=InstrumentationScope( - "second_name", "second_version" - ), - ) - - log3 = LogData( - log_record=LogRecord( - timestamp=1644650427658989056, - observed_timestamp=1644660000000000000, - trace_id=271615924622795969659406376515024083555, - span_id=4242561578944770265, - trace_flags=TraceFlags(0x01), - severity_text="DEBUG", - severity_number=SeverityNumber.DEBUG, - body="To our galaxy", - resource=Resource({"second_resource": "CASE"}), - attributes={"a": 1, "b": "c"}, - ), - instrumentation_scope=None, - ) - - log4 = LogData( - log_record=LogRecord( - timestamp=1644650584292683008, - observed_timestamp=1644660000000000000, - trace_id=212592107417388365804938480559624925555, - span_id=6077757853989569223, - trace_flags=TraceFlags(0x01), - severity_text="INFO", - severity_number=SeverityNumber.INFO, - body="Love is the one thing that transcends time and space", - resource=Resource( - {"first_resource": "value"}, - "resource_schema_url", - ), - attributes={"filename": "model.py", "func_name": "run_method"}, - ), - instrumentation_scope=InstrumentationScope( - "another_name", "another_version" - ), - ) - - return [log1, log2, log3, log4] - - -HISTOGRAM = Metric( - name="histogram", - description="foo", - unit="s", - data=Histogram( - data_points=[ - HistogramDataPoint( - attributes={"a": 1, "b": True}, - start_time_unix_nano=1641946016139533244, 
- time_unix_nano=1641946016139533244, - count=5, - sum=67, - bucket_counts=[1, 4], - explicit_bounds=[10.0, 20.0], - min=8, - max=18, - ) - ], - aggregation_temporality=AggregationTemporality.DELTA, - ), -) -def get_metrics_data() -> MetricsData: - metrics1 = MetricsData( - resource_metrics=[ - ResourceMetrics( - resource=Resource( - attributes={"a": 1, "b": False}, - schema_url="resource_schema_url", - ), - scope_metrics=[ - ScopeMetrics( - scope=InstrumentationScope( - name="first_name", - version="first_version", - schema_url="insrumentation_scope_schema_url", - ), - metrics=[_generate_sum("sum_int", 33)], - schema_url="instrumentation_scope_schema_url", - ) - ], - schema_url="resource_schema_url", - ) - ] - ) - - metrics2 = MetricsData( - resource_metrics=[ - ResourceMetrics( - resource=Resource( - attributes={"a": 1, "b": False}, - schema_url="resource_schema_url", - ), - scope_metrics=[ - ScopeMetrics( - scope=InstrumentationScope( - name="first_name", - version="first_version", - schema_url="insrumentation_scope_schema_url", - ), - metrics=[HISTOGRAM, HISTOGRAM], - schema_url="instrumentation_scope_schema_url", - ), - ScopeMetrics( - scope=InstrumentationScope( - name="second_name", - version="second_version", - schema_url="insrumentation_scope_schema_url", - ), - metrics=[HISTOGRAM], - schema_url="instrumentation_scope_schema_url", - ), - ScopeMetrics( - scope=InstrumentationScope( - name="third_name", - version="third_version", - schema_url="insrumentation_scope_schema_url", - ), - metrics=[HISTOGRAM], - schema_url="instrumentation_scope_schema_url", - ), - ], - schema_url="resource_schema_url", - ) - ] - ) - - return metrics1 - -def get_traces_data() -> Sequence[_Span]: - trace_id = 0x3E0C63257DE34C926F9EFCD03927272E - - base_time = 683647322 * 10**9 # in ns - start_times = ( - base_time, - base_time + 150 * 10**6, - base_time + 300 * 10**6, - base_time + 400 * 10**6, - ) - end_times = ( - start_times[0] + (50 * 10**6), - start_times[1] + (100 * 10**6), - 
start_times[2] + (200 * 10**6), - start_times[3] + (300 * 10**6), - ) - - parent_span_context = SpanContext( - trace_id, 0x1111111111111111, is_remote=True - ) - - other_context = SpanContext( - trace_id, 0x2222222222222222, is_remote=False - ) - - span1 = _Span( - name="test-span-1", - context=SpanContext( - trace_id, - 0x34BF92DEEFC58C92, - is_remote=False, - trace_flags=TraceFlags(TraceFlags.SAMPLED), - ), - parent=parent_span_context, - events=( - Event( - name="event0", - timestamp=base_time + 50 * 10**6, - attributes={ - "annotation_bool": True, - "annotation_string": "annotation_test", - "key_float": 0.3, - }, - ), - ), - links=( - Link(context=other_context, attributes={"key_bool": True}), - ), - resource=Resource({}, "resource_schema_url"), - ) - span1.start(start_time=start_times[0]) - span1.set_attribute("key_bool", False) - span1.set_attribute("key_string", "hello_world") - span1.set_attribute("key_float", 111.22) - span1.set_status(Status(StatusCode.ERROR, "Example description")) - span1.end(end_time=end_times[0]) - - span2 = _Span( - name="test-span-2", - context=parent_span_context, - parent=None, - resource=Resource(attributes={"key_resource": "some_resource"}), - ) - span2.start(start_time=start_times[1]) - span2.end(end_time=end_times[1]) - - span3 = _Span( - name="test-span-3", - context=other_context, - parent=None, - resource=Resource(attributes={"key_resource": "some_resource"}), - ) - span3.start(start_time=start_times[2]) - span3.set_attribute("key_string", "hello_world") - span3.end(end_time=end_times[2]) - - span4 = _Span( - name="test-span-4", - context=other_context, - parent=None, - resource=Resource({}, "resource_schema_url"), - instrumentation_scope=InstrumentationScope( - name="name", version="version" - ), - ) - span4.start(start_time=start_times[3]) - span4.end(end_time=end_times[3]) +""" +----------------------------------------------------------------------------- +Benchmark Time CPU Iterations 
+----------------------------------------------------------------------------- +test_bm_serialize_logs_data 78590 ns 78590 ns 8893 +test_bm_pb2_serialize_logs_data 96043 ns 96043 ns 7277 +test_bm_serialize_metrics_data 132482 ns 132482 ns 5285 +test_bm_pb2_serialize_metrics_data 163929 ns 163931 ns 4270 +test_bm_serialize_traces_data 103523 ns 103524 ns 6656 +test_bm_pb2_serialize_traces_data 132048 ns 132048 ns 5294 +""" - return [span1, span2, span3, span4] +def sanity_check(): + logs_data = get_logs_data() + metrics_data = get_metrics_data() + traces_data = get_traces_data() + + assert encode_logs(logs_data) == pb2_encode_logs(logs_data).SerializeToString() + assert encode_metrics(metrics_data) == pb2_encode_metrics(metrics_data).SerializeToString() + assert encode_spans(traces_data) == pb2_encode_spans(traces_data).SerializeToString() @benchmark.register def test_bm_serialize_logs_data(state): logs_data = get_logs_data() while state: - bytes(encode_logs(logs_data)) + encode_logs(logs_data) @benchmark.register def test_bm_pb2_serialize_logs_data(state): @@ -309,7 +47,7 @@ def test_bm_pb2_serialize_logs_data(state): def test_bm_serialize_metrics_data(state): metrics_data = get_metrics_data() while state: - bytes(encode_metrics(metrics_data)) + encode_metrics(metrics_data) @benchmark.register def test_bm_pb2_serialize_metrics_data(state): @@ -321,7 +59,7 @@ def test_bm_pb2_serialize_metrics_data(state): def test_bm_serialize_traces_data(state): traces_data = get_traces_data() while state: - bytes(encode_spans(traces_data)) + encode_spans(traces_data) @benchmark.register def test_bm_pb2_serialize_traces_data(state): @@ -329,17 +67,6 @@ def test_bm_pb2_serialize_traces_data(state): while state: pb2_encode_spans(traces_data).SerializeToString() -""" ------------------------------------------------------------------------------ -Benchmark Time CPU Iterations ------------------------------------------------------------------------------ -test_bm_serialize_logs_data 
43847 ns 43847 ns 15571 -test_bm_pb2_serialize_logs_data 43450 ns 43450 ns 16097 -test_bm_serialize_metrics_data 13299 ns 13299 ns 52040 -test_bm_pb2_serialize_metrics_data 13465 ns 13465 ns 53244 -test_bm_serialize_traces_data 56274 ns 56274 ns 12254 -test_bm_pb2_serialize_traces_data 60691 ns 60687 ns 11751 -""" - if __name__ == "__main__": + sanity_check() benchmark.main() diff --git a/benchmark/util.py b/benchmark/util.py new file mode 100644 index 0000000..586d205 --- /dev/null +++ b/benchmark/util.py @@ -0,0 +1,334 @@ +from typing import Sequence + +from snowflake.telemetry.test.metrictestutil import _generate_gauge, _generate_sum + +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.util.instrumentation import InstrumentationScope + +from opentelemetry._logs import SeverityNumber +from opentelemetry.sdk._logs import LogData, LogLimits, LogRecord + +from opentelemetry.sdk.metrics.export import ( + AggregationTemporality, + Buckets, + ExponentialHistogram, + Histogram, + ExponentialHistogramDataPoint, + HistogramDataPoint, + Metric, + MetricsData, + ResourceMetrics, + ScopeMetrics, +) + +from opentelemetry.sdk.trace import Event, SpanContext, _Span +from opentelemetry.trace import SpanKind, Link, TraceFlags +from opentelemetry.trace.status import Status, StatusCode + + + +def get_logs_data() -> Sequence[LogData]: + log1 = LogData( + log_record=LogRecord( + timestamp=1644650195189786880, + observed_timestamp=1644660000000000000, + trace_id=89564621134313219400156819398935297684, + span_id=1312458408527513268, + trace_flags=TraceFlags(0x01), + severity_text="WARN", + severity_number=SeverityNumber.WARN, + body="Do not go gentle into that good night. 
Rage, rage against the dying of the light", + resource=Resource( + {"first_resource": "value"}, + "resource_schema_url", + ), + attributes={"a": 1, "b": "c"}, + ), + instrumentation_scope=InstrumentationScope( + "first_name", "first_version" + ), + ) + + log2 = LogData( + log_record=LogRecord( + timestamp=1644650249738562048, + observed_timestamp=1644660000000000000, + trace_id=0, + span_id=0, + trace_flags=TraceFlags.DEFAULT, + severity_text="WARN", + severity_number=SeverityNumber.WARN, + body="Cooper, this is no time for caution!", + resource=Resource({"second_resource": "CASE"}), + attributes={}, + ), + instrumentation_scope=InstrumentationScope( + "second_name", "second_version" + ), + ) + + log3 = LogData( + log_record=LogRecord( + timestamp=1644650427658989056, + observed_timestamp=1644660000000000000, + trace_id=271615924622795969659406376515024083555, + span_id=4242561578944770265, + trace_flags=TraceFlags(0x01), + severity_text="DEBUG", + severity_number=SeverityNumber.DEBUG, + body="To our galaxy", + resource=Resource({"second_resource": "CASE"}), + attributes={"a": 1, "b": "c"}, + ), + instrumentation_scope=None, + ) + + log4 = LogData( + log_record=LogRecord( + timestamp=1644650584292683008, + observed_timestamp=1644660000000000000, + trace_id=212592107417388365804938480559624925555, + span_id=6077757853989569223, + trace_flags=TraceFlags(0x01), + severity_text="INFO", + severity_number=SeverityNumber.INFO, + body="Love is the one thing that transcends time and space", + resource=Resource( + {"first_resource": "value"}, + "resource_schema_url", + ), + attributes={"filename": "model.py", "func_name": "run_method"}, + ), + instrumentation_scope=InstrumentationScope( + "another_name", "another_version" + ), + ) + + return [log1, log2, log3, log4] + + +HISTOGRAM = Metric( + name="histogram", + description="foo", + unit="s", + data=Histogram( + data_points=[ + HistogramDataPoint( + attributes={"a": 1, "b": True}, + start_time_unix_nano=1641946016139533244, 
+ time_unix_nano=1641946016139533244, + count=5, + sum=67, + bucket_counts=[1, 4], + explicit_bounds=[10.0, 20.0], + min=8, + max=18, + ) + ], + aggregation_temporality=AggregationTemporality.DELTA, + ), +) + +EXPONENTIAL_HISTOGRAM = Metric( + name="exponential_histogram", + description="description", + unit="unit", + data=ExponentialHistogram( + data_points=[ + ExponentialHistogramDataPoint( + attributes={"a": 1, "b": True}, + start_time_unix_nano=0, + time_unix_nano=1, + count=2, + sum=3, + scale=4, + zero_count=5, + positive=Buckets(offset=6, bucket_counts=[7, 8]), + negative=Buckets(offset=9, bucket_counts=[10, 11]), + flags=12, + min=13.0, + max=14.0, + ) + ], + aggregation_temporality=AggregationTemporality.DELTA, + ), +) +def get_metrics_data() -> MetricsData: + + metrics = MetricsData( + resource_metrics=[ + ResourceMetrics( + resource=Resource( + attributes={"a": 1, "b": False}, + schema_url="resource_schema_url", + ), + scope_metrics=[ + ScopeMetrics( + scope=InstrumentationScope( + name="first_name", + version="first_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[HISTOGRAM, HISTOGRAM], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="second_name", + version="second_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[HISTOGRAM], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="third_name", + version="third_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[HISTOGRAM], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="first_name", + version="first_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[_generate_sum("sum_int", 33)], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="first_name", + version="first_version", + 
schema_url="insrumentation_scope_schema_url", + ), + metrics=[_generate_sum("sum_double", 2.98)], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="first_name", + version="first_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[_generate_gauge("gauge_int", 9000)], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="first_name", + version="first_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[_generate_gauge("gauge_double", 52.028)], + schema_url="instrumentation_scope_schema_url", + ), + ScopeMetrics( + scope=InstrumentationScope( + name="first_name", + version="first_version", + schema_url="insrumentation_scope_schema_url", + ), + metrics=[EXPONENTIAL_HISTOGRAM], + schema_url="instrumentation_scope_schema_url", + ) + ], + schema_url="resource_schema_url", + ) + ] + ) + + return metrics + +def get_traces_data() -> Sequence[_Span]: + trace_id = 0x3E0C63257DE34C926F9EFCD03927272E + + base_time = 683647322 * 10**9 # in ns + start_times = ( + base_time, + base_time + 150 * 10**6, + base_time + 300 * 10**6, + base_time + 400 * 10**6, + ) + end_times = ( + start_times[0] + (50 * 10**6), + start_times[1] + (100 * 10**6), + start_times[2] + (200 * 10**6), + start_times[3] + (300 * 10**6), + ) + + parent_span_context = SpanContext( + trace_id, 0x1111111111111111, is_remote=True + ) + + other_context = SpanContext( + trace_id, 0x2222222222222222, is_remote=False + ) + + span1 = _Span( + name="test-span-1", + context=SpanContext( + trace_id, + 0x34BF92DEEFC58C92, + is_remote=False, + trace_flags=TraceFlags(TraceFlags.SAMPLED), + ), + parent=parent_span_context, + events=( + Event( + name="event0", + timestamp=base_time + 50 * 10**6, + attributes={ + "annotation_bool": True, + "annotation_string": "annotation_test", + "key_float": 0.3, + }, + ), + ), + links=( + Link(context=other_context, attributes={"key_bool": 
True}), + ), + resource=Resource({}, "resource_schema_url"), + ) + span1.start(start_time=start_times[0]) + span1.set_attribute("key_bool", False) + span1.set_attribute("key_string", "hello_world") + span1.set_attribute("key_float", 111.22) + span1.set_status(Status(StatusCode.ERROR, "Example description")) + span1.end(end_time=end_times[0]) + + span2 = _Span( + name="test-span-2", + context=parent_span_context, + parent=None, + resource=Resource(attributes={"key_resource": "some_resource"}), + ) + span2.start(start_time=start_times[1]) + span2.end(end_time=end_times[1]) + + span3 = _Span( + name="test-span-3", + context=other_context, + parent=None, + resource=Resource(attributes={"key_resource": "some_resource"}), + ) + span3.start(start_time=start_times[2]) + span3.set_attribute("key_string", "hello_world") + span3.end(end_time=end_times[2]) + + span4 = _Span( + name="test-span-4", + context=other_context, + parent=None, + resource=Resource({}, "resource_schema_url"), + instrumentation_scope=InstrumentationScope( + name="name", version="version" + ), + ) + span4.start(start_time=start_times[3]) + span4.end(end_time=end_times[3]) + + return [span1, span2, span3, span4] diff --git a/compile/plugin.py b/compile/plugin.py index 4c755ee..5afabcf 100755 --- a/compile/plugin.py +++ b/compile/plugin.py @@ -2,16 +2,14 @@ import os import sys -from collections import defaultdict from dataclasses import dataclass, field -from typing import List, Union +from typing import List, Optional from enum import IntEnum from google.protobuf.compiler import plugin_pb2 as plugin from google.protobuf.descriptor_pb2 import ( FileDescriptorProto, FieldDescriptorProto, - OneofDescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, MethodDescriptorProto, @@ -33,26 +31,25 @@ class ProtoTypeDescriptor: name: str wire_type: WireType python_type: str - default: str proto_type_to_descriptor = { - FieldDescriptorProto.TYPE_BOOL: ProtoTypeDescriptor("bool", WireType.VARINT, "bool", 
"False"), - FieldDescriptorProto.TYPE_ENUM: ProtoTypeDescriptor("enum", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_INT32: ProtoTypeDescriptor("int32", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_INT64: ProtoTypeDescriptor("int64", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_UINT32: ProtoTypeDescriptor("uint32", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_UINT64: ProtoTypeDescriptor("uint64", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_SINT32: ProtoTypeDescriptor("sint32", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_SINT64: ProtoTypeDescriptor("sint64", WireType.VARINT, "int", "0"), - FieldDescriptorProto.TYPE_FIXED32: ProtoTypeDescriptor("fixed32", WireType.I32, "int", "0"), - FieldDescriptorProto.TYPE_FIXED64: ProtoTypeDescriptor("fixed64", WireType.I64, "int", "0"), - FieldDescriptorProto.TYPE_SFIXED32: ProtoTypeDescriptor("sfixed32", WireType.I32, "int", "0"), - FieldDescriptorProto.TYPE_SFIXED64: ProtoTypeDescriptor("sfixed64", WireType.I64, "int", "0"), - FieldDescriptorProto.TYPE_FLOAT: ProtoTypeDescriptor("float", WireType.I32, "float", "0.0"), - FieldDescriptorProto.TYPE_DOUBLE: ProtoTypeDescriptor("double", WireType.I64, "float", "0.0"), - FieldDescriptorProto.TYPE_STRING: ProtoTypeDescriptor("string", WireType.LEN, "str", '""'), - FieldDescriptorProto.TYPE_BYTES: ProtoTypeDescriptor("bytes", WireType.LEN, "bytes", 'b""'), - FieldDescriptorProto.TYPE_MESSAGE: ProtoTypeDescriptor("message", WireType.LEN, "MessageMarshaler", 'None'), + FieldDescriptorProto.TYPE_BOOL: ProtoTypeDescriptor("bool", WireType.VARINT, "bool"), + FieldDescriptorProto.TYPE_ENUM: ProtoTypeDescriptor("enum", WireType.VARINT, "int"), + FieldDescriptorProto.TYPE_INT32: ProtoTypeDescriptor("int32", WireType.VARINT, "int"), + FieldDescriptorProto.TYPE_INT64: ProtoTypeDescriptor("int64", WireType.VARINT, "int"), + FieldDescriptorProto.TYPE_UINT32: ProtoTypeDescriptor("uint32", WireType.VARINT, "int"), + 
FieldDescriptorProto.TYPE_UINT64: ProtoTypeDescriptor("uint64", WireType.VARINT, "int"), + FieldDescriptorProto.TYPE_SINT32: ProtoTypeDescriptor("sint32", WireType.VARINT, "int"), + FieldDescriptorProto.TYPE_SINT64: ProtoTypeDescriptor("sint64", WireType.VARINT, "int"), + FieldDescriptorProto.TYPE_FIXED32: ProtoTypeDescriptor("fixed32", WireType.I32, "int"), + FieldDescriptorProto.TYPE_FIXED64: ProtoTypeDescriptor("fixed64", WireType.I64, "int"), + FieldDescriptorProto.TYPE_SFIXED32: ProtoTypeDescriptor("sfixed32", WireType.I32, "int"), + FieldDescriptorProto.TYPE_SFIXED64: ProtoTypeDescriptor("sfixed64", WireType.I64, "int"), + FieldDescriptorProto.TYPE_FLOAT: ProtoTypeDescriptor("float", WireType.I32, "float"), + FieldDescriptorProto.TYPE_DOUBLE: ProtoTypeDescriptor("double", WireType.I64, "float"), + FieldDescriptorProto.TYPE_STRING: ProtoTypeDescriptor("string", WireType.LEN, "str"), + FieldDescriptorProto.TYPE_BYTES: ProtoTypeDescriptor("bytes", WireType.LEN, "bytes"), + FieldDescriptorProto.TYPE_MESSAGE: ProtoTypeDescriptor("message", WireType.LEN, "bytes"), } @dataclass @@ -73,9 +70,9 @@ class EnumTemplate: values: List["EnumValueTemplate"] = field(default_factory=list) @staticmethod - def from_descriptor(descriptor: EnumDescriptorProto) -> "EnumTemplate": + def from_descriptor(descriptor: EnumDescriptorProto, parent: str = "") -> "EnumTemplate": return EnumTemplate( - name=descriptor.name, + name=parent + "_" + descriptor.name if parent else descriptor.name, values=[EnumValueTemplate.from_descriptor(value) for value in descriptor.value], ) @@ -95,21 +92,13 @@ class FieldTemplate: python_type: str proto_type: str repeated: bool - default: str + group: str + encode_presence: bool @staticmethod - def from_descriptor(descriptor: FieldDescriptorProto) -> "FieldTemplate": + def from_descriptor(descriptor: FieldDescriptorProto, group: Optional[str] = None) -> "FieldTemplate": repeated = descriptor.label == FieldDescriptorProto.LABEL_REPEATED type_descriptor = 
proto_type_to_descriptor[descriptor.type] - - if descriptor.HasField("oneof_index"): - default = None - elif repeated: - # In python, default field values are shared across all instances of the class - # So we should not use mutable objects like list() as default values - default = None - else: - default = type_descriptor.default python_type = type_descriptor.python_type proto_type = type_descriptor.name @@ -127,6 +116,10 @@ def from_descriptor(descriptor: FieldDescriptorProto) -> "FieldTemplate": # Saves us from having to calculate the tag at runtime tag = tag_to_repr_varint(tag) + # For group / oneof fields, we need to encode the presence of the field + # For message fields, we need to encode the presence of the field if it is not None + encode_presence = group is not None or proto_type == "message" + return FieldTemplate( name=descriptor.name, tag=tag, @@ -134,58 +127,30 @@ def from_descriptor(descriptor: FieldDescriptorProto) -> "FieldTemplate": python_type=python_type, proto_type=proto_type, repeated=repeated, - default=default, - ) - -@dataclass -class OneOfTemplate: - name: str - fields: List[FieldTemplate] = field(default_factory=list) - - @staticmethod - def from_descriptor(descriptor: OneofDescriptorProto, fields: List[FieldDescriptorProto]) -> "OneOfTemplate": - - fields = [FieldTemplate.from_descriptor(field) for field in fields] - # Sort the fields by number in descending order to follow "last one wins" semantics - fields.sort(key=lambda field: field.number, reverse=True) - - return OneOfTemplate( - name=descriptor.name, - fields=fields, + group=group, + encode_presence=encode_presence, ) @dataclass class MessageTemplate: name: str - fields: List[Union["FieldTemplate", "OneOfTemplate"]] = field(default_factory=list) + fields: List[FieldTemplate] = field(default_factory=list) enums: List["EnumTemplate"] = field(default_factory=list) messages: List["MessageTemplate"] = field(default_factory=list) @staticmethod - def from_descriptor(descriptor: 
DescriptorProto) -> "MessageTemplate": - fields = [] - oneofs_map = defaultdict(list) - for field in descriptor.field: - if field.HasField("oneof_index"): - oneofs_map[field.oneof_index].append(field) - else: - fields.append(field) - - # Sort the fields by number in descending order, since we serialize in reverse order - fields = [FieldTemplate.from_descriptor(field) for field in fields] - oneofs = [OneOfTemplate.from_descriptor(descriptor.oneof_decl[oneof_index], fields) for oneof_index, fields in oneofs_map.items()] - fields += oneofs - def sort_key(field: Union[FieldTemplate, OneOfTemplate]): - if isinstance(field, FieldTemplate): - return field.number - return field.fields[0].number - fields.sort(key=sort_key, reverse=True) + def from_descriptor(descriptor: DescriptorProto, parent: str = "") -> "MessageTemplate": + def get_group(field: FieldDescriptorProto) -> str: + return descriptor.oneof_decl[field.oneof_index].name if field.HasField("oneof_index") else None + fields = [FieldTemplate.from_descriptor(field, get_group(field)) for field in descriptor.field] + fields.sort(key=lambda field: field.number) + name = parent + "_" + descriptor.name if parent else descriptor.name return MessageTemplate( - name=descriptor.name, + name=name, fields=fields, - enums=[EnumTemplate.from_descriptor(enum) for enum in descriptor.enum_type], - messages=[MessageTemplate.from_descriptor(message) for message in descriptor.nested_type], + enums=[EnumTemplate.from_descriptor(enum, name) for enum in descriptor.enum_type], + messages=[MessageTemplate.from_descriptor(message, name) for message in descriptor.nested_type], ) @dataclass @@ -219,6 +184,7 @@ class FileTemplate: messages: List["MessageTemplate"] = field(default_factory=list) enums: List["EnumTemplate"] = field(default_factory=list) services: List["ServiceTemplate"] = field(default_factory=list) + name: str = "" @staticmethod def from_descriptor(descriptor: FileDescriptorProto) -> "FileTemplate": @@ -226,6 +192,7 @@ def 
from_descriptor(descriptor: FileDescriptorProto) -> "FileTemplate": messages=[MessageTemplate.from_descriptor(message) for message in descriptor.message_type], enums=[EnumTemplate.from_descriptor(enum) for enum in descriptor.enum_type], services=[ServiceTemplate.from_descriptor(service) for service in descriptor.service], + name=descriptor.name, ) def main(): diff --git a/compile/templates/template.py.jinja2 b/compile/templates/template.py.jinja2 index 059564f..68d013d 100644 --- a/compile/templates/template.py.jinja2 +++ b/compile/templates/template.py.jinja2 @@ -1,13 +1,11 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: {{ proto_file }} -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! +# sources: {{ file_template.name }} from snowflake.telemetry._internal.serialize import ( Enum, ProtoSerializer, - MessageMarshaler, ) -from typing import List +from typing import List, Optional {% for enum in file_template.enums %} class {{ enum.name }}(Enum): @@ -17,52 +15,27 @@ class {{ enum.name }}(Enum): {% endfor %} {% macro render_message(message) %} -class {{ message.name }}(MessageMarshaler): - def __init__( - self, -{% for field in message.fields|reverse %} -{% if field.fields is defined %} -{% for oneof_field in field.fields|reverse %} - {{ oneof_field.name }}: {{ oneof_field.python_type }} = {{ oneof_field.default }}, -{% endfor %} -{% else %} - {{ field.name }}: {{ field.python_type }} = {{ field.default }}, -{% endif %} -{% endfor %} - ): -{%- for field in message.fields|reverse %} -{%- if field.fields is defined %} -{%- for oneof_field in field.fields|reverse %} - self.{{ oneof_field.name }} = {{ oneof_field.name }} -{%- endfor %} -{%- else %} - self.{{ field.name }} = {{ field.name }} -{%- endif %} -{%- endfor %} - - def write_to(self, proto_serializer: ProtoSerializer) -> None: +def {{ message.name }}( {%- for field in message.fields %} -{%- if field.fields is defined %} - # oneof group {{ 
field.name }} -{%- for oneof_field in field.fields %} - {% if loop.index != 1 %}el{% endif %}if self.{{ oneof_field.name }} is not None: - proto_serializer.serialize_{{ oneof_field.proto_type }}({{ oneof_field.tag }}, self.{{ oneof_field.name }}) + {{ field.name }}: Optional[{{ field.python_type }}] = None, {%- endfor %} -{%- else %} - if self.{{ field.name }}: proto_serializer.serialize_{{ field.proto_type }}({{ field.tag }}, self.{{ field.name }}) -{%- endif %} +) -> bytes: + proto_serializer = ProtoSerializer() +{%- for field in message.fields %} + if {{ field.name }}{% if field.encode_presence %} is not None{% endif %}: {% if field.group %}# oneof group {{ field.group }}{% endif %} + proto_serializer.serialize_{{ field.proto_type }}({{ field.tag }}, {{ field.name }}) {%- endfor %} + return proto_serializer.out {% for nested_enum in message.enums %} - class {{ nested_enum.name }}(Enum): +class {{ nested_enum.name }}(Enum): {%- for value in nested_enum.values %} - {{ value.name }} = {{ value.number }} + {{ value.name }} = {{ value.number }} {%- endfor %} {% endfor %} {% for nested_message in message.messages %} -{%- set nested_message_result = render_message(nested_message) -%} -{{ nested_message_result | indent(4) }} +{{ render_message(nested_message) }} {% endfor %} {% endmacro %} diff --git a/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py b/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py index a0d407e..ae39ef2 100644 --- a/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py +++ b/src/snowflake/telemetry/_internal/exporter/otlp/proto/logs/__init__.py @@ -69,7 +69,7 @@ def export(self, batch: typing.Sequence[_logs.LogData]) -> export.LogExportResul def _serialize_logs_data(batch: typing.Sequence[_logs.LogData]) -> bytes: # pylint gets confused by protobuf-generated code, that's why we must # disable the no-member check below. 
- return bytes(LogsData(resource_logs=encode_logs(batch).resource_logs)) + return encode_logs(batch) def shutdown(self): pass diff --git a/src/snowflake/telemetry/_internal/exporter/otlp/proto/metrics/__init__.py b/src/snowflake/telemetry/_internal/exporter/otlp/proto/metrics/__init__.py index b7afed8..9d7e90c 100644 --- a/src/snowflake/telemetry/_internal/exporter/otlp/proto/metrics/__init__.py +++ b/src/snowflake/telemetry/_internal/exporter/otlp/proto/metrics/__init__.py @@ -82,7 +82,7 @@ def export( def _serialize_metrics_data(data: MetricsData) -> bytes: # pylint gets confused by protobuf-generated code, that's why we must # disable the no-member check below. - return bytes(PB2MetricsData(resource_metrics=encode_metrics(data).resource_metrics)) + return encode_metrics(data) def force_flush(self, timeout_millis: float = 10_000) -> bool: return True diff --git a/src/snowflake/telemetry/_internal/exporter/otlp/proto/traces/__init__.py b/src/snowflake/telemetry/_internal/exporter/otlp/proto/traces/__init__.py index 666b572..92cfa6e 100644 --- a/src/snowflake/telemetry/_internal/exporter/otlp/proto/traces/__init__.py +++ b/src/snowflake/telemetry/_internal/exporter/otlp/proto/traces/__init__.py @@ -72,7 +72,7 @@ def _serialize_traces_data( ) -> bytes: # pylint gets confused by protobuf-generated code, that's why we must # disable the no-member check below. 
- return bytes(TracesData(resource_spans=encode_spans(sdk_spans).resource_spans)) + return encode_spans(sdk_spans) def shutdown(self) -> None: pass diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py index da44963..2fc3fd8 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/_log_encoder/__init__.py @@ -22,21 +22,19 @@ _encode_value, _encode_attributes, ) -from snowflake.telemetry._internal.opentelemetry.proto.collector.logs.v1.logs_service import ( - ExportLogsServiceRequest, -) from snowflake.telemetry._internal.opentelemetry.proto.logs.v1.logs import ( ScopeLogs, ResourceLogs, + LogsData, ) from snowflake.telemetry._internal.opentelemetry.proto.logs.v1.logs import LogRecord as PB2LogRecord from opentelemetry.sdk._logs import LogData -def encode_logs(batch: Sequence[LogData]) -> ExportLogsServiceRequest: - return ExportLogsServiceRequest(resource_logs=_encode_resource_logs(batch)) - +def encode_logs(batch: Sequence[LogData]) -> bytes: + return bytes(LogsData(resource_logs=_encode_resource_logs(batch)) +) def _encode_log(log_data: LogData) -> PB2LogRecord: span_id = ( diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py index 868472e..223b2fb 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/metrics_encoder/__init__.py @@ -35,9 +35,6 @@ from opentelemetry.sdk.metrics.export 
import ( AggregationTemporality, ) -from snowflake.telemetry._internal.opentelemetry.proto.collector.metrics.v1.metrics_service import ( - ExportMetricsServiceRequest, -) from snowflake.telemetry._internal.opentelemetry.proto.common.v1.common import InstrumentationScope from snowflake.telemetry._internal.opentelemetry.proto.metrics.v1 import metrics as pb2 from opentelemetry.sdk.metrics.export import ( @@ -173,7 +170,7 @@ def _get_aggregation( return instrument_class_aggregation -def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: +def encode_metrics(data: MetricsData) -> bytes: resource_metrics_dict = {} for resource_metrics in data.resource_metrics: @@ -188,44 +185,37 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: for scope_metrics in resource_metrics.scope_metrics: - instrumentation_scope = scope_metrics.scope - - # The SDK groups metrics in instrumentation scopes already so - # there is no need to check for existing instrumentation scopes - # here. 
- pb2_scope_metrics = pb2.ScopeMetrics( - scope=InstrumentationScope( - name=instrumentation_scope.name, - version=instrumentation_scope.version, - ) - ) - - scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics - pb2_scope_metrics.metrics = [] + pb2_metrics = [] + pb2_metric_gauge = None + pb2_metric_histogram = None + pb2_metric_sum = None + pb2_metric_exponential_histogram = None for metric in scope_metrics.metrics: - pb2_metric = pb2.Metric( - name=metric.name, - description=metric.description, - unit=metric.unit, - ) - if isinstance(metric.data, Gauge): - pb2_metric.gauge = pb2.Gauge(data_points=[]) + pb2_data_points = [] for data_point in metric.data.data_points: + as_int = None + as_double = None + if isinstance(data_point.value, int): + as_int = data_point.value + else: + as_double = data_point.value + pt = pb2.NumberDataPoint( attributes=_encode_attributes( data_point.attributes ), time_unix_nano=data_point.time_unix_nano, + as_int=as_int, + as_double=as_double, ) - if isinstance(data_point.value, int): - pt.as_int = data_point.value - else: - pt.as_double = data_point.value - pb2_metric.gauge.data_points.append(pt) + pb2_data_points.append(pt) + + pb2_metric_gauge = pb2.Gauge(data_points=pb2_data_points) elif isinstance(metric.data, HistogramType): - pb2_metric.histogram = pb2.Histogram(data_points=[]) + pb2_data_points = [] + pb2_aggregation_temporality = None for data_point in metric.data.data_points: pt = pb2.HistogramDataPoint( attributes=_encode_attributes( @@ -242,14 +232,26 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: max=data_point.max, min=data_point.min, ) - pb2_metric.histogram.aggregation_temporality = ( + pb2_aggregation_temporality = ( metric.data.aggregation_temporality ) - pb2_metric.histogram.data_points.append(pt) + pb2_data_points.append(pt) + pb2_metric_histogram = pb2.Histogram( + data_points=pb2_data_points, + aggregation_temporality=pb2_aggregation_temporality + ) elif isinstance(metric.data, 
Sum): - pb2_metric.sum = pb2.Sum(data_points=[]) + pb2_data_points = [] + pb2_is_monotonic = None + pb2_aggregation_temporality = None for data_point in metric.data.data_points: + as_int = None + as_double = None + if isinstance(data_point.value, int): + as_int = data_point.value + else: + as_double = data_point.value pt = pb2.NumberDataPoint( attributes=_encode_attributes( data_point.attributes @@ -258,28 +260,30 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: data_point.start_time_unix_nano ), time_unix_nano=data_point.time_unix_nano, + as_int=as_int, + as_double=as_double, ) - if isinstance(data_point.value, int): - pt.as_int = data_point.value - else: - pt.as_double = data_point.value # note that because sum is a message type, the # fields must be set individually rather than # instantiating a pb2.Sum and setting it once - pb2_metric.sum.aggregation_temporality = ( + pb2_aggregation_temporality = ( metric.data.aggregation_temporality ) - pb2_metric.sum.is_monotonic = metric.data.is_monotonic - pb2_metric.sum.data_points.append(pt) + pb2_is_monotonic = metric.data.is_monotonic + pb2_data_points.append(pt) + pb2_metric_sum = pb2.Sum( + data_points=pb2_data_points, + aggregation_temporality=pb2_aggregation_temporality, + is_monotonic=pb2_is_monotonic, + ) elif isinstance(metric.data, ExponentialHistogramType): - pb2_metric.exponential_histogram = pb2.ExponentialHistogram( - data_points=[] - ) + pb2_data_points = [] + pb2_aggregation_temporality = None for data_point in metric.data.data_points: if data_point.positive.bucket_counts: - positive = pb2.ExponentialHistogramDataPoint.Buckets( + positive = pb2.ExponentialHistogramDataPoint_Buckets( offset=data_point.positive.offset, bucket_counts=data_point.positive.bucket_counts, ) @@ -287,7 +291,7 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: positive = None if data_point.negative.bucket_counts: - negative = pb2.ExponentialHistogramDataPoint.Buckets( + negative = 
pb2.ExponentialHistogramDataPoint_Buckets( offset=data_point.negative.offset, bucket_counts=data_point.negative.bucket_counts, ) @@ -312,10 +316,15 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: max=data_point.max, min=data_point.min, ) - pb2_metric.exponential_histogram.aggregation_temporality = ( + pb2_aggregation_temporality = ( metric.data.aggregation_temporality ) - pb2_metric.exponential_histogram.data_points.append(pt) + pb2_data_points.append(pt) + + pb2_metric_exponential_histogram = pb2.ExponentialHistogram( + data_points=pb2_data_points, + aggregation_temporality=pb2_aggregation_temporality, + ) else: _logger.warning( @@ -324,7 +333,34 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: ) continue - pb2_scope_metrics.metrics.append(pb2_metric) + + pb2_metric = pb2.Metric( + name=metric.name, + description=metric.description, + unit=metric.unit, + gauge=pb2_metric_gauge, + histogram=pb2_metric_histogram, + sum=pb2_metric_sum, + exponential_histogram=pb2_metric_exponential_histogram, + ) + + pb2_metrics.append(pb2_metric) + + + instrumentation_scope = scope_metrics.scope + + # The SDK groups metrics in instrumentation scopes already so + # there is no need to check for existing instrumentation scopes + # here. 
+ pb2_scope_metrics = pb2.ScopeMetrics( + scope=InstrumentationScope( + name=instrumentation_scope.name, + version=instrumentation_scope.version, + ), + metrics=pb2_metrics, + ) + + scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics resource_data = [] for ( @@ -341,4 +377,4 @@ def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest: ) ) resource_metrics = resource_data - return ExportMetricsServiceRequest(resource_metrics=resource_metrics) \ No newline at end of file + return bytes(pb2.MetricsData(resource_metrics=resource_metrics)) \ No newline at end of file diff --git a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py index 8de4349..416ed4f 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/exporter/otlp/proto/common/_internal/trace_encoder/__init__.py @@ -23,27 +23,28 @@ _encode_span_id, _encode_trace_id, ) -from snowflake.telemetry._internal.opentelemetry.proto.collector.trace.v1.trace_service import ( - ExportTraceServiceRequest as PB2ExportTraceServiceRequest, -) from snowflake.telemetry._internal.opentelemetry.proto.trace.v1.trace import ( ResourceSpans as PB2ResourceSpans, + ScopeSpans as PB2ScopeSpans, + Span as PB2Span, + SpanFlags as PB2SpanFlags, + Status as PB2Status, + Span_SpanKind as PB2SpanKind, + Span_Event as PB2SpanEvent, + Span_Link as PB2SpanLink, + TracesData as PB2TracesData, ) -from snowflake.telemetry._internal.opentelemetry.proto.trace.v1.trace import ScopeSpans as PB2ScopeSpans -from snowflake.telemetry._internal.opentelemetry.proto.trace.v1.trace import Span as PB2SPan -from snowflake.telemetry._internal.opentelemetry.proto.trace.v1.trace import SpanFlags as PB2SpanFlags -from 
snowflake.telemetry._internal.opentelemetry.proto.trace.v1.trace import Status as PB2Status from opentelemetry.sdk.trace import Event, ReadableSpan from opentelemetry.trace import Link, SpanKind from opentelemetry.trace.span import SpanContext, Status, TraceState # pylint: disable=E1101 _SPAN_KIND_MAP = { - SpanKind.INTERNAL: PB2SPan.SpanKind.SPAN_KIND_INTERNAL, - SpanKind.SERVER: PB2SPan.SpanKind.SPAN_KIND_SERVER, - SpanKind.CLIENT: PB2SPan.SpanKind.SPAN_KIND_CLIENT, - SpanKind.PRODUCER: PB2SPan.SpanKind.SPAN_KIND_PRODUCER, - SpanKind.CONSUMER: PB2SPan.SpanKind.SPAN_KIND_CONSUMER, + SpanKind.INTERNAL: PB2SpanKind.SPAN_KIND_INTERNAL, + SpanKind.SERVER: PB2SpanKind.SPAN_KIND_SERVER, + SpanKind.CLIENT: PB2SpanKind.SPAN_KIND_CLIENT, + SpanKind.PRODUCER: PB2SpanKind.SPAN_KIND_PRODUCER, + SpanKind.CONSUMER: PB2SpanKind.SPAN_KIND_CONSUMER, } _logger = logging.getLogger(__name__) @@ -51,10 +52,10 @@ def encode_spans( sdk_spans: Sequence[ReadableSpan], -) -> PB2ExportTraceServiceRequest: - return PB2ExportTraceServiceRequest( +) -> bytes: + return bytes(PB2TracesData( resource_spans=_encode_resource_spans(sdk_spans) - ) + )) def _encode_resource_spans( @@ -109,9 +110,9 @@ def _span_flags(parent_span_context: Optional[SpanContext]) -> int: return flags -def _encode_span(sdk_span: ReadableSpan) -> PB2SPan: +def _encode_span(sdk_span: ReadableSpan) -> PB2Span: span_context = sdk_span.get_span_context() - return PB2SPan( + return PB2Span( trace_id=_encode_trace_id(span_context.trace_id), span_id=_encode_span_id(span_context.span_id), trace_state=_encode_trace_state(span_context.trace_state), @@ -133,12 +134,12 @@ def _encode_span(sdk_span: ReadableSpan) -> PB2SPan: def _encode_events( events: Sequence[Event], -) -> Optional[List[PB2SPan.Event]]: +) -> Optional[List[PB2SpanEvent]]: pb2_events = None if events: pb2_events = [] for event in events: - encoded_event = PB2SPan.Event( + encoded_event = PB2SpanEvent( name=event.name, time_unix_nano=event.timestamp, 
attributes=_encode_attributes(event.attributes), @@ -148,12 +149,12 @@ def _encode_events( return pb2_events -def _encode_links(links: Sequence[Link]) -> Sequence[PB2SPan.Link]: +def _encode_links(links: Sequence[Link]) -> Sequence[PB2SpanLink]: pb2_links = None if links: pb2_links = [] for link in links: - encoded_link = PB2SPan.Link( + encoded_link = PB2SpanLink( trace_id=_encode_trace_id(link.context.trace_id), span_id=_encode_span_id(link.context.span_id), attributes=_encode_attributes(link.attributes), diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/logs/v1/logs_service.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/logs/v1/logs_service.py index b799f34..2cfad4a 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/logs/v1/logs_service.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/logs/v1/logs_service.py @@ -1,51 +1,42 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! 
+# sources: opentelemetry/proto/collector/logs/v1/logs_service.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) -class ExportLogsServiceRequest(MessageMarshaler): - def __init__( - self, - resource_logs: List[MessageMarshaler] = None, - ): - self.resource_logs = resource_logs - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.resource_logs: - proto_serializer.serialize_repeated_message(b"\n", self.resource_logs) - - -class ExportLogsServiceResponse(MessageMarshaler): - def __init__( - self, - partial_success: MessageMarshaler = None, - ): - self.partial_success = partial_success - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.partial_success: - proto_serializer.serialize_message(b"\n", self.partial_success) - - -class ExportLogsPartialSuccess(MessageMarshaler): - def __init__( - self, - rejected_log_records: int = 0, - error_message: str = "", - ): - self.rejected_log_records = rejected_log_records - self.error_message = error_message - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.error_message: - proto_serializer.serialize_string(b"\x12", self.error_message) - if self.rejected_log_records: - proto_serializer.serialize_int64(b"\x08", self.rejected_log_records) +def ExportLogsServiceRequest( + resource_logs: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if resource_logs: + proto_serializer.serialize_repeated_message(b"\n", resource_logs) + return proto_serializer.out + + +def ExportLogsServiceResponse( + partial_success: Optional[bytes] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if partial_success is not None: + proto_serializer.serialize_message(b"\n", partial_success) + return proto_serializer.out + + +def ExportLogsPartialSuccess( + rejected_log_records: Optional[int] = None, + error_message: 
Optional[str] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if rejected_log_records: + proto_serializer.serialize_int64(b"\x08", rejected_log_records) + if error_message: + proto_serializer.serialize_string(b"\x12", error_message) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/metrics/v1/metrics_service.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/metrics/v1/metrics_service.py index 122ca09..0c31719 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/metrics/v1/metrics_service.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/metrics/v1/metrics_service.py @@ -1,51 +1,42 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! +# sources: opentelemetry/proto/collector/metrics/v1/metrics_service.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) -class ExportMetricsServiceRequest(MessageMarshaler): - def __init__( - self, - resource_metrics: List[MessageMarshaler] = None, - ): - self.resource_metrics = resource_metrics - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.resource_metrics: - proto_serializer.serialize_repeated_message(b"\n", self.resource_metrics) - - -class ExportMetricsServiceResponse(MessageMarshaler): - def __init__( - self, - partial_success: MessageMarshaler = None, - ): - self.partial_success = partial_success - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.partial_success: - proto_serializer.serialize_message(b"\n", self.partial_success) - - -class ExportMetricsPartialSuccess(MessageMarshaler): - def __init__( - self, - rejected_data_points: int = 0, - error_message: str = "", - ): - self.rejected_data_points = 
rejected_data_points - self.error_message = error_message - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.error_message: - proto_serializer.serialize_string(b"\x12", self.error_message) - if self.rejected_data_points: - proto_serializer.serialize_int64(b"\x08", self.rejected_data_points) +def ExportMetricsServiceRequest( + resource_metrics: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if resource_metrics: + proto_serializer.serialize_repeated_message(b"\n", resource_metrics) + return proto_serializer.out + + +def ExportMetricsServiceResponse( + partial_success: Optional[bytes] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if partial_success is not None: + proto_serializer.serialize_message(b"\n", partial_success) + return proto_serializer.out + + +def ExportMetricsPartialSuccess( + rejected_data_points: Optional[int] = None, + error_message: Optional[str] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if rejected_data_points: + proto_serializer.serialize_int64(b"\x08", rejected_data_points) + if error_message: + proto_serializer.serialize_string(b"\x12", error_message) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/trace/v1/trace_service.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/trace/v1/trace_service.py index 1deff3c..c4e2496 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/trace/v1/trace_service.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/collector/trace/v1/trace_service.py @@ -1,51 +1,42 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! 
+# sources: opentelemetry/proto/collector/trace/v1/trace_service.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) -class ExportTraceServiceRequest(MessageMarshaler): - def __init__( - self, - resource_spans: List[MessageMarshaler] = None, - ): - self.resource_spans = resource_spans - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.resource_spans: - proto_serializer.serialize_repeated_message(b"\n", self.resource_spans) - - -class ExportTraceServiceResponse(MessageMarshaler): - def __init__( - self, - partial_success: MessageMarshaler = None, - ): - self.partial_success = partial_success - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.partial_success: - proto_serializer.serialize_message(b"\n", self.partial_success) - - -class ExportTracePartialSuccess(MessageMarshaler): - def __init__( - self, - rejected_spans: int = 0, - error_message: str = "", - ): - self.rejected_spans = rejected_spans - self.error_message = error_message - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.error_message: - proto_serializer.serialize_string(b"\x12", self.error_message) - if self.rejected_spans: - proto_serializer.serialize_int64(b"\x08", self.rejected_spans) +def ExportTraceServiceRequest( + resource_spans: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if resource_spans: + proto_serializer.serialize_repeated_message(b"\n", resource_spans) + return proto_serializer.out + + +def ExportTraceServiceResponse( + partial_success: Optional[bytes] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if partial_success is not None: + proto_serializer.serialize_message(b"\n", partial_success) + return proto_serializer.out + + +def ExportTracePartialSuccess( + rejected_spans: Optional[int] = None, + error_message: Optional[str] = None, +) -> 
bytes: + proto_serializer = ProtoSerializer() + if rejected_spans: + proto_serializer.serialize_int64(b"\x08", rejected_spans) + if error_message: + proto_serializer.serialize_string(b"\x12", error_message) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/common/v1/common.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/common/v1/common.py index 8a8940a..27f8c02 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/common/v1/common.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/common/v1/common.py @@ -1,112 +1,87 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! +# sources: opentelemetry/proto/common/v1/common.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) -class AnyValue(MessageMarshaler): - def __init__( - self, - string_value: str = None, - bool_value: bool = None, - int_value: int = None, - double_value: float = None, - array_value: MessageMarshaler = None, - kvlist_value: MessageMarshaler = None, - bytes_value: bytes = None, - ): - self.string_value = string_value - self.bool_value = bool_value - self.int_value = int_value - self.double_value = double_value - self.array_value = array_value - self.kvlist_value = kvlist_value - self.bytes_value = bytes_value - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - # oneof group value - if self.bytes_value is not None: - proto_serializer.serialize_bytes(b":", self.bytes_value) - elif self.kvlist_value is not None: - proto_serializer.serialize_message(b"2", self.kvlist_value) - elif self.array_value is not None: - proto_serializer.serialize_message(b"*", self.array_value) - elif self.double_value is not None: - proto_serializer.serialize_double(b"!", self.double_value) - 
elif self.int_value is not None: - proto_serializer.serialize_int64(b"\x18", self.int_value) - elif self.bool_value is not None: - proto_serializer.serialize_bool(b"\x10", self.bool_value) - elif self.string_value is not None: - proto_serializer.serialize_string(b"\n", self.string_value) - - -class ArrayValue(MessageMarshaler): - def __init__( - self, - values: List[MessageMarshaler] = None, - ): - self.values = values - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.values: - proto_serializer.serialize_repeated_message(b"\n", self.values) - - -class KeyValueList(MessageMarshaler): - def __init__( - self, - values: List[MessageMarshaler] = None, - ): - self.values = values - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.values: - proto_serializer.serialize_repeated_message(b"\n", self.values) - - -class KeyValue(MessageMarshaler): - def __init__( - self, - key: str = "", - value: MessageMarshaler = None, - ): - self.key = key - self.value = value - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.value: - proto_serializer.serialize_message(b"\x12", self.value) - if self.key: - proto_serializer.serialize_string(b"\n", self.key) - - -class InstrumentationScope(MessageMarshaler): - def __init__( - self, - name: str = "", - version: str = "", - attributes: List[MessageMarshaler] = None, - dropped_attributes_count: int = 0, - ): - self.name = name - self.version = version - self.attributes = attributes - self.dropped_attributes_count = dropped_attributes_count - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.dropped_attributes_count: - proto_serializer.serialize_uint32(b" ", self.dropped_attributes_count) - if self.attributes: - proto_serializer.serialize_repeated_message(b"\x1a", self.attributes) - if self.version: - proto_serializer.serialize_string(b"\x12", self.version) - if self.name: - proto_serializer.serialize_string(b"\n", self.name) +def AnyValue( + 
string_value: Optional[str] = None, + bool_value: Optional[bool] = None, + int_value: Optional[int] = None, + double_value: Optional[float] = None, + array_value: Optional[bytes] = None, + kvlist_value: Optional[bytes] = None, + bytes_value: Optional[bytes] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if string_value is not None: # oneof group value + proto_serializer.serialize_string(b"\n", string_value) + if bool_value is not None: # oneof group value + proto_serializer.serialize_bool(b"\x10", bool_value) + if int_value is not None: # oneof group value + proto_serializer.serialize_int64(b"\x18", int_value) + if double_value is not None: # oneof group value + proto_serializer.serialize_double(b"!", double_value) + if array_value is not None: # oneof group value + proto_serializer.serialize_message(b"*", array_value) + if kvlist_value is not None: # oneof group value + proto_serializer.serialize_message(b"2", kvlist_value) + if bytes_value is not None: # oneof group value + proto_serializer.serialize_bytes(b":", bytes_value) + return proto_serializer.out + + +def ArrayValue( + values: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if values: + proto_serializer.serialize_repeated_message(b"\n", values) + return proto_serializer.out + + +def KeyValueList( + values: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if values: + proto_serializer.serialize_repeated_message(b"\n", values) + return proto_serializer.out + + +def KeyValue( + key: Optional[str] = None, + value: Optional[bytes] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if key: + proto_serializer.serialize_string(b"\n", key) + if value is not None: + proto_serializer.serialize_message(b"\x12", value) + return proto_serializer.out + + +def InstrumentationScope( + name: Optional[str] = None, + version: Optional[str] = None, + attributes: Optional[List[bytes]] = None, + dropped_attributes_count: 
Optional[int] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if name: + proto_serializer.serialize_string(b"\n", name) + if version: + proto_serializer.serialize_string(b"\x12", version) + if attributes: + proto_serializer.serialize_repeated_message(b"\x1a", attributes) + if dropped_attributes_count: + proto_serializer.serialize_uint32(b" ", dropped_attributes_count) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/logs/v1/logs.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/logs/v1/logs.py index 73b5705..66b0e47 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/logs/v1/logs.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/logs/v1/logs.py @@ -1,12 +1,13 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! +# sources: opentelemetry/proto/logs/v1/logs.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) @@ -44,101 +45,76 @@ class LogRecordFlags(Enum): LOG_RECORD_FLAGS_TRACE_FLAGS_MASK = 255 -class LogsData(MessageMarshaler): - def __init__( - self, - resource_logs: List[MessageMarshaler] = None, - ): - self.resource_logs = resource_logs - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.resource_logs: - proto_serializer.serialize_repeated_message(b"\n", self.resource_logs) - - -class ResourceLogs(MessageMarshaler): - def __init__( - self, - resource: MessageMarshaler = None, - scope_logs: List[MessageMarshaler] = None, - schema_url: str = "", - ): - self.resource = resource - self.scope_logs = scope_logs - self.schema_url = schema_url - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.schema_url: - proto_serializer.serialize_string(b"\x1a", self.schema_url) - if 
self.scope_logs: - proto_serializer.serialize_repeated_message(b"\x12", self.scope_logs) - if self.resource: - proto_serializer.serialize_message(b"\n", self.resource) - - -class ScopeLogs(MessageMarshaler): - def __init__( - self, - scope: MessageMarshaler = None, - log_records: List[MessageMarshaler] = None, - schema_url: str = "", - ): - self.scope = scope - self.log_records = log_records - self.schema_url = schema_url - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.schema_url: - proto_serializer.serialize_string(b"\x1a", self.schema_url) - if self.log_records: - proto_serializer.serialize_repeated_message(b"\x12", self.log_records) - if self.scope: - proto_serializer.serialize_message(b"\n", self.scope) - - -class LogRecord(MessageMarshaler): - def __init__( - self, - time_unix_nano: int = 0, - severity_number: int = 0, - severity_text: str = "", - body: MessageMarshaler = None, - attributes: List[MessageMarshaler] = None, - dropped_attributes_count: int = 0, - flags: int = 0, - trace_id: bytes = b"", - span_id: bytes = b"", - observed_time_unix_nano: int = 0, - ): - self.time_unix_nano = time_unix_nano - self.severity_number = severity_number - self.severity_text = severity_text - self.body = body - self.attributes = attributes - self.dropped_attributes_count = dropped_attributes_count - self.flags = flags - self.trace_id = trace_id - self.span_id = span_id - self.observed_time_unix_nano = observed_time_unix_nano - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.observed_time_unix_nano: - proto_serializer.serialize_fixed64(b"Y", self.observed_time_unix_nano) - if self.span_id: - proto_serializer.serialize_bytes(b"R", self.span_id) - if self.trace_id: - proto_serializer.serialize_bytes(b"J", self.trace_id) - if self.flags: - proto_serializer.serialize_fixed32(b"E", self.flags) - if self.dropped_attributes_count: - proto_serializer.serialize_uint32(b"8", self.dropped_attributes_count) - if self.attributes: 
- proto_serializer.serialize_repeated_message(b"2", self.attributes) - if self.body: - proto_serializer.serialize_message(b"*", self.body) - if self.severity_text: - proto_serializer.serialize_string(b"\x1a", self.severity_text) - if self.severity_number: - proto_serializer.serialize_enum(b"\x10", self.severity_number) - if self.time_unix_nano: - proto_serializer.serialize_fixed64(b"\t", self.time_unix_nano) +def LogsData( + resource_logs: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if resource_logs: + proto_serializer.serialize_repeated_message(b"\n", resource_logs) + return proto_serializer.out + + +def ResourceLogs( + resource: Optional[bytes] = None, + scope_logs: Optional[List[bytes]] = None, + schema_url: Optional[str] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if resource is not None: + proto_serializer.serialize_message(b"\n", resource) + if scope_logs: + proto_serializer.serialize_repeated_message(b"\x12", scope_logs) + if schema_url: + proto_serializer.serialize_string(b"\x1a", schema_url) + return proto_serializer.out + + +def ScopeLogs( + scope: Optional[bytes] = None, + log_records: Optional[List[bytes]] = None, + schema_url: Optional[str] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if scope is not None: + proto_serializer.serialize_message(b"\n", scope) + if log_records: + proto_serializer.serialize_repeated_message(b"\x12", log_records) + if schema_url: + proto_serializer.serialize_string(b"\x1a", schema_url) + return proto_serializer.out + + +def LogRecord( + time_unix_nano: Optional[int] = None, + severity_number: Optional[int] = None, + severity_text: Optional[str] = None, + body: Optional[bytes] = None, + attributes: Optional[List[bytes]] = None, + dropped_attributes_count: Optional[int] = None, + flags: Optional[int] = None, + trace_id: Optional[bytes] = None, + span_id: Optional[bytes] = None, + observed_time_unix_nano: Optional[int] = None, +) -> bytes: + 
proto_serializer = ProtoSerializer() + if time_unix_nano: + proto_serializer.serialize_fixed64(b"\t", time_unix_nano) + if severity_number: + proto_serializer.serialize_enum(b"\x10", severity_number) + if severity_text: + proto_serializer.serialize_string(b"\x1a", severity_text) + if body is not None: + proto_serializer.serialize_message(b"*", body) + if attributes: + proto_serializer.serialize_repeated_message(b"2", attributes) + if dropped_attributes_count: + proto_serializer.serialize_uint32(b"8", dropped_attributes_count) + if flags: + proto_serializer.serialize_fixed32(b"E", flags) + if trace_id: + proto_serializer.serialize_bytes(b"J", trace_id) + if span_id: + proto_serializer.serialize_bytes(b"R", span_id) + if observed_time_unix_nano: + proto_serializer.serialize_fixed64(b"Y", observed_time_unix_nano) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/metrics/v1/metrics.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/metrics/v1/metrics.py index de47911..d71f1e9 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/metrics/v1/metrics.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/metrics/v1/metrics.py @@ -1,12 +1,13 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! 
+# sources: opentelemetry/proto/metrics/v1/metrics.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) @@ -22,432 +23,319 @@ class DataPointFlags(Enum): DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1 -class MetricsData(MessageMarshaler): - def __init__( - self, - resource_metrics: List[MessageMarshaler] = None, - ): - self.resource_metrics = resource_metrics - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.resource_metrics: - proto_serializer.serialize_repeated_message(b"\n", self.resource_metrics) - - -class ResourceMetrics(MessageMarshaler): - def __init__( - self, - resource: MessageMarshaler = None, - scope_metrics: List[MessageMarshaler] = None, - schema_url: str = "", - ): - self.resource = resource - self.scope_metrics = scope_metrics - self.schema_url = schema_url - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.schema_url: - proto_serializer.serialize_string(b"\x1a", self.schema_url) - if self.scope_metrics: - proto_serializer.serialize_repeated_message(b"\x12", self.scope_metrics) - if self.resource: - proto_serializer.serialize_message(b"\n", self.resource) - - -class ScopeMetrics(MessageMarshaler): - def __init__( - self, - scope: MessageMarshaler = None, - metrics: List[MessageMarshaler] = None, - schema_url: str = "", - ): - self.scope = scope - self.metrics = metrics - self.schema_url = schema_url - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.schema_url: - proto_serializer.serialize_string(b"\x1a", self.schema_url) - if self.metrics: - proto_serializer.serialize_repeated_message(b"\x12", self.metrics) - if self.scope: - proto_serializer.serialize_message(b"\n", self.scope) - - -class Metric(MessageMarshaler): - def __init__( - self, - name: str = "", - description: str = "", - unit: str = "", - gauge: MessageMarshaler = None, - sum: 
MessageMarshaler = None, - histogram: MessageMarshaler = None, - exponential_histogram: MessageMarshaler = None, - summary: MessageMarshaler = None, - metadata: List[MessageMarshaler] = None, - ): - self.name = name - self.description = description - self.unit = unit - self.gauge = gauge - self.sum = sum - self.histogram = histogram - self.exponential_histogram = exponential_histogram - self.summary = summary - self.metadata = metadata - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.metadata: - proto_serializer.serialize_repeated_message(b"b", self.metadata) - # oneof group data - if self.summary is not None: - proto_serializer.serialize_message(b"Z", self.summary) - elif self.exponential_histogram is not None: - proto_serializer.serialize_message(b"R", self.exponential_histogram) - elif self.histogram is not None: - proto_serializer.serialize_message(b"J", self.histogram) - elif self.sum is not None: - proto_serializer.serialize_message(b":", self.sum) - elif self.gauge is not None: - proto_serializer.serialize_message(b"*", self.gauge) - if self.unit: - proto_serializer.serialize_string(b"\x1a", self.unit) - if self.description: - proto_serializer.serialize_string(b"\x12", self.description) - if self.name: - proto_serializer.serialize_string(b"\n", self.name) - - -class Gauge(MessageMarshaler): - def __init__( - self, - data_points: List[MessageMarshaler] = None, - ): - self.data_points = data_points - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.data_points: - proto_serializer.serialize_repeated_message(b"\n", self.data_points) - - -class Sum(MessageMarshaler): - def __init__( - self, - data_points: List[MessageMarshaler] = None, - aggregation_temporality: int = 0, - is_monotonic: bool = False, - ): - self.data_points = data_points - self.aggregation_temporality = aggregation_temporality - self.is_monotonic = is_monotonic - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if 
self.is_monotonic: - proto_serializer.serialize_bool(b"\x18", self.is_monotonic) - if self.aggregation_temporality: - proto_serializer.serialize_enum(b"\x10", self.aggregation_temporality) - if self.data_points: - proto_serializer.serialize_repeated_message(b"\n", self.data_points) - - -class Histogram(MessageMarshaler): - def __init__( - self, - data_points: List[MessageMarshaler] = None, - aggregation_temporality: int = 0, - ): - self.data_points = data_points - self.aggregation_temporality = aggregation_temporality - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.aggregation_temporality: - proto_serializer.serialize_enum(b"\x10", self.aggregation_temporality) - if self.data_points: - proto_serializer.serialize_repeated_message(b"\n", self.data_points) - - -class ExponentialHistogram(MessageMarshaler): - def __init__( - self, - data_points: List[MessageMarshaler] = None, - aggregation_temporality: int = 0, - ): - self.data_points = data_points - self.aggregation_temporality = aggregation_temporality - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.aggregation_temporality: - proto_serializer.serialize_enum(b"\x10", self.aggregation_temporality) - if self.data_points: - proto_serializer.serialize_repeated_message(b"\n", self.data_points) - - -class Summary(MessageMarshaler): - def __init__( - self, - data_points: List[MessageMarshaler] = None, - ): - self.data_points = data_points - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.data_points: - proto_serializer.serialize_repeated_message(b"\n", self.data_points) - - -class NumberDataPoint(MessageMarshaler): - def __init__( - self, - start_time_unix_nano: int = 0, - time_unix_nano: int = 0, - exemplars: List[MessageMarshaler] = None, - as_double: float = None, - as_int: int = None, - attributes: List[MessageMarshaler] = None, - flags: int = 0, - ): - self.start_time_unix_nano = start_time_unix_nano - self.time_unix_nano = 
time_unix_nano - self.exemplars = exemplars - self.as_double = as_double - self.as_int = as_int - self.attributes = attributes - self.flags = flags - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.flags: - proto_serializer.serialize_uint32(b"@", self.flags) - if self.attributes: - proto_serializer.serialize_repeated_message(b":", self.attributes) - # oneof group value - if self.as_int is not None: - proto_serializer.serialize_sfixed64(b"1", self.as_int) - elif self.as_double is not None: - proto_serializer.serialize_double(b"!", self.as_double) - if self.exemplars: - proto_serializer.serialize_repeated_message(b"*", self.exemplars) - if self.time_unix_nano: - proto_serializer.serialize_fixed64(b"\x19", self.time_unix_nano) - if self.start_time_unix_nano: - proto_serializer.serialize_fixed64(b"\x11", self.start_time_unix_nano) - - -class HistogramDataPoint(MessageMarshaler): - def __init__( - self, - start_time_unix_nano: int = 0, - time_unix_nano: int = 0, - count: int = 0, - sum: float = None, - bucket_counts: List[int] = None, - explicit_bounds: List[float] = None, - exemplars: List[MessageMarshaler] = None, - attributes: List[MessageMarshaler] = None, - flags: int = 0, - min: float = None, - max: float = None, - ): - self.start_time_unix_nano = start_time_unix_nano - self.time_unix_nano = time_unix_nano - self.count = count - self.sum = sum - self.bucket_counts = bucket_counts - self.explicit_bounds = explicit_bounds - self.exemplars = exemplars - self.attributes = attributes - self.flags = flags - self.min = min - self.max = max - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - # oneof group _max - if self.max is not None: - proto_serializer.serialize_double(b"a", self.max) - # oneof group _min - if self.min is not None: - proto_serializer.serialize_double(b"Y", self.min) - if self.flags: - proto_serializer.serialize_uint32(b"P", self.flags) - if self.attributes: - proto_serializer.serialize_repeated_message(b"J", 
self.attributes) - if self.exemplars: - proto_serializer.serialize_repeated_message(b"B", self.exemplars) - if self.explicit_bounds: - proto_serializer.serialize_repeated_double(b":", self.explicit_bounds) - if self.bucket_counts: - proto_serializer.serialize_repeated_fixed64(b"2", self.bucket_counts) - # oneof group _sum - if self.sum is not None: - proto_serializer.serialize_double(b")", self.sum) - if self.count: - proto_serializer.serialize_fixed64(b"!", self.count) - if self.time_unix_nano: - proto_serializer.serialize_fixed64(b"\x19", self.time_unix_nano) - if self.start_time_unix_nano: - proto_serializer.serialize_fixed64(b"\x11", self.start_time_unix_nano) - - -class ExponentialHistogramDataPoint(MessageMarshaler): - def __init__( - self, - attributes: List[MessageMarshaler] = None, - start_time_unix_nano: int = 0, - time_unix_nano: int = 0, - count: int = 0, - sum: float = None, - scale: int = 0, - zero_count: int = 0, - positive: MessageMarshaler = None, - negative: MessageMarshaler = None, - flags: int = 0, - exemplars: List[MessageMarshaler] = None, - min: float = None, - max: float = None, - zero_threshold: float = 0.0, - ): - self.attributes = attributes - self.start_time_unix_nano = start_time_unix_nano - self.time_unix_nano = time_unix_nano - self.count = count - self.sum = sum - self.scale = scale - self.zero_count = zero_count - self.positive = positive - self.negative = negative - self.flags = flags - self.exemplars = exemplars - self.min = min - self.max = max - self.zero_threshold = zero_threshold - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.zero_threshold: - proto_serializer.serialize_double(b"q", self.zero_threshold) - # oneof group _max - if self.max is not None: - proto_serializer.serialize_double(b"i", self.max) - # oneof group _min - if self.min is not None: - proto_serializer.serialize_double(b"a", self.min) - if self.exemplars: - proto_serializer.serialize_repeated_message(b"Z", self.exemplars) - if 
self.flags: - proto_serializer.serialize_uint32(b"P", self.flags) - if self.negative: - proto_serializer.serialize_message(b"J", self.negative) - if self.positive: - proto_serializer.serialize_message(b"B", self.positive) - if self.zero_count: - proto_serializer.serialize_fixed64(b"9", self.zero_count) - if self.scale: - proto_serializer.serialize_sint32(b"0", self.scale) - # oneof group _sum - if self.sum is not None: - proto_serializer.serialize_double(b")", self.sum) - if self.count: - proto_serializer.serialize_fixed64(b"!", self.count) - if self.time_unix_nano: - proto_serializer.serialize_fixed64(b"\x19", self.time_unix_nano) - if self.start_time_unix_nano: - proto_serializer.serialize_fixed64(b"\x11", self.start_time_unix_nano) - if self.attributes: - proto_serializer.serialize_repeated_message(b"\n", self.attributes) - - class Buckets(MessageMarshaler): - def __init__( - self, - offset: int = 0, - bucket_counts: List[int] = None, - ): - self.offset = offset - self.bucket_counts = bucket_counts - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.bucket_counts: - proto_serializer.serialize_repeated_uint64(b"\x12", self.bucket_counts) - if self.offset: - proto_serializer.serialize_sint32(b"\x08", self.offset) - - -class SummaryDataPoint(MessageMarshaler): - def __init__( - self, - start_time_unix_nano: int = 0, - time_unix_nano: int = 0, - count: int = 0, - sum: float = 0.0, - quantile_values: List[MessageMarshaler] = None, - attributes: List[MessageMarshaler] = None, - flags: int = 0, - ): - self.start_time_unix_nano = start_time_unix_nano - self.time_unix_nano = time_unix_nano - self.count = count - self.sum = sum - self.quantile_values = quantile_values - self.attributes = attributes - self.flags = flags - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.flags: - proto_serializer.serialize_uint32(b"@", self.flags) - if self.attributes: - proto_serializer.serialize_repeated_message(b":", self.attributes) - 
if self.quantile_values: - proto_serializer.serialize_repeated_message(b"2", self.quantile_values) - if self.sum: - proto_serializer.serialize_double(b")", self.sum) - if self.count: - proto_serializer.serialize_fixed64(b"!", self.count) - if self.time_unix_nano: - proto_serializer.serialize_fixed64(b"\x19", self.time_unix_nano) - if self.start_time_unix_nano: - proto_serializer.serialize_fixed64(b"\x11", self.start_time_unix_nano) - - class ValueAtQuantile(MessageMarshaler): - def __init__( - self, - quantile: float = 0.0, - value: float = 0.0, - ): - self.quantile = quantile - self.value = value - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.value: - proto_serializer.serialize_double(b"\x11", self.value) - if self.quantile: - proto_serializer.serialize_double(b"\t", self.quantile) - - -class Exemplar(MessageMarshaler): - def __init__( - self, - time_unix_nano: int = 0, - span_id: bytes = b"", - trace_id: bytes = b"", - as_double: float = None, - as_int: int = None, - filtered_attributes: List[MessageMarshaler] = None, - ): - self.time_unix_nano = time_unix_nano - self.span_id = span_id - self.trace_id = trace_id - self.as_double = as_double - self.as_int = as_int - self.filtered_attributes = filtered_attributes - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.filtered_attributes: - proto_serializer.serialize_repeated_message(b":", self.filtered_attributes) - # oneof group value - if self.as_int is not None: - proto_serializer.serialize_sfixed64(b"1", self.as_int) - elif self.as_double is not None: - proto_serializer.serialize_double(b"\x19", self.as_double) - if self.trace_id: - proto_serializer.serialize_bytes(b"*", self.trace_id) - if self.span_id: - proto_serializer.serialize_bytes(b'"', self.span_id) - if self.time_unix_nano: - proto_serializer.serialize_fixed64(b"\x11", self.time_unix_nano) +def MetricsData( + resource_metrics: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = 
ProtoSerializer() + if resource_metrics: + proto_serializer.serialize_repeated_message(b"\n", resource_metrics) + return proto_serializer.out + + +def ResourceMetrics( + resource: Optional[bytes] = None, + scope_metrics: Optional[List[bytes]] = None, + schema_url: Optional[str] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if resource is not None: + proto_serializer.serialize_message(b"\n", resource) + if scope_metrics: + proto_serializer.serialize_repeated_message(b"\x12", scope_metrics) + if schema_url: + proto_serializer.serialize_string(b"\x1a", schema_url) + return proto_serializer.out + + +def ScopeMetrics( + scope: Optional[bytes] = None, + metrics: Optional[List[bytes]] = None, + schema_url: Optional[str] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if scope is not None: + proto_serializer.serialize_message(b"\n", scope) + if metrics: + proto_serializer.serialize_repeated_message(b"\x12", metrics) + if schema_url: + proto_serializer.serialize_string(b"\x1a", schema_url) + return proto_serializer.out + + +def Metric( + name: Optional[str] = None, + description: Optional[str] = None, + unit: Optional[str] = None, + gauge: Optional[bytes] = None, + sum: Optional[bytes] = None, + histogram: Optional[bytes] = None, + exponential_histogram: Optional[bytes] = None, + summary: Optional[bytes] = None, + metadata: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if name: + proto_serializer.serialize_string(b"\n", name) + if description: + proto_serializer.serialize_string(b"\x12", description) + if unit: + proto_serializer.serialize_string(b"\x1a", unit) + if gauge is not None: # oneof group data + proto_serializer.serialize_message(b"*", gauge) + if sum is not None: # oneof group data + proto_serializer.serialize_message(b":", sum) + if histogram is not None: # oneof group data + proto_serializer.serialize_message(b"J", histogram) + if exponential_histogram is not None: # oneof group data + 
proto_serializer.serialize_message(b"R", exponential_histogram) + if summary is not None: # oneof group data + proto_serializer.serialize_message(b"Z", summary) + if metadata: + proto_serializer.serialize_repeated_message(b"b", metadata) + return proto_serializer.out + + +def Gauge( + data_points: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if data_points: + proto_serializer.serialize_repeated_message(b"\n", data_points) + return proto_serializer.out + + +def Sum( + data_points: Optional[List[bytes]] = None, + aggregation_temporality: Optional[int] = None, + is_monotonic: Optional[bool] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if data_points: + proto_serializer.serialize_repeated_message(b"\n", data_points) + if aggregation_temporality: + proto_serializer.serialize_enum(b"\x10", aggregation_temporality) + if is_monotonic: + proto_serializer.serialize_bool(b"\x18", is_monotonic) + return proto_serializer.out + + +def Histogram( + data_points: Optional[List[bytes]] = None, + aggregation_temporality: Optional[int] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if data_points: + proto_serializer.serialize_repeated_message(b"\n", data_points) + if aggregation_temporality: + proto_serializer.serialize_enum(b"\x10", aggregation_temporality) + return proto_serializer.out + + +def ExponentialHistogram( + data_points: Optional[List[bytes]] = None, + aggregation_temporality: Optional[int] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if data_points: + proto_serializer.serialize_repeated_message(b"\n", data_points) + if aggregation_temporality: + proto_serializer.serialize_enum(b"\x10", aggregation_temporality) + return proto_serializer.out + + +def Summary( + data_points: Optional[List[bytes]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if data_points: + proto_serializer.serialize_repeated_message(b"\n", data_points) + return proto_serializer.out + + +def 
NumberDataPoint( + start_time_unix_nano: Optional[int] = None, + time_unix_nano: Optional[int] = None, + as_double: Optional[float] = None, + exemplars: Optional[List[bytes]] = None, + as_int: Optional[int] = None, + attributes: Optional[List[bytes]] = None, + flags: Optional[int] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if start_time_unix_nano: + proto_serializer.serialize_fixed64(b"\x11", start_time_unix_nano) + if time_unix_nano: + proto_serializer.serialize_fixed64(b"\x19", time_unix_nano) + if as_double is not None: # oneof group value + proto_serializer.serialize_double(b"!", as_double) + if exemplars: + proto_serializer.serialize_repeated_message(b"*", exemplars) + if as_int is not None: # oneof group value + proto_serializer.serialize_sfixed64(b"1", as_int) + if attributes: + proto_serializer.serialize_repeated_message(b":", attributes) + if flags: + proto_serializer.serialize_uint32(b"@", flags) + return proto_serializer.out + + +def HistogramDataPoint( + start_time_unix_nano: Optional[int] = None, + time_unix_nano: Optional[int] = None, + count: Optional[int] = None, + sum: Optional[float] = None, + bucket_counts: Optional[List[int]] = None, + explicit_bounds: Optional[List[float]] = None, + exemplars: Optional[List[bytes]] = None, + attributes: Optional[List[bytes]] = None, + flags: Optional[int] = None, + min: Optional[float] = None, + max: Optional[float] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if start_time_unix_nano: + proto_serializer.serialize_fixed64(b"\x11", start_time_unix_nano) + if time_unix_nano: + proto_serializer.serialize_fixed64(b"\x19", time_unix_nano) + if count: + proto_serializer.serialize_fixed64(b"!", count) + if sum is not None: # oneof group _sum + proto_serializer.serialize_double(b")", sum) + if bucket_counts: + proto_serializer.serialize_repeated_fixed64(b"2", bucket_counts) + if explicit_bounds: + proto_serializer.serialize_repeated_double(b":", explicit_bounds) + if exemplars: + 
proto_serializer.serialize_repeated_message(b"B", exemplars) + if attributes: + proto_serializer.serialize_repeated_message(b"J", attributes) + if flags: + proto_serializer.serialize_uint32(b"P", flags) + if min is not None: # oneof group _min + proto_serializer.serialize_double(b"Y", min) + if max is not None: # oneof group _max + proto_serializer.serialize_double(b"a", max) + return proto_serializer.out + + +def ExponentialHistogramDataPoint( + attributes: Optional[List[bytes]] = None, + start_time_unix_nano: Optional[int] = None, + time_unix_nano: Optional[int] = None, + count: Optional[int] = None, + sum: Optional[float] = None, + scale: Optional[int] = None, + zero_count: Optional[int] = None, + positive: Optional[bytes] = None, + negative: Optional[bytes] = None, + flags: Optional[int] = None, + exemplars: Optional[List[bytes]] = None, + min: Optional[float] = None, + max: Optional[float] = None, + zero_threshold: Optional[float] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if attributes: + proto_serializer.serialize_repeated_message(b"\n", attributes) + if start_time_unix_nano: + proto_serializer.serialize_fixed64(b"\x11", start_time_unix_nano) + if time_unix_nano: + proto_serializer.serialize_fixed64(b"\x19", time_unix_nano) + if count: + proto_serializer.serialize_fixed64(b"!", count) + if sum is not None: # oneof group _sum + proto_serializer.serialize_double(b")", sum) + if scale: + proto_serializer.serialize_sint32(b"0", scale) + if zero_count: + proto_serializer.serialize_fixed64(b"9", zero_count) + if positive is not None: + proto_serializer.serialize_message(b"B", positive) + if negative is not None: + proto_serializer.serialize_message(b"J", negative) + if flags: + proto_serializer.serialize_uint32(b"P", flags) + if exemplars: + proto_serializer.serialize_repeated_message(b"Z", exemplars) + if min is not None: # oneof group _min + proto_serializer.serialize_double(b"a", min) + if max is not None: # oneof group _max + 
proto_serializer.serialize_double(b"i", max) + if zero_threshold: + proto_serializer.serialize_double(b"q", zero_threshold) + return proto_serializer.out + + +def ExponentialHistogramDataPoint_Buckets( + offset: Optional[int] = None, + bucket_counts: Optional[List[int]] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if offset: + proto_serializer.serialize_sint32(b"\x08", offset) + if bucket_counts: + proto_serializer.serialize_repeated_uint64(b"\x12", bucket_counts) + return proto_serializer.out + + +def SummaryDataPoint( + start_time_unix_nano: Optional[int] = None, + time_unix_nano: Optional[int] = None, + count: Optional[int] = None, + sum: Optional[float] = None, + quantile_values: Optional[List[bytes]] = None, + attributes: Optional[List[bytes]] = None, + flags: Optional[int] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if start_time_unix_nano: + proto_serializer.serialize_fixed64(b"\x11", start_time_unix_nano) + if time_unix_nano: + proto_serializer.serialize_fixed64(b"\x19", time_unix_nano) + if count: + proto_serializer.serialize_fixed64(b"!", count) + if sum: + proto_serializer.serialize_double(b")", sum) + if quantile_values: + proto_serializer.serialize_repeated_message(b"2", quantile_values) + if attributes: + proto_serializer.serialize_repeated_message(b":", attributes) + if flags: + proto_serializer.serialize_uint32(b"@", flags) + return proto_serializer.out + + +def SummaryDataPoint_ValueAtQuantile( + quantile: Optional[float] = None, + value: Optional[float] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if quantile: + proto_serializer.serialize_double(b"\t", quantile) + if value: + proto_serializer.serialize_double(b"\x11", value) + return proto_serializer.out + + +def Exemplar( + time_unix_nano: Optional[int] = None, + as_double: Optional[float] = None, + span_id: Optional[bytes] = None, + trace_id: Optional[bytes] = None, + as_int: Optional[int] = None, + filtered_attributes: Optional[List[bytes]] = 
None, +) -> bytes: + proto_serializer = ProtoSerializer() + if time_unix_nano: + proto_serializer.serialize_fixed64(b"\x11", time_unix_nano) + if as_double is not None: # oneof group value + proto_serializer.serialize_double(b"\x19", as_double) + if span_id: + proto_serializer.serialize_bytes(b'"', span_id) + if trace_id: + proto_serializer.serialize_bytes(b"*", trace_id) + if as_int is not None: # oneof group value + proto_serializer.serialize_sfixed64(b"1", as_int) + if filtered_attributes: + proto_serializer.serialize_repeated_message(b":", filtered_attributes) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/resource/v1/resource.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/resource/v1/resource.py index bbdedf5..683727c 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/resource/v1/resource.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/resource/v1/resource.py @@ -1,27 +1,24 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! 
+# sources: opentelemetry/proto/resource/v1/resource.proto -from typing import List +from typing import ( + List, + Optional, +) from snowflake.telemetry._internal.serialize import ( Enum, - MessageMarshaler, ProtoSerializer, ) -class Resource(MessageMarshaler): - def __init__( - self, - attributes: List[MessageMarshaler] = None, - dropped_attributes_count: int = 0, - ): - self.attributes = attributes - self.dropped_attributes_count = dropped_attributes_count - - def write_to(self, proto_serializer: ProtoSerializer) -> None: - if self.dropped_attributes_count: - proto_serializer.serialize_uint32(b"\x10", self.dropped_attributes_count) - if self.attributes: - proto_serializer.serialize_repeated_message(b"\n", self.attributes) +def Resource( + attributes: Optional[List[bytes]] = None, + dropped_attributes_count: Optional[int] = None, +) -> bytes: + proto_serializer = ProtoSerializer() + if attributes: + proto_serializer.serialize_repeated_message(b"\n", attributes) + if dropped_attributes_count: + proto_serializer.serialize_uint32(b"\x10", dropped_attributes_count) + return proto_serializer.out diff --git a/src/snowflake/telemetry/_internal/opentelemetry/proto/trace/v1/trace.py b/src/snowflake/telemetry/_internal/opentelemetry/proto/trace/v1/trace.py index 5026ffa..48f2908 100644 --- a/src/snowflake/telemetry/_internal/opentelemetry/proto/trace/v1/trace.py +++ b/src/snowflake/telemetry/_internal/opentelemetry/proto/trace/v1/trace.py @@ -1,12 +1,13 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: -# plugin: python-serialize +# Generated by the protoc compiler with a custom plugin. DO NOT EDIT! 
def TracesData(
    resource_spans: Optional[List[bytes]] = None,
) -> bytes:
    """Serialize a TracesData message (a list of ResourceSpans)."""
    ps = ProtoSerializer()
    if resource_spans:
        ps.serialize_repeated_message(b"\n", resource_spans)
    return ps.out


def ResourceSpans(
    resource: Optional[bytes] = None,
    scope_spans: Optional[List[bytes]] = None,
    schema_url: Optional[str] = None,
) -> bytes:
    """Serialize a ResourceSpans message.

    ``resource`` is compared with ``is not None`` so that an empty (all
    defaults) sub-message is still encoded as present.
    """
    ps = ProtoSerializer()
    if resource is not None:
        ps.serialize_message(b"\n", resource)
    if scope_spans:
        ps.serialize_repeated_message(b"\x12", scope_spans)
    if schema_url:
        ps.serialize_string(b"\x1a", schema_url)
    return ps.out


def ScopeSpans(
    scope: Optional[bytes] = None,
    spans: Optional[List[bytes]] = None,
    schema_url: Optional[str] = None,
) -> bytes:
    """Serialize a ScopeSpans message."""
    ps = ProtoSerializer()
    if scope is not None:
        ps.serialize_message(b"\n", scope)
    if spans:
        ps.serialize_repeated_message(b"\x12", spans)
    if schema_url:
        ps.serialize_string(b"\x1a", schema_url)
    return ps.out


def Span(
    trace_id: Optional[bytes] = None,
    span_id: Optional[bytes] = None,
    trace_state: Optional[str] = None,
    parent_span_id: Optional[bytes] = None,
    name: Optional[str] = None,
    kind: Optional[int] = None,
    start_time_unix_nano: Optional[int] = None,
    end_time_unix_nano: Optional[int] = None,
    attributes: Optional[List[bytes]] = None,
    dropped_attributes_count: Optional[int] = None,
    events: Optional[List[bytes]] = None,
    dropped_events_count: Optional[int] = None,
    links: Optional[List[bytes]] = None,
    dropped_links_count: Optional[int] = None,
    status: Optional[bytes] = None,
    flags: Optional[int] = None,
) -> bytes:
    """Serialize a Span message.

    Fields are written in ascending field-number order (tags 1..16);
    falsy scalar fields are omitted per proto3 default-value semantics,
    while ``status`` (a sub-message) uses ``is not None`` so an empty
    Status is still encoded as present.
    """
    ps = ProtoSerializer()
    if trace_id:
        ps.serialize_bytes(b"\n", trace_id)
    if span_id:
        ps.serialize_bytes(b"\x12", span_id)
    if trace_state:
        ps.serialize_string(b"\x1a", trace_state)
    if parent_span_id:
        ps.serialize_bytes(b'"', parent_span_id)
    if name:
        ps.serialize_string(b"*", name)
    if kind:
        ps.serialize_enum(b"0", kind)
    if start_time_unix_nano:
        ps.serialize_fixed64(b"9", start_time_unix_nano)
    if end_time_unix_nano:
        ps.serialize_fixed64(b"A", end_time_unix_nano)
    if attributes:
        ps.serialize_repeated_message(b"J", attributes)
    if dropped_attributes_count:
        ps.serialize_uint32(b"P", dropped_attributes_count)
    if events:
        ps.serialize_repeated_message(b"Z", events)
    if dropped_events_count:
        ps.serialize_uint32(b"`", dropped_events_count)
    if links:
        ps.serialize_repeated_message(b"j", links)
    if dropped_links_count:
        ps.serialize_uint32(b"p", dropped_links_count)
    if status is not None:
        ps.serialize_message(b"z", status)
    if flags:
        ps.serialize_fixed32(b"\x85\x01", flags)
    return ps.out


class Span_SpanKind(Enum):
    # Mirrors opentelemetry.proto.trace.v1.Span.SpanKind.
    SPAN_KIND_UNSPECIFIED = 0
    SPAN_KIND_INTERNAL = 1
    SPAN_KIND_SERVER = 2
    SPAN_KIND_CLIENT = 3
    SPAN_KIND_PRODUCER = 4
    SPAN_KIND_CONSUMER = 5


def Span_Event(
    time_unix_nano: Optional[int] = None,
    name: Optional[str] = None,
    attributes: Optional[List[bytes]] = None,
    dropped_attributes_count: Optional[int] = None,
) -> bytes:
    """Serialize a Span.Event message."""
    ps = ProtoSerializer()
    if time_unix_nano:
        ps.serialize_fixed64(b"\t", time_unix_nano)
    if name:
        ps.serialize_string(b"\x12", name)
    if attributes:
        ps.serialize_repeated_message(b"\x1a", attributes)
    if dropped_attributes_count:
        ps.serialize_uint32(b" ", dropped_attributes_count)
    return ps.out


def Span_Link(
    trace_id: Optional[bytes] = None,
    span_id: Optional[bytes] = None,
    trace_state: Optional[str] = None,
    attributes: Optional[List[bytes]] = None,
    dropped_attributes_count: Optional[int] = None,
    flags: Optional[int] = None,
) -> bytes:
    """Serialize a Span.Link message."""
    ps = ProtoSerializer()
    if trace_id:
        ps.serialize_bytes(b"\n", trace_id)
    if span_id:
        ps.serialize_bytes(b"\x12", span_id)
    if trace_state:
        ps.serialize_string(b"\x1a", trace_state)
    if attributes:
        ps.serialize_repeated_message(b'"', attributes)
    if dropped_attributes_count:
        ps.serialize_uint32(b"(", dropped_attributes_count)
    if flags:
        ps.serialize_fixed32(b"5", flags)
    return ps.out


def Status(
    message: Optional[str] = None,
    code: Optional[int] = None,
) -> bytes:
    """Serialize a Status message."""
    ps = ProtoSerializer()
    if message:
        ps.serialize_string(b"\x12", message)
    if code:
        ps.serialize_enum(b"\x18", code)
    return ps.out


class Status_StatusCode(Enum):
    # Mirrors opentelemetry.proto.trace.v1.Status.StatusCode.
    STATUS_CODE_UNSET = 0
    STATUS_CODE_OK = 1
    STATUS_CODE_ERROR = 2
class ProtoSerializer:
    """Append-only protobuf wire-format serializer.

    Accumulates encoded fields in ``self.out`` (a ``bytearray``) in the
    order the ``serialize_*`` methods are called. Each method takes the
    pre-encoded tag bytes (field number + wire type) followed by the value.
    ``bytes(serializer)`` returns an immutable copy of the buffer.
    """

    # NOTE(fix): was ("out") — a bare string, which only works because
    # __slots__ accepts a single name as a string; make it a real tuple.
    __slots__ = ("out",)

    def __init__(self) -> None:
        self.out = bytearray()

    def __bytes__(self) -> bytes:
        return bytes(self.out)

    def serialize_bool(self, tag: bytes, value: bool) -> None:
        self.out += tag
        self._write_varint_unsigned(1 if value else 0)

    def serialize_enum(self, tag: bytes, value: Union[IntEnum, int]) -> None:
        # Accept either the IntEnum member or its raw integer value.
        if not isinstance(value, int):
            value = value.value
        self.out += tag
        self._write_varint_unsigned(value)

    def serialize_uint32(self, tag: bytes, value: int) -> None:
        self.out += tag
        self._write_varint_unsigned(value)

    def serialize_uint64(self, tag: bytes, value: int) -> None:
        self.out += tag
        self._write_varint_unsigned(value)

    def serialize_sint32(self, tag: bytes, value: int) -> None:
        # ZigZag encoding: for negative n, (n << 1) ^ ~0 == ~(n << 1).
        self.out += tag
        self._write_varint_unsigned(value << 1 if value >= 0 else (value << 1) ^ (~0))

    def serialize_sint64(self, tag: bytes, value: int) -> None:
        self.out += tag
        self._write_varint_unsigned(value << 1 if value >= 0 else (value << 1) ^ (~0))

    def serialize_int32(self, tag: bytes, value: int) -> None:
        # FIX: was `value + 1 << 32 if value < 0 else value`, which parses as
        # `(value + 1) << 32` (e.g. -1 encoded as varint 0). Per the protobuf
        # spec, negative int32 values are sign-extended to 64 bits, i.e.
        # encoded as the 10-byte varint of value + 2**64.
        self.out += tag
        self._write_varint_unsigned(value + (1 << 64) if value < 0 else value)

    def serialize_int64(self, tag: bytes, value: int) -> None:
        # FIX: same precedence bug as serialize_int32 (`(value + 1) << 64`).
        self.out += tag
        self._write_varint_unsigned(value + (1 << 64) if value < 0 else value)

    def serialize_fixed32(self, tag: bytes, value: int) -> None:
        self.out += tag
        self.out += struct.pack("<I", value)

    def serialize_fixed64(self, tag: bytes, value: int) -> None:
        self.out += tag
        self.out += struct.pack("<Q", value)

    def serialize_sfixed32(self, tag: bytes, value: int) -> None:
        self.out += tag
        self.out += struct.pack("<i", value)

    def serialize_sfixed64(self, tag: bytes, value: int) -> None:
        self.out += tag
        self.out += struct.pack("<q", value)

    def serialize_float(self, tag: bytes, value: float) -> None:
        self.out += tag
        self.out += struct.pack("<f", value)

    def serialize_double(self, tag: bytes, value: float) -> None:
        self.out += tag
        self.out += struct.pack("<d", value)

    def serialize_bytes(self, tag: bytes, value: bytes) -> None:
        self.out += tag
        self._write_varint_unsigned(len(value))
        self.out += value

    def serialize_string(self, tag: bytes, value: str) -> None:
        self.serialize_bytes(tag, value.encode("utf-8"))

    def serialize_message(
        self,
        tag: bytes,
        value: bytes,
    ) -> None:
        # If value is None, omit the message entirely.
        if value is None:
            return
        # Otherwise write it even when empty: the presence of the
        # sub-message is itself significant on the wire.
        self.out += tag
        self._write_varint_unsigned(len(value))
        self.out += value

    def serialize_repeated_message(
        self,
        tag: bytes,
        values: List[bytes],
    ) -> None:
        if not values:
            return
        # Local reference to avoid repeated attribute lookups in the loop.
        write_one = self.serialize_message
        for value in values:
            write_one(tag, value)

    def serialize_repeated_double(self, tag: bytes, values: List[float]) -> None:
        if not values:
            return
        # Packed encoding: one tag, byte-length prefix, then raw values.
        self.out += tag
        self._write_varint_unsigned(len(values) * 8)
        for value in values:
            self.write_double_no_tag(value)

    def serialize_repeated_fixed64(self, tag: bytes, values: List[int]) -> None:
        if not values:
            return
        self.out += tag
        self._write_varint_unsigned(len(values) * 8)
        for value in values:
            self.write_fixed64_no_tag(value)

    def serialize_repeated_uint64(self, tag: bytes, values: List[int]) -> None:
        if not values:
            return
        # Varints are variable-width, so the packed length prefix must be
        # the byte length of the encoded payload (NOT len(values) * 8):
        # encode into a scratch buffer first, then prefix it.
        payload = bytearray()
        for value in values:
            while value >= 128:
                payload.append((value & 0x7F) | 0x80)
                value >>= 7
            payload.append(value)
        self.out += tag
        self._write_varint_unsigned(len(payload))
        self.out += payload

    def _write_varint_unsigned(self, value: int) -> None:
        # Base-128 varint, least-significant group first; high bit marks
        # continuation.
        while value >= 128:
            self.out.append((value & 0x7F) | 0x80)
            value >>= 7
        self.out.append(value)

    def write_double_no_tag(self, value: float) -> None:
        self.out += struct.pack("<d", value)

    def write_fixed64_no_tag(self, value: int) -> None:
        self.out += struct.pack("<Q", value)