From aa1e60544f9843bb810323ec6c2146a43f15e6b6 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Wed, 24 Sep 2025 16:00:10 -0400 Subject: [PATCH 01/33] wip --- lib/codecs/src/encoding/format/protobuf.rs | 14 +- lib/codecs/src/encoding/mod.rs | 6 + lib/opentelemetry-proto/src/proto.rs | 7 + src/codecs/encoding/config.rs | 4 +- src/sinks/http/config.rs | 149 ++++++++++-------- src/sinks/opentelemetry/mod.rs | 64 ++++++-- src/sources/opentelemetry/config.rs | 51 +++--- .../e2e/opentelemetry/logs/vector_otlp.yaml | 1 + 8 files changed, 183 insertions(+), 113 deletions(-) diff --git a/lib/codecs/src/encoding/format/protobuf.rs b/lib/codecs/src/encoding/format/protobuf.rs index 9100f3fe815f0..1057a880afb14 100644 --- a/lib/codecs/src/encoding/format/protobuf.rs +++ b/lib/codecs/src/encoding/format/protobuf.rs @@ -1,5 +1,6 @@ use std::path::PathBuf; +use crate::encoding::BuildError; use bytes::BytesMut; use prost_reflect::{MessageDescriptor, prost::Message as _}; use tokio_util::codec::Encoder; @@ -9,9 +10,10 @@ use vector_core::{ event::{Event, Value}, schema, }; -use vrl::protobuf::{descriptor::get_message_descriptor, encode::encode_message}; - -use crate::encoding::BuildError; +use vrl::protobuf::{ + descriptor::{get_message_descriptor, get_message_descriptor_from_bytes}, + encode::encode_message, +}; /// Config used to build a `ProtobufSerializer`. #[configurable_component] @@ -72,6 +74,12 @@ impl ProtobufSerializer { Self { message_descriptor } } + /// Creates a new serializer instance using the descriptor bytes directly. + pub fn new_from_bytes(desc_bytes: &[u8], message_type: &str) -> vector_common::Result { + let message_descriptor = get_message_descriptor_from_bytes(desc_bytes, message_type)?; + Ok(Self { message_descriptor }) + } + /// Get a description of the message type used in serialization. 
pub fn descriptor_proto(&self) -> &prost_reflect::prost_types::DescriptorProto { self.message_descriptor.descriptor_proto() diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs index 91e45ffc6d1c7..8352d27559cd5 100644 --- a/lib/codecs/src/encoding/mod.rs +++ b/lib/codecs/src/encoding/mod.rs @@ -293,6 +293,12 @@ pub enum SerializerConfig { Text(TextSerializerConfig), } +impl Default for SerializerConfig { + fn default() -> Self { + Self::Json(JsonSerializerConfig::default()) + } +} + impl From for SerializerConfig { fn from(config: AvroSerializerConfig) -> Self { Self::Avro { avro: config.avro } diff --git a/lib/opentelemetry-proto/src/proto.rs b/lib/opentelemetry-proto/src/proto.rs index 5559113bd14db..a770d7d51506b 100644 --- a/lib/opentelemetry-proto/src/proto.rs +++ b/lib/opentelemetry-proto/src/proto.rs @@ -1,3 +1,10 @@ +pub const LOGS_REQUEST_MESSAGE_TYPE: &str = + "opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest"; +pub const TRACES_REQUEST_MESSAGE_TYPE: &str = + "opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest"; +pub const METRICS_REQUEST_MESSAGE_TYPE: &str = + "opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest"; + /// Service stub and clients. pub mod collector { pub mod trace { diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs index 756bc8e2406f2..dc76b7ce2f46e 100644 --- a/src/codecs/encoding/config.rs +++ b/src/codecs/encoding/config.rs @@ -10,7 +10,7 @@ use crate::codecs::Transformer; /// Encoding configuration. #[configurable_component] -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] /// Configures how events are encoded into raw bytes. /// The selected encoding also determines which input types (logs, metrics, traces) are supported. pub struct EncodingConfig { @@ -60,7 +60,7 @@ where /// Encoding configuration. 
#[configurable_component] -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] #[serde(deny_unknown_fields)] pub struct EncodingConfigWithFraming { #[configurable(derived)] diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index ab72afb9c4cab..0e53cc21d64ec 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -68,6 +68,8 @@ pub struct HttpSinkConfig { #[serde(default)] pub compression: Compression, + /// If not specified, `encoding.codec` will default to `json`. + /// If `encoding.framing` is not specified, it will be deduced from `encoding.codec`. #[serde(flatten)] pub encoding: EncodingConfigWithFraming, @@ -170,79 +172,15 @@ impl HttpSinkConfig { let (framer, serializer) = self.encoding.build(SinkType::MessageBased)?; Ok(Encoder::::new(framer, serializer)) } -} - -impl GenerateConfig for HttpSinkConfig { - fn generate_config() -> toml::Value { - toml::from_str( - r#"uri = "https://10.22.212.22:9000/endpoint" - encoding.codec = "json""#, - ) - .unwrap() - } -} - -async fn healthcheck(uri: UriSerde, auth: Option, client: HttpClient) -> crate::Result<()> { - let auth = auth.choose_one(&uri.auth)?; - let uri = uri.with_default_parts(); - let mut request = Request::head(&uri.uri).body(Body::empty()).unwrap(); - - if let Some(auth) = auth { - auth.apply(&mut request); - } - - let response = client.send(request).await?; - - match response.status() { - StatusCode::OK => Ok(()), - status => Err(HealthcheckError::UnexpectedStatus { status }.into()), - } -} - -pub(super) fn validate_headers( - headers: &BTreeMap, - configures_auth: bool, -) -> crate::Result> { - let headers = crate::sinks::util::http::validate_headers(headers)?; - - for name in headers.keys() { - if configures_auth && name.inner() == AUTHORIZATION { - return Err("Authorization header can not be used with defined auth options".into()); - } - } - Ok(headers) -} - -pub(super) fn validate_payload_wrapper( - payload_prefix: &str, - payload_suffix: &str, - encoder: 
&Encoder, -) -> crate::Result<(String, String)> { - let payload = [payload_prefix, "{}", payload_suffix].join(""); - match ( - encoder.serializer(), - encoder.framer(), - serde_json::from_str::(&payload), - ) { - ( - Serializer::Json(_), - Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' }), - Err(_), - ) => Err("Payload prefix and suffix wrapper must produce a valid JSON object.".into()), - _ => Ok((payload_prefix.to_owned(), payload_suffix.to_owned())), - } -} - -#[async_trait] -#[typetag::serde(name = "http")] -impl SinkConfig for HttpSinkConfig { - async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { + pub(crate) async fn build_with_encoder( + &self, + cx: SinkContext, + encoder: Encoder, + transformer: Transformer, + ) -> crate::Result<(VectorSink, Healthcheck)> { let batch_settings = self.batch.validate()?.into_batcher_settings()?; - let encoder = self.build_encoder()?; - let transformer = self.encoding.transformer(); - let mut request = self.request.clone(); request.add_old_option(self.headers.clone()); @@ -350,6 +288,77 @@ impl SinkConfig for HttpSinkConfig { Ok((VectorSink::from_event_streamsink(sink), healthcheck)) } +} + +impl GenerateConfig for HttpSinkConfig { + fn generate_config() -> toml::Value { + toml::from_str( + r#"uri = "https://10.22.212.22:9000/endpoint" + encoding.codec = "json""#, + ) + .unwrap() + } +} + +async fn healthcheck(uri: UriSerde, auth: Option, client: HttpClient) -> crate::Result<()> { + let auth = auth.choose_one(&uri.auth)?; + let uri = uri.with_default_parts(); + let mut request = Request::head(&uri.uri).body(Body::empty()).unwrap(); + + if let Some(auth) = auth { + auth.apply(&mut request); + } + + let response = client.send(request).await?; + + match response.status() { + StatusCode::OK => Ok(()), + status => Err(HealthcheckError::UnexpectedStatus { status }.into()), + } +} + +pub(super) fn validate_headers( + headers: &BTreeMap, + configures_auth: bool, +) -> 
crate::Result> { + let headers = crate::sinks::util::http::validate_headers(headers)?; + + for name in headers.keys() { + if configures_auth && name.inner() == AUTHORIZATION { + return Err("Authorization header can not be used with defined auth options".into()); + } + } + + Ok(headers) +} + +pub(super) fn validate_payload_wrapper( + payload_prefix: &str, + payload_suffix: &str, + encoder: &Encoder, +) -> crate::Result<(String, String)> { + let payload = [payload_prefix, "{}", payload_suffix].join(""); + match ( + encoder.serializer(), + encoder.framer(), + serde_json::from_str::(&payload), + ) { + ( + Serializer::Json(_), + Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' }), + Err(_), + ) => Err("Payload prefix and suffix wrapper must produce a valid JSON object.".into()), + _ => Ok((payload_prefix.to_owned(), payload_suffix.to_owned())), + } +} + +#[async_trait] +#[typetag::serde(name = "http")] +impl SinkConfig for HttpSinkConfig { + async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { + let encoder = self.build_encoder()?; + self.build_with_encoder(cx, encoder, self.encoding.transformer()).await + } fn input(&self) -> Input { Input::new(self.encoding.config().1.input_type()) diff --git a/src/sinks/opentelemetry/mod.rs b/src/sinks/opentelemetry/mod.rs index 88963f8603cde..8007e3f2a0b54 100644 --- a/src/sinks/opentelemetry/mod.rs +++ b/src/sinks/opentelemetry/mod.rs @@ -1,5 +1,18 @@ +use crate::codecs::Encoder; +use crate::{ + codecs::{EncodingConfigWithFraming, Transformer}, + config::{AcknowledgementsConfig, Input, SinkConfig, SinkContext}, + sinks::{ + Healthcheck, VectorSink, + http::config::{HttpMethod, HttpSinkConfig}, + }, +}; use indoc::indoc; use vector_config::component::GenerateConfig; +use vector_lib::codecs::encoding::{Framer, ProtobufSerializer, Serializer}; +use vector_lib::opentelemetry::proto::{ + LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, TRACES_REQUEST_MESSAGE_TYPE, +}; use 
vector_lib::{
    codecs::{
        JsonSerializerConfig,
@@ -8,15 +21,6 @@ use vector_lib::{
     configurable::configurable_component,
 };
 
-use crate::{
-    codecs::{EncodingConfigWithFraming, Transformer},
-    config::{AcknowledgementsConfig, Input, SinkConfig, SinkContext},
-    sinks::{
-        Healthcheck, VectorSink,
-        http::config::{HttpMethod, HttpSinkConfig},
-    },
-};
-
 /// Configuration for the `OpenTelemetry` sink.
 #[configurable_component(sink("opentelemetry", "Deliver OTLP data over HTTP."))]
 #[derive(Clone, Debug, Default)]
@@ -24,6 +28,19 @@ pub struct OpenTelemetryConfig {
     /// Protocol configuration
     #[configurable(derived)]
     protocol: Protocol,
+
+    /// Setting this field to `true` overrides all encoding settings, and requests are encoded based on the
+    /// [OpenTelemetry protocol](https://opentelemetry.io/docs/specs/otel/protocol/).
+    ///
+    /// The endpoint is used to determine the data type:
+    /// * v1/logs → OTLP Logs
+    /// * v1/traces → OTLP Traces
+    /// * v1/metrics → OTLP Metrics
+    ///
+    /// More information available [here](https://opentelemetry.io/docs/specs/otlp/#otlphttp-request).
+    #[configurable(derived)]
+    #[serde(default)]
+    pub use_otlp_encoding: bool,
+}
 
 /// The protocol used to send data to OpenTelemetry. 
@@ -78,7 +95,21 @@ impl GenerateConfig for OpenTelemetryConfig { impl SinkConfig for OpenTelemetryConfig { async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { match &self.protocol { - Protocol::Http(config) => config.build(cx).await, + Protocol::Http(config) => { + if self.use_otlp_encoding { + let serializer = ProtobufSerializer::new_from_bytes( + vector_lib::opentelemetry::proto::DESCRIPTOR_BYTES, + to_message_type(&config.uri.to_string())?, + )?; + let encoder = Encoder::::new( + FramingConfig::Bytes.build(), + Serializer::Protobuf(serializer), + ); + config.build_with_encoder(cx, encoder, config.encoding.transformer()).await + } else { + config.build(cx).await + } + } } } @@ -95,6 +126,19 @@ impl SinkConfig for OpenTelemetryConfig { } } +/// Checks if an endpoint ends with a known OTEL proto request. +pub fn to_message_type(endpoint: &str) -> crate::Result<&'static str> { + if endpoint.ends_with("v1/logs") { + Ok(LOGS_REQUEST_MESSAGE_TYPE) + } else if endpoint.ends_with("v1/traces") { + Ok(TRACES_REQUEST_MESSAGE_TYPE) + } else if endpoint.ends_with("v1/metrics") { + Ok(METRICS_REQUEST_MESSAGE_TYPE) + } else { + Err(format!("Endpoint {endpoint} not supported, should end with 'v1/logs', 'v1/metrics' or 'v1/traces'.").into()) + } +} + #[cfg(test)] mod test { #[test] diff --git a/src/sources/opentelemetry/config.rs b/src/sources/opentelemetry/config.rs index b2457f8f70113..14fd14fd26094 100644 --- a/src/sources/opentelemetry/config.rs +++ b/src/sources/opentelemetry/config.rs @@ -1,8 +1,28 @@ use std::net::SocketAddr; +use crate::{ + config::{ + DataType, GenerateConfig, Resource, SourceAcknowledgementsConfig, SourceConfig, + SourceContext, SourceOutput, + }, + http::KeepaliveConfig, + serde::bool_or_struct, + sources::{ + Source, + http_server::{build_param_matcher, remove_duplicates}, + opentelemetry::{ + grpc::Service, + http::{build_warp_filter, run_http_server}, + }, + util::grpc::run_grpc_server_with_routes, + }, +}; use 
futures::FutureExt; use futures_util::{TryFutureExt, future::join}; use tonic::{codec::CompressionEncoding, transport::server::RoutesBuilder}; +use vector_lib::opentelemetry::proto::{ + LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, TRACES_REQUEST_MESSAGE_TYPE, +}; use vector_lib::{ codecs::decoding::ProtobufDeserializer, config::{LegacyKey, LogNamespace, log_schema}, @@ -28,35 +48,10 @@ use vrl::{ value::{Kind, kind::Collection}, }; -use crate::{ - config::{ - DataType, GenerateConfig, Resource, SourceAcknowledgementsConfig, SourceConfig, - SourceContext, SourceOutput, - }, - http::KeepaliveConfig, - serde::bool_or_struct, - sources::{ - Source, - http_server::{build_param_matcher, remove_duplicates}, - opentelemetry::{ - grpc::Service, - http::{build_warp_filter, run_http_server}, - }, - util::grpc::run_grpc_server_with_routes, - }, -}; - pub const LOGS: &str = "logs"; pub const METRICS: &str = "metrics"; pub const TRACES: &str = "traces"; -pub const OTEL_PROTO_LOGS_REQUEST: &str = - "opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest"; -pub const OTEL_PROTO_TRACES_REQUEST: &str = - "opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest"; -pub const OTEL_PROTO_METRICS_REQUEST: &str = - "opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest"; - /// Configuration for the `opentelemetry` source. 
#[configurable_component(source("opentelemetry", "Receive OTLP data through gRPC or HTTP."))] #[derive(Clone, Debug)] @@ -198,7 +193,7 @@ impl SourceConfig for OpentelemetryConfig { let grpc_tls_settings = MaybeTlsSettings::from_config(self.grpc.tls.as_ref(), true)?; - let log_deserializer = self.get_deserializer(OTEL_PROTO_LOGS_REQUEST)?; + let log_deserializer = self.get_deserializer(LOGS_REQUEST_MESSAGE_TYPE)?; let log_service = LogsServiceServer::new(Service { pipeline: cx.out.clone(), acknowledgements, @@ -209,7 +204,7 @@ impl SourceConfig for OpentelemetryConfig { .accept_compressed(CompressionEncoding::Gzip) .max_decoding_message_size(usize::MAX); - let metric_deserializer = self.get_deserializer(OTEL_PROTO_METRICS_REQUEST)?; + let metric_deserializer = self.get_deserializer(METRICS_REQUEST_MESSAGE_TYPE)?; let metrics_service = MetricsServiceServer::new(Service { pipeline: cx.out.clone(), acknowledgements, @@ -220,7 +215,7 @@ impl SourceConfig for OpentelemetryConfig { .accept_compressed(CompressionEncoding::Gzip) .max_decoding_message_size(usize::MAX); - let trace_deserializer = self.get_deserializer(OTEL_PROTO_TRACES_REQUEST)?; + let trace_deserializer = self.get_deserializer(TRACES_REQUEST_MESSAGE_TYPE)?; let trace_service = TraceServiceServer::new(Service { pipeline: cx.out.clone(), acknowledgements, diff --git a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml index a3b98647059c5..e84196bcf7a07 100644 --- a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml +++ b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml @@ -19,6 +19,7 @@ sinks: inputs: - source0.logs type: opentelemetry + use_otlp_encoding: true protocol: type: http uri: http://otel-collector-sink:5318/v1/logs From fde54017a76b2ccc2ce12e3da6d6e9f10eece0a3 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Wed, 24 Sep 2025 16:49:58 -0400 Subject: [PATCH 02/33] ran cargo fmt --- src/sinks/http/config.rs | 3 ++- 
src/sinks/opentelemetry/mod.rs | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index 0e53cc21d64ec..cdc27abeb2bab 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -357,7 +357,8 @@ pub(super) fn validate_payload_wrapper( impl SinkConfig for HttpSinkConfig { async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { let encoder = self.build_encoder()?; - self.build_with_encoder(cx, encoder, self.encoding.transformer()).await + self.build_with_encoder(cx, encoder, self.encoding.transformer()) + .await } fn input(&self) -> Input { diff --git a/src/sinks/opentelemetry/mod.rs b/src/sinks/opentelemetry/mod.rs index 8007e3f2a0b54..c8e820eb933d2 100644 --- a/src/sinks/opentelemetry/mod.rs +++ b/src/sinks/opentelemetry/mod.rs @@ -105,7 +105,9 @@ impl SinkConfig for OpenTelemetryConfig { FramingConfig::Bytes.build(), Serializer::Protobuf(serializer), ); - config.build_with_encoder(cx, encoder, config.encoding.transformer()).await + config + .build_with_encoder(cx, encoder, config.encoding.transformer()) + .await } else { config.build(cx).await } From 94078dc9690c34772be8e0b28f8f9319799d6d66 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Thu, 25 Sep 2025 10:03:48 -0400 Subject: [PATCH 03/33] changelog --- changelog.d/otlp_encoding.feature.md | 4 ++++ .../e2e/opentelemetry/logs/vector_otlp.yaml | 4 ---- .../components/sources/opentelemetry.cue | 17 +---------------- 3 files changed, 5 insertions(+), 20 deletions(-) create mode 100644 changelog.d/otlp_encoding.feature.md diff --git a/changelog.d/otlp_encoding.feature.md b/changelog.d/otlp_encoding.feature.md new file mode 100644 index 0000000000000..55e806aac8f45 --- /dev/null +++ b/changelog.d/otlp_encoding.feature.md @@ -0,0 +1,4 @@ +Added `use_otlp_encoding` option to the `opentelemetry` sink. +When set to `true` the sink assumes the Vector events are structured based on OTLP. 
+ +authors: pront diff --git a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml index e84196bcf7a07..09e83a7812f23 100644 --- a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml +++ b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml @@ -24,10 +24,6 @@ sinks: type: http uri: http://otel-collector-sink:5318/v1/logs method: post - encoding: - codec: json - framing: - method: newline_delimited batch: max_events: 1 request: diff --git a/website/cue/reference/components/sources/opentelemetry.cue b/website/cue/reference/components/sources/opentelemetry.cue index 622e4e8c684f3..d9cc36d266b18 100644 --- a/website/cue/reference/components/sources/opentelemetry.cue +++ b/website/cue/reference/components/sources/opentelemetry.cue @@ -295,30 +295,15 @@ components: sources: opentelemetry: { inputs: - otel.logs type: opentelemetry + use_otlp_encoding: true protocol: type: http uri: http://localhost:5318/v1/logs method: post - encoding: - codec: protobuf - protobuf: - desc_file: path/to/opentelemetry-proto.desc - message_type: opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest - framing: - method: "bytes" request: headers: content-type: "application/x-protobuf" ``` - - The `desc` file was generated with the following command: - ```bash - protoc -I=/path/to/vector/lib/opentelemetry-proto/src/proto/opentelemetry-proto \\ - --include_imports \\ - --include_source_info \\ - --descriptor_set_out=opentelemetry-proto.desc \\ - $(find /path/to/vector/lib/opentelemetry-proto/src/proto/opentelemetry-proto -name '*.proto') - ``` """ } tls: { From f161cbe23ec4a70844881ee52461a44b54e1fa5f Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Thu, 25 Sep 2025 15:04:20 -0400 Subject: [PATCH 04/33] linting --- changelog.d/otlp_encoding.feature.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/otlp_encoding.feature.md b/changelog.d/otlp_encoding.feature.md index 55e806aac8f45..a29c59d128c41 100644 
--- a/changelog.d/otlp_encoding.feature.md +++ b/changelog.d/otlp_encoding.feature.md @@ -1,4 +1,4 @@ -Added `use_otlp_encoding` option to the `opentelemetry` sink. +Added `use_otlp_encoding` option to the `opentelemetry` sink. When set to `true` the sink assumes the Vector events are structured based on OTLP. authors: pront From ff38f524c285886a50469107b8b7cf3f1969ed1a Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Thu, 25 Sep 2025 15:30:12 -0400 Subject: [PATCH 05/33] generate component docs --- .../sinks/generated/opentelemetry.cue | 1473 +++++++++-------- 1 file changed, 745 insertions(+), 728 deletions(-) diff --git a/website/cue/reference/components/sinks/generated/opentelemetry.cue b/website/cue/reference/components/sinks/generated/opentelemetry.cue index 7b284dd2fd3df..6c78bef3f11bb 100644 --- a/website/cue/reference/components/sinks/generated/opentelemetry.cue +++ b/website/cue/reference/components/sinks/generated/opentelemetry.cue @@ -1,130 +1,131 @@ package metadata -generated: components: sinks: opentelemetry: configuration: protocol: { - description: "Protocol configuration" - required: true - type: object: options: { - acknowledgements: { - description: """ - Controls how acknowledgements are handled for this sink. - - See [End-to-end Acknowledgements][e2e_acks] for more information on how event acknowledgement is handled. - - [e2e_acks]: https://vector.dev/docs/architecture/end-to-end-acknowledgements/ - """ - required: false - type: object: options: enabled: { +generated: components: sinks: opentelemetry: configuration: { + protocol: { + description: "Protocol configuration" + required: true + type: object: options: { + acknowledgements: { description: """ - Whether or not end-to-end acknowledgements are enabled. + Controls how acknowledgements are handled for this sink. 
- When enabled for a sink, any source that supports end-to-end - acknowledgements that is connected to that sink waits for events - to be acknowledged by **all connected sinks** before acknowledging them at the source. + See [End-to-end Acknowledgements][e2e_acks] for more information on how event acknowledgement is handled. - Enabling or disabling acknowledgements at the sink level takes precedence over any global - [`acknowledgements`][global_acks] configuration. - - [global_acks]: https://vector.dev/docs/reference/configuration/global-options/#acknowledgements + [e2e_acks]: https://vector.dev/docs/architecture/end-to-end-acknowledgements/ """ required: false - type: bool: {} + type: object: options: enabled: { + description: """ + Whether or not end-to-end acknowledgements are enabled. + + When enabled for a sink, any source that supports end-to-end + acknowledgements that is connected to that sink waits for events + to be acknowledged by **all connected sinks** before acknowledging them at the source. + + Enabling or disabling acknowledgements at the sink level takes precedence over any global + [`acknowledgements`][global_acks] configuration. + + [global_acks]: https://vector.dev/docs/reference/configuration/global-options/#acknowledgements + """ + required: false + type: bool: {} + } } - } - auth: { - description: """ - Configuration of the authentication strategy for HTTP requests. - - HTTP authentication should be used with HTTPS only, as the authentication credentials are passed as an - HTTP header without any additional encryption beyond what is provided by the transport itself. - """ - required: false - type: object: options: { - auth: { - description: "The AWS authentication configuration." - relevant_when: "strategy = \"aws\"" - required: true - type: object: options: { - access_key_id: { - description: "The AWS access key ID." 
- required: true - type: string: examples: ["AKIAIOSFODNN7EXAMPLE"] - } - assume_role: { - description: """ + auth: { + description: """ + Configuration of the authentication strategy for HTTP requests. + + HTTP authentication should be used with HTTPS only, as the authentication credentials are passed as an + HTTP header without any additional encryption beyond what is provided by the transport itself. + """ + required: false + type: object: options: { + auth: { + description: "The AWS authentication configuration." + relevant_when: "strategy = \"aws\"" + required: true + type: object: options: { + access_key_id: { + description: "The AWS access key ID." + required: true + type: string: examples: ["AKIAIOSFODNN7EXAMPLE"] + } + assume_role: { + description: """ The ARN of an [IAM role][iam_role] to assume. [iam_role]: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles.html """ - required: true - type: string: examples: ["arn:aws:iam::123456789098:role/my_role"] - } - credentials_file: { - description: "Path to the credentials file." - required: true - type: string: examples: ["/my/aws/credentials"] - } - external_id: { - description: """ + required: true + type: string: examples: ["arn:aws:iam::123456789098:role/my_role"] + } + credentials_file: { + description: "Path to the credentials file." + required: true + type: string: examples: ["/my/aws/credentials"] + } + external_id: { + description: """ The optional unique external ID in conjunction with role to assume. [external_id]: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html """ - required: false - type: string: examples: ["randomEXAMPLEidString"] - } - imds: { - description: "Configuration for authenticating with AWS through IMDS." - required: false - type: object: options: { - connect_timeout_seconds: { - description: "Connect timeout for IMDS." 
- required: false - type: uint: { - default: 1 - unit: "seconds" + required: false + type: string: examples: ["randomEXAMPLEidString"] + } + imds: { + description: "Configuration for authenticating with AWS through IMDS." + required: false + type: object: options: { + connect_timeout_seconds: { + description: "Connect timeout for IMDS." + required: false + type: uint: { + default: 1 + unit: "seconds" + } } - } - max_attempts: { - description: "Number of IMDS retries for fetching tokens and metadata." - required: false - type: uint: default: 4 - } - read_timeout_seconds: { - description: "Read timeout for IMDS." - required: false - type: uint: { - default: 1 - unit: "seconds" + max_attempts: { + description: "Number of IMDS retries for fetching tokens and metadata." + required: false + type: uint: default: 4 + } + read_timeout_seconds: { + description: "Read timeout for IMDS." + required: false + type: uint: { + default: 1 + unit: "seconds" + } } } } - } - load_timeout_secs: { - description: """ + load_timeout_secs: { + description: """ Timeout for successfully loading any credentials, in seconds. Relevant when the default credentials chain or `assume_role` is used. """ - required: false - type: uint: { - examples: [30] - unit: "seconds" + required: false + type: uint: { + examples: [30] + unit: "seconds" + } } - } - profile: { - description: """ + profile: { + description: """ The credentials profile to use. Used to select AWS credentials from a provided credentials file. """ - required: false - type: string: { - default: "default" - examples: ["develop"] + required: false + type: string: { + default: "default" + examples: ["develop"] + } } - } - region: { - description: """ + region: { + description: """ The [AWS region][aws_region] to send STS requests to. 
If not set, this defaults to the configured region @@ -132,16 +133,16 @@ generated: components: sinks: opentelemetry: configuration: protocol: { [aws_region]: https://docs.aws.amazon.com/general/latest/gr/rande.html#regional-endpoints """ - required: false - type: string: examples: ["us-west-2"] - } - secret_access_key: { - description: "The AWS secret access key." - required: true - type: string: examples: ["wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"] - } - session_name: { - description: """ + required: false + type: string: examples: ["us-west-2"] + } + secret_access_key: { + description: "The AWS secret access key." + required: true + type: string: examples: ["wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"] + } + session_name: { + description: """ The optional [RoleSessionName][role_session_name] is a unique session identifier for your assumed role. Should be unique per principal or reason. @@ -149,210 +150,210 @@ generated: components: sinks: opentelemetry: configuration: protocol: { [role_session_name]: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html """ - required: false - type: string: examples: ["vector-indexer-role"] - } - session_token: { - description: """ + required: false + type: string: examples: ["vector-indexer-role"] + } + session_token: { + description: """ The AWS session token. See [AWS temporary credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_use-resources.html) """ - required: false - type: string: examples: ["AQoDYXdz...AQoDYXdz..."] + required: false + type: string: examples: ["AQoDYXdz...AQoDYXdz..."] + } } } - } - password: { - description: "The basic authentication password." - relevant_when: "strategy = \"basic\"" - required: true - type: string: examples: ["${PASSWORD}", "password"] - } - service: { - description: "The AWS service name to use for signing." 
- relevant_when: "strategy = \"aws\"" - required: true - type: string: {} - } - strategy: { - description: "The authentication strategy to use." - required: true - type: string: enum: { - aws: "AWS authentication." - basic: """ + password: { + description: "The basic authentication password." + relevant_when: "strategy = \"basic\"" + required: true + type: string: examples: ["${PASSWORD}", "password"] + } + service: { + description: "The AWS service name to use for signing." + relevant_when: "strategy = \"aws\"" + required: true + type: string: {} + } + strategy: { + description: "The authentication strategy to use." + required: true + type: string: enum: { + aws: "AWS authentication." + basic: """ Basic authentication. The username and password are concatenated and encoded using [base64][base64]. [base64]: https://en.wikipedia.org/wiki/Base64 """ - bearer: """ + bearer: """ Bearer authentication. The bearer token value (OAuth2, JWT, etc.) is passed as-is. """ + } + } + token: { + description: "The bearer authentication token." + relevant_when: "strategy = \"bearer\"" + required: true + type: string: {} + } + user: { + description: "The basic authentication username." + relevant_when: "strategy = \"basic\"" + required: true + type: string: examples: ["${USERNAME}", "username"] } - } - token: { - description: "The bearer authentication token." - relevant_when: "strategy = \"bearer\"" - required: true - type: string: {} - } - user: { - description: "The basic authentication username." - relevant_when: "strategy = \"basic\"" - required: true - type: string: examples: ["${USERNAME}", "username"] } } - } - batch: { - description: "Event batching behavior." - required: false - type: object: options: { - max_bytes: { - description: """ - The maximum size of a batch that is processed by a sink. + batch: { + description: "Event batching behavior." 
+ required: false + type: object: options: { + max_bytes: { + description: """ + The maximum size of a batch that is processed by a sink. - This is based on the uncompressed size of the batched events, before they are - serialized or compressed. - """ - required: false - type: uint: { - default: 10000000 - unit: "bytes" + This is based on the uncompressed size of the batched events, before they are + serialized or compressed. + """ + required: false + type: uint: { + default: 10000000 + unit: "bytes" + } } - } - max_events: { - description: "The maximum size of a batch before it is flushed." - required: false - type: uint: unit: "events" - } - timeout_secs: { - description: "The maximum age of a batch before it is flushed." - required: false - type: float: { - default: 1.0 - unit: "seconds" + max_events: { + description: "The maximum size of a batch before it is flushed." + required: false + type: uint: unit: "events" + } + timeout_secs: { + description: "The maximum age of a batch before it is flushed." + required: false + type: float: { + default: 1.0 + unit: "seconds" + } } } } - } - compression: { - description: """ - Compression configuration. - - All compression algorithms use the default compression level unless otherwise specified. - """ - required: false - type: string: { - default: "none" - enum: { - gzip: """ - [Gzip][gzip] compression. - - [gzip]: https://www.gzip.org/ - """ - none: "No compression." - snappy: """ - [Snappy][snappy] compression. - - [snappy]: https://github.com/google/snappy/blob/main/docs/README.md - """ - zlib: """ - [Zlib][zlib] compression. - - [zlib]: https://zlib.net/ - """ - zstd: """ - [Zstandard][zstd] compression. + compression: { + description: """ + Compression configuration. - [zstd]: https://facebook.github.io/zstd/ - """ + All compression algorithms use the default compression level unless otherwise specified. + """ + required: false + type: string: { + default: "none" + enum: { + gzip: """ + [Gzip][gzip] compression. 
+ + [gzip]: https://www.gzip.org/ + """ + none: "No compression." + snappy: """ + [Snappy][snappy] compression. + + [snappy]: https://github.com/google/snappy/blob/main/docs/README.md + """ + zlib: """ + [Zlib][zlib] compression. + + [zlib]: https://zlib.net/ + """ + zstd: """ + [Zstandard][zstd] compression. + + [zstd]: https://facebook.github.io/zstd/ + """ + } } } - } - encoding: { - description: """ - Encoding configuration. - Configures how events are encoded into raw bytes. - The selected encoding also determines which input types (logs, metrics, traces) are supported. - """ - required: true - type: object: options: { - avro: { - description: "Apache Avro-specific encoder options." - relevant_when: "codec = \"avro\"" - required: true - type: object: options: schema: { - description: "The Avro schema." - required: true - type: string: examples: ["{ \"type\": \"record\", \"name\": \"log\", \"fields\": [{ \"name\": \"message\", \"type\": \"string\" }] }"] + encoding: { + description: """ + Encoding configuration. + Configures how events are encoded into raw bytes. + The selected encoding also determines which input types (logs, metrics, traces) are supported. + """ + required: true + type: object: options: { + avro: { + description: "Apache Avro-specific encoder options." + relevant_when: "codec = \"avro\"" + required: true + type: object: options: schema: { + description: "The Avro schema." + required: true + type: string: examples: ["{ \"type\": \"record\", \"name\": \"log\", \"fields\": [{ \"name\": \"message\", \"type\": \"string\" }] }"] + } } - } - cef: { - description: "The CEF Serializer Options." - relevant_when: "codec = \"cef\"" - required: true - type: object: options: { - device_event_class_id: { - description: """ + cef: { + description: "The CEF Serializer Options." + relevant_when: "codec = \"cef\"" + required: true + type: object: options: { + device_event_class_id: { + description: """ Unique identifier for each event type. 
Identifies the type of event reported. The value length must be less than or equal to 1023. """ - required: true - type: string: {} - } - device_product: { - description: """ + required: true + type: string: {} + } + device_product: { + description: """ Identifies the product of a vendor. The part of a unique device identifier. No two products can use the same combination of device vendor and device product. The value length must be less than or equal to 63. """ - required: true - type: string: {} - } - device_vendor: { - description: """ + required: true + type: string: {} + } + device_vendor: { + description: """ Identifies the vendor of the product. The part of a unique device identifier. No two products can use the same combination of device vendor and device product. The value length must be less than or equal to 63. """ - required: true - type: string: {} - } - device_version: { - description: """ + required: true + type: string: {} + } + device_version: { + description: """ Identifies the version of the problem. The combination of the device product, vendor and this value make up the unique id of the device that sends messages. The value length must be less than or equal to 31. """ - required: true - type: string: {} - } - extensions: { - description: """ + required: true + type: string: {} + } + extensions: { + description: """ The collection of key-value pairs. Keys are the keys of the extensions, and values are paths that point to the extension values of a log event. The event can have any number of key-value pairs in any order. """ - required: false - type: object: options: "*": { - description: "This is a path that points to the extension value of a log event." - required: true - type: string: {} + required: false + type: object: options: "*": { + description: "This is a path that points to the extension value of a log event." 
+ required: true + type: string: {} + } } - } - name: { - description: """ + name: { + description: """ This is a path that points to the human-readable description of a log event. The value length must be less than or equal to 512. Equals "cef.name" by default. """ - required: true - type: string: {} - } - severity: { - description: """ + required: true + type: string: {} + } + severity: { + description: """ This is a path that points to the field of a log event that reflects importance of the event. Reflects importance of the event. @@ -360,38 +361,38 @@ generated: components: sinks: opentelemetry: configuration: protocol: { 0 = lowest_importance, 10 = highest_importance. Set to "cef.severity" by default. """ - required: true - type: string: {} - } - version: { - description: """ + required: true + type: string: {} + } + version: { + description: """ CEF Version. Can be either 0 or 1. Set to "0" by default. """ - required: true - type: string: enum: { - V0: "CEF specification version 0.1." - V1: "CEF specification version 1.x." + required: true + type: string: enum: { + V0: "CEF specification version 0.1." + V1: "CEF specification version 1.x." + } } } } - } - codec: { - description: "The codec to use for encoding events." - required: true - type: string: enum: { - avro: """ + codec: { + description: "The codec to use for encoding events." + required: true + type: string: enum: { + avro: """ Encodes an event as an [Apache Avro][apache_avro] message. [apache_avro]: https://avro.apache.org/ """ - cef: "Encodes an event as a CEF (Common Event Format) formatted message." - csv: """ + cef: "Encodes an event as a CEF (Common Event Format) formatted message." + csv: """ Encodes an event as a CSV message. This codec must be configured with fields to encode. """ - gelf: """ + gelf: """ Encodes an event as a [GELF][gelf] message. 
This codec is experimental for the following reason: @@ -409,17 +410,17 @@ generated: components: sinks: opentelemetry: configuration: protocol: { [gelf]: https://docs.graylog.org/docs/gelf [implementation]: https://github.com/Graylog2/go-gelf/blob/v2/gelf/reader.go """ - json: """ + json: """ Encodes an event as [JSON][json]. [json]: https://www.json.org/ """ - logfmt: """ + logfmt: """ Encodes an event as a [logfmt][logfmt] message. [logfmt]: https://brandur.org/logfmt """ - native: """ + native: """ Encodes an event in the [native Protocol Buffers format][vector_native_protobuf]. This codec is **[experimental][experimental]**. @@ -427,7 +428,7 @@ generated: components: sinks: opentelemetry: configuration: protocol: { [vector_native_protobuf]: https://github.com/vectordotdev/vector/blob/master/lib/vector-core/proto/event.proto [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ - native_json: """ + native_json: """ Encodes an event in the [native JSON format][vector_native_json]. This codec is **[experimental][experimental]**. @@ -435,12 +436,12 @@ generated: components: sinks: opentelemetry: configuration: protocol: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ - protobuf: """ + protobuf: """ Encodes an event as a [Protobuf][protobuf] message. [protobuf]: https://protobuf.dev/ """ - raw_message: """ + raw_message: """ No encoding. This encoding uses the `message` field of a log event. @@ -449,7 +450,7 @@ generated: components: sinks: opentelemetry: configuration: protocol: { transform) and removing the message field while doing additional parsing on it, as this could lead to the encoding emitting empty strings for the given event. """ - text: """ + text: """ Plain text encoding. This encoding uses the `message` field of a log event. 
For metrics, it uses an @@ -459,38 +460,38 @@ generated: components: sinks: opentelemetry: configuration: protocol: { transform) and removing the message field while doing additional parsing on it, as this could lead to the encoding emitting empty strings for the given event. """ + } } - } - csv: { - description: "The CSV Serializer Options." - relevant_when: "codec = \"csv\"" - required: true - type: object: options: { - capacity: { - description: """ + csv: { + description: "The CSV Serializer Options." + relevant_when: "codec = \"csv\"" + required: true + type: object: options: { + capacity: { + description: """ Sets the capacity (in bytes) of the internal buffer used in the CSV writer. This defaults to 8KB. """ - required: false - type: uint: default: 8192 - } - delimiter: { - description: "The field delimiter to use when writing CSV." - required: false - type: ascii_char: default: "," - } - double_quote: { - description: """ + required: false + type: uint: default: 8192 + } + delimiter: { + description: "The field delimiter to use when writing CSV." + required: false + type: ascii_char: default: "," + } + double_quote: { + description: """ Enables double quote escapes. This is enabled by default, but you can disable it. When disabled, quotes in field data are escaped instead of doubled. """ - required: false - type: bool: default: true - } - escape: { - description: """ + required: false + type: bool: default: true + } + escape: { + description: """ The escape character to use when writing CSV. In some variants of CSV, quotes are escaped using a special escape character @@ -498,11 +499,11 @@ generated: components: sinks: opentelemetry: configuration: protocol: { To use this, `double_quotes` needs to be disabled as well; otherwise, this setting is ignored. 
""" - required: false - type: ascii_char: default: "\"" - } - fields: { - description: """ + required: false + type: ascii_char: default: "\"" + } + fields: { + description: """ Configures the fields that are encoded, as well as the order in which they appear in the output. @@ -511,269 +512,269 @@ generated: components: sinks: opentelemetry: configuration: protocol: { Values of type `Array`, `Object`, and `Regex` are not supported, and the output for any of these types is an empty string. """ - required: true - type: array: items: type: string: {} - } - quote: { - description: "The quote character to use when writing CSV." - required: false - type: ascii_char: default: "\"" - } - quote_style: { - description: "The quoting style to use when writing CSV data." - required: false - type: string: { - default: "necessary" - enum: { - always: "Always puts quotes around every field." - necessary: """ + required: true + type: array: items: type: string: {} + } + quote: { + description: "The quote character to use when writing CSV." + required: false + type: ascii_char: default: "\"" + } + quote_style: { + description: "The quoting style to use when writing CSV data." + required: false + type: string: { + default: "necessary" + enum: { + always: "Always puts quotes around every field." + necessary: """ Puts quotes around fields only when necessary. They are necessary when fields contain a quote, delimiter, or record terminator. Quotes are also necessary when writing an empty record (which is indistinguishable from a record with one empty field). """ - never: "Never writes quotes, even if it produces invalid CSV data." - non_numeric: """ + never: "Never writes quotes, even if it produces invalid CSV data." + non_numeric: """ Puts quotes around all fields that are non-numeric. This means that when writing a field that does not parse as a valid float or integer, quotes are used even if they aren't strictly necessary. 
""" + } } } } } - } - except_fields: { - description: "List of fields that are excluded from the encoded event." - required: false - type: array: items: type: string: {} - } - gelf: { - description: "The GELF Serializer Options." - relevant_when: "codec = \"gelf\"" - required: false - type: object: options: max_chunk_size: { - description: """ + except_fields: { + description: "List of fields that are excluded from the encoded event." + required: false + type: array: items: type: string: {} + } + gelf: { + description: "The GELF Serializer Options." + relevant_when: "codec = \"gelf\"" + required: false + type: object: options: max_chunk_size: { + description: """ Maximum size for each GELF chunked datagram (including 12-byte header). Chunking starts when datagrams exceed this size. For Graylog target, keep at or below 8192 bytes; for Vector target (`gelf` decoding with `chunked_gelf` framing), up to 65,500 bytes is recommended. """ - required: false - type: uint: default: 8192 + required: false + type: uint: default: 8192 + } } - } - json: { - description: "Options for the JsonSerializer." - relevant_when: "codec = \"json\"" - required: false - type: object: options: pretty: { - description: "Whether to use pretty JSON formatting." - required: false - type: bool: default: false + json: { + description: "Options for the JsonSerializer." + relevant_when: "codec = \"json\"" + required: false + type: object: options: pretty: { + description: "Whether to use pretty JSON formatting." + required: false + type: bool: default: false + } } - } - metric_tag_values: { - description: """ - Controls how metric tag values are encoded. - - When set to `single`, only the last non-bare value of tags are displayed with the - metric. When set to `full`, all metric tags are exposed as separate assignments. 
- """ - relevant_when: "codec = \"json\" or codec = \"text\"" - required: false - type: string: { - default: "single" - enum: { - full: "All tags are exposed as arrays of either string or null values." - single: """ + metric_tag_values: { + description: """ + Controls how metric tag values are encoded. + + When set to `single`, only the last non-bare value of tags are displayed with the + metric. When set to `full`, all metric tags are exposed as separate assignments. + """ + relevant_when: "codec = \"json\" or codec = \"text\"" + required: false + type: string: { + default: "single" + enum: { + full: "All tags are exposed as arrays of either string or null values." + single: """ Tag values are exposed as single strings, the same as they were before this config option. Tags with multiple values show the last assigned value, and null values are ignored. """ + } } } - } - only_fields: { - description: "List of fields that are included in the encoded event." - required: false - type: array: items: type: string: {} - } - protobuf: { - description: "Options for the Protobuf serializer." - relevant_when: "codec = \"protobuf\"" - required: true - type: object: options: { - desc_file: { - description: """ + only_fields: { + description: "List of fields that are included in the encoded event." + required: false + type: array: items: type: string: {} + } + protobuf: { + description: "Options for the Protobuf serializer." + relevant_when: "codec = \"protobuf\"" + required: true + type: object: options: { + desc_file: { + description: """ The path to the protobuf descriptor set file. This file is the output of `protoc -I -o ` You can read more [here](https://buf.build/docs/reference/images/#how-buf-images-work). """ - required: true - type: string: examples: ["/etc/vector/protobuf_descriptor_set.desc"] - } - message_type: { - description: "The name of the message type to use for serializing." 
- required: true - type: string: examples: ["package.Message"] + required: true + type: string: examples: ["/etc/vector/protobuf_descriptor_set.desc"] + } + message_type: { + description: "The name of the message type to use for serializing." + required: true + type: string: examples: ["package.Message"] + } } } - } - timestamp_format: { - description: "Format used for timestamp fields." - required: false - type: string: enum: { - rfc3339: "Represent the timestamp as a RFC 3339 timestamp." - unix: "Represent the timestamp as a Unix timestamp." - unix_float: "Represent the timestamp as a Unix timestamp in floating point." - unix_ms: "Represent the timestamp as a Unix timestamp in milliseconds." - unix_ns: "Represent the timestamp as a Unix timestamp in nanoseconds." - unix_us: "Represent the timestamp as a Unix timestamp in microseconds" + timestamp_format: { + description: "Format used for timestamp fields." + required: false + type: string: enum: { + rfc3339: "Represent the timestamp as a RFC 3339 timestamp." + unix: "Represent the timestamp as a Unix timestamp." + unix_float: "Represent the timestamp as a Unix timestamp in floating point." + unix_ms: "Represent the timestamp as a Unix timestamp in milliseconds." + unix_ns: "Represent the timestamp as a Unix timestamp in nanoseconds." + unix_us: "Represent the timestamp as a Unix timestamp in microseconds" + } } } } - } - framing: { - description: "Framing configuration." - required: false - type: object: options: { - character_delimited: { - description: "Options for the character delimited encoder." - relevant_when: "method = \"character_delimited\"" - required: true - type: object: options: delimiter: { - description: "The ASCII (7-bit) character that delimits byte sequences." - required: true - type: ascii_char: {} - } - } - length_delimited: { - description: "Options for the length delimited decoder." 
- relevant_when: "method = \"length_delimited\"" - required: true - type: object: options: { - length_field_is_big_endian: { - description: "Length field byte order (little or big endian)" - required: false - type: bool: default: true - } - length_field_length: { - description: "Number of bytes representing the field length" - required: false - type: uint: default: 4 - } - length_field_offset: { - description: "Number of bytes in the header before the length field" - required: false - type: uint: default: 0 + framing: { + description: "Framing configuration." + required: false + type: object: options: { + character_delimited: { + description: "Options for the character delimited encoder." + relevant_when: "method = \"character_delimited\"" + required: true + type: object: options: delimiter: { + description: "The ASCII (7-bit) character that delimits byte sequences." + required: true + type: ascii_char: {} } - max_frame_length: { - description: "Maximum frame length" - required: false - type: uint: default: 8388608 + } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } } } - } - max_frame_length: { - description: "Maximum frame length" - relevant_when: "method = \"varint_length_delimited\"" - required: false - type: uint: default: 8388608 - } - method: { - description: "The framing method." 
- required: true - type: string: enum: { - bytes: "Event data is not delimited at all." - character_delimited: "Event data is delimited by a single ASCII (7-bit) character." - length_delimited: """ + max_frame_length: { + description: "Maximum frame length" + relevant_when: "method = \"varint_length_delimited\"" + required: false + type: uint: default: 8388608 + } + method: { + description: "The framing method." + required: true + type: string: enum: { + bytes: "Event data is not delimited at all." + character_delimited: "Event data is delimited by a single ASCII (7-bit) character." + length_delimited: """ Event data is prefixed with its length in bytes. The prefix is a 32-bit unsigned integer, little endian. """ - newline_delimited: "Event data is delimited by a newline (LF) character." - varint_length_delimited: """ + newline_delimited: "Event data is delimited by a newline (LF) character." + varint_length_delimited: """ Event data is prefixed with its length in bytes as a varint. This is compatible with protobuf's length-delimited encoding. """ + } } } } - } - headers: { - deprecated: true - deprecated_message: "This option has been deprecated, use `request.headers` instead." - description: "A list of custom headers to add to each request." - required: false - type: object: options: "*": { - description: "An HTTP request header and it's value." - required: true - type: string: {} + headers: { + deprecated: true + deprecated_message: "This option has been deprecated, use `request.headers` instead." + description: "A list of custom headers to add to each request." + required: false + type: object: options: "*": { + description: "An HTTP request header and it's value." + required: true + type: string: {} + } } - } - method: { - description: "The HTTP method to use when making the request." - required: false - type: string: { - default: "post" - enum: { - delete: "DELETE." - get: "GET." - head: "HEAD." - options: "OPTIONS." - patch: "PATCH." - post: "POST." 
- put: "PUT." - trace: "TRACE." + method: { + description: "The HTTP method to use when making the request." + required: false + type: string: { + default: "post" + enum: { + delete: "DELETE." + get: "GET." + head: "HEAD." + options: "OPTIONS." + patch: "PATCH." + post: "POST." + put: "PUT." + trace: "TRACE." + } } } - } - payload_prefix: { - description: """ - A string to prefix the payload with. - - This option is ignored if the encoding is not character delimited JSON. - - If specified, the `payload_suffix` must also be specified and together they must produce a valid JSON object. - """ - required: false - type: string: { - default: "" - examples: ["{\"data\":"] + payload_prefix: { + description: """ + A string to prefix the payload with. + + This option is ignored if the encoding is not character delimited JSON. + + If specified, the `payload_suffix` must also be specified and together they must produce a valid JSON object. + """ + required: false + type: string: { + default: "" + examples: ["{\"data\":"] + } } - } - payload_suffix: { - description: """ - A string to suffix the payload with. - - This option is ignored if the encoding is not character delimited JSON. - - If specified, the `payload_prefix` must also be specified and together they must produce a valid JSON object. - """ - required: false - type: string: { - default: "" - examples: ["}"] + payload_suffix: { + description: """ + A string to suffix the payload with. + + This option is ignored if the encoding is not character delimited JSON. + + If specified, the `payload_prefix` must also be specified and together they must produce a valid JSON object. + """ + required: false + type: string: { + default: "" + examples: ["}"] + } } - } - request: { - description: "Outbound HTTP request settings." - required: false - type: object: options: { - adaptive_concurrency: { - description: """ - Configuration of adaptive concurrency parameters. + request: { + description: "Outbound HTTP request settings." 
+ required: false + type: object: options: { + adaptive_concurrency: { + description: """ + Configuration of adaptive concurrency parameters. - These parameters typically do not require changes from the default, and incorrect values can lead to meta-stable or - unstable performance and sink behavior. Proceed with caution. - """ - required: false - type: object: options: { - decrease_ratio: { - description: """ + These parameters typically do not require changes from the default, and incorrect values can lead to meta-stable or + unstable performance and sink behavior. Proceed with caution. + """ + required: false + type: object: options: { + decrease_ratio: { + description: """ The fraction of the current value to set the new concurrency limit when decreasing the limit. Valid values are greater than `0` and less than `1`. Smaller values cause the algorithm to scale back rapidly @@ -781,11 +782,11 @@ generated: components: sinks: opentelemetry: configuration: protocol: { **Note**: The new limit is rounded down after applying this ratio. """ - required: false - type: float: default: 0.9 - } - ewma_alpha: { - description: """ + required: false + type: float: default: 0.9 + } + ewma_alpha: { + description: """ The weighting of new measurements compared to older measurements. Valid values are greater than `0` and less than `1`. @@ -794,31 +795,31 @@ generated: components: sinks: opentelemetry: configuration: protocol: { the current RTT. Smaller values cause this reference to adjust more slowly, which may be useful if a service has unusually high response variability. """ - required: false - type: float: default: 0.4 - } - initial_concurrency: { - description: """ + required: false + type: float: default: 0.4 + } + initial_concurrency: { + description: """ The initial concurrency limit to use. If not specified, the initial limit is 1 (no concurrency). 
Datadog recommends setting this value to your service's average limit if you're seeing that it takes a long time to ramp up adaptive concurrency after a restart. You can find this value by looking at the `adaptive_concurrency_limit` metric. """ - required: false - type: uint: default: 1 - } - max_concurrency_limit: { - description: """ + required: false + type: uint: default: 1 + } + max_concurrency_limit: { + description: """ The maximum concurrency limit. The adaptive request concurrency limit does not go above this bound. This is put in place as a safeguard. """ - required: false - type: uint: default: 200 - } - rtt_deviation_scale: { - description: """ + required: false + type: uint: default: 200 + } + rtt_deviation_scale: { + description: """ Scale of RTT deviations which are not considered anomalous. Valid values are greater than or equal to `0`, and we expect reasonable values to range from `1.0` to `3.0`. @@ -828,98 +829,98 @@ generated: components: sinks: opentelemetry: configuration: protocol: { can ignore increases in RTT that are within an expected range. This factor is used to scale up the deviation to an appropriate range. Larger values cause the algorithm to ignore larger increases in the RTT. """ - required: false - type: float: default: 2.5 + required: false + type: float: default: 2.5 + } } } - } - concurrency: { - description: """ - Configuration for outbound request concurrency. + concurrency: { + description: """ + Configuration for outbound request concurrency. - This can be set either to one of the below enum values or to a positive integer, which denotes - a fixed concurrency limit. - """ - required: false - type: { - string: { - default: "adaptive" - enum: { - adaptive: """ + This can be set either to one of the below enum values or to a positive integer, which denotes + a fixed concurrency limit. 
+ """ + required: false + type: { + string: { + default: "adaptive" + enum: { + adaptive: """ Concurrency is managed by Vector's [Adaptive Request Concurrency][arc] feature. [arc]: https://vector.dev/docs/architecture/arc/ """ - none: """ + none: """ A fixed concurrency of 1. Only one request can be outstanding at any given time. """ + } } + uint: {} } - uint: {} } - } - headers: { - description: "Additional HTTP headers to add to every HTTP request." - required: false - type: object: { - examples: [{ - Accept: "text/plain" - "X-Event-Level": "{{level}}" - "X-Event-Timestamp": "{{timestamp}}" - "X-My-Custom-Header": "A-Value" - }] - options: "*": { - description: "An HTTP request header and its value. Both header names and values support templating with event data." - required: true - type: string: {} + headers: { + description: "Additional HTTP headers to add to every HTTP request." + required: false + type: object: { + examples: [{ + Accept: "text/plain" + "X-Event-Level": "{{level}}" + "X-Event-Timestamp": "{{timestamp}}" + "X-My-Custom-Header": "A-Value" + }] + options: "*": { + description: "An HTTP request header and its value. Both header names and values support templating with event data." + required: true + type: string: {} + } } } - } - rate_limit_duration_secs: { - description: "The time window used for the `rate_limit_num` option." - required: false - type: uint: { - default: 1 - unit: "seconds" + rate_limit_duration_secs: { + description: "The time window used for the `rate_limit_num` option." + required: false + type: uint: { + default: 1 + unit: "seconds" + } } - } - rate_limit_num: { - description: "The maximum number of requests allowed within the `rate_limit_duration_secs` time window." - required: false - type: uint: { - default: 9223372036854775807 - unit: "requests" + rate_limit_num: { + description: "The maximum number of requests allowed within the `rate_limit_duration_secs` time window." 
+ required: false + type: uint: { + default: 9223372036854775807 + unit: "requests" + } } - } - retry_attempts: { - description: "The maximum number of retries to make for failed requests." - required: false - type: uint: { - default: 9223372036854775807 - unit: "retries" + retry_attempts: { + description: "The maximum number of retries to make for failed requests." + required: false + type: uint: { + default: 9223372036854775807 + unit: "retries" + } } - } - retry_initial_backoff_secs: { - description: """ - The amount of time to wait before attempting the first retry for a failed request. + retry_initial_backoff_secs: { + description: """ + The amount of time to wait before attempting the first retry for a failed request. - After the first retry has failed, the fibonacci sequence is used to select future backoffs. - """ - required: false - type: uint: { - default: 1 - unit: "seconds" + After the first retry has failed, the fibonacci sequence is used to select future backoffs. + """ + required: false + type: uint: { + default: 1 + unit: "seconds" + } } - } - retry_jitter_mode: { - description: "The jitter mode to use for retry backoff behavior." - required: false - type: string: { - default: "Full" - enum: { - Full: """ + retry_jitter_mode: { + description: "The jitter mode to use for retry backoff behavior." + required: false + type: string: { + default: "Full" + enum: { + Full: """ Full jitter. The random delay is anywhere from 0 up to the maximum current delay calculated by the backoff @@ -929,143 +930,159 @@ generated: components: sinks: opentelemetry: configuration: protocol: { of creating accidental denial of service (DoS) conditions against your own systems when many clients are recovering from a failure state. """ - None: "No jitter." + None: "No jitter." + } } } - } - retry_max_duration_secs: { - description: "The maximum amount of time to wait between retries." 
- required: false - type: uint: { - default: 30 - unit: "seconds" + retry_max_duration_secs: { + description: "The maximum amount of time to wait between retries." + required: false + type: uint: { + default: 30 + unit: "seconds" + } } - } - timeout_secs: { - description: """ - The time a request can take before being aborted. + timeout_secs: { + description: """ + The time a request can take before being aborted. - Datadog highly recommends that you do not lower this value below the service's internal timeout, as this could - create orphaned requests, pile on retries, and result in duplicate data downstream. - """ - required: false - type: uint: { - default: 60 - unit: "seconds" + Datadog highly recommends that you do not lower this value below the service's internal timeout, as this could + create orphaned requests, pile on retries, and result in duplicate data downstream. + """ + required: false + type: uint: { + default: 60 + unit: "seconds" + } } } } - } - tls: { - description: "TLS configuration." - required: false - type: object: options: { - alpn_protocols: { - description: """ - Sets the list of supported ALPN protocols. + tls: { + description: "TLS configuration." + required: false + type: object: options: { + alpn_protocols: { + description: """ + Sets the list of supported ALPN protocols. - Declare the supported ALPN protocols, which are used during negotiation with a peer. They are prioritized in the order - that they are defined. - """ - required: false - type: array: items: type: string: examples: ["h2"] - } - ca_file: { - description: """ - Absolute path to an additional CA certificate file. + Declare the supported ALPN protocols, which are used during negotiation with a peer. They are prioritized in the order + that they are defined. + """ + required: false + type: array: items: type: string: examples: ["h2"] + } + ca_file: { + description: """ + Absolute path to an additional CA certificate file. 
- The certificate must be in the DER or PEM (X.509) format. Additionally, the certificate can be provided as an inline string in PEM format. - """ - required: false - type: string: examples: ["/path/to/certificate_authority.crt"] - } - crt_file: { - description: """ - Absolute path to a certificate file used to identify this server. + The certificate must be in the DER or PEM (X.509) format. Additionally, the certificate can be provided as an inline string in PEM format. + """ + required: false + type: string: examples: ["/path/to/certificate_authority.crt"] + } + crt_file: { + description: """ + Absolute path to a certificate file used to identify this server. - The certificate must be in DER, PEM (X.509), or PKCS#12 format. Additionally, the certificate can be provided as - an inline string in PEM format. + The certificate must be in DER, PEM (X.509), or PKCS#12 format. Additionally, the certificate can be provided as + an inline string in PEM format. - If this is set _and_ is not a PKCS#12 archive, `key_file` must also be set. - """ - required: false - type: string: examples: ["/path/to/host_certificate.crt"] - } - key_file: { - description: """ - Absolute path to a private key file used to identify this server. + If this is set _and_ is not a PKCS#12 archive, `key_file` must also be set. + """ + required: false + type: string: examples: ["/path/to/host_certificate.crt"] + } + key_file: { + description: """ + Absolute path to a private key file used to identify this server. - The key must be in DER or PEM (PKCS#8) format. Additionally, the key can be provided as an inline string in PEM format. - """ - required: false - type: string: examples: ["/path/to/host_certificate.key"] - } - key_pass: { - description: """ - Passphrase used to unlock the encrypted key file. + The key must be in DER or PEM (PKCS#8) format. Additionally, the key can be provided as an inline string in PEM format. 
+ """ + required: false + type: string: examples: ["/path/to/host_certificate.key"] + } + key_pass: { + description: """ + Passphrase used to unlock the encrypted key file. - This has no effect unless `key_file` is set. - """ - required: false - type: string: examples: ["${KEY_PASS_ENV_VAR}", "PassWord1"] - } - server_name: { - description: """ - Server name to use when using Server Name Indication (SNI). + This has no effect unless `key_file` is set. + """ + required: false + type: string: examples: ["${KEY_PASS_ENV_VAR}", "PassWord1"] + } + server_name: { + description: """ + Server name to use when using Server Name Indication (SNI). - Only relevant for outgoing connections. - """ - required: false - type: string: examples: ["www.example.com"] - } - verify_certificate: { - description: """ - Enables certificate verification. For components that create a server, this requires that the - client connections have a valid client certificate. For components that initiate requests, - this validates that the upstream has a valid certificate. + Only relevant for outgoing connections. + """ + required: false + type: string: examples: ["www.example.com"] + } + verify_certificate: { + description: """ + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. - If enabled, certificates must not be expired and must be issued by a trusted - issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the - certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and - so on, until the verification process reaches a root certificate. + If enabled, certificates must not be expired and must be issued by a trusted + issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the + certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and + so on, until the verification process reaches a root certificate. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. - """ - required: false - type: bool: {} - } - verify_hostname: { - description: """ - Enables hostname verification. + Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. + """ + required: false + type: bool: {} + } + verify_hostname: { + description: """ + Enables hostname verification. - If enabled, the hostname used to connect to the remote host must be present in the TLS certificate presented by - the remote host, either as the Common Name or as an entry in the Subject Alternative Name extension. + If enabled, the hostname used to connect to the remote host must be present in the TLS certificate presented by + the remote host, either as the Common Name or as an entry in the Subject Alternative Name extension. - Only relevant for outgoing connections. + Only relevant for outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the remote hostname. - """ - required: false - type: bool: {} + Do NOT set this to `false` unless you understand the risks of not verifying the remote hostname. + """ + required: false + type: bool: {} + } } } - } - type: { - description: "The communication protocol." - required: true - type: string: enum: http: "Send data over HTTP." - } - uri: { - description: """ - The full URI to make HTTP requests to. - - This should include the protocol and host, but can also include the port, path, and any other valid part of a URI. 
- """ - required: true - type: string: { - examples: ["https://10.22.212.22:9000/endpoint"] - syntax: "template" + type: { + description: "The communication protocol." + required: true + type: string: enum: http: "Send data over HTTP." + } + uri: { + description: """ + The full URI to make HTTP requests to. + + This should include the protocol and host, but can also include the port, path, and any other valid part of a URI. + """ + required: true + type: string: { + examples: ["https://10.22.212.22:9000/endpoint"] + syntax: "template" + } } } } + use_otlp_encoding: { + description: """ + Setting this field to `true`, will override all encoding settings and it will encode requests based on the + [OpenTelemetry protocol](https://opentelemetry.io/docs/specs/otel/protocol/). + + The endpoint is used to determine the data type: + * v1/logs → OTLP Logs + * v1/traces → OTLP Traces + * v1/metrics → OTLP Metrics + + More information available [here](https://opentelemetry.io/docs/specs/otlp/?utm_source=chatgpt.com#otlphttp-request). 
+ """ + required: false + type: bool: default: false + } } From 65010d9b0d8dbda1d09059c226fbea2ede170bb4 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Thu, 2 Oct 2025 09:27:21 -0400 Subject: [PATCH 06/33] fmt --- lib/codecs/src/encoding/format/protobuf.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lib/codecs/src/encoding/format/protobuf.rs b/lib/codecs/src/encoding/format/protobuf.rs index ac4159b7bab8f..f656f24cf29b5 100644 --- a/lib/codecs/src/encoding/format/protobuf.rs +++ b/lib/codecs/src/encoding/format/protobuf.rs @@ -10,12 +10,9 @@ use vector_core::{ event::{Event, Value}, schema, }; -use vrl::protobuf::encode::Options; -use vrl::protobuf::{descriptor::get_message_descriptor, encode::encode_message}; - use vrl::protobuf::{ descriptor::{get_message_descriptor, get_message_descriptor_from_bytes}, - encode::encode_message, + encode::{Options, encode_message}, }; /// Config used to build a `ProtobufSerializer`. From 5fa6aee2198d692a8ef03adf4284fc1e80b57f53 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 09:24:15 -0400 Subject: [PATCH 07/33] reverting most changes, will go with a codec approach --- src/codecs/encoding/config.rs | 4 +- src/sinks/http/config.rs | 150 +++++++++++++++------------------ src/sinks/opentelemetry/mod.rs | 66 +++------------ 3 files changed, 82 insertions(+), 138 deletions(-) diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs index dc76b7ce2f46e..756bc8e2406f2 100644 --- a/src/codecs/encoding/config.rs +++ b/src/codecs/encoding/config.rs @@ -10,7 +10,7 @@ use crate::codecs::Transformer; /// Encoding configuration. #[configurable_component] -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] /// Configures how events are encoded into raw bytes. /// The selected encoding also determines which input types (logs, metrics, traces) are supported. pub struct EncodingConfig { @@ -60,7 +60,7 @@ where /// Encoding configuration. 
#[configurable_component] -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] #[serde(deny_unknown_fields)] pub struct EncodingConfigWithFraming { #[configurable(derived)] diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index cdc27abeb2bab..ab72afb9c4cab 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -68,8 +68,6 @@ pub struct HttpSinkConfig { #[serde(default)] pub compression: Compression, - /// If not specified, `encoding.codec` will default to `json`. - /// If `encoding.framing` is not specified, it will be deduced from `encoding.codec`. #[serde(flatten)] pub encoding: EncodingConfigWithFraming, @@ -172,15 +170,79 @@ impl HttpSinkConfig { let (framer, serializer) = self.encoding.build(SinkType::MessageBased)?; Ok(Encoder::::new(framer, serializer)) } +} + +impl GenerateConfig for HttpSinkConfig { + fn generate_config() -> toml::Value { + toml::from_str( + r#"uri = "https://10.22.212.22:9000/endpoint" + encoding.codec = "json""#, + ) + .unwrap() + } +} + +async fn healthcheck(uri: UriSerde, auth: Option, client: HttpClient) -> crate::Result<()> { + let auth = auth.choose_one(&uri.auth)?; + let uri = uri.with_default_parts(); + let mut request = Request::head(&uri.uri).body(Body::empty()).unwrap(); + + if let Some(auth) = auth { + auth.apply(&mut request); + } + + let response = client.send(request).await?; + + match response.status() { + StatusCode::OK => Ok(()), + status => Err(HealthcheckError::UnexpectedStatus { status }.into()), + } +} + +pub(super) fn validate_headers( + headers: &BTreeMap, + configures_auth: bool, +) -> crate::Result> { + let headers = crate::sinks::util::http::validate_headers(headers)?; + + for name in headers.keys() { + if configures_auth && name.inner() == AUTHORIZATION { + return Err("Authorization header can not be used with defined auth options".into()); + } + } - pub(crate) async fn build_with_encoder( - &self, - cx: SinkContext, - encoder: Encoder, - transformer: Transformer, - ) -> 
crate::Result<(VectorSink, Healthcheck)> { + Ok(headers) +} + +pub(super) fn validate_payload_wrapper( + payload_prefix: &str, + payload_suffix: &str, + encoder: &Encoder, +) -> crate::Result<(String, String)> { + let payload = [payload_prefix, "{}", payload_suffix].join(""); + match ( + encoder.serializer(), + encoder.framer(), + serde_json::from_str::(&payload), + ) { + ( + Serializer::Json(_), + Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' }), + Err(_), + ) => Err("Payload prefix and suffix wrapper must produce a valid JSON object.".into()), + _ => Ok((payload_prefix.to_owned(), payload_suffix.to_owned())), + } +} + +#[async_trait] +#[typetag::serde(name = "http")] +impl SinkConfig for HttpSinkConfig { + async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { let batch_settings = self.batch.validate()?.into_batcher_settings()?; + let encoder = self.build_encoder()?; + let transformer = self.encoding.transformer(); + let mut request = self.request.clone(); request.add_old_option(self.headers.clone()); @@ -288,78 +350,6 @@ impl HttpSinkConfig { Ok((VectorSink::from_event_streamsink(sink), healthcheck)) } -} - -impl GenerateConfig for HttpSinkConfig { - fn generate_config() -> toml::Value { - toml::from_str( - r#"uri = "https://10.22.212.22:9000/endpoint" - encoding.codec = "json""#, - ) - .unwrap() - } -} - -async fn healthcheck(uri: UriSerde, auth: Option, client: HttpClient) -> crate::Result<()> { - let auth = auth.choose_one(&uri.auth)?; - let uri = uri.with_default_parts(); - let mut request = Request::head(&uri.uri).body(Body::empty()).unwrap(); - - if let Some(auth) = auth { - auth.apply(&mut request); - } - - let response = client.send(request).await?; - - match response.status() { - StatusCode::OK => Ok(()), - status => Err(HealthcheckError::UnexpectedStatus { status }.into()), - } -} - -pub(super) fn validate_headers( - headers: &BTreeMap, - configures_auth: bool, -) -> crate::Result> { - let headers 
= crate::sinks::util::http::validate_headers(headers)?; - - for name in headers.keys() { - if configures_auth && name.inner() == AUTHORIZATION { - return Err("Authorization header can not be used with defined auth options".into()); - } - } - - Ok(headers) -} - -pub(super) fn validate_payload_wrapper( - payload_prefix: &str, - payload_suffix: &str, - encoder: &Encoder, -) -> crate::Result<(String, String)> { - let payload = [payload_prefix, "{}", payload_suffix].join(""); - match ( - encoder.serializer(), - encoder.framer(), - serde_json::from_str::(&payload), - ) { - ( - Serializer::Json(_), - Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' }), - Err(_), - ) => Err("Payload prefix and suffix wrapper must produce a valid JSON object.".into()), - _ => Ok((payload_prefix.to_owned(), payload_suffix.to_owned())), - } -} - -#[async_trait] -#[typetag::serde(name = "http")] -impl SinkConfig for HttpSinkConfig { - async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { - let encoder = self.build_encoder()?; - self.build_with_encoder(cx, encoder, self.encoding.transformer()) - .await - } fn input(&self) -> Input { Input::new(self.encoding.config().1.input_type()) diff --git a/src/sinks/opentelemetry/mod.rs b/src/sinks/opentelemetry/mod.rs index c8e820eb933d2..88963f8603cde 100644 --- a/src/sinks/opentelemetry/mod.rs +++ b/src/sinks/opentelemetry/mod.rs @@ -1,18 +1,5 @@ -use crate::codecs::Encoder; -use crate::{ - codecs::{EncodingConfigWithFraming, Transformer}, - config::{AcknowledgementsConfig, Input, SinkConfig, SinkContext}, - sinks::{ - Healthcheck, VectorSink, - http::config::{HttpMethod, HttpSinkConfig}, - }, -}; use indoc::indoc; use vector_config::component::GenerateConfig; -use vector_lib::codecs::encoding::{Framer, ProtobufSerializer, Serializer}; -use vector_lib::opentelemetry::proto::{ - LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, TRACES_REQUEST_MESSAGE_TYPE, -}; use vector_lib::{ codecs::{ 
JsonSerializerConfig, @@ -21,6 +8,15 @@ use vector_lib::{ configurable::configurable_component, }; +use crate::{ + codecs::{EncodingConfigWithFraming, Transformer}, + config::{AcknowledgementsConfig, Input, SinkConfig, SinkContext}, + sinks::{ + Healthcheck, VectorSink, + http::config::{HttpMethod, HttpSinkConfig}, + }, +}; + /// Configuration for the `OpenTelemetry` sink. #[configurable_component(sink("opentelemetry", "Deliver OTLP data over HTTP."))] #[derive(Clone, Debug, Default)] @@ -28,19 +24,6 @@ pub struct OpenTelemetryConfig { /// Protocol configuration #[configurable(derived)] protocol: Protocol, - - /// Setting this field to `true`, will override all encoding settings and it will encode requests based on the - /// [OpenTelemetry protocol](https://opentelemetry.io/docs/specs/otel/protocol/). - /// - /// The endpoint is used to determine the data type: - /// * v1/logs → OTLP Logs - /// * v1/traces → OTLP Traces - /// * v1/metrics → OTLP Metrics - /// - /// More information available [here](https://opentelemetry.io/docs/specs/otlp/?utm_source=chatgpt.com#otlphttp-request). - #[configurable(derived)] - #[serde(default)] - pub use_otlp_encoding: bool, } /// The protocol used to send data to OpenTelemetry. 
@@ -95,23 +78,7 @@ impl GenerateConfig for OpenTelemetryConfig { impl SinkConfig for OpenTelemetryConfig { async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { match &self.protocol { - Protocol::Http(config) => { - if self.use_otlp_encoding { - let serializer = ProtobufSerializer::new_from_bytes( - vector_lib::opentelemetry::proto::DESCRIPTOR_BYTES, - to_message_type(&config.uri.to_string())?, - )?; - let encoder = Encoder::::new( - FramingConfig::Bytes.build(), - Serializer::Protobuf(serializer), - ); - config - .build_with_encoder(cx, encoder, config.encoding.transformer()) - .await - } else { - config.build(cx).await - } - } + Protocol::Http(config) => config.build(cx).await, } } @@ -128,19 +95,6 @@ impl SinkConfig for OpenTelemetryConfig { } } -/// Checks if an endpoint ends with a known OTEL proto request. -pub fn to_message_type(endpoint: &str) -> crate::Result<&'static str> { - if endpoint.ends_with("v1/logs") { - Ok(LOGS_REQUEST_MESSAGE_TYPE) - } else if endpoint.ends_with("v1/traces") { - Ok(TRACES_REQUEST_MESSAGE_TYPE) - } else if endpoint.ends_with("v1/metrics") { - Ok(METRICS_REQUEST_MESSAGE_TYPE) - } else { - Err(format!("Endpoint {endpoint} not supported, should end with 'v1/logs', 'v1/metrics' or 'v1/traces'.").into()) - } -} - #[cfg(test)] mod test { #[test] From 250feb1d2b03a436af0cd764d93555a91d6598a8 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 10:09:13 -0400 Subject: [PATCH 08/33] otlp codec WIP --- Cargo.lock | 1 + lib/codecs/Cargo.toml | 1 + lib/codecs/src/encoding/format/mod.rs | 2 + lib/codecs/src/encoding/format/otlp.rs | 101 +++++++++++++++++++++ lib/codecs/src/encoding/mod.rs | 39 +++++++- src/codecs/encoding/config.rs | 6 +- src/codecs/encoding/encoder.rs | 1 + src/components/validation/resources/mod.rs | 1 + src/sinks/websocket/sink.rs | 25 ++--- src/sinks/websocket_server/sink.rs | 40 ++++---- src/sources/opentelemetry/config.rs | 14 +-- tests/e2e/opentelemetry/logs/mod.rs | 5 +- 
12 files changed, 186 insertions(+), 50 deletions(-) create mode 100644 lib/codecs/src/encoding/format/otlp.rs diff --git a/Cargo.lock b/Cargo.lock index 7c2d5014c679e..28a7e3d1abeae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2308,6 +2308,7 @@ dependencies = [ "indoc", "influxdb-line-protocol", "memchr", + "opentelemetry-proto", "ordered-float 4.6.0", "prost 0.12.6", "prost-reflect", diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml index 28f5411ef3cd0..d3632ad17b48d 100644 --- a/lib/codecs/Cargo.toml +++ b/lib/codecs/Cargo.toml @@ -20,6 +20,7 @@ flate2.workspace = true influxdb-line-protocol = { version = "2", default-features = false } lookup = { package = "vector-lookup", path = "../vector-lookup", default-features = false, features = ["test"] } memchr = { version = "2", default-features = false } +opentelemetry-proto = { path = "../opentelemetry-proto" } ordered-float.workspace = true prost.workspace = true prost-reflect.workspace = true diff --git a/lib/codecs/src/encoding/format/mod.rs b/lib/codecs/src/encoding/format/mod.rs index d699e3f9942c4..dd2201917e024 100644 --- a/lib/codecs/src/encoding/format/mod.rs +++ b/lib/codecs/src/encoding/format/mod.rs @@ -12,6 +12,7 @@ mod json; mod logfmt; mod native; mod native_json; +mod otlp; mod protobuf; mod raw_message; mod text; @@ -26,6 +27,7 @@ pub use json::{JsonSerializer, JsonSerializerConfig, JsonSerializerOptions}; pub use logfmt::{LogfmtSerializer, LogfmtSerializerConfig}; pub use native::{NativeSerializer, NativeSerializerConfig}; pub use native_json::{NativeJsonSerializer, NativeJsonSerializerConfig}; +pub use otlp::{OtlpSerializer, OtlpSerializerConfig}; pub use protobuf::{ProtobufSerializer, ProtobufSerializerConfig, ProtobufSerializerOptions}; pub use raw_message::{RawMessageSerializer, RawMessageSerializerConfig}; pub use text::{TextSerializer, TextSerializerConfig}; diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs new file mode 100644 index 
0000000000000..4ca2197f8c839 --- /dev/null +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -0,0 +1,101 @@ +use crate::encoding::ProtobufSerializer; +use bytes::BytesMut; +use opentelemetry_proto::proto::{ + DESCRIPTOR_BYTES, LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, + TRACES_REQUEST_MESSAGE_TYPE, +}; +use tokio_util::codec::Encoder; +use vector_config_macros::configurable_component; +use vector_core::{config::DataType, event::Event, schema}; +use vrl::protobuf::{descriptor::get_message_descriptor_from_bytes, parse::Options}; + +/// Config used to build an `OtlpSerializer`. +#[configurable_component] +#[derive(Debug, Clone, Default)] +pub struct OtlpSerializerConfig { + // No configuration options needed - OTLP serialization is opinionated +} + +impl OtlpSerializerConfig { + /// Build the `OtlpSerializer` from this configuration. + pub fn build(&self) -> Result { + OtlpSerializer::new() + } + + /// The data type of events that are accepted by `OtlpSerializer`. + pub fn input_type(&self) -> DataType { + DataType::Log | DataType::Metric | DataType::Trace + } + + /// The schema required by the serializer. + pub fn schema_requirement(&self) -> schema::Requirement { + schema::Requirement::empty() + } +} + +/// Serializer that converts an `Event` to bytes using the OTLP (OpenTelemetry Protocol) protobuf format. +/// +/// This serializer encodes events using the OTLP protobuf specification, which is the recommended +/// encoding format for OpenTelemetry data. The output is suitable for sending to OTLP-compatible +/// endpoints with `content-type: application/x-protobuf`. 
+/// +/// # Implementation approach +/// +/// This serializer converts Vector's internal event representation to the appropriate OTLP message type: +/// - `Event::Log` → `ExportLogsServiceRequest` +/// - `Event::Metric` → `ExportMetricsServiceRequest` +/// - `Event::Trace` → `ExportTraceServiceRequest` +/// +/// The implementation should be the inverse of what the `opentelemetry` source does when +/// `use_otlp_decoding` is enabled, ensuring round-trip compatibility. +#[derive(Debug, Clone)] +#[allow(dead_code)] // Fields will be used once encoding is implemented +pub struct OtlpSerializer { + logs_descriptor: ProtobufSerializer, + metrics_descriptor: ProtobufSerializer, + traces_descriptor: ProtobufSerializer, + options: Options, +} + +impl OtlpSerializer { + /// Creates a new OTLP serializer with the appropriate message descriptors. + pub fn new() -> vector_common::Result { + let options = Options { + use_json_names: true, + }; + + let logs_descriptor = ProtobufSerializer::new(get_message_descriptor_from_bytes( + DESCRIPTOR_BYTES, + LOGS_REQUEST_MESSAGE_TYPE, + )?); + + let metrics_descriptor = ProtobufSerializer::new(get_message_descriptor_from_bytes( + DESCRIPTOR_BYTES, + METRICS_REQUEST_MESSAGE_TYPE, + )?); + + let traces_descriptor = ProtobufSerializer::new(get_message_descriptor_from_bytes( + DESCRIPTOR_BYTES, + TRACES_REQUEST_MESSAGE_TYPE, + )?); + + Ok(Self { + logs_descriptor, + metrics_descriptor, + traces_descriptor, + options, + }) + } +} + +impl Encoder for OtlpSerializer { + type Error = vector_common::Error; + + fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { + match &event { + Event::Log(_) => self.logs_descriptor.encode(event, buffer), + Event::Metric(_) => self.metrics_descriptor.encode(event, buffer), + Event::Trace(_) => self.traces_descriptor.encode(event, buffer), + } + } +} diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs index 8352d27559cd5..a5bad14d8298f 100644 --- 
a/lib/codecs/src/encoding/mod.rs +++ b/lib/codecs/src/encoding/mod.rs @@ -14,9 +14,9 @@ pub use format::{ CefSerializerConfig, CsvSerializer, CsvSerializerConfig, GelfSerializer, GelfSerializerConfig, JsonSerializer, JsonSerializerConfig, JsonSerializerOptions, LogfmtSerializer, LogfmtSerializerConfig, NativeJsonSerializer, NativeJsonSerializerConfig, NativeSerializer, - NativeSerializerConfig, ProtobufSerializer, ProtobufSerializerConfig, - ProtobufSerializerOptions, RawMessageSerializer, RawMessageSerializerConfig, TextSerializer, - TextSerializerConfig, + NativeSerializerConfig, OtlpSerializer, OtlpSerializerConfig, ProtobufSerializer, + ProtobufSerializerConfig, ProtobufSerializerOptions, RawMessageSerializer, + RawMessageSerializerConfig, TextSerializer, TextSerializerConfig, }; pub use framing::{ BoxedFramer, BoxedFramingError, BytesEncoder, BytesEncoderConfig, CharacterDelimitedEncoder, @@ -268,6 +268,15 @@ pub enum SerializerConfig { /// [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs NativeJson, + /// Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + /// + /// This codec uses protobuf encoding, which is the recommended format for OTLP. + /// The output is suitable for sending to OTLP-compatible endpoints with + /// `content-type: application/x-protobuf`. + /// + /// [otlp]: https://opentelemetry.io/docs/specs/otlp/ + Otlp, + /// Encodes an event as a [Protobuf][protobuf] message. 
/// /// [protobuf]: https://protobuf.dev/ @@ -347,6 +356,12 @@ impl From for SerializerConfig { } } +impl From for SerializerConfig { + fn from(_: OtlpSerializerConfig) -> Self { + Self::Otlp + } +} + impl From for SerializerConfig { fn from(config: ProtobufSerializerConfig) -> Self { Self::Protobuf(config) @@ -381,6 +396,9 @@ impl SerializerConfig { SerializerConfig::NativeJson => { Ok(Serializer::NativeJson(NativeJsonSerializerConfig.build())) } + SerializerConfig::Otlp => { + Ok(Serializer::Otlp(OtlpSerializerConfig::default().build()?)) + } SerializerConfig::Protobuf(config) => Ok(Serializer::Protobuf(config.build()?)), SerializerConfig::RawMessage => { Ok(Serializer::RawMessage(RawMessageSerializerConfig.build())) @@ -406,7 +424,7 @@ impl SerializerConfig { SerializerConfig::Avro { .. } | SerializerConfig::Native => { FramingConfig::LengthDelimited(LengthDelimitedEncoderConfig::default()) } - SerializerConfig::Protobuf(_) => { + SerializerConfig::Otlp | SerializerConfig::Protobuf(_) => { FramingConfig::VarintLengthDelimited(VarintLengthDelimitedEncoderConfig::default()) } SerializerConfig::Cef(_) @@ -435,6 +453,7 @@ impl SerializerConfig { SerializerConfig::Logfmt => LogfmtSerializerConfig.input_type(), SerializerConfig::Native => NativeSerializerConfig.input_type(), SerializerConfig::NativeJson => NativeJsonSerializerConfig.input_type(), + SerializerConfig::Otlp => OtlpSerializerConfig::default().input_type(), SerializerConfig::Protobuf(config) => config.input_type(), SerializerConfig::RawMessage => RawMessageSerializerConfig.input_type(), SerializerConfig::Text(config) => config.input_type(), @@ -454,6 +473,7 @@ impl SerializerConfig { SerializerConfig::Logfmt => LogfmtSerializerConfig.schema_requirement(), SerializerConfig::Native => NativeSerializerConfig.schema_requirement(), SerializerConfig::NativeJson => NativeJsonSerializerConfig.schema_requirement(), + SerializerConfig::Otlp => OtlpSerializerConfig::default().schema_requirement(), 
SerializerConfig::Protobuf(config) => config.schema_requirement(), SerializerConfig::RawMessage => RawMessageSerializerConfig.schema_requirement(), SerializerConfig::Text(config) => config.schema_requirement(), @@ -480,6 +500,8 @@ pub enum Serializer { Native(NativeSerializer), /// Uses a `NativeJsonSerializer` for serialization. NativeJson(NativeJsonSerializer), + /// Uses an `OtlpSerializer` for serialization. + Otlp(OtlpSerializer), /// Uses a `ProtobufSerializer` for serialization. Protobuf(ProtobufSerializer), /// Uses a `RawMessageSerializer` for serialization. @@ -499,6 +521,7 @@ impl Serializer { | Serializer::Logfmt(_) | Serializer::Text(_) | Serializer::Native(_) + | Serializer::Otlp(_) | Serializer::Protobuf(_) | Serializer::RawMessage(_) => false, } @@ -521,6 +544,7 @@ impl Serializer { | Serializer::Logfmt(_) | Serializer::Text(_) | Serializer::Native(_) + | Serializer::Otlp(_) | Serializer::Protobuf(_) | Serializer::RawMessage(_) => { panic!("Serializer does not support JSON") @@ -585,6 +609,12 @@ impl From for Serializer { } } +impl From for Serializer { + fn from(serializer: OtlpSerializer) -> Self { + Self::Otlp(serializer) + } +} + impl From for Serializer { fn from(serializer: ProtobufSerializer) -> Self { Self::Protobuf(serializer) @@ -616,6 +646,7 @@ impl tokio_util::codec::Encoder for Serializer { Serializer::Logfmt(serializer) => serializer.encode(event, buffer), Serializer::Native(serializer) => serializer.encode(event, buffer), Serializer::NativeJson(serializer) => serializer.encode(event, buffer), + Serializer::Otlp(serializer) => serializer.encode(event, buffer), Serializer::Protobuf(serializer) => serializer.encode(event, buffer), Serializer::RawMessage(serializer) => serializer.encode(event, buffer), Serializer::Text(serializer) => serializer.encode(event, buffer), diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs index 756bc8e2406f2..66a4bd7bfc657 100644 --- a/src/codecs/encoding/config.rs +++ 
b/src/codecs/encoding/config.rs @@ -1,13 +1,12 @@ +use crate::codecs::Transformer; use vector_lib::{ codecs::{ - CharacterDelimitedEncoder, LengthDelimitedEncoder, NewlineDelimitedEncoder, + BytesEncoder, CharacterDelimitedEncoder, LengthDelimitedEncoder, NewlineDelimitedEncoder, encoding::{Framer, FramingConfig, Serializer, SerializerConfig}, }, configurable::configurable_component, }; -use crate::codecs::Transformer; - /// Encoding configuration. #[configurable_component] #[derive(Clone, Debug)] @@ -130,6 +129,7 @@ impl EncodingConfigWithFraming { | Serializer::RawMessage(_) | Serializer::Text(_), ) => NewlineDelimitedEncoder::default().into(), + (None, Serializer::Otlp(_)) => BytesEncoder.into(), }; Ok((framer, serializer)) diff --git a/src/codecs/encoding/encoder.rs b/src/codecs/encoding/encoder.rs index 14642bb462f82..793a876ae4fc1 100644 --- a/src/codecs/encoding/encoder.rs +++ b/src/codecs/encoding/encoder.rs @@ -128,6 +128,7 @@ impl Encoder { | Serializer::Text(_), _, ) => "text/plain", + (Serializer::Otlp(_), _) => "application/x-protobuf", } } } diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs index a0dc1cecad0c5..bfca76ad517f2 100644 --- a/src/components/validation/resources/mod.rs +++ b/src/components/validation/resources/mod.rs @@ -233,6 +233,7 @@ fn serializer_config_to_deserializer( }) } SerializerConfig::RawMessage | SerializerConfig::Text(_) => DeserializerConfig::Bytes, + SerializerConfig::Otlp => todo!(), }; deserializer_config.build() diff --git a/src/sinks/websocket/sink.rs b/src/sinks/websocket/sink.rs index 1a378803e4fd5..e26c0d6c34265 100644 --- a/src/sinks/websocket/sink.rs +++ b/src/sinks/websocket/sink.rs @@ -4,28 +4,29 @@ use std::{ time::{Duration, Instant}, }; +use crate::{ + codecs::{Encoder, Transformer}, + common::websocket::{PingInterval, WebSocketConnector, is_closed}, + event::{Event, EventStatus, Finalizable}, + internal_events::{ + ConnectionOpen, OpenGauge, 
WebSocketConnectionError, WebSocketConnectionShutdown, + }, + sinks::{util::StreamSink, websocket::config::WebSocketSinkConfig}, +}; use async_trait::async_trait; use bytes::BytesMut; use futures::{Sink, Stream, StreamExt, pin_mut, sink::SinkExt, stream::BoxStream}; use tokio_tungstenite::tungstenite::{error::Error as TungsteniteError, protocol::Message}; use tokio_util::codec::Encoder as _; use vector_lib::{ - EstimatedJsonEncodedSizeOf, emit, + EstimatedJsonEncodedSizeOf, + codecs::encoding::Serializer::Otlp, + emit, internal_event::{ ByteSize, BytesSent, CountByteSize, EventsSent, InternalEventHandle as _, Output, Protocol, }, }; -use crate::{ - codecs::{Encoder, Transformer}, - common::websocket::{PingInterval, WebSocketConnector, is_closed}, - event::{Event, EventStatus, Finalizable}, - internal_events::{ - ConnectionOpen, OpenGauge, WebSocketConnectionError, WebSocketConnectionShutdown, - }, - sinks::{util::StreamSink, websocket::config::WebSocketSinkConfig}, -}; - pub struct WebSocketSink { transformer: Transformer, encoder: Encoder<()>, @@ -81,7 +82,7 @@ impl WebSocketSink { }; match self.encoder.serializer() { - RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) => true, + RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) | Otlp(_) => true, Cef(_) | Csv(_) | Logfmt(_) | Gelf(_) | Json(_) | Text(_) | NativeJson(_) => false, } } diff --git a/src/sinks/websocket_server/sink.rs b/src/sinks/websocket_server/sink.rs index 033248d573aa7..04e38c8defa84 100644 --- a/src/sinks/websocket_server/sink.rs +++ b/src/sinks/websocket_server/sink.rs @@ -4,6 +4,24 @@ use std::{ sync::{Arc, Mutex}, }; +use super::{ + WebSocketListenerSinkConfig, + buffering::MessageBufferingConfig, + config::{ExtraMetricTagsConfig, SubProtocolConfig}, +}; +use crate::{ + codecs::{Encoder, Transformer}, + common::http::server_auth::HttpServerAuthMatcher, + internal_events::{ + ConnectionOpen, OpenGauge, WebSocketListenerConnectionEstablished, + WebSocketListenerConnectionFailedError, 
WebSocketListenerConnectionShutdown, + WebSocketListenerMessageSent, WebSocketListenerSendError, + }, + sinks::{ + prelude::*, + websocket_server::buffering::{BufferReplayRequest, WsMessageBufferConfig}, + }, +}; use async_trait::async_trait; use bytes::BytesMut; use futures::{ @@ -24,6 +42,7 @@ use url::Url; use uuid::Uuid; use vector_lib::{ EstimatedJsonEncodedSizeOf, + codecs::encoding::Serializer::Otlp, event::{Event, EventStatus}, finalization::Finalizable, internal_event::{ @@ -33,25 +52,6 @@ use vector_lib::{ tls::{MaybeTlsIncomingStream, MaybeTlsListener, MaybeTlsSettings}, }; -use super::{ - WebSocketListenerSinkConfig, - buffering::MessageBufferingConfig, - config::{ExtraMetricTagsConfig, SubProtocolConfig}, -}; -use crate::{ - codecs::{Encoder, Transformer}, - common::http::server_auth::HttpServerAuthMatcher, - internal_events::{ - ConnectionOpen, OpenGauge, WebSocketListenerConnectionEstablished, - WebSocketListenerConnectionFailedError, WebSocketListenerConnectionShutdown, - WebSocketListenerMessageSent, WebSocketListenerSendError, - }, - sinks::{ - prelude::*, - websocket_server::buffering::{BufferReplayRequest, WsMessageBufferConfig}, - }, -}; - pub struct WebSocketListenerSink { tls: MaybeTlsSettings, transformer: Transformer, @@ -92,7 +92,7 @@ impl WebSocketListenerSink { }; match self.encoder.serializer() { - RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) => true, + RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) | Otlp(_) => true, Cef(_) | Csv(_) | Logfmt(_) | Gelf(_) | Json(_) | Text(_) | NativeJson(_) => false, } } diff --git a/src/sources/opentelemetry/config.rs b/src/sources/opentelemetry/config.rs index 14fd14fd26094..1c6b30bc23a82 100644 --- a/src/sources/opentelemetry/config.rs +++ b/src/sources/opentelemetry/config.rs @@ -20,9 +20,6 @@ use crate::{ use futures::FutureExt; use futures_util::{TryFutureExt, future::join}; use tonic::{codec::CompressionEncoding, transport::server::RoutesBuilder}; -use vector_lib::opentelemetry::proto::{ 
- LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, TRACES_REQUEST_MESSAGE_TYPE, -}; use vector_lib::{ codecs::decoding::ProtobufDeserializer, config::{LegacyKey, LogNamespace, log_schema}, @@ -34,10 +31,13 @@ use vector_lib::{ ATTRIBUTES_KEY, DROPPED_ATTRIBUTES_COUNT_KEY, FLAGS_KEY, OBSERVED_TIMESTAMP_KEY, RESOURCE_KEY, SEVERITY_NUMBER_KEY, SEVERITY_TEXT_KEY, SPAN_ID_KEY, TRACE_ID_KEY, }, - proto::collector::{ - logs::v1::logs_service_server::LogsServiceServer, - metrics::v1::metrics_service_server::MetricsServiceServer, - trace::v1::trace_service_server::TraceServiceServer, + proto::{ + LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, TRACES_REQUEST_MESSAGE_TYPE, + collector::{ + logs::v1::logs_service_server::LogsServiceServer, + metrics::v1::metrics_service_server::MetricsServiceServer, + trace::v1::trace_service_server::TraceServiceServer, + }, }, }, schema::Definition, diff --git a/tests/e2e/opentelemetry/logs/mod.rs b/tests/e2e/opentelemetry/logs/mod.rs index d3ade1c6d2a15..f20433912744f 100644 --- a/tests/e2e/opentelemetry/logs/mod.rs +++ b/tests/e2e/opentelemetry/logs/mod.rs @@ -1,8 +1,5 @@ use serde_json::Value; -use std::collections::BTreeMap; -use std::io; -use std::path::Path; -use std::process::Command; +use std::{collections::BTreeMap, io, path::Path, process::Command}; const EXPECTED_LOG_COUNT: usize = 100; From 65b9baf66be00e745252cb32ed053f794522872e Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 11:21:29 -0400 Subject: [PATCH 09/33] make generate-component-docs --- Cargo.lock | 72 +++---------------- .../components/sinks/generated/amqp.cue | 9 +++ .../sinks/generated/aws_cloudwatch_logs.cue | 9 +++ .../sinks/generated/aws_kinesis_firehose.cue | 9 +++ .../sinks/generated/aws_kinesis_streams.cue | 9 +++ .../components/sinks/generated/aws_s3.cue | 9 +++ .../components/sinks/generated/aws_sns.cue | 9 +++ .../components/sinks/generated/aws_sqs.cue | 9 +++ .../components/sinks/generated/azure_blob.cue | 9 +++ 
.../components/sinks/generated/console.cue | 9 +++ .../components/sinks/generated/file.cue | 9 +++ .../generated/gcp_chronicle_unstructured.cue | 9 +++ .../sinks/generated/gcp_cloud_storage.cue | 9 +++ .../components/sinks/generated/gcp_pubsub.cue | 9 +++ .../components/sinks/generated/http.cue | 9 +++ .../components/sinks/generated/humio_logs.cue | 9 +++ .../components/sinks/generated/kafka.cue | 9 +++ .../components/sinks/generated/loki.cue | 9 +++ .../components/sinks/generated/mqtt.cue | 9 +++ .../components/sinks/generated/nats.cue | 9 +++ .../sinks/generated/opentelemetry.cue | 9 +++ .../components/sinks/generated/papertrail.cue | 9 +++ .../components/sinks/generated/pulsar.cue | 9 +++ .../components/sinks/generated/redis.cue | 9 +++ .../components/sinks/generated/socket.cue | 9 +++ .../sinks/generated/splunk_hec_logs.cue | 9 +++ .../components/sinks/generated/webhdfs.cue | 9 +++ .../components/sinks/generated/websocket.cue | 9 +++ .../sinks/generated/websocket_server.cue | 9 +++ 29 files changed, 262 insertions(+), 62 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6450716ca83ad..6195e716b1e35 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -560,7 +560,7 @@ dependencies = [ "async-graphql", "futures-util", "serde_json", - "warp 0.3.7", + "warp", ] [[package]] @@ -624,13 +624,13 @@ dependencies = [ [[package]] name = "async-nats" -version = "0.43.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8980b86fddaad5d28d128f4517b23fdacdcbd32c20f7149b5117e07ad65bb50f" +checksum = "08f6da6d49a956424ca4e28fe93656f790d748b469eaccbc7488fec545315180" dependencies = [ "base64 0.22.1", "bytes 1.10.1", - "futures-util", + "futures 0.3.31", "memchr", "nkeys", "nuid", @@ -651,7 +651,6 @@ dependencies = [ "time", "tokio", "tokio-rustls 0.26.2", - "tokio-stream", "tokio-util", "tokio-websockets", "tracing 0.1.41", @@ -4504,28 +4503,13 @@ checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" 
dependencies = [ "base64 0.21.7", "bytes 1.10.1", - "headers-core 0.2.0", + "headers-core", "http 0.2.9", "httpdate", "mime", "sha1", ] -[[package]] -name = "headers" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" -dependencies = [ - "base64 0.22.1", - "bytes 1.10.1", - "headers-core 0.3.0", - "http 1.1.0", - "httpdate", - "mime", - "sha1", -] - [[package]] name = "headers-core" version = "0.2.0" @@ -4535,15 +4519,6 @@ dependencies = [ "http 0.2.9", ] -[[package]] -name = "headers-core" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" -dependencies = [ - "http 1.1.0", -] - [[package]] name = "heck" version = "0.4.1" @@ -4991,7 +4966,7 @@ checksum = "ca815a891b24fdfb243fa3239c86154392b0953ee584aa1a2a1f66d20cbe75cc" dependencies = [ "bytes 1.10.1", "futures 0.3.31", - "headers 0.3.9", + "headers", "http 0.2.9", "hyper 0.14.28", "openssl", @@ -12120,7 +12095,7 @@ dependencies = [ "h2 0.4.12", "hash_hasher", "hashbrown 0.14.5", - "headers 0.3.9", + "headers", "heim", "hex", "hickory-proto", @@ -12244,7 +12219,7 @@ dependencies = [ "vector-lib", "vector-vrl-functions", "vrl", - "warp 0.4.2", + "warp", "windows-service", "wiremock", "zstd 0.13.2", @@ -12410,7 +12385,7 @@ dependencies = [ "float_eq", "futures 0.3.31", "futures-util", - "headers 0.3.9", + "headers", "http 0.2.9", "hyper-proxy", "indexmap 2.11.0", @@ -12772,7 +12747,7 @@ dependencies = [ "bytes 1.10.1", "futures-channel", "futures-util", - "headers 0.3.9", + "headers", "http 0.2.9", "hyper 0.14.28", "log", @@ -12791,33 +12766,6 @@ dependencies = [ "tracing 0.1.41", ] -[[package]] -name = "warp" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d06d9202adc1f15d709c4f4a2069be5428aa912cc025d6f268ac441ab066b0" -dependencies = [ - 
"bytes 1.10.1", - "futures-util", - "headers 0.4.1", - "http 1.1.0", - "http-body 1.0.0", - "http-body-util", - "log", - "mime", - "mime_guess", - "percent-encoding", - "pin-project", - "scoped-tls", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-util", - "tower-service", - "tracing 0.1.41", -] - [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" diff --git a/website/cue/reference/components/sinks/generated/amqp.cue b/website/cue/reference/components/sinks/generated/amqp.cue index 0d24b39aa8e93..5887fb620c615 100644 --- a/website/cue/reference/components/sinks/generated/amqp.cue +++ b/website/cue/reference/components/sinks/generated/amqp.cue @@ -204,6 +204,15 @@ generated: components: sinks: amqp: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. 
diff --git a/website/cue/reference/components/sinks/generated/aws_cloudwatch_logs.cue b/website/cue/reference/components/sinks/generated/aws_cloudwatch_logs.cue index 3857573f5996e..c1b0e1f494998 100644 --- a/website/cue/reference/components/sinks/generated/aws_cloudwatch_logs.cue +++ b/website/cue/reference/components/sinks/generated/aws_cloudwatch_logs.cue @@ -400,6 +400,15 @@ generated: components: sinks: aws_cloudwatch_logs: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/aws_kinesis_firehose.cue b/website/cue/reference/components/sinks/generated/aws_kinesis_firehose.cue index 93ddcadd6e25d..ff1d1cdec8c85 100644 --- a/website/cue/reference/components/sinks/generated/aws_kinesis_firehose.cue +++ b/website/cue/reference/components/sinks/generated/aws_kinesis_firehose.cue @@ -379,6 +379,15 @@ generated: components: sinks: aws_kinesis_firehose: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. 
+ + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/aws_kinesis_streams.cue b/website/cue/reference/components/sinks/generated/aws_kinesis_streams.cue index 7dbe25f9021ea..150b66142bbec 100644 --- a/website/cue/reference/components/sinks/generated/aws_kinesis_streams.cue +++ b/website/cue/reference/components/sinks/generated/aws_kinesis_streams.cue @@ -379,6 +379,15 @@ generated: components: sinks: aws_kinesis_streams: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/aws_s3.cue b/website/cue/reference/components/sinks/generated/aws_s3.cue index a0e9b3c76a67d..0a50a9da454e6 100644 --- a/website/cue/reference/components/sinks/generated/aws_s3.cue +++ b/website/cue/reference/components/sinks/generated/aws_s3.cue @@ -488,6 +488,15 @@ generated: components: sinks: aws_s3: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/aws_sns.cue b/website/cue/reference/components/sinks/generated/aws_sns.cue index 70ce7e9a48d5a..67e018c83f76e 100644 --- a/website/cue/reference/components/sinks/generated/aws_sns.cue +++ b/website/cue/reference/components/sinks/generated/aws_sns.cue @@ -310,6 +310,15 @@ generated: components: sinks: aws_sns: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/aws_sqs.cue b/website/cue/reference/components/sinks/generated/aws_sqs.cue index 181730563df9e..626413075a3ef 100644 --- a/website/cue/reference/components/sinks/generated/aws_sqs.cue +++ b/website/cue/reference/components/sinks/generated/aws_sqs.cue @@ -310,6 +310,15 @@ generated: components: sinks: aws_sqs: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/azure_blob.cue b/website/cue/reference/components/sinks/generated/azure_blob.cue index f63fd171a75d5..1099e8e686f9f 100644 --- a/website/cue/reference/components/sinks/generated/azure_blob.cue +++ b/website/cue/reference/components/sinks/generated/azure_blob.cue @@ -334,6 +334,15 @@ generated: components: sinks: azure_blob: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/console.cue b/website/cue/reference/components/sinks/generated/console.cue index c0a52286f6af2..01a8b3764f6c7 100644 --- a/website/cue/reference/components/sinks/generated/console.cue +++ b/website/cue/reference/components/sinks/generated/console.cue @@ -188,6 +188,15 @@ generated: components: sinks: console: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. 
+ + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/file.cue b/website/cue/reference/components/sinks/generated/file.cue index df1022a2f2797..472622b5a4f55 100644 --- a/website/cue/reference/components/sinks/generated/file.cue +++ b/website/cue/reference/components/sinks/generated/file.cue @@ -208,6 +208,15 @@ generated: components: sinks: file: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. 
diff --git a/website/cue/reference/components/sinks/generated/gcp_chronicle_unstructured.cue b/website/cue/reference/components/sinks/generated/gcp_chronicle_unstructured.cue index 69727212e5235..e4633659e88ce 100644 --- a/website/cue/reference/components/sinks/generated/gcp_chronicle_unstructured.cue +++ b/website/cue/reference/components/sinks/generated/gcp_chronicle_unstructured.cue @@ -276,6 +276,15 @@ generated: components: sinks: gcp_chronicle_unstructured: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/gcp_cloud_storage.cue b/website/cue/reference/components/sinks/generated/gcp_cloud_storage.cue index 524669bbaa830..0a96d8a53eacf 100644 --- a/website/cue/reference/components/sinks/generated/gcp_cloud_storage.cue +++ b/website/cue/reference/components/sinks/generated/gcp_cloud_storage.cue @@ -349,6 +349,15 @@ generated: components: sinks: gcp_cloud_storage: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/gcp_pubsub.cue b/website/cue/reference/components/sinks/generated/gcp_pubsub.cue index a56f9979bd540..fd972d8bd933c 100644 --- a/website/cue/reference/components/sinks/generated/gcp_pubsub.cue +++ b/website/cue/reference/components/sinks/generated/gcp_pubsub.cue @@ -255,6 +255,15 @@ generated: components: sinks: gcp_pubsub: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/http.cue b/website/cue/reference/components/sinks/generated/http.cue index f71f8411b630d..97d6d17178bd5 100644 --- a/website/cue/reference/components/sinks/generated/http.cue +++ b/website/cue/reference/components/sinks/generated/http.cue @@ -431,6 +431,15 @@ generated: components: sinks: http: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/humio_logs.cue b/website/cue/reference/components/sinks/generated/humio_logs.cue index cbe42c0dad7f0..afe28006824d8 100644 --- a/website/cue/reference/components/sinks/generated/humio_logs.cue +++ b/website/cue/reference/components/sinks/generated/humio_logs.cue @@ -254,6 +254,15 @@ generated: components: sinks: humio_logs: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/kafka.cue b/website/cue/reference/components/sinks/generated/kafka.cue index a8f8b1b0baacd..a90c3e1bf0857 100644 --- a/website/cue/reference/components/sinks/generated/kafka.cue +++ b/website/cue/reference/components/sinks/generated/kafka.cue @@ -243,6 +243,15 @@ generated: components: sinks: kafka: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/loki.cue b/website/cue/reference/components/sinks/generated/loki.cue index 13eb339ff98cd..b83cd54280f7f 100644 --- a/website/cue/reference/components/sinks/generated/loki.cue +++ b/website/cue/reference/components/sinks/generated/loki.cue @@ -433,6 +433,15 @@ generated: components: sinks: loki: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/mqtt.cue b/website/cue/reference/components/sinks/generated/mqtt.cue index ada012a020233..c2d192d35ac3e 100644 --- a/website/cue/reference/components/sinks/generated/mqtt.cue +++ b/website/cue/reference/components/sinks/generated/mqtt.cue @@ -198,6 +198,15 @@ generated: components: sinks: mqtt: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/nats.cue b/website/cue/reference/components/sinks/generated/nats.cue index 3c802edad2783..84cba5611b2af 100644 --- a/website/cue/reference/components/sinks/generated/nats.cue +++ b/website/cue/reference/components/sinks/generated/nats.cue @@ -288,6 +288,15 @@ generated: components: sinks: nats: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/opentelemetry.cue b/website/cue/reference/components/sinks/generated/opentelemetry.cue index 29c2008b0f327..e0b4e672fef2e 100644 --- a/website/cue/reference/components/sinks/generated/opentelemetry.cue +++ b/website/cue/reference/components/sinks/generated/opentelemetry.cue @@ -434,6 +434,15 @@ generated: components: sinks: opentelemetry: configuration: protocol: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. 
+ + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/papertrail.cue b/website/cue/reference/components/sinks/generated/papertrail.cue index b5f1728e984de..8704ca7a46d57 100644 --- a/website/cue/reference/components/sinks/generated/papertrail.cue +++ b/website/cue/reference/components/sinks/generated/papertrail.cue @@ -188,6 +188,15 @@ generated: components: sinks: papertrail: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. 
diff --git a/website/cue/reference/components/sinks/generated/pulsar.cue b/website/cue/reference/components/sinks/generated/pulsar.cue index 4e2034a0c81a5..c5545a0f17082 100644 --- a/website/cue/reference/components/sinks/generated/pulsar.cue +++ b/website/cue/reference/components/sinks/generated/pulsar.cue @@ -322,6 +322,15 @@ generated: components: sinks: pulsar: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/redis.cue b/website/cue/reference/components/sinks/generated/redis.cue index 6d365d7decdeb..2f5ac1cc0890b 100644 --- a/website/cue/reference/components/sinks/generated/redis.cue +++ b/website/cue/reference/components/sinks/generated/redis.cue @@ -247,6 +247,15 @@ generated: components: sinks: redis: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. 
diff --git a/website/cue/reference/components/sinks/generated/socket.cue b/website/cue/reference/components/sinks/generated/socket.cue index 753f6a4a02f05..17531379e6155 100644 --- a/website/cue/reference/components/sinks/generated/socket.cue +++ b/website/cue/reference/components/sinks/generated/socket.cue @@ -200,6 +200,15 @@ generated: components: sinks: socket: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/splunk_hec_logs.cue b/website/cue/reference/components/sinks/generated/splunk_hec_logs.cue index 860ce6a0fb6e5..1a68a339a166d 100644 --- a/website/cue/reference/components/sinks/generated/splunk_hec_logs.cue +++ b/website/cue/reference/components/sinks/generated/splunk_hec_logs.cue @@ -304,6 +304,15 @@ generated: components: sinks: splunk_hec_logs: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. 
+ + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/webhdfs.cue b/website/cue/reference/components/sinks/generated/webhdfs.cue index a491ddb52c64a..3a6b5703f5c7f 100644 --- a/website/cue/reference/components/sinks/generated/webhdfs.cue +++ b/website/cue/reference/components/sinks/generated/webhdfs.cue @@ -254,6 +254,15 @@ generated: components: sinks: webhdfs: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/websocket.cue b/website/cue/reference/components/sinks/generated/websocket.cue index 817e8247ff15e..a6f86e05d4ff2 100644 --- a/website/cue/reference/components/sinks/generated/websocket.cue +++ b/website/cue/reference/components/sinks/generated/websocket.cue @@ -360,6 +360,15 @@ generated: components: sinks: websocket: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. 
+ The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. diff --git a/website/cue/reference/components/sinks/generated/websocket_server.cue b/website/cue/reference/components/sinks/generated/websocket_server.cue index 0817002c06acf..ca04a07cfee13 100644 --- a/website/cue/reference/components/sinks/generated/websocket_server.cue +++ b/website/cue/reference/components/sinks/generated/websocket_server.cue @@ -244,6 +244,15 @@ generated: components: sinks: websocket_server: configuration: { [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs """ + otlp: """ + Encodes an event in the [OTLP (OpenTelemetry Protocol)][otlp] format. + + This codec uses protobuf encoding, which is the recommended format for OTLP. + The output is suitable for sending to OTLP-compatible endpoints with + `content-type: application/x-protobuf`. + + [otlp]: https://opentelemetry.io/docs/specs/otlp/ + """ protobuf: """ Encodes an event as a [Protobuf][protobuf] message. 
From 6ea106273f412c8fe48d8a57b30fafda910b4a0e Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 13:26:40 -0400 Subject: [PATCH 10/33] forward serializer options --- lib/codecs/src/encoding/format/otlp.rs | 17 ++++++++++------- lib/codecs/src/encoding/format/protobuf.rs | 13 +++++++------ 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 4ca2197f8c839..887da4b2808ad 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -7,7 +7,7 @@ use opentelemetry_proto::proto::{ use tokio_util::codec::Encoder; use vector_config_macros::configurable_component; use vector_core::{config::DataType, event::Event, schema}; -use vrl::protobuf::{descriptor::get_message_descriptor_from_bytes, parse::Options}; +use vrl::protobuf::{encode::Options}; /// Config used to build an `OtlpSerializer`. #[configurable_component] @@ -64,20 +64,23 @@ impl OtlpSerializer { use_json_names: true, }; - let logs_descriptor = ProtobufSerializer::new(get_message_descriptor_from_bytes( + let logs_descriptor = ProtobufSerializer::new_from_bytes( DESCRIPTOR_BYTES, LOGS_REQUEST_MESSAGE_TYPE, - )?); + &options, + )?; - let metrics_descriptor = ProtobufSerializer::new(get_message_descriptor_from_bytes( + let metrics_descriptor = ProtobufSerializer::new_from_bytes( DESCRIPTOR_BYTES, METRICS_REQUEST_MESSAGE_TYPE, - )?); + &options, + )?; - let traces_descriptor = ProtobufSerializer::new(get_message_descriptor_from_bytes( + let traces_descriptor = ProtobufSerializer::new_from_bytes( DESCRIPTOR_BYTES, TRACES_REQUEST_MESSAGE_TYPE, - )?); + &options, + )?; Ok(Self { logs_descriptor, diff --git a/lib/codecs/src/encoding/format/protobuf.rs b/lib/codecs/src/encoding/format/protobuf.rs index f656f24cf29b5..715ffd95fb1be 100644 --- a/lib/codecs/src/encoding/format/protobuf.rs +++ b/lib/codecs/src/encoding/format/protobuf.rs @@ -28,7 +28,7 @@ impl 
ProtobufSerializerConfig { pub fn build(&self) -> Result { let message_descriptor = get_message_descriptor(&self.protobuf.desc_file, &self.protobuf.message_type)?; - Ok(ProtobufSerializer { message_descriptor }) + Ok(ProtobufSerializer { message_descriptor, options: Options::default() }) } /// The data type of events that are accepted by `ProtobufSerializer`. @@ -66,18 +66,19 @@ pub struct ProtobufSerializerOptions { pub struct ProtobufSerializer { /// The protobuf message definition to use for serialization. message_descriptor: MessageDescriptor, + options: Options, } impl ProtobufSerializer { /// Creates a new `ProtobufSerializer`. pub fn new(message_descriptor: MessageDescriptor) -> Self { - Self { message_descriptor } + Self { message_descriptor, options: Options::default() } } /// Creates a new serializer instance using the descriptor bytes directly. - pub fn new_from_bytes(desc_bytes: &[u8], message_type: &str) -> vector_common::Result { + pub fn new_from_bytes(desc_bytes: &[u8], message_type: &str, options: &Options) -> vector_common::Result { let message_descriptor = get_message_descriptor_from_bytes(desc_bytes, message_type)?; - Ok(Self { message_descriptor }) + Ok(Self { message_descriptor, options: options.clone() }) } /// Get a description of the message type used in serialization. 
@@ -94,13 +95,13 @@ impl Encoder for ProtobufSerializer { Event::Log(log) => encode_message( &self.message_descriptor, log.into_parts().0, - &Options::default(), + &self.options, ), Event::Metric(_) => unimplemented!(), Event::Trace(trace) => encode_message( &self.message_descriptor, Value::Object(trace.into_parts().0), - &Options::default(), + &self.options, ), }?; message.encode(buffer).map_err(Into::into) From 2ee876271c19ba0bb74cf6a1031fe82a5da2f45e Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 13:26:57 -0400 Subject: [PATCH 11/33] ran cargo fmt --- lib/codecs/src/encoding/format/otlp.rs | 2 +- lib/codecs/src/encoding/format/protobuf.rs | 29 +++++++++++++++------- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 887da4b2808ad..45b8c2337449e 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -7,7 +7,7 @@ use opentelemetry_proto::proto::{ use tokio_util::codec::Encoder; use vector_config_macros::configurable_component; use vector_core::{config::DataType, event::Event, schema}; -use vrl::protobuf::{encode::Options}; +use vrl::protobuf::encode::Options; /// Config used to build an `OtlpSerializer`. #[configurable_component] diff --git a/lib/codecs/src/encoding/format/protobuf.rs b/lib/codecs/src/encoding/format/protobuf.rs index 715ffd95fb1be..1f3d3b7a64939 100644 --- a/lib/codecs/src/encoding/format/protobuf.rs +++ b/lib/codecs/src/encoding/format/protobuf.rs @@ -28,7 +28,10 @@ impl ProtobufSerializerConfig { pub fn build(&self) -> Result { let message_descriptor = get_message_descriptor(&self.protobuf.desc_file, &self.protobuf.message_type)?; - Ok(ProtobufSerializer { message_descriptor, options: Options::default() }) + Ok(ProtobufSerializer { + message_descriptor, + options: Options::default(), + }) } /// The data type of events that are accepted by `ProtobufSerializer`. 
@@ -72,13 +75,23 @@ pub struct ProtobufSerializer { impl ProtobufSerializer { /// Creates a new `ProtobufSerializer`. pub fn new(message_descriptor: MessageDescriptor) -> Self { - Self { message_descriptor, options: Options::default() } + Self { + message_descriptor, + options: Options::default(), + } } /// Creates a new serializer instance using the descriptor bytes directly. - pub fn new_from_bytes(desc_bytes: &[u8], message_type: &str, options: &Options) -> vector_common::Result { + pub fn new_from_bytes( + desc_bytes: &[u8], + message_type: &str, + options: &Options, + ) -> vector_common::Result { let message_descriptor = get_message_descriptor_from_bytes(desc_bytes, message_type)?; - Ok(Self { message_descriptor, options: options.clone() }) + Ok(Self { + message_descriptor, + options: options.clone(), + }) } /// Get a description of the message type used in serialization. @@ -92,11 +105,9 @@ impl Encoder for ProtobufSerializer { fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { let message = match event { - Event::Log(log) => encode_message( - &self.message_descriptor, - log.into_parts().0, - &self.options, - ), + Event::Log(log) => { + encode_message(&self.message_descriptor, log.into_parts().0, &self.options) + } Event::Metric(_) => unimplemented!(), Event::Trace(trace) => encode_message( &self.message_descriptor, From b8ad47a63345b7e8283b72bf73423a9a08b363e3 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 13:34:58 -0400 Subject: [PATCH 12/33] update e2e tests to use the new codec --- tests/data/e2e/opentelemetry/logs/vector_otlp.yaml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml index d67b6303223e7..5450431367e53 100644 --- a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml +++ b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml @@ -19,16 +19,12 @@ sinks: inputs: - 
source0.logs type: opentelemetry - use_otlp_encoding: true protocol: type: http uri: http://otel-collector-sink:5318/v1/logs method: post - batch: - max_events: 1 - request: - headers: - content-type: application/json + encoding: + codec: otlp otel_file_sink: type: file From 0a75c4ab66548206e07bf557a7ff5f4f0479cf92 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 14:03:04 -0400 Subject: [PATCH 13/33] automatically set content header --- src/sinks/http/config.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index ab72afb9c4cab..fa3204d2a973d 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -270,6 +270,7 @@ impl SinkConfig for HttpSinkConfig { (Json(_), CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' })) => { Some(CONTENT_TYPE_JSON.to_owned()) } + (Otlp(_), _) => Some("application/x-protobuf".to_owned()), _ => None, } }; From 3516688b9fa77fefdd76c7ed551ff00f206f82f7 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 14:03:04 -0400 Subject: [PATCH 14/33] automatically set content header --- tests/data/e2e/opentelemetry/logs/vector_otlp.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml index 5450431367e53..2c4be656844fb 100644 --- a/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml +++ b/tests/data/e2e/opentelemetry/logs/vector_otlp.yaml @@ -22,7 +22,6 @@ sinks: protocol: type: http uri: http://otel-collector-sink:5318/v1/logs - method: post encoding: codec: otlp From 50d380a5d739d9a0c90efac21b2e7a67f230698b Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Fri, 3 Oct 2025 16:14:58 -0400 Subject: [PATCH 15/33] fix aggressive utilization reporting --- src/utilization.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/src/utilization.rs b/src/utilization.rs index 7e5170c2f59e3..1517d70e91dd1 
100644 --- a/src/utilization.rs +++ b/src/utilization.rs @@ -17,6 +17,8 @@ use vector_lib::{id::ComponentKey, shutdown::ShutdownSignal}; use crate::stats; +const UTILIZATION_EMITTER_DURATION: Duration = Duration::from_secs(5); + #[pin_project] pub(crate) struct Utilization { intervals: IntervalStream, @@ -66,6 +68,8 @@ pub(crate) struct Timer { total_wait: Duration, ewma: stats::Ewma, gauge: Gauge, + #[cfg(debug_assertions)] + report_count: u32, } /// A simple, specialized timer for tracking spans of waiting vs not-waiting @@ -85,6 +89,8 @@ impl Timer { total_wait: Duration::new(0, 0), ewma: stats::Ewma::new(0.9), gauge, + #[cfg(debug_assertions)] + report_count: 0, } } @@ -122,8 +128,17 @@ impl Timer { self.ewma.update(utilization); let avg = self.ewma.average().unwrap_or(f64::NAN); - debug!(utilization = %avg); - self.gauge.set(avg); + let avg_rounded = (avg * 10000.0).round() / 10000.0; // 4 digit precision + + #[cfg(debug_assertions)] + { + if self.report_count % 6 == 0 { + debug!(utilization = %avg_rounded); + } + self.report_count = self.report_count.wrapping_add(1); + } + + self.gauge.set(avg_rounded); // Reset overall statistics for the next reporting period. 
self.overall_start = self.span_start; @@ -182,7 +197,7 @@ impl UtilizationEmitter { let (timer_tx, timer_rx) = channel(4096); Self { timers: HashMap::default(), - intervals: IntervalStream::new(interval(Duration::from_secs(5))), + intervals: IntervalStream::new(interval(UTILIZATION_EMITTER_DURATION)), timer_tx, timer_rx, } From 3df19cb77b7500d82a7da92a736f23d2c5c69e3c Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 11:26:00 -0400 Subject: [PATCH 16/33] inspect top level field (not event type) --- lib/codecs/src/encoding/format/otlp.rs | 20 ++++++++++++++++---- lib/opentelemetry-proto/src/proto.rs | 4 ++++ 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 45b8c2337449e..924cf8a29678e 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -2,6 +2,7 @@ use crate::encoding::ProtobufSerializer; use bytes::BytesMut; use opentelemetry_proto::proto::{ DESCRIPTOR_BYTES, LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, + RESOURCE_LOGS_ROOT_FIELD, RESOURCE_METRICS_ROOT_FIELD, RESOURCE_SPANS_ROOT_FIELD, TRACES_REQUEST_MESSAGE_TYPE, }; use tokio_util::codec::Encoder; @@ -95,10 +96,21 @@ impl Encoder for OtlpSerializer { type Error = vector_common::Error; fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { - match &event { - Event::Log(_) => self.logs_descriptor.encode(event, buffer), - Event::Metric(_) => self.metrics_descriptor.encode(event, buffer), - Event::Trace(_) => self.traces_descriptor.encode(event, buffer), + // Determine which descriptor to use based on top-level OTLP fields + // This handles events that were decoded with use_otlp_decoding enabled + if event.contains(RESOURCE_LOGS_ROOT_FIELD) { + self.logs_descriptor.encode(event, buffer) + } else if event.contains(RESOURCE_METRICS_ROOT_FIELD) { + // Currently the OTLP metrics are Vector logs (not metrics). 
+ self.metrics_descriptor.encode(event, buffer) + } else if event.contains(RESOURCE_SPANS_ROOT_FIELD) { + self.traces_descriptor.encode(event, buffer) + } else { + Err(format!( + "Event does not contain any OTLP top-level fields ({RESOURCE_LOGS_ROOT_FIELD}, \ + {RESOURCE_METRICS_ROOT_FIELD}, or {RESOURCE_SPANS_ROOT_FIELD})", + ) + .into()) } } } diff --git a/lib/opentelemetry-proto/src/proto.rs b/lib/opentelemetry-proto/src/proto.rs index a770d7d51506b..309dc87e7f9d6 100644 --- a/lib/opentelemetry-proto/src/proto.rs +++ b/lib/opentelemetry-proto/src/proto.rs @@ -5,6 +5,10 @@ pub const TRACES_REQUEST_MESSAGE_TYPE: &str = pub const METRICS_REQUEST_MESSAGE_TYPE: &str = "opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest"; +pub const RESOURCE_LOGS_ROOT_FIELD: &str = "resource_logs"; +pub const RESOURCE_METRICS_ROOT_FIELD: &str = "resource_metrics"; +pub const RESOURCE_SPANS_ROOT_FIELD: &str = "resource_spans"; + /// Service stub and clients. pub mod collector { pub mod trace { From 32b366ca41287fdb72322566b6f405121f13c023 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 12:14:19 -0400 Subject: [PATCH 17/33] ran cargo fmt --- lib/codecs/src/encoding/format/otlp.rs | 40 +++++++++++++++++--------- 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 924cf8a29678e..37d989763a6fc 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -98,19 +98,33 @@ impl Encoder for OtlpSerializer { fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { // Determine which descriptor to use based on top-level OTLP fields // This handles events that were decoded with use_otlp_decoding enabled - if event.contains(RESOURCE_LOGS_ROOT_FIELD) { - self.logs_descriptor.encode(event, buffer) - } else if event.contains(RESOURCE_METRICS_ROOT_FIELD) { - // Currently the OTLP metrics are Vector 
logs (not metrics). - self.metrics_descriptor.encode(event, buffer) - } else if event.contains(RESOURCE_SPANS_ROOT_FIELD) { - self.traces_descriptor.encode(event, buffer) - } else { - Err(format!( - "Event does not contain any OTLP top-level fields ({RESOURCE_LOGS_ROOT_FIELD}, \ - {RESOURCE_METRICS_ROOT_FIELD}, or {RESOURCE_SPANS_ROOT_FIELD})", - ) - .into()) + match &event { + Event::Log(log) => { + if log.contains(RESOURCE_LOGS_ROOT_FIELD) { + self.logs_descriptor.encode(event, buffer) + } else if log.contains(RESOURCE_METRICS_ROOT_FIELD) { + // Currently the OTLP metrics are Vector logs (not metrics). + self.metrics_descriptor.encode(event, buffer) + } else { + Err(format!( + "Log event does not contain OTLP top-level fields ({RESOURCE_LOGS_ROOT_FIELD} or {RESOURCE_METRICS_ROOT_FIELD})", + ) + .into()) + } + } + Event::Trace(trace) => { + if trace.contains(RESOURCE_SPANS_ROOT_FIELD) { + self.traces_descriptor.encode(event, buffer) + } else { + Err(format!( + "Trace event does not contain OTLP top-level field ({RESOURCE_SPANS_ROOT_FIELD})", + ) + .into()) + } + } + Event::Metric(_) => { + Err("OTLP serializer does not support native Vector metrics yet.".into()) + } } } } From 6328d6d857e4fa956f8f588259682377f348ee94 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 12:52:57 -0400 Subject: [PATCH 18/33] Update changelog to reflect OTLP codec implementation --- changelog.d/otlp_encoding.feature.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/changelog.d/otlp_encoding.feature.md b/changelog.d/otlp_encoding.feature.md index a29c59d128c41..513bcf4379d93 100644 --- a/changelog.d/otlp_encoding.feature.md +++ b/changelog.d/otlp_encoding.feature.md @@ -1,4 +1,4 @@ -Added `use_otlp_encoding` option to the `opentelemetry` sink. -When set to `true` the sink assumes the Vector events are structured based on OTLP. +Added `otlp` codec for encoding Vector events to OTLP format. 
+The codec can be used with sinks that support encoding configuration. authors: pront From ef3ee941e3004edbab8aacf40aa9c2afc9342de7 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 14:58:50 -0400 Subject: [PATCH 19/33] use json names --- lib/codecs/src/encoding/format/otlp.rs | 15 +++++++++------ lib/opentelemetry-proto/src/proto.rs | 5 +++++ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 37d989763a6fc..339dfa8877be2 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -2,7 +2,8 @@ use crate::encoding::ProtobufSerializer; use bytes::BytesMut; use opentelemetry_proto::proto::{ DESCRIPTOR_BYTES, LOGS_REQUEST_MESSAGE_TYPE, METRICS_REQUEST_MESSAGE_TYPE, - RESOURCE_LOGS_ROOT_FIELD, RESOURCE_METRICS_ROOT_FIELD, RESOURCE_SPANS_ROOT_FIELD, + RESOURCE_LOGS_JSON_FIELD, RESOURCE_LOGS_ROOT_FIELD, RESOURCE_METRICS_JSON_FIELD, + RESOURCE_METRICS_ROOT_FIELD, RESOURCE_SPANS_JSON_FIELD, RESOURCE_SPANS_ROOT_FIELD, TRACES_REQUEST_MESSAGE_TYPE, }; use tokio_util::codec::Encoder; @@ -98,26 +99,28 @@ impl Encoder for OtlpSerializer { fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { // Determine which descriptor to use based on top-level OTLP fields // This handles events that were decoded with use_otlp_decoding enabled + // We check both protobuf field names (snake_case) and JSON field names (camelCase) + // because the deserializer may use either depending on the use_json_names option match &event { Event::Log(log) => { - if log.contains(RESOURCE_LOGS_ROOT_FIELD) { + if log.contains(RESOURCE_LOGS_ROOT_FIELD) || log.contains(RESOURCE_LOGS_JSON_FIELD) { self.logs_descriptor.encode(event, buffer) - } else if log.contains(RESOURCE_METRICS_ROOT_FIELD) { + } else if log.contains(RESOURCE_METRICS_ROOT_FIELD) || log.contains(RESOURCE_METRICS_JSON_FIELD) { // Currently the OTLP metrics are 
Vector logs (not metrics). self.metrics_descriptor.encode(event, buffer) } else { Err(format!( - "Log event does not contain OTLP top-level fields ({RESOURCE_LOGS_ROOT_FIELD} or {RESOURCE_METRICS_ROOT_FIELD})", + "Log event does not contain OTLP top-level fields ({RESOURCE_LOGS_ROOT_FIELD}/{RESOURCE_LOGS_JSON_FIELD} or {RESOURCE_METRICS_ROOT_FIELD}/{RESOURCE_METRICS_JSON_FIELD})", ) .into()) } } Event::Trace(trace) => { - if trace.contains(RESOURCE_SPANS_ROOT_FIELD) { + if trace.contains(RESOURCE_SPANS_ROOT_FIELD) || trace.contains(RESOURCE_SPANS_JSON_FIELD) { self.traces_descriptor.encode(event, buffer) } else { Err(format!( - "Trace event does not contain OTLP top-level field ({RESOURCE_SPANS_ROOT_FIELD})", + "Trace event does not contain OTLP top-level field ({RESOURCE_SPANS_ROOT_FIELD}/{RESOURCE_SPANS_JSON_FIELD})", ) .into()) } diff --git a/lib/opentelemetry-proto/src/proto.rs b/lib/opentelemetry-proto/src/proto.rs index 309dc87e7f9d6..d15879c59e065 100644 --- a/lib/opentelemetry-proto/src/proto.rs +++ b/lib/opentelemetry-proto/src/proto.rs @@ -9,6 +9,11 @@ pub const RESOURCE_LOGS_ROOT_FIELD: &str = "resource_logs"; pub const RESOURCE_METRICS_ROOT_FIELD: &str = "resource_metrics"; pub const RESOURCE_SPANS_ROOT_FIELD: &str = "resource_spans"; +// JSON names (camelCase) for the same fields, used when use_json_names is enabled +pub const RESOURCE_LOGS_JSON_FIELD: &str = "resourceLogs"; +pub const RESOURCE_METRICS_JSON_FIELD: &str = "resourceMetrics"; +pub const RESOURCE_SPANS_JSON_FIELD: &str = "resourceSpans"; + /// Service stub and clients. 
pub mod collector { pub mod trace { From e25d5ba3be455219c239ca52560ae11f3781628d Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 19:15:50 +0000 Subject: [PATCH 20/33] fmt on linux --- lib/codecs/src/encoding/format/otlp.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 339dfa8877be2..7d2d9148ae5f5 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -103,9 +103,12 @@ impl Encoder for OtlpSerializer { // because the deserializer may use either depending on the use_json_names option match &event { Event::Log(log) => { - if log.contains(RESOURCE_LOGS_ROOT_FIELD) || log.contains(RESOURCE_LOGS_JSON_FIELD) { + if log.contains(RESOURCE_LOGS_ROOT_FIELD) || log.contains(RESOURCE_LOGS_JSON_FIELD) + { self.logs_descriptor.encode(event, buffer) - } else if log.contains(RESOURCE_METRICS_ROOT_FIELD) || log.contains(RESOURCE_METRICS_JSON_FIELD) { + } else if log.contains(RESOURCE_METRICS_ROOT_FIELD) + || log.contains(RESOURCE_METRICS_JSON_FIELD) + { // Currently the OTLP metrics are Vector logs (not metrics). 
self.metrics_descriptor.encode(event, buffer) } else { @@ -116,7 +119,9 @@ impl Encoder for OtlpSerializer { } } Event::Trace(trace) => { - if trace.contains(RESOURCE_SPANS_ROOT_FIELD) || trace.contains(RESOURCE_SPANS_JSON_FIELD) { + if trace.contains(RESOURCE_SPANS_ROOT_FIELD) + || trace.contains(RESOURCE_SPANS_JSON_FIELD) + { self.traces_descriptor.encode(event, buffer) } else { Err(format!( From 829cb2270c9295b20089e2fb4027396d9394a0a3 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 15:34:33 -0400 Subject: [PATCH 21/33] update otel source cue --- website/cue/reference/components/sources/opentelemetry.cue | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/website/cue/reference/components/sources/opentelemetry.cue b/website/cue/reference/components/sources/opentelemetry.cue index d9cc36d266b18..b18a3474e382b 100644 --- a/website/cue/reference/components/sources/opentelemetry.cue +++ b/website/cue/reference/components/sources/opentelemetry.cue @@ -295,14 +295,11 @@ components: sources: opentelemetry: { inputs: - otel.logs type: opentelemetry - use_otlp_encoding: true protocol: type: http uri: http://localhost:5318/v1/logs - method: post - request: - headers: - content-type: "application/x-protobuf" + encoding: + codec: otlp ``` """ } From 2f6e6200b22dfba744973c167629084b9f72a7ac Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 15:37:38 -0400 Subject: [PATCH 22/33] only want JSON names for now --- lib/codecs/src/encoding/format/otlp.rs | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 7d2d9148ae5f5..103eefb6a36e7 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -2,8 +2,7 @@ use crate::encoding::ProtobufSerializer; use bytes::BytesMut; use opentelemetry_proto::proto::{ DESCRIPTOR_BYTES, LOGS_REQUEST_MESSAGE_TYPE, 
METRICS_REQUEST_MESSAGE_TYPE, - RESOURCE_LOGS_JSON_FIELD, RESOURCE_LOGS_ROOT_FIELD, RESOURCE_METRICS_JSON_FIELD, - RESOURCE_METRICS_ROOT_FIELD, RESOURCE_SPANS_JSON_FIELD, RESOURCE_SPANS_ROOT_FIELD, + RESOURCE_LOGS_JSON_FIELD, RESOURCE_METRICS_JSON_FIELD, RESOURCE_SPANS_JSON_FIELD, TRACES_REQUEST_MESSAGE_TYPE, }; use tokio_util::codec::Encoder; @@ -99,33 +98,27 @@ impl Encoder for OtlpSerializer { fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { // Determine which descriptor to use based on top-level OTLP fields // This handles events that were decoded with use_otlp_decoding enabled - // We check both protobuf field names (snake_case) and JSON field names (camelCase) - // because the deserializer may use either depending on the use_json_names option + // The deserializer uses use_json_names: true, so fields are in camelCase match &event { Event::Log(log) => { - if log.contains(RESOURCE_LOGS_ROOT_FIELD) || log.contains(RESOURCE_LOGS_JSON_FIELD) - { + if log.contains(RESOURCE_LOGS_JSON_FIELD) { self.logs_descriptor.encode(event, buffer) - } else if log.contains(RESOURCE_METRICS_ROOT_FIELD) - || log.contains(RESOURCE_METRICS_JSON_FIELD) - { + } else if log.contains(RESOURCE_METRICS_JSON_FIELD) { // Currently the OTLP metrics are Vector logs (not metrics). 
self.metrics_descriptor.encode(event, buffer) } else { Err(format!( - "Log event does not contain OTLP top-level fields ({RESOURCE_LOGS_ROOT_FIELD}/{RESOURCE_LOGS_JSON_FIELD} or {RESOURCE_METRICS_ROOT_FIELD}/{RESOURCE_METRICS_JSON_FIELD})", + "Log event does not contain OTLP top-level fields ({RESOURCE_LOGS_JSON_FIELD} or {RESOURCE_METRICS_JSON_FIELD})", ) .into()) } } Event::Trace(trace) => { - if trace.contains(RESOURCE_SPANS_ROOT_FIELD) - || trace.contains(RESOURCE_SPANS_JSON_FIELD) - { + if trace.contains(RESOURCE_SPANS_JSON_FIELD) { self.traces_descriptor.encode(event, buffer) } else { Err(format!( - "Trace event does not contain OTLP top-level field ({RESOURCE_SPANS_ROOT_FIELD}/{RESOURCE_SPANS_JSON_FIELD})", + "Trace event does not contain OTLP top-level field ({RESOURCE_SPANS_JSON_FIELD})", ) .into()) } From 0ec66c08469b639e72352c13a613955eecff0f68 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 2025 16:40:24 -0400 Subject: [PATCH 23/33] address some review points --- lib/codecs/src/encoding/format/otlp.rs | 15 ++++++++------- lib/opentelemetry-proto/src/proto.rs | 4 ---- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/lib/codecs/src/encoding/format/otlp.rs b/lib/codecs/src/encoding/format/otlp.rs index 103eefb6a36e7..fd0ffffe47b04 100644 --- a/lib/codecs/src/encoding/format/otlp.rs +++ b/lib/codecs/src/encoding/format/otlp.rs @@ -25,7 +25,7 @@ impl OtlpSerializerConfig { /// The data type of events that are accepted by `OtlpSerializer`. pub fn input_type(&self) -> DataType { - DataType::Log | DataType::Metric | DataType::Trace + DataType::Log | DataType::Trace } /// The schema required by the serializer. 
@@ -42,13 +42,14 @@ impl OtlpSerializerConfig { /// /// # Implementation approach /// -/// This serializer converts Vector's internal event representation to the appropriate OTLP message type: -/// - `Event::Log` → `ExportLogsServiceRequest` -/// - `Event::Metric` → `ExportMetricsServiceRequest` -/// - `Event::Trace` → `ExportTraceServiceRequest` +/// This serializer converts Vector's internal event representation to the appropriate OTLP message type +/// based on the top-level field in the event: +/// - `resourceLogs` → `ExportLogsServiceRequest` +/// - `resourceMetrics` → `ExportMetricsServiceRequest` +/// - `resourceSpans` → `ExportTraceServiceRequest` /// -/// The implementation should be the inverse of what the `opentelemetry` source does when -/// `use_otlp_decoding` is enabled, ensuring round-trip compatibility. +/// The implementation is the inverse of what the `opentelemetry` source does when decoding, +/// ensuring round-trip compatibility. #[derive(Debug, Clone)] #[allow(dead_code)] // Fields will be used once encoding is implemented pub struct OtlpSerializer { diff --git a/lib/opentelemetry-proto/src/proto.rs b/lib/opentelemetry-proto/src/proto.rs index d15879c59e065..8b9fd9f460c90 100644 --- a/lib/opentelemetry-proto/src/proto.rs +++ b/lib/opentelemetry-proto/src/proto.rs @@ -5,10 +5,6 @@ pub const TRACES_REQUEST_MESSAGE_TYPE: &str = pub const METRICS_REQUEST_MESSAGE_TYPE: &str = "opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest"; -pub const RESOURCE_LOGS_ROOT_FIELD: &str = "resource_logs"; -pub const RESOURCE_METRICS_ROOT_FIELD: &str = "resource_metrics"; -pub const RESOURCE_SPANS_ROOT_FIELD: &str = "resource_spans"; - // JSON names (camelCase) for the same fields, used when use_json_names is enabled pub const RESOURCE_LOGS_JSON_FIELD: &str = "resourceLogs"; pub const RESOURCE_METRICS_JSON_FIELD: &str = "resourceMetrics"; From 628b7e62f865a50be58f072568849631543e0231 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 6 Oct 
2025 16:49:59 -0400 Subject: [PATCH 24/33] review feedback - feature gate - optional --- Cargo.toml | 3 ++- lib/codecs/Cargo.toml | 3 ++- lib/vector-lib/Cargo.toml | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 22ba045541fcc..9aba8959a3d6b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -577,6 +577,7 @@ enrichment-tables-memory = ["dep:evmap", "dep:evmap-derive", "dep:thread_local"] # Codecs codecs-syslog = ["vector-lib/syslog"] +codecs-opentelemetry = ["vector-lib/opentelemetry"] # Secrets secrets = ["secrets-aws-secrets-manager"] @@ -855,7 +856,7 @@ sinks-mqtt = ["dep:rumqttc"] sinks-nats = ["dep:async-nats", "dep:nkeys"] sinks-new_relic_logs = ["sinks-http"] sinks-new_relic = [] -sinks-opentelemetry = ["sinks-http"] +sinks-opentelemetry = ["sinks-http", "codecs-opentelemetry"] sinks-papertrail = ["dep:syslog"] sinks-prometheus = ["dep:base64", "dep:prost", "vector-lib/prometheus"] sinks-postgres = ["dep:sqlx"] diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml index d3632ad17b48d..8e4ce532c54b6 100644 --- a/lib/codecs/Cargo.toml +++ b/lib/codecs/Cargo.toml @@ -20,7 +20,7 @@ flate2.workspace = true influxdb-line-protocol = { version = "2", default-features = false } lookup = { package = "vector-lookup", path = "../vector-lookup", default-features = false, features = ["test"] } memchr = { version = "2", default-features = false } -opentelemetry-proto = { path = "../opentelemetry-proto" } +opentelemetry-proto = { path = "../opentelemetry-proto", optional = true } ordered-float.workspace = true prost.workspace = true prost-reflect.workspace = true @@ -54,3 +54,4 @@ vrl.workspace = true [features] syslog = ["dep:syslog_loose"] +opentelemetry = ["dep:opentelemetry-proto"] diff --git a/lib/vector-lib/Cargo.toml b/lib/vector-lib/Cargo.toml index 89ae3bed7547d..394af42114d71 100644 --- a/lib/vector-lib/Cargo.toml +++ b/lib/vector-lib/Cargo.toml @@ -27,7 +27,7 @@ api = ["vector-tap/api"] api-client = 
["dep:vector-api-client"] lua = ["vector-core/lua"] file-source = ["dep:file-source", "dep:file-source-common"] -opentelemetry = ["dep:opentelemetry-proto"] +opentelemetry = ["dep:opentelemetry-proto", "codecs/opentelemetry"] prometheus = ["dep:prometheus-parser"] proptest = ["vector-lookup/proptest", "vrl/proptest"] syslog = ["codecs/syslog"] From ecbe9ed38928974fcd54d1190e7949b829b88134 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 09:37:27 -0400 Subject: [PATCH 25/33] more feature gates to fix failing checks --- Cargo.lock | 4 ++-- lib/codecs/src/encoding/format/mod.rs | 2 ++ lib/codecs/src/encoding/mod.rs | 24 ++++++++++++++++++++---- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f0d10d0279ed5..f5b9f0e938dd7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8062,7 +8062,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck 0.5.0", - "itertools 0.10.5", + "itertools 0.11.0", "log", "multimap", "once_cell", @@ -8108,7 +8108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.11.0", "proc-macro2 1.0.101", "quote 1.0.40", "syn 2.0.106", diff --git a/lib/codecs/src/encoding/format/mod.rs b/lib/codecs/src/encoding/format/mod.rs index dd2201917e024..9377cdca5d906 100644 --- a/lib/codecs/src/encoding/format/mod.rs +++ b/lib/codecs/src/encoding/format/mod.rs @@ -12,6 +12,7 @@ mod json; mod logfmt; mod native; mod native_json; +#[cfg(feature = "opentelemetry")] mod otlp; mod protobuf; mod raw_message; @@ -27,6 +28,7 @@ pub use json::{JsonSerializer, JsonSerializerConfig, JsonSerializerOptions}; pub use logfmt::{LogfmtSerializer, LogfmtSerializerConfig}; pub use native::{NativeSerializer, NativeSerializerConfig}; pub use 
native_json::{NativeJsonSerializer, NativeJsonSerializerConfig}; +#[cfg(feature = "opentelemetry")] pub use otlp::{OtlpSerializer, OtlpSerializerConfig}; pub use protobuf::{ProtobufSerializer, ProtobufSerializerConfig, ProtobufSerializerOptions}; pub use raw_message::{RawMessageSerializer, RawMessageSerializerConfig}; diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs index a5bad14d8298f..b292600a947f9 100644 --- a/lib/codecs/src/encoding/mod.rs +++ b/lib/codecs/src/encoding/mod.rs @@ -14,10 +14,12 @@ pub use format::{ CefSerializerConfig, CsvSerializer, CsvSerializerConfig, GelfSerializer, GelfSerializerConfig, JsonSerializer, JsonSerializerConfig, JsonSerializerOptions, LogfmtSerializer, LogfmtSerializerConfig, NativeJsonSerializer, NativeJsonSerializerConfig, NativeSerializer, - NativeSerializerConfig, OtlpSerializer, OtlpSerializerConfig, ProtobufSerializer, + NativeSerializerConfig, ProtobufSerializer, ProtobufSerializerConfig, ProtobufSerializerOptions, RawMessageSerializer, RawMessageSerializerConfig, TextSerializer, TextSerializerConfig, }; +#[cfg(feature = "opentelemetry")] +pub use format::{OtlpSerializer, OtlpSerializerConfig}; pub use framing::{ BoxedFramer, BoxedFramingError, BytesEncoder, BytesEncoderConfig, CharacterDelimitedEncoder, CharacterDelimitedEncoderConfig, CharacterDelimitedEncoderOptions, LengthDelimitedEncoder, @@ -275,6 +277,7 @@ pub enum SerializerConfig { /// `content-type: application/x-protobuf`. /// /// [otlp]: https://opentelemetry.io/docs/specs/otlp/ + #[cfg(feature = "opentelemetry")] Otlp, /// Encodes an event as a [Protobuf][protobuf] message. 
@@ -356,6 +359,7 @@ impl From for SerializerConfig { } } +#[cfg(feature = "opentelemetry")] impl From for SerializerConfig { fn from(_: OtlpSerializerConfig) -> Self { Self::Otlp @@ -396,6 +400,7 @@ impl SerializerConfig { SerializerConfig::NativeJson => { Ok(Serializer::NativeJson(NativeJsonSerializerConfig.build())) } + #[cfg(feature = "opentelemetry")] SerializerConfig::Otlp => { Ok(Serializer::Otlp(OtlpSerializerConfig::default().build()?)) } @@ -424,7 +429,9 @@ impl SerializerConfig { SerializerConfig::Avro { .. } | SerializerConfig::Native => { FramingConfig::LengthDelimited(LengthDelimitedEncoderConfig::default()) } - SerializerConfig::Otlp | SerializerConfig::Protobuf(_) => { + #[cfg(feature = "opentelemetry")] + SerializerConfig::Otlp => FramingConfig::Bytes, + SerializerConfig::Protobuf(_) => { FramingConfig::VarintLengthDelimited(VarintLengthDelimitedEncoderConfig::default()) } SerializerConfig::Cef(_) @@ -453,6 +460,7 @@ impl SerializerConfig { SerializerConfig::Logfmt => LogfmtSerializerConfig.input_type(), SerializerConfig::Native => NativeSerializerConfig.input_type(), SerializerConfig::NativeJson => NativeJsonSerializerConfig.input_type(), + #[cfg(feature = "opentelemetry")] SerializerConfig::Otlp => OtlpSerializerConfig::default().input_type(), SerializerConfig::Protobuf(config) => config.input_type(), SerializerConfig::RawMessage => RawMessageSerializerConfig.input_type(), @@ -473,6 +481,7 @@ impl SerializerConfig { SerializerConfig::Logfmt => LogfmtSerializerConfig.schema_requirement(), SerializerConfig::Native => NativeSerializerConfig.schema_requirement(), SerializerConfig::NativeJson => NativeJsonSerializerConfig.schema_requirement(), + #[cfg(feature = "opentelemetry")] SerializerConfig::Otlp => OtlpSerializerConfig::default().schema_requirement(), SerializerConfig::Protobuf(config) => config.schema_requirement(), SerializerConfig::RawMessage => RawMessageSerializerConfig.schema_requirement(), @@ -501,6 +510,7 @@ pub enum Serializer { /// Uses 
a `NativeJsonSerializer` for serialization. NativeJson(NativeJsonSerializer), /// Uses an `OtlpSerializer` for serialization. + #[cfg(feature = "opentelemetry")] Otlp(OtlpSerializer), /// Uses a `ProtobufSerializer` for serialization. Protobuf(ProtobufSerializer), @@ -521,9 +531,10 @@ impl Serializer { | Serializer::Logfmt(_) | Serializer::Text(_) | Serializer::Native(_) - | Serializer::Otlp(_) | Serializer::Protobuf(_) | Serializer::RawMessage(_) => false, + #[cfg(feature = "opentelemetry")] + Serializer::Otlp(_) => false, } } @@ -544,11 +555,14 @@ impl Serializer { | Serializer::Logfmt(_) | Serializer::Text(_) | Serializer::Native(_) - | Serializer::Otlp(_) | Serializer::Protobuf(_) | Serializer::RawMessage(_) => { panic!("Serializer does not support JSON") } + #[cfg(feature = "opentelemetry")] + Serializer::Otlp(_) => { + panic!("Serializer does not support JSON") + } } } @@ -609,6 +623,7 @@ impl From for Serializer { } } +#[cfg(feature = "opentelemetry")] impl From for Serializer { fn from(serializer: OtlpSerializer) -> Self { Self::Otlp(serializer) @@ -646,6 +661,7 @@ impl tokio_util::codec::Encoder for Serializer { Serializer::Logfmt(serializer) => serializer.encode(event, buffer), Serializer::Native(serializer) => serializer.encode(event, buffer), Serializer::NativeJson(serializer) => serializer.encode(event, buffer), + #[cfg(feature = "opentelemetry")] Serializer::Otlp(serializer) => serializer.encode(event, buffer), Serializer::Protobuf(serializer) => serializer.encode(event, buffer), Serializer::RawMessage(serializer) => serializer.encode(event, buffer), From 3872d833bd4388c70d7ecc4baddb3570bd20ce40 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 13:54:51 +0000 Subject: [PATCH 26/33] fmt linux --- lib/codecs/src/encoding/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs index b292600a947f9..21b634602d302 100644 --- 
a/lib/codecs/src/encoding/mod.rs +++ b/lib/codecs/src/encoding/mod.rs @@ -14,9 +14,9 @@ pub use format::{ CefSerializerConfig, CsvSerializer, CsvSerializerConfig, GelfSerializer, GelfSerializerConfig, JsonSerializer, JsonSerializerConfig, JsonSerializerOptions, LogfmtSerializer, LogfmtSerializerConfig, NativeJsonSerializer, NativeJsonSerializerConfig, NativeSerializer, - NativeSerializerConfig, ProtobufSerializer, - ProtobufSerializerConfig, ProtobufSerializerOptions, RawMessageSerializer, - RawMessageSerializerConfig, TextSerializer, TextSerializerConfig, + NativeSerializerConfig, ProtobufSerializer, ProtobufSerializerConfig, + ProtobufSerializerOptions, RawMessageSerializer, RawMessageSerializerConfig, TextSerializer, + TextSerializerConfig, }; #[cfg(feature = "opentelemetry")] pub use format::{OtlpSerializer, OtlpSerializerConfig}; From 8585288d0a3f308bb646a6aca6f1a18d45e125a7 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 11:39:02 -0400 Subject: [PATCH 27/33] add feature checks --- src/codecs/encoding/config.rs | 6 +++++- src/codecs/encoding/encoder.rs | 1 + src/components/validation/resources/mod.rs | 1 + src/sinks/http/config.rs | 1 + 4 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs index 66a4bd7bfc657..a04f44315047a 100644 --- a/src/codecs/encoding/config.rs +++ b/src/codecs/encoding/config.rs @@ -1,12 +1,15 @@ use crate::codecs::Transformer; use vector_lib::{ codecs::{ - BytesEncoder, CharacterDelimitedEncoder, LengthDelimitedEncoder, NewlineDelimitedEncoder, + CharacterDelimitedEncoder, LengthDelimitedEncoder, NewlineDelimitedEncoder, encoding::{Framer, FramingConfig, Serializer, SerializerConfig}, }, configurable::configurable_component, }; +#[cfg(feature = "codecs-opentelemetry")] +use vector_lib::codecs::BytesEncoder; + /// Encoding configuration. 
#[configurable_component] #[derive(Clone, Debug)] @@ -129,6 +132,7 @@ impl EncodingConfigWithFraming { | Serializer::RawMessage(_) | Serializer::Text(_), ) => NewlineDelimitedEncoder::default().into(), + #[cfg(feature = "codecs-opentelemetry")] (None, Serializer::Otlp(_)) => BytesEncoder.into(), }; diff --git a/src/codecs/encoding/encoder.rs b/src/codecs/encoding/encoder.rs index 793a876ae4fc1..f1d0741bb669c 100644 --- a/src/codecs/encoding/encoder.rs +++ b/src/codecs/encoding/encoder.rs @@ -128,6 +128,7 @@ impl Encoder { | Serializer::Text(_), _, ) => "text/plain", + #[cfg(feature = "codecs-opentelemetry")] (Serializer::Otlp(_), _) => "application/x-protobuf", } } diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs index bfca76ad517f2..a79100791f6c1 100644 --- a/src/components/validation/resources/mod.rs +++ b/src/components/validation/resources/mod.rs @@ -233,6 +233,7 @@ fn serializer_config_to_deserializer( }) } SerializerConfig::RawMessage | SerializerConfig::Text(_) => DeserializerConfig::Bytes, + #[cfg(feature = "codecs-opentelemetry")] SerializerConfig::Otlp => todo!(), }; diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index fa3204d2a973d..2c9f257fae5e1 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -270,6 +270,7 @@ impl SinkConfig for HttpSinkConfig { (Json(_), CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' })) => { Some(CONTENT_TYPE_JSON.to_owned()) } + #[cfg(feature = "codecs-opentelemetry")] (Otlp(_), _) => Some("application/x-protobuf".to_owned()), _ => None, } From 4741b4a9848b00d1fb8528b40b73507435921a87 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 13:20:00 -0400 Subject: [PATCH 28/33] add codecs-opentelemetry to e2e-tests-opentelemetry --- Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 9aba8959a3d6b..55b2f63ddad29 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ 
-1005,7 +1005,8 @@ e2e-tests-opentelemetry = [ "sources-internal_metrics", "transforms-remap", "sinks-console", - "sinks-file" + "sinks-file", + "codecs-opentelemetry", ] vector-api-tests = [ From 5daa0592fc7a4b2782e023859a2cf49a88bb1caa Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 13:48:24 -0400 Subject: [PATCH 29/33] also add to source-opentelemetry --- Cargo.toml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 55b2f63ddad29..99eb681d45fa1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -669,7 +669,17 @@ sources-mqtt = ["dep:rumqttc"] sources-nats = ["dep:async-nats", "dep:nkeys"] sources-nginx_metrics = ["dep:nom"] sources-okta = ["sources-utils-http-client"] -sources-opentelemetry = ["dep:hex", "vector-lib/opentelemetry", "dep:prost", "dep:prost-types", "sources-http_server", "sources-utils-http", "sources-utils-http-headers", "sources-vector"] +sources-opentelemetry = [ + "dep:hex", + "codecs-opentelemetry", + "vector-lib/opentelemetry", + "dep:prost", + "dep:prost-types", + "sources-http_server", + "sources-utils-http", + "sources-utils-http-headers", + "sources-vector", +] sources-postgresql_metrics = ["dep:postgres-openssl", "dep:tokio-postgres"] sources-prometheus = ["sources-prometheus-scrape", "sources-prometheus-remote-write", "sources-prometheus-pushgateway"] sources-prometheus-scrape = ["sinks-prometheus", "sources-utils-http-client", "vector-lib/prometheus"] From 93a7d8af70e9dc730b60a07591b69cb47e93cf4e Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 15:58:18 -0400 Subject: [PATCH 30/33] one more feature gate --- src/sinks/websocket/sink.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/sinks/websocket/sink.rs b/src/sinks/websocket/sink.rs index e26c0d6c34265..e8d6dfa28b4e8 100644 --- a/src/sinks/websocket/sink.rs +++ b/src/sinks/websocket/sink.rs @@ -20,13 +20,15 @@ use tokio_tungstenite::tungstenite::{error::Error as 
TungsteniteError, protocol: use tokio_util::codec::Encoder as _; use vector_lib::{ EstimatedJsonEncodedSizeOf, - codecs::encoding::Serializer::Otlp, emit, internal_event::{ ByteSize, BytesSent, CountByteSize, EventsSent, InternalEventHandle as _, Output, Protocol, }, }; +#[cfg(feature = "codecs-opentelemetry")] +use vector_lib::codecs::encoding::Serializer::Otlp; + pub struct WebSocketSink { transformer: Transformer, encoder: Encoder<()>, @@ -82,7 +84,9 @@ impl WebSocketSink { }; match self.encoder.serializer() { - RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) | Otlp(_) => true, + RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) => true, + #[cfg(feature = "codecs-opentelemetry")] + Otlp(_) => true, Cef(_) | Csv(_) | Logfmt(_) | Gelf(_) | Json(_) | Text(_) | NativeJson(_) => false, } } From 903e3f012456e3068cbd031aa104a9ac46acd647 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Tue, 7 Oct 2025 20:21:52 +0000 Subject: [PATCH 31/33] fmt linux --- src/sinks/websocket/sink.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/sinks/websocket/sink.rs b/src/sinks/websocket/sink.rs index e8d6dfa28b4e8..4127ec886f3dd 100644 --- a/src/sinks/websocket/sink.rs +++ b/src/sinks/websocket/sink.rs @@ -19,8 +19,7 @@ use futures::{Sink, Stream, StreamExt, pin_mut, sink::SinkExt, stream::BoxStream use tokio_tungstenite::tungstenite::{error::Error as TungsteniteError, protocol::Message}; use tokio_util::codec::Encoder as _; use vector_lib::{ - EstimatedJsonEncodedSizeOf, - emit, + EstimatedJsonEncodedSizeOf, emit, internal_event::{ ByteSize, BytesSent, CountByteSize, EventsSent, InternalEventHandle as _, Output, Protocol, }, From 0891c845e50b24f1c67623f8cfc724c84cbaece8 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Wed, 8 Oct 2025 12:05:06 -0400 Subject: [PATCH 32/33] more feature gates --- src/sinks/websocket_server/sink.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/sinks/websocket_server/sink.rs 
b/src/sinks/websocket_server/sink.rs index 04e38c8defa84..b37efcbd341e2 100644 --- a/src/sinks/websocket_server/sink.rs +++ b/src/sinks/websocket_server/sink.rs @@ -42,7 +42,6 @@ use url::Url; use uuid::Uuid; use vector_lib::{ EstimatedJsonEncodedSizeOf, - codecs::encoding::Serializer::Otlp, event::{Event, EventStatus}, finalization::Finalizable, internal_event::{ @@ -51,6 +50,8 @@ use vector_lib::{ sink::StreamSink, tls::{MaybeTlsIncomingStream, MaybeTlsListener, MaybeTlsSettings}, }; +#[cfg(feature = "codecs-opentelemetry")] +use vector_lib::codecs::encoding::Serializer::Otlp; pub struct WebSocketListenerSink { tls: MaybeTlsSettings, @@ -92,7 +93,9 @@ impl WebSocketListenerSink { }; match self.encoder.serializer() { - RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) | Otlp(_) => true, + RawMessage(_) | Avro(_) | Native(_) | Protobuf(_) => true, + #[cfg(feature = "codecs-opentelemetry")] + Otlp(_) => true, Cef(_) | Csv(_) | Logfmt(_) | Gelf(_) | Json(_) | Text(_) | NativeJson(_) => false, } } From 80a090627df87db47900711349c80e57e92f54e6 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Wed, 8 Oct 2025 16:25:50 +0000 Subject: [PATCH 33/33] fmt linux --- src/sinks/websocket_server/sink.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sinks/websocket_server/sink.rs b/src/sinks/websocket_server/sink.rs index b37efcbd341e2..978e407be2e5e 100644 --- a/src/sinks/websocket_server/sink.rs +++ b/src/sinks/websocket_server/sink.rs @@ -40,6 +40,8 @@ use tokio_util::codec::Encoder as _; use tracing::Instrument; use url::Url; use uuid::Uuid; +#[cfg(feature = "codecs-opentelemetry")] +use vector_lib::codecs::encoding::Serializer::Otlp; use vector_lib::{ EstimatedJsonEncodedSizeOf, event::{Event, EventStatus}, @@ -50,8 +52,6 @@ use vector_lib::{ sink::StreamSink, tls::{MaybeTlsIncomingStream, MaybeTlsListener, MaybeTlsSettings}, }; -#[cfg(feature = "codecs-opentelemetry")] -use vector_lib::codecs::encoding::Serializer::Otlp; pub struct 
WebSocketListenerSink { tls: MaybeTlsSettings,