Merge branch 'master' into fix/perf/extract-ip-domain-tags-ii
gggritso committed Apr 15, 2024
2 parents 7319c0d + be342fb commit e1fb9f1
Showing 13 changed files with 122 additions and 357 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/deploy.yml
@@ -42,7 +42,7 @@ jobs:

- name: Deploy
if: github.ref == 'refs/heads/master'
-uses: peaceiris/actions-gh-pages@v3
+uses: peaceiris/actions-gh-pages@v4
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: target/doc
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -10,6 +10,7 @@
**Features**:

- **Breaking change:** Kafka topic configuration keys now support the default topic name. The previous aliases `metrics` and `metrics_transactions` are no longer supported if configuring topics manually. Use `ingest-metrics` or `metrics_sessions` instead of `metrics`, and `ingest-performance-metrics` or `metrics_generic` instead of `metrics_transactions`. ([#3361](https://github.com/getsentry/relay/pull/3361))
+- **Breaking change:** Remove `ShardedProducer` and related code. The sharded configuration for Kafka is no longer supported. ([#3415](https://github.com/getsentry/relay/pull/3415))
- Add support for continuous profiling. ([#3270](https://github.com/getsentry/relay/pull/3270))
- Add support for Reporting API for CSP reports ([#3277](https://github.com/getsentry/relay/pull/3277))
- Extract op and description while converting opentelemetry spans to sentry spans. ([#3287](https://github.com/getsentry/relay/pull/3287))
@@ -24,6 +25,7 @@
- Add `cardinality_limited` outcome with id `6`. ([#3389](https://github.com/getsentry/relay/pull/3389))
- Extract `cache.item_size` and `cache.hit` metrics. ([#3371](https://github.com/getsentry/relay/pull/3371))
- Optionally convert segment spans to transactions for compatibility. ([#3375](https://github.com/getsentry/relay/pull/3375))
+- Add feature flag for replay video event types. ([#3402](https://github.com/getsentry/relay/pull/3402))
- Extract scrubbed IP addresses into the `span.domain` tag. ([#3383](https://github.com/getsentry/relay/pull/3383))

**Internal**:
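For anyone updating a manually configured Kafka topic mapping after the breaking changes listed under **Features**, a minimal sketch of the new scheme. The config path and topic names here are assumptions for illustration; the keys are the default topic names per [#3361](https://github.com/getsentry/relay/pull/3361):

```yaml
# Hypothetical Relay config excerpt: topics are keyed by their default
# topic names. The old aliases `metrics` and `metrics_transactions`
# are no longer accepted when configuring topics manually.
processing:
  topics:
    ingest-metrics: "custom-metrics-topic"                # was keyed as `metrics`
    ingest-performance-metrics: "custom-generic-metrics"  # was `metrics_transactions`
```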
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion py/CHANGELOG.md
@@ -1,6 +1,6 @@
# Changelog

-## Unreleased
+## 0.8.57

- Add a data category for metric hours. [#3384](https://github.com/getsentry/relay/pull/3384)

2 changes: 1 addition & 1 deletion relay-cabi/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "relay-cabi"
version = "0.8.56"
version = "0.5.87"
authors = ["Sentry <[email protected]>"]
homepage = "https://getsentry.github.io/relay/"
repository = "https://github.com/getsentry/relay"
5 changes: 5 additions & 0 deletions relay-dynamic-config/src/feature.rs
@@ -10,6 +10,11 @@ pub enum Feature {
/// Serialized as `organizations:session-replay`.
#[serde(rename = "organizations:session-replay")]
SessionReplay,
+/// Enables ingestion of video-based Session Replays.
+///
+/// Serialized as `organizations:session-replay-video`.
+#[serde(rename = "organizations:session-replay-video")]
+SessionReplayVideo,
/// Enables data scrubbing of replay recording payloads.
///
/// Serialized as `organizations:session-replay-recording-scrubbing`.
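As a quick illustration of how the new flag round-trips through serde, a self-contained sketch. Only two variants are shown, and the `serde`/`serde_json` dependencies are assumed; the real `Feature` enum in `relay-dynamic-config` has many more variants:

```rust
use serde::{Deserialize, Serialize};

// Trimmed-down stand-in for relay-dynamic-config's `Feature` enum.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum Feature {
    /// Serialized as `organizations:session-replay`.
    #[serde(rename = "organizations:session-replay")]
    SessionReplay,
    /// Serialized as `organizations:session-replay-video`.
    #[serde(rename = "organizations:session-replay-video")]
    SessionReplayVideo,
}

fn main() {
    // The rename attribute maps the wire string to the new variant...
    let flag: Feature =
        serde_json::from_str("\"organizations:session-replay-video\"").unwrap();
    assert_eq!(flag, Feature::SessionReplayVideo);
    // ...and back again when serializing.
    assert_eq!(
        serde_json::to_string(&flag).unwrap(),
        "\"organizations:session-replay-video\""
    );
}
```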
146 changes: 32 additions & 114 deletions relay-kafka/src/config.rs
@@ -3,8 +3,6 @@
//! The configuration can be either:
//! - [`TopicAssignment::Primary`] - the main and default kafka configuration,
//! - [`TopicAssignment::Secondary`] - used to configure any additional kafka topic,
-//! - [`TopicAssignment::Sharded`] - if we want to configure multiple kafka clusters,
-//! we can create a mapping of the range of logical shards to the kafka configuration.

use std::collections::BTreeMap;

@@ -160,9 +158,6 @@ pub enum TopicAssignment {
/// `secondary_kafka_configs`. In this case that custom kafka config will be used to produce
/// data to the given topic name.
Secondary(KafkaTopicConfig),
-/// If we want to configure multiple kafka clusters, we can create a mapping of the
-/// range of logical shards to the kafka configuration.
-Sharded(Sharded),
}
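With the `Sharded` variant gone, an assignment deserializes from either a bare topic name (`Primary`) or a `name`/`config` pair (`Secondary`). A minimal sketch with hypothetical names; the `config` value must be a key defined under `secondary_kafka_configs`:

```yaml
events: "ingest-events-custom"   # TopicAssignment::Primary
profiles:                        # TopicAssignment::Secondary
  name: "ingest-profiles"
  config: "profiles"             # looked up in `secondary_kafka_configs`
```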

/// Configuration for a topic.
@@ -176,37 +171,6 @@ pub struct KafkaTopicConfig {
kafka_config_name: String,
}

-/// Configuration for logical shards -> kafka configuration mapping.
-///
-/// The configuration for this should look like:
-///
-/// ```ignore
-/// metrics:
-///     shards: 65000
-///     mapping:
-///         0:
-///             name: "ingest-metrics-1"
-///             config: "metrics_1"
-///         25000:
-///             name: "ingest-metrics-2"
-///             config: "metrics_2"
-///         45000:
-///             name: "ingest-metrics-3"
-///             config: "metrics_3"
-/// ```
-///
-/// where the `shards` defines how many logical shards must be created, and `mapping`
-/// describes the per-shard configuration. Index in the `mapping` is the initial inclusive
-/// index of the shard and the range is last till the next index or the maximum shard defined in
-/// the `shards` option. The first index must always start with 0.
-#[derive(Serialize, Deserialize, Debug)]
-pub struct Sharded {
-    /// The number of shards used for this topic.
-    shards: u64,
-    /// The Kafka configuration assigned to the specific shard range.
-    mapping: BTreeMap<u64, KafkaTopicConfig>,
-}
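The deleted doc comment above describes a lookup rule: a logical shard belongs to the mapping entry with the greatest starting index that does not exceed it. That rule is exactly what a `BTreeMap` range query expresses. A standalone sketch of the rule only, not the removed implementation (which resolved `KafkaParams` rather than strings):

```rust
use std::collections::BTreeMap;

/// Resolve a logical shard to the config whose starting index is the
/// greatest key less than or equal to `shard`.
fn config_for_shard(mapping: &BTreeMap<u64, &'static str>, shard: u64) -> Option<&'static str> {
    mapping.range(..=shard).next_back().map(|(_, config)| *config)
}

fn main() {
    // The mapping from the removed doc example: 65000 shards total.
    let mapping = BTreeMap::from([
        (0u64, "metrics_1"),
        (25_000, "metrics_2"),
        (45_000, "metrics_3"),
    ]);
    assert_eq!(config_for_shard(&mapping, 10), Some("metrics_1")); // 0..25000
    assert_eq!(config_for_shard(&mapping, 44_999), Some("metrics_2")); // 25000..45000
    assert_eq!(config_for_shard(&mapping, 64_999), Some("metrics_3")); // 45000..65000
}
```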

/// Describes the Kafka config, with all parameters extracted, that will be used to create the
/// Kafka producer.
#[derive(Debug)]
@@ -216,14 +180,6 @@ pub enum KafkaConfig<'a> {
/// Kafka parameters to create the kafka producer.
params: KafkaParams<'a>,
},

-/// The list of the Kafka configs with related shard configs.
-Sharded {
-/// The maximum number of logical shards for this set of configs.
-shards: u64,
-/// The list of the sharded Kafka configs.
-configs: BTreeMap<u64, KafkaParams<'a>>,
-},
}

/// Sharded Kafka config.
@@ -273,27 +229,6 @@ impl TopicAssignment {
.ok_or(ConfigError::UnknownKafkaConfigName)?,
},
},
-Self::Sharded(Sharded { shards, mapping }) => {
-// quick fail if the config does not contain shard 0
-if !mapping.contains_key(&0) {
-return Err(ConfigError::InvalidShard);
-}
-let mut kafka_params = BTreeMap::new();
-for (shard, kafka_config) in mapping {
-let config = KafkaParams {
-topic_name: kafka_config.topic_name.as_str(),
-config_name: Some(kafka_config.kafka_config_name.as_str()),
-params: secondary_configs
-.get(kafka_config.kafka_config_name.as_str())
-.ok_or(ConfigError::UnknownKafkaConfigName)?,
-};
-kafka_params.insert(*shard, config);
-}
-KafkaConfig::Sharded {
-shards: *shards,
-configs: kafka_params,
-}
-}
};

Ok(kafka_config)
@@ -321,18 +256,7 @@ ingest-events: "ingest-events-kafka-topic"
profiles:
name: "ingest-profiles"
config: "profiles"
-ingest-metrics:
-    shards: 65000
-    mapping:
-        0:
-            name: "ingest-metrics-1"
-            config: "metrics_1"
-        25000:
-            name: "ingest-metrics-2"
-            config: "metrics_2"
-        45000:
-            name: "ingest-metrics-3"
-            config: "metrics_3"
+ingest-metrics: "ingest-metrics-3"
transactions: "ingest-transactions-kafka-topic"
"#;

@@ -348,41 +272,17 @@ transactions: "ingest-transactions-kafka-topic"
value: "test-value".to_string(),
}],
);
-second_config.insert(
-"metrics_1".to_string(),
-vec![KafkaConfigParam {
-name: "test".to_string(),
-value: "test-value".to_string(),
-}],
-);
-second_config.insert(
-"metrics_2".to_string(),
-vec![KafkaConfigParam {
-name: "test".to_string(),
-value: "test-value".to_string(),
-}],
-);
-second_config.insert(
-"metrics_3".to_string(),
-vec![KafkaConfigParam {
-name: "test".to_string(),
-value: "test-value".to_string(),
-}],
-);

let topics: TopicAssignments = serde_yaml::from_str(yaml).unwrap();
let events = topics.events;
let profiles = topics.profiles;
-let metrics = topics.metrics_sessions;
+let metrics_sessions = topics.metrics_sessions;
let transactions = topics.transactions;

assert!(matches!(events, TopicAssignment::Primary(_)));
assert!(matches!(profiles, TopicAssignment::Secondary { .. }));
-assert!(matches!(metrics, TopicAssignment::Sharded { .. }));

-let events_config = metrics
-.kafka_config(&def_config, &second_config)
-.expect("Kafka config for metrics topic");
-assert!(matches!(events_config, KafkaConfig::Sharded { .. }));
+assert!(matches!(metrics_sessions, TopicAssignment::Primary(_)));
+assert!(matches!(transactions, TopicAssignment::Primary(_)));

let events_config = events
.kafka_config(&def_config, &second_config)
@@ -397,6 +297,33 @@
}
));

+let events_config = profiles
+.kafka_config(&def_config, &second_config)
+.expect("Kafka config for profiles topic");
+assert!(matches!(
+events_config,
+KafkaConfig::Single {
+params: KafkaParams {
+topic_name: "ingest-profiles",
+config_name: Some("profiles"),
+..
+}
+}
+));
+
+let events_config = metrics_sessions
+.kafka_config(&def_config, &second_config)
+.expect("Kafka config for metrics topic");
+assert!(matches!(
+events_config,
+KafkaConfig::Single {
+params: KafkaParams {
+topic_name: "ingest-metrics-3",
+..
+}
+}
+));

// Legacy keys are still supported
let transactions_config = transactions
.kafka_config(&def_config, &second_config)
@@ -410,15 +337,6 @@
}
}
));

-let (shards, mapping) =
-if let TopicAssignment::Sharded(Sharded { shards, mapping }) = metrics {
-(shards, mapping)
-} else {
-unreachable!()
-};
-assert_eq!(shards, 65000);
-assert_eq!(3, mapping.len());
}

#[test]
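For deployments that previously sharded a topic across clusters, the replacement is a single assignment per topic, optionally routed to a dedicated cluster via a secondary config, as the updated test fixture shows. A hedged migration sketch, with hypothetical cluster and topic names:

```yaml
# before (no longer parses):
# ingest-metrics:
#   shards: 65000
#   mapping: { ... }

# after: one topic, optionally on a dedicated cluster
ingest-metrics:
  name: "ingest-metrics-3"
  config: "metrics_3"   # defined under `secondary_kafka_configs`
```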