Skip to content

Commit

Permalink
chore: remove unnecessary string hashes
Browse files · Browse the repository at this point in the history
  • Branch information: committed by hamirmahal on Oct 19, 2024
1 parent dc441a2 commit 4720e97
Show file tree
Hide file tree
Showing 25 changed files with 124 additions and 124 deletions.
12 changes: 6 additions & 6 deletions src/aws/auth.rs
Original file line number Diff line number Diff line change
Expand Up @@ -388,8 +388,8 @@ mod tests {
#[test]
fn parsing_default() {
let config = toml::from_str::<ComponentConfig>(
r#"
"#,
r"
",
)
.unwrap();

Expand All @@ -399,9 +399,9 @@ mod tests {
#[test]
fn parsing_default_with_load_timeout() {
let config = toml::from_str::<ComponentConfig>(
r#"
r"
auth.load_timeout_secs = 10
"#,
",
)
.unwrap();

Expand Down Expand Up @@ -435,11 +435,11 @@ mod tests {
#[test]
fn parsing_default_with_imds_client() {
let config = toml::from_str::<ComponentConfig>(
r#"
r"
auth.imds.max_attempts = 5
auth.imds.connect_timeout_seconds = 30
auth.imds.read_timeout_seconds = 10
"#,
",
)
.unwrap();

Expand Down
4 changes: 2 additions & 2 deletions src/aws/region.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ mod tests {

#[test]
fn optional() {
assert!(toml::from_str::<RegionOrEndpoint>(indoc! {r#"
"#})
assert!(toml::from_str::<RegionOrEndpoint>(indoc! {r"
"})
.is_ok());
}

Expand Down
4 changes: 2 additions & 2 deletions src/aws/timeout.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,11 +71,11 @@ mod tests {
#[test]
fn parsing_timeout_configuration() {
let config = toml::from_str::<AwsTimeout>(
r#"
r"
connect_timeout_seconds = 20
operation_timeout_seconds = 20
read_timeout_seconds = 60
"#,
",
)
.unwrap();

Expand Down
8 changes: 4 additions & 4 deletions src/config/loading/secret.rs
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ mod tests {
fn collection() {
let mut keys = HashMap::new();
collect_secret_keys(
indoc! {r#"
indoc! {r"
SECRET[first_backend.secret_key]
SECRET[first_backend.another_secret_key]
SECRET[second_backend.secret_key]
Expand All @@ -208,7 +208,7 @@ mod tests {
SECRET[first_backend...an_extra_secret_key]
SECRET[non_matching_syntax]
SECRET[.non.matching.syntax]
"#},
"},
&mut keys,
);
assert_eq!(keys.len(), 2);
Expand All @@ -232,10 +232,10 @@ mod tests {
fn collection_duplicates() {
let mut keys = HashMap::new();
collect_secret_keys(
indoc! {r#"
indoc! {r"
SECRET[first_backend.secret_key]
SECRET[first_backend.secret_key]
"#},
"},
&mut keys,
);

Expand Down
28 changes: 14 additions & 14 deletions src/config/unit_test/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@ async fn parse_no_input() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
inputs[0]: unable to locate target transform 'foo'"#}
inputs[0]: unable to locate target transform 'foo'"}
.to_owned(),]
);

Expand Down Expand Up @@ -69,9 +69,9 @@ async fn parse_no_input() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
inputs[1]: unable to locate target transform 'foo'"#}
inputs[1]: unable to locate target transform 'foo'"}
.to_owned(),]
);
}
Expand Down Expand Up @@ -102,9 +102,9 @@ async fn parse_no_test_input() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
must specify at least one input."#}
must specify at least one input."}
.to_owned(),]
);
}
Expand Down Expand Up @@ -133,9 +133,9 @@ async fn parse_no_outputs() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
unit test must contain at least one of `outputs` or `no_outputs_from`."#}
unit test must contain at least one of `outputs` or `no_outputs_from`."}
.to_owned(),]
);
}
Expand Down Expand Up @@ -170,9 +170,9 @@ async fn parse_invalid_output_targets() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
Invalid extract_from target in test 'broken test': 'nonexistent' does not exist"#}
Invalid extract_from target in test 'broken test': 'nonexistent' does not exist"}
.to_owned(),]
);

Expand All @@ -197,9 +197,9 @@ async fn parse_invalid_output_targets() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
Invalid no_outputs_from target in test 'broken test': 'nonexistent' does not exist"#}
Invalid no_outputs_from target in test 'broken test': 'nonexistent' does not exist"}
.to_owned(),]
);
}
Expand Down Expand Up @@ -338,9 +338,9 @@ async fn parse_bad_input_event() {
let errs = build_unit_tests(config).await.err().unwrap();
assert_eq!(
errs,
vec![indoc! {r#"
vec![indoc! {r"
Failed to build test 'broken test':
unrecognized input type 'nah', expected one of: 'raw', 'log' or 'metric'"#}
unrecognized input type 'nah', expected one of: 'raw', 'log' or 'metric'"}
.to_owned(),]
);
}
Expand Down
4 changes: 2 additions & 2 deletions src/generate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -667,7 +667,7 @@ mod tests {

assert_eq!(
generate_example(&opts, TransformInputsStrategy::Auto).unwrap(),
indoc::indoc! {r#"
indoc::indoc! {r"
data_dir: /var/lib/vector/
sources:
source0:
Expand Down Expand Up @@ -706,7 +706,7 @@ mod tests {
type: memory
max_events: 500
when_full: block
"#}
"}
);
}

Expand Down
4 changes: 2 additions & 2 deletions src/sinks/influxdb/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -597,8 +597,8 @@ mod tests {

#[test]
fn test_influxdb_settings_missing() {
let config = r#"
"#;
let config = r"
";
let config: InfluxDbTestConfig = toml::from_str(config).unwrap();
let settings = influxdb_settings(config.influxdb1_settings, config.influxdb2_settings);
assert_eq!(
Expand Down
12 changes: 6 additions & 6 deletions src/sinks/prometheus/collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -569,11 +569,11 @@ mod tests {
fn encodes_set_text() {
assert_eq!(
encode_set::<StringCollector>(),
indoc! { r#"
indoc! { r"
# HELP vector_users users
# TYPE vector_users gauge
vector_users 1 1612325106789
"#}
"}
);
}

Expand Down Expand Up @@ -601,11 +601,11 @@ mod tests {
fn encodes_expired_set_text() {
assert_eq!(
encode_expired_set::<StringCollector>(),
indoc! {r#"
indoc! {r"
# HELP vector_users users
# TYPE vector_users gauge
vector_users 0 1612325106789
"#}
"}
);
}

Expand Down Expand Up @@ -873,11 +873,11 @@ mod tests {
fn encodes_timestamp_text() {
assert_eq!(
encode_timestamp::<StringCollector>(),
indoc! {r#"
indoc! {r"
# HELP temperature temperature
# TYPE temperature counter
temperature 2 1612325106789
"#}
"}
);
}

Expand Down
8 changes: 4 additions & 4 deletions src/sinks/prometheus/remote_write/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ async fn retains_state_between_requests() {
// This sink converts all incremental events to absolute, and
// should accumulate their totals between batches.
let outputs = send_request(
r#"batch.max_events = 1"#,
r"batch.max_events = 1",
vec![
create_inc_event("counter-1".into(), 12.0),
create_inc_event("counter-2".into(), 13.0),
Expand All @@ -167,7 +167,7 @@ async fn retains_state_between_requests() {
#[tokio::test]
async fn aggregates_batches() {
let outputs = send_request(
r#"batch.max_events = 3"#,
r"batch.max_events = 3",
vec![
create_inc_event("counter-1".into(), 12.0),
create_inc_event("counter-1".into(), 14.0),
Expand All @@ -194,10 +194,10 @@ async fn aggregates_batches() {
async fn doesnt_aggregate_batches() {
let outputs = send_request(
indoc! {
r#"
r"
batch.max_events = 3
batch.aggregate = false
"#
"
},
vec![
create_inc_event("counter-1".into(), 12.0),
Expand Down
20 changes: 10 additions & 10 deletions src/sinks/util/buffer/compression.rs
Original file line number Diff line number Diff line change
Expand Up @@ -566,48 +566,48 @@ mod test {

let fixtures_invalid = [
(
r#"42"#,
r#"invalid type: integer `42`, expected string or map at line 1 column 2"#,
r"42",
r"invalid type: integer `42`, expected string or map at line 1 column 2",
),
(
r#""b42""#,
r#"invalid value: string "b42", expected "none" or "gzip" or "zlib" or "zstd" at line 1 column 5"#,
),
(
r#"{"algorithm": "b42"}"#,
r#"unknown variant `b42`, expected one of `none`, `gzip`, `zlib`, `zstd`, `snappy` at line 1 column 20"#,
r"unknown variant `b42`, expected one of `none`, `gzip`, `zlib`, `zstd`, `snappy` at line 1 column 20",
),
(
r#"{"algorithm": "none", "level": "default"}"#,
r#"unknown field `level`, there are no fields at line 1 column 41"#,
r"unknown field `level`, there are no fields at line 1 column 41",
),
(
r#"{"algorithm": "gzip", "level": -1}"#,
r#"integer could not be converted to u32: out of range integral type conversion attempted at line 1 column 33"#,
r"integer could not be converted to u32: out of range integral type conversion attempted at line 1 column 33",
),
(
r#"{"algorithm": "gzip", "level": "good"}"#,
r#"invalid value: string "good", expected "none", "fast", "best" or "default" at line 1 column 37"#,
),
(
r#"{"algorithm": "gzip", "level": {}}"#,
r#"invalid type: map, expected unsigned number or string at line 1 column 33"#,
r"invalid type: map, expected unsigned number or string at line 1 column 33",
),
(
r#"{"algorithm": "gzip", "level": "default", "key": 42}"#,
r#"unknown field `key`, expected `algorithm` or `level` at line 1 column 47"#,
r"unknown field `key`, expected `algorithm` or `level` at line 1 column 47",
),
(
r#"{"algorithm": "gzip", "level": 10}"#,
r#"invalid value `10`, expected value in range [0, 9] at line 1 column 34"#,
r"invalid value `10`, expected value in range [0, 9] at line 1 column 34",
),
(
r#"{"algorithm": "zstd", "level": 22}"#,
r#"invalid value `22`, expected value in range [0, 21] at line 1 column 34"#,
r"invalid value `22`, expected value in range [0, 21] at line 1 column 34",
),
(
r#"{"algorithm": "snappy", "level": 3}"#,
r#"unknown field `level`, there are no fields at line 1 column 35"#,
r"unknown field `level`, there are no fields at line 1 column 35",
),
];
for (source, result) in fixtures_invalid.iter() {
Expand Down
2 changes: 1 addition & 1 deletion src/sinks/util/encoding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -372,7 +372,7 @@ mod tests {
let (written, json_size) = encoding.encode_input(input, &mut writer).unwrap();
assert_eq!(written, 5);

assert_eq!(String::from_utf8(writer).unwrap(), r#"value"#);
assert_eq!(String::from_utf8(writer).unwrap(), r"value");
assert_eq!(CountByteSize(1, input_json_size), json_size.size().unwrap());
}
}
8 changes: 4 additions & 4 deletions src/sinks/util/service.rs
Original file line number Diff line number Diff line change
Expand Up @@ -444,10 +444,10 @@ mod tests {
toml::from_str::<TowerRequestConfig>(r#"concurrency = "broken""#)
.expect_err("Invalid concurrency setting didn't fail");

toml::from_str::<TowerRequestConfig>(r#"concurrency = 0"#)
toml::from_str::<TowerRequestConfig>(r"concurrency = 0")
.expect_err("Invalid concurrency setting didn't fail on zero");

toml::from_str::<TowerRequestConfig>(r#"concurrency = -9"#)
toml::from_str::<TowerRequestConfig>(r"concurrency = -9")
.expect_err("Invalid concurrency setting didn't fail on negative number");
}

Expand Down Expand Up @@ -496,14 +496,14 @@ mod tests {
fn into_settings_with_populated_config() {
// Populate with values not equal to the global defaults.
let cfg = toml::from_str::<TowerRequestConfig>(
r#" concurrency = 16
r" concurrency = 16
timeout_secs = 1
rate_limit_duration_secs = 2
rate_limit_num = 3
retry_attempts = 4
retry_max_duration_secs = 5
retry_initial_backoff_secs = 6
"#,
",
)
.expect("Config failed to parse");

Expand Down
4 changes: 2 additions & 2 deletions src/sinks/util/service/concurrency.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,10 +126,10 @@ impl Configurable for Concurrency {
fn metadata() -> Metadata {
let mut metadata = Metadata::default();
metadata.set_description(
r#"Configuration for outbound request concurrency.
r"Configuration for outbound request concurrency.
This can be set either to one of the below enum values or to a positive integer, which denotes
a fixed concurrency limit."#,
a fixed concurrency limit.",
);
metadata.add_custom_attribute(CustomAttribute::kv("docs::enum_tagging", "external"));
metadata
Expand Down
Loading

0 comments on commit 4720e97

Please sign in to comment.