Commit

assert_unordered->assert_unordered_eq + verify both are subsets of each other
0xNeshi authored Nov 18, 2024
1 parent 2ef1886 commit 5f31d61
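The rename from assert_unordered to assert_unordered_eq goes together with a behavioural fix: the old helper only verified that the first span is a subset of the second, so an output that was missing entries from expected could still pass. The commit adds the reverse pass, making the assertion fail unless each span is a subset of the other. The sketch below is hypothetical and not part of the commit: it uses plain u32 spans and a made-up is_subset helper rather than the exercise's WordResult, but it mirrors the loop structure used in the diff and shows why both directions are needed.

// Sketch only, not part of this commit.
// Hypothetical helper: does every element of `left` appear somewhere in `right`?
fn is_subset(left: Span<u32>, right: Span<u32>) -> bool {
    let mut result = true;
    for item in left {
        let mut found = false;
        for other in right {
            if item == other {
                found = true;
                break;
            }
        };
        if !found {
            result = false;
        }
    };
    result
}

#[test]
fn one_directional_check_is_too_weak() {
    let output = array![1_u32, 2].span();
    let expected = array![1_u32, 2, 3].span();
    // The old check only looked in this direction, so the missing 3 went unnoticed.
    assert!(is_subset(output, expected), "output is a subset of expected");
    // The reverse direction catches the mismatch; the renamed helper now requires both.
    assert!(!is_subset(expected, output), "expected is not a subset of output");
}

The renamed assert_unordered_eq in the diff below runs exactly these two passes over Span<WordResult>, panicking with a left/right message as soon as either direction fails.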
Showing 1 changed file with 32 additions and 15 deletions.
47 changes: 32 additions & 15 deletions exercises/practice/word-count/tests/word_count.cairo
@@ -6,7 +6,7 @@ fn count_one_word() {
let mut output = count_words(input);

let expected = array![WordResult { word: "word", count: 1 }].span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -21,7 +21,7 @@ fn count_one_of_each_word() {
WordResult { word: "one", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -38,7 +38,7 @@ fn multiple_occurrences_of_a_word() {
WordResult { word: "fish", count: 4 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -53,7 +53,7 @@ fn handles_cramped_lists() {
WordResult { word: "three", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -68,7 +68,7 @@ fn handles_expanded_lists() {
WordResult { word: "three", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -85,7 +85,7 @@ fn ignore_punctuation() {
WordResult { word: "javascript", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -101,7 +101,7 @@ fn include_numbers() {
]
.span();

-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -114,7 +114,7 @@ fn normalize_case() {
WordResult { word: "go", count: 3 }, WordResult { word: "stop", count: 2 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -134,7 +134,7 @@ fn with_apostrophes() {
WordResult { word: "it", count: 1 },
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -152,7 +152,7 @@ fn with_quotations() {
WordResult { word: "large", count: 2 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -172,7 +172,7 @@ fn substrings_from_the_beginning() {
WordResult { word: "a", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -185,7 +185,7 @@ fn multiple_spaces_not_detected_as_a_word() {
WordResult { word: "multiple", count: 1 }, WordResult { word: "whitespaces", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -200,7 +200,7 @@ fn alternating_word_separators_not_detected_as_a_word() {
WordResult { word: "three", count: 1 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}

#[test]
@@ -213,12 +213,13 @@ fn quotation_for_word_with_apostrophe() {
WordResult { word: "can", count: 1 }, WordResult { word: "can't", count: 2 }
]
.span();
-assert_unordered(output, expected);
+assert_unordered_eq(output, expected);
}


// helper function.
-fn assert_unordered(span1: Span<WordResult>, span2: Span<WordResult>) {
+fn assert_unordered_eq(span1: Span<WordResult>, span2: Span<WordResult>) {
+// `span1` should be subset of `span2`
for item in span1 {
let mut found = false;
for other_item in span2 {
@@ -233,5 +234,21 @@ fn assert_unordered(span1: Span<WordResult>, span2: Span<WordResult>) {
span1,
span2
);
};
+// and `span2` should be subset of `span1`
+for item in span2 {
+let mut found = false;
+for other_item in span1 {
+if item == other_item {
+found = true;
+break;
+}
+};
+assert!(
+found,
+"assertion failed: `(left == right)`\n left: `{:?}`,\n right `{:?}`",
+span1,
+span2
+);
+}
}
