use insta::assert_debug_snapshot as assert_snapshot;
use oyster_parser::Lexer;
/// Empty input: the lexer should immediately report end-of-input.
#[test]
fn eof() {
    let token = Lexer::new("").next_command_token();
    assert_snapshot!(token);
}
/// Mixed spaces and tabs: horizontal whitespace handling.
#[test]
fn whitespace() {
    let token = Lexer::new(" \t \t\t").next_command_token();
    assert_snapshot!(token);
}
/// A run of newline characters.
#[test]
fn newlines() {
    let token = Lexer::new("\n\n\n").next_command_token();
    assert_snapshot!(token);
}
/// A lone command separator.
#[test]
fn semicolon() {
    let token = Lexer::new(";").next_command_token();
    assert_snapshot!(token);
}
/// A lone pipe operator.
#[test]
fn pipe() {
    let token = Lexer::new("|").next_command_token();
    assert_snapshot!(token);
}
/// An ordinary bare word.
#[test]
fn plain_word() {
    let token = Lexer::new("whoami").next_command_token();
    assert_snapshot!(token);
}
/// A word with an embedded `#` (e.g. a Nix flake reference) —
/// the hash must not start a comment mid-word.
#[test]
fn word_with_hash() {
    let token = Lexer::new("nixpkgs#hello").next_command_token();
    assert_snapshot!(token);
}
/// A backslash-escaped `#` at the start of input.
#[test]
fn escaped_hash() {
    let token = Lexer::new(r"\#").next_command_token();
    assert_snapshot!(token);
}
/// Input that is entirely a `#` comment.
#[test]
fn comment() {
    let token = Lexer::new("# hey").next_command_token();
    assert_snapshot!(token);
}