about summary refs log tree commit diff
path: root/compiler/rustc_lexer/src/tests.rs
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/rustc_lexer/src/tests.rs')
-rw-r--r--  compiler/rustc_lexer/src/tests.rs  160
1 file changed, 140 insertions(+), 20 deletions(-)
diff --git a/compiler/rustc_lexer/src/tests.rs b/compiler/rustc_lexer/src/tests.rs
index a1ea5ceb1f6..94017b7b286 100644
--- a/compiler/rustc_lexer/src/tests.rs
+++ b/compiler/rustc_lexer/src/tests.rs
@@ -129,6 +129,34 @@ fn check_lexing(src: &str, expect: Expect) {
 }
 
 #[test]
+fn smoke_test() {
+    check_lexing(
+        "/* my source file */ fn main() { println!(\"zebra\"); }\n",
+        expect![[r#"
+            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 20 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Ident, len: 2 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Ident, len: 4 }
+            Token { kind: OpenParen, len: 1 }
+            Token { kind: CloseParen, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: OpenBrace, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Ident, len: 7 }
+            Token { kind: Bang, len: 1 }
+            Token { kind: OpenParen, len: 1 }
+            Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 7 }, len: 7 }
+            Token { kind: CloseParen, len: 1 }
+            Token { kind: Semi, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: CloseBrace, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+        "#]],
+    )
+}
+
+#[test]
 fn comment_flavors() {
     check_lexing(
         r"
@@ -143,25 +171,117 @@ fn comment_flavors() {
 /*! inner doc block */
 ",
         expect![[r#"
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: LineComment { doc_style: None }, len: 7 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: LineComment { doc_style: None }, len: 17 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: LineComment { doc_style: Some(Outer) }, len: 18 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: LineComment { doc_style: Some(Inner) }, len: 18 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: BlockComment { doc_style: None, terminated: true }, len: 4 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: BlockComment { doc_style: None, terminated: true }, len: 18 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: BlockComment { doc_style: Some(Outer), terminated: true }, len: 22 }
-                Token { kind: Whitespace, len: 1 }
-                Token { kind: BlockComment { doc_style: Some(Inner), terminated: true }, len: 22 }
-                Token { kind: Whitespace, len: 1 }
-            "#]],
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: LineComment { doc_style: None }, len: 7 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: LineComment { doc_style: None }, len: 17 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: LineComment { doc_style: Some(Outer) }, len: 18 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: LineComment { doc_style: Some(Inner) }, len: 18 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 4 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 18 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: BlockComment { doc_style: Some(Outer), terminated: true }, len: 22 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: BlockComment { doc_style: Some(Inner), terminated: true }, len: 22 }
+            Token { kind: Whitespace, len: 1 }
+        "#]],
+    )
+}
+
+#[test]
+fn nested_block_comments() {
+    check_lexing(
+        "/* /* */ */'a'",
+        expect![[r#"
+            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
+            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
+        "#]],
+    )
+}
+
+#[test]
+fn characters() {
+    check_lexing(
+        "'a' ' ' '\\n'",
+        expect![[r#"
+            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 4 }, len: 4 }
+        "#]],
+    );
+}
+
+#[test]
+fn lifetime() {
+    check_lexing(
+        "'abc",
+        expect![[r#"
+            Token { kind: Lifetime { starts_with_number: false }, len: 4 }
+        "#]],
+    );
+}
+
+#[test]
+fn raw_string() {
+    check_lexing(
+        "r###\"\"#a\\b\x00c\"\"###",
+        expect![[r#"
+            Token { kind: Literal { kind: RawStr { n_hashes: 3, err: None }, suffix_start: 17 }, len: 17 }
+        "#]],
+    )
+}
+
+#[test]
+fn literal_suffixes() {
+    check_lexing(
+        r####"
+'a'
+b'a'
+"a"
+b"a"
+1234
+0b101
+0xABC
+1.0
+1.0e10
+2us
+r###"raw"###suffix
+br###"raw"###suffix
+"####,
+        expect![[r#"
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Byte { terminated: true }, suffix_start: 4 }, len: 4 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 3 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: ByteStr { terminated: true }, suffix_start: 4 }, len: 4 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Int { base: Decimal, empty_int: false }, suffix_start: 4 }, len: 4 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Int { base: Binary, empty_int: false }, suffix_start: 5 }, len: 5 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Int { base: Hexadecimal, empty_int: false }, suffix_start: 5 }, len: 5 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Float { base: Decimal, empty_exponent: false }, suffix_start: 3 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Float { base: Decimal, empty_exponent: false }, suffix_start: 6 }, len: 6 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Int { base: Decimal, empty_int: false }, suffix_start: 1 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: RawStr { n_hashes: 3, err: None }, suffix_start: 12 }, len: 18 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: RawByteStr { n_hashes: 3, err: None }, suffix_start: 13 }, len: 19 }
+            Token { kind: Whitespace, len: 1 }
+        "#]],
     )
 }