tokenized_buffer_test.cpp 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "lexer/tokenized_buffer.h"

#include <iterator>
#include <list>

#include "diagnostics/diagnostic_emitter.h"
#include "diagnostics/mocks.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "lexer/tokenized_buffer_test_helpers.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/YAMLParser.h"
#include "llvm/Support/raw_ostream.h"
  19. namespace Carbon {
  20. namespace {
  21. using ::Carbon::Testing::DiagnosticAt;
  22. using ::Carbon::Testing::DiagnosticMessage;
  23. using ::Carbon::Testing::ExpectedToken;
  24. using ::Carbon::Testing::HasTokens;
  25. using ::Carbon::Testing::IsKeyValueScalars;
  26. using ::testing::Eq;
  27. using ::testing::HasSubstr;
  28. using ::testing::NotNull;
  29. using ::testing::StrEq;
  30. struct LexerTest : ::testing::Test {
  31. llvm::SmallVector<SourceBuffer, 16> source_storage;
  32. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  33. source_storage.push_back(SourceBuffer::CreateFromText(text.str()));
  34. return source_storage.back();
  35. }
  36. auto Lex(llvm::Twine text,
  37. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  38. -> TokenizedBuffer {
  39. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  40. }
  41. };
  42. TEST_F(LexerTest, HandlesEmptyBuffer) {
  43. auto buffer = Lex("");
  44. EXPECT_FALSE(buffer.HasErrors());
  45. EXPECT_EQ(buffer.Tokens().begin(), buffer.Tokens().end());
  46. }
TEST_F(LexerTest, TracksLinesAndColumns) {
  // Verifies line, column, and indent tracking across several lines,
  // including a multi-line block string literal; the token after the block
  // string terminator inherits the string's indent column.
  auto buffer = Lex("\n  ;;\n   ;;;\n   x\"foo\" \"\"\"baz\n  a\n \"\"\" y");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          // Two semicolons on line 2 share the line's indent.
                          {.kind = TokenKind::Semi(),
                           .line = 2,
                           .column = 3,
                           .indent_column = 3},
                          {.kind = TokenKind::Semi(),
                           .line = 2,
                           .column = 4,
                           .indent_column = 3},
                          {.kind = TokenKind::Semi(),
                           .line = 3,
                           .column = 4,
                           .indent_column = 4},
                          {.kind = TokenKind::Semi(),
                           .line = 3,
                           .column = 5,
                           .indent_column = 4},
                          {.kind = TokenKind::Semi(),
                           .line = 3,
                           .column = 6,
                           .indent_column = 4},
                          {.kind = TokenKind::Identifier(),
                           .line = 4,
                           .column = 4,
                           .indent_column = 4,
                           .text = "x"},
                          {.kind = TokenKind::StringLiteral(),
                           .line = 4,
                           .column = 5,
                           .indent_column = 4},
                          // The block string literal starting at column 11.
                          {.kind = TokenKind::StringLiteral(),
                           .line = 4,
                           .column = 11,
                           .indent_column = 4},
                          // `y` follows the multi-line string terminator and
                          // uses the string's indent column (11), not the
                          // line's own indentation.
                          {.kind = TokenKind::Identifier(),
                           .line = 6,
                           .column = 6,
                           .indent_column = 11,
                           .text = "y"},
                      }));
}
TEST_F(LexerTest, HandlesNumericLiteral) {
  // Covers decimal, hex (0x), binary (0b), digit-separated (`_`), and real
  // (exponent) literals, plus the token split around a `-` operator.
  auto buffer = Lex("12-578\n  1  2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  EXPECT_FALSE(buffer.HasErrors());
  ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 1,
                           .column = 1,
                           .indent_column = 1,
                           .text = "12"},
                          // `12-578` splits into integer, minus, integer.
                          {.kind = TokenKind::Minus(),
                           .line = 1,
                           .column = 3,
                           .indent_column = 1},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 1,
                           .column = 4,
                           .indent_column = 1,
                           .text = "578"},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 2,
                           .column = 3,
                           .indent_column = 3,
                           .text = "1"},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 2,
                           .column = 6,
                           .indent_column = 3,
                           .text = "2"},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 3,
                           .column = 1,
                           .indent_column = 1,
                           .text = "0x12_3ABC"},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 4,
                           .column = 1,
                           .indent_column = 1,
                           .text = "0b10_10_11"},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 5,
                           .column = 1,
                           .indent_column = 1,
                           .text = "1_234_567"},
                          {.kind = TokenKind::RealLiteral(),
                           .line = 6,
                           .column = 1,
                           .indent_column = 1,
                           .text = "1.5e9"},
                      }));
  // Spot-check the computed values behind each literal token.
  auto token_12 = buffer.Tokens().begin();
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  auto token_578 = buffer.Tokens().begin() + 2;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  auto token_1 = buffer.Tokens().begin() + 3;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  auto token_2 = buffer.Tokens().begin() + 4;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  auto token_0x12_3abc = buffer.Tokens().begin() + 5;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  auto token_0b10_10_11 = buffer.Tokens().begin() + 6;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  auto token_1_234_567 = buffer.Tokens().begin() + 7;
  EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  // 1.5e9 is represented as mantissa 15 with decimal exponent 8.
  auto token_1_5e9 = buffer.Tokens().begin() + 8;
  auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  EXPECT_EQ(value_1_5e9.IsDecimal(), true);
}
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  // Malformed literals become Error tokens that span the whole bad literal,
  // while adjacent well-formed literals still lex normally.
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.HasErrors());
  ASSERT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          // Trailing alpha after digits is an error.
                          {.kind = TokenKind::Error(),
                           .line = 1,
                           .column = 1,
                           .indent_column = 1,
                           .text = "14x"},
                          {.kind = TokenKind::IntegerLiteral(),
                           .line = 1,
                           .column = 5,
                           .indent_column = 1,
                           .text = "15_49"},
                          // `q` is not a valid hex-literal suffix.
                          {.kind = TokenKind::Error(),
                           .line = 1,
                           .column = 11,
                           .indent_column = 1,
                           .text = "0x3.5q"},
                          {.kind = TokenKind::RealLiteral(),
                           .line = 1,
                           .column = 18,
                           .indent_column = 1,
                           .text = "0x3_4.5_6"},
                          // Leading zero followed by letters is an error.
                          {.kind = TokenKind::Error(),
                           .line = 1,
                           .column = 28,
                           .indent_column = 1,
                           .text = "0ops"},
                      }));
}
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  // Checks where a numeric literal ends and the next token begins, especially
  // around `.`, `+`/`-` after exponents, and trailing identifiers.
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `1.` with nothing after the period: integer then period.
                  {.kind = TokenKind::IntegerLiteral(), .text = "1"},
                  {.kind = TokenKind::Period()},
                  // newline
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "2"},
                  // newline
                  {.kind = TokenKind::IntegerLiteral(), .text = "3"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::Identifier(), .text = "foo"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "4.0"},
                  {.kind = TokenKind::Minus()},
                  {.kind = TokenKind::Identifier(), .text = "bar"},
                  // newline
                  // `+` after a complete exponent starts a new token.
                  {.kind = TokenKind::RealLiteral(), .text = "5.0e+123"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "456"},
                  // newline
                  // A second `e` inside the literal makes it an error.
                  {.kind = TokenKind::Error(), .text = "6.0e+1e"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "2"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "1e7"},
                  // newline
                  {.kind = TokenKind::IntegerLiteral(), .text = "8"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "10"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "9.0"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::RealLiteral(), .text = "9.5"},
                  // newline
                  // A letter directly after the period is an error token.
                  {.kind = TokenKind::Error(), .text = "10.foo"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "11.0"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Identifier(), .text = "foo"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "12e"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "1"},
                  // newline
                  // `_` after the period lexes as the underscore keyword.
                  {.kind = TokenKind::IntegerLiteral(), .text = "13"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::UnderscoreKeyword()},
              }));
}
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // Unrecognized bytes — including multi-byte UTF-8 and an embedded NUL —
  // should group into Error tokens without derailing nearby valid tokens.
  // sizeof - 1 keeps the embedded '\0' but drops the terminating one.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\"\n\"\\";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(),
           .line = 1,
           .column = 1,
           .text = llvm::StringRef("$$💩", 6)},
          // 💩 takes 4 bytes, and we count column as bytes offset.
          {.kind = TokenKind::Minus(), .line = 1, .column = 7},
          {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
          // newline
          // The NUL byte is folded into the surrounding error token.
          {.kind = TokenKind::Error(),
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral(),
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
          // newline
          // An unterminated `"` on its own line is a single error token.
          {.kind = TokenKind::Error(),
           .line = 3,
           .column = 1,
           .text = llvm::StringRef("\"", 1)},
          // newline
          {.kind = TokenKind::Error(),
           .line = 4,
           .column = 1,
           .text = llvm::StringRef("\"", 1)},
          {.kind = TokenKind::Backslash(),
           .line = 4,
           .column = 2,
           .text = llvm::StringRef("\\", 1)},
      }));
}
TEST_F(LexerTest, Symbols) {
  // We don't need to exhaustively test symbols here as they're handled with
  // common code, but we want to check specific patterns to verify things like
  // max-munch rule and handling of interesting symbols.

  // Max-munch: `<<<` is `<<` then `<`, not three `<`.
  auto buffer = Lex("<<<");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLess()},
                          {TokenKind::Less()},
                      }));

  // `<<=` wins over `<<` + `=`; `>>` wins over `>` + `>`.
  buffer = Lex("<<=>>");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLessEqual()},
                          {TokenKind::GreaterGreater()},
                      }));

  // Whitespace separates what would otherwise munch together.
  buffer = Lex("< <=> >");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Less()},
                          {TokenKind::LessEqualGreater()},
                          {TokenKind::Greater()},
                      }));

  // Single-character symbols with no longer forms.
  buffer = Lex("\\/?@&^!");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Backslash()},
                          {TokenKind::Slash()},
                          {TokenKind::Question()},
                          {TokenKind::At()},
                          {TokenKind::Amp()},
                          {TokenKind::Caret()},
                          {TokenKind::Exclaim()},
                      }));
}
  335. TEST_F(LexerTest, Parens) {
  336. auto buffer = Lex("()");
  337. EXPECT_FALSE(buffer.HasErrors());
  338. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  339. {TokenKind::OpenParen()},
  340. {TokenKind::CloseParen()},
  341. }));
  342. buffer = Lex("((()()))");
  343. EXPECT_FALSE(buffer.HasErrors());
  344. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  345. {TokenKind::OpenParen()},
  346. {TokenKind::OpenParen()},
  347. {TokenKind::OpenParen()},
  348. {TokenKind::CloseParen()},
  349. {TokenKind::OpenParen()},
  350. {TokenKind::CloseParen()},
  351. {TokenKind::CloseParen()},
  352. {TokenKind::CloseParen()},
  353. }));
  354. }
  355. TEST_F(LexerTest, CurlyBraces) {
  356. auto buffer = Lex("{}");
  357. EXPECT_FALSE(buffer.HasErrors());
  358. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  359. {TokenKind::OpenCurlyBrace()},
  360. {TokenKind::CloseCurlyBrace()},
  361. }));
  362. buffer = Lex("{{{}{}}}");
  363. EXPECT_FALSE(buffer.HasErrors());
  364. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  365. {TokenKind::OpenCurlyBrace()},
  366. {TokenKind::OpenCurlyBrace()},
  367. {TokenKind::OpenCurlyBrace()},
  368. {TokenKind::CloseCurlyBrace()},
  369. {TokenKind::OpenCurlyBrace()},
  370. {TokenKind::CloseCurlyBrace()},
  371. {TokenKind::CloseCurlyBrace()},
  372. {TokenKind::CloseCurlyBrace()},
  373. }));
  374. }
TEST_F(LexerTest, MatchingGroups) {
  // Verifies GetMatchedClosingToken/GetMatchedOpeningToken pair up grouping
  // tokens correctly for simple, nested, and interleaved groups.
  {
    // Two adjacent sibling groups: `()` and `{}`.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // All tokens have been consumed.
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
  {
    // Mixed nesting of parens and curlies, with identifiers inside.
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    // First group: `({x})` — curly nested inside paren.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Second group: `{(y)}` — paren nested inside curly.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // Third group: `{{((z))}}` — doubly nested on both kinds.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    // All tokens have been consumed.
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
}
TEST_F(LexerTest, MismatchedGroups) {
  // Unbalanced grouping tokens produce errors plus synthesized recovery
  // tokens (marked `.recovery = true`) so downstream code still sees
  // balanced groups.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace()},
                  // A closing brace is synthesized to balance the open one.
                  {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
              }));

  // A close with no open becomes a plain error token.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error(), .text = "}"},
                      }));

  // `{(}`: the unmatched `(` gets a synthesized `)` before the real `}`.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
      }));

  // `)({)`: leading `)` is an error; the `{` inside `(...)` gets a
  // synthesized `}` so the final `)` can match.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(), .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen(), .column = 4},
      }));
}
TEST_F(LexerTest, Keywords) {
  // A keyword token records line/column/indent like any other token.
  auto buffer = Lex("   fn");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FnKeyword(), .column = 4, .indent_column = 4},
      }));

  // Each keyword maps to its dedicated token kind, including `_`.
  buffer = Lex("and or not if else for loop return var break continue _");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::AndKeyword()},
                          {TokenKind::OrKeyword()},
                          {TokenKind::NotKeyword()},
                          {TokenKind::IfKeyword()},
                          {TokenKind::ElseKeyword()},
                          {TokenKind::ForKeyword()},
                          {TokenKind::LoopKeyword()},
                          {TokenKind::ReturnKeyword()},
                          {TokenKind::VarKeyword()},
                          {TokenKind::BreakKeyword()},
                          {TokenKind::ContinueKeyword()},
                          {TokenKind::UnderscoreKeyword()},
                      }));
}
TEST_F(LexerTest, Comments) {
  // Comments are skipped entirely and produce no tokens; surrounding tokens
  // keep correct line/column information.
  auto buffer = Lex(" ;\n  // foo\n  ;");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Semi(),
                           .line = 1,
                           .column = 2,
                           .indent_column = 2},
                          {.kind = TokenKind::Semi(),
                           .line = 3,
                           .column = 3,
                           .indent_column = 3},
                      }));

  // A comment-only buffer lexes to an empty token stream.
  buffer = Lex("// foo\n//\n// bar");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));

  // Make sure weird characters aren't a problem.
  buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));

  // Make sure we can lex a comment at the end of the input.
  buffer = Lex("//");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{}));
}
  537. TEST_F(LexerTest, InvalidComments) {
  538. llvm::StringLiteral testcases[] = {
  539. " /// foo\n",
  540. "foo // bar\n",
  541. "//! hello",
  542. " //world",
  543. };
  544. for (llvm::StringLiteral testcase : testcases) {
  545. auto buffer = Lex(testcase);
  546. EXPECT_TRUE(buffer.HasErrors());
  547. }
  548. }
TEST_F(LexerTest, Identifiers) {
  // Basic identifier with leading whitespace setting the indent column.
  auto buffer = Lex("   foobar");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(),
                           .column = 4,
                           .indent_column = 4,
                           .text = "foobar"},
                      }));

  // Check different kinds of identifier character sequences.
  buffer = Lex("_foo_bar");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
                      }));
  buffer = Lex("foo2bar00");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
              }));

  // Check that we can parse identifiers that start with a keyword.
  buffer = Lex("fnord");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(), .text = "fnord"},
                      }));

  // Check multiple identifiers with indent and interning.
  buffer = Lex("   foo;bar\nbar \n  foo\tfoo");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Identifier(),
                           .line = 1,
                           .column = 4,
                           .indent_column = 4,
                           .text = "foo"},
                          {.kind = TokenKind::Semi()},
                          // Token after `;` keeps the line's indent column.
                          {.kind = TokenKind::Identifier(),
                           .line = 1,
                           .column = 8,
                           .indent_column = 4,
                           .text = "bar"},
                          {.kind = TokenKind::Identifier(),
                           .line = 2,
                           .column = 1,
                           .indent_column = 1,
                           .text = "bar"},
                          {.kind = TokenKind::Identifier(),
                           .line = 3,
                           .column = 3,
                           .indent_column = 3,
                           .text = "foo"},
                          // A tab is one byte, so this lands at column 7.
                          {.kind = TokenKind::Identifier(),
                           .line = 3,
                           .column = 7,
                           .indent_column = 3,
                           .text = "foo"},
                      }));
}
TEST_F(LexerTest, StringLiterals) {
  // Covers simple, block (`"""`), raw (`#"..."#`), and adjacent string
  // literals, checking both positions and decoded contents.
  // NOTE(review): interior whitespace of this raw string was reconstructed
  // from the expected columns/contents below — confirm against upstream.
  llvm::StringLiteral testcase = R"(
    "hello world\n"

    """foo
      test \
     \xAB
     """ trailing

      #"""#

    "\0"

    #"\0"foo"\1"#

    """x"""
)";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::StringLiteral(),
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"hello world\n"}},
                  // Block string: terminator indent is stripped from each
                  // line, `\<newline>` joins lines, `\xAB` decodes to a byte.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {" test \xAB\n"}},
                  // The token after the terminator keeps the string's indent.
                  {.kind = TokenKind::Identifier(),
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // Raw string `#"""#` contains a single `"`.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .string_contents = {"\""}},
                  {.kind = TokenKind::StringLiteral(),
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // In a raw string, escapes are literal text.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one.
                  {.kind = TokenKind::StringLiteral(),
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral(),
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral(),
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .string_contents = {""}},
              }));
}
TEST_F(LexerTest, InvalidStringLiterals) {
  // Unterminated and incompletely escaped string literals in various forms;
  // each must report an error and produce at least one Error token.
  llvm::StringLiteral invalid[] = {
      R"(")",
      R"("""
"")",  //
      R"("\)",     //
      R"("\")",    //
      R"("\\)",    //
      R"("\\\")",  //
      R"(""")",
      R"("""
)",  //
      R"("""\)",
      R"(#"""
""")",
  };
  for (llvm::StringLiteral test : invalid) {
    auto buffer = Lex(test);
    EXPECT_TRUE(buffer.HasErrors()) << "`" << test << "`";

    // We should have formed at least one error token.
    bool found_error = false;
    for (TokenizedBuffer::Token token : buffer.Tokens()) {
      if (buffer.GetKind(token) == TokenKind::Error()) {
        found_error = true;
        break;
      }
    }
    EXPECT_TRUE(found_error) << "`" << test << "`";
  }
}
TEST_F(LexerTest, Diagnostics) {
  // Verifies that each lexing problem reports a diagnostic at the right
  // line/column with a recognizable message, using a mock consumer.
  // (Each source line below is indented by four spaces; the DiagnosticAt
  // columns depend on that indentation.)
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
    //no space after comment
    "hello\bworld\xab"
    0x123abc
    #"
)";
  Testing::MockDiagnosticConsumer consumer;
  // Comment after code on the same line.
  EXPECT_CALL(consumer, HandleDiagnostic(AllOf(
                            DiagnosticAt(3, 19),
                            DiagnosticMessage(HasSubstr("Trailing comment")))));
  // `//` must be followed by whitespace.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(4, 7),
                  DiagnosticMessage(HasSubstr("Whitespace is required")))));
  // `\b` is not a recognized escape.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 12),
          DiagnosticMessage(HasSubstr("Unrecognized escape sequence `b`")))));
  // `\xab` uses lowercase hex digits.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 20),
          DiagnosticMessage(HasSubstr("two uppercase hexadecimal digits")))));
  // Lowercase `a` is invalid in `0x123abc`? No — the diagnostic points at
  // the first lowercase digit in the literal.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(6, 10),
          DiagnosticMessage(HasSubstr("Invalid digit 'a' in hexadecimal")))));
  // A bare `#"` is not a valid token.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(7, 5),
                  DiagnosticMessage(HasSubstr("unrecognized character")))));
  Lex(testcase, consumer);
}
  740. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  741. auto newline_offset = text.find_first_of('\n');
  742. llvm::StringRef line = text.slice(0, newline_offset);
  743. if (newline_offset != llvm::StringRef::npos) {
  744. text = text.substr(newline_offset + 1);
  745. } else {
  746. text = "";
  747. }
  748. return line.str();
  749. }
TEST_F(LexerTest, Printing) {
  // Checks the human-readable Print output line by line.
  auto buffer = Lex(";");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_storage;
  llvm::raw_string_ostream print_stream(print_storage);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
                    "indent: 1, spelling: ';' }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test kind padding.
  buffer = Lex("(;foo;)");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
                    "1, indent: 1, spelling: '(', closing_token: 4 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
                    "2, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
                    "3, indent: 1, spelling: 'foo', identifier: 0 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
                    "6, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
                    "7, indent: 1, spelling: ')', opening_token: 0 }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test digit padding with max values of 9, 10, and 11.
  buffer = Lex(";\n\n\n\n\n\n\n\n\n\n        ;;");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
                    "indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'Semi', line: 11, column: 9, "
                    "indent: 9, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
                    "indent: 9, spelling: ';' }"));
  EXPECT_TRUE(print.empty()) << print;
}
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();

  // Parse the output into a YAML stream. This will print errors to stderr.
  llvm::SourceMgr source_manager;
  llvm::yaml::Stream yaml_stream(print_output, source_manager);
  auto yaml_it = yaml_stream.begin();
  auto* root_node = llvm::dyn_cast<llvm::yaml::MappingNode>(yaml_it->getRoot());
  ASSERT_THAT(root_node, NotNull());

  // Walk the top-level mapping of tokens, dig out the sub-mapping of data for
  // each taken, and then verify those entries.
  // First token: the `;` on line 2, column 2.
  auto mapping_it = llvm::cast<llvm::yaml::MappingNode>(root_node)->begin();
  auto* token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  auto* token_key_node =
      llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  auto* token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  auto token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "0"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));

  // Second token: the first `;` on line 5.
  ++mapping_it;
  token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  token_key_node = llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "5"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));

  // Third token: the second `;` on line 5, sharing the line's indent.
  ++mapping_it;
  token_node = llvm::dyn_cast<llvm::yaml::KeyValueNode>(&*mapping_it);
  ASSERT_THAT(token_node, NotNull());
  token_key_node = llvm::dyn_cast<llvm::yaml::ScalarNode>(token_node->getKey());
  ASSERT_THAT(token_key_node, NotNull());
  EXPECT_THAT(token_key_node->getRawValue(), StrEq("token"));
  token_value_node =
      llvm::dyn_cast<llvm::yaml::MappingNode>(token_node->getValue());
  ASSERT_THAT(token_value_node, NotNull());
  token_it = token_value_node->begin();
  EXPECT_THAT(&*token_it, IsKeyValueScalars("index", "2"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("kind", "Semi"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("line", "5"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("column", "3"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("indent", "1"));
  ++token_it;
  EXPECT_THAT(&*token_it, IsKeyValueScalars("spelling", ";"));
  EXPECT_THAT(++token_it, Eq(token_value_node->end()));

  // No further tokens and no further YAML documents.
  ASSERT_THAT(++mapping_it, Eq(root_node->end()));
  ASSERT_THAT(++yaml_it, Eq(yaml_stream.end()));
}
  886. } // namespace
  887. } // namespace Carbon