tokenized_buffer_test.cpp 41 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lexer/tokenized_buffer.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <iterator>
  8. #include "llvm/ADT/ArrayRef.h"
  9. #include "llvm/ADT/None.h"
  10. #include "llvm/ADT/Sequence.h"
  11. #include "llvm/ADT/SmallString.h"
  12. #include "llvm/ADT/Twine.h"
  13. #include "llvm/Support/SourceMgr.h"
  14. #include "llvm/Support/raw_ostream.h"
  15. #include "toolchain/common/yaml_test_helpers.h"
  16. #include "toolchain/diagnostics/diagnostic_emitter.h"
  17. #include "toolchain/diagnostics/mocks.h"
  18. #include "toolchain/lexer/tokenized_buffer_test_helpers.h"
  19. namespace Carbon {
  20. namespace {
  21. using ::Carbon::Testing::DiagnosticAt;
  22. using ::Carbon::Testing::DiagnosticMessage;
  23. using ::Carbon::Testing::ExpectedToken;
  24. using ::Carbon::Testing::HasTokens;
  25. using ::testing::ElementsAre;
  26. using ::testing::Eq;
  27. using ::testing::HasSubstr;
  28. using ::testing::StrEq;
  29. namespace Yaml = Carbon::Testing::Yaml;
// Test fixture providing helpers to lex source text into token buffers.
class LexerTest : public ::testing::Test {
 protected:
  // Creates a SourceBuffer from `text` and keeps it alive in
  // `source_storage` so that the returned reference remains valid for the
  // duration of the test.
  auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
    source_storage.push_back(SourceBuffer::CreateFromText(text.str()));
    return source_storage.back();
  }

  // Lexes `text` into a TokenizedBuffer. Diagnostics go to the console
  // unless a specific consumer is supplied.
  auto Lex(llvm::Twine text,
           DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
      -> TokenizedBuffer {
    return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  }

  // Owns every source buffer lexed by this fixture.
  // NOTE(review): growing past the inline capacity of 16 reallocates and
  // would invalidate references previously returned by GetSourceBuffer --
  // confirm no single test lexes more than 16 buffers.
  llvm::SmallVector<SourceBuffer, 16> source_storage;
};
  43. TEST_F(LexerTest, HandlesEmptyBuffer) {
  44. auto buffer = Lex("");
  45. EXPECT_FALSE(buffer.HasErrors());
  46. EXPECT_THAT(
  47. buffer,
  48. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  49. }
  50. TEST_F(LexerTest, TracksLinesAndColumns) {
  51. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" \"\"\"baz\n a\n \"\"\" y");
  52. EXPECT_FALSE(buffer.HasErrors());
  53. EXPECT_THAT(buffer,
  54. HasTokens(llvm::ArrayRef<ExpectedToken>{
  55. {.kind = TokenKind::Semi(),
  56. .line = 2,
  57. .column = 3,
  58. .indent_column = 3},
  59. {.kind = TokenKind::Semi(),
  60. .line = 2,
  61. .column = 4,
  62. .indent_column = 3},
  63. {.kind = TokenKind::Semi(),
  64. .line = 3,
  65. .column = 4,
  66. .indent_column = 4},
  67. {.kind = TokenKind::Semi(),
  68. .line = 3,
  69. .column = 5,
  70. .indent_column = 4},
  71. {.kind = TokenKind::Semi(),
  72. .line = 3,
  73. .column = 6,
  74. .indent_column = 4},
  75. {.kind = TokenKind::Identifier(),
  76. .line = 4,
  77. .column = 4,
  78. .indent_column = 4,
  79. .text = "x"},
  80. {.kind = TokenKind::StringLiteral(),
  81. .line = 4,
  82. .column = 5,
  83. .indent_column = 4},
  84. {.kind = TokenKind::StringLiteral(),
  85. .line = 4,
  86. .column = 11,
  87. .indent_column = 4},
  88. {.kind = TokenKind::Identifier(),
  89. .line = 6,
  90. .column = 6,
  91. .indent_column = 11,
  92. .text = "y"},
  93. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 7},
  94. }));
  95. }
  96. TEST_F(LexerTest, HandlesNumericLiteral) {
  97. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  98. EXPECT_FALSE(buffer.HasErrors());
  99. ASSERT_THAT(buffer,
  100. HasTokens(llvm::ArrayRef<ExpectedToken>{
  101. {.kind = TokenKind::IntegerLiteral(),
  102. .line = 1,
  103. .column = 1,
  104. .indent_column = 1,
  105. .text = "12"},
  106. {.kind = TokenKind::Minus(),
  107. .line = 1,
  108. .column = 3,
  109. .indent_column = 1},
  110. {.kind = TokenKind::IntegerLiteral(),
  111. .line = 1,
  112. .column = 4,
  113. .indent_column = 1,
  114. .text = "578"},
  115. {.kind = TokenKind::IntegerLiteral(),
  116. .line = 2,
  117. .column = 3,
  118. .indent_column = 3,
  119. .text = "1"},
  120. {.kind = TokenKind::IntegerLiteral(),
  121. .line = 2,
  122. .column = 6,
  123. .indent_column = 3,
  124. .text = "2"},
  125. {.kind = TokenKind::IntegerLiteral(),
  126. .line = 3,
  127. .column = 1,
  128. .indent_column = 1,
  129. .text = "0x12_3ABC"},
  130. {.kind = TokenKind::IntegerLiteral(),
  131. .line = 4,
  132. .column = 1,
  133. .indent_column = 1,
  134. .text = "0b10_10_11"},
  135. {.kind = TokenKind::IntegerLiteral(),
  136. .line = 5,
  137. .column = 1,
  138. .indent_column = 1,
  139. .text = "1_234_567"},
  140. {.kind = TokenKind::RealLiteral(),
  141. .line = 6,
  142. .column = 1,
  143. .indent_column = 1,
  144. .text = "1.5e9"},
  145. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 6},
  146. }));
  147. auto token_12 = buffer.Tokens().begin();
  148. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  149. auto token_578 = buffer.Tokens().begin() + 2;
  150. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  151. auto token_1 = buffer.Tokens().begin() + 3;
  152. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  153. auto token_2 = buffer.Tokens().begin() + 4;
  154. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  155. auto token_0x12_3abc = buffer.Tokens().begin() + 5;
  156. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  157. auto token_0b10_10_11 = buffer.Tokens().begin() + 6;
  158. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  159. auto token_1_234_567 = buffer.Tokens().begin() + 7;
  160. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  161. auto token_1_5e9 = buffer.Tokens().begin() + 8;
  162. auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  163. EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  164. EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  165. EXPECT_EQ(value_1_5e9.IsDecimal(), true);
  166. }
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  // Each invalid literal becomes a single Error token covering the whole
  // word, while valid neighbors still lex normally.
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.HasErrors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // Digits followed by a letter suffix: error.
                  {.kind = TokenKind::Error(),
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  // Still lexes as one integer token (errors are reported
                  // via diagnostics, hence HasErrors() above).
                  {.kind = TokenKind::IntegerLiteral(),
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  // Hex real with a trailing letter: error.
                  {.kind = TokenKind::Error(),
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  {.kind = TokenKind::RealLiteral(),
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  // `0` followed by letters: error.
                  {.kind = TokenKind::Error(),
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::EndOfFile(), .line = 1, .column = 32},
              }));
}
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  // Verifies where the lexer splits adjacent digits, periods, and signs:
  // each line of the input is one splitting scenario, matched in order by
  // the expectations below (which do not check columns, only kind/text).
  // NOTE(review): leading indentation inside this raw string appears to
  // have been lost in transit; since columns are not checked here, the
  // token splits are unaffected -- confirm against the upstream source.
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `1.` is an integer then a period, not a real.
                  {.kind = TokenKind::IntegerLiteral(), .text = "1"},
                  {.kind = TokenKind::Period()},
                  // newline
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "2"},
                  // newline
                  {.kind = TokenKind::IntegerLiteral(), .text = "3"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::Identifier(), .text = "foo"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "4.0"},
                  {.kind = TokenKind::Minus()},
                  {.kind = TokenKind::Identifier(), .text = "bar"},
                  // newline
                  // The signed exponent binds to the real literal; the
                  // second `+` starts a new token.
                  {.kind = TokenKind::RealLiteral(), .text = "5.0e+123"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "456"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "6.0e+1e"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "2"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "1e7"},
                  // newline
                  {.kind = TokenKind::IntegerLiteral(), .text = "8"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "10"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "9.0"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::RealLiteral(), .text = "9.5"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "10.foo"},
                  // newline
                  {.kind = TokenKind::RealLiteral(), .text = "11.0"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::Identifier(), .text = "foo"},
                  // newline
                  {.kind = TokenKind::Error(), .text = "12e"},
                  {.kind = TokenKind::Plus()},
                  {.kind = TokenKind::IntegerLiteral(), .text = "1"},
                  // newline
                  {.kind = TokenKind::IntegerLiteral(), .text = "13"},
                  {.kind = TokenKind::Period()},
                  {.kind = TokenKind::UnderscoreKeyword()},
                  // newline
                  {.kind = TokenKind::EndOfFile()},
              }));
}
TEST_F(LexerTest, HandlesGarbageCharacters) {
  // Includes multi-byte UTF-8 (the emoji), an embedded NUL byte, and
  // unterminated quotes; sizeof - 1 keeps the NUL inside the input while
  // dropping the terminator.
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\"\n\"\\";
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(),
           .line = 1,
           .column = 1,
           .text = llvm::StringRef("$$💩", 6)},
          // 💩 takes 4 bytes, and we count column as bytes offset.
          {.kind = TokenKind::Minus(), .line = 1, .column = 7},
          {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
          // newline
          // The NUL byte is folded into one error token with its neighbors.
          {.kind = TokenKind::Error(),
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntegerLiteral(),
           .line = 2,
           .column = 4,
           .text = "12"},
          {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
          // newline
          // An unterminated quote produces an error token for the quote.
          {.kind = TokenKind::Error(),
           .line = 3,
           .column = 1,
           .text = llvm::StringRef("\"", 1)},
          // newline
          {.kind = TokenKind::Error(),
           .line = 4,
           .column = 1,
           .text = llvm::StringRef("\"", 1)},
          {.kind = TokenKind::Backslash(),
           .line = 4,
           .column = 2,
           .text = llvm::StringRef("\\", 1)},
          {.kind = TokenKind::EndOfFile(), .line = 4, .column = 3},
      }));
}
TEST_F(LexerTest, Symbols) {
  // We don't need to exhaustively test symbols here as they're handled with
  // common code, but we want to check specific patterns to verify things like
  // max-munch rule and handling of interesting symbols.

  // Max-munch: `<<<` lexes as `<<` then `<`.
  auto buffer = Lex("<<<");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLess()},
                          {TokenKind::Less()},
                          {TokenKind::EndOfFile()},
                      }));
  // `<<=` wins over `<<` followed by `=`.
  buffer = Lex("<<=>>");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::LessLessEqual()},
                          {TokenKind::GreaterGreater()},
                          {TokenKind::EndOfFile()},
                      }));
  // Whitespace separates what would otherwise merge into larger symbols.
  buffer = Lex("< <=> >");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Less()},
                          {TokenKind::LessEqualGreater()},
                          {TokenKind::Greater()},
                          {TokenKind::EndOfFile()},
                      }));
  // A sampling of single-character symbol tokens.
  buffer = Lex("\\/?@&^!");
  EXPECT_FALSE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {TokenKind::Backslash()},
                          {TokenKind::Slash()},
                          {TokenKind::Question()},
                          {TokenKind::At()},
                          {TokenKind::Amp()},
                          {TokenKind::Caret()},
                          {TokenKind::Exclaim()},
                          {TokenKind::EndOfFile()},
                      }));
}
  351. TEST_F(LexerTest, Parens) {
  352. auto buffer = Lex("()");
  353. EXPECT_FALSE(buffer.HasErrors());
  354. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  355. {TokenKind::OpenParen()},
  356. {TokenKind::CloseParen()},
  357. {TokenKind::EndOfFile()},
  358. }));
  359. buffer = Lex("((()()))");
  360. EXPECT_FALSE(buffer.HasErrors());
  361. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  362. {TokenKind::OpenParen()},
  363. {TokenKind::OpenParen()},
  364. {TokenKind::OpenParen()},
  365. {TokenKind::CloseParen()},
  366. {TokenKind::OpenParen()},
  367. {TokenKind::CloseParen()},
  368. {TokenKind::CloseParen()},
  369. {TokenKind::CloseParen()},
  370. {TokenKind::EndOfFile()},
  371. }));
  372. }
  373. TEST_F(LexerTest, CurlyBraces) {
  374. auto buffer = Lex("{}");
  375. EXPECT_FALSE(buffer.HasErrors());
  376. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  377. {TokenKind::OpenCurlyBrace()},
  378. {TokenKind::CloseCurlyBrace()},
  379. {TokenKind::EndOfFile()},
  380. }));
  381. buffer = Lex("{{{}{}}}");
  382. EXPECT_FALSE(buffer.HasErrors());
  383. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  384. {TokenKind::OpenCurlyBrace()},
  385. {TokenKind::OpenCurlyBrace()},
  386. {TokenKind::OpenCurlyBrace()},
  387. {TokenKind::CloseCurlyBrace()},
  388. {TokenKind::OpenCurlyBrace()},
  389. {TokenKind::CloseCurlyBrace()},
  390. {TokenKind::CloseCurlyBrace()},
  391. {TokenKind::CloseCurlyBrace()},
  392. {TokenKind::EndOfFile()},
  393. }));
  394. }
TEST_F(LexerTest, MatchingGroups) {
  // Walks the token stream with an iterator and checks that each opening
  // bracket token is cross-linked to its matching closing token and back.
  {
    // Simple case: two adjacent sibling groups.
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
  {
    // Mixed nesting of parens and curlies, with identifier payloads.
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.HasErrors());
    auto it = buffer.Tokens().begin();
    // First group: `({x})` -- curly nested inside paren.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    // Second group: `{(y)}` -- paren nested inside curly.
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    // Third group: `{{((z))}}` -- doubly nested pairs of each kind.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
    EXPECT_EQ(buffer.Tokens().end(), it);
  }
}
TEST_F(LexerTest, MismatchedGroups) {
  // Unclosed opening bracket: a recovery close token is synthesized.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {TokenKind::OpenCurlyBrace()},
                  {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
                  {TokenKind::EndOfFile()},
              }));
  // Close without any open: becomes an error token, nothing synthesized.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::Error(), .text = "}"},
                          {TokenKind::EndOfFile()},
                      }));
  // `}` closes the outer curly; the unclosed `(` gets a recovery `)`
  // inserted at the same column as the `}`.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
          {TokenKind::EndOfFile()},
      }));
  // Leading stray `)` is an error; the trailing `)` closes the paren after
  // a recovery `}` is inserted for the unclosed curly.
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.HasErrors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::Error(), .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen(), .column = 2},
          {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
          {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen(), .column = 4},
          {TokenKind::EndOfFile()},
      }));
}
TEST_F(LexerTest, Whitespace) {
  // Mismatched groups force recovery tokens to be inserted; this checks
  // whitespace flags on real and recovery tokens alike.
  auto buffer = Lex("{( } {(");
  // Whether there should be whitespace before/after each token.
  // `space[i]` describes the boundary *before* token i (so trailing
  // whitespace of token i is entry i+1); with 9 tokens there are 10
  // boundary entries.
  bool space[] = {true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // }
                  true,
                  // {
                  false,
                  // (
                  true,
                  // inserted )
                  true,
                  // inserted }
                  true,
                  // EOF
                  false};
  int pos = 0;
  for (TokenizedBuffer::Token token : buffer.Tokens()) {
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
    // Only advance once per token: token i's trailing boundary is token
    // i+1's leading boundary, so the same entry is read twice.
    ++pos;
    ASSERT_LT(pos, std::size(space));
    EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  }
  // All boundaries must have been consumed exactly.
  ASSERT_EQ(pos + 1, std::size(space));
}
  547. TEST_F(LexerTest, Keywords) {
  548. auto buffer = Lex(" fn");
  549. EXPECT_FALSE(buffer.HasErrors());
  550. EXPECT_THAT(
  551. buffer,
  552. HasTokens(llvm::ArrayRef<ExpectedToken>{
  553. {.kind = TokenKind::FnKeyword(), .column = 4, .indent_column = 4},
  554. {TokenKind::EndOfFile()},
  555. }));
  556. buffer = Lex("and or not if else for loop return var break continue _");
  557. EXPECT_FALSE(buffer.HasErrors());
  558. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  559. {TokenKind::AndKeyword()},
  560. {TokenKind::OrKeyword()},
  561. {TokenKind::NotKeyword()},
  562. {TokenKind::IfKeyword()},
  563. {TokenKind::ElseKeyword()},
  564. {TokenKind::ForKeyword()},
  565. {TokenKind::LoopKeyword()},
  566. {TokenKind::ReturnKeyword()},
  567. {TokenKind::VarKeyword()},
  568. {TokenKind::BreakKeyword()},
  569. {TokenKind::ContinueKeyword()},
  570. {TokenKind::UnderscoreKeyword()},
  571. {TokenKind::EndOfFile()},
  572. }));
  573. }
  574. TEST_F(LexerTest, Comments) {
  575. auto buffer = Lex(" ;\n // foo\n ;\n");
  576. EXPECT_FALSE(buffer.HasErrors());
  577. EXPECT_THAT(buffer,
  578. HasTokens(llvm::ArrayRef<ExpectedToken>{
  579. {.kind = TokenKind::Semi(),
  580. .line = 1,
  581. .column = 2,
  582. .indent_column = 2},
  583. {.kind = TokenKind::Semi(),
  584. .line = 3,
  585. .column = 3,
  586. .indent_column = 3},
  587. {.kind = TokenKind::EndOfFile(), .line = 3, .column = 4},
  588. }));
  589. buffer = Lex("// foo\n//\n// bar");
  590. EXPECT_FALSE(buffer.HasErrors());
  591. EXPECT_THAT(
  592. buffer,
  593. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  594. // Make sure weird characters aren't a problem.
  595. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  596. EXPECT_FALSE(buffer.HasErrors());
  597. EXPECT_THAT(
  598. buffer,
  599. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  600. // Make sure we can lex a comment at the end of the input.
  601. buffer = Lex("//");
  602. EXPECT_FALSE(buffer.HasErrors());
  603. EXPECT_THAT(
  604. buffer,
  605. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  606. }
  607. TEST_F(LexerTest, InvalidComments) {
  608. llvm::StringLiteral testcases[] = {
  609. " /// foo\n",
  610. "foo // bar\n",
  611. "//! hello",
  612. " //world",
  613. };
  614. for (llvm::StringLiteral testcase : testcases) {
  615. auto buffer = Lex(testcase);
  616. EXPECT_TRUE(buffer.HasErrors());
  617. }
  618. }
  619. TEST_F(LexerTest, Identifiers) {
  620. auto buffer = Lex(" foobar");
  621. EXPECT_FALSE(buffer.HasErrors());
  622. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  623. {.kind = TokenKind::Identifier(),
  624. .column = 4,
  625. .indent_column = 4,
  626. .text = "foobar"},
  627. {TokenKind::EndOfFile()},
  628. }));
  629. // Check different kinds of identifier character sequences.
  630. buffer = Lex("_foo_bar");
  631. EXPECT_FALSE(buffer.HasErrors());
  632. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  633. {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
  634. {TokenKind::EndOfFile()},
  635. }));
  636. buffer = Lex("foo2bar00");
  637. EXPECT_FALSE(buffer.HasErrors());
  638. EXPECT_THAT(buffer,
  639. HasTokens(llvm::ArrayRef<ExpectedToken>{
  640. {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
  641. {TokenKind::EndOfFile()},
  642. }));
  643. // Check that we can parse identifiers that start with a keyword.
  644. buffer = Lex("fnord");
  645. EXPECT_FALSE(buffer.HasErrors());
  646. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  647. {.kind = TokenKind::Identifier(), .text = "fnord"},
  648. {TokenKind::EndOfFile()},
  649. }));
  650. // Check multiple identifiers with indent and interning.
  651. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  652. EXPECT_FALSE(buffer.HasErrors());
  653. EXPECT_THAT(buffer,
  654. HasTokens(llvm::ArrayRef<ExpectedToken>{
  655. {.kind = TokenKind::Identifier(),
  656. .line = 1,
  657. .column = 4,
  658. .indent_column = 4,
  659. .text = "foo"},
  660. {.kind = TokenKind::Semi()},
  661. {.kind = TokenKind::Identifier(),
  662. .line = 1,
  663. .column = 8,
  664. .indent_column = 4,
  665. .text = "bar"},
  666. {.kind = TokenKind::Identifier(),
  667. .line = 2,
  668. .column = 1,
  669. .indent_column = 1,
  670. .text = "bar"},
  671. {.kind = TokenKind::Identifier(),
  672. .line = 3,
  673. .column = 3,
  674. .indent_column = 3,
  675. .text = "foo"},
  676. {.kind = TokenKind::Identifier(),
  677. .line = 3,
  678. .column = 7,
  679. .indent_column = 3,
  680. .text = "foo"},
  681. {.kind = TokenKind::EndOfFile(), .line = 3, .column = 10},
  682. }));
  683. }
  684. TEST_F(LexerTest, StringLiterals) {
  685. llvm::StringLiteral testcase = R"(
  686. "hello world\n"
  687. """foo
  688. test \
  689. \xAB
  690. """ trailing
  691. #"""#
  692. "\0"
  693. #"\0"foo"\1"#
  694. """x"""
  695. )";
  696. auto buffer = Lex(testcase);
  697. EXPECT_FALSE(buffer.HasErrors());
  698. EXPECT_THAT(buffer,
  699. HasTokens(llvm::ArrayRef<ExpectedToken>{
  700. {.kind = TokenKind::StringLiteral(),
  701. .line = 2,
  702. .column = 5,
  703. .indent_column = 5,
  704. .string_contents = {"hello world\n"}},
  705. {.kind = TokenKind::StringLiteral(),
  706. .line = 4,
  707. .column = 5,
  708. .indent_column = 5,
  709. .string_contents = {" test \xAB\n"}},
  710. {.kind = TokenKind::Identifier(),
  711. .line = 7,
  712. .column = 10,
  713. .indent_column = 5,
  714. .text = "trailing"},
  715. {.kind = TokenKind::StringLiteral(),
  716. .line = 9,
  717. .column = 7,
  718. .indent_column = 7,
  719. .string_contents = {"\""}},
  720. {.kind = TokenKind::StringLiteral(),
  721. .line = 11,
  722. .column = 5,
  723. .indent_column = 5,
  724. .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
  725. {.kind = TokenKind::StringLiteral(),
  726. .line = 13,
  727. .column = 5,
  728. .indent_column = 5,
  729. .string_contents = {"\\0\"foo\"\\1"}},
  730. // """x""" is three string literals, not one.
  731. {.kind = TokenKind::StringLiteral(),
  732. .line = 15,
  733. .column = 5,
  734. .indent_column = 5,
  735. .string_contents = {""}},
  736. {.kind = TokenKind::StringLiteral(),
  737. .line = 15,
  738. .column = 7,
  739. .indent_column = 5,
  740. .string_contents = {"x"}},
  741. {.kind = TokenKind::StringLiteral(),
  742. .line = 15,
  743. .column = 10,
  744. .indent_column = 5,
  745. .string_contents = {""}},
  746. {.kind = TokenKind::EndOfFile(), .line = 16, .column = 3},
  747. }));
  748. }
TEST_F(LexerTest, InvalidStringLiterals) {
  // Inputs that open a string literal but never validly terminate it.
  // NOTE(review): indentation inside the multi-line raw strings may have
  // been lost in formatting; error detection here does not depend on it.
  llvm::StringLiteral invalid[] = {
      R"(")",
      R"("""
"")", //
      R"("\)",   //
      R"("\")",  //
      R"("\\)",  //
      R"("\\\")",  //
      R"(""")",
      R"("""
)",  //
      R"("""\)",
      R"(#"""
""")",
  };
  for (llvm::StringLiteral test : invalid) {
    auto buffer = Lex(test);
    EXPECT_TRUE(buffer.HasErrors()) << "`" << test << "`";
    // We should have formed at least one error token.
    bool found_error = false;
    for (TokenizedBuffer::Token token : buffer.Tokens()) {
      if (buffer.GetKind(token) == TokenKind::Error()) {
        found_error = true;
        break;
      }
    }
    EXPECT_TRUE(found_error) << "`" << test << "`";
  }
}
  779. TEST_F(LexerTest, TypeLiterals) {
  780. llvm::StringLiteral testcase = R"(
  781. i0 i1 i20 i999999999999 i0x1
  782. u0 u1 u64 u64b
  783. f32 f80 f1 fi
  784. s1
  785. )";
  786. auto buffer = Lex(testcase);
  787. EXPECT_FALSE(buffer.HasErrors());
  788. ASSERT_THAT(buffer,
  789. HasTokens(llvm::ArrayRef<ExpectedToken>{
  790. {.kind = TokenKind::Identifier(),
  791. .line = 2,
  792. .column = 5,
  793. .indent_column = 5,
  794. .text = {"i0"}},
  795. {.kind = TokenKind::IntegerTypeLiteral(),
  796. .line = 2,
  797. .column = 8,
  798. .indent_column = 5,
  799. .text = {"i1"}},
  800. {.kind = TokenKind::IntegerTypeLiteral(),
  801. .line = 2,
  802. .column = 11,
  803. .indent_column = 5,
  804. .text = {"i20"}},
  805. {.kind = TokenKind::IntegerTypeLiteral(),
  806. .line = 2,
  807. .column = 15,
  808. .indent_column = 5,
  809. .text = {"i999999999999"}},
  810. {.kind = TokenKind::Identifier(),
  811. .line = 2,
  812. .column = 29,
  813. .indent_column = 5,
  814. .text = {"i0x1"}},
  815. {.kind = TokenKind::Identifier(),
  816. .line = 3,
  817. .column = 5,
  818. .indent_column = 5,
  819. .text = {"u0"}},
  820. {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
  821. .line = 3,
  822. .column = 8,
  823. .indent_column = 5,
  824. .text = {"u1"}},
  825. {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
  826. .line = 3,
  827. .column = 11,
  828. .indent_column = 5,
  829. .text = {"u64"}},
  830. {.kind = TokenKind::Identifier(),
  831. .line = 3,
  832. .column = 15,
  833. .indent_column = 5,
  834. .text = {"u64b"}},
  835. {.kind = TokenKind::FloatingPointTypeLiteral(),
  836. .line = 4,
  837. .column = 5,
  838. .indent_column = 5,
  839. .text = {"f32"}},
  840. {.kind = TokenKind::FloatingPointTypeLiteral(),
  841. .line = 4,
  842. .column = 9,
  843. .indent_column = 5,
  844. .text = {"f80"}},
  845. {.kind = TokenKind::FloatingPointTypeLiteral(),
  846. .line = 4,
  847. .column = 13,
  848. .indent_column = 5,
  849. .text = {"f1"}},
  850. {.kind = TokenKind::Identifier(),
  851. .line = 4,
  852. .column = 16,
  853. .indent_column = 5,
  854. .text = {"fi"}},
  855. {.kind = TokenKind::Identifier(),
  856. .line = 5,
  857. .column = 5,
  858. .indent_column = 5,
  859. .text = {"s1"}},
  860. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 3},
  861. }));
  862. auto token_i1 = buffer.Tokens().begin() + 1;
  863. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  864. auto token_i20 = buffer.Tokens().begin() + 2;
  865. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  866. auto token_i999999999999 = buffer.Tokens().begin() + 3;
  867. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ull);
  868. auto token_u1 = buffer.Tokens().begin() + 6;
  869. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  870. auto token_u64 = buffer.Tokens().begin() + 7;
  871. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  872. auto token_f32 = buffer.Tokens().begin() + 9;
  873. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  874. auto token_f80 = buffer.Tokens().begin() + 10;
  875. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  876. auto token_f1 = buffer.Tokens().begin() + 11;
  877. EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
  878. }
  879. TEST_F(LexerTest, Diagnostics) {
  880. llvm::StringLiteral testcase = R"(
  881. // Hello!
  882. var String x; // trailing comment
  883. //no space after comment
  884. "hello\bworld\xab"
  885. 0x123abc
  886. #"
  887. )";
  888. Testing::MockDiagnosticConsumer consumer;
  889. EXPECT_CALL(consumer, HandleDiagnostic(AllOf(
  890. DiagnosticAt(3, 19),
  891. DiagnosticMessage(HasSubstr("Trailing comment")))));
  892. EXPECT_CALL(consumer,
  893. HandleDiagnostic(AllOf(
  894. DiagnosticAt(4, 7),
  895. DiagnosticMessage(HasSubstr("Whitespace is required")))));
  896. EXPECT_CALL(
  897. consumer,
  898. HandleDiagnostic(AllOf(
  899. DiagnosticAt(5, 12),
  900. DiagnosticMessage(HasSubstr("Unrecognized escape sequence `b`")))));
  901. EXPECT_CALL(
  902. consumer,
  903. HandleDiagnostic(AllOf(
  904. DiagnosticAt(5, 20),
  905. DiagnosticMessage(HasSubstr("two uppercase hexadecimal digits")))));
  906. EXPECT_CALL(
  907. consumer,
  908. HandleDiagnostic(AllOf(
  909. DiagnosticAt(6, 10),
  910. DiagnosticMessage(HasSubstr("Invalid digit 'a' in hexadecimal")))));
  911. EXPECT_CALL(consumer,
  912. HandleDiagnostic(AllOf(
  913. DiagnosticAt(7, 5),
  914. DiagnosticMessage(HasSubstr("unrecognized character")))));
  915. Lex(testcase, consumer);
  916. }
  917. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  918. auto newline_offset = text.find_first_of('\n');
  919. llvm::StringRef line = text.slice(0, newline_offset);
  920. if (newline_offset != llvm::StringRef::npos) {
  921. text = text.substr(newline_offset + 1);
  922. } else {
  923. text = "";
  924. }
  925. return line.str();
  926. }
  927. TEST_F(LexerTest, Printing) {
  928. auto buffer = Lex(";");
  929. ASSERT_FALSE(buffer.HasErrors());
  930. std::string print_storage;
  931. llvm::raw_string_ostream print_stream(print_storage);
  932. buffer.Print(print_stream);
  933. llvm::StringRef print = print_stream.str();
  934. EXPECT_THAT(GetAndDropLine(print),
  935. StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
  936. "indent: 1, spelling: ';', has_trailing_space: true }"));
  937. EXPECT_THAT(GetAndDropLine(print),
  938. StrEq("token: { index: 1, kind: 'EndOfFile', line: 1, column: 2, "
  939. "indent: 1, spelling: '' }"));
  940. EXPECT_TRUE(print.empty()) << print;
  941. // Test kind padding.
  942. buffer = Lex("(;foo;)");
  943. ASSERT_FALSE(buffer.HasErrors());
  944. print_storage.clear();
  945. buffer.Print(print_stream);
  946. print = print_stream.str();
  947. EXPECT_THAT(GetAndDropLine(print),
  948. StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
  949. "1, indent: 1, spelling: '(', closing_token: 4 }"));
  950. EXPECT_THAT(GetAndDropLine(print),
  951. StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
  952. "2, indent: 1, spelling: ';' }"));
  953. EXPECT_THAT(GetAndDropLine(print),
  954. StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
  955. "3, indent: 1, spelling: 'foo', identifier: 0 }"));
  956. EXPECT_THAT(GetAndDropLine(print),
  957. StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
  958. "6, indent: 1, spelling: ';' }"));
  959. EXPECT_THAT(GetAndDropLine(print),
  960. StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
  961. "7, indent: 1, spelling: ')', opening_token: 0, "
  962. "has_trailing_space: true }"));
  963. EXPECT_THAT(GetAndDropLine(print),
  964. StrEq("token: { index: 5, kind: 'EndOfFile', line: 1, column: "
  965. "8, indent: 1, spelling: '' }"));
  966. EXPECT_TRUE(print.empty()) << print;
  967. // Test digit padding with max values of 9, 10, and 11.
  968. buffer = Lex(";\n\n\n\n\n\n\n\n\n\n ;;");
  969. ASSERT_FALSE(buffer.HasErrors());
  970. print_storage.clear();
  971. buffer.Print(print_stream);
  972. print = print_stream.str();
  973. EXPECT_THAT(
  974. GetAndDropLine(print),
  975. StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
  976. "indent: 1, spelling: ';', has_trailing_space: true }"));
  977. EXPECT_THAT(
  978. GetAndDropLine(print),
  979. StrEq("token: { index: 1, kind: 'Semi', line: 11, column: 9, "
  980. "indent: 9, spelling: ';' }"));
  981. EXPECT_THAT(
  982. GetAndDropLine(print),
  983. StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
  984. "indent: 9, spelling: ';', has_trailing_space: true }"));
  985. EXPECT_THAT(
  986. GetAndDropLine(print),
  987. StrEq("token: { index: 3, kind: 'EndOfFile', line: 11, column: 11, "
  988. "indent: 9, spelling: '' }"));
  989. EXPECT_TRUE(print.empty()) << print;
  990. }
  991. TEST_F(LexerTest, PrintingAsYaml) {
  992. // Test that we can parse this into YAML and verify line and indent data.
  993. auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  994. ASSERT_FALSE(buffer.HasErrors());
  995. std::string print_output;
  996. llvm::raw_string_ostream print_stream(print_output);
  997. buffer.Print(print_stream);
  998. print_stream.flush();
  999. EXPECT_THAT(Yaml::Value::FromText(print_output),
  1000. ElementsAre(Yaml::MappingValue{
  1001. {"token", Yaml::MappingValue{{"index", "0"},
  1002. {"kind", "Semi"},
  1003. {"line", "2"},
  1004. {"column", "2"},
  1005. {"indent", "2"},
  1006. {"spelling", ";"},
  1007. {"has_trailing_space", "true"}}},
  1008. {"token", Yaml::MappingValue{{"index", "1"},
  1009. {"kind", "Semi"},
  1010. {"line", "5"},
  1011. {"column", "1"},
  1012. {"indent", "1"},
  1013. {"spelling", ";"},
  1014. {"has_trailing_space", "true"}}},
  1015. {"token", Yaml::MappingValue{{"index", "2"},
  1016. {"kind", "Semi"},
  1017. {"line", "5"},
  1018. {"column", "3"},
  1019. {"indent", "1"},
  1020. {"spelling", ";"},
  1021. {"has_trailing_space", "true"}}},
  1022. {"token", Yaml::MappingValue{{"index", "3"},
  1023. {"kind", "EndOfFile"},
  1024. {"line", "15"},
  1025. {"column", "1"},
  1026. {"indent", "1"},
  1027. {"spelling", ""}}}}));
  1028. }
  1029. } // namespace
  1030. } // namespace Carbon