tokenized_buffer_test.cpp 41 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lex/tokenized_buffer.h"
  5. #include <gmock/gmock.h>
  6. #include <gtest/gtest.h>
  7. #include <forward_list>
  8. #include <iterator>
  9. #include "llvm/ADT/ArrayRef.h"
  10. #include "testing/base/test_raw_ostream.h"
  11. #include "toolchain/base/value_store.h"
  12. #include "toolchain/diagnostics/diagnostic_emitter.h"
  13. #include "toolchain/diagnostics/mocks.h"
  14. #include "toolchain/lex/lex.h"
  15. #include "toolchain/lex/tokenized_buffer_test_helpers.h"
  16. #include "toolchain/testing/yaml_test_helpers.h"
  17. namespace Carbon::Lex {
  18. namespace {
  19. using ::Carbon::Testing::ExpectedToken;
  20. using ::Carbon::Testing::IsSingleDiagnostic;
  21. using ::Carbon::Testing::TestRawOstream;
  22. using ::testing::_;
  23. using ::testing::ElementsAre;
  24. using ::testing::Eq;
  25. using ::testing::HasSubstr;
  26. using ::testing::Pair;
  27. namespace Yaml = ::Carbon::Testing::Yaml;
// Test fixture that lexes source text supplied directly as a string.
//
// Each Lex() call materializes the text as a fresh in-memory "file" and runs
// the lexer over it, so one test body can lex several independent buffers.
class LexerTest : public ::testing::Test {
 protected:
  // Wraps `text` in a SourceBuffer backed by an in-memory file and returns a
  // reference to it. The buffer is retained in `source_storage_` so the
  // returned reference stays valid for the lifetime of the test.
  auto GetSourceBuffer(llvm::StringRef text) -> SourceBuffer& {
    // `file_index_` makes each synthesized filename unique so repeated calls
    // don't collide inside `fs_`.
    std::string filename = llvm::formatv("test{0}.carbon", ++file_index_);
    CARBON_CHECK(fs_.addFile(filename, /*ModificationTime=*/0,
                             llvm::MemoryBuffer::getMemBuffer(text)));
    // push_front on a forward_list never moves existing elements, so
    // references returned by earlier calls remain valid.
    source_storage_.push_front(std::move(*SourceBuffer::MakeFromFile(
        fs_, filename, ConsoleDiagnosticConsumer())));
    return source_storage_.front();
  }

  // Lexes `text` and returns the resulting token buffer. Diagnostics go to
  // `consumer`, defaulting to the console.
  auto Lex(llvm::StringRef text,
           DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
      -> TokenizedBuffer {
    return Lex::Lex(value_stores_, GetSourceBuffer(text), consumer);
  }

  SharedValueStores value_stores_;
  llvm::vfs::InMemoryFileSystem fs_;
  // Counter used to generate unique in-memory filenames.
  int file_index_ = 0;
  // Owns every SourceBuffer handed out by GetSourceBuffer; see that method
  // for why a forward_list is used.
  std::forward_list<SourceBuffer> source_storage_;
};
  48. TEST_F(LexerTest, HandlesEmptyBuffer) {
  49. auto buffer = Lex("");
  50. EXPECT_FALSE(buffer.has_errors());
  51. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  52. {.kind = TokenKind::FileStart},
  53. {.kind = TokenKind::FileEnd}}));
  54. }
  55. TEST_F(LexerTest, TracksLinesAndColumns) {
  56. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  57. EXPECT_FALSE(buffer.has_errors());
  58. EXPECT_THAT(
  59. buffer,
  60. HasTokens(llvm::ArrayRef<ExpectedToken>{
  61. {.kind = TokenKind::FileStart,
  62. .line = 1,
  63. .column = 1,
  64. .indent_column = 1},
  65. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  66. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  67. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  68. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  69. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  70. {.kind = TokenKind::Identifier,
  71. .line = 4,
  72. .column = 4,
  73. .indent_column = 4,
  74. .text = "x"},
  75. {.kind = TokenKind::StringLiteral,
  76. .line = 4,
  77. .column = 5,
  78. .indent_column = 4},
  79. {.kind = TokenKind::StringLiteral,
  80. .line = 4,
  81. .column = 11,
  82. .indent_column = 4},
  83. {.kind = TokenKind::Identifier,
  84. .line = 6,
  85. .column = 6,
  86. .indent_column = 11,
  87. .text = "y"},
  88. {.kind = TokenKind::FileEnd, .line = 6, .column = 7},
  89. }));
  90. }
  91. TEST_F(LexerTest, HandlesNumericLiteral) {
  92. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  93. EXPECT_FALSE(buffer.has_errors());
  94. ASSERT_THAT(buffer,
  95. HasTokens(llvm::ArrayRef<ExpectedToken>{
  96. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  97. {.kind = TokenKind::IntLiteral,
  98. .line = 1,
  99. .column = 1,
  100. .indent_column = 1,
  101. .text = "12"},
  102. {.kind = TokenKind::Minus,
  103. .line = 1,
  104. .column = 3,
  105. .indent_column = 1},
  106. {.kind = TokenKind::IntLiteral,
  107. .line = 1,
  108. .column = 4,
  109. .indent_column = 1,
  110. .text = "578"},
  111. {.kind = TokenKind::IntLiteral,
  112. .line = 2,
  113. .column = 3,
  114. .indent_column = 3,
  115. .text = "1"},
  116. {.kind = TokenKind::IntLiteral,
  117. .line = 2,
  118. .column = 6,
  119. .indent_column = 3,
  120. .text = "2"},
  121. {.kind = TokenKind::IntLiteral,
  122. .line = 3,
  123. .column = 1,
  124. .indent_column = 1,
  125. .text = "0x12_3ABC"},
  126. {.kind = TokenKind::IntLiteral,
  127. .line = 4,
  128. .column = 1,
  129. .indent_column = 1,
  130. .text = "0b10_10_11"},
  131. {.kind = TokenKind::IntLiteral,
  132. .line = 5,
  133. .column = 1,
  134. .indent_column = 1,
  135. .text = "1_234_567"},
  136. {.kind = TokenKind::RealLiteral,
  137. .line = 6,
  138. .column = 1,
  139. .indent_column = 1,
  140. .text = "1.5e9"},
  141. {.kind = TokenKind::FileEnd, .line = 6, .column = 6},
  142. }));
  143. auto token_start = buffer.tokens().begin();
  144. auto token_12 = token_start + 1;
  145. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_12)), 12);
  146. auto token_578 = token_12 + 2;
  147. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_578)), 578);
  148. auto token_1 = token_578 + 1;
  149. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_1)), 1);
  150. auto token_2 = token_1 + 1;
  151. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_2)), 2);
  152. auto token_0x12_3abc = token_2 + 1;
  153. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_0x12_3abc)),
  154. 0x12'3abc);
  155. auto token_0b10_10_11 = token_0x12_3abc + 1;
  156. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_0b10_10_11)),
  157. 0b10'10'11);
  158. auto token_1_234_567 = token_0b10_10_11 + 1;
  159. EXPECT_EQ(value_stores_.ints().Get(buffer.GetIntLiteral(*token_1_234_567)),
  160. 1'234'567);
  161. auto token_1_5e9 = token_1_234_567 + 1;
  162. auto value_1_5e9 =
  163. value_stores_.reals().Get(buffer.GetRealLiteral(*token_1_5e9));
  164. EXPECT_EQ(value_1_5e9.mantissa.getZExtValue(), 15);
  165. EXPECT_EQ(value_1_5e9.exponent.getSExtValue(), 8);
  166. EXPECT_EQ(value_1_5e9.is_decimal, true);
  167. }
// Malformed numeric literals lex as single Error tokens with the full text of
// the bad literal, while valid literals on the same line lex normally.
TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  EXPECT_TRUE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // "14x" is one error token, not an int followed by an
                  // identifier.
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 1,
                   .indent_column = 1,
                   .text = "14x"},
                  {.kind = TokenKind::IntLiteral,
                   .line = 1,
                   .column = 5,
                   .indent_column = 1,
                   .text = "15_49"},
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 11,
                   .indent_column = 1,
                   .text = "0x3.5q"},
                  {.kind = TokenKind::RealLiteral,
                   .line = 1,
                   .column = 18,
                   .indent_column = 1,
                   .text = "0x3_4.5_6"},
                  {.kind = TokenKind::Error,
                   .line = 1,
                   .column = 28,
                   .indent_column = 1,
                   .text = "0ops"},
                  {.kind = TokenKind::FileEnd, .line = 1, .column = 32},
              }));
}
// Checks where the lexer draws the boundary of a numeric literal: which
// trailing periods, operators, and exponents are absorbed into the literal
// versus split off as separate tokens (and which combinations are errors).
TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  llvm::StringLiteral source_text = R"(
1.
.2
3.+foo
4.0-bar
5.0e+123+456
6.0e+1e+2
1e7
8..10
9.0.9.5
10.foo
11.0.foo
12e+1
13._
)";
  auto buffer = Lex(source_text);
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          // "1." splits: a trailing period is not part of the
                          // literal.
                          {.kind = TokenKind::IntLiteral, .text = "1"},
                          {.kind = TokenKind::Period},
                          // newline
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "3"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "4.0"},
                          {.kind = TokenKind::Minus},
                          {.kind = TokenKind::Identifier, .text = "bar"},
                          // newline
                          // Only the first exponent's "+digits" is absorbed.
                          {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "456"},
                          // newline
                          // A second "e" inside the literal is an error.
                          {.kind = TokenKind::Error, .text = "6.0e+1e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "2"},
                          // newline
                          {.kind = TokenKind::Error, .text = "1e7"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "8"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::IntLiteral, .text = "10"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "9.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::RealLiteral, .text = "9.5"},
                          // newline
                          {.kind = TokenKind::Error, .text = "10.foo"},
                          // newline
                          {.kind = TokenKind::RealLiteral, .text = "11.0"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Identifier, .text = "foo"},
                          // newline
                          {.kind = TokenKind::Error, .text = "12e"},
                          {.kind = TokenKind::Plus},
                          {.kind = TokenKind::IntLiteral, .text = "1"},
                          // newline
                          {.kind = TokenKind::IntLiteral, .text = "13"},
                          {.kind = TokenKind::Period},
                          {.kind = TokenKind::Underscore},
                          // newline
                          {.kind = TokenKind::FileEnd},
                      }));
}
// Garbage byte sequences (including multi-byte UTF-8 and an embedded NUL)
// lex as Error tokens without derailing the valid tokens around them.
TEST_F(LexerTest, HandlesGarbageCharacters) {
  constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  // sizeof - 1 drops the array's trailing NUL terminator while keeping the
  // embedded NUL byte on the second line in the lexed text.
  auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart, .line = 1, .column = 1},
          {.kind = TokenKind::Error,
           .line = 1,
           .column = 1,
           // 💩 takes 4 bytes, and we count column as bytes offset.
           .text = llvm::StringRef("$$💩", 6)},
          {.kind = TokenKind::Minus, .line = 1, .column = 7},
          {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
          // newline
          // Explicit length keeps the embedded NUL inside the expected text.
          {.kind = TokenKind::Error,
           .line = 2,
           .column = 1,
           .text = llvm::StringRef("$\0$", 3)},
          {.kind = TokenKind::IntLiteral, .line = 2, .column = 4, .text = "12"},
          {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
          // newline
          {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
          {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
          // newline
          {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
          {.kind = TokenKind::FileEnd, .line = 4, .column = 3},
      }));
}
TEST_F(LexerTest, Symbols) {
  // We don't need to exhaustively test symbols here as they're handled with
  // common code, but we want to check specific patterns to verify things like
  // max-munch rule and handling of interesting symbols.

  // Max-munch: "<<<" is "<<" then "<".
  auto buffer = Lex("<<<");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::LessLess},
                          {.kind = TokenKind::Less},
                          {.kind = TokenKind::FileEnd},
                      }));

  // "<<=" is taken greedily, leaving ">>".
  buffer = Lex("<<=>>");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::LessLessEqual},
                          {.kind = TokenKind::GreaterGreater},
                          {.kind = TokenKind::FileEnd},
                      }));

  // Whitespace separates "<=>" into its own token.
  buffer = Lex("< <=> >");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::Less},
                          {.kind = TokenKind::LessEqualGreater},
                          {.kind = TokenKind::Greater},
                          {.kind = TokenKind::FileEnd},
                      }));

  // A run of distinct single-character symbols.
  buffer = Lex("\\/?@&^!");
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::Backslash},
                          {.kind = TokenKind::Slash},
                          {.kind = TokenKind::Question},
                          {.kind = TokenKind::At},
                          {.kind = TokenKind::Amp},
                          {.kind = TokenKind::Caret},
                          {.kind = TokenKind::Exclaim},
                          {.kind = TokenKind::FileEnd},
                      }));
}
  346. TEST_F(LexerTest, Parens) {
  347. auto buffer = Lex("()");
  348. EXPECT_FALSE(buffer.has_errors());
  349. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  350. {.kind = TokenKind::FileStart},
  351. {.kind = TokenKind::OpenParen},
  352. {.kind = TokenKind::CloseParen},
  353. {.kind = TokenKind::FileEnd},
  354. }));
  355. buffer = Lex("((()()))");
  356. EXPECT_FALSE(buffer.has_errors());
  357. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  358. {.kind = TokenKind::FileStart},
  359. {.kind = TokenKind::OpenParen},
  360. {.kind = TokenKind::OpenParen},
  361. {.kind = TokenKind::OpenParen},
  362. {.kind = TokenKind::CloseParen},
  363. {.kind = TokenKind::OpenParen},
  364. {.kind = TokenKind::CloseParen},
  365. {.kind = TokenKind::CloseParen},
  366. {.kind = TokenKind::CloseParen},
  367. {.kind = TokenKind::FileEnd},
  368. }));
  369. }
  370. TEST_F(LexerTest, CurlyBraces) {
  371. auto buffer = Lex("{}");
  372. EXPECT_FALSE(buffer.has_errors());
  373. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  374. {.kind = TokenKind::FileStart},
  375. {.kind = TokenKind::OpenCurlyBrace},
  376. {.kind = TokenKind::CloseCurlyBrace},
  377. {.kind = TokenKind::FileEnd},
  378. }));
  379. buffer = Lex("{{{}{}}}");
  380. EXPECT_FALSE(buffer.has_errors());
  381. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  382. {.kind = TokenKind::FileStart},
  383. {.kind = TokenKind::OpenCurlyBrace},
  384. {.kind = TokenKind::OpenCurlyBrace},
  385. {.kind = TokenKind::OpenCurlyBrace},
  386. {.kind = TokenKind::CloseCurlyBrace},
  387. {.kind = TokenKind::OpenCurlyBrace},
  388. {.kind = TokenKind::CloseCurlyBrace},
  389. {.kind = TokenKind::CloseCurlyBrace},
  390. {.kind = TokenKind::CloseCurlyBrace},
  391. {.kind = TokenKind::FileEnd},
  392. }));
  393. }
// Checks that open/close delimiter tokens are cross-linked: each opener's
// GetMatchedClosingToken is its closer, and vice versa.
TEST_F(LexerTest, MatchingGroups) {
  {
    // Two adjacent, non-nested groups: "(){}".
    TokenizedBuffer buffer = Lex("(){}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = ++buffer.tokens().begin();  // Pre-increment skips FileStart.
    auto open_paren_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    auto open_curly_token = *it++;
    auto close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
    EXPECT_EQ(buffer.tokens().end(), it);
  }

  {
    // Mixed and nested groups; tokens are consumed strictly in order below.
    TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
    ASSERT_FALSE(buffer.has_errors());
    auto it = ++buffer.tokens().begin();  // Pre-increment skips FileStart.

    // "({x})": a curly group inside a paren group.
    auto open_paren_token = *it++;
    auto open_curly_token = *it++;
    ASSERT_EQ("x",
              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
    auto close_curly_token = *it++;
    auto close_paren_token = *it++;
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));

    // "{(y)}": a paren group inside a curly group (locals are reused).
    open_curly_token = *it++;
    open_paren_token = *it++;
    ASSERT_EQ("y",
              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
    close_paren_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));

    // "{{((z))}}": doubled nesting of both delimiter kinds.
    open_curly_token = *it++;
    auto inner_open_curly_token = *it++;
    open_paren_token = *it++;
    auto inner_open_paren_token = *it++;
    ASSERT_EQ("z",
              value_stores_.identifiers().Get(buffer.GetIdentifier(*it++)));
    auto inner_close_paren_token = *it++;
    close_paren_token = *it++;
    auto inner_close_curly_token = *it++;
    close_curly_token = *it++;
    EXPECT_EQ(close_curly_token,
              buffer.GetMatchedClosingToken(open_curly_token));
    EXPECT_EQ(open_curly_token,
              buffer.GetMatchedOpeningToken(close_curly_token));
    EXPECT_EQ(inner_close_curly_token,
              buffer.GetMatchedClosingToken(inner_open_curly_token));
    EXPECT_EQ(inner_open_curly_token,
              buffer.GetMatchedOpeningToken(inner_close_curly_token));
    EXPECT_EQ(close_paren_token,
              buffer.GetMatchedClosingToken(open_paren_token));
    EXPECT_EQ(open_paren_token,
              buffer.GetMatchedOpeningToken(close_paren_token));
    EXPECT_EQ(inner_close_paren_token,
              buffer.GetMatchedClosingToken(inner_open_paren_token));
    EXPECT_EQ(inner_open_paren_token,
              buffer.GetMatchedOpeningToken(inner_close_paren_token));

    auto eof_token = *it++;
    EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::FileEnd);
    EXPECT_EQ(buffer.tokens().end(), it);
  }
}
// Checks recovery from unbalanced delimiters: unmatched delimiters become
// Error tokens, and missing closers are synthesized as recovery tokens.
TEST_F(LexerTest, MismatchedGroups) {
  // A lone opener lexes as an error.
  auto buffer = Lex("{");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::Error, .text = "{"},
                          {.kind = TokenKind::FileEnd},
                      }));

  // A lone closer lexes as an error.
  buffer = Lex("}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
                          {.kind = TokenKind::FileStart},
                          {.kind = TokenKind::Error, .text = "}"},
                          {.kind = TokenKind::FileEnd},
                      }));

  // "{(}": a CloseParen recovery token is inserted before the "}" so the
  // groups still nest.
  buffer = Lex("{(}");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart},
          {.kind = TokenKind::OpenCurlyBrace, .column = 1},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
          {.kind = TokenKind::CloseCurlyBrace, .column = 3},
          {.kind = TokenKind::FileEnd},
      }));

  // ")({)": the leading ")" is an error, and a CloseCurlyBrace recovery
  // token is inserted before the final ")".
  buffer = Lex(")({)");
  EXPECT_TRUE(buffer.has_errors());
  EXPECT_THAT(
      buffer,
      HasTokens(llvm::ArrayRef<ExpectedToken>{
          {.kind = TokenKind::FileStart},
          {.kind = TokenKind::Error, .column = 1, .text = ")"},
          {.kind = TokenKind::OpenParen, .column = 2},
          {.kind = TokenKind::OpenCurlyBrace, .column = 3},
          {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
          {.kind = TokenKind::CloseParen, .column = 4},
          {.kind = TokenKind::FileEnd},
      }));
}
  519. TEST_F(LexerTest, Whitespace) {
  520. auto buffer = Lex("{( } {(");
  521. // Whether there should be whitespace before/after each token.
  522. bool space[] = {true,
  523. // start-of-file
  524. true,
  525. // {
  526. false,
  527. // (
  528. true,
  529. // inserted )
  530. true,
  531. // }
  532. true,
  533. // error {
  534. false,
  535. // error (
  536. true,
  537. // EOF
  538. false};
  539. int pos = 0;
  540. for (TokenIndex token : buffer.tokens()) {
  541. SCOPED_TRACE(
  542. llvm::formatv("Token #{0}: '{1}'", token, buffer.GetTokenText(token)));
  543. ASSERT_LT(pos, std::size(space));
  544. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  545. ++pos;
  546. ASSERT_LT(pos, std::size(space));
  547. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  548. }
  549. ASSERT_EQ(pos + 1, std::size(space));
  550. }
  551. TEST_F(LexerTest, Keywords) {
  552. TokenKind keywords[] = {
  553. #define CARBON_TOKEN(TokenName)
  554. #define CARBON_KEYWORD_TOKEN(TokenName, ...) TokenKind::TokenName,
  555. #include "toolchain/lex/token_kind.def"
  556. };
  557. for (const auto& keyword : keywords) {
  558. auto buffer = Lex(keyword.fixed_spelling());
  559. EXPECT_FALSE(buffer.has_errors());
  560. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  561. {.kind = TokenKind::FileStart},
  562. {.kind = keyword, .column = 1, .indent_column = 1},
  563. {.kind = TokenKind::FileEnd},
  564. }));
  565. }
  566. }
  567. TEST_F(LexerTest, Comments) {
  568. auto buffer = Lex(" ;\n // foo\n ;\n");
  569. EXPECT_FALSE(buffer.has_errors());
  570. EXPECT_THAT(
  571. buffer,
  572. HasTokens(llvm::ArrayRef<ExpectedToken>{
  573. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  574. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  575. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  576. {.kind = TokenKind::FileEnd, .line = 3, .column = 4},
  577. }));
  578. buffer = Lex("// foo\n//\n// bar");
  579. EXPECT_FALSE(buffer.has_errors());
  580. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  581. {.kind = TokenKind::FileStart},
  582. {.kind = TokenKind::FileEnd}}));
  583. // Make sure weird characters aren't a problem.
  584. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  585. EXPECT_FALSE(buffer.has_errors());
  586. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  587. {.kind = TokenKind::FileStart},
  588. {.kind = TokenKind::FileEnd}}));
  589. // Make sure we can lex a comment at the end of the input.
  590. buffer = Lex("//");
  591. EXPECT_FALSE(buffer.has_errors());
  592. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  593. {.kind = TokenKind::FileStart},
  594. {.kind = TokenKind::FileEnd}}));
  595. }
  596. TEST_F(LexerTest, InvalidComments) {
  597. llvm::StringLiteral testcases[] = {
  598. " /// foo\n",
  599. "foo // bar\n",
  600. "//! hello",
  601. " //world",
  602. };
  603. for (llvm::StringLiteral testcase : testcases) {
  604. auto buffer = Lex(testcase);
  605. EXPECT_TRUE(buffer.has_errors());
  606. }
  607. }
  608. TEST_F(LexerTest, Identifiers) {
  609. auto buffer = Lex(" foobar");
  610. EXPECT_FALSE(buffer.has_errors());
  611. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  612. {.kind = TokenKind::FileStart},
  613. {.kind = TokenKind::Identifier,
  614. .column = 4,
  615. .indent_column = 4,
  616. .text = "foobar"},
  617. {.kind = TokenKind::FileEnd},
  618. }));
  619. // Check different kinds of identifier character sequences.
  620. buffer = Lex("_foo_bar");
  621. EXPECT_FALSE(buffer.has_errors());
  622. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  623. {.kind = TokenKind::FileStart},
  624. {.kind = TokenKind::Identifier, .text = "_foo_bar"},
  625. {.kind = TokenKind::FileEnd},
  626. }));
  627. buffer = Lex("foo2bar00");
  628. EXPECT_FALSE(buffer.has_errors());
  629. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  630. {.kind = TokenKind::FileStart},
  631. {.kind = TokenKind::Identifier, .text = "foo2bar00"},
  632. {.kind = TokenKind::FileEnd},
  633. }));
  634. // Check that we can parse identifiers that start with a keyword.
  635. buffer = Lex("fnord");
  636. EXPECT_FALSE(buffer.has_errors());
  637. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  638. {.kind = TokenKind::FileStart},
  639. {.kind = TokenKind::Identifier, .text = "fnord"},
  640. {.kind = TokenKind::FileEnd},
  641. }));
  642. // Check multiple identifiers with indent and interning.
  643. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  644. EXPECT_FALSE(buffer.has_errors());
  645. EXPECT_THAT(buffer,
  646. HasTokens(llvm::ArrayRef<ExpectedToken>{
  647. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  648. {.kind = TokenKind::Identifier,
  649. .line = 1,
  650. .column = 4,
  651. .indent_column = 4,
  652. .text = "foo"},
  653. {.kind = TokenKind::Semi},
  654. {.kind = TokenKind::Identifier,
  655. .line = 1,
  656. .column = 8,
  657. .indent_column = 4,
  658. .text = "bar"},
  659. {.kind = TokenKind::Identifier,
  660. .line = 2,
  661. .column = 1,
  662. .indent_column = 1,
  663. .text = "bar"},
  664. {.kind = TokenKind::Identifier,
  665. .line = 3,
  666. .column = 3,
  667. .indent_column = 3,
  668. .text = "foo"},
  669. {.kind = TokenKind::Identifier,
  670. .line = 3,
  671. .column = 7,
  672. .indent_column = 3,
  673. .text = "foo"},
  674. {.kind = TokenKind::FileEnd, .line = 3, .column = 10},
  675. }));
  676. }
// Lexes the assorted string literal forms: simple, block ('''), raw (#"..."#)
// with and without embedded quotes/NULs, and adjacent empty literals.
//
// NOTE(review): the expected line/column values below imply blank separator
// lines and 4+ spaces of indentation inside this raw string literal that
// appear to have been lost in this copy of the file (e.g. the first literal
// is expected at line 2, column 5, but the raw string here has no indent and
// no blank lines). Verify the testcase text against the upstream source.
TEST_F(LexerTest, StringLiterals) {
  llvm::StringLiteral testcase = R"(
"hello world\n"
'''foo
test \
\xAB
''' trailing
#"""#
"\0"
#"\0"foo"\1"#
"""x"""
)";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  EXPECT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // Simple literal with an escape.
                  {.kind = TokenKind::StringLiteral,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = {"hello world\n"}},
                  // Block literal; the trailing backslash joins two content
                  // lines.
                  {.kind = TokenKind::StringLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = {" test \xAB\n"}},
                  // Identifier on the block literal's terminator line.
                  {.kind = TokenKind::Identifier,
                   .line = 7,
                   .column = 10,
                   .indent_column = 5,
                   .text = "trailing"},
                  // Raw literal containing a double quote.
                  {.kind = TokenKind::StringLiteral,
                   .line = 9,
                   .column = 7,
                   .indent_column = 7,
                   .value_stores = &value_stores_,
                   .string_contents = {"\""}},
                  // Literal containing a NUL escape.
                  {.kind = TokenKind::StringLiteral,
                   .line = 11,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
                  // Raw literal: backslashes are not escapes here.
                  {.kind = TokenKind::StringLiteral,
                   .line = 13,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = {"\\0\"foo\"\\1"}},
                  // """x""" is three string literals, not one invalid
                  // attempt at a block string literal.
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 5,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = {""}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 7,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = {"x"}},
                  {.kind = TokenKind::StringLiteral,
                   .line = 15,
                   .column = 10,
                   .indent_column = 5,
                   .value_stores = &value_stores_,
                   .string_contents = {""}},
                  {.kind = TokenKind::FileEnd, .line = 16, .column = 3},
              }));
}
  752. TEST_F(LexerTest, InvalidStringLiterals) {
  753. llvm::StringLiteral invalid[] = {
  754. // clang-format off
  755. R"(")",
  756. R"('''
  757. '')",
  758. R"("\)",
  759. R"("\")",
  760. R"("\\)",
  761. R"("\\\")",
  762. R"(''')",
  763. R"('''
  764. )",
  765. R"('''\)",
  766. R"(#'''
  767. ''')",
  768. // clang-format on
  769. };
  770. for (llvm::StringLiteral test : invalid) {
  771. SCOPED_TRACE(test);
  772. auto buffer = Lex(test);
  773. EXPECT_TRUE(buffer.has_errors());
  774. // We should have formed at least one error token.
  775. bool found_error = false;
  776. for (TokenIndex token : buffer.tokens()) {
  777. if (buffer.GetKind(token) == TokenKind::Error) {
  778. found_error = true;
  779. break;
  780. }
  781. }
  782. EXPECT_TRUE(found_error);
  783. }
  784. }
// Verifies which spellings lex as integer/unsigned/float type literals versus
// plain identifiers, and that each type literal records its bit width.
TEST_F(LexerTest, TypeLiterals) {
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";

  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  {.kind = TokenKind::FileStart, .line = 1, .column = 1},
                  // `i0` is expected to lex as an identifier, not a type
                  // literal.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // A very large decimal width is still a single type
                  // literal token.
                  {.kind = TokenKind::IntTypeLiteral,
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // A non-decimal suffix (`0x1`) makes the whole spelling an
                  // identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  // `u0` mirrors `i0`: an identifier rather than a literal.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntTypeLiteral,
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // Trailing non-digit (`b`) turns it into an identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  // Note `f1` IS a float type literal here, unlike `i0`/`u0`.
                  {.kind = TokenKind::FloatTypeLiteral,
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix at all.
                  {.kind = TokenKind::Identifier,
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::FileEnd, .line = 6, .column = 3},
              }));

  // Returns the recorded bit width of the token at `token_index` in the
  // stream (indices include the FileStart token at 0).
  auto type_size = [&](int token_index) {
    auto token = buffer.tokens().begin()[token_index];
    return value_stores_.ints().Get(buffer.GetTypeLiteralSize(token));
  };
  EXPECT_EQ(type_size(2), 1);               // i1
  EXPECT_EQ(type_size(3), 20);              // i20
  EXPECT_EQ(type_size(4), 999999999999ULL); // i999999999999
  EXPECT_EQ(type_size(7), 1);               // u1
  EXPECT_EQ(type_size(8), 64);              // u64
  EXPECT_EQ(type_size(10), 32);             // f32
  EXPECT_EQ(type_size(11), 80);             // f80
  EXPECT_EQ(type_size(12), 1);              // f1
}
  882. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  883. std::string code = "i";
  884. constexpr int Count = 10000;
  885. code.append(Count, '9');
  886. Testing::MockDiagnosticConsumer consumer;
  887. EXPECT_CALL(consumer,
  888. HandleDiagnostic(IsSingleDiagnostic(
  889. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  890. HasSubstr(llvm::formatv(" {0} ", Count)))));
  891. auto buffer = Lex(code, consumer);
  892. EXPECT_TRUE(buffer.has_errors());
  893. ASSERT_THAT(buffer,
  894. HasTokens(llvm::ArrayRef<ExpectedToken>{
  895. {.kind = TokenKind::FileStart, .line = 1, .column = 1},
  896. {.kind = TokenKind::Error,
  897. .line = 1,
  898. .column = 1,
  899. .indent_column = 1,
  900. .text = code},
  901. {.kind = TokenKind::FileEnd, .line = 1, .column = Count + 2},
  902. }));
  903. }
// A `//` comment that follows code on the same line is diagnosed as a
// trailing comment. The expected position (line 3, column 19) is the `//`
// after `var String x; ` given the 4-space indent inside the raw string.
TEST_F(LexerTest, DiagnosticTrailingComment) {
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
  )";

  Testing::MockDiagnosticConsumer consumer;
  EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
                            DiagnosticKind::TrailingComment,
                            DiagnosticLevel::Error, 3, 19, _)));
  Lex(testcase, consumer);
}
  915. TEST_F(LexerTest, DiagnosticWhitespace) {
  916. Testing::MockDiagnosticConsumer consumer;
  917. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  918. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  919. DiagnosticLevel::Error, 1, 3, _)));
  920. Lex("//no space after comment", consumer);
  921. }
  922. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  923. Testing::MockDiagnosticConsumer consumer;
  924. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  925. DiagnosticKind::UnknownEscapeSequence,
  926. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  927. Lex(R"("hello\bworld")", consumer);
  928. }
  929. TEST_F(LexerTest, DiagnosticBadHex) {
  930. Testing::MockDiagnosticConsumer consumer;
  931. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  932. DiagnosticKind::HexadecimalEscapeMissingDigits,
  933. DiagnosticLevel::Error, 1, 9, _)));
  934. Lex(R"("hello\xabworld")", consumer);
  935. }
  936. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  937. Testing::MockDiagnosticConsumer consumer;
  938. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  939. DiagnosticKind::InvalidDigit,
  940. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  941. Lex("0x123abc", consumer);
  942. }
  943. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  944. Testing::MockDiagnosticConsumer consumer;
  945. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  946. DiagnosticKind::UnterminatedString,
  947. DiagnosticLevel::Error, 1, 1, _)));
  948. Lex(R"(#" ")", consumer);
  949. }
  950. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  951. Testing::MockDiagnosticConsumer consumer;
  952. EXPECT_CALL(consumer, HandleDiagnostic(IsSingleDiagnostic(
  953. DiagnosticKind::UnrecognizedCharacters,
  954. DiagnosticLevel::Error, 1, 1, _)));
  955. Lex("\b", consumer);
  956. }
TEST_F(LexerTest, PrintingOutputYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // Input: blank line, ` ;` on line 2, then `; ;` on line 5, then a run of
  // blank lines so FileEnd lands on line 15.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  TestRawOstream print_stream;
  buffer.Print(print_stream);
  EXPECT_THAT(
      Yaml::Value::FromText(print_stream.TakeStr()),
      IsYaml(ElementsAre(Yaml::Sequence(ElementsAre(Yaml::Mapping(ElementsAre(
          Pair("filename", source_storage_.front().filename().str()),
          Pair("tokens",
               Yaml::Sequence(ElementsAre(
                   // FileStart: zero-width, at 1:1.
                   Yaml::Mapping(ElementsAre(
                       Pair("index", "0"), Pair("kind", "FileStart"),
                       Pair("line", "1"), Pair("column", "1"),
                       Pair("indent", "1"), Pair("spelling", ""),
                       Pair("has_trailing_space", "true"))),
                   // The lone `;` on line 2, indented by one space.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "1"), Pair("kind", "Semi"),
                                   Pair("line", "2"), Pair("column", "2"),
                                   Pair("indent", "2"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   // First `;` of `; ;` on line 5, at column 1.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "2"), Pair("kind", "Semi"),
                                   Pair("line", "5"), Pair("column", "1"),
                                   Pair("indent", "1"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   // Second `;` of `; ;` — same line/indent, column 3.
                   Yaml::Mapping(
                       ElementsAre(Pair("index", "3"), Pair("kind", "Semi"),
                                   Pair("line", "5"), Pair("column", "3"),
                                   Pair("indent", "1"), Pair("spelling", ";"),
                                   Pair("has_trailing_space", "true"))),
                   // FileEnd after the trailing blank lines; note no
                   // `has_trailing_space` entry on the final token.
                   Yaml::Mapping(ElementsAre(
                       Pair("index", "4"), Pair("kind", "FileEnd"),
                       Pair("line", "15"), Pair("column", "1"),
                       Pair("indent", "1"), Pair("spelling", "")))))))))))));
}
  994. } // namespace
  995. } // namespace Carbon::Lex