tokenized_buffer_test.cpp 40 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/lexer/tokenized_buffer.h"

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <algorithm>
#include <iterator>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"
#include "toolchain/common/yaml_test_helpers.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/diagnostics/mocks.h"
#include "toolchain/lexer/tokenized_buffer_test_helpers.h"
  19. namespace Carbon::Testing {
  20. namespace {
  21. using ::testing::ElementsAre;
  22. using ::testing::Eq;
  23. using ::testing::HasSubstr;
  24. using ::testing::StrEq;
  25. class LexerTest : public ::testing::Test {
  26. protected:
  27. auto GetSourceBuffer(llvm::Twine text) -> SourceBuffer& {
  28. source_storage.push_back(
  29. std::move(*SourceBuffer::CreateFromText(text.str())));
  30. return source_storage.back();
  31. }
  32. auto Lex(llvm::Twine text,
  33. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  34. -> TokenizedBuffer {
  35. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  36. }
  37. llvm::SmallVector<SourceBuffer, 16> source_storage;
  38. };
  39. TEST_F(LexerTest, HandlesEmptyBuffer) {
  40. auto buffer = Lex("");
  41. EXPECT_FALSE(buffer.HasErrors());
  42. EXPECT_THAT(
  43. buffer,
  44. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  45. }
  46. TEST_F(LexerTest, TracksLinesAndColumns) {
  47. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" \"\"\"baz\n a\n \"\"\" y");
  48. EXPECT_FALSE(buffer.HasErrors());
  49. EXPECT_THAT(buffer,
  50. HasTokens(llvm::ArrayRef<ExpectedToken>{
  51. {.kind = TokenKind::Semi(),
  52. .line = 2,
  53. .column = 3,
  54. .indent_column = 3},
  55. {.kind = TokenKind::Semi(),
  56. .line = 2,
  57. .column = 4,
  58. .indent_column = 3},
  59. {.kind = TokenKind::Semi(),
  60. .line = 3,
  61. .column = 4,
  62. .indent_column = 4},
  63. {.kind = TokenKind::Semi(),
  64. .line = 3,
  65. .column = 5,
  66. .indent_column = 4},
  67. {.kind = TokenKind::Semi(),
  68. .line = 3,
  69. .column = 6,
  70. .indent_column = 4},
  71. {.kind = TokenKind::Identifier(),
  72. .line = 4,
  73. .column = 4,
  74. .indent_column = 4,
  75. .text = "x"},
  76. {.kind = TokenKind::StringLiteral(),
  77. .line = 4,
  78. .column = 5,
  79. .indent_column = 4},
  80. {.kind = TokenKind::StringLiteral(),
  81. .line = 4,
  82. .column = 11,
  83. .indent_column = 4},
  84. {.kind = TokenKind::Identifier(),
  85. .line = 6,
  86. .column = 6,
  87. .indent_column = 11,
  88. .text = "y"},
  89. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 7},
  90. }));
  91. }
  92. TEST_F(LexerTest, HandlesNumericLiteral) {
  93. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  94. EXPECT_FALSE(buffer.HasErrors());
  95. ASSERT_THAT(buffer,
  96. HasTokens(llvm::ArrayRef<ExpectedToken>{
  97. {.kind = TokenKind::IntegerLiteral(),
  98. .line = 1,
  99. .column = 1,
  100. .indent_column = 1,
  101. .text = "12"},
  102. {.kind = TokenKind::Minus(),
  103. .line = 1,
  104. .column = 3,
  105. .indent_column = 1},
  106. {.kind = TokenKind::IntegerLiteral(),
  107. .line = 1,
  108. .column = 4,
  109. .indent_column = 1,
  110. .text = "578"},
  111. {.kind = TokenKind::IntegerLiteral(),
  112. .line = 2,
  113. .column = 3,
  114. .indent_column = 3,
  115. .text = "1"},
  116. {.kind = TokenKind::IntegerLiteral(),
  117. .line = 2,
  118. .column = 6,
  119. .indent_column = 3,
  120. .text = "2"},
  121. {.kind = TokenKind::IntegerLiteral(),
  122. .line = 3,
  123. .column = 1,
  124. .indent_column = 1,
  125. .text = "0x12_3ABC"},
  126. {.kind = TokenKind::IntegerLiteral(),
  127. .line = 4,
  128. .column = 1,
  129. .indent_column = 1,
  130. .text = "0b10_10_11"},
  131. {.kind = TokenKind::IntegerLiteral(),
  132. .line = 5,
  133. .column = 1,
  134. .indent_column = 1,
  135. .text = "1_234_567"},
  136. {.kind = TokenKind::RealLiteral(),
  137. .line = 6,
  138. .column = 1,
  139. .indent_column = 1,
  140. .text = "1.5e9"},
  141. {.kind = TokenKind::EndOfFile(), .line = 6, .column = 6},
  142. }));
  143. auto token_12 = buffer.Tokens().begin();
  144. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  145. auto token_578 = buffer.Tokens().begin() + 2;
  146. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  147. auto token_1 = buffer.Tokens().begin() + 3;
  148. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  149. auto token_2 = buffer.Tokens().begin() + 4;
  150. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  151. auto token_0x12_3abc = buffer.Tokens().begin() + 5;
  152. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  153. auto token_0b10_10_11 = buffer.Tokens().begin() + 6;
  154. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  155. auto token_1_234_567 = buffer.Tokens().begin() + 7;
  156. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  157. auto token_1_5e9 = buffer.Tokens().begin() + 8;
  158. auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  159. EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  160. EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  161. EXPECT_EQ(value_1_5e9.IsDecimal(), true);
  162. }
  163. TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  164. auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  165. EXPECT_TRUE(buffer.HasErrors());
  166. ASSERT_THAT(buffer,
  167. HasTokens(llvm::ArrayRef<ExpectedToken>{
  168. {.kind = TokenKind::Error(),
  169. .line = 1,
  170. .column = 1,
  171. .indent_column = 1,
  172. .text = "14x"},
  173. {.kind = TokenKind::IntegerLiteral(),
  174. .line = 1,
  175. .column = 5,
  176. .indent_column = 1,
  177. .text = "15_49"},
  178. {.kind = TokenKind::Error(),
  179. .line = 1,
  180. .column = 11,
  181. .indent_column = 1,
  182. .text = "0x3.5q"},
  183. {.kind = TokenKind::RealLiteral(),
  184. .line = 1,
  185. .column = 18,
  186. .indent_column = 1,
  187. .text = "0x3_4.5_6"},
  188. {.kind = TokenKind::Error(),
  189. .line = 1,
  190. .column = 28,
  191. .indent_column = 1,
  192. .text = "0ops"},
  193. {.kind = TokenKind::EndOfFile(), .line = 1, .column = 32},
  194. }));
  195. }
  196. TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  197. llvm::StringLiteral source_text = R"(
  198. 1.
  199. .2
  200. 3.+foo
  201. 4.0-bar
  202. 5.0e+123+456
  203. 6.0e+1e+2
  204. 1e7
  205. 8..10
  206. 9.0.9.5
  207. 10.foo
  208. 11.0.foo
  209. 12e+1
  210. 13._
  211. )";
  212. auto buffer = Lex(source_text);
  213. EXPECT_TRUE(buffer.HasErrors());
  214. EXPECT_THAT(buffer,
  215. HasTokens(llvm::ArrayRef<ExpectedToken>{
  216. {.kind = TokenKind::IntegerLiteral(), .text = "1"},
  217. {.kind = TokenKind::Period()},
  218. // newline
  219. {.kind = TokenKind::Period()},
  220. {.kind = TokenKind::IntegerLiteral(), .text = "2"},
  221. // newline
  222. {.kind = TokenKind::IntegerLiteral(), .text = "3"},
  223. {.kind = TokenKind::Period()},
  224. {.kind = TokenKind::Plus()},
  225. {.kind = TokenKind::Identifier(), .text = "foo"},
  226. // newline
  227. {.kind = TokenKind::RealLiteral(), .text = "4.0"},
  228. {.kind = TokenKind::Minus()},
  229. {.kind = TokenKind::Identifier(), .text = "bar"},
  230. // newline
  231. {.kind = TokenKind::RealLiteral(), .text = "5.0e+123"},
  232. {.kind = TokenKind::Plus()},
  233. {.kind = TokenKind::IntegerLiteral(), .text = "456"},
  234. // newline
  235. {.kind = TokenKind::Error(), .text = "6.0e+1e"},
  236. {.kind = TokenKind::Plus()},
  237. {.kind = TokenKind::IntegerLiteral(), .text = "2"},
  238. // newline
  239. {.kind = TokenKind::Error(), .text = "1e7"},
  240. // newline
  241. {.kind = TokenKind::IntegerLiteral(), .text = "8"},
  242. {.kind = TokenKind::Period()},
  243. {.kind = TokenKind::Period()},
  244. {.kind = TokenKind::IntegerLiteral(), .text = "10"},
  245. // newline
  246. {.kind = TokenKind::RealLiteral(), .text = "9.0"},
  247. {.kind = TokenKind::Period()},
  248. {.kind = TokenKind::RealLiteral(), .text = "9.5"},
  249. // newline
  250. {.kind = TokenKind::Error(), .text = "10.foo"},
  251. // newline
  252. {.kind = TokenKind::RealLiteral(), .text = "11.0"},
  253. {.kind = TokenKind::Period()},
  254. {.kind = TokenKind::Identifier(), .text = "foo"},
  255. // newline
  256. {.kind = TokenKind::Error(), .text = "12e"},
  257. {.kind = TokenKind::Plus()},
  258. {.kind = TokenKind::IntegerLiteral(), .text = "1"},
  259. // newline
  260. {.kind = TokenKind::IntegerLiteral(), .text = "13"},
  261. {.kind = TokenKind::Period()},
  262. {.kind = TokenKind::Underscore()},
  263. // newline
  264. {.kind = TokenKind::EndOfFile()},
  265. }));
  266. }
  267. TEST_F(LexerTest, HandlesGarbageCharacters) {
  268. constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\"\n\"\\";
  269. auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  270. EXPECT_TRUE(buffer.HasErrors());
  271. EXPECT_THAT(
  272. buffer,
  273. HasTokens(llvm::ArrayRef<ExpectedToken>{
  274. {.kind = TokenKind::Error(),
  275. .line = 1,
  276. .column = 1,
  277. .text = llvm::StringRef("$$💩", 6)},
  278. // 💩 takes 4 bytes, and we count column as bytes offset.
  279. {.kind = TokenKind::Minus(), .line = 1, .column = 7},
  280. {.kind = TokenKind::Error(), .line = 1, .column = 8, .text = "$"},
  281. // newline
  282. {.kind = TokenKind::Error(),
  283. .line = 2,
  284. .column = 1,
  285. .text = llvm::StringRef("$\0$", 3)},
  286. {.kind = TokenKind::IntegerLiteral(),
  287. .line = 2,
  288. .column = 4,
  289. .text = "12"},
  290. {.kind = TokenKind::Error(), .line = 2, .column = 6, .text = "$"},
  291. // newline
  292. {.kind = TokenKind::Error(),
  293. .line = 3,
  294. .column = 1,
  295. .text = llvm::StringRef("\"", 1)},
  296. // newline
  297. {.kind = TokenKind::Error(),
  298. .line = 4,
  299. .column = 1,
  300. .text = llvm::StringRef("\"", 1)},
  301. {.kind = TokenKind::Backslash(),
  302. .line = 4,
  303. .column = 2,
  304. .text = llvm::StringRef("\\", 1)},
  305. {.kind = TokenKind::EndOfFile(), .line = 4, .column = 3},
  306. }));
  307. }
  308. TEST_F(LexerTest, Symbols) {
  309. // We don't need to exhaustively test symbols here as they're handled with
  310. // common code, but we want to check specific patterns to verify things like
  311. // max-munch rule and handling of interesting symbols.
  312. auto buffer = Lex("<<<");
  313. EXPECT_FALSE(buffer.HasErrors());
  314. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  315. {TokenKind::LessLess()},
  316. {TokenKind::Less()},
  317. {TokenKind::EndOfFile()},
  318. }));
  319. buffer = Lex("<<=>>");
  320. EXPECT_FALSE(buffer.HasErrors());
  321. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  322. {TokenKind::LessLessEqual()},
  323. {TokenKind::GreaterGreater()},
  324. {TokenKind::EndOfFile()},
  325. }));
  326. buffer = Lex("< <=> >");
  327. EXPECT_FALSE(buffer.HasErrors());
  328. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  329. {TokenKind::Less()},
  330. {TokenKind::LessEqualGreater()},
  331. {TokenKind::Greater()},
  332. {TokenKind::EndOfFile()},
  333. }));
  334. buffer = Lex("\\/?@&^!");
  335. EXPECT_FALSE(buffer.HasErrors());
  336. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  337. {TokenKind::Backslash()},
  338. {TokenKind::Slash()},
  339. {TokenKind::Question()},
  340. {TokenKind::At()},
  341. {TokenKind::Amp()},
  342. {TokenKind::Caret()},
  343. {TokenKind::Exclaim()},
  344. {TokenKind::EndOfFile()},
  345. }));
  346. }
  347. TEST_F(LexerTest, Parens) {
  348. auto buffer = Lex("()");
  349. EXPECT_FALSE(buffer.HasErrors());
  350. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  351. {TokenKind::OpenParen()},
  352. {TokenKind::CloseParen()},
  353. {TokenKind::EndOfFile()},
  354. }));
  355. buffer = Lex("((()()))");
  356. EXPECT_FALSE(buffer.HasErrors());
  357. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  358. {TokenKind::OpenParen()},
  359. {TokenKind::OpenParen()},
  360. {TokenKind::OpenParen()},
  361. {TokenKind::CloseParen()},
  362. {TokenKind::OpenParen()},
  363. {TokenKind::CloseParen()},
  364. {TokenKind::CloseParen()},
  365. {TokenKind::CloseParen()},
  366. {TokenKind::EndOfFile()},
  367. }));
  368. }
  369. TEST_F(LexerTest, CurlyBraces) {
  370. auto buffer = Lex("{}");
  371. EXPECT_FALSE(buffer.HasErrors());
  372. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  373. {TokenKind::OpenCurlyBrace()},
  374. {TokenKind::CloseCurlyBrace()},
  375. {TokenKind::EndOfFile()},
  376. }));
  377. buffer = Lex("{{{}{}}}");
  378. EXPECT_FALSE(buffer.HasErrors());
  379. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  380. {TokenKind::OpenCurlyBrace()},
  381. {TokenKind::OpenCurlyBrace()},
  382. {TokenKind::OpenCurlyBrace()},
  383. {TokenKind::CloseCurlyBrace()},
  384. {TokenKind::OpenCurlyBrace()},
  385. {TokenKind::CloseCurlyBrace()},
  386. {TokenKind::CloseCurlyBrace()},
  387. {TokenKind::CloseCurlyBrace()},
  388. {TokenKind::EndOfFile()},
  389. }));
  390. }
  391. TEST_F(LexerTest, MatchingGroups) {
  392. {
  393. TokenizedBuffer buffer = Lex("(){}");
  394. ASSERT_FALSE(buffer.HasErrors());
  395. auto it = buffer.Tokens().begin();
  396. auto open_paren_token = *it++;
  397. auto close_paren_token = *it++;
  398. EXPECT_EQ(close_paren_token,
  399. buffer.GetMatchedClosingToken(open_paren_token));
  400. EXPECT_EQ(open_paren_token,
  401. buffer.GetMatchedOpeningToken(close_paren_token));
  402. auto open_curly_token = *it++;
  403. auto close_curly_token = *it++;
  404. EXPECT_EQ(close_curly_token,
  405. buffer.GetMatchedClosingToken(open_curly_token));
  406. EXPECT_EQ(open_curly_token,
  407. buffer.GetMatchedOpeningToken(close_curly_token));
  408. auto eof_token = *it++;
  409. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
  410. EXPECT_EQ(buffer.Tokens().end(), it);
  411. }
  412. {
  413. TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
  414. ASSERT_FALSE(buffer.HasErrors());
  415. auto it = buffer.Tokens().begin();
  416. auto open_paren_token = *it++;
  417. auto open_curly_token = *it++;
  418. ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  419. auto close_curly_token = *it++;
  420. auto close_paren_token = *it++;
  421. EXPECT_EQ(close_paren_token,
  422. buffer.GetMatchedClosingToken(open_paren_token));
  423. EXPECT_EQ(open_paren_token,
  424. buffer.GetMatchedOpeningToken(close_paren_token));
  425. EXPECT_EQ(close_curly_token,
  426. buffer.GetMatchedClosingToken(open_curly_token));
  427. EXPECT_EQ(open_curly_token,
  428. buffer.GetMatchedOpeningToken(close_curly_token));
  429. open_curly_token = *it++;
  430. open_paren_token = *it++;
  431. ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  432. close_paren_token = *it++;
  433. close_curly_token = *it++;
  434. EXPECT_EQ(close_curly_token,
  435. buffer.GetMatchedClosingToken(open_curly_token));
  436. EXPECT_EQ(open_curly_token,
  437. buffer.GetMatchedOpeningToken(close_curly_token));
  438. EXPECT_EQ(close_paren_token,
  439. buffer.GetMatchedClosingToken(open_paren_token));
  440. EXPECT_EQ(open_paren_token,
  441. buffer.GetMatchedOpeningToken(close_paren_token));
  442. open_curly_token = *it++;
  443. auto inner_open_curly_token = *it++;
  444. open_paren_token = *it++;
  445. auto inner_open_paren_token = *it++;
  446. ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  447. auto inner_close_paren_token = *it++;
  448. close_paren_token = *it++;
  449. auto inner_close_curly_token = *it++;
  450. close_curly_token = *it++;
  451. EXPECT_EQ(close_curly_token,
  452. buffer.GetMatchedClosingToken(open_curly_token));
  453. EXPECT_EQ(open_curly_token,
  454. buffer.GetMatchedOpeningToken(close_curly_token));
  455. EXPECT_EQ(inner_close_curly_token,
  456. buffer.GetMatchedClosingToken(inner_open_curly_token));
  457. EXPECT_EQ(inner_open_curly_token,
  458. buffer.GetMatchedOpeningToken(inner_close_curly_token));
  459. EXPECT_EQ(close_paren_token,
  460. buffer.GetMatchedClosingToken(open_paren_token));
  461. EXPECT_EQ(open_paren_token,
  462. buffer.GetMatchedOpeningToken(close_paren_token));
  463. EXPECT_EQ(inner_close_paren_token,
  464. buffer.GetMatchedClosingToken(inner_open_paren_token));
  465. EXPECT_EQ(inner_open_paren_token,
  466. buffer.GetMatchedOpeningToken(inner_close_paren_token));
  467. auto eof_token = *it++;
  468. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile());
  469. EXPECT_EQ(buffer.Tokens().end(), it);
  470. }
  471. }
  472. TEST_F(LexerTest, MismatchedGroups) {
  473. auto buffer = Lex("{");
  474. EXPECT_TRUE(buffer.HasErrors());
  475. EXPECT_THAT(buffer,
  476. HasTokens(llvm::ArrayRef<ExpectedToken>{
  477. {TokenKind::OpenCurlyBrace()},
  478. {.kind = TokenKind::CloseCurlyBrace(), .recovery = true},
  479. {TokenKind::EndOfFile()},
  480. }));
  481. buffer = Lex("}");
  482. EXPECT_TRUE(buffer.HasErrors());
  483. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  484. {.kind = TokenKind::Error(), .text = "}"},
  485. {TokenKind::EndOfFile()},
  486. }));
  487. buffer = Lex("{(}");
  488. EXPECT_TRUE(buffer.HasErrors());
  489. EXPECT_THAT(
  490. buffer,
  491. HasTokens(llvm::ArrayRef<ExpectedToken>{
  492. {.kind = TokenKind::OpenCurlyBrace(), .column = 1},
  493. {.kind = TokenKind::OpenParen(), .column = 2},
  494. {.kind = TokenKind::CloseParen(), .column = 3, .recovery = true},
  495. {.kind = TokenKind::CloseCurlyBrace(), .column = 3},
  496. {TokenKind::EndOfFile()},
  497. }));
  498. buffer = Lex(")({)");
  499. EXPECT_TRUE(buffer.HasErrors());
  500. EXPECT_THAT(
  501. buffer,
  502. HasTokens(llvm::ArrayRef<ExpectedToken>{
  503. {.kind = TokenKind::Error(), .column = 1, .text = ")"},
  504. {.kind = TokenKind::OpenParen(), .column = 2},
  505. {.kind = TokenKind::OpenCurlyBrace(), .column = 3},
  506. {.kind = TokenKind::CloseCurlyBrace(), .column = 4, .recovery = true},
  507. {.kind = TokenKind::CloseParen(), .column = 4},
  508. {TokenKind::EndOfFile()},
  509. }));
  510. }
  511. TEST_F(LexerTest, Whitespace) {
  512. auto buffer = Lex("{( } {(");
  513. // Whether there should be whitespace before/after each token.
  514. bool space[] = {true,
  515. // {
  516. false,
  517. // (
  518. true,
  519. // inserted )
  520. true,
  521. // }
  522. true,
  523. // {
  524. false,
  525. // (
  526. true,
  527. // inserted )
  528. true,
  529. // inserted }
  530. true,
  531. // EOF
  532. false};
  533. int pos = 0;
  534. for (TokenizedBuffer::Token token : buffer.Tokens()) {
  535. ASSERT_LT(pos, std::size(space));
  536. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  537. ++pos;
  538. ASSERT_LT(pos, std::size(space));
  539. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  540. }
  541. ASSERT_EQ(pos + 1, std::size(space));
  542. }
  543. TEST_F(LexerTest, Keywords) {
  544. auto buffer = Lex(" fn");
  545. EXPECT_FALSE(buffer.HasErrors());
  546. EXPECT_THAT(buffer,
  547. HasTokens(llvm::ArrayRef<ExpectedToken>{
  548. {.kind = TokenKind::Fn(), .column = 4, .indent_column = 4},
  549. {TokenKind::EndOfFile()},
  550. }));
  551. buffer = Lex("and or not if else for return var break continue _");
  552. EXPECT_FALSE(buffer.HasErrors());
  553. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  554. {TokenKind::And()},
  555. {TokenKind::Or()},
  556. {TokenKind::Not()},
  557. {TokenKind::If()},
  558. {TokenKind::Else()},
  559. {TokenKind::For()},
  560. {TokenKind::Return()},
  561. {TokenKind::Var()},
  562. {TokenKind::Break()},
  563. {TokenKind::Continue()},
  564. {TokenKind::Underscore()},
  565. {TokenKind::EndOfFile()},
  566. }));
  567. }
  568. TEST_F(LexerTest, Comments) {
  569. auto buffer = Lex(" ;\n // foo\n ;\n");
  570. EXPECT_FALSE(buffer.HasErrors());
  571. EXPECT_THAT(buffer,
  572. HasTokens(llvm::ArrayRef<ExpectedToken>{
  573. {.kind = TokenKind::Semi(),
  574. .line = 1,
  575. .column = 2,
  576. .indent_column = 2},
  577. {.kind = TokenKind::Semi(),
  578. .line = 3,
  579. .column = 3,
  580. .indent_column = 3},
  581. {.kind = TokenKind::EndOfFile(), .line = 3, .column = 4},
  582. }));
  583. buffer = Lex("// foo\n//\n// bar");
  584. EXPECT_FALSE(buffer.HasErrors());
  585. EXPECT_THAT(
  586. buffer,
  587. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  588. // Make sure weird characters aren't a problem.
  589. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  590. EXPECT_FALSE(buffer.HasErrors());
  591. EXPECT_THAT(
  592. buffer,
  593. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  594. // Make sure we can lex a comment at the end of the input.
  595. buffer = Lex("//");
  596. EXPECT_FALSE(buffer.HasErrors());
  597. EXPECT_THAT(
  598. buffer,
  599. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile()}}));
  600. }
  601. TEST_F(LexerTest, InvalidComments) {
  602. llvm::StringLiteral testcases[] = {
  603. " /// foo\n",
  604. "foo // bar\n",
  605. "//! hello",
  606. " //world",
  607. };
  608. for (llvm::StringLiteral testcase : testcases) {
  609. auto buffer = Lex(testcase);
  610. EXPECT_TRUE(buffer.HasErrors());
  611. }
  612. }
  613. TEST_F(LexerTest, Identifiers) {
  614. auto buffer = Lex(" foobar");
  615. EXPECT_FALSE(buffer.HasErrors());
  616. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  617. {.kind = TokenKind::Identifier(),
  618. .column = 4,
  619. .indent_column = 4,
  620. .text = "foobar"},
  621. {TokenKind::EndOfFile()},
  622. }));
  623. // Check different kinds of identifier character sequences.
  624. buffer = Lex("_foo_bar");
  625. EXPECT_FALSE(buffer.HasErrors());
  626. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  627. {.kind = TokenKind::Identifier(), .text = "_foo_bar"},
  628. {TokenKind::EndOfFile()},
  629. }));
  630. buffer = Lex("foo2bar00");
  631. EXPECT_FALSE(buffer.HasErrors());
  632. EXPECT_THAT(buffer,
  633. HasTokens(llvm::ArrayRef<ExpectedToken>{
  634. {.kind = TokenKind::Identifier(), .text = "foo2bar00"},
  635. {TokenKind::EndOfFile()},
  636. }));
  637. // Check that we can parse identifiers that start with a keyword.
  638. buffer = Lex("fnord");
  639. EXPECT_FALSE(buffer.HasErrors());
  640. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  641. {.kind = TokenKind::Identifier(), .text = "fnord"},
  642. {TokenKind::EndOfFile()},
  643. }));
  644. // Check multiple identifiers with indent and interning.
  645. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  646. EXPECT_FALSE(buffer.HasErrors());
  647. EXPECT_THAT(buffer,
  648. HasTokens(llvm::ArrayRef<ExpectedToken>{
  649. {.kind = TokenKind::Identifier(),
  650. .line = 1,
  651. .column = 4,
  652. .indent_column = 4,
  653. .text = "foo"},
  654. {.kind = TokenKind::Semi()},
  655. {.kind = TokenKind::Identifier(),
  656. .line = 1,
  657. .column = 8,
  658. .indent_column = 4,
  659. .text = "bar"},
  660. {.kind = TokenKind::Identifier(),
  661. .line = 2,
  662. .column = 1,
  663. .indent_column = 1,
  664. .text = "bar"},
  665. {.kind = TokenKind::Identifier(),
  666. .line = 3,
  667. .column = 3,
  668. .indent_column = 3,
  669. .text = "foo"},
  670. {.kind = TokenKind::Identifier(),
  671. .line = 3,
  672. .column = 7,
  673. .indent_column = 3,
  674. .text = "foo"},
  675. {.kind = TokenKind::EndOfFile(), .line = 3, .column = 10},
  676. }));
  677. }
  678. TEST_F(LexerTest, StringLiterals) {
  679. llvm::StringLiteral testcase = R"(
  680. "hello world\n"
  681. """foo
  682. test \
  683. \xAB
  684. """ trailing
  685. #"""#
  686. "\0"
  687. #"\0"foo"\1"#
  688. """x"""
  689. )";
  690. auto buffer = Lex(testcase);
  691. EXPECT_FALSE(buffer.HasErrors());
  692. EXPECT_THAT(buffer,
  693. HasTokens(llvm::ArrayRef<ExpectedToken>{
  694. {.kind = TokenKind::StringLiteral(),
  695. .line = 2,
  696. .column = 5,
  697. .indent_column = 5,
  698. .string_contents = {"hello world\n"}},
  699. {.kind = TokenKind::StringLiteral(),
  700. .line = 4,
  701. .column = 5,
  702. .indent_column = 5,
  703. .string_contents = {" test \xAB\n"}},
  704. {.kind = TokenKind::Identifier(),
  705. .line = 7,
  706. .column = 10,
  707. .indent_column = 5,
  708. .text = "trailing"},
  709. {.kind = TokenKind::StringLiteral(),
  710. .line = 9,
  711. .column = 7,
  712. .indent_column = 7,
  713. .string_contents = {"\""}},
  714. {.kind = TokenKind::StringLiteral(),
  715. .line = 11,
  716. .column = 5,
  717. .indent_column = 5,
  718. .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
  719. {.kind = TokenKind::StringLiteral(),
  720. .line = 13,
  721. .column = 5,
  722. .indent_column = 5,
  723. .string_contents = {"\\0\"foo\"\\1"}},
  724. // """x""" is three string literals, not one.
  725. {.kind = TokenKind::StringLiteral(),
  726. .line = 15,
  727. .column = 5,
  728. .indent_column = 5,
  729. .string_contents = {""}},
  730. {.kind = TokenKind::StringLiteral(),
  731. .line = 15,
  732. .column = 7,
  733. .indent_column = 5,
  734. .string_contents = {"x"}},
  735. {.kind = TokenKind::StringLiteral(),
  736. .line = 15,
  737. .column = 10,
  738. .indent_column = 5,
  739. .string_contents = {""}},
  740. {.kind = TokenKind::EndOfFile(), .line = 16, .column = 3},
  741. }));
  742. }
  743. TEST_F(LexerTest, InvalidStringLiterals) {
  744. llvm::StringLiteral invalid[] = {
  745. R"(")",
  746. R"("""
  747. "")", //
  748. R"("\)", //
  749. R"("\")", //
  750. R"("\\)", //
  751. R"("\\\")", //
  752. R"(""")",
  753. R"("""
  754. )", //
  755. R"("""\)",
  756. R"(#"""
  757. """)",
  758. };
  759. for (llvm::StringLiteral test : invalid) {
  760. auto buffer = Lex(test);
  761. EXPECT_TRUE(buffer.HasErrors()) << "`" << test << "`";
  762. // We should have formed at least one error token.
  763. bool found_error = false;
  764. for (TokenizedBuffer::Token token : buffer.Tokens()) {
  765. if (buffer.GetKind(token) == TokenKind::Error()) {
  766. found_error = true;
  767. break;
  768. }
  769. }
  770. EXPECT_TRUE(found_error) << "`" << test << "`";
  771. }
  772. }
TEST_F(LexerTest, TypeLiterals) {
  // Spellings of the form `iN`, `uN`, and `fN` lex as type literal tokens
  // when `N` is a plain non-zero decimal number; per the expectations below,
  // `i0`/`u0` (zero width), `i0x1`/`u64b`/`fi` (non-decimal suffix text), and
  // `s1` (no such prefix) all fall back to ordinary identifiers.
  //
  // NOTE(review): the 4-space indentation inside the raw string is load
  // bearing — the expected columns below (first token at column 5 on each
  // line, EndOfFile at column 3) depend on it.
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";

  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.HasErrors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // `i0` is not a valid bit-width, so it stays an identifier.
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Widths well beyond any machine size still lex as type
                  // literals.
                  {.kind = TokenKind::IntegerTypeLiteral(),
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // Trailing non-decimal characters make it an identifier.
                  {.kind = TokenKind::Identifier(),
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral(),
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral(),
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  {.kind = TokenKind::Identifier(),
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix.
                  {.kind = TokenKind::Identifier(),
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::EndOfFile(), .line = 6, .column = 3},
              }));

  // The buffer records the parsed bit-width for each type literal token; spot
  // check it for a sample of the tokens above (indices match the expected
  // token table).
  auto token_i1 = buffer.Tokens().begin() + 1;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  auto token_i20 = buffer.Tokens().begin() + 2;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  auto token_i999999999999 = buffer.Tokens().begin() + 3;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ull);
  auto token_u1 = buffer.Tokens().begin() + 6;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  auto token_u64 = buffer.Tokens().begin() + 7;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  auto token_f32 = buffer.Tokens().begin() + 9;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  auto token_f80 = buffer.Tokens().begin() + 10;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  auto token_f1 = buffer.Tokens().begin() + 11;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
}
TEST_F(LexerTest, Diagnostics) {
  // Each problematic construct in the input below produces one diagnostic;
  // the expectations pin both the 1-based (line, column) location and a
  // distinctive fragment of the message. The 4-space indentation inside the
  // raw string is load bearing for the expected columns.
  llvm::StringLiteral testcase = R"(
    // Hello!
    var String x; // trailing comment
    //no space after comment
    "hello\bworld\xab"
    0x123abc
    #"
  )";

  Testing::MockDiagnosticConsumer consumer;
  // Line 3: a comment after code on the same line.
  EXPECT_CALL(consumer, HandleDiagnostic(AllOf(
                            DiagnosticAt(3, 19),
                            DiagnosticMessage(HasSubstr("Trailing comment")))));
  // Line 4: `//no` — comment introducer not followed by whitespace.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(4, 7),
                  DiagnosticMessage(HasSubstr("Whitespace is required")))));
  // Line 5, column 12: the `b` of the invalid `\b` escape sequence.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 12),
          DiagnosticMessage(HasSubstr("Unrecognized escape sequence `b`")))));
  // Line 5, column 20: `\xab` uses lowercase hex digits.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(5, 20),
          DiagnosticMessage(HasSubstr("two uppercase hexadecimal digits")))));
  // Line 6, column 10: lowercase `a` in the hex literal `0x123abc`.
  EXPECT_CALL(
      consumer,
      HandleDiagnostic(AllOf(
          DiagnosticAt(6, 10),
          DiagnosticMessage(HasSubstr("Invalid digit 'a' in hexadecimal")))));
  // Line 7: `#"` does not start any valid token.
  EXPECT_CALL(consumer,
              HandleDiagnostic(AllOf(
                  DiagnosticAt(7, 5),
                  DiagnosticMessage(HasSubstr("unrecognized character")))));
  Lex(testcase, consumer);
}
  911. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  912. auto newline_offset = text.find_first_of('\n');
  913. llvm::StringRef line = text.slice(0, newline_offset);
  914. if (newline_offset != llvm::StringRef::npos) {
  915. text = text.substr(newline_offset + 1);
  916. } else {
  917. text = "";
  918. }
  919. return line.str();
  920. }
TEST_F(LexerTest, Printing) {
  // `TokenizedBuffer::Print` emits one `token: { ... }` line per token; these
  // expectations pin the exact text, including field padding, so consume the
  // output line by line.
  auto buffer = Lex(";");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_storage;
  llvm::raw_string_ostream print_stream(print_storage);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'Semi', line: 1, column: 1, "
                    "indent: 1, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'EndOfFile', line: 1, column: 2, "
                    "indent: 1, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test kind padding: `OpenParen`/`CloseParen` are longer kind names, and
  // grouping tokens also print `closing_token`/`opening_token` cross-links.
  buffer = Lex("(;foo;)");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 0, kind: 'OpenParen', line: 1, column: "
                    "1, indent: 1, spelling: '(', closing_token: 4 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 1, kind: 'Semi', line: 1, column: "
                    "2, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 2, kind: 'Identifier', line: 1, column: "
                    "3, indent: 1, spelling: 'foo', identifier: 0 }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 3, kind: 'Semi', line: 1, column: "
                    "6, indent: 1, spelling: ';' }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 4, kind: 'CloseParen', line: 1, column: "
                    "7, indent: 1, spelling: ')', opening_token: 0, "
                    "has_trailing_space: true }"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("token: { index: 5, kind: 'EndOfFile', line: 1, column: "
                    "8, indent: 1, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;

  // Test digit padding with max values of 9, 10, and 11 (column, line, and
  // the EndOfFile column respectively, after ten newlines and leading
  // spaces).
  buffer = Lex(";\n\n\n\n\n\n\n\n\n\n        ;;");
  ASSERT_FALSE(buffer.HasErrors());
  print_storage.clear();
  buffer.Print(print_stream);
  print = print_stream.str();
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 0, kind: 'Semi', line:  1, column:  1, "
            "indent: 1, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 1, kind: 'Semi', line: 11, column:  9, "
            "indent: 9, spelling: ';' }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 2, kind: 'Semi', line: 11, column: 10, "
            "indent: 9, spelling: ';', has_trailing_space: true }"));
  EXPECT_THAT(
      GetAndDropLine(print),
      StrEq("token: { index: 3, kind: 'EndOfFile', line: 11, column: 11, "
            "indent: 9, spelling: '' }"));
  EXPECT_TRUE(print.empty()) << print;
}
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // The input places tokens on lines 2 and 5 with differing indentation, and
  // ends with a run of blank lines so EndOfFile lands on line 15.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.HasErrors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();

  // Each printed `token:` line should round-trip through the YAML parser as a
  // mapping with the expected scalar fields.
  EXPECT_THAT(Yaml::Value::FromText(print_output),
              ElementsAre(Yaml::MappingValue{
                  {"token", Yaml::MappingValue{{"index", "0"},
                                               {"kind", "Semi"},
                                               {"line", "2"},
                                               {"column", "2"},
                                               {"indent", "2"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "1"},
                                               {"kind", "Semi"},
                                               {"line", "5"},
                                               {"column", "1"},
                                               {"indent", "1"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "2"},
                                               {"kind", "Semi"},
                                               {"line", "5"},
                                               {"column", "3"},
                                               {"indent", "1"},
                                               {"spelling", ";"},
                                               {"has_trailing_space", "true"}}},
                  {"token", Yaml::MappingValue{{"index", "3"},
                                               {"kind", "EndOfFile"},
                                               {"line", "15"},
                                               {"column", "1"},
                                               {"indent", "1"},
                                               {"spelling", ""}}}}));
}
  1023. } // namespace
  1024. } // namespace Carbon::Testing