  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/lexer/tokenized_buffer.h"

#include <forward_list>
#include <iterator>
#include <string>

#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Sequence.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/VirtualFileSystem.h"
#include "llvm/Support/raw_ostream.h"
#include "toolchain/common/yaml_test_helpers.h"
#include "toolchain/diagnostics/diagnostic_emitter.h"
#include "toolchain/diagnostics/mocks.h"
#include "toolchain/lexer/tokenized_buffer_test_helpers.h"
  17. namespace Carbon::Testing {
  18. namespace {
  19. using ::testing::_;
  20. using ::testing::ElementsAre;
  21. using ::testing::Eq;
  22. using ::testing::HasSubstr;
  23. using ::testing::StrEq;
  24. class LexerTest : public ::testing::Test {
  25. protected:
  26. auto GetSourceBuffer(llvm::StringRef text) -> SourceBuffer& {
  27. std::string filename = llvm::formatv("test{0}.carbon", ++file_index);
  28. CARBON_CHECK(fs.addFile(filename, /*ModificationTime=*/0,
  29. llvm::MemoryBuffer::getMemBuffer(text)));
  30. source_storage.push_front(
  31. std::move(*SourceBuffer::CreateFromFile(fs, filename)));
  32. return source_storage.front();
  33. }
  34. auto Lex(llvm::StringRef text,
  35. DiagnosticConsumer& consumer = ConsoleDiagnosticConsumer())
  36. -> TokenizedBuffer {
  37. return TokenizedBuffer::Lex(GetSourceBuffer(text), consumer);
  38. }
  39. llvm::vfs::InMemoryFileSystem fs;
  40. int file_index = 0;
  41. std::forward_list<SourceBuffer> source_storage;
  42. };
  43. TEST_F(LexerTest, HandlesEmptyBuffer) {
  44. auto buffer = Lex("");
  45. EXPECT_FALSE(buffer.has_errors());
  46. EXPECT_THAT(buffer,
  47. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  48. }
  49. TEST_F(LexerTest, TracksLinesAndColumns) {
  50. auto buffer = Lex("\n ;;\n ;;;\n x\"foo\" '''baz\n a\n ''' y");
  51. EXPECT_FALSE(buffer.has_errors());
  52. EXPECT_THAT(
  53. buffer,
  54. HasTokens(llvm::ArrayRef<ExpectedToken>{
  55. {.kind = TokenKind::Semi, .line = 2, .column = 3, .indent_column = 3},
  56. {.kind = TokenKind::Semi, .line = 2, .column = 4, .indent_column = 3},
  57. {.kind = TokenKind::Semi, .line = 3, .column = 4, .indent_column = 4},
  58. {.kind = TokenKind::Semi, .line = 3, .column = 5, .indent_column = 4},
  59. {.kind = TokenKind::Semi, .line = 3, .column = 6, .indent_column = 4},
  60. {.kind = TokenKind::Identifier,
  61. .line = 4,
  62. .column = 4,
  63. .indent_column = 4,
  64. .text = "x"},
  65. {.kind = TokenKind::StringLiteral,
  66. .line = 4,
  67. .column = 5,
  68. .indent_column = 4},
  69. {.kind = TokenKind::StringLiteral,
  70. .line = 4,
  71. .column = 11,
  72. .indent_column = 4},
  73. {.kind = TokenKind::Identifier,
  74. .line = 6,
  75. .column = 6,
  76. .indent_column = 11,
  77. .text = "y"},
  78. {.kind = TokenKind::EndOfFile, .line = 6, .column = 7},
  79. }));
  80. }
  81. TEST_F(LexerTest, HandlesNumericLiteral) {
  82. auto buffer = Lex("12-578\n 1 2\n0x12_3ABC\n0b10_10_11\n1_234_567\n1.5e9");
  83. EXPECT_FALSE(buffer.has_errors());
  84. ASSERT_THAT(buffer,
  85. HasTokens(llvm::ArrayRef<ExpectedToken>{
  86. {.kind = TokenKind::IntegerLiteral,
  87. .line = 1,
  88. .column = 1,
  89. .indent_column = 1,
  90. .text = "12"},
  91. {.kind = TokenKind::Minus,
  92. .line = 1,
  93. .column = 3,
  94. .indent_column = 1},
  95. {.kind = TokenKind::IntegerLiteral,
  96. .line = 1,
  97. .column = 4,
  98. .indent_column = 1,
  99. .text = "578"},
  100. {.kind = TokenKind::IntegerLiteral,
  101. .line = 2,
  102. .column = 3,
  103. .indent_column = 3,
  104. .text = "1"},
  105. {.kind = TokenKind::IntegerLiteral,
  106. .line = 2,
  107. .column = 6,
  108. .indent_column = 3,
  109. .text = "2"},
  110. {.kind = TokenKind::IntegerLiteral,
  111. .line = 3,
  112. .column = 1,
  113. .indent_column = 1,
  114. .text = "0x12_3ABC"},
  115. {.kind = TokenKind::IntegerLiteral,
  116. .line = 4,
  117. .column = 1,
  118. .indent_column = 1,
  119. .text = "0b10_10_11"},
  120. {.kind = TokenKind::IntegerLiteral,
  121. .line = 5,
  122. .column = 1,
  123. .indent_column = 1,
  124. .text = "1_234_567"},
  125. {.kind = TokenKind::RealLiteral,
  126. .line = 6,
  127. .column = 1,
  128. .indent_column = 1,
  129. .text = "1.5e9"},
  130. {.kind = TokenKind::EndOfFile, .line = 6, .column = 6},
  131. }));
  132. auto token_12 = buffer.tokens().begin();
  133. EXPECT_EQ(buffer.GetIntegerLiteral(*token_12), 12);
  134. auto token_578 = buffer.tokens().begin() + 2;
  135. EXPECT_EQ(buffer.GetIntegerLiteral(*token_578), 578);
  136. auto token_1 = buffer.tokens().begin() + 3;
  137. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1), 1);
  138. auto token_2 = buffer.tokens().begin() + 4;
  139. EXPECT_EQ(buffer.GetIntegerLiteral(*token_2), 2);
  140. auto token_0x12_3abc = buffer.tokens().begin() + 5;
  141. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0x12_3abc), 0x12'3abc);
  142. auto token_0b10_10_11 = buffer.tokens().begin() + 6;
  143. EXPECT_EQ(buffer.GetIntegerLiteral(*token_0b10_10_11), 0b10'10'11);
  144. auto token_1_234_567 = buffer.tokens().begin() + 7;
  145. EXPECT_EQ(buffer.GetIntegerLiteral(*token_1_234_567), 1'234'567);
  146. auto token_1_5e9 = buffer.tokens().begin() + 8;
  147. auto value_1_5e9 = buffer.GetRealLiteral(*token_1_5e9);
  148. EXPECT_EQ(value_1_5e9.Mantissa().getZExtValue(), 15);
  149. EXPECT_EQ(value_1_5e9.Exponent().getSExtValue(), 8);
  150. EXPECT_EQ(value_1_5e9.IsDecimal(), true);
  151. }
  152. TEST_F(LexerTest, HandlesInvalidNumericLiterals) {
  153. auto buffer = Lex("14x 15_49 0x3.5q 0x3_4.5_6 0ops");
  154. EXPECT_TRUE(buffer.has_errors());
  155. ASSERT_THAT(buffer,
  156. HasTokens(llvm::ArrayRef<ExpectedToken>{
  157. {.kind = TokenKind::Error,
  158. .line = 1,
  159. .column = 1,
  160. .indent_column = 1,
  161. .text = "14x"},
  162. {.kind = TokenKind::IntegerLiteral,
  163. .line = 1,
  164. .column = 5,
  165. .indent_column = 1,
  166. .text = "15_49"},
  167. {.kind = TokenKind::Error,
  168. .line = 1,
  169. .column = 11,
  170. .indent_column = 1,
  171. .text = "0x3.5q"},
  172. {.kind = TokenKind::RealLiteral,
  173. .line = 1,
  174. .column = 18,
  175. .indent_column = 1,
  176. .text = "0x3_4.5_6"},
  177. {.kind = TokenKind::Error,
  178. .line = 1,
  179. .column = 28,
  180. .indent_column = 1,
  181. .text = "0ops"},
  182. {.kind = TokenKind::EndOfFile, .line = 1, .column = 32},
  183. }));
  184. }
  185. TEST_F(LexerTest, SplitsNumericLiteralsProperly) {
  186. llvm::StringLiteral source_text = R"(
  187. 1.
  188. .2
  189. 3.+foo
  190. 4.0-bar
  191. 5.0e+123+456
  192. 6.0e+1e+2
  193. 1e7
  194. 8..10
  195. 9.0.9.5
  196. 10.foo
  197. 11.0.foo
  198. 12e+1
  199. 13._
  200. )";
  201. auto buffer = Lex(source_text);
  202. EXPECT_TRUE(buffer.has_errors());
  203. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  204. {.kind = TokenKind::IntegerLiteral, .text = "1"},
  205. {.kind = TokenKind::Period},
  206. // newline
  207. {.kind = TokenKind::Period},
  208. {.kind = TokenKind::IntegerLiteral, .text = "2"},
  209. // newline
  210. {.kind = TokenKind::IntegerLiteral, .text = "3"},
  211. {.kind = TokenKind::Period},
  212. {.kind = TokenKind::Plus},
  213. {.kind = TokenKind::Identifier, .text = "foo"},
  214. // newline
  215. {.kind = TokenKind::RealLiteral, .text = "4.0"},
  216. {.kind = TokenKind::Minus},
  217. {.kind = TokenKind::Identifier, .text = "bar"},
  218. // newline
  219. {.kind = TokenKind::RealLiteral, .text = "5.0e+123"},
  220. {.kind = TokenKind::Plus},
  221. {.kind = TokenKind::IntegerLiteral, .text = "456"},
  222. // newline
  223. {.kind = TokenKind::Error, .text = "6.0e+1e"},
  224. {.kind = TokenKind::Plus},
  225. {.kind = TokenKind::IntegerLiteral, .text = "2"},
  226. // newline
  227. {.kind = TokenKind::Error, .text = "1e7"},
  228. // newline
  229. {.kind = TokenKind::IntegerLiteral, .text = "8"},
  230. {.kind = TokenKind::Period},
  231. {.kind = TokenKind::Period},
  232. {.kind = TokenKind::IntegerLiteral, .text = "10"},
  233. // newline
  234. {.kind = TokenKind::RealLiteral, .text = "9.0"},
  235. {.kind = TokenKind::Period},
  236. {.kind = TokenKind::RealLiteral, .text = "9.5"},
  237. // newline
  238. {.kind = TokenKind::Error, .text = "10.foo"},
  239. // newline
  240. {.kind = TokenKind::RealLiteral, .text = "11.0"},
  241. {.kind = TokenKind::Period},
  242. {.kind = TokenKind::Identifier, .text = "foo"},
  243. // newline
  244. {.kind = TokenKind::Error, .text = "12e"},
  245. {.kind = TokenKind::Plus},
  246. {.kind = TokenKind::IntegerLiteral, .text = "1"},
  247. // newline
  248. {.kind = TokenKind::IntegerLiteral, .text = "13"},
  249. {.kind = TokenKind::Period},
  250. {.kind = TokenKind::Underscore},
  251. // newline
  252. {.kind = TokenKind::EndOfFile},
  253. }));
  254. }
  255. TEST_F(LexerTest, HandlesGarbageCharacters) {
  256. constexpr char GarbageText[] = "$$💩-$\n$\0$12$\n\\\"\\\n\"x";
  257. auto buffer = Lex(llvm::StringRef(GarbageText, sizeof(GarbageText) - 1));
  258. EXPECT_TRUE(buffer.has_errors());
  259. EXPECT_THAT(
  260. buffer,
  261. HasTokens(llvm::ArrayRef<ExpectedToken>{
  262. {.kind = TokenKind::Error,
  263. .line = 1,
  264. .column = 1,
  265. // 💩 takes 4 bytes, and we count column as bytes offset.
  266. .text = llvm::StringRef("$$💩", 6)},
  267. {.kind = TokenKind::Minus, .line = 1, .column = 7},
  268. {.kind = TokenKind::Error, .line = 1, .column = 8, .text = "$"},
  269. // newline
  270. {.kind = TokenKind::Error,
  271. .line = 2,
  272. .column = 1,
  273. .text = llvm::StringRef("$\0$", 3)},
  274. {.kind = TokenKind::IntegerLiteral,
  275. .line = 2,
  276. .column = 4,
  277. .text = "12"},
  278. {.kind = TokenKind::Error, .line = 2, .column = 6, .text = "$"},
  279. // newline
  280. {.kind = TokenKind::Backslash, .line = 3, .column = 1, .text = "\\"},
  281. {.kind = TokenKind::Error, .line = 3, .column = 2, .text = "\"\\"},
  282. // newline
  283. {.kind = TokenKind::Error, .line = 4, .column = 1, .text = "\"x"},
  284. {.kind = TokenKind::EndOfFile, .line = 4, .column = 3},
  285. }));
  286. }
  287. TEST_F(LexerTest, Symbols) {
  288. // We don't need to exhaustively test symbols here as they're handled with
  289. // common code, but we want to check specific patterns to verify things like
  290. // max-munch rule and handling of interesting symbols.
  291. auto buffer = Lex("<<<");
  292. EXPECT_FALSE(buffer.has_errors());
  293. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  294. {TokenKind::LessLess},
  295. {TokenKind::Less},
  296. {TokenKind::EndOfFile},
  297. }));
  298. buffer = Lex("<<=>>");
  299. EXPECT_FALSE(buffer.has_errors());
  300. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  301. {TokenKind::LessLessEqual},
  302. {TokenKind::GreaterGreater},
  303. {TokenKind::EndOfFile},
  304. }));
  305. buffer = Lex("< <=> >");
  306. EXPECT_FALSE(buffer.has_errors());
  307. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  308. {TokenKind::Less},
  309. {TokenKind::LessEqualGreater},
  310. {TokenKind::Greater},
  311. {TokenKind::EndOfFile},
  312. }));
  313. buffer = Lex("\\/?@&^!");
  314. EXPECT_FALSE(buffer.has_errors());
  315. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  316. {TokenKind::Backslash},
  317. {TokenKind::Slash},
  318. {TokenKind::Question},
  319. {TokenKind::At},
  320. {TokenKind::Amp},
  321. {TokenKind::Caret},
  322. {TokenKind::Exclaim},
  323. {TokenKind::EndOfFile},
  324. }));
  325. }
  326. TEST_F(LexerTest, Parens) {
  327. auto buffer = Lex("()");
  328. EXPECT_FALSE(buffer.has_errors());
  329. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  330. {TokenKind::OpenParen},
  331. {TokenKind::CloseParen},
  332. {TokenKind::EndOfFile},
  333. }));
  334. buffer = Lex("((()()))");
  335. EXPECT_FALSE(buffer.has_errors());
  336. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  337. {TokenKind::OpenParen},
  338. {TokenKind::OpenParen},
  339. {TokenKind::OpenParen},
  340. {TokenKind::CloseParen},
  341. {TokenKind::OpenParen},
  342. {TokenKind::CloseParen},
  343. {TokenKind::CloseParen},
  344. {TokenKind::CloseParen},
  345. {TokenKind::EndOfFile},
  346. }));
  347. }
  348. TEST_F(LexerTest, CurlyBraces) {
  349. auto buffer = Lex("{}");
  350. EXPECT_FALSE(buffer.has_errors());
  351. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  352. {TokenKind::OpenCurlyBrace},
  353. {TokenKind::CloseCurlyBrace},
  354. {TokenKind::EndOfFile},
  355. }));
  356. buffer = Lex("{{{}{}}}");
  357. EXPECT_FALSE(buffer.has_errors());
  358. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  359. {TokenKind::OpenCurlyBrace},
  360. {TokenKind::OpenCurlyBrace},
  361. {TokenKind::OpenCurlyBrace},
  362. {TokenKind::CloseCurlyBrace},
  363. {TokenKind::OpenCurlyBrace},
  364. {TokenKind::CloseCurlyBrace},
  365. {TokenKind::CloseCurlyBrace},
  366. {TokenKind::CloseCurlyBrace},
  367. {TokenKind::EndOfFile},
  368. }));
  369. }
  370. TEST_F(LexerTest, MatchingGroups) {
  371. {
  372. TokenizedBuffer buffer = Lex("(){}");
  373. ASSERT_FALSE(buffer.has_errors());
  374. auto it = buffer.tokens().begin();
  375. auto open_paren_token = *it++;
  376. auto close_paren_token = *it++;
  377. EXPECT_EQ(close_paren_token,
  378. buffer.GetMatchedClosingToken(open_paren_token));
  379. EXPECT_EQ(open_paren_token,
  380. buffer.GetMatchedOpeningToken(close_paren_token));
  381. auto open_curly_token = *it++;
  382. auto close_curly_token = *it++;
  383. EXPECT_EQ(close_curly_token,
  384. buffer.GetMatchedClosingToken(open_curly_token));
  385. EXPECT_EQ(open_curly_token,
  386. buffer.GetMatchedOpeningToken(close_curly_token));
  387. auto eof_token = *it++;
  388. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
  389. EXPECT_EQ(buffer.tokens().end(), it);
  390. }
  391. {
  392. TokenizedBuffer buffer = Lex("({x}){(y)} {{((z))}}");
  393. ASSERT_FALSE(buffer.has_errors());
  394. auto it = buffer.tokens().begin();
  395. auto open_paren_token = *it++;
  396. auto open_curly_token = *it++;
  397. ASSERT_EQ("x", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  398. auto close_curly_token = *it++;
  399. auto close_paren_token = *it++;
  400. EXPECT_EQ(close_paren_token,
  401. buffer.GetMatchedClosingToken(open_paren_token));
  402. EXPECT_EQ(open_paren_token,
  403. buffer.GetMatchedOpeningToken(close_paren_token));
  404. EXPECT_EQ(close_curly_token,
  405. buffer.GetMatchedClosingToken(open_curly_token));
  406. EXPECT_EQ(open_curly_token,
  407. buffer.GetMatchedOpeningToken(close_curly_token));
  408. open_curly_token = *it++;
  409. open_paren_token = *it++;
  410. ASSERT_EQ("y", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  411. close_paren_token = *it++;
  412. close_curly_token = *it++;
  413. EXPECT_EQ(close_curly_token,
  414. buffer.GetMatchedClosingToken(open_curly_token));
  415. EXPECT_EQ(open_curly_token,
  416. buffer.GetMatchedOpeningToken(close_curly_token));
  417. EXPECT_EQ(close_paren_token,
  418. buffer.GetMatchedClosingToken(open_paren_token));
  419. EXPECT_EQ(open_paren_token,
  420. buffer.GetMatchedOpeningToken(close_paren_token));
  421. open_curly_token = *it++;
  422. auto inner_open_curly_token = *it++;
  423. open_paren_token = *it++;
  424. auto inner_open_paren_token = *it++;
  425. ASSERT_EQ("z", buffer.GetIdentifierText(buffer.GetIdentifier(*it++)));
  426. auto inner_close_paren_token = *it++;
  427. close_paren_token = *it++;
  428. auto inner_close_curly_token = *it++;
  429. close_curly_token = *it++;
  430. EXPECT_EQ(close_curly_token,
  431. buffer.GetMatchedClosingToken(open_curly_token));
  432. EXPECT_EQ(open_curly_token,
  433. buffer.GetMatchedOpeningToken(close_curly_token));
  434. EXPECT_EQ(inner_close_curly_token,
  435. buffer.GetMatchedClosingToken(inner_open_curly_token));
  436. EXPECT_EQ(inner_open_curly_token,
  437. buffer.GetMatchedOpeningToken(inner_close_curly_token));
  438. EXPECT_EQ(close_paren_token,
  439. buffer.GetMatchedClosingToken(open_paren_token));
  440. EXPECT_EQ(open_paren_token,
  441. buffer.GetMatchedOpeningToken(close_paren_token));
  442. EXPECT_EQ(inner_close_paren_token,
  443. buffer.GetMatchedClosingToken(inner_open_paren_token));
  444. EXPECT_EQ(inner_open_paren_token,
  445. buffer.GetMatchedOpeningToken(inner_close_paren_token));
  446. auto eof_token = *it++;
  447. EXPECT_EQ(buffer.GetKind(eof_token), TokenKind::EndOfFile);
  448. EXPECT_EQ(buffer.tokens().end(), it);
  449. }
  450. }
  451. TEST_F(LexerTest, MismatchedGroups) {
  452. auto buffer = Lex("{");
  453. EXPECT_TRUE(buffer.has_errors());
  454. EXPECT_THAT(buffer,
  455. HasTokens(llvm::ArrayRef<ExpectedToken>{
  456. {TokenKind::OpenCurlyBrace},
  457. {.kind = TokenKind::CloseCurlyBrace, .recovery = true},
  458. {TokenKind::EndOfFile},
  459. }));
  460. buffer = Lex("}");
  461. EXPECT_TRUE(buffer.has_errors());
  462. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  463. {.kind = TokenKind::Error, .text = "}"},
  464. {TokenKind::EndOfFile},
  465. }));
  466. buffer = Lex("{(}");
  467. EXPECT_TRUE(buffer.has_errors());
  468. EXPECT_THAT(
  469. buffer,
  470. HasTokens(llvm::ArrayRef<ExpectedToken>{
  471. {.kind = TokenKind::OpenCurlyBrace, .column = 1},
  472. {.kind = TokenKind::OpenParen, .column = 2},
  473. {.kind = TokenKind::CloseParen, .column = 3, .recovery = true},
  474. {.kind = TokenKind::CloseCurlyBrace, .column = 3},
  475. {TokenKind::EndOfFile},
  476. }));
  477. buffer = Lex(")({)");
  478. EXPECT_TRUE(buffer.has_errors());
  479. EXPECT_THAT(
  480. buffer,
  481. HasTokens(llvm::ArrayRef<ExpectedToken>{
  482. {.kind = TokenKind::Error, .column = 1, .text = ")"},
  483. {.kind = TokenKind::OpenParen, .column = 2},
  484. {.kind = TokenKind::OpenCurlyBrace, .column = 3},
  485. {.kind = TokenKind::CloseCurlyBrace, .column = 4, .recovery = true},
  486. {.kind = TokenKind::CloseParen, .column = 4},
  487. {TokenKind::EndOfFile},
  488. }));
  489. }
  490. TEST_F(LexerTest, Whitespace) {
  491. auto buffer = Lex("{( } {(");
  492. // Whether there should be whitespace before/after each token.
  493. bool space[] = {true,
  494. // {
  495. false,
  496. // (
  497. true,
  498. // inserted )
  499. true,
  500. // }
  501. true,
  502. // {
  503. false,
  504. // (
  505. true,
  506. // inserted )
  507. true,
  508. // inserted }
  509. true,
  510. // EOF
  511. false};
  512. int pos = 0;
  513. for (TokenizedBuffer::Token token : buffer.tokens()) {
  514. ASSERT_LT(pos, std::size(space));
  515. EXPECT_THAT(buffer.HasLeadingWhitespace(token), Eq(space[pos]));
  516. ++pos;
  517. ASSERT_LT(pos, std::size(space));
  518. EXPECT_THAT(buffer.HasTrailingWhitespace(token), Eq(space[pos]));
  519. }
  520. ASSERT_EQ(pos + 1, std::size(space));
  521. }
  522. TEST_F(LexerTest, Keywords) {
  523. auto buffer = Lex(" fn");
  524. EXPECT_FALSE(buffer.has_errors());
  525. EXPECT_THAT(buffer,
  526. HasTokens(llvm::ArrayRef<ExpectedToken>{
  527. {.kind = TokenKind::Fn, .column = 4, .indent_column = 4},
  528. {TokenKind::EndOfFile},
  529. }));
  530. buffer = Lex("and or not if else for return var break continue _");
  531. EXPECT_FALSE(buffer.has_errors());
  532. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  533. {TokenKind::And},
  534. {TokenKind::Or},
  535. {TokenKind::Not},
  536. {TokenKind::If},
  537. {TokenKind::Else},
  538. {TokenKind::For},
  539. {TokenKind::Return},
  540. {TokenKind::Var},
  541. {TokenKind::Break},
  542. {TokenKind::Continue},
  543. {TokenKind::Underscore},
  544. {TokenKind::EndOfFile},
  545. }));
  546. }
  547. TEST_F(LexerTest, Comments) {
  548. auto buffer = Lex(" ;\n // foo\n ;\n");
  549. EXPECT_FALSE(buffer.has_errors());
  550. EXPECT_THAT(
  551. buffer,
  552. HasTokens(llvm::ArrayRef<ExpectedToken>{
  553. {.kind = TokenKind::Semi, .line = 1, .column = 2, .indent_column = 2},
  554. {.kind = TokenKind::Semi, .line = 3, .column = 3, .indent_column = 3},
  555. {.kind = TokenKind::EndOfFile, .line = 3, .column = 4},
  556. }));
  557. buffer = Lex("// foo\n//\n// bar");
  558. EXPECT_FALSE(buffer.has_errors());
  559. EXPECT_THAT(buffer,
  560. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  561. // Make sure weird characters aren't a problem.
  562. buffer = Lex(" // foo#$!^?@-_💩🍫⃠ [̲̅$̲̅(̲̅ ͡° ͜ʖ ͡°̲̅)̲̅$̲̅]");
  563. EXPECT_FALSE(buffer.has_errors());
  564. EXPECT_THAT(buffer,
  565. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  566. // Make sure we can lex a comment at the end of the input.
  567. buffer = Lex("//");
  568. EXPECT_FALSE(buffer.has_errors());
  569. EXPECT_THAT(buffer,
  570. HasTokens(llvm::ArrayRef<ExpectedToken>{{TokenKind::EndOfFile}}));
  571. }
  572. TEST_F(LexerTest, InvalidComments) {
  573. llvm::StringLiteral testcases[] = {
  574. " /// foo\n",
  575. "foo // bar\n",
  576. "//! hello",
  577. " //world",
  578. };
  579. for (llvm::StringLiteral testcase : testcases) {
  580. auto buffer = Lex(testcase);
  581. EXPECT_TRUE(buffer.has_errors());
  582. }
  583. }
  584. TEST_F(LexerTest, Identifiers) {
  585. auto buffer = Lex(" foobar");
  586. EXPECT_FALSE(buffer.has_errors());
  587. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  588. {.kind = TokenKind::Identifier,
  589. .column = 4,
  590. .indent_column = 4,
  591. .text = "foobar"},
  592. {TokenKind::EndOfFile},
  593. }));
  594. // Check different kinds of identifier character sequences.
  595. buffer = Lex("_foo_bar");
  596. EXPECT_FALSE(buffer.has_errors());
  597. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  598. {.kind = TokenKind::Identifier, .text = "_foo_bar"},
  599. {TokenKind::EndOfFile},
  600. }));
  601. buffer = Lex("foo2bar00");
  602. EXPECT_FALSE(buffer.has_errors());
  603. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  604. {.kind = TokenKind::Identifier, .text = "foo2bar00"},
  605. {TokenKind::EndOfFile},
  606. }));
  607. // Check that we can parse identifiers that start with a keyword.
  608. buffer = Lex("fnord");
  609. EXPECT_FALSE(buffer.has_errors());
  610. EXPECT_THAT(buffer, HasTokens(llvm::ArrayRef<ExpectedToken>{
  611. {.kind = TokenKind::Identifier, .text = "fnord"},
  612. {TokenKind::EndOfFile},
  613. }));
  614. // Check multiple identifiers with indent and interning.
  615. buffer = Lex(" foo;bar\nbar \n foo\tfoo");
  616. EXPECT_FALSE(buffer.has_errors());
  617. EXPECT_THAT(buffer,
  618. HasTokens(llvm::ArrayRef<ExpectedToken>{
  619. {.kind = TokenKind::Identifier,
  620. .line = 1,
  621. .column = 4,
  622. .indent_column = 4,
  623. .text = "foo"},
  624. {.kind = TokenKind::Semi},
  625. {.kind = TokenKind::Identifier,
  626. .line = 1,
  627. .column = 8,
  628. .indent_column = 4,
  629. .text = "bar"},
  630. {.kind = TokenKind::Identifier,
  631. .line = 2,
  632. .column = 1,
  633. .indent_column = 1,
  634. .text = "bar"},
  635. {.kind = TokenKind::Identifier,
  636. .line = 3,
  637. .column = 3,
  638. .indent_column = 3,
  639. .text = "foo"},
  640. {.kind = TokenKind::Identifier,
  641. .line = 3,
  642. .column = 7,
  643. .indent_column = 3,
  644. .text = "foo"},
  645. {.kind = TokenKind::EndOfFile, .line = 3, .column = 10},
  646. }));
  647. }
  648. TEST_F(LexerTest, StringLiterals) {
  649. llvm::StringLiteral testcase = R"(
  650. "hello world\n"
  651. '''foo
  652. test \
  653. \xAB
  654. ''' trailing
  655. #"""#
  656. "\0"
  657. #"\0"foo"\1"#
  658. """x"""
  659. )";
  660. auto buffer = Lex(testcase);
  661. EXPECT_FALSE(buffer.has_errors());
  662. EXPECT_THAT(buffer,
  663. HasTokens(llvm::ArrayRef<ExpectedToken>{
  664. {.kind = TokenKind::StringLiteral,
  665. .line = 2,
  666. .column = 5,
  667. .indent_column = 5,
  668. .string_contents = {"hello world\n"}},
  669. {.kind = TokenKind::StringLiteral,
  670. .line = 4,
  671. .column = 5,
  672. .indent_column = 5,
  673. .string_contents = {" test \xAB\n"}},
  674. {.kind = TokenKind::Identifier,
  675. .line = 7,
  676. .column = 10,
  677. .indent_column = 5,
  678. .text = "trailing"},
  679. {.kind = TokenKind::StringLiteral,
  680. .line = 9,
  681. .column = 7,
  682. .indent_column = 7,
  683. .string_contents = {"\""}},
  684. {.kind = TokenKind::StringLiteral,
  685. .line = 11,
  686. .column = 5,
  687. .indent_column = 5,
  688. .string_contents = llvm::StringLiteral::withInnerNUL("\0")},
  689. {.kind = TokenKind::StringLiteral,
  690. .line = 13,
  691. .column = 5,
  692. .indent_column = 5,
  693. .string_contents = {"\\0\"foo\"\\1"}},
  694. // """x""" is three string literals, not one invalid
  695. // attempt at a block string literal.
  696. {.kind = TokenKind::StringLiteral,
  697. .line = 15,
  698. .column = 5,
  699. .indent_column = 5,
  700. .string_contents = {""}},
  701. {.kind = TokenKind::StringLiteral,
  702. .line = 15,
  703. .column = 7,
  704. .indent_column = 5,
  705. .string_contents = {"x"}},
  706. {.kind = TokenKind::StringLiteral,
  707. .line = 15,
  708. .column = 10,
  709. .indent_column = 5,
  710. .string_contents = {""}},
  711. {.kind = TokenKind::EndOfFile, .line = 16, .column = 3},
  712. }));
  713. }
  714. TEST_F(LexerTest, InvalidStringLiterals) {
  715. llvm::StringLiteral invalid[] = {
  716. // clang-format off
  717. R"(")",
  718. R"('''
  719. '')",
  720. R"("\)",
  721. R"("\")",
  722. R"("\\)",
  723. R"("\\\")",
  724. R"(''')",
  725. R"('''
  726. )",
  727. R"('''\)",
  728. R"(#'''
  729. ''')",
  730. // clang-format on
  731. };
  732. for (llvm::StringLiteral test : invalid) {
  733. SCOPED_TRACE(test);
  734. auto buffer = Lex(test);
  735. EXPECT_TRUE(buffer.has_errors());
  736. // We should have formed at least one error token.
  737. bool found_error = false;
  738. for (TokenizedBuffer::Token token : buffer.tokens()) {
  739. if (buffer.GetKind(token) == TokenKind::Error) {
  740. found_error = true;
  741. break;
  742. }
  743. }
  744. EXPECT_TRUE(found_error);
  745. }
  746. }
// Checks which `[iuf]<digits>` spellings lex as sized type literals versus
// plain identifiers, and that the bit-width payload is recorded for the real
// literals via `GetTypeLiteralSize`.
TEST_F(LexerTest, TypeLiterals) {
  llvm::StringLiteral testcase = R"(
    i0 i1 i20 i999999999999 i0x1
    u0 u1 u64 u64b
    f32 f80 f1 fi
    s1
  )";
  auto buffer = Lex(testcase);
  EXPECT_FALSE(buffer.has_errors());
  ASSERT_THAT(buffer,
              HasTokens(llvm::ArrayRef<ExpectedToken>{
                  // A zero width is not a valid type literal, so `i0` lexes
                  // as an ordinary identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"i0"}},
                  {.kind = TokenKind::IntegerTypeLiteral,
                   .line = 2,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"i1"}},
                  {.kind = TokenKind::IntegerTypeLiteral,
                   .line = 2,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"i20"}},
                  // Widths larger than fit in common machine ints are still
                  // valid literals; the size is checked below.
                  {.kind = TokenKind::IntegerTypeLiteral,
                   .line = 2,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"i999999999999"}},
                  // Only decimal digits form a width; `i0x1` is an
                  // identifier, not a hex-sized literal.
                  {.kind = TokenKind::Identifier,
                   .line = 2,
                   .column = 29,
                   .indent_column = 5,
                   .text = {"i0x1"}},
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"u0"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral,
                   .line = 3,
                   .column = 8,
                   .indent_column = 5,
                   .text = {"u1"}},
                  {.kind = TokenKind::UnsignedIntegerTypeLiteral,
                   .line = 3,
                   .column = 11,
                   .indent_column = 5,
                   .text = {"u64"}},
                  // A trailing letter after the digits makes the whole word
                  // an identifier again.
                  {.kind = TokenKind::Identifier,
                   .line = 3,
                   .column = 15,
                   .indent_column = 5,
                   .text = {"u64b"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral,
                   .line = 4,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"f32"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral,
                   .line = 4,
                   .column = 9,
                   .indent_column = 5,
                   .text = {"f80"}},
                  {.kind = TokenKind::FloatingPointTypeLiteral,
                   .line = 4,
                   .column = 13,
                   .indent_column = 5,
                   .text = {"f1"}},
                  // No digits at all: identifier.
                  {.kind = TokenKind::Identifier,
                   .line = 4,
                   .column = 16,
                   .indent_column = 5,
                   .text = {"fi"}},
                  // `s` is not a type-literal prefix.
                  {.kind = TokenKind::Identifier,
                   .line = 5,
                   .column = 5,
                   .indent_column = 5,
                   .text = {"s1"}},
                  {.kind = TokenKind::EndOfFile, .line = 6, .column = 3},
              }));
  // Spot-check the decoded bit widths of the type-literal tokens. Indices
  // refer to the token list asserted above.
  auto token_i1 = buffer.tokens().begin() + 1;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i1), 1);
  auto token_i20 = buffer.tokens().begin() + 2;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i20), 20);
  auto token_i999999999999 = buffer.tokens().begin() + 3;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_i999999999999), 999999999999ULL);
  auto token_u1 = buffer.tokens().begin() + 6;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u1), 1);
  auto token_u64 = buffer.tokens().begin() + 7;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_u64), 64);
  auto token_f32 = buffer.tokens().begin() + 9;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f32), 32);
  auto token_f80 = buffer.tokens().begin() + 10;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f80), 80);
  auto token_f1 = buffer.tokens().begin() + 11;
  EXPECT_EQ(buffer.GetTypeLiteralSize(*token_f1), 1);
}
  847. TEST_F(LexerTest, TypeLiteralTooManyDigits) {
  848. std::string code = "i";
  849. constexpr int Count = 10000;
  850. code.append(Count, '9');
  851. Testing::MockDiagnosticConsumer consumer;
  852. EXPECT_CALL(consumer,
  853. HandleDiagnostic(IsDiagnostic(
  854. DiagnosticKind::TooManyDigits, DiagnosticLevel::Error, 1, 2,
  855. HasSubstr(llvm::formatv(" {0} ", Count)))));
  856. auto buffer = Lex(code, consumer);
  857. EXPECT_TRUE(buffer.has_errors());
  858. ASSERT_THAT(
  859. buffer,
  860. HasTokens(llvm::ArrayRef<ExpectedToken>{
  861. {.kind = TokenKind::Error,
  862. .line = 1,
  863. .column = 1,
  864. .indent_column = 1,
  865. .text = {code}},
  866. {.kind = TokenKind::EndOfFile, .line = 1, .column = Count + 2},
  867. }));
  868. }
  869. TEST_F(LexerTest, DiagnosticTrailingComment) {
  870. llvm::StringLiteral testcase = R"(
  871. // Hello!
  872. var String x; // trailing comment
  873. )";
  874. Testing::MockDiagnosticConsumer consumer;
  875. EXPECT_CALL(consumer,
  876. HandleDiagnostic(IsDiagnostic(DiagnosticKind::TrailingComment,
  877. DiagnosticLevel::Error, 3, 19, _)));
  878. Lex(testcase, consumer);
  879. }
  880. TEST_F(LexerTest, DiagnosticWhitespace) {
  881. Testing::MockDiagnosticConsumer consumer;
  882. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  883. DiagnosticKind::NoWhitespaceAfterCommentIntroducer,
  884. DiagnosticLevel::Error, 1, 3, _)));
  885. Lex("//no space after comment", consumer);
  886. }
  887. TEST_F(LexerTest, DiagnosticUnrecognizedEscape) {
  888. Testing::MockDiagnosticConsumer consumer;
  889. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  890. DiagnosticKind::UnknownEscapeSequence,
  891. DiagnosticLevel::Error, 1, 8, HasSubstr("`b`"))));
  892. Lex(R"("hello\bworld")", consumer);
  893. }
  894. TEST_F(LexerTest, DiagnosticBadHex) {
  895. Testing::MockDiagnosticConsumer consumer;
  896. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  897. DiagnosticKind::HexadecimalEscapeMissingDigits,
  898. DiagnosticLevel::Error, 1, 9, _)));
  899. Lex(R"("hello\xabworld")", consumer);
  900. }
  901. TEST_F(LexerTest, DiagnosticInvalidDigit) {
  902. Testing::MockDiagnosticConsumer consumer;
  903. EXPECT_CALL(consumer, HandleDiagnostic(IsDiagnostic(
  904. DiagnosticKind::InvalidDigit,
  905. DiagnosticLevel::Error, 1, 6, HasSubstr("'a'"))));
  906. Lex("0x123abc", consumer);
  907. }
  908. TEST_F(LexerTest, DiagnosticMissingTerminator) {
  909. Testing::MockDiagnosticConsumer consumer;
  910. EXPECT_CALL(consumer,
  911. HandleDiagnostic(IsDiagnostic(DiagnosticKind::UnterminatedString,
  912. DiagnosticLevel::Error, 1, 1, _)));
  913. Lex(R"(#" ")", consumer);
  914. }
  915. TEST_F(LexerTest, DiagnosticUnrecognizedChar) {
  916. Testing::MockDiagnosticConsumer consumer;
  917. EXPECT_CALL(consumer, HandleDiagnostic(
  918. IsDiagnostic(DiagnosticKind::UnrecognizedCharacters,
  919. DiagnosticLevel::Error, 1, 1, _)));
  920. Lex("\b", consumer);
  921. }
  922. auto GetAndDropLine(llvm::StringRef& text) -> std::string {
  923. auto newline_offset = text.find_first_of('\n');
  924. llvm::StringRef line = text.slice(0, newline_offset);
  925. if (newline_offset != llvm::StringRef::npos) {
  926. text = text.substr(newline_offset + 1);
  927. } else {
  928. text = "";
  929. }
  930. return line.str();
  931. }
  932. TEST_F(LexerTest, PrintingInteger) {
  933. auto buffer = Lex("123");
  934. ASSERT_FALSE(buffer.has_errors());
  935. std::string print_storage;
  936. llvm::raw_string_ostream print_stream(print_storage);
  937. buffer.Print(print_stream);
  938. llvm::StringRef print = print_stream.str();
  939. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  940. EXPECT_THAT(GetAndDropLine(print),
  941. StrEq("{ index: 0, kind: 'IntegerLiteral', line: 1, "
  942. "column: 1, indent: 1, spelling: '123', value: `123`, "
  943. "has_trailing_space: true },"));
  944. EXPECT_THAT(GetAndDropLine(print), HasSubstr("'EndOfFile'"));
  945. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  946. EXPECT_TRUE(print.empty()) << print;
  947. }
  948. TEST_F(LexerTest, PrintingReal) {
  949. auto buffer = Lex("2.5");
  950. ASSERT_FALSE(buffer.has_errors());
  951. std::string print_storage;
  952. llvm::raw_string_ostream print_stream(print_storage);
  953. buffer.Print(print_stream);
  954. llvm::StringRef print = print_stream.str();
  955. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  956. EXPECT_THAT(
  957. GetAndDropLine(print),
  958. StrEq("{ index: 0, kind: 'RealLiteral', line: 1, column: 1, indent: "
  959. "1, spelling: '2.5', value: `25*10^-1`, has_trailing_space: true "
  960. "},"));
  961. EXPECT_THAT(GetAndDropLine(print), HasSubstr("'EndOfFile'"));
  962. EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  963. EXPECT_TRUE(print.empty()) << print;
  964. }
TEST_F(LexerTest, PrintingPadding) {
  // Test kind padding.
  auto buffer = Lex("(;foo;)");
  ASSERT_FALSE(buffer.has_errors());
  std::string print_storage;
  llvm::raw_string_ostream print_stream(print_storage);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  // The open paren records its matching close paren's index (4)...
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 0, kind: 'OpenParen', line: 1, column: "
                    "1, indent: 1, spelling: '(', closing_token: 4 },"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 1, kind: 'Semi', line: 1, column: "
                    "2, indent: 1, spelling: ';' },"));
  // Identifiers print their interned identifier index.
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 2, kind: 'Identifier', line: 1, column: "
                    "3, indent: 1, spelling: 'foo', identifier: 0 },"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 3, kind: 'Semi', line: 1, column: "
                    "6, indent: 1, spelling: ';' },"));
  // ...and the close paren points back at the opener (0).
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 4, kind: 'CloseParen', line: 1, column: "
                    "7, indent: 1, spelling: ')', opening_token: 0, "
                    "has_trailing_space: true },"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 5, kind: 'EndOfFile', line: 1, column: "
                    "8, indent: 1, spelling: '' },"));
  EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  EXPECT_TRUE(print.empty()) << print;
}
TEST_F(LexerTest, PrintingPaddingDigits) {
  // Test digit padding with max values of 9, 10, and 11.
  // The input puts two semicolons on line 11 at columns 9 and 10 (eight
  // leading spaces), so line/column/indent exercise 1- and 2-digit widths.
  auto buffer = Lex(";\n\n\n\n\n\n\n\n\n\n        ;;");
  ASSERT_FALSE(buffer.has_errors());
  std::string print_storage;
  llvm::raw_string_ostream print_stream(print_storage);
  buffer.Print(print_stream);
  llvm::StringRef print = print_stream.str();
  EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 0, kind: 'Semi', line: 1, column: 1, "
                    "indent: 1, spelling: ';', has_trailing_space: true },"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 1, kind: 'Semi', line: 11, column: 9, "
                    "indent: 9, spelling: ';' },"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 2, kind: 'Semi', line: 11, column: 10, "
                    "indent: 9, spelling: ';', has_trailing_space: true },"));
  EXPECT_THAT(GetAndDropLine(print),
              StrEq("{ index: 3, kind: 'EndOfFile', line: 11, column: 11, "
                    "indent: 9, spelling: '' },"));
  EXPECT_THAT(GetAndDropLine(print), StrEq("]"));
  EXPECT_TRUE(print.empty()) << print;
}
TEST_F(LexerTest, PrintingAsYaml) {
  // Test that we can parse this into YAML and verify line and indent data.
  // Input layout: `;` on line 2 (one leading space), `; ;` on line 5, then
  // blank lines so EndOfFile lands on line 15.
  auto buffer = Lex("\n ;\n\n\n; ;\n\n\n\n\n\n\n\n\n\n");
  ASSERT_FALSE(buffer.has_errors());
  std::string print_output;
  llvm::raw_string_ostream print_stream(print_output);
  buffer.Print(print_stream);
  print_stream.flush();
  EXPECT_THAT(Yaml::Value::FromText(print_output),
              ElementsAre(Yaml::SequenceValue{
                  Yaml::MappingValue{{"index", "0"},
                                     {"kind", "Semi"},
                                     {"line", "2"},
                                     {"column", "2"},
                                     {"indent", "2"},
                                     {"spelling", ";"},
                                     {"has_trailing_space", "true"}},
                  Yaml::MappingValue{{"index", "1"},
                                     {"kind", "Semi"},
                                     {"line", "5"},
                                     {"column", "1"},
                                     {"indent", "1"},
                                     {"spelling", ";"},
                                     {"has_trailing_space", "true"}},
                  Yaml::MappingValue{{"index", "2"},
                                     {"kind", "Semi"},
                                     {"line", "5"},
                                     {"column", "3"},
                                     {"indent", "1"},
                                     {"spelling", ";"},
                                     {"has_trailing_space", "true"}},
                  // EndOfFile carries the final position and empty spelling.
                  Yaml::MappingValue{{"index", "3"},
                                     {"kind", "EndOfFile"},
                                     {"line", "15"},
                                     {"column", "1"},
                                     {"indent", "1"},
                                     {"spelling", ""}}}));
}
  1058. TEST_F(LexerTest, PrintToken) {
  1059. auto buffer = Lex("0x9");
  1060. ASSERT_FALSE(buffer.has_errors());
  1061. std::string print_output;
  1062. llvm::raw_string_ostream print_stream(print_output);
  1063. buffer.Print(print_stream);
  1064. llvm::StringRef print = print_stream.str();
  1065. EXPECT_THAT(GetAndDropLine(print), StrEq("["));
  1066. EXPECT_THAT(GetAndDropLine(print),
  1067. StrEq("{ index: 0, kind: 'IntegerLiteral', line: 1, "
  1068. "column: 1, indent: 1, spelling: '0x9', value: `9`, "
  1069. "has_trailing_space: true },"));
  1070. }
  1071. } // namespace
  1072. } // namespace Carbon::Testing