context.cpp 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/check/context.h"
  5. #include <utility>
  6. #include "common/check.h"
  7. #include "common/vlog.h"
  8. #include "llvm/ADT/STLExtras.h"
  9. #include "llvm/ADT/Sequence.h"
  10. #include "toolchain/check/declaration_name_stack.h"
  11. #include "toolchain/check/node_block_stack.h"
  12. #include "toolchain/diagnostics/diagnostic_kind.h"
  13. #include "toolchain/lex/tokenized_buffer.h"
  14. #include "toolchain/parse/node_kind.h"
  15. #include "toolchain/sem_ir/file.h"
  16. #include "toolchain/sem_ir/node.h"
  17. #include "toolchain/sem_ir/node_kind.h"
  18. namespace Carbon::Check {
// Constructs the checking context. All dependencies are stored by pointer and
// must outlive this object. The various stacks are given their owning IR and
// the verbose-logging stream so they can log as they are pushed/popped.
Context::Context(const Lex::TokenizedBuffer& tokens,
                 DiagnosticEmitter<Parse::Node>& emitter,
                 const Parse::Tree& parse_tree, SemIR::File& semantics_ir,
                 llvm::raw_ostream* vlog_stream)
    : tokens_(&tokens),
      emitter_(&emitter),
      parse_tree_(&parse_tree),
      semantics_ir_(&semantics_ir),
      vlog_stream_(vlog_stream),
      node_stack_(parse_tree, vlog_stream),
      node_block_stack_("node_block_stack_", semantics_ir, vlog_stream),
      params_or_args_stack_("params_or_args_stack_", semantics_ir, vlog_stream),
      args_type_info_stack_("args_type_info_stack_", semantics_ir, vlog_stream),
      declaration_name_stack_(this) {
  // Inserts the "Error" and "Type" types as "used types" so that
  // canonicalization can skip them. We don't emit either for lowering.
  canonical_types_.insert({SemIR::NodeId::BuiltinError, SemIR::TypeId::Error});
  canonical_types_.insert(
      {SemIR::NodeId::BuiltinTypeType, SemIR::TypeId::TypeType});
}
// Emits a "Semantics TODO" diagnostic naming the unimplemented feature in
// `label`, and returns false so callers can propagate the failure.
auto Context::TODO(Parse::Node parse_node, std::string label) -> bool {
  CARBON_DIAGNOSTIC(SemanticsTodo, Error, "Semantics TODO: `{0}`.",
                    std::string);
  emitter_->Emit(parse_node, SemanticsTodo, std::move(label));
  return false;
}
// Checks, at the end of processing, that all bookkeeping structures were
// fully unwound; each check prints the leftover count on failure.
auto Context::VerifyOnFinish() -> void {
  // Information in all the various context objects should be cleaned up as
  // various pieces of context go out of scope. At this point, nothing should
  // remain.
  // node_stack_ will still contain top-level entities.
  CARBON_CHECK(name_lookup_.empty()) << name_lookup_.size();
  CARBON_CHECK(scope_stack_.empty()) << scope_stack_.size();
  CARBON_CHECK(node_block_stack_.empty()) << node_block_stack_.size();
  CARBON_CHECK(params_or_args_stack_.empty()) << params_or_args_stack_.size();
}
  55. auto Context::AddNode(SemIR::Node node) -> SemIR::NodeId {
  56. auto node_id = node_block_stack_.AddNode(node);
  57. CARBON_VLOG() << "AddNode: " << node << "\n";
  58. return node_id;
  59. }
  60. auto Context::AddNodeAndPush(Parse::Node parse_node, SemIR::Node node) -> void {
  61. auto node_id = AddNode(node);
  62. node_stack_.Push(parse_node, node_id);
  63. }
// Emits an error for a name redeclared in the same scope, with a note
// pointing at the previous declaration `prev_def_id`.
auto Context::DiagnoseDuplicateName(Parse::Node parse_node,
                                    SemIR::NodeId prev_def_id) -> void {
  CARBON_DIAGNOSTIC(NameDeclarationDuplicate, Error,
                    "Duplicate name being declared in the same scope.");
  CARBON_DIAGNOSTIC(NameDeclarationPrevious, Note,
                    "Name is previously declared here.");
  auto prev_def = semantics_ir_->GetNode(prev_def_id);
  emitter_->Build(parse_node, NameDeclarationDuplicate)
      .Note(prev_def.parse_node(), NameDeclarationPrevious)
      .Emit();
}
// Emits a "name not found" error for `name_id` at `parse_node`.
auto Context::DiagnoseNameNotFound(Parse::Node parse_node,
                                   SemIR::StringId name_id) -> void {
  CARBON_DIAGNOSTIC(NameNotFound, Error, "Name `{0}` not found.",
                    llvm::StringRef);
  emitter_->Emit(parse_node, NameNotFound, semantics_ir_->GetString(name_id));
}
  81. auto Context::AddNameToLookup(Parse::Node name_node, SemIR::StringId name_id,
  82. SemIR::NodeId target_id) -> void {
  83. if (current_scope().names.insert(name_id).second) {
  84. name_lookup_[name_id].push_back(target_id);
  85. } else {
  86. DiagnoseDuplicateName(name_node, name_lookup_[name_id].back());
  87. }
  88. }
  89. auto Context::LookupName(Parse::Node parse_node, SemIR::StringId name_id,
  90. SemIR::NameScopeId scope_id, bool print_diagnostics)
  91. -> SemIR::NodeId {
  92. if (scope_id == SemIR::NameScopeId::Invalid) {
  93. auto it = name_lookup_.find(name_id);
  94. if (it == name_lookup_.end()) {
  95. if (print_diagnostics) {
  96. DiagnoseNameNotFound(parse_node, name_id);
  97. }
  98. return SemIR::NodeId::BuiltinError;
  99. }
  100. CARBON_CHECK(!it->second.empty())
  101. << "Should have been erased: " << semantics_ir_->GetString(name_id);
  102. // TODO: Check for ambiguous lookups.
  103. return it->second.back();
  104. } else {
  105. const auto& scope = semantics_ir_->GetNameScope(scope_id);
  106. auto it = scope.find(name_id);
  107. if (it == scope.end()) {
  108. if (print_diagnostics) {
  109. DiagnoseNameNotFound(parse_node, name_id);
  110. }
  111. return SemIR::NodeId::BuiltinError;
  112. }
  113. return it->second;
  114. }
  115. }
// Enters a new, empty lexical scope for name lookup.
auto Context::PushScope() -> void { scope_stack_.push_back({}); }
  117. auto Context::PopScope() -> void {
  118. auto scope = scope_stack_.pop_back_val();
  119. for (const auto& str_id : scope.names) {
  120. auto it = name_lookup_.find(str_id);
  121. if (it->second.size() == 1) {
  122. // Erase names that no longer resolve.
  123. name_lookup_.erase(it);
  124. } else {
  125. it->second.pop_back();
  126. }
  127. }
  128. }
  129. template <typename BranchNode, typename... Args>
  130. static auto AddDominatedBlockAndBranchImpl(Context& context,
  131. Parse::Node parse_node, Args... args)
  132. -> SemIR::NodeBlockId {
  133. if (!context.node_block_stack().is_current_block_reachable()) {
  134. return SemIR::NodeBlockId::Unreachable;
  135. }
  136. auto block_id = context.semantics_ir().AddNodeBlockId();
  137. context.AddNode(BranchNode::Make(parse_node, block_id, args...));
  138. return block_id;
  139. }
// Adds a new block reached by an unconditional branch from the current
// position, returning its ID (Unreachable if the position is unreachable).
auto Context::AddDominatedBlockAndBranch(Parse::Node parse_node)
    -> SemIR::NodeBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Node::Branch>(*this, parse_node);
}
// Adds a new block reached by a branch carrying `arg_id` from the current
// position, returning its ID (Unreachable if the position is unreachable).
auto Context::AddDominatedBlockAndBranchWithArg(Parse::Node parse_node,
                                                SemIR::NodeId arg_id)
    -> SemIR::NodeBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Node::BranchWithArg>(
      *this, parse_node, arg_id);
}
// Adds a new block reached by a conditional branch on `cond_id` from the
// current position, returning its ID (Unreachable if the position is
// unreachable).
auto Context::AddDominatedBlockAndBranchIf(Parse::Node parse_node,
                                           SemIR::NodeId cond_id)
    -> SemIR::NodeBlockId {
  return AddDominatedBlockAndBranchImpl<SemIR::Node::BranchIf>(
      *this, parse_node, cond_id);
}
  156. auto Context::AddConvergenceBlockAndPush(Parse::Node parse_node, int num_blocks)
  157. -> void {
  158. CARBON_CHECK(num_blocks >= 2) << "no convergence";
  159. SemIR::NodeBlockId new_block_id = SemIR::NodeBlockId::Unreachable;
  160. for ([[maybe_unused]] auto _ : llvm::seq(num_blocks)) {
  161. if (node_block_stack().is_current_block_reachable()) {
  162. if (new_block_id == SemIR::NodeBlockId::Unreachable) {
  163. new_block_id = semantics_ir().AddNodeBlockId();
  164. }
  165. AddNode(SemIR::Node::Branch::Make(parse_node, new_block_id));
  166. }
  167. node_block_stack().Pop();
  168. }
  169. node_block_stack().Push(new_block_id);
  170. }
  171. auto Context::AddConvergenceBlockWithArgAndPush(
  172. Parse::Node parse_node, std::initializer_list<SemIR::NodeId> block_args)
  173. -> SemIR::NodeId {
  174. CARBON_CHECK(block_args.size() >= 2) << "no convergence";
  175. SemIR::NodeBlockId new_block_id = SemIR::NodeBlockId::Unreachable;
  176. for (auto arg_id : block_args) {
  177. if (node_block_stack().is_current_block_reachable()) {
  178. if (new_block_id == SemIR::NodeBlockId::Unreachable) {
  179. new_block_id = semantics_ir().AddNodeBlockId();
  180. }
  181. AddNode(
  182. SemIR::Node::BranchWithArg::Make(parse_node, new_block_id, arg_id));
  183. }
  184. node_block_stack().Pop();
  185. }
  186. node_block_stack().Push(new_block_id);
  187. // Acquire the result value.
  188. SemIR::TypeId result_type_id =
  189. semantics_ir().GetNode(*block_args.begin()).type_id();
  190. return AddNode(
  191. SemIR::Node::BlockArg::Make(parse_node, result_type_id, new_block_id));
  192. }
  193. // Add the current code block to the enclosing function.
  194. auto Context::AddCurrentCodeBlockToFunction() -> void {
  195. CARBON_CHECK(!node_block_stack().empty()) << "no current code block";
  196. CARBON_CHECK(!return_scope_stack().empty()) << "no current function";
  197. if (!node_block_stack().is_current_block_reachable()) {
  198. // Don't include unreachable blocks in the function.
  199. return;
  200. }
  201. auto function_id = semantics_ir()
  202. .GetNode(return_scope_stack().back())
  203. .GetAsFunctionDeclaration();
  204. semantics_ir()
  205. .GetFunction(function_id)
  206. .body_block_ids.push_back(node_block_stack().PeekOrAdd());
  207. }
  208. auto Context::is_current_position_reachable() -> bool {
  209. if (!node_block_stack().is_current_block_reachable()) {
  210. return false;
  211. }
  212. // Our current position is at the end of a reachable block. That position is
  213. // reachable unless the previous instruction is a terminator instruction.
  214. auto block_contents = node_block_stack().PeekCurrentBlockContents();
  215. if (block_contents.empty()) {
  216. return true;
  217. }
  218. const auto& last_node = semantics_ir().GetNode(block_contents.back());
  219. return last_node.kind().terminator_kind() !=
  220. SemIR::TerminatorKind::Terminator;
  221. }
  222. namespace {
  223. // A handle to a new block that may be modified, with copy-on-write semantics.
  224. //
  225. // The constructor is given the ID of an existing block that provides the
  226. // initial contents of the new block. The new block is lazily allocated; if no
  227. // modifications have been made, the `id()` function will return the original
  228. // block ID.
  229. //
  230. // This is intended to avoid an unnecessary block allocation in the case where
  231. // the new block ends up being exactly the same as the original block.
  232. class CopyOnWriteBlock {
  233. public:
  234. CopyOnWriteBlock(SemIR::File& file, SemIR::NodeBlockId source_id)
  235. : file_(file), source_id_(source_id) {}
  236. auto id() -> SemIR::NodeBlockId const { return id_; }
  237. auto Set(int i, SemIR::NodeId value) -> void {
  238. if (file_.GetNodeBlock(id_)[i] == value) {
  239. return;
  240. }
  241. if (id_ == source_id_) {
  242. id_ = file_.AddNodeBlock(file_.GetNodeBlock(source_id_));
  243. }
  244. file_.GetNodeBlock(id_)[i] = value;
  245. }
  246. private:
  247. SemIR::File& file_;
  248. SemIR::NodeBlockId source_id_;
  249. SemIR::NodeBlockId id_ = source_id_;
  250. };
  251. } // namespace
// Initializes `target_id` from `value_id`: converts the value to the target's
// type, then emits the nodes performing the initialization appropriate for
// the converted expression's category. Returns the node representing the
// initialization result.
auto Context::Initialize(Parse::Node parse_node, SemIR::NodeId target_id,
                         SemIR::NodeId value_id) -> SemIR::NodeId {
  // Implicitly convert the value to the type of the target.
  auto type_id = semantics_ir().GetNode(target_id).type_id();
  auto expr_id = ImplicitAs(parse_node, value_id, type_id);
  SemIR::Node expr = semantics_ir().GetNode(expr_id);

  // Perform initialization now that we have an expression of the right type.
  switch (SemIR::GetExpressionCategory(semantics_ir(), expr_id)) {
    case SemIR::ExpressionCategory::NotExpression:
      CARBON_FATAL() << "Converting non-expression node " << expr
                     << " to initializing expression";

    case SemIR::ExpressionCategory::DurableReference:
    case SemIR::ExpressionCategory::EphemeralReference:
      // The design uses a custom "copy initialization" process here. We model
      // that as value binding followed by direct initialization.
      //
      // TODO: Determine whether this is observably different from the design,
      // and change either the toolchain or the design so they match.
      return AddNode(SemIR::Node::BindValue::Make(expr.parse_node(),
                                                  expr.type_id(), expr_id));

    case SemIR::ExpressionCategory::Value:
      // TODO: For class types, use an interface to determine how to perform
      // this operation.
      return expr_id;

    case SemIR::ExpressionCategory::Initializing:
      // Redirect the initializer's return slot at our target.
      MarkInitializerFor(expr_id, target_id);
      return expr_id;

    case SemIR::ExpressionCategory::Mixed:
      // Reload the node after the conversion above.
      expr = semantics_ir().GetNode(expr_id);
      // TODO: Make non-recursive.
      // TODO: This should be done as part of the `ImplicitAs` processing so
      // that we can still initialize directly from one tuple element if
      // another one needs to be converted.
      switch (expr.kind()) {
        case SemIR::NodeKind::TupleLiteral:
        case SemIR::NodeKind::StructLiteral: {
          bool is_tuple = expr.kind() == SemIR::NodeKind::TupleLiteral;
          auto elements_id =
              is_tuple ? expr.GetAsTupleLiteral() : expr.GetAsStructLiteral();
          auto elements = semantics_ir().GetNodeBlock(elements_id);
          // Only allocate a new element block if some element changes.
          CopyOnWriteBlock new_block(semantics_ir(), elements_id);
          bool is_in_place =
              SemIR::GetInitializingRepresentation(semantics_ir(), type_id)
                  .kind == SemIR::InitializingRepresentation::InPlace;
          for (auto [i, elem_id] : llvm::enumerate(elements)) {
            // TODO: We know the type already matches because we already invoked
            // `ImplicitAsRequired`, but this will need to change once we stop
            // doing that.
            auto inner_target_type = semantics_ir().GetNode(elem_id).type_id();
            // TODO: This should be placed into the return slot, and only
            // created if needed.
            auto inner_target_id =
                AddNode(is_tuple ? SemIR::Node::TupleAccess::Make(
                                       parse_node, inner_target_type, target_id,
                                       SemIR::MemberIndex(i))
                                 : SemIR::Node::StructAccess::Make(
                                       parse_node, inner_target_type, target_id,
                                       SemIR::MemberIndex(i)));
            // In-place representations recurse through InitializeAndFinalize
            // so any by-copy step is also emitted per element.
            auto new_id =
                is_in_place ? InitializeAndFinalize(parse_node, inner_target_id,
                                                    elem_id)
                            : Initialize(parse_node, inner_target_id, elem_id);
            new_block.Set(i, new_id);
          }
          return AddNode(
              is_tuple ? SemIR::Node::TupleInit::Make(parse_node, type_id,
                                                      expr_id, new_block.id())
                       : SemIR::Node::StructInit::Make(
                             parse_node, type_id, expr_id, new_block.id()));
        }
        default:
          CARBON_FATAL() << "Unexpected kind for mixed-category expression "
                         << expr.kind();
      }
  }
}
  328. auto Context::InitializeAndFinalize(Parse::Node parse_node,
  329. SemIR::NodeId target_id,
  330. SemIR::NodeId value_id) -> SemIR::NodeId {
  331. auto init_id = Initialize(parse_node, target_id, value_id);
  332. if (init_id == SemIR::NodeId::BuiltinError) {
  333. return init_id;
  334. }
  335. auto target_type_id = semantics_ir().GetNode(target_id).type_id();
  336. if (auto init_rep =
  337. SemIR::GetInitializingRepresentation(semantics_ir(), target_type_id);
  338. init_rep.kind == SemIR::InitializingRepresentation::ByCopy) {
  339. init_id = AddNode(SemIR::Node::InitializeFrom::Make(
  340. parse_node, target_type_id, init_id, target_id));
  341. }
  342. return init_id;
  343. }
// Converts `expr_id` to a value expression of the same type, materializing
// and binding temporaries as needed. Returns BuiltinError (after a
// diagnostic for non-expressions) when the conversion is not possible.
auto Context::ConvertToValueExpression(SemIR::NodeId expr_id) -> SemIR::NodeId {
  if (expr_id == SemIR::NodeId::BuiltinError) {
    // Propagate errors without further diagnostics.
    return expr_id;
  }
  switch (SemIR::GetExpressionCategory(semantics_ir(), expr_id)) {
    case SemIR::ExpressionCategory::NotExpression: {
      // TODO: We currently encounter this for use of namespaces and functions.
      // We should provide a better diagnostic for inappropriate use of
      // namespace names, and allow use of functions as values.
      CARBON_DIAGNOSTIC(UseOfNonExpressionAsValue, Error,
                        "Expression cannot be used as a value.");
      emitter().Emit(semantics_ir().GetNode(expr_id).parse_node(),
                     UseOfNonExpressionAsValue);
      return SemIR::NodeId::BuiltinError;
    }

    case SemIR::ExpressionCategory::Initializing:
      // Commit to using a temporary for this initializing expression.
      // TODO: Don't create a temporary if the initializing representation is
      // already a value representation.
      expr_id = FinalizeTemporary(expr_id, /*discarded=*/false);
      // The temporary is a reference; bind it like one below.
      [[fallthrough]];

    case SemIR::ExpressionCategory::DurableReference:
    case SemIR::ExpressionCategory::EphemeralReference: {
      // TODO: Support types with custom value representations.
      SemIR::Node expr = semantics_ir().GetNode(expr_id);
      return AddNode(SemIR::Node::BindValue::Make(expr.parse_node(),
                                                  expr.type_id(), expr_id));
    }

    case SemIR::ExpressionCategory::Value:
      // Already a value; nothing to do.
      return expr_id;

    case SemIR::ExpressionCategory::Mixed: {
      SemIR::Node expr = semantics_ir().GetNode(expr_id);
      // TODO: Make non-recursive.
      switch (expr.kind()) {
        case SemIR::NodeKind::TupleLiteral:
        case SemIR::NodeKind::StructLiteral: {
          bool is_tuple = expr.kind() == SemIR::NodeKind::TupleLiteral;
          auto elements_id =
              is_tuple ? expr.GetAsTupleLiteral() : expr.GetAsStructLiteral();
          auto elements = semantics_ir().GetNodeBlock(elements_id);
          // Convert each element in turn; the copy-on-write block only
          // allocates if some element actually changes.
          CopyOnWriteBlock new_block(semantics_ir(), elements_id);
          for (auto [i, elem_id] : llvm::enumerate(elements)) {
            new_block.Set(i, ConvertToValueExpression(elem_id));
          }
          return AddNode(is_tuple ? SemIR::Node::TupleValue::Make(
                                        expr.parse_node(), expr.type_id(),
                                        expr_id, new_block.id())
                                  : SemIR::Node::StructValue::Make(
                                        expr.parse_node(), expr.type_id(),
                                        expr_id, new_block.id()));
        }
        default:
          CARBON_FATAL() << "Unexpected kind for mixed-category expression "
                         << expr.kind();
      }
    }
  }
}
  402. // Convert the given expression to a value or reference expression of the same
  403. // type.
  404. auto Context::ConvertToValueOrReferenceExpression(SemIR::NodeId expr_id,
  405. bool discarded)
  406. -> SemIR::NodeId {
  407. switch (GetExpressionCategory(semantics_ir(), expr_id)) {
  408. case SemIR::ExpressionCategory::Value:
  409. case SemIR::ExpressionCategory::DurableReference:
  410. case SemIR::ExpressionCategory::EphemeralReference:
  411. return expr_id;
  412. case SemIR::ExpressionCategory::Initializing:
  413. return FinalizeTemporary(expr_id, discarded);
  414. case SemIR::ExpressionCategory::Mixed:
  415. case SemIR::ExpressionCategory::NotExpression:
  416. return ConvertToValueExpression(expr_id);
  417. }
  418. }
// Given an initializing expression, find its return slot. Returns `Invalid` if
// there is no return slot, because the initialization is not performed in
// place.
static auto FindReturnSlotForInitializer(SemIR::File& semantics_ir,
                                         SemIR::NodeId init_id)
    -> SemIR::NodeId {
  SemIR::Node init = semantics_ir.GetNode(init_id);
  switch (init.kind()) {
    default:
      CARBON_FATAL() << "Initialization from unexpected node " << init;

    case SemIR::NodeKind::StructInit:
    case SemIR::NodeKind::TupleInit:
      // TODO: Track a return slot for these initializers.
      CARBON_FATAL() << init
                     << " should be created with its return slot already "
                        "filled in properly";

    case SemIR::NodeKind::InitializeFrom: {
      // The destination operand is the slot being initialized.
      auto [src_id, dest_id] = init.GetAsInitializeFrom();
      return dest_id;
    }

    case SemIR::NodeKind::Call: {
      // A function without a return slot initializes nothing in place.
      auto [refs_id, callee_id] = init.GetAsCall();
      if (!semantics_ir.GetFunction(callee_id).return_slot_id.is_valid()) {
        return SemIR::NodeId::Invalid;
      }
      // The return slot is the last entry in the call's refs block.
      return semantics_ir.GetNodeBlock(refs_id).back();
    }

    case SemIR::NodeKind::ArrayInit: {
      // The return slot is the last entry in the refs block.
      auto [src_id, refs_id] = init.GetAsArrayInit();
      return semantics_ir.GetNodeBlock(refs_id).back();
    }
  }
}
  452. auto Context::MarkInitializerFor(SemIR::NodeId init_id, SemIR::NodeId target_id)
  453. -> void {
  454. auto return_slot_id = FindReturnSlotForInitializer(semantics_ir(), init_id);
  455. if (return_slot_id.is_valid()) {
  456. // Replace the temporary in the return slot with a reference to our target.
  457. CARBON_CHECK(semantics_ir().GetNode(return_slot_id).kind() ==
  458. SemIR::NodeKind::TemporaryStorage)
  459. << "Return slot for initializer does not contain a temporary; "
  460. << "initialized multiple times? Have "
  461. << semantics_ir().GetNode(return_slot_id);
  462. semantics_ir().ReplaceNode(
  463. return_slot_id,
  464. SemIR::Node::StubReference::Make(
  465. semantics_ir().GetNode(init_id).parse_node(),
  466. semantics_ir().GetNode(target_id).type_id(), target_id));
  467. }
  468. }
// Commits the initializing expression `init_id` to initialize a temporary,
// returning a `Temporary` node for the result. When `discarded` and the
// initializer has no return slot, no temporary is created and Invalid is
// returned.
auto Context::FinalizeTemporary(SemIR::NodeId init_id, bool discarded)
    -> SemIR::NodeId {
  auto return_slot_id = FindReturnSlotForInitializer(semantics_ir(), init_id);
  if (return_slot_id.is_valid()) {
    // The return slot should already have a materialized temporary in it.
    CARBON_CHECK(semantics_ir().GetNode(return_slot_id).kind() ==
                 SemIR::NodeKind::TemporaryStorage)
        << "Return slot for initializer does not contain a temporary; "
        << "initialized multiple times? Have "
        << semantics_ir().GetNode(return_slot_id);
    auto init = semantics_ir().GetNode(init_id);
    return AddNode(SemIR::Node::Temporary::Make(
        init.parse_node(), init.type_id(), return_slot_id, init_id));
  }

  if (discarded) {
    // Don't invent a temporary that we're going to discard.
    return SemIR::NodeId::Invalid;
  }

  // The initializer has no return slot, but we want to produce a temporary
  // object. Materialize one now.
  // TODO: Consider using an invalid ID to mean that we immediately
  // materialize and initialize a temporary, rather than two separate
  // nodes.
  auto init = semantics_ir().GetNode(init_id);
  auto temporary_id = AddNode(
      SemIR::Node::TemporaryStorage::Make(init.parse_node(), init.type_id()));
  return AddNode(SemIR::Node::Temporary::Make(init.parse_node(), init.type_id(),
                                              temporary_id, init_id));
}
// Finishes processing an expression whose value is discarded.
auto Context::HandleDiscardedExpression(SemIR::NodeId expr_id) -> void {
  // If we discard an initializing expression, convert it to a value or
  // reference so that it has something to initialize.
  ConvertToValueOrReferenceExpression(expr_id, /*discarded=*/true);
  // TODO: This will eventually need to do some "do not discard" analysis.
}
// Converts each argument in `arg_refs_id` to the type of the corresponding
// parameter in `param_refs_id`, updating the argument block in place.
// Diagnoses and returns false on an argument-count mismatch or a failed
// conversion. When `has_return_slot` is true, the trailing argument is the
// return slot and has no corresponding parameter entry.
auto Context::ImplicitAsForArgs(Parse::Node call_parse_node,
                                SemIR::NodeBlockId arg_refs_id,
                                Parse::Node param_parse_node,
                                SemIR::NodeBlockId param_refs_id,
                                bool has_return_slot) -> bool {
  // If both arguments and parameters are empty, return quickly. Otherwise,
  // we'll fetch both so that errors are consistent.
  if (arg_refs_id == SemIR::NodeBlockId::Empty &&
      param_refs_id == SemIR::NodeBlockId::Empty) {
    return true;
  }

  auto arg_refs = semantics_ir_->GetNodeBlock(arg_refs_id);
  auto param_refs = semantics_ir_->GetNodeBlock(param_refs_id);

  if (has_return_slot) {
    // There's no entry in the parameter block for the return slot, so ignore
    // the corresponding entry in the argument block.
    // TODO: Consider adding the return slot to the parameter list.
    CARBON_CHECK(!arg_refs.empty()) << "missing return slot";
    arg_refs = arg_refs.drop_back();
  }

  // If sizes mismatch, fail early.
  if (arg_refs.size() != param_refs.size()) {
    CARBON_DIAGNOSTIC(CallArgCountMismatch, Error,
                      "{0} argument(s) passed to function expecting "
                      "{1} argument(s).",
                      int, int);
    CARBON_DIAGNOSTIC(InCallToFunction, Note,
                      "Calling function declared here.");
    emitter_
        ->Build(call_parse_node, CallArgCountMismatch, arg_refs.size(),
                param_refs.size())
        .Note(param_parse_node, InCallToFunction)
        .Emit();
    return false;
  }

  if (param_refs.empty()) {
    return true;
  }

  // Annotate any diagnostic emitted during conversion with a note naming the
  // parameter being initialized. `diag_param_index` is assigned before each
  // conversion in the loop below.
  int diag_param_index;
  DiagnosticAnnotationScope annotate_diagnostics(emitter_, [&](auto& builder) {
    CARBON_DIAGNOSTIC(InCallToFunctionParam, Note,
                      "Initializing parameter {0} of function declared here.",
                      int);
    builder.Note(param_parse_node, InCallToFunctionParam, diag_param_index + 1);
  });

  // Check type conversions per-element.
  for (auto [i, value_id, param_ref] : llvm::enumerate(arg_refs, param_refs)) {
    diag_param_index = i;
    auto as_type_id = semantics_ir_->GetNode(param_ref).type_id();
    // TODO: Convert to the proper expression category. For now, we assume
    // parameters are all `let` bindings.
    value_id = ConvertToValueOfType(call_parse_node, value_id, as_type_id);
    if (value_id == SemIR::NodeId::BuiltinError) {
      return false;
    }
    arg_refs[i] = value_id;
  }

  return true;
}
// Performs a conversion from a tuple to an array type. The array is
// initialized in place into a tentatively-allocated temporary, one element
// at a time, and an ArrayInit node for the whole initialization is returned,
// or BuiltinError if the element counts mismatch or an element fails to
// initialize.
static auto ConvertTupleToArray(Context& context, SemIR::Node tuple_type,
                                SemIR::Node array_type,
                                SemIR::TypeId array_type_id,
                                SemIR::NodeId value_id) -> SemIR::NodeId {
  auto [array_bound_id, element_type_id] = array_type.GetAsArrayType();
  auto tuple_elem_types_id = tuple_type.GetAsTupleType();
  const auto& tuple_elem_types =
      context.semantics_ir().GetTypeBlock(tuple_elem_types_id);

  auto value = context.semantics_ir().GetNode(value_id);

  // A tuple literal's elements can be used directly; `literal_elems` stays
  // empty for any other tuple-typed expression.
  llvm::ArrayRef<SemIR::NodeId> literal_elems;
  if (value.kind() == SemIR::NodeKind::TupleLiteral) {
    literal_elems =
        context.semantics_ir().GetNodeBlock(value.GetAsTupleLiteral());
  }

  // Check that the tuple is the right size.
  uint64_t array_bound =
      context.semantics_ir().GetArrayBoundValue(array_bound_id);
  if (tuple_elem_types.size() != array_bound) {
    CARBON_DIAGNOSTIC(
        ArrayInitFromLiteralArgCountMismatch, Error,
        "Cannot initialize array of {0} element(s) from {1} initializer(s).",
        uint64_t, size_t);
    CARBON_DIAGNOSTIC(ArrayInitFromExpressionArgCountMismatch, Error,
                      "Cannot initialize array of {0} element(s) from tuple "
                      "with {1} element(s).",
                      uint64_t, size_t);
    context.emitter().Emit(
        context.semantics_ir().GetNode(value_id).parse_node(),
        literal_elems.empty() ? ArrayInitFromExpressionArgCountMismatch
                              : ArrayInitFromLiteralArgCountMismatch,
        array_bound, tuple_elem_types.size());
    return SemIR::NodeId::BuiltinError;
  }

  // If we're initializing from a tuple literal, we will use its elements
  // directly. Otherwise, materialize a temporary if needed and index into the
  // result.
  if (literal_elems.empty()) {
    value_id = context.ConvertToValueOrReferenceExpression(value_id);
  }

  // Arrays are always initialized in-place. Tentatively allocate a temporary
  // as the destination for the array initialization.
  auto return_slot_id = context.AddNode(
      SemIR::Node::TemporaryStorage::Make(value.parse_node(), array_type_id));

  // Initialize each element of the array from the corresponding element of the
  // tuple.
  llvm::SmallVector<SemIR::NodeId> inits;
  inits.reserve(array_bound + 1);
  for (auto [i, src_type_id] : llvm::enumerate(tuple_elem_types)) {
    // TODO: Add a new node kind for indexing an array at a constant index
    // so that we don't need an integer literal node here.
    auto index_id = context.AddNode(SemIR::Node::IntegerLiteral::Make(
        value.parse_node(),
        context.CanonicalizeType(SemIR::NodeId::BuiltinIntegerType),
        context.semantics_ir().AddIntegerLiteral(llvm::APInt(32, i))));
    auto target_id = context.AddNode(SemIR::Node::ArrayIndex::Make(
        value.parse_node(), element_type_id, return_slot_id, index_id));
    auto src_id =
        !literal_elems.empty()
            ? literal_elems[i]
            : context.AddNode(SemIR::Node::TupleIndex::Make(
                  value.parse_node(), src_type_id, value_id, index_id));
    auto init_id =
        context.InitializeAndFinalize(value.parse_node(), target_id, src_id);
    if (init_id == SemIR::NodeId::BuiltinError) {
      return SemIR::NodeId::BuiltinError;
    }
    inits.push_back(init_id);
  }

  // The last element of the refs block contains the return slot for the array
  // initialization.
  inits.push_back(return_slot_id);

  return context.AddNode(
      SemIR::Node::ArrayInit::Make(value.parse_node(), array_type_id, value_id,
                                   context.semantics_ir().AddNodeBlock(inits)));
}
  639. auto Context::ImplicitAs(Parse::Node parse_node, SemIR::NodeId value_id,
  640. SemIR::TypeId as_type_id) -> SemIR::NodeId {
  641. // Start by making sure both sides are valid. If any part is invalid, the
  642. // result is invalid and we shouldn't error.
  643. if (value_id == SemIR::NodeId::BuiltinError) {
  644. // If the value is invalid, we can't do much, but do "succeed".
  645. return value_id;
  646. }
  647. auto value = semantics_ir_->GetNode(value_id);
  648. auto value_type_id = value.type_id();
  649. if (value_type_id == SemIR::TypeId::Error ||
  650. as_type_id == SemIR::TypeId::Error) {
  651. return SemIR::NodeId::BuiltinError;
  652. }
  653. if (value_type_id == as_type_id) {
  654. return value_id;
  655. }
  656. auto as_type = semantics_ir_->GetTypeAllowBuiltinTypes(as_type_id);
  657. auto as_type_node = semantics_ir_->GetNode(as_type);
  658. // A tuple (T1, T2, ..., Tn) converts to [T; n] if each Ti converts to T.
  659. if (as_type_node.kind() == SemIR::NodeKind::ArrayType) {
  660. auto value_type_node = semantics_ir_->GetNode(
  661. semantics_ir_->GetTypeAllowBuiltinTypes(value_type_id));
  662. if (value_type_node.kind() == SemIR::NodeKind::TupleType) {
  663. // The conversion from tuple to array is `final`, so we don't need a
  664. // fallback path here.
  665. return ConvertTupleToArray(*this, value_type_node, as_type_node,
  666. as_type_id, value_id);
  667. }
  668. }
  669. if (as_type_id == SemIR::TypeId::TypeType) {
  670. // A tuple of types converts to type `type`.
  671. // TODO: This should apply even for non-literal tuples.
  672. if (value.kind() == SemIR::NodeKind::TupleLiteral) {
  673. // The conversion from tuple to `type` is `final`.
  674. auto tuple_block_id = value.GetAsTupleLiteral();
  675. llvm::SmallVector<SemIR::TypeId> type_ids;
  676. // If it is empty tuple type, we don't fetch anything.
  677. if (tuple_block_id != SemIR::NodeBlockId::Empty) {
  678. const auto& tuple_block = semantics_ir_->GetNodeBlock(tuple_block_id);
  679. for (auto tuple_node_id : tuple_block) {
  680. // TODO: This call recurses back to this function. Switch to an
  681. // iterative approach.
  682. type_ids.push_back(
  683. ExpressionAsType(value.parse_node(), tuple_node_id));
  684. }
  685. }
  686. auto tuple_type_id =
  687. CanonicalizeTupleType(value.parse_node(), std::move(type_ids));
  688. return semantics_ir_->GetTypeAllowBuiltinTypes(tuple_type_id);
  689. }
  690. // When converting `{}` to a type, the result is `{} as type`.
  691. // TODO: This conversion should also be performed for a non-literal value of
  692. // type `{}`.
  693. if (value.kind() == SemIR::NodeKind::StructLiteral &&
  694. value.GetAsStructLiteral() == SemIR::NodeBlockId::Empty) {
  695. return semantics_ir_->GetType(value_type_id);
  696. }
  697. }
  698. // TODO: Handle ImplicitAs for compatible structs and tuples.
  699. CARBON_DIAGNOSTIC(ImplicitAsConversionFailure, Error,
  700. "Cannot implicitly convert from `{0}` to `{1}`.",
  701. std::string, std::string);
  702. emitter_
  703. ->Build(parse_node, ImplicitAsConversionFailure,
  704. semantics_ir_->StringifyType(
  705. semantics_ir_->GetNode(value_id).type_id()),
  706. semantics_ir_->StringifyType(as_type_id))
  707. .Emit();
  708. return SemIR::NodeId::BuiltinError;
  709. }
  710. auto Context::ParamOrArgStart() -> void { params_or_args_stack_.Push(); }
  711. auto Context::ParamOrArgComma() -> void {
  712. ParamOrArgSave(node_stack_.PopExpression());
  713. }
  714. auto Context::ParamOrArgEndNoPop(Parse::NodeKind start_kind) -> void {
  715. if (parse_tree_->node_kind(node_stack_.PeekParseNode()) != start_kind) {
  716. ParamOrArgSave(node_stack_.PopExpression());
  717. }
  718. }
  719. auto Context::ParamOrArgPop() -> SemIR::NodeBlockId {
  720. return params_or_args_stack_.Pop();
  721. }
  722. auto Context::ParamOrArgEnd(Parse::NodeKind start_kind) -> SemIR::NodeBlockId {
  723. ParamOrArgEndNoPop(start_kind);
  724. return ParamOrArgPop();
  725. }
// Shared canonicalization machinery: fingerprints a type from `kind` plus the
// kind-specific `profile_type` callback, then either returns the existing
// canonical TypeId for that fingerprint or calls `make_node` to obtain the
// node and registers it as the new canonical type.
auto Context::CanonicalizeTypeImpl(
    SemIR::NodeKind kind,
    llvm::function_ref<void(llvm::FoldingSetNodeID& canonical_id)> profile_type,
    llvm::function_ref<SemIR::NodeId()> make_node) -> SemIR::TypeId {
  // Build the folding-set key from the node kind and its type-specific data.
  llvm::FoldingSetNodeID canonical_id;
  kind.Profile(canonical_id);
  profile_type(canonical_id);
  // Look for an existing canonical entry with the same fingerprint.
  void* insert_pos;
  auto* node =
      canonical_type_nodes_.FindNodeOrInsertPos(canonical_id, insert_pos);
  if (node != nullptr) {
    return node->type_id();
  }
  // No match: materialize the node and record its canonical type.
  auto node_id = make_node();
  auto type_id = semantics_ir_->AddType(node_id);
  CARBON_CHECK(canonical_types_.insert({node_id, type_id}).second);
  type_node_storage_.push_back(
      std::make_unique<TypeNode>(canonical_id, type_id));
  // In a debug build, check that our insertion position is still valid. It
  // could have been invalidated by a misbehaving `make_node`.
  CARBON_DCHECK([&] {
    void* check_insert_pos;
    auto* check_node = canonical_type_nodes_.FindNodeOrInsertPos(
        canonical_id, check_insert_pos);
    return !check_node && insert_pos == check_insert_pos;
  }()) << "Type was created recursively during canonicalization";
  // Insert at the position computed above; must happen after the DCHECK.
  canonical_type_nodes_.InsertNode(type_node_storage_.back().get(), insert_pos);
  return type_id;
}
  755. // Compute a fingerprint for a tuple type, for use as a key in a folding set.
  756. static auto ProfileTupleType(llvm::ArrayRef<SemIR::TypeId> type_ids,
  757. llvm::FoldingSetNodeID& canonical_id) -> void {
  758. for (auto type_id : type_ids) {
  759. canonical_id.AddInteger(type_id.index);
  760. }
  761. }
  762. // Compute a fingerprint for a type, for use as a key in a folding set.
  763. static auto ProfileType(Context& semantics_context, SemIR::Node node,
  764. llvm::FoldingSetNodeID& canonical_id) -> void {
  765. switch (node.kind()) {
  766. case SemIR::NodeKind::ArrayType: {
  767. auto [bound_id, element_type_id] = node.GetAsArrayType();
  768. canonical_id.AddInteger(
  769. semantics_context.semantics_ir().GetArrayBoundValue(bound_id));
  770. canonical_id.AddInteger(element_type_id.index);
  771. break;
  772. }
  773. case SemIR::NodeKind::Builtin:
  774. canonical_id.AddInteger(node.GetAsBuiltin().AsInt());
  775. break;
  776. case SemIR::NodeKind::CrossReference: {
  777. // TODO: Cross-references should be canonicalized by looking at their
  778. // target rather than treating them as new unique types.
  779. auto [xref_id, node_id] = node.GetAsCrossReference();
  780. canonical_id.AddInteger(xref_id.index);
  781. canonical_id.AddInteger(node_id.index);
  782. break;
  783. }
  784. case SemIR::NodeKind::ConstType:
  785. canonical_id.AddInteger(
  786. semantics_context.GetUnqualifiedType(node.GetAsConstType()).index);
  787. break;
  788. case SemIR::NodeKind::PointerType:
  789. canonical_id.AddInteger(node.GetAsPointerType().index);
  790. break;
  791. case SemIR::NodeKind::StructType: {
  792. auto refs =
  793. semantics_context.semantics_ir().GetNodeBlock(node.GetAsStructType());
  794. for (const auto& ref_id : refs) {
  795. auto ref = semantics_context.semantics_ir().GetNode(ref_id);
  796. auto [name_id, type_id] = ref.GetAsStructTypeField();
  797. canonical_id.AddInteger(name_id.index);
  798. canonical_id.AddInteger(type_id.index);
  799. }
  800. break;
  801. }
  802. case SemIR::NodeKind::TupleType:
  803. ProfileTupleType(
  804. semantics_context.semantics_ir().GetTypeBlock(node.GetAsTupleType()),
  805. canonical_id);
  806. break;
  807. default:
  808. CARBON_FATAL() << "Unexpected type node " << node;
  809. }
  810. }
  811. auto Context::CanonicalizeTypeAndAddNodeIfNew(SemIR::Node node)
  812. -> SemIR::TypeId {
  813. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  814. ProfileType(*this, node, canonical_id);
  815. };
  816. auto make_node = [&] { return AddNode(node); };
  817. return CanonicalizeTypeImpl(node.kind(), profile_node, make_node);
  818. }
  819. auto Context::CanonicalizeType(SemIR::NodeId node_id) -> SemIR::TypeId {
  820. auto it = canonical_types_.find(node_id);
  821. if (it != canonical_types_.end()) {
  822. return it->second;
  823. }
  824. auto node = semantics_ir_->GetNode(node_id);
  825. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  826. ProfileType(*this, node, canonical_id);
  827. };
  828. auto make_node = [&] { return node_id; };
  829. return CanonicalizeTypeImpl(node.kind(), profile_node, make_node);
  830. }
  831. auto Context::CanonicalizeStructType(Parse::Node parse_node,
  832. SemIR::NodeBlockId refs_id)
  833. -> SemIR::TypeId {
  834. return CanonicalizeTypeAndAddNodeIfNew(SemIR::Node::StructType::Make(
  835. parse_node, SemIR::TypeId::TypeType, refs_id));
  836. }
  837. auto Context::CanonicalizeTupleType(Parse::Node parse_node,
  838. llvm::ArrayRef<SemIR::TypeId> type_ids)
  839. -> SemIR::TypeId {
  840. // Defer allocating a SemIR::TypeBlockId until we know this is a new type.
  841. auto profile_tuple = [&](llvm::FoldingSetNodeID& canonical_id) {
  842. ProfileTupleType(type_ids, canonical_id);
  843. };
  844. auto make_tuple_node = [&] {
  845. return AddNode(
  846. SemIR::Node::TupleType::Make(parse_node, SemIR::TypeId::TypeType,
  847. semantics_ir_->AddTypeBlock(type_ids)));
  848. };
  849. return CanonicalizeTypeImpl(SemIR::NodeKind::TupleType, profile_tuple,
  850. make_tuple_node);
  851. }
  852. auto Context::GetPointerType(Parse::Node parse_node,
  853. SemIR::TypeId pointee_type_id) -> SemIR::TypeId {
  854. return CanonicalizeTypeAndAddNodeIfNew(SemIR::Node::PointerType::Make(
  855. parse_node, SemIR::TypeId::TypeType, pointee_type_id));
  856. }
  857. auto Context::GetUnqualifiedType(SemIR::TypeId type_id) -> SemIR::TypeId {
  858. SemIR::Node type_node =
  859. semantics_ir_->GetNode(semantics_ir_->GetTypeAllowBuiltinTypes(type_id));
  860. if (type_node.kind() == SemIR::NodeKind::ConstType) {
  861. return type_node.GetAsConstType();
  862. }
  863. return type_id;
  864. }
  865. auto Context::PrintForStackDump(llvm::raw_ostream& output) const -> void {
  866. node_stack_.PrintForStackDump(output);
  867. node_block_stack_.PrintForStackDump(output);
  868. params_or_args_stack_.PrintForStackDump(output);
  869. args_type_info_stack_.PrintForStackDump(output);
  870. }
  871. } // namespace Carbon::Check