// semantics_context.cpp
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "toolchain/semantics/semantics_context.h"

#include <utility>

#include "common/check.h"
#include "common/vlog.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "toolchain/diagnostics/diagnostic_kind.h"
#include "toolchain/lexer/tokenized_buffer.h"
#include "toolchain/parser/parse_node_kind.h"
#include "toolchain/semantics/semantics_declaration_name_stack.h"
#include "toolchain/semantics/semantics_ir.h"
#include "toolchain/semantics/semantics_node.h"
#include "toolchain/semantics/semantics_node_block_stack.h"
#include "toolchain/semantics/semantics_node_kind.h"
  17. namespace Carbon::Check {
  18. Context::Context(const TokenizedBuffer& tokens,
  19. DiagnosticEmitter<ParseTree::Node>& emitter,
  20. const ParseTree& parse_tree, SemIR::File& semantics_ir,
  21. llvm::raw_ostream* vlog_stream)
  22. : tokens_(&tokens),
  23. emitter_(&emitter),
  24. parse_tree_(&parse_tree),
  25. semantics_ir_(&semantics_ir),
  26. vlog_stream_(vlog_stream),
  27. node_stack_(parse_tree, vlog_stream),
  28. node_block_stack_("node_block_stack_", semantics_ir, vlog_stream),
  29. params_or_args_stack_("params_or_args_stack_", semantics_ir, vlog_stream),
  30. args_type_info_stack_("args_type_info_stack_", semantics_ir, vlog_stream),
  31. declaration_name_stack_(this) {
  32. // Inserts the "Error" and "Type" types as "used types" so that
  33. // canonicalization can skip them. We don't emit either for lowering.
  34. canonical_types_.insert({SemIR::NodeId::BuiltinError, SemIR::TypeId::Error});
  35. canonical_types_.insert(
  36. {SemIR::NodeId::BuiltinTypeType, SemIR::TypeId::TypeType});
  37. }
  38. auto Context::TODO(ParseTree::Node parse_node, std::string label) -> bool {
  39. CARBON_DIAGNOSTIC(SemanticsTodo, Error, "Semantics TODO: `{0}`.",
  40. std::string);
  41. emitter_->Emit(parse_node, SemanticsTodo, std::move(label));
  42. return false;
  43. }
  44. auto Context::VerifyOnFinish() -> void {
  45. // Information in all the various context objects should be cleaned up as
  46. // various pieces of context go out of scope. At this point, nothing should
  47. // remain.
  48. // node_stack_ will still contain top-level entities.
  49. CARBON_CHECK(name_lookup_.empty()) << name_lookup_.size();
  50. CARBON_CHECK(scope_stack_.empty()) << scope_stack_.size();
  51. CARBON_CHECK(node_block_stack_.empty()) << node_block_stack_.size();
  52. CARBON_CHECK(params_or_args_stack_.empty()) << params_or_args_stack_.size();
  53. }
  54. auto Context::AddNode(SemIR::Node node) -> SemIR::NodeId {
  55. return AddNodeToBlock(node_block_stack_.PeekForAdd(), node);
  56. }
  57. auto Context::AddNodeToBlock(SemIR::NodeBlockId block, SemIR::Node node)
  58. -> SemIR::NodeId {
  59. CARBON_VLOG() << "AddNode " << block << ": " << node << "\n";
  60. return semantics_ir_->AddNode(block, node);
  61. }
  62. auto Context::AddNodeAndPush(ParseTree::Node parse_node, SemIR::Node node)
  63. -> void {
  64. auto node_id = AddNode(node);
  65. node_stack_.Push(parse_node, node_id);
  66. }
  67. auto Context::DiagnoseDuplicateName(ParseTree::Node parse_node,
  68. SemIR::NodeId prev_def_id) -> void {
  69. CARBON_DIAGNOSTIC(NameDeclarationDuplicate, Error,
  70. "Duplicate name being declared in the same scope.");
  71. CARBON_DIAGNOSTIC(NameDeclarationPrevious, Note,
  72. "Name is previously declared here.");
  73. auto prev_def = semantics_ir_->GetNode(prev_def_id);
  74. emitter_->Build(parse_node, NameDeclarationDuplicate)
  75. .Note(prev_def.parse_node(), NameDeclarationPrevious)
  76. .Emit();
  77. }
  78. auto Context::DiagnoseNameNotFound(ParseTree::Node parse_node,
  79. SemIR::StringId name_id) -> void {
  80. CARBON_DIAGNOSTIC(NameNotFound, Error, "Name `{0}` not found.",
  81. llvm::StringRef);
  82. emitter_->Emit(parse_node, NameNotFound, semantics_ir_->GetString(name_id));
  83. }
  84. auto Context::AddNameToLookup(ParseTree::Node name_node,
  85. SemIR::StringId name_id, SemIR::NodeId target_id)
  86. -> void {
  87. if (current_scope().names.insert(name_id).second) {
  88. name_lookup_[name_id].push_back(target_id);
  89. } else {
  90. DiagnoseDuplicateName(name_node, name_lookup_[name_id].back());
  91. }
  92. }
  93. auto Context::LookupName(ParseTree::Node parse_node, SemIR::StringId name_id,
  94. SemIR::NameScopeId scope_id, bool print_diagnostics)
  95. -> SemIR::NodeId {
  96. if (scope_id == SemIR::NameScopeId::Invalid) {
  97. auto it = name_lookup_.find(name_id);
  98. if (it == name_lookup_.end()) {
  99. if (print_diagnostics) {
  100. DiagnoseNameNotFound(parse_node, name_id);
  101. }
  102. return SemIR::NodeId::BuiltinError;
  103. }
  104. CARBON_CHECK(!it->second.empty())
  105. << "Should have been erased: " << semantics_ir_->GetString(name_id);
  106. // TODO: Check for ambiguous lookups.
  107. return it->second.back();
  108. } else {
  109. const auto& scope = semantics_ir_->GetNameScope(scope_id);
  110. auto it = scope.find(name_id);
  111. if (it == scope.end()) {
  112. if (print_diagnostics) {
  113. DiagnoseNameNotFound(parse_node, name_id);
  114. }
  115. return SemIR::NodeId::BuiltinError;
  116. }
  117. return it->second;
  118. }
  119. }
  120. auto Context::PushScope() -> void { scope_stack_.push_back({}); }
  121. auto Context::PopScope() -> void {
  122. auto scope = scope_stack_.pop_back_val();
  123. for (const auto& str_id : scope.names) {
  124. auto it = name_lookup_.find(str_id);
  125. if (it->second.size() == 1) {
  126. // Erase names that no longer resolve.
  127. name_lookup_.erase(it);
  128. } else {
  129. it->second.pop_back();
  130. }
  131. }
  132. }
  133. template <typename BranchNode, typename... Args>
  134. static auto AddDominatedBlockAndBranchImpl(Context& context,
  135. ParseTree::Node parse_node,
  136. Args... args) -> SemIR::NodeBlockId {
  137. if (!context.node_block_stack().is_current_block_reachable()) {
  138. return SemIR::NodeBlockId::Unreachable;
  139. }
  140. auto block_id = context.semantics_ir().AddNodeBlock();
  141. context.AddNode(BranchNode::Make(parse_node, block_id, args...));
  142. return block_id;
  143. }
  144. auto Context::AddDominatedBlockAndBranch(ParseTree::Node parse_node)
  145. -> SemIR::NodeBlockId {
  146. return AddDominatedBlockAndBranchImpl<SemIR::Node::Branch>(*this, parse_node);
  147. }
  148. auto Context::AddDominatedBlockAndBranchWithArg(ParseTree::Node parse_node,
  149. SemIR::NodeId arg_id)
  150. -> SemIR::NodeBlockId {
  151. return AddDominatedBlockAndBranchImpl<SemIR::Node::BranchWithArg>(
  152. *this, parse_node, arg_id);
  153. }
  154. auto Context::AddDominatedBlockAndBranchIf(ParseTree::Node parse_node,
  155. SemIR::NodeId cond_id)
  156. -> SemIR::NodeBlockId {
  157. return AddDominatedBlockAndBranchImpl<SemIR::Node::BranchIf>(
  158. *this, parse_node, cond_id);
  159. }
  160. auto Context::AddConvergenceBlockAndPush(
  161. ParseTree::Node parse_node,
  162. std::initializer_list<SemIR::NodeBlockId> blocks) -> void {
  163. CARBON_CHECK(blocks.size() >= 2) << "no convergence";
  164. SemIR::NodeBlockId new_block_id = SemIR::NodeBlockId::Unreachable;
  165. for (SemIR::NodeBlockId block_id : blocks) {
  166. if (block_id != SemIR::NodeBlockId::Unreachable) {
  167. if (new_block_id == SemIR::NodeBlockId::Unreachable) {
  168. new_block_id = semantics_ir().AddNodeBlock();
  169. }
  170. AddNodeToBlock(block_id,
  171. SemIR::Node::Branch::Make(parse_node, new_block_id));
  172. }
  173. }
  174. node_block_stack().Push(new_block_id);
  175. }
  176. auto Context::AddConvergenceBlockWithArgAndPush(
  177. ParseTree::Node parse_node,
  178. std::initializer_list<std::pair<SemIR::NodeBlockId, SemIR::NodeId>>
  179. blocks_and_args) -> SemIR::NodeId {
  180. CARBON_CHECK(blocks_and_args.size() >= 2) << "no convergence";
  181. SemIR::NodeBlockId new_block_id = SemIR::NodeBlockId::Unreachable;
  182. for (auto [block_id, arg_id] : blocks_and_args) {
  183. if (block_id != SemIR::NodeBlockId::Unreachable) {
  184. if (new_block_id == SemIR::NodeBlockId::Unreachable) {
  185. new_block_id = semantics_ir().AddNodeBlock();
  186. }
  187. AddNodeToBlock(block_id, SemIR::Node::BranchWithArg::Make(
  188. parse_node, new_block_id, arg_id));
  189. }
  190. }
  191. node_block_stack().Push(new_block_id);
  192. // Acquire the result value.
  193. SemIR::TypeId result_type_id =
  194. semantics_ir().GetNode(blocks_and_args.begin()->second).type_id();
  195. return AddNode(
  196. SemIR::Node::BlockArg::Make(parse_node, result_type_id, new_block_id));
  197. }
  198. // Add the current code block to the enclosing function.
  199. auto Context::AddCurrentCodeBlockToFunction() -> void {
  200. CARBON_CHECK(!node_block_stack().empty()) << "no current code block";
  201. CARBON_CHECK(!return_scope_stack().empty()) << "no current function";
  202. if (!node_block_stack().is_current_block_reachable()) {
  203. // Don't include unreachable blocks in the function.
  204. return;
  205. }
  206. auto function_id = semantics_ir()
  207. .GetNode(return_scope_stack().back())
  208. .GetAsFunctionDeclaration();
  209. semantics_ir()
  210. .GetFunction(function_id)
  211. .body_block_ids.push_back(node_block_stack().PeekForAdd());
  212. }
  213. auto Context::is_current_position_reachable() -> bool {
  214. switch (auto block_id = node_block_stack().Peek(); block_id.index) {
  215. case SemIR::NodeBlockId::Unreachable.index: {
  216. return false;
  217. }
  218. case SemIR::NodeBlockId::Invalid.index: {
  219. return true;
  220. }
  221. default: {
  222. // Our current position is at the end of a real block. That position is
  223. // reachable unless the previous instruction is a terminator instruction.
  224. const auto& block_contents = semantics_ir().GetNodeBlock(block_id);
  225. if (block_contents.empty()) {
  226. return true;
  227. }
  228. const auto& last_node = semantics_ir().GetNode(block_contents.back());
  229. return last_node.kind().terminator_kind() !=
  230. SemIR::TerminatorKind::Terminator;
  231. }
  232. }
  233. }
  234. auto Context::ImplicitAsForArgs(
  235. SemIR::NodeBlockId arg_refs_id, ParseTree::Node param_parse_node,
  236. SemIR::NodeBlockId param_refs_id,
  237. DiagnosticEmitter<ParseTree::Node>::DiagnosticBuilder* diagnostic) -> bool {
  238. // If both arguments and parameters are empty, return quickly. Otherwise,
  239. // we'll fetch both so that errors are consistent.
  240. if (arg_refs_id == SemIR::NodeBlockId::Empty &&
  241. param_refs_id == SemIR::NodeBlockId::Empty) {
  242. return true;
  243. }
  244. auto arg_refs = semantics_ir_->GetNodeBlock(arg_refs_id);
  245. auto param_refs = semantics_ir_->GetNodeBlock(param_refs_id);
  246. // If sizes mismatch, fail early.
  247. if (arg_refs.size() != param_refs.size()) {
  248. CARBON_CHECK(diagnostic != nullptr) << "Should have validated first";
  249. CARBON_DIAGNOSTIC(CallArgCountMismatch, Note,
  250. "Function cannot be used: Received {0} argument(s), but "
  251. "require {1} argument(s).",
  252. int, int);
  253. diagnostic->Note(param_parse_node, CallArgCountMismatch, arg_refs.size(),
  254. param_refs.size());
  255. return false;
  256. }
  257. // Check type conversions per-element.
  258. // TODO: arg_ir_id is passed so that implicit conversions can be inserted.
  259. // It's currently not supported, but will be needed.
  260. for (auto [i, value_id, param_ref] : llvm::enumerate(arg_refs, param_refs)) {
  261. auto as_type_id = semantics_ir_->GetNode(param_ref).type_id();
  262. if (ImplicitAsImpl(value_id, as_type_id,
  263. diagnostic == nullptr ? &value_id : nullptr) ==
  264. ImplicitAsKind::Incompatible) {
  265. CARBON_CHECK(diagnostic != nullptr) << "Should have validated first";
  266. CARBON_DIAGNOSTIC(CallArgTypeMismatch, Note,
  267. "Function cannot be used: Cannot implicitly convert "
  268. "argument {0} from `{1}` to `{2}`.",
  269. size_t, std::string, std::string);
  270. diagnostic->Note(param_parse_node, CallArgTypeMismatch, i,
  271. semantics_ir_->StringifyType(
  272. semantics_ir_->GetNode(value_id).type_id()),
  273. semantics_ir_->StringifyType(as_type_id));
  274. return false;
  275. }
  276. }
  277. return true;
  278. }
  279. auto Context::ImplicitAsRequired(ParseTree::Node parse_node,
  280. SemIR::NodeId value_id,
  281. SemIR::TypeId as_type_id) -> SemIR::NodeId {
  282. SemIR::NodeId output_value_id = value_id;
  283. if (ImplicitAsImpl(value_id, as_type_id, &output_value_id) ==
  284. ImplicitAsKind::Incompatible) {
  285. // Only error when the system is trying to use the result.
  286. CARBON_DIAGNOSTIC(ImplicitAsConversionFailure, Error,
  287. "Cannot implicitly convert from `{0}` to `{1}`.",
  288. std::string, std::string);
  289. emitter_
  290. ->Build(parse_node, ImplicitAsConversionFailure,
  291. semantics_ir_->StringifyType(
  292. semantics_ir_->GetNode(value_id).type_id()),
  293. semantics_ir_->StringifyType(as_type_id))
  294. .Emit();
  295. }
  296. return output_value_id;
  297. }
  298. auto Context::ImplicitAsBool(ParseTree::Node parse_node, SemIR::NodeId value_id)
  299. -> SemIR::NodeId {
  300. return ImplicitAsRequired(parse_node, value_id,
  301. CanonicalizeType(SemIR::NodeId::BuiltinBoolType));
  302. }
  303. auto Context::ImplicitAsImpl(SemIR::NodeId value_id, SemIR::TypeId as_type_id,
  304. SemIR::NodeId* output_value_id) -> ImplicitAsKind {
  305. // Start by making sure both sides are valid. If any part is invalid, the
  306. // result is invalid and we shouldn't error.
  307. if (value_id == SemIR::NodeId::BuiltinError) {
  308. // If the value is invalid, we can't do much, but do "succeed".
  309. return ImplicitAsKind::Identical;
  310. }
  311. auto value = semantics_ir_->GetNode(value_id);
  312. auto value_type_id = value.type_id();
  313. if (value_type_id == SemIR::TypeId::Error) {
  314. // Although the source type is invalid, this still changes the value.
  315. if (output_value_id != nullptr) {
  316. *output_value_id = SemIR::NodeId::BuiltinError;
  317. }
  318. return ImplicitAsKind::Compatible;
  319. }
  320. if (as_type_id == SemIR::TypeId::Error) {
  321. // Although the target type is invalid, this still changes the value.
  322. if (output_value_id != nullptr) {
  323. *output_value_id = SemIR::NodeId::BuiltinError;
  324. }
  325. return ImplicitAsKind::Compatible;
  326. }
  327. if (value_type_id == as_type_id) {
  328. // Type doesn't need to change.
  329. return ImplicitAsKind::Identical;
  330. }
  331. auto as_type = semantics_ir_->GetTypeAllowBuiltinTypes(as_type_id);
  332. auto as_type_node = semantics_ir_->GetNode(as_type);
  333. if (as_type_node.kind() == SemIR::NodeKind::ArrayType) {
  334. auto [bound_node_id, element_type_id] = as_type_node.GetAsArrayType();
  335. // To resolve lambda issue.
  336. auto element_type = element_type_id;
  337. auto value_type_node = semantics_ir_->GetNode(
  338. semantics_ir_->GetTypeAllowBuiltinTypes(value_type_id));
  339. if (value_type_node.kind() == SemIR::NodeKind::TupleType) {
  340. auto tuple_type_block_id = value_type_node.GetAsTupleType();
  341. const auto& type_block = semantics_ir_->GetTypeBlock(tuple_type_block_id);
  342. if (type_block.size() ==
  343. semantics_ir_->GetArrayBoundValue(bound_node_id) &&
  344. std::all_of(type_block.begin(), type_block.end(),
  345. [&](auto type) { return type == element_type; })) {
  346. if (output_value_id != nullptr) {
  347. *output_value_id = AddNode(SemIR::Node::ArrayValue::Make(
  348. value.parse_node(), as_type_id, value_id));
  349. }
  350. return ImplicitAsKind::Compatible;
  351. }
  352. }
  353. }
  354. if (as_type_id == SemIR::TypeId::TypeType) {
  355. if (value.kind() == SemIR::NodeKind::TupleValue) {
  356. auto tuple_block_id = value.GetAsTupleValue();
  357. llvm::SmallVector<SemIR::TypeId> type_ids;
  358. // If it is empty tuple type, we don't fetch anything.
  359. if (tuple_block_id != SemIR::NodeBlockId::Empty) {
  360. const auto& tuple_block = semantics_ir_->GetNodeBlock(tuple_block_id);
  361. for (auto tuple_node_id : tuple_block) {
  362. // TODO: Eventually ExpressionAsType will insert implicit cast
  363. // instructions. When that happens, this will need to verify the full
  364. // tuple conversion will work before calling it.
  365. type_ids.push_back(
  366. ExpressionAsType(value.parse_node(), tuple_node_id));
  367. }
  368. }
  369. auto tuple_type_id =
  370. CanonicalizeTupleType(value.parse_node(), std::move(type_ids));
  371. if (output_value_id != nullptr) {
  372. *output_value_id =
  373. semantics_ir_->GetTypeAllowBuiltinTypes(tuple_type_id);
  374. }
  375. return ImplicitAsKind::Compatible;
  376. }
  377. // When converting `{}` to a type, the result is `{} as Type`.
  378. if (value.kind() == SemIR::NodeKind::StructValue &&
  379. value.GetAsStructValue() == SemIR::NodeBlockId::Empty) {
  380. if (output_value_id != nullptr) {
  381. *output_value_id = semantics_ir_->GetType(value_type_id);
  382. }
  383. return ImplicitAsKind::Compatible;
  384. }
  385. }
  386. // TODO: Handle ImplicitAs for compatible structs and tuples.
  387. if (output_value_id != nullptr) {
  388. *output_value_id = SemIR::NodeId::BuiltinError;
  389. }
  390. return ImplicitAsKind::Incompatible;
  391. }
  392. auto Context::ParamOrArgStart() -> void { params_or_args_stack_.Push(); }
  393. auto Context::ParamOrArgComma(bool for_args) -> void {
  394. ParamOrArgSave(for_args);
  395. }
  396. auto Context::ParamOrArgEnd(bool for_args, ParseNodeKind start_kind)
  397. -> SemIR::NodeBlockId {
  398. if (parse_tree_->node_kind(node_stack_.PeekParseNode()) != start_kind) {
  399. ParamOrArgSave(for_args);
  400. }
  401. return params_or_args_stack_.Pop();
  402. }
  403. auto Context::ParamOrArgSave(bool for_args) -> void {
  404. auto [entry_parse_node, entry_node_id] =
  405. node_stack_.PopExpressionWithParseNode();
  406. if (for_args) {
  407. // For an argument, we add a stub reference to the expression on the top of
  408. // the stack. There may not be anything on the IR prior to this.
  409. entry_node_id = AddNode(SemIR::Node::StubReference::Make(
  410. entry_parse_node, semantics_ir_->GetNode(entry_node_id).type_id(),
  411. entry_node_id));
  412. }
  413. // Save the param or arg ID.
  414. auto& params_or_args =
  415. semantics_ir_->GetNodeBlock(params_or_args_stack_.PeekForAdd());
  416. params_or_args.push_back(entry_node_id);
  417. }
  418. auto Context::CanonicalizeTypeImpl(
  419. SemIR::NodeKind kind,
  420. llvm::function_ref<void(llvm::FoldingSetNodeID& canonical_id)> profile_type,
  421. llvm::function_ref<SemIR::NodeId()> make_node) -> SemIR::TypeId {
  422. llvm::FoldingSetNodeID canonical_id;
  423. kind.Profile(canonical_id);
  424. profile_type(canonical_id);
  425. void* insert_pos;
  426. auto* node =
  427. canonical_type_nodes_.FindNodeOrInsertPos(canonical_id, insert_pos);
  428. if (node != nullptr) {
  429. return node->type_id();
  430. }
  431. auto node_id = make_node();
  432. auto type_id = semantics_ir_->AddType(node_id);
  433. CARBON_CHECK(canonical_types_.insert({node_id, type_id}).second);
  434. type_node_storage_.push_back(
  435. std::make_unique<TypeNode>(canonical_id, type_id));
  436. // In a debug build, check that our insertion position is still valid. It
  437. // could have been invalidated by a misbehaving `make_node`.
  438. CARBON_DCHECK([&] {
  439. void* check_insert_pos;
  440. auto* check_node = canonical_type_nodes_.FindNodeOrInsertPos(
  441. canonical_id, check_insert_pos);
  442. return !check_node && insert_pos == check_insert_pos;
  443. }()) << "Type was created recursively during canonicalization";
  444. canonical_type_nodes_.InsertNode(type_node_storage_.back().get(), insert_pos);
  445. return type_id;
  446. }
  447. // Compute a fingerprint for a tuple type, for use as a key in a folding set.
  448. static auto ProfileTupleType(const llvm::SmallVector<SemIR::TypeId>& type_ids,
  449. llvm::FoldingSetNodeID& canonical_id) -> void {
  450. for (const auto& type_id : type_ids) {
  451. canonical_id.AddInteger(type_id.index);
  452. }
  453. }
  454. // Compute a fingerprint for a type, for use as a key in a folding set.
  455. static auto ProfileType(Context& semantics_context, SemIR::Node node,
  456. llvm::FoldingSetNodeID& canonical_id) -> void {
  457. switch (node.kind()) {
  458. case SemIR::NodeKind::ArrayType: {
  459. auto [bound_id, element_type_id] = node.GetAsArrayType();
  460. canonical_id.AddInteger(
  461. semantics_context.semantics_ir().GetArrayBoundValue(bound_id));
  462. canonical_id.AddInteger(element_type_id.index);
  463. break;
  464. }
  465. case SemIR::NodeKind::Builtin:
  466. canonical_id.AddInteger(node.GetAsBuiltin().AsInt());
  467. break;
  468. case SemIR::NodeKind::CrossReference: {
  469. // TODO: Cross-references should be canonicalized by looking at their
  470. // target rather than treating them as new unique types.
  471. auto [xref_id, node_id] = node.GetAsCrossReference();
  472. canonical_id.AddInteger(xref_id.index);
  473. canonical_id.AddInteger(node_id.index);
  474. break;
  475. }
  476. case SemIR::NodeKind::ConstType:
  477. canonical_id.AddInteger(
  478. semantics_context.GetUnqualifiedType(node.GetAsConstType()).index);
  479. break;
  480. case SemIR::NodeKind::PointerType:
  481. canonical_id.AddInteger(node.GetAsPointerType().index);
  482. break;
  483. case SemIR::NodeKind::StructType: {
  484. auto refs =
  485. semantics_context.semantics_ir().GetNodeBlock(node.GetAsStructType());
  486. for (const auto& ref_id : refs) {
  487. auto ref = semantics_context.semantics_ir().GetNode(ref_id);
  488. auto [name_id, type_id] = ref.GetAsStructTypeField();
  489. canonical_id.AddInteger(name_id.index);
  490. canonical_id.AddInteger(type_id.index);
  491. }
  492. break;
  493. }
  494. case SemIR::NodeKind::StubReference: {
  495. // We rely on stub references not referring to each other to ensure we
  496. // only recurse once here.
  497. auto inner =
  498. semantics_context.semantics_ir().GetNode(node.GetAsStubReference());
  499. CARBON_CHECK(inner.kind() != SemIR::NodeKind::StubReference)
  500. << "A stub reference should never refer to another stub reference.";
  501. ProfileType(semantics_context, inner, canonical_id);
  502. break;
  503. }
  504. case SemIR::NodeKind::TupleType:
  505. ProfileTupleType(
  506. semantics_context.semantics_ir().GetTypeBlock(node.GetAsTupleType()),
  507. canonical_id);
  508. break;
  509. default:
  510. CARBON_FATAL() << "Unexpected type node " << node;
  511. }
  512. }
  513. auto Context::CanonicalizeTypeAndAddNodeIfNew(SemIR::Node node)
  514. -> SemIR::TypeId {
  515. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  516. ProfileType(*this, node, canonical_id);
  517. };
  518. auto make_node = [&] { return AddNode(node); };
  519. return CanonicalizeTypeImpl(node.kind(), profile_node, make_node);
  520. }
  521. auto Context::CanonicalizeType(SemIR::NodeId node_id) -> SemIR::TypeId {
  522. auto it = canonical_types_.find(node_id);
  523. if (it != canonical_types_.end()) {
  524. return it->second;
  525. }
  526. auto node = semantics_ir_->GetNode(node_id);
  527. auto profile_node = [&](llvm::FoldingSetNodeID& canonical_id) {
  528. ProfileType(*this, node, canonical_id);
  529. };
  530. auto make_node = [&] { return node_id; };
  531. return CanonicalizeTypeImpl(node.kind(), profile_node, make_node);
  532. }
  533. auto Context::CanonicalizeStructType(ParseTree::Node parse_node,
  534. SemIR::NodeBlockId refs_id)
  535. -> SemIR::TypeId {
  536. return CanonicalizeTypeAndAddNodeIfNew(SemIR::Node::StructType::Make(
  537. parse_node, SemIR::TypeId::TypeType, refs_id));
  538. }
  539. auto Context::CanonicalizeTupleType(ParseTree::Node parse_node,
  540. llvm::SmallVector<SemIR::TypeId>&& type_ids)
  541. -> SemIR::TypeId {
  542. // Defer allocating a SemIR::TypeBlockId until we know this is a new type.
  543. auto profile_tuple = [&](llvm::FoldingSetNodeID& canonical_id) {
  544. ProfileTupleType(type_ids, canonical_id);
  545. };
  546. auto make_tuple_node = [&] {
  547. auto type_block_id = semantics_ir_->AddTypeBlock();
  548. auto& type_block = semantics_ir_->GetTypeBlock(type_block_id);
  549. type_block = std::move(type_ids);
  550. return AddNode(SemIR::Node::TupleType::Make(
  551. parse_node, SemIR::TypeId::TypeType, type_block_id));
  552. };
  553. return CanonicalizeTypeImpl(SemIR::NodeKind::TupleType, profile_tuple,
  554. make_tuple_node);
  555. }
  556. auto Context::GetPointerType(ParseTree::Node parse_node,
  557. SemIR::TypeId pointee_type_id) -> SemIR::TypeId {
  558. return CanonicalizeTypeAndAddNodeIfNew(SemIR::Node::PointerType::Make(
  559. parse_node, SemIR::TypeId::TypeType, pointee_type_id));
  560. }
  561. auto Context::GetUnqualifiedType(SemIR::TypeId type_id) -> SemIR::TypeId {
  562. SemIR::Node type_node =
  563. semantics_ir_->GetNode(semantics_ir_->GetTypeAllowBuiltinTypes(type_id));
  564. if (type_node.kind() == SemIR::NodeKind::ConstType) {
  565. return type_node.GetAsConstType();
  566. }
  567. return type_id;
  568. }
  569. auto Context::PrintForStackDump(llvm::raw_ostream& output) const -> void {
  570. node_stack_.PrintForStackDump(output);
  571. node_block_stack_.PrintForStackDump(output);
  572. params_or_args_stack_.PrintForStackDump(output);
  573. args_type_info_stack_.PrintForStackDump(output);
  574. }
  575. } // namespace Carbon::Check