file_context.cpp 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135
  1. // Part of the Carbon Language project, under the Apache License v2.0 with LLVM
  2. // Exceptions. See /LICENSE for license information.
  3. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  4. #include "toolchain/lower/file_context.h"
  5. #include <memory>
  6. #include <optional>
  7. #include <string>
  8. #include <utility>
  9. #include "clang/CodeGen/ModuleBuilder.h"
  10. #include "common/check.h"
  11. #include "common/vlog.h"
  12. #include "llvm/ADT/STLExtras.h"
  13. #include "llvm/ADT/Sequence.h"
  14. #include "llvm/Linker/Linker.h"
  15. #include "llvm/Support/BLAKE3.h"
  16. #include "llvm/Transforms/Utils/BasicBlockUtils.h"
  17. #include "llvm/Transforms/Utils/ModuleUtils.h"
  18. #include "toolchain/base/kind_switch.h"
  19. #include "toolchain/lower/constant.h"
  20. #include "toolchain/lower/function_context.h"
  21. #include "toolchain/lower/mangler.h"
  22. #include "toolchain/sem_ir/absolute_node_id.h"
  23. #include "toolchain/sem_ir/entry_point.h"
  24. #include "toolchain/sem_ir/expr_info.h"
  25. #include "toolchain/sem_ir/file.h"
  26. #include "toolchain/sem_ir/function.h"
  27. #include "toolchain/sem_ir/generic.h"
  28. #include "toolchain/sem_ir/ids.h"
  29. #include "toolchain/sem_ir/inst.h"
  30. #include "toolchain/sem_ir/inst_kind.h"
  31. #include "toolchain/sem_ir/pattern.h"
  32. #include "toolchain/sem_ir/typed_insts.h"
  33. namespace Carbon::Lower {
// Sets up per-file lowering state. All referenced objects (`context`,
// `sem_ir`, and the optional `inst_namer` / `vlog_stream`) must outlive this
// object; only pointers to them are stored.
FileContext::FileContext(Context& context, const SemIR::File& sem_ir,
                         const SemIR::InstNamer* inst_namer,
                         llvm::raw_ostream* vlog_stream)
    : context_(&context),
      sem_ir_(&sem_ir),
      inst_namer_(inst_namer),
      vlog_stream_(vlog_stream) {
  // Initialization that relies on invariants of the class.
  // `CreateCppCodeGenerator` reads `cpp_ast()` and `llvm_module()`, so it must
  // run in the body, after the members above are initialized.
  cpp_code_generator_ = CreateCppCodeGenerator();
  CARBON_CHECK(!sem_ir.has_errors(),
               "Generating LLVM IR from invalid SemIR::File is unsupported.");
}
// TODO: Move this to lower.cpp.
// Performs eager, pre-definition lowering: initializes Clang code generation
// (if any C++ AST is imported), lowers complete types, declares all
// non-generic functions, sizes the per-specific side tables used by
// coalescing, and lowers constants.
auto FileContext::PrepareToLower() -> void {
  if (cpp_code_generator_) {
    // Clang code generation should not actually modify the AST, but isn't
    // const-correct.
    cpp_code_generator_->Initialize(
        const_cast<clang::ASTContext&>(cpp_ast()->getASTContext()));
  }
  // Lower all types that were required to be complete.
  // `types_` is indexed by the type's instruction index, so only non-negative
  // indices are stored; negative indices don't index into `types_`.
  types_.resize(sem_ir_->insts().size());
  for (auto type_id : sem_ir_->types().complete_types()) {
    if (type_id.index >= 0) {
      types_[type_id.index] = BuildType(sem_ir_->types().GetInstId(type_id));
    }
  }
  // Lower function declarations.
  // `resize_for_overwrite` is safe because the loop below assigns every slot.
  functions_.resize_for_overwrite(sem_ir_->functions().size());
  for (auto [id, _] : sem_ir_->functions().enumerate()) {
    functions_[id.index] = BuildFunctionDecl(id);
  }
  // Specific functions are lowered when we emit a reference to them.
  specific_functions_.resize(sem_ir_->specifics().size());
  // Additional data stored for specifics, for when attempting to coalesce.
  // Indexed by `GenericId`.
  lowered_specifics_.resize(sem_ir_->generics().size());
  // Indexed by `SpecificId`.
  lowered_specifics_type_fingerprint_.resize(sem_ir_->specifics().size());
  lowered_specific_fingerprint_.resize(sem_ir_->specifics().size());
  equivalent_specifics_.resize(sem_ir_->specifics().size(),
                               SemIR::SpecificId::None);
  // Lower constants. Must happen after types and function declarations, since
  // constants may reference both.
  constants_.resize(sem_ir_->insts().size());
  LowerConstants(*this, constants_);
}
// TODO: Move this to lower.cpp.
// Lowers all definitions: vtables, global variables, function bodies
// (including specifics discovered while lowering other bodies), the global
// initializer, and finally any C++ code generated through Clang, which is
// linked into the main module.
auto FileContext::LowerDefinitions() -> void {
  // Lower vtable definitions for classes that have one.
  for (const auto& class_info : sem_ir_->classes().values()) {
    if (auto* llvm_vtable = BuildVtable(class_info)) {
      global_variables_.Insert(class_info.vtable_id, llvm_vtable);
    }
  }
  // Lower global variable definitions.
  // TODO: Storing both a `constants_` array and a separate `global_variables_`
  // map is redundant.
  for (auto inst_id :
       sem_ir().inst_blocks().Get(sem_ir().top_inst_block_id())) {
    // Only `VarStorage` indicates a global variable declaration in the
    // top instruction block.
    if (auto var = sem_ir().insts().TryGetAs<SemIR::VarStorage>(inst_id)) {
      // Get the global variable declaration. We created this when lowering the
      // constant unless the variable is unnamed, in which case we need to
      // create it now.
      llvm::GlobalVariable* llvm_var = nullptr;
      if (sem_ir().constant_values().Get(inst_id).is_constant()) {
        llvm_var = cast<llvm::GlobalVariable>(
            GetGlobal(inst_id, SemIR::SpecificId::None));
      } else {
        llvm_var = BuildGlobalVariableDecl(*var);
      }
      // Convert the declaration of this variable into a definition by adding an
      // initializer. The zero initializer is a placeholder; `__global_init`
      // performs the actual initialization at runtime.
      global_variables_.Insert(inst_id, llvm_var);
      llvm_var->setInitializer(
          llvm::Constant::getNullValue(llvm_var->getValueType()));
    }
  }
  // Lower function definitions.
  for (auto [id, _] : sem_ir_->functions().enumerate()) {
    BuildFunctionDefinition(id);
  }
  // Lower function definitions for generics.
  // This cannot be a range-based loop, as new definitions can be added
  // while building other definitions.
  // NOLINTNEXTLINE
  for (size_t i = 0; i != specific_function_definitions_.size(); ++i) {
    auto [function_id, specific_id] = specific_function_definitions_[i];
    BuildFunctionDefinition(function_id, specific_id);
  }
  // Append `__global_init` to `llvm::global_ctors` to initialize global
  // variables.
  if (sem_ir().global_ctor_id().has_value()) {
    llvm::appendToGlobalCtors(llvm_module(),
                              GetFunction(sem_ir().global_ctor_id()),
                              /*Priority=*/0);
  }
  if (cpp_code_generator_) {
    // Clang code generation should not actually modify the AST, but isn't
    // const-correct.
    cpp_code_generator_->HandleTranslationUnit(
        const_cast<clang::ASTContext&>(cpp_ast()->getASTContext()));
    // Merge the Clang-generated module into the main module. The linker takes
    // ownership of (and destroys) the source module.
    bool link_error = llvm::Linker::linkModules(
        /*Dest=*/llvm_module(),
        /*Src=*/std::unique_ptr<llvm::Module>(
            cpp_code_generator_->ReleaseModule()));
    CARBON_CHECK(!link_error);
  }
}
  143. auto FileContext::Finalize() -> void {
  144. // Find equivalent specifics (from the same generic), replace all uses and
  145. // remove duplicately lowered function definitions.
  146. CoalesceEquivalentSpecifics();
  147. }
  148. auto FileContext::InsertPair(
  149. SemIR::SpecificId specific_id1, SemIR::SpecificId specific_id2,
  150. Set<std::pair<SemIR::SpecificId, SemIR::SpecificId>>& set_of_pairs)
  151. -> bool {
  152. if (specific_id1.index > specific_id2.index) {
  153. std::swap(specific_id1.index, specific_id2.index);
  154. }
  155. auto insert_result =
  156. set_of_pairs.Insert(std::make_pair(specific_id1, specific_id2));
  157. return insert_result.is_inserted();
  158. }
  159. auto FileContext::ContainsPair(
  160. SemIR::SpecificId specific_id1, SemIR::SpecificId specific_id2,
  161. const Set<std::pair<SemIR::SpecificId, SemIR::SpecificId>>& set_of_pairs)
  162. -> bool {
  163. if (specific_id1.index > specific_id2.index) {
  164. std::swap(specific_id1.index, specific_id2.index);
  165. }
  166. return set_of_pairs.Contains(std::make_pair(specific_id1, specific_id2));
  167. }
// For each generic, compares all pairs of its lowered specifics and merges the
// ones that lowered to identical code: uses are redirected to a canonical
// specific and duplicate LLVM function bodies are erased. Non-equivalent pairs
// are cached in `non_equivalent_specifics_` to short-circuit later checks.
auto FileContext::CoalesceEquivalentSpecifics() -> void {
  for (auto& specifics : lowered_specifics_) {
    // i cannot be unsigned due to the comparison with a negative number when
    // the specifics vector is empty.
    for (int i = 0; i < static_cast<int>(specifics.size()) - 1; ++i) {
      // This specific was already replaced, skip it.
      // Removal is swap-with-last + pop; `--i` re-examines the swapped-in
      // element on the next iteration.
      if (equivalent_specifics_[specifics[i].index].has_value() &&
          equivalent_specifics_[specifics[i].index] != specifics[i]) {
        specifics[i] = specifics[specifics.size() - 1];
        specifics.pop_back();
        --i;
        continue;
      }
      // TODO: Improve quadratic behavior by using a single hash based on
      // `lowered_specifics_type_fingerprint_` and `common_fingerprint`.
      for (int j = i + 1; j < static_cast<int>(specifics.size()); ++j) {
        // When the specific was already replaced, skip it.
        if (equivalent_specifics_[specifics[j].index].has_value() &&
            equivalent_specifics_[specifics[j].index] != specifics[j]) {
          specifics[j] = specifics[specifics.size() - 1];
          specifics.pop_back();
          --j;
          continue;
        }
        // When the two specifics are not equivalent due to the function type
        // info stored in lowered_specifics_types, mark non-equivalance. This
        // can be reused to short-cut another path and continue the search for
        // other equivalences.
        if (!AreFunctionTypesEquivalent(specifics[i], specifics[j])) {
          InsertPair(specifics[i], specifics[j], non_equivalent_specifics_);
          continue;
        }
        Set<std::pair<SemIR::SpecificId, SemIR::SpecificId>>
            visited_equivalent_specifics;
        InsertPair(specifics[i], specifics[j], visited_equivalent_specifics);
        // Function type information matches; check usages inside the function
        // body that are dependent on the specific. This information has been
        // stored in lowered_states while lowering each function body.
        if (AreFunctionBodiesEquivalent(specifics[i], specifics[j],
                                        visited_equivalent_specifics)) {
          // When processing equivalences, we may change the canonical specific
          // multiple times, so we don't delete replaced specifics until the
          // end.
          llvm::SmallVector<SemIR::SpecificId> specifics_to_delete;
          visited_equivalent_specifics.ForEach(
              [&](std::pair<SemIR::SpecificId, SemIR::SpecificId>
                      equivalent_entry) {
                CARBON_VLOG("Found equivalent specifics: {0}, {1}",
                            equivalent_entry.first, equivalent_entry.second);
                ProcessSpecificEquivalence(equivalent_entry,
                                           specifics_to_delete);
              });
          // Delete function bodies for already replaced functions.
          // Afterwards, point the replaced slot at the canonical specific's
          // function so later lookups resolve to the surviving definition.
          for (auto specific_id : specifics_to_delete) {
            specific_functions_[specific_id.index]->eraseFromParent();
            specific_functions_[specific_id.index] =
                specific_functions_[equivalent_specifics_[specific_id.index]
                                        .index];
          }
          // Removed the replaced specific from the list of emitted specifics.
          // Only the top level, since the others are somewhere else in the
          // vector, they will be found and removed during processing.
          specifics[j] = specifics[specifics.size() - 1];
          specifics.pop_back();
          --j;
        } else {
          // Only mark non-equivalence based on state for starting specifics.
          InsertPair(specifics[i], specifics[j], non_equivalent_specifics_);
        }
      }
    }
  }
}
// Records that the two specifics in `pair` are equivalent: picks a canonical
// representative (preferring the lower index), updates `equivalent_specifics_`
// for both, redirects LLVM uses of the non-canonical function, and queues it
// for deletion unless it was already replaced in an earlier merge.
auto FileContext::ProcessSpecificEquivalence(
    std::pair<SemIR::SpecificId, SemIR::SpecificId> pair,
    llvm::SmallVector<SemIR::SpecificId>& specifics_to_delete) -> void {
  auto [specific_id1, specific_id2] = pair;
  CARBON_CHECK(specific_id1.has_value() && specific_id2.has_value(),
               "Expected values in equivalence check");
  // Returns the current canonical specific for `specific_id`, plus whether
  // `specific_id` had already been replaced by a different canonical one.
  auto get_canon = [&](SemIR::SpecificId specific_id) {
    return equivalent_specifics_[specific_id.index].has_value()
               ? std::make_pair(
                     equivalent_specifics_[specific_id.index],
                     (equivalent_specifics_[specific_id.index] != specific_id))
               : std::make_pair(specific_id, false);
  };
  auto [canon_id1, replaced_before1] = get_canon(specific_id1);
  auto [canon_id2, replaced_before2] = get_canon(specific_id2);
  if (canon_id1 == canon_id2) {
    // Already equivalent, there was a previous replacement.
    return;
  }
  if (canon_id1.index >= canon_id2.index) {
    // Prefer the earlier index for canonical values.
    std::swap(canon_id1, canon_id2);
    std::swap(replaced_before1, replaced_before2);
  }
  // Update equivalent_specifics_ for all. This is used as an indicator that
  // this specific_id may be the canonical one when reducing the equivalence
  // chains in `IsKnownEquivalence`.
  equivalent_specifics_[specific_id1.index] = canon_id1;
  equivalent_specifics_[specific_id2.index] = canon_id1;
  specific_functions_[canon_id2.index]->replaceAllUsesWith(
      specific_functions_[canon_id1.index]);
  if (!replaced_before2) {
    specifics_to_delete.push_back(canon_id2);
  }
}
// Returns whether the two specifics are already known to be equivalent, i.e.
// both have been replaced and resolve to the same canonical specific. As a
// side effect, compresses each one's equivalence chain so every entry on the
// path points directly at its final representative (path compression).
auto FileContext::IsKnownEquivalence(SemIR::SpecificId specific_id1,
                                     SemIR::SpecificId specific_id2) -> bool {
  if (!equivalent_specifics_[specific_id1.index].has_value() ||
      !equivalent_specifics_[specific_id2.index].has_value()) {
    return false;
  }
  auto update_equivalent_specific = [&](SemIR::SpecificId specific_id) {
    llvm::SmallVector<SemIR::SpecificId> stack;
    SemIR::SpecificId specific_to_update = specific_id;
    // Walk the chain until reaching a fixpoint, where an entry's target maps
    // to itself (the canonical representative). Record the path on `stack`.
    while (equivalent_specifics_[equivalent_specifics_[specific_to_update.index]
                                     .index] !=
           equivalent_specifics_[specific_to_update.index]) {
      stack.push_back(specific_to_update);
      specific_to_update = equivalent_specifics_[specific_to_update.index];
    }
    // Rewrite the recorded path back-to-front so each entry points directly
    // at the representative.
    for (auto specific : llvm::reverse(stack)) {
      equivalent_specifics_[specific.index] =
          equivalent_specifics_[equivalent_specifics_[specific.index].index];
    }
  };
  update_equivalent_specific(specific_id1);
  update_equivalent_specific(specific_id2);
  return equivalent_specifics_[specific_id1.index] ==
         equivalent_specifics_[specific_id2.index];
}
  301. auto FileContext::AreFunctionTypesEquivalent(SemIR::SpecificId specific_id1,
  302. SemIR::SpecificId specific_id2)
  303. -> bool {
  304. CARBON_CHECK(specific_id1.has_value() && specific_id2.has_value());
  305. return lowered_specifics_type_fingerprint_[specific_id1.index] ==
  306. lowered_specifics_type_fingerprint_[specific_id2.index];
  307. }
// Determines whether the lowered bodies of two specifics are equivalent, using
// the fingerprints recorded while lowering. Bodies match if their
// specific-independent ("common") fingerprints match and every pair of
// specifics they call is (transitively) equivalent; the transitive check is
// driven by a worklist, with candidate pairs accumulated in
// `visited_equivalent_specifics`.
auto FileContext::AreFunctionBodiesEquivalent(
    SemIR::SpecificId specific_id1, SemIR::SpecificId specific_id2,
    Set<std::pair<SemIR::SpecificId, SemIR::SpecificId>>&
        visited_equivalent_specifics) -> bool {
  llvm::SmallVector<std::pair<SemIR::SpecificId, SemIR::SpecificId>> worklist;
  worklist.push_back({specific_id1, specific_id2});
  while (!worklist.empty()) {
    auto outer_pair = worklist.pop_back_val();
    auto [specific_id1, specific_id2] = outer_pair;
    auto state1 = lowered_specific_fingerprint_[specific_id1.index];
    auto state2 = lowered_specific_fingerprint_[specific_id2.index];
    // Different common fingerprints: the bodies differ independent of which
    // specifics they call, so the pair is definitively non-equivalent.
    if (state1.common_fingerprint != state2.common_fingerprint) {
      InsertPair(specific_id1, specific_id2, non_equivalent_specifics_);
      return false;
    }
    // Identical specific fingerprints: bodies match including all called
    // specifics; no need to compare the call lists.
    if (state1.specific_fingerprint == state2.specific_fingerprint) {
      continue;
    }
    // A size difference should have been detected by the common fingerprint.
    CARBON_CHECK(state1.calls.size() == state2.calls.size(),
                 "Number of specific calls expected to be the same.");
    for (auto [state1_call, state2_call] :
         llvm::zip(state1.calls, state2.calls)) {
      if (state1_call != state2_call) {
        // A previously cached negative result disproves equivalence.
        if (ContainsPair(state1_call, state2_call, non_equivalent_specifics_)) {
          return false;
        }
        if (IsKnownEquivalence(state1_call, state2_call)) {
          continue;
        }
        // If the pair was already visited on this search, it's being checked
        // elsewhere on the worklist; don't enqueue it again.
        if (!InsertPair(state1_call, state2_call,
                        visited_equivalent_specifics)) {
          continue;
        }
        // Leave the added equivalence pair in place and continue.
        worklist.push_back({state1_call, state2_call});
      }
    }
  }
  return true;
}
  349. auto FileContext::CreateCppCodeGenerator()
  350. -> std::unique_ptr<clang::CodeGenerator> {
  351. if (!cpp_ast()) {
  352. return nullptr;
  353. }
  354. RawStringOstream clang_module_name_stream;
  355. clang_module_name_stream << llvm_module().getName() << ".clang";
  356. // Do not emit Clang's name and version as the creator of the output file.
  357. cpp_code_gen_options_.EmitVersionIdentMetadata = false;
  358. return std::unique_ptr<clang::CodeGenerator>(clang::CreateLLVMCodeGen(
  359. cpp_ast()->getASTContext().getDiagnostics(),
  360. clang_module_name_stream.TakeStr(), context().file_system(),
  361. cpp_header_search_options_, cpp_preprocessor_options_,
  362. cpp_code_gen_options_, llvm_context()));
  363. }
// Returns the LLVM value for the constant value of `inst_id`, resolved within
// `specific_id`. For pointer value representations, wraps the constant in a
// named internal global variable (created on first use and cached in
// `global_variables_`) so the value has an address.
auto FileContext::GetGlobal(SemIR::InstId inst_id,
                            SemIR::SpecificId specific_id) -> llvm::Value* {
  auto const_id = GetConstantValueInSpecific(sem_ir(), specific_id, inst_id);
  CARBON_CHECK(const_id.is_concrete(), "Missing value: {0} {1} {2}", inst_id,
               specific_id, sem_ir().insts().Get(inst_id));
  auto const_inst_id = sem_ir().constant_values().GetInstId(const_id);
  auto* const_value = constants_[const_inst_id.index];
  // For value expressions and initializing expressions, the value produced by
  // a constant instruction is a value representation of the constant. For
  // initializing expressions, `FinishInit` will perform a copy if needed.
  switch (auto cat = SemIR::GetExprCategory(sem_ir(), const_inst_id)) {
    case SemIR::ExprCategory::Value:
    case SemIR::ExprCategory::Initializing:
      break;
    case SemIR::ExprCategory::DurableRef:
    case SemIR::ExprCategory::EphemeralRef:
      // Constant reference expressions lower to an address.
      return const_value;
    case SemIR::ExprCategory::NotExpr:
    case SemIR::ExprCategory::Error:
    case SemIR::ExprCategory::Mixed:
      CARBON_FATAL("Unexpected category {0} for lowered constant {1}", cat,
                   sem_ir().insts().Get(const_inst_id));
  };
  auto value_rep = SemIR::ValueRepr::ForType(
      sem_ir(), sem_ir().insts().Get(const_inst_id).type_id());
  // Non-pointer value representations can be used directly.
  if (value_rep.kind != SemIR::ValueRepr::Pointer) {
    return const_value;
  }
  // The value representation is a pointer. Generate a variable to hold the
  // value, or find and reuse an existing one.
  if (auto result = global_variables().Lookup(const_inst_id)) {
    return result.value();
  }
  // Include both the name of the constant, if any, and the point of use in
  // the name of the variable.
  llvm::StringRef const_name;
  llvm::StringRef use_name;
  if (inst_namer_) {
    const_name = inst_namer_->GetUnscopedNameFor(const_inst_id);
    use_name = inst_namer_->GetUnscopedNameFor(inst_id);
  }
  // We always need to give the global a name even if the instruction namer
  // doesn't have one to use.
  if (const_name.empty()) {
    const_name = "const";
  }
  if (use_name.empty()) {
    use_name = "anon";
  }
  // Avoid a double separator when the use name already starts with '.'.
  llvm::StringRef sep = (use_name[0] == '.') ? "" : ".";
  auto* global_variable = new llvm::GlobalVariable(
      llvm_module(), GetType(sem_ir().GetPointeeType(value_rep.type_id)),
      /*isConstant=*/true, llvm::GlobalVariable::InternalLinkage, const_value,
      const_name + sep + use_name);
  global_variables_.Insert(const_inst_id, global_variable);
  return global_variable;
}
  422. auto FileContext::GetOrCreateFunction(SemIR::FunctionId function_id,
  423. SemIR::SpecificId specific_id)
  424. -> llvm::Function* {
  425. // Non-generic functions are declared eagerly.
  426. if (!specific_id.has_value()) {
  427. return GetFunction(function_id);
  428. }
  429. if (auto* result = specific_functions_[specific_id.index]) {
  430. return result;
  431. }
  432. auto* result = BuildFunctionDecl(function_id, specific_id);
  433. // TODO: Add this function to a list of specific functions whose definitions
  434. // we need to emit.
  435. specific_functions_[specific_id.index] = result;
  436. // TODO: Use this to generate definitions for these functions.
  437. specific_function_definitions_.push_back({function_id, specific_id});
  438. return result;
  439. }
// Computes the LLVM function type for `function` (resolved in `specific_id`),
// along with the mapping from LLVM parameters back to SemIR pattern
// instructions, the lowered return type, and the return-slot pattern (if the
// function returns through a pointer parameter). If the return type or any
// parameter type is incomplete, falls back to a trivial `void()` type.
auto FileContext::BuildFunctionTypeInfo(const SemIR::Function& function,
                                        SemIR::SpecificId specific_id)
    -> FunctionTypeInfo {
  const auto return_info =
      SemIR::ReturnTypeInfo::ForFunction(sem_ir(), function, specific_id);
  if (!return_info.is_valid()) {
    // The return type has not been completed, create a trivial type instead.
    return {.type =
                llvm::FunctionType::get(llvm::Type::getVoidTy(llvm_context()),
                                        /*isVarArg=*/false)};
  }
  // Maps a SemIR type to its LLVM type; `None` type ids map to null.
  auto get_llvm_type = [&](SemIR::TypeId type_id) -> llvm::Type* {
    if (!type_id.has_value()) {
      return nullptr;
    }
    return GetType(type_id);
  };
  // TODO: expose the `Call` parameter patterns in `Function`, and use them here
  // instead of reconstructing them via the syntactic parameter lists.
  auto implicit_param_patterns =
      sem_ir().inst_blocks().GetOrEmpty(function.implicit_param_patterns_id);
  auto param_patterns =
      sem_ir().inst_blocks().GetOrEmpty(function.param_patterns_id);
  auto* return_type = get_llvm_type(return_info.type_id);
  llvm::SmallVector<llvm::Type*> param_types;
  // Compute the return type to use for the LLVM function. If the initializing
  // representation doesn't produce a value, set the return type to void.
  // TODO: For the `Run` entry point, remap return type to i32 if it doesn't
  // return a value.
  llvm::Type* function_return_type =
      (return_info.is_valid() &&
       return_info.init_repr.kind == SemIR::InitRepr::ByCopy)
          ? return_type
          : llvm::Type::getVoidTy(llvm_context());
  // TODO: Consider either storing `param_inst_ids` somewhere so that we can
  // reuse it from `BuildFunctionDefinition` and when building calls, or factor
  // out a mechanism to compute the mapping between parameters and arguments on
  // demand.
  llvm::SmallVector<SemIR::InstId> param_inst_ids;
  // Upper bound on LLVM parameters: optional return slot plus all implicit
  // and explicit parameters. Some may be dropped below.
  auto max_llvm_params = (return_info.has_return_slot() ? 1 : 0) +
                         implicit_param_patterns.size() + param_patterns.size();
  param_types.reserve(max_llvm_params);
  param_inst_ids.reserve(max_llvm_params);
  auto return_param_id = SemIR::InstId::None;
  if (return_info.has_return_slot()) {
    // The return slot is passed first, as a pointer parameter.
    param_types.push_back(
        llvm::PointerType::get(llvm_context(), /*AddressSpace=*/0));
    return_param_id = function.return_slot_pattern_id;
    param_inst_ids.push_back(return_param_id);
  }
  for (auto param_pattern_id : llvm::concat<const SemIR::InstId>(
           implicit_param_patterns, param_patterns)) {
    auto param_pattern_info = SemIR::Function::GetParamPatternInfoFromPatternId(
        sem_ir(), param_pattern_id);
    if (!param_pattern_info) {
      continue;
    }
    // Resolve the parameter type within the specific.
    auto param_type_id = ExtractScrutineeType(
        sem_ir(), SemIR::GetTypeOfInstInSpecific(sem_ir(), specific_id,
                                                 param_pattern_info->inst_id));
    CARBON_CHECK(
        !param_type_id.AsConstantId().is_symbolic(),
        "Found symbolic type id after resolution when lowering type {0}.",
        param_pattern_info->inst.type_id);
    switch (auto value_rep = SemIR::ValueRepr::ForType(sem_ir(), param_type_id);
            value_rep.kind) {
      case SemIR::ValueRepr::Unknown:
        // This parameter type is incomplete. Fallback to describing the
        // function type as `void()`.
        return {.type = llvm::FunctionType::get(
                    llvm::Type::getVoidTy(llvm_context()),
                    /*isVarArg=*/false)};
      case SemIR::ValueRepr::None:
        // The parameter carries no runtime value; omit it.
        break;
      case SemIR::ValueRepr::Copy:
      case SemIR::ValueRepr::Custom:
      case SemIR::ValueRepr::Pointer:
        auto* param_types_to_add = get_llvm_type(value_rep.type_id);
        param_types.push_back(param_types_to_add);
        param_inst_ids.push_back(param_pattern_id);
        break;
    }
  }
  return {.type = llvm::FunctionType::get(function_return_type, param_types,
                                          /*isVarArg=*/false),
          .param_inst_ids = std::move(param_inst_ids),
          .return_type = return_type,
          .return_param_id = return_param_id};
}
// Creates the LLVM declaration for `function_id` (in `specific_id`, if any):
// computes the function type, mangles the name, records a type fingerprint for
// specifics (used by coalescing), and names/attributes the LLVM arguments.
// Returns null for functions that aren't lowered (generics without a specific,
// and builtins).
auto FileContext::BuildFunctionDecl(SemIR::FunctionId function_id,
                                    SemIR::SpecificId specific_id)
    -> llvm::Function* {
  const auto& function = sem_ir().functions().Get(function_id);
  // Don't lower generic functions. Note that associated functions in interfaces
  // have `Self` in scope, so are implicitly generic functions.
  if (function.generic_id.has_value() && !specific_id.has_value()) {
    return nullptr;
  }
  // Don't lower builtins.
  if (function.builtin_function_kind != SemIR::BuiltinFunctionKind::None) {
    return nullptr;
  }
  // TODO: Consider tracking whether the function has been used, and only
  // lowering it if it's needed.
  auto function_type_info = BuildFunctionTypeInfo(function, specific_id);
  // Specifics may be emitted identically in multiple translation units, so
  // they get linkonce_odr linkage; other functions are external.
  auto linkage = specific_id.has_value() ? llvm::Function::LinkOnceODRLinkage
                                         : llvm::Function::ExternalLinkage;
  Mangler m(*this);
  std::string mangled_name = m.Mangle(function_id, specific_id);
  // Create a unique fingerprint for the function type.
  // For now, compute the function type fingerprint only for specifics, though
  // we might need it for all functions in order to create a canonical
  // fingerprint across translation units.
  if (specific_id.has_value()) {
    // Hash the printed LLVM function type with BLAKE3.
    llvm::BLAKE3 function_type_fingerprint;
    RawStringOstream os;
    function_type_info.type->print(os);
    function_type_fingerprint.update(os.TakeStr());
    function_type_fingerprint.final(
        lowered_specifics_type_fingerprint_[specific_id.index]);
  }
  auto* llvm_function = llvm::Function::Create(function_type_info.type, linkage,
                                               mangled_name, llvm_module());
  // `Create` renames on collision, so equality here proves uniqueness.
  CARBON_CHECK(llvm_function->getName() == mangled_name,
               "Mangled name collision: {0}", mangled_name);
  // Set up parameters and the return slot.
  for (auto [inst_id, arg] : llvm::zip_equal(function_type_info.param_inst_ids,
                                             llvm_function->args())) {
    auto name_id = SemIR::NameId::None;
    if (inst_id == function_type_info.return_param_id) {
      // Mark the return slot with the `sret` attribute so LLVM knows it
      // carries the return value.
      name_id = SemIR::NameId::ReturnSlot;
      arg.addAttr(llvm::Attribute::getWithStructRetType(
          llvm_context(), function_type_info.return_type));
    } else {
      name_id = SemIR::GetPrettyNameFromPatternId(sem_ir(), inst_id);
    }
    arg.setName(sem_ir().names().GetIRBaseName(name_id));
  }
  return llvm_function;
}
  580. auto FileContext::BuildFunctionDefinition(SemIR::FunctionId function_id,
  581. SemIR::SpecificId specific_id)
  582. -> void {
  583. const auto& function = sem_ir().functions().Get(function_id);
  584. const auto& body_block_ids = function.body_block_ids;
  585. if (body_block_ids.empty() &&
  586. (!function.cpp_decl || !function.cpp_decl->isDefined())) {
  587. // Function is probably defined in another file; not an error.
  588. return;
  589. }
  590. llvm::Function* llvm_function;
  591. if (specific_id.has_value()) {
  592. llvm_function = specific_functions_[specific_id.index];
  593. } else {
  594. llvm_function = GetFunction(function_id);
  595. if (!llvm_function) {
  596. // We chose not to lower this function at all, for example because it's a
  597. // generic function.
  598. return;
  599. }
  600. }
  601. // For non-generics we do not lower. For generics, the llvm function was
  602. // created via GetOrCreateFunction prior to this when building the
  603. // declaration.
  604. BuildFunctionBody(function_id, function, llvm_function, specific_id);
  605. }
// Lowers the body of `function` into `llvm_function`. Bodies backed by a
// Clang declaration are emitted through the C++ code generator; all other
// bodies are lowered block-by-block from SemIR via a FunctionContext.
auto FileContext::BuildFunctionBody(SemIR::FunctionId function_id,
                                    const SemIR::Function& function,
                                    llvm::Function* llvm_function,
                                    SemIR::SpecificId specific_id) -> void {
  const auto& body_block_ids = function.body_block_ids;
  CARBON_DCHECK(llvm_function, "LLVM Function not found when lowering body.");
  if (function.cpp_decl) {
    // TODO: To support recursive inline functions, collect all calls to
    // `HandleTopLevelDecl()` in a custom `ASTConsumer` configured in the
    // `ASTUnit`, and replay them in lowering in the `CodeGenerator`. See
    // https://discord.com/channels/655572317891461132/768530752592805919/1370509111585935443
    clang::FunctionDecl* cpp_def = function.cpp_decl->getDefinition();
    CARBON_DCHECK(cpp_def, "No Clang function body found during lowering");
    // Create the LLVM function (`CodeGenModule::GetOrCreateLLVMFunction()`) so
    // that code generation (`CodeGenModule::EmitGlobal()`) would see this
    // function name (`CodeGenModule::getMangledName()`), and will generate its
    // definition.
    llvm::Constant* function_address =
        cpp_code_generator_->GetAddrOfGlobal(clang::GlobalDecl(cpp_def),
                                             /*isForDefinition=*/false);
    CARBON_DCHECK(function_address);
    // Emit the function code.
    cpp_code_generator_->HandleTopLevelDecl(clang::DeclGroupRef(cpp_def));
    return;
  }
  CARBON_DCHECK(!body_block_ids.empty(),
                "No function body blocks found during lowering.");
  // Store which specifics were already lowered (with definitions) for each
  // generic.
  if (function.generic_id.has_value() && specific_id.has_value()) {
    AddLoweredSpecificForGeneric(function.generic_id, specific_id);
  }
  FunctionContext function_lowering(
      *this, llvm_function, specific_id,
      InitializeFingerprintForSpecific(specific_id),
      BuildDISubprogram(function, llvm_function), vlog_stream_);

  // Add parameters to locals.
  // TODO: This duplicates the mapping between sem_ir instructions and LLVM
  // function parameters that was already computed in BuildFunctionDecl.
  // We should only do that once.
  auto call_param_ids =
      sem_ir().inst_blocks().GetOrEmpty(function.call_params_id);
  // Index of the next unconsumed LLVM argument; only advances for parameters
  // that have a value representation (see `lower_param` below).
  int param_index = 0;
  // TODO: Find a way to ensure this code and the function-call lowering use
  // the same parameter ordering.

  // Lowers the given parameter. Must be called in LLVM calling convention
  // parameter order.
  auto lower_param = [&](SemIR::InstId param_id) {
    // Get the value of the parameter from the function argument.
    auto param_inst = sem_ir().insts().GetAs<SemIR::AnyParam>(param_id);
    llvm::Value* param_value;
    if (SemIR::ValueRepr::ForType(sem_ir(), param_inst.type_id).kind !=
        SemIR::ValueRepr::None) {
      param_value = llvm_function->getArg(param_index);
      ++param_index;
    } else {
      // No value representation means no corresponding LLVM argument; bind
      // the parameter to a poison value of its lowered type instead.
      param_value = llvm::PoisonValue::get(GetType(
          SemIR::GetTypeOfInstInSpecific(sem_ir(), specific_id, param_id)));
    }
    // The value of the parameter is the value of the argument.
    function_lowering.SetLocal(param_id, param_value);
  };

  // The subset of call_param_ids that is already in the order that the LLVM
  // calling convention expects.
  llvm::ArrayRef<SemIR::InstId> sequential_param_ids;
  if (function.return_slot_pattern_id.has_value()) {
    // The LLVM calling convention has the return slot first rather than last.
    // Note that this queries whether there is a return slot at the LLVM level,
    // whereas `function.return_slot_pattern_id.has_value()` queries whether
    // there is a return slot at the SemIR level.
    if (SemIR::ReturnTypeInfo::ForFunction(sem_ir(), function, specific_id)
            .has_return_slot()) {
      lower_param(call_param_ids.back());
    }
    sequential_param_ids = call_param_ids.drop_back();
  } else {
    sequential_param_ids = call_param_ids;
  }
  for (auto param_id : sequential_param_ids) {
    lower_param(param_id);
  }

  // For the global constructor, use the empty block as its decl block;
  // otherwise read the decl block off the function's latest declaration.
  auto decl_block_id = SemIR::InstBlockId::None;
  if (function_id == sem_ir().global_ctor_id()) {
    decl_block_id = SemIR::InstBlockId::Empty;
  } else {
    decl_block_id = sem_ir()
                        .insts()
                        .GetAs<SemIR::FunctionDecl>(function.latest_decl_id())
                        .decl_block_id;
  }

  // Lowers the contents of block_id into the corresponding LLVM block,
  // creating it if it doesn't already exist.
  auto lower_block = [&](SemIR::InstBlockId block_id) {
    CARBON_VLOG("Lowering {0}\n", block_id);
    auto* llvm_block = function_lowering.GetBlock(block_id);
    // Keep the LLVM blocks in lexical order.
    llvm_block->moveBefore(llvm_function->end());
    function_lowering.builder().SetInsertPoint(llvm_block);
    function_lowering.LowerBlockContents(block_id);
  };

  lower_block(decl_block_id);

  // If the decl block is empty, reuse it as the first body block. We don't do
  // this when the decl block is non-empty so that any branches back to the
  // first body block don't also re-execute the decl.
  llvm::BasicBlock* block = function_lowering.builder().GetInsertBlock();
  if (block->empty() &&
      function_lowering.TryToReuseBlock(body_block_ids.front(), block)) {
    // Reuse this block as the first block of the function body.
  } else {
    function_lowering.builder().CreateBr(
        function_lowering.GetBlock(body_block_ids.front()));
  }

  // Lower all blocks.
  for (auto block_id : body_block_ids) {
    lower_block(block_id);
  }

  // LLVM requires that the entry block has no predecessors. If anything
  // branches back to the current entry block, insert a fresh entry block
  // that simply branches to the old one.
  auto* entry_block = &llvm_function->getEntryBlock();
  if (entry_block->hasNPredecessorsOrMore(1)) {
    auto* new_entry_block = llvm::BasicBlock::Create(
        llvm_context(), "entry", llvm_function, entry_block);
    llvm::BranchInst::Create(entry_block, new_entry_block);
  }

  // Emit fingerprint accumulated inside the function context.
  function_lowering.EmitFinalFingerprint();
}
  732. auto FileContext::BuildDISubprogram(const SemIR::Function& function,
  733. const llvm::Function* llvm_function)
  734. -> llvm::DISubprogram* {
  735. if (!context().di_compile_unit()) {
  736. return nullptr;
  737. }
  738. auto name = sem_ir().names().GetAsStringIfIdentifier(function.name_id);
  739. CARBON_CHECK(name, "Unexpected special name for function: {0}",
  740. function.name_id);
  741. auto loc = GetLocForDI(function.definition_id);
  742. // TODO: Add more details here, including real subroutine type (once type
  743. // information is built), etc.
  744. return context().di_builder().createFunction(
  745. context().di_compile_unit(), *name, llvm_function->getName(),
  746. /*File=*/context().di_builder().createFile(loc.filename, ""),
  747. /*LineNo=*/loc.line_number,
  748. context().di_builder().createSubroutineType(
  749. context().di_builder().getOrCreateTypeArray(std::nullopt)),
  750. /*ScopeLine=*/0, llvm::DINode::FlagZero,
  751. llvm::DISubprogram::SPFlagDefinition);
  752. }
// BuildTypeForInst is used to construct types for FileContext::BuildType below.
// Implementations return the LLVM type for the instruction. This first overload
// is the fallback handler for non-type instructions: reaching it means
// BuildType was asked to lower an instruction kind that can never be a type,
// which is a bug in the caller.
template <typename InstT>
  requires(InstT::Kind.is_type() == SemIR::InstIsType::Never)
static auto BuildTypeForInst(FileContext& /*context*/, InstT inst)
    -> llvm::Type* {
  CARBON_FATAL("Cannot use inst as type: {0}", inst);
}
  762. template <typename InstT>
  763. requires(InstT::Kind.is_symbolic_when_type())
  764. static auto BuildTypeForInst(FileContext& context, InstT /*inst*/)
  765. -> llvm::Type* {
  766. // Treat non-monomorphized symbolic types as opaque.
  767. return llvm::StructType::get(context.llvm_context());
  768. }
  769. static auto BuildTypeForInst(FileContext& context, SemIR::ArrayType inst)
  770. -> llvm::Type* {
  771. return llvm::ArrayType::get(
  772. context.GetType(context.sem_ir().types().GetTypeIdForTypeInstId(
  773. inst.element_type_inst_id)),
  774. *context.sem_ir().GetArrayBoundValue(inst.bound_id));
  775. }
// `auto` is expected to have been resolved to a concrete type before lowering
// runs; fail loudly if one reaches this point.
static auto BuildTypeForInst(FileContext& /*context*/, SemIR::AutoType inst)
    -> llvm::Type* {
  CARBON_FATAL("Unexpected builtin type in lowering: {0}", inst);
}
  780. static auto BuildTypeForInst(FileContext& context, SemIR::BoolType /*inst*/)
  781. -> llvm::Type* {
  782. // TODO: We may want to have different representations for `bool` storage
  783. // (`i8`) versus for `bool` values (`i1`).
  784. return llvm::Type::getInt1Ty(context.llvm_context());
  785. }
  786. static auto BuildTypeForInst(FileContext& context, SemIR::ClassType inst)
  787. -> llvm::Type* {
  788. auto object_repr_id = context.sem_ir()
  789. .classes()
  790. .Get(inst.class_id)
  791. .GetObjectRepr(context.sem_ir(), inst.specific_id);
  792. return context.GetType(object_repr_id);
  793. }
  794. static auto BuildTypeForInst(FileContext& context, SemIR::ConstType inst)
  795. -> llvm::Type* {
  796. return context.GetType(
  797. context.sem_ir().types().GetTypeIdForTypeInstId(inst.inner_id));
  798. }
  799. static auto BuildTypeForInst(FileContext& context,
  800. SemIR::ImplWitnessAssociatedConstant inst)
  801. -> llvm::Type* {
  802. return context.GetType(inst.type_id);
  803. }
static auto BuildTypeForInst(FileContext& /*context*/,
                             SemIR::ErrorInst /*inst*/) -> llvm::Type* {
  // This is a complete type but uses of it should never be lowered, so return
  // null rather than fabricating a representation.
  return nullptr;
}
  809. static auto BuildTypeForInst(FileContext& context, SemIR::FloatType /*inst*/)
  810. -> llvm::Type* {
  811. // TODO: Handle different sizes.
  812. return llvm::Type::getDoubleTy(context.llvm_context());
  813. }
  814. static auto BuildTypeForInst(FileContext& context, SemIR::IntType inst)
  815. -> llvm::Type* {
  816. auto width =
  817. context.sem_ir().insts().TryGetAs<SemIR::IntValue>(inst.bit_width_id);
  818. CARBON_CHECK(width, "Can't lower int type with symbolic width");
  819. return llvm::IntegerType::get(
  820. context.llvm_context(),
  821. context.sem_ir().ints().Get(width->int_id).getZExtValue());
  822. }
  823. static auto BuildTypeForInst(FileContext& context,
  824. SemIR::LegacyFloatType /*inst*/) -> llvm::Type* {
  825. return llvm::Type::getDoubleTy(context.llvm_context());
  826. }
  827. static auto BuildTypeForInst(FileContext& context, SemIR::PointerType /*inst*/)
  828. -> llvm::Type* {
  829. return llvm::PointerType::get(context.llvm_context(), /*AddressSpace=*/0);
  830. }
// Pattern types should never reach lowering; fail loudly if one does.
static auto BuildTypeForInst(FileContext& /*context*/,
                             SemIR::PatternType /*inst*/) -> llvm::Type* {
  CARBON_FATAL("Unexpected pattern type in lowering");
}
  835. static auto BuildTypeForInst(FileContext& context, SemIR::StructType inst)
  836. -> llvm::Type* {
  837. auto fields = context.sem_ir().struct_type_fields().Get(inst.fields_id);
  838. llvm::SmallVector<llvm::Type*> subtypes;
  839. subtypes.reserve(fields.size());
  840. for (auto field : fields) {
  841. subtypes.push_back(context.GetType(
  842. context.sem_ir().types().GetTypeIdForTypeInstId(field.type_inst_id)));
  843. }
  844. return llvm::StructType::get(context.llvm_context(), subtypes);
  845. }
  846. static auto BuildTypeForInst(FileContext& context, SemIR::TupleType inst)
  847. -> llvm::Type* {
  848. // TODO: Investigate special-casing handling of empty tuples so that they
  849. // can be collectively replaced with LLVM's void, particularly around
  850. // function returns. LLVM doesn't allow declaring variables with a void
  851. // type, so that may require significant special casing.
  852. auto elements = context.sem_ir().inst_blocks().Get(inst.type_elements_id);
  853. llvm::SmallVector<llvm::Type*> subtypes;
  854. subtypes.reserve(elements.size());
  855. for (auto type_id : context.sem_ir().types().GetBlockAsTypeIds(elements)) {
  856. subtypes.push_back(context.GetType(type_id));
  857. }
  858. return llvm::StructType::get(context.llvm_context(), subtypes);
  859. }
  860. static auto BuildTypeForInst(FileContext& context, SemIR::TypeType /*inst*/)
  861. -> llvm::Type* {
  862. return context.GetTypeType();
  863. }
  864. static auto BuildTypeForInst(FileContext& context, SemIR::VtableType /*inst*/)
  865. -> llvm::Type* {
  866. return llvm::Type::getVoidTy(context.llvm_context());
  867. }
  868. template <typename InstT>
  869. requires(InstT::Kind.template IsAnyOf<SemIR::SpecificFunctionType,
  870. SemIR::StringType>())
  871. static auto BuildTypeForInst(FileContext& context, InstT /*inst*/)
  872. -> llvm::Type* {
  873. // TODO: Decide how we want to represent `StringType`.
  874. return llvm::PointerType::get(context.llvm_context(), 0);
  875. }
  876. template <typename InstT>
  877. requires(InstT::Kind
  878. .template IsAnyOf<SemIR::BoundMethodType, SemIR::IntLiteralType,
  879. SemIR::NamespaceType, SemIR::WitnessType>())
  880. static auto BuildTypeForInst(FileContext& context, InstT /*inst*/)
  881. -> llvm::Type* {
  882. // Return an empty struct as a placeholder.
  883. return llvm::StructType::get(context.llvm_context());
  884. }
  885. template <typename InstT>
  886. requires(InstT::Kind.template IsAnyOf<
  887. SemIR::AssociatedEntityType, SemIR::FacetType, SemIR::FunctionType,
  888. SemIR::FunctionTypeWithSelfType, SemIR::GenericClassType,
  889. SemIR::GenericInterfaceType, SemIR::InstType,
  890. SemIR::UnboundElementType, SemIR::WhereExpr>())
  891. static auto BuildTypeForInst(FileContext& context, InstT /*inst*/)
  892. -> llvm::Type* {
  893. // Return an empty struct as a placeholder.
  894. // TODO: Should we model an interface as a witness table, or an associated
  895. // entity as an index?
  896. return llvm::StructType::get(context.llvm_context());
  897. }
// Builds the LLVM type for `inst_id` by dispatching to the BuildTypeForInst
// overload matching the instruction's kind. The X-macro expands one case per
// SemIR instruction kind.
auto FileContext::BuildType(SemIR::InstId inst_id) -> llvm::Type* {
  // Use overload resolution to select the implementation, producing compile
  // errors when BuildTypeForInst isn't defined for a given instruction.
  CARBON_KIND_SWITCH(sem_ir_->insts().Get(inst_id)) {
#define CARBON_SEM_IR_INST_KIND(Name)    \
  case CARBON_KIND(SemIR::Name inst): {  \
    return BuildTypeForInst(*this, inst); \
  }
#include "toolchain/sem_ir/inst_kind.def"
  }
}
  909. auto FileContext::BuildGlobalVariableDecl(SemIR::VarStorage var_storage)
  910. -> llvm::GlobalVariable* {
  911. Mangler m(*this);
  912. auto mangled_name = m.MangleGlobalVariable(var_storage.pattern_id);
  913. auto linkage = llvm::GlobalVariable::ExternalLinkage;
  914. // If the variable doesn't have an externally-visible name, demote it to
  915. // internal linkage and invent a plausible name that shouldn't collide with
  916. // any of our real manglings.
  917. if (mangled_name.empty()) {
  918. linkage = llvm::GlobalVariable::InternalLinkage;
  919. if (inst_namer_) {
  920. mangled_name =
  921. ("var.anon" + inst_namer_->GetUnscopedNameFor(var_storage.pattern_id))
  922. .str();
  923. }
  924. }
  925. auto* type = GetType(var_storage.type_id);
  926. return new llvm::GlobalVariable(llvm_module(), type,
  927. /*isConstant=*/false, linkage,
  928. /*Initializer=*/nullptr, mangled_name);
  929. }
  930. auto FileContext::GetLocForDI(SemIR::InstId inst_id) -> Context::LocForDI {
  931. return context().GetLocForDI(
  932. GetAbsoluteNodeId(sem_ir_, SemIR::LocId(inst_id)).back());
  933. }
  934. auto FileContext::BuildVtable(const SemIR::Class& class_info)
  935. -> llvm::GlobalVariable* {
  936. // Bail out if this class is not dynamic (this will account for classes that
  937. // are declared-and-not-defined (including extern declarations) as well).
  938. if (!class_info.is_dynamic) {
  939. return nullptr;
  940. }
  941. // Vtables can't be generated for generics, only for their specifics - and
  942. // must be done lazily based on the use of those specifics.
  943. if (class_info.generic_id != SemIR::GenericId::None) {
  944. return nullptr;
  945. }
  946. Mangler m(*this);
  947. std::string mangled_name = m.MangleVTable(class_info);
  948. auto first_owning_decl_loc =
  949. sem_ir().insts().GetCanonicalLocId(class_info.first_owning_decl_id);
  950. if (first_owning_decl_loc.kind() == SemIR::LocId::Kind::ImportIRInstId) {
  951. // Emit a declaration of an imported vtable using a(n opaque) pointer type.
  952. // This doesn't have to match the definition that appears elsewhere, it'll
  953. // still get merged correctly.
  954. auto* gv = new llvm::GlobalVariable(
  955. llvm_module(),
  956. llvm::PointerType::get(llvm_context(), /*AddressSpace=*/0),
  957. /*isConstant=*/true, llvm::GlobalValue::ExternalLinkage, nullptr,
  958. mangled_name);
  959. gv->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  960. return gv;
  961. }
  962. auto canonical_vtable_id =
  963. sem_ir().constant_values().GetConstantInstId(class_info.vtable_id);
  964. auto vtable_inst_block =
  965. sem_ir().inst_blocks().Get(sem_ir()
  966. .insts()
  967. .GetAs<SemIR::Vtable>(canonical_vtable_id)
  968. .virtual_functions_id);
  969. auto* entry_type = llvm::IntegerType::getInt32Ty(llvm_context());
  970. auto* table_type = llvm::ArrayType::get(entry_type, vtable_inst_block.size());
  971. auto* llvm_vtable = new llvm::GlobalVariable(
  972. llvm_module(), table_type, /*isConstant=*/true,
  973. llvm::GlobalValue::ExternalLinkage, nullptr, mangled_name);
  974. auto* i32_type = llvm::IntegerType::getInt32Ty(llvm_context());
  975. auto* i64_type = llvm::IntegerType::getInt64Ty(llvm_context());
  976. auto* vtable_const_int =
  977. llvm::ConstantExpr::getPtrToInt(llvm_vtable, i64_type);
  978. llvm::SmallVector<llvm::Constant*> vfuncs;
  979. vfuncs.reserve(vtable_inst_block.size());
  980. for (auto fn_decl_id : vtable_inst_block) {
  981. auto fn_decl = GetCalleeFunction(sem_ir(), fn_decl_id);
  982. vfuncs.push_back(llvm::ConstantExpr::getTrunc(
  983. llvm::ConstantExpr::getSub(
  984. llvm::ConstantExpr::getPtrToInt(
  985. GetOrCreateFunction(fn_decl.function_id,
  986. SemIR::SpecificId::None),
  987. i64_type),
  988. vtable_const_int),
  989. i32_type));
  990. }
  991. llvm_vtable->setInitializer(llvm::ConstantArray::get(table_type, vfuncs));
  992. llvm_vtable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  993. return llvm_vtable;
  994. }
  995. } // namespace Carbon::Lower