TPDE
ValuePartRef.hpp
1// SPDX-FileCopyrightText: 2025 Contributors to TPDE <https://tpde.org>
2//
3// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
4#pragma once
5
6#include "tpde/ValueAssignment.hpp"
7
8#include <cstring>
9#include <span>
10
11namespace tpde {
12
13template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
14class CompilerBase<Adaptor, Derived, Config>::ValuePart {
15private:
16 struct ConstantData {
17 AsmReg reg = AsmReg::make_invalid();
18 bool has_assignment = false;
19 bool owned;
20 bool is_const : 1;
21 bool const_inline : 1;
22 union {
23 const u64 *data;
24 u64 inline_data;
25 };
26 RegBank bank;
27 u32 size;
28 };
29
30 struct ValueData {
31 AsmReg reg = AsmReg::make_invalid(); // only valid if fixed/locked
32 bool has_assignment = true;
33 bool owned;
34 ValLocalIdx local_idx;
35 u32 part;
36 ValueAssignment *assignment;
37 };
38
39 union {
40 ConstantData c;
41 ValueData v;
42 } state;
43
44public:
45 ValuePart() noexcept : state{ConstantData{.is_const = false}} {}
46
47 ValuePart(RegBank bank) noexcept
48 : state{
49 ConstantData{.is_const = false, .bank = bank}
50 } {
51 assert(bank.id() < Config::NUM_BANKS);
52 }
53
54 ValuePart(ValLocalIdx local_idx,
55 ValueAssignment *assignment,
56 u32 part,
57 bool owned) noexcept
58 : state{
59 .v = ValueData{
60 .owned = owned,
61 .local_idx = local_idx,
62 .part = part,
63 .assignment = assignment,
64 }
65 } {
66 assert(this->assignment().variable_ref() ||
67 state.v.assignment->references_left);
68 assert(!owned || state.v.assignment->references_left == 1);
69 }
70
71 ValuePart(const u64 *data, u32 size, RegBank bank) noexcept
72 : state{
73 .c = ConstantData{.is_const = true,
74 .const_inline = false,
75 .data = data,
76 .bank = bank,
77 .size = size}
78 } {
79 assert(data && "constant data must not be null");
80 assert(bank.id() < Config::NUM_BANKS);
81 }
82
83 ValuePart(const u64 val, u32 size, RegBank bank) noexcept
84 : state{
85 .c = ConstantData{.is_const = true,
86 .const_inline = true,
87 .inline_data = val,
88 .bank = bank,
89 .size = size}
90 } {
91 assert(size <= sizeof(val));
92 assert(bank.id() < Config::NUM_BANKS);
93 }
94
95 explicit ValuePart(const ValuePart &) = delete;
96
97 ValuePart(ValuePart &&other) noexcept : state{other.state} {
98 other.state.c = ConstantData{.is_const = false, .bank = bank()};
99 }
100
101 ~ValuePart() noexcept {
102 assert(!state.c.reg.valid() && "must call reset() on ValuePart explicitly");
103 }
104
105 ValuePart &operator=(const ValuePart &) = delete;
106
107 ValuePart &operator=(ValuePart &&other) noexcept {
108 if (this == &other) {
109 return *this;
110 }
111 assert(!state.c.reg.valid() && "must call reset() on ValuePart explicitly");
112 this->state = other.state;
113 other.state.c = ConstantData{.is_const = false, .bank = bank()};
114 return *this;
115 }
116
117 bool has_assignment() const noexcept { return state.v.has_assignment; }
118
119 bool is_const() const noexcept {
120 return !state.c.has_assignment && state.c.is_const;
121 }
122
123 bool is_owned() const noexcept {
124 assert(has_assignment());
125 return state.c.owned;
126 }
127
128 [[nodiscard]] AssignmentPartRef assignment() const noexcept {
129 assert(has_assignment());
130 return AssignmentPartRef{state.v.assignment, state.v.part};
131 }
132
133 /// If the value part is known to currently hold a register, this function
134 /// gives quick access to it.
135 AsmReg cur_reg() const noexcept {
136 assert(state.v.reg.valid());
137 return state.v.reg;
138 }
139
140 /// Get the current register, if any, even if the value is unlocked and the
141 /// register could be evicted by any other operation.
142 AsmReg cur_reg_unlocked() const noexcept {
143 if (state.v.reg.valid()) {
144 return state.v.reg;
145 }
146 if (has_assignment()) {
147 if (auto ap = assignment(); ap.register_valid()) {
148 return ap.get_reg();
149 }
150 }
151 return AsmReg::make_invalid();
152 }
153
154 /// Is the value part currently in the specified register?
155 bool is_in_reg(AsmReg reg) const noexcept {
156 if (has_reg()) {
157 return cur_reg() == reg;
158 }
159 if (has_assignment()) {
160 auto ap = assignment();
161 return ap.register_valid() && ap.get_reg() == reg;
162 }
163 return false;
164 }
165
166 bool has_reg() const noexcept { return state.v.reg.valid(); }
167
168private:
169 AsmReg alloc_reg_impl(CompilerBase *compiler,
170 u64 exclusion_mask,
171 bool reload) noexcept;
172 AsmReg alloc_specific_impl(CompilerBase *compiler,
173 AsmReg reg,
174 bool reload) noexcept;
175
176public:
177 /// Allocate and lock a register for the value part, *without* reloading the
178 /// value. Does nothing if a register is already allocated.
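  ///
  /// Usage sketch (illustrative, placeholder names; assumes a result part with
  /// an assignment and the ASM macro used in the example below):
  ///   AsmReg res_reg = result_ref.alloc_reg();
  ///   ASM(MOV64ri, res_reg, 42);
  ///   result_ref.set_modified();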
179 AsmReg alloc_reg(CompilerBase *compiler, u64 exclusion_mask = 0) noexcept {
180 return alloc_reg_impl(compiler, exclusion_mask, /*reload=*/false);
181 }
182
183 /// Allocate register, but try to reuse the register from ref first. This
184 /// method is complicated and must be used carefully. If ref is locked in a
185 /// register and owns the register (can_salvage()), the ownership of the
186 /// register is transferred to this ValuePart without modifying the value.
187 /// Otherwise, a new register is allocated.
188 ///
189 /// Usage example:
190 /// AsmReg operand_reg = operand_ref.load_to_reg();
191 /// AsmReg result_reg = result_ref.alloc_try_reuse(operand_ref);
192 /// if (operand_reg == result_reg) {
193 /// // reuse successful
194 /// ASM(ADD64ri, result_reg, 1);
195 /// } else {
196 /// ASM(LEA64rm, result_reg, FE_MEM(FE_NOREG, 1, operand_reg, 1));
197 /// }
198 AsmReg alloc_try_reuse(CompilerBase *compiler, ValuePart &ref) noexcept {
199 assert(ref.has_reg());
200 if (!has_assignment() || !assignment().register_valid()) {
201 assert(!has_assignment() || !assignment().fixed_assignment());
202 if (ref.can_salvage()) {
203 set_value(compiler, std::move(ref));
204 if (has_assignment()) {
205 lock(compiler);
206 }
207 return cur_reg();
208 }
209 }
210 return alloc_reg(compiler);
211 }
212
213 /// Allocate and lock a specific register for the value part, spilling the
214 /// register if it is currently used (must not be fixed), *without* reloading
215 /// or copying the value into the new register. The value must not be locked.
216 /// An existing assignment register is discarded. Value part must not be a
217 /// fixed assignment.
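  ///
  /// Usage sketch (illustrative, placeholder names; res_ref is an uninitialized
  /// result part that the next instruction defines in a caller-chosen register):
  ///   res_ref.alloc_specific(fixed_reg);
  ///   // ... emit the instruction that writes fixed_reg ...
  ///   res_ref.set_modified();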
218 void alloc_specific(CompilerBase *compiler, AsmReg reg) noexcept {
219 alloc_specific_impl(compiler, reg, false);
220 }
221
222 /// Allocate, fill, and lock a register for the value part, reloading from
223 /// the stack or materializing the constant if necessary. Requires that the
224 /// value is currently unlocked (i.e., has_reg() is false).
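  ///
  /// Usage sketch (illustrative x86-64 mnemonics, in the style of the example
  /// above; names are placeholders):
  ///   AsmReg lhs_reg = lhs_ref.load_to_reg();
  ///   AsmReg rhs_reg = rhs_ref.load_to_reg();
  ///   AsmReg res_reg = res_ref.alloc_reg();
  ///   ASM(MOV64rr, res_reg, lhs_reg);
  ///   ASM(ADD64rr, res_reg, rhs_reg);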
225 AsmReg load_to_reg(CompilerBase *compiler) noexcept {
226 return alloc_reg_impl(compiler, 0, /*reload=*/true);
227 }
228
229 /// Allocate, fill, and lock a specific register for the value part, spilling
230 /// the register if it is currently used (must not be fixed). The value is
231 /// moved (assignment updated) or reloaded to this register. Value part must
232 /// not be a fixed assignment.
233 ///
234 /// \warning Do not overwrite the register content as it is not saved
235 /// \note The target register or the current value part may not be fixed
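  ///
  /// Usage sketch (illustrative, placeholder names; arg_reg is a fixed argument
  /// register chosen by the calling convention):
  ///   arg_ref.load_to_specific(arg_reg);
  ///   // arg_reg now holds the value and stays locked until unlock() or reset()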
236 void load_to_specific(CompilerBase *compiler, AsmReg reg) noexcept {
237 alloc_specific_impl(compiler, reg, true);
238 }
239
240 /// Copy the value into a different register; this value part is not updated.
241 AsmReg reload_into_specific_fixed(CompilerBase *compiler,
242 AsmReg reg,
243 unsigned size = 0) noexcept;
244
245 /// For a locked value, get an unowned ValuePart referring to the register.
246 ValuePart get_unowned() noexcept {
247 assert(has_reg());
248 ValuePart res{bank()};
249 res.state.c =
250 ConstantData{.reg = cur_reg(), .owned = false, .is_const = false};
251 return res;
252 }
253
254 /// Move into a temporary register, reusing an existing register if possible.
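  ///
  /// Usage sketch (illustrative, placeholder names): obtain a register that may
  /// be freely overwritten even if the operand value is still referenced
  /// elsewhere:
  ///   ValuePartRef tmp = std::move(op_ref).into_temporary();
  ///   AsmReg tmp_reg = tmp.cur_reg();
  ///   ASM(SHL64ri, tmp_reg, 3);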
255 ValuePart into_temporary(CompilerBase *compiler) && noexcept {
256 if (is_const()) {
257 if (state.c.const_inline) {
258 ValuePart res{state.c.inline_data, state.c.size, state.c.bank};
259 res.load_to_reg(compiler);
260 return res;
261 } else {
262 ValuePart res{state.c.data, state.c.size, state.c.bank};
263 res.load_to_reg(compiler);
264 return res;
265 }
266 }
267
268 // TODO: implement this. This needs size information to copy the value.
269 assert((has_assignment() || state.c.owned) &&
270 "into_temporary from unowned ValuePart not implemented");
271 ValuePart res{bank()};
272 res.set_value(compiler, std::move(*this));
273 if (!res.has_reg()) [[unlikely]] {
274 assert(res.is_const());
275 res.load_to_reg(compiler);
276 }
277 return res;
278 }
279
280 /// Move into a scratch register, reusing an existing register if possible.
281 ScratchReg into_scratch(CompilerBase *compiler) && noexcept {
282 // TODO: implement this. This needs size information to copy the value.
283 assert((has_assignment() || state.c.owned) &&
284 "into_scratch from unowned ValuePart not implemented");
285 ScratchReg res{compiler};
286 if (can_salvage()) {
287 res.alloc_specific(salvage(compiler));
288 } else {
289 reload_into_specific_fixed(compiler, res.alloc(bank()));
290 }
291 return res;
292 }
293
294 /// Extend an integer value, reusing an existing register if possible. Constants
295 /// are extended without allocating a register.
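  ///
  /// Usage sketch (illustrative, placeholder names): sign-extend a 32-bit index
  /// to 64 bits:
  ///   ValuePartRef wide = std::move(idx_ref).into_extended(/*sign=*/true, 32, 64);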
296 ValuePart into_extended(CompilerBase *compiler,
297 bool sign,
298 u32 from,
299 u32 to) && noexcept {
300 assert(from < to && "invalid integer extension sizes");
301 if (is_const() && to <= 64) {
302 u64 val = const_data()[0];
303 u64 extended = sign ? util::sext(val, from) : util::zext(val, from);
304 return ValuePart{extended, (to + 7) / 8, state.c.bank};
305 }
306 ValuePart res{bank()};
307 Reg src_reg = has_reg() ? cur_reg() : load_to_reg(compiler);
308 if (can_salvage()) {
309 res.set_value(compiler, std::move(*this));
310 assert(src_reg == res.cur_reg());
311 } else {
312 res.alloc_reg(compiler);
313 }
314 compiler->derived()->generate_raw_intext(
315 res.cur_reg(), src_reg, sign, from, to);
316 return res;
317 }
318
319 void lock(CompilerBase *compiler) noexcept;
320 void unlock(CompilerBase *compiler) noexcept;
321
322 void set_modified() noexcept {
323 assert(has_reg() && has_assignment());
324 assignment().set_modified(true);
325 }
326
327 /// Set the value to the value of a different value part, possibly taking
328 /// ownership of allocated registers. If this value part has an assignment,
329 /// the value part will be unlocked.
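  ///
  /// Usage sketch (illustrative, placeholder names): forward an operand
  /// unchanged, e.g. when lowering a bitcast-like operation:
  ///   res_ref.set_value(std::move(op_ref));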
330 void set_value(CompilerBase *compiler, ValuePart &&other) noexcept;
331
332 /// Set the value to the value of the scratch register, taking ownership of
333 /// the register.
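  ///
  /// Usage sketch (illustrative, placeholder names):
  ///   ScratchReg scratch{this};  // within the derived compiler
  ///   AsmReg tmp = scratch.alloc(res_ref.bank());
  ///   // ... compute the result into tmp ...
  ///   res_ref.set_value(std::move(scratch));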
334 void set_value(CompilerBase *compiler, ScratchReg &&other) noexcept;
335
336 /// Set the value to the value of the specified register, possibly taking
337 /// ownership of the register. Intended for filling in arguments and call
338 /// results, which are inherently stored in fixed registers. There must not be
339 /// a currently locked register.
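  ///
  /// Usage sketch (illustrative, placeholder names): bind a call result that the
  /// calling convention places in a fixed return register:
  ///   res_ref.set_value_reg(ret_reg);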
340 void set_value_reg(CompilerBase *compiler, AsmReg reg) noexcept;
341
342 bool can_salvage() const noexcept {
343 if (!has_assignment()) {
344 return state.c.owned && state.c.reg.valid();
345 }
346
347 return state.v.owned && assignment().register_valid();
348 }
349
350private:
351 AsmReg salvage_keep_used(CompilerBase *compiler) noexcept;
352
353public:
354 // only call when can_salvage returns true and a register is known to be
355 // allocated
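  //
  // Usage sketch (illustrative, placeholder names; mirrors the pattern used in
  // into_scratch above):
  //   ScratchReg tmp{compiler};
  //   if (op_ref.can_salvage()) {
  //     tmp.alloc_specific(op_ref.salvage(compiler));
  //   } else {
  //     op_ref.reload_into_specific_fixed(compiler, tmp.alloc(op_ref.bank()));
  //   }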
356 AsmReg salvage(CompilerBase *compiler) noexcept {
357 AsmReg reg = salvage_keep_used(compiler);
358 compiler->register_file.unmark_used(reg);
359 return reg;
360 }
361
362 ValLocalIdx local_idx() const noexcept {
363 assert(has_assignment());
364 return state.v.local_idx;
365 }
366
367 u32 part() const noexcept {
368 assert(has_assignment());
369 return state.v.part;
370 }
371
372 RegBank bank() const noexcept {
373 return !has_assignment() ? state.c.bank : assignment().bank();
374 }
375
376 u32 part_size() const noexcept {
377 return !has_assignment() ? state.c.size : assignment().part_size();
378 }
379
380 std::span<const u64> const_data() const noexcept {
381 assert(is_const());
382 if (state.c.const_inline) {
383 return {&state.c.inline_data, 1};
384 }
385 return {state.c.data, (state.c.size + 7) / 8};
386 }
387
388 /// Reset the reference to the value part
389 void reset(CompilerBase *compiler) noexcept;
390};
391
392template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
393typename CompilerBase<Adaptor, Derived, Config>::AsmReg
394 CompilerBase<Adaptor, Derived, Config>::ValuePart::alloc_reg_impl(
395 CompilerBase *compiler,
396 u64 exclusion_mask,
397 const bool reload) noexcept {
398 // The caller has no control over the selected register, so it must assume
399 // that this function evicts some register; that is not permitted when the
400 // value state must be preserved.
401 assert(compiler->may_change_value_state());
402 assert(!state.c.reg.valid());
403
404 RegBank bank;
405 if (has_assignment()) {
406 auto ap = assignment();
407 if (ap.register_valid()) {
408 lock(compiler);
409 // TODO: implement this if needed
410 assert((exclusion_mask & (1ull << state.v.reg.id())) == 0 &&
411 "moving registers in alloc_reg is unsupported");
412 return state.v.reg;
413 }
414
415 bank = ap.bank();
416 } else {
417 bank = state.c.bank;
418 }
419
420 Reg reg = compiler->select_reg(bank, exclusion_mask);
421 auto &reg_file = compiler->register_file;
422 reg_file.mark_clobbered(reg);
423 if (has_assignment()) {
424 reg_file.mark_used(reg, state.v.local_idx, state.v.part);
425 auto ap = assignment();
426 ap.set_reg(reg);
427 ap.set_register_valid(true);
428
429 // We must lock the value here; otherwise, load_from_stack could evict the
430 // register again.
431 lock(compiler);
432
433 if (reload) {
434 compiler->derived()->reload_to_reg(reg, ap);
435 } else {
436 assert(!ap.stack_valid() && "alloc_reg called on initialized value");
437 }
438 } else {
439 reg_file.mark_used(reg, INVALID_VAL_LOCAL_IDX, 0);
440 reg_file.mark_fixed(reg);
441 state.c.reg = reg;
442 state.c.owned = true;
443
444 if (reload) {
445 assert(is_const() && "cannot reload temporary value");
446 compiler->derived()->materialize_constant(
447 const_data().data(), state.c.bank, state.c.size, reg);
448 }
449 }
450
451 return reg;
452}
453
454template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
455typename CompilerBase<Adaptor, Derived, Config>::AsmReg
456 CompilerBase<Adaptor, Derived, Config>::ValuePart::alloc_specific_impl(
457 CompilerBase *compiler, AsmReg reg, const bool reload) noexcept {
458 assert(!state.c.reg.valid());
459
460 if (has_assignment()) {
461 auto ap = assignment();
462 assert(!ap.fixed_assignment());
463
464 if (ap.register_valid() && ap.get_reg() == reg) {
465 lock(compiler);
466 return ap.get_reg();
467 }
468 }
469
470 auto &reg_file = compiler->register_file;
471 if (reg_file.is_used(reg)) {
472 compiler->evict_reg(reg);
473 }
474
475 reg_file.mark_clobbered(reg);
476 if (has_assignment()) {
477 assert(compiler->may_change_value_state());
478
479 reg_file.mark_used(reg, state.v.local_idx, state.v.part);
480 auto ap = assignment();
481 auto old_reg = AsmReg::make_invalid();
482 if (ap.register_valid()) {
483 old_reg = ap.get_reg();
484 }
485
486 ap.set_reg(reg);
487 ap.set_register_valid(true);
488
489 // We must lock the value here; otherwise, load_from_stack could evict the
490 // register again.
491 lock(compiler);
492
493 if (reload) {
494 if (old_reg.valid()) {
495 compiler->derived()->mov(reg, old_reg, ap.part_size());
496 reg_file.unmark_used(old_reg);
497 } else {
498 compiler->derived()->reload_to_reg(reg, ap);
499 }
500 } else {
501 assert(!ap.stack_valid() && "alloc_reg with valid stack slot");
502 }
503 } else {
504 reg_file.mark_used(reg, INVALID_VAL_LOCAL_IDX, 0);
505 reg_file.mark_fixed(reg);
506
507 if (reload) {
508 if (state.c.reg.valid()) {
509 // TODO: size
510 compiler->derived()->mov(reg, state.c.reg, 8);
511 reg_file.unmark_fixed(state.c.reg);
512 reg_file.unmark_used(state.c.reg);
513 } else {
514 assert(is_const() && "cannot reload temporary value");
515 compiler->derived()->materialize_constant(
516 const_data().data(), state.c.bank, state.c.size, reg);
517 }
518 }
519
520 state.c.reg = reg;
521 state.c.owned = true;
522 }
523
524 return reg;
525}
526
527template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
528typename CompilerBase<Adaptor, Derived, Config>::AsmReg
529 CompilerBase<Adaptor, Derived, Config>::ValuePart::reload_into_specific_fixed(
530 CompilerBase *compiler,
531 AsmReg reg,
532 unsigned size) noexcept {
533 if (is_const()) {
534 compiler->derived()->materialize_constant(
535 const_data().data(), state.c.bank, state.c.size, reg);
536 return reg;
537 }
538 if (!has_assignment()) {
539 assert(has_reg());
540 assert(reg != cur_reg());
541 // TODO: value size
542 assert(size != 0);
543 compiler->derived()->mov(reg, cur_reg(), size);
544 return reg;
545 }
546
547 auto ap = assignment();
548 if (has_reg()) {
549 assert(cur_reg() != reg);
550 compiler->derived()->mov(reg, cur_reg(), ap.part_size());
551 } else if (ap.register_valid()) {
552 assert(ap.get_reg() != reg);
553
554 compiler->derived()->mov(reg, ap.get_reg(), ap.part_size());
555 } else {
556 assert(!ap.fixed_assignment());
557 compiler->derived()->reload_to_reg(reg, ap);
558 }
559
560 compiler->register_file.mark_clobbered(reg);
561 return reg;
562}
563
564template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
565void CompilerBase<Adaptor, Derived, Config>::ValuePart::lock(
566 CompilerBase *compiler) noexcept {
567 assert(has_assignment());
568 assert(!has_reg());
569 auto ap = assignment();
570 assert(ap.register_valid());
571
572 const auto reg = ap.get_reg();
573 compiler->register_file.inc_lock_count(reg);
574 state.v.reg = reg;
575}
576
577template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
578void CompilerBase<Adaptor, Derived, Config>::ValuePart::unlock(
579 CompilerBase *compiler) noexcept {
580 assert(has_assignment());
581 if (!state.v.reg.valid()) {
582 return;
583 }
584
585 compiler->register_file.dec_lock_count(state.v.reg);
586 state.v.reg = AsmReg::make_invalid();
587}
588
589template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
590void CompilerBase<Adaptor, Derived, Config>::ValuePart::set_value(
591 CompilerBase *compiler, ValuePart &&other) noexcept {
592 auto &reg_file = compiler->register_file;
593 if (!has_assignment()) {
594 assert(!is_const()); // probably don't want to allow mutating constants
595
596 // This is a temporary, which might currently have a register. We want to
597 // have a temporary register that holds the value at the end.
598 if (!other.has_assignment()) {
599 // When other is a temporary/constant, just take the value and drop our
600 // own register (if we have any).
601 reset(compiler);
602 *this = std::move(other);
603 return;
604 }
605
606 if (!other.can_salvage()) {
607 // We cannot take the register of other, so copy the value
608 AsmReg cur_reg = alloc_reg(compiler);
609 other.reload_into_specific_fixed(compiler, cur_reg);
610 other.reset(compiler);
611 return;
612 }
613
614 // We can take the register of other.
615 reset(compiler);
616
617 state.c.reg = other.salvage_keep_used(compiler);
618 state.c.owned = true;
619 reg_file.mark_fixed(state.c.reg);
620 reg_file.update_reg_assignment(state.c.reg, INVALID_VAL_LOCAL_IDX, 0);
621 return;
622 }
623
624 // Update the value of the assignment part
625 auto ap = assignment();
626 assert(!ap.variable_ref() && "cannot update variable ref");
627
628 if (ap.fixed_assignment() || !other.can_salvage()) {
629 // Source value owns no register or it is not reusable: copy value
630 AsmReg cur_reg = alloc_reg(compiler);
631 other.reload_into_specific_fixed(compiler, cur_reg, ap.part_size());
632 other.reset(compiler);
633 unlock(compiler);
634 ap.set_register_valid(true);
635 ap.set_modified(true);
636 return;
637 }
638
639 // Reuse register of other assignment
640 if (ap.register_valid()) {
641 // If we currently have a register, drop it
642 unlock(compiler);
643 auto cur_reg = ap.get_reg();
644 assert(!reg_file.is_fixed(cur_reg));
645 reg_file.unmark_used(cur_reg);
646 }
647
648 AsmReg new_reg = other.salvage_keep_used(compiler);
649 reg_file.update_reg_assignment(new_reg, local_idx(), part());
650 ap.set_reg(new_reg);
651 ap.set_register_valid(true);
652 ap.set_modified(true);
653}
654
655template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
656void CompilerBase<Adaptor, Derived, Config>::ValuePart::set_value(
657 CompilerBase *compiler, ScratchReg &&other) noexcept {
658 assert(compiler->may_change_value_state());
659
660 auto &reg_file = compiler->register_file;
661
662 // We could support this, but there shouldn't be a need for that.
663 assert(other.has_reg() && "cannot initialize with invalid register");
664 Reg value_reg = other.cur_reg();
665 assert(reg_file.is_fixed(value_reg));
666 assert(reg_file.is_used(value_reg));
667 assert(reg_file.is_clobbered(value_reg));
668 assert(!state.c.reg.valid() &&
669 "attempted to overwrite already initialized and locked ValuePartRef");
670
671 if (!has_assignment()) {
672 assert(!is_const() && "cannot mutate constant ValuePartRef");
673 state.c.reg = value_reg;
674 state.c.owned = true;
675 assert(reg_file.reg_local_idx(value_reg) == INVALID_VAL_LOCAL_IDX);
676 assert(reg_file.reg_part(value_reg) == 0);
677 other.force_set_reg(AsmReg::make_invalid());
678 return;
679 }
680
681 // Update the value of the assignment part
682 auto ap = assignment();
683 assert(!ap.variable_ref() && "cannot update variable ref");
684
685 if (ap.fixed_assignment()) {
686 // For fixed assignments, copy the value into the fixed register.
687 auto cur_reg = ap.get_reg();
688 assert(reg_file.is_used(cur_reg));
689 assert(reg_file.is_fixed(cur_reg));
690 assert(reg_file.reg_local_idx(cur_reg) == local_idx());
691 assert(ap.register_valid() && !ap.stack_valid() &&
692 "invalid state for fixed assignment");
693 assert(cur_reg != value_reg);
694 compiler->derived()->mov(cur_reg, value_reg, ap.part_size());
695 other.reset();
696 return;
697 }
698
699 // Otherwise, take the register.
700 assert(!ap.register_valid() && !ap.stack_valid() &&
701 "attempted to overwrite already initialized ValuePartRef");
702
703 // ScratchReg's reg is fixed and used => unfix, keep used, update assignment
704 reg_file.unmark_fixed(value_reg);
705 reg_file.update_reg_assignment(value_reg, local_idx(), part());
706 ap.set_reg(value_reg);
707 ap.set_register_valid(true);
708 other.force_set_reg(AsmReg::make_invalid());
709}
710
711template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
712void CompilerBase<Adaptor, Derived, Config>::ValuePart::set_value_reg(
713 CompilerBase *compiler, AsmReg value_reg) noexcept {
714 assert(compiler->may_change_value_state());
715
716 auto &reg_file = compiler->register_file;
717
718 // We could support this, but there shouldn't be a need for that.
719 assert(value_reg.valid() && "cannot initialize with invalid register");
720 assert(!state.c.reg.valid() &&
721 "attempted to overwrite already initialized and locked ValuePartRef");
722
723 if (!has_assignment()) {
724 assert(!is_const() && "cannot mutate constant ValuePartRef");
725 state.c.reg = value_reg;
726 state.c.owned = true;
727 reg_file.mark_used(state.c.reg, INVALID_VAL_LOCAL_IDX, 0);
728 reg_file.mark_fixed(state.c.reg);
729 return;
730 }
731
732 // Update the value of the assignment part
733 auto ap = assignment();
734 assert(!ap.variable_ref() && "cannot update variable ref");
735
736 if (ap.fixed_assignment()) {
737 // For fixed assignments, copy the value into the fixed register.
738 auto cur_reg = ap.get_reg();
739 assert(reg_file.is_used(cur_reg));
740 assert(reg_file.is_fixed(cur_reg));
741 assert(reg_file.reg_local_idx(cur_reg) == local_idx());
742 // TODO: can this happen? If so, conditionally emit move.
743 assert(cur_reg != value_reg);
744 compiler->derived()->mov(cur_reg, value_reg, ap.part_size());
745 ap.set_register_valid(true);
746 ap.set_modified(true);
747 return;
748 }
749
750 // Otherwise, take the register.
751 assert(!ap.register_valid() && !ap.stack_valid() &&
752 "attempted to overwrite already initialized ValuePartRef");
753
754 reg_file.mark_used(value_reg, local_idx(), part());
755 reg_file.mark_clobbered(value_reg);
756 ap.set_reg(value_reg);
757 ap.set_register_valid(true);
758 ap.set_modified(true);
759}
760
761template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
762typename CompilerBase<Adaptor, Derived, Config>::AsmReg
763 CompilerBase<Adaptor, Derived, Config>::ValuePart::salvage_keep_used(
764 CompilerBase *compiler) noexcept {
765 assert(compiler->may_change_value_state());
766 assert(can_salvage());
767 if (!has_assignment()) {
768 AsmReg reg = state.c.reg;
769 compiler->register_file.unmark_fixed(reg);
770 state.c.reg = AsmReg::make_invalid();
771 return reg;
772 }
773
774 auto ap = assignment();
775 assert(ap.register_valid());
776 auto cur_reg = ap.get_reg();
777
778 unlock(compiler);
779 assert(ap.fixed_assignment() || !compiler->register_file.is_fixed(cur_reg));
780 if (ap.fixed_assignment()) {
781 compiler->register_file.dec_lock_count(cur_reg); // release fixed register
782 }
783
784 ap.set_register_valid(false);
785 ap.set_fixed_assignment(false);
786 return cur_reg;
787}
788
789template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
790void CompilerBase<Adaptor, Derived, Config>::ValuePart::reset(
791 CompilerBase *compiler) noexcept {
792 AsmReg reg = state.c.reg;
793 if (!reg.valid()) {
794 return;
795 }
796
797#ifndef NDEBUG
798 // In debug builds, touch assignment to catch cases where the assignment was
799 // already freed.
800 assert(!has_assignment() || assignment().modified() || true);
801#endif
802
803 if (state.c.owned) {
804 if (has_assignment()) {
805 AssignmentPartRef ap = assignment();
806 bool fixed = ap.fixed_assignment();
807 ap.set_register_valid(false);
808 ap.set_fixed_assignment(false);
809 compiler->register_file.dec_lock_count_must_zero(reg, fixed ? 2 : 1);
810 } else {
811 compiler->register_file.unmark_fixed(reg);
812 }
813 compiler->register_file.unmark_used(reg);
814 } else if (has_assignment()) {
815 compiler->register_file.dec_lock_count(reg);
816 }
817
818 state.c.reg = AsmReg::make_invalid();
819}
820
821template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
822struct CompilerBase<Adaptor, Derived, Config>::ValuePartRef : ValuePart {
823 CompilerBase *compiler;
824
825 template <typename... Args>
826 ValuePartRef(CompilerBase *compiler, Args &&...args) noexcept
827 : ValuePart(std::forward<Args>(args)...), compiler(compiler) {}
828
829 explicit ValuePartRef(const ValuePartRef &) = delete;
830
831 ValuePartRef(ValuePartRef &&other) noexcept
832 : ValuePart(std::move(other)), compiler(other.compiler) {}
833
834 ~ValuePartRef() noexcept { reset(); }
835
836 ValuePartRef &operator=(const ValuePartRef &) = delete;
837
838 ValuePartRef &operator=(ValuePartRef &&other) noexcept {
839 if (this == &other) {
840 return *this;
841 }
842 reset();
843 ValuePart::operator=(std::move(other));
844 return *this;
845 }
846
847 AsmReg alloc_reg(u64 exclusion_mask = 0) noexcept {
848 return ValuePart::alloc_reg(compiler, exclusion_mask);
849 }
850
851 AsmReg alloc_try_reuse(ValuePart &ref) noexcept {
852 return ValuePart::alloc_try_reuse(compiler, ref);
853 }
854
855 void alloc_specific(AsmReg reg) noexcept {
856 ValuePart::alloc_specific(compiler, reg);
857 }
858
859 AsmReg load_to_reg() noexcept { return ValuePart::load_to_reg(compiler); }
860
861 void load_to_specific(AsmReg reg) noexcept {
862 ValuePart::load_to_specific(compiler, reg);
863 }
864
865 AsmReg reload_into_specific_fixed(AsmReg reg, unsigned size = 0) noexcept {
866 return ValuePart::reload_into_specific_fixed(compiler, reg, size);
867 }
868
869 AsmReg reload_into_specific_fixed(CompilerBase *compiler,
870 AsmReg reg,
871 unsigned size = 0) noexcept {
872 return ValuePart::reload_into_specific_fixed(compiler, reg, size);
873 }
874
875 ValuePartRef get_unowned_ref() noexcept {
876 return ValuePartRef{compiler, ValuePart::get_unowned()};
877 }
878
879 ValuePartRef into_temporary() && noexcept {
880 return ValuePartRef{
881 compiler,
882 std::move(*static_cast<ValuePart *>(this)).into_temporary(compiler)};
883 }
884
885 ValuePartRef into_extended(bool sign, u32 from, u32 to) && noexcept {
886 return ValuePartRef{compiler,
887 std::move(*static_cast<ValuePart *>(this))
888 .into_extended(compiler, sign, from, to)};
889 }
890
891 void lock() noexcept { ValuePart::lock(compiler); }
892 void unlock() noexcept { ValuePart::unlock(compiler); }
893
894 void set_value(ValuePart &&other) noexcept {
895 ValuePart::set_value(compiler, std::move(other));
896 }
897
898 void set_value(ScratchReg &&other) noexcept {
899 ValuePart::set_value(compiler, std::move(other));
900 }
901
902 void set_value_reg(AsmReg value_reg) noexcept {
903 ValuePart::set_value_reg(compiler, value_reg);
904 }
905
906 AsmReg salvage() noexcept { return ValuePart::salvage(compiler); }
907
908 void reset() noexcept { ValuePart::reset(compiler); }
909};
910
911} // namespace tpde