TPDE
ValuePartRef.hpp
1// SPDX-FileCopyrightText: 2025 Contributors to TPDE <https://tpde.org>
2//
3// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
4#pragma once
5
6#include "tpde/ValueAssignment.hpp"
7
8#include <cstring>
9#include <span>
10
11namespace tpde {
12
13template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
14class CompilerBase<Adaptor, Derived, Config>::ValuePart {
15private:
16 struct ConstantData {
17 AsmReg reg = AsmReg::make_invalid();
18 bool has_assignment = false;
19 bool owned;
20 bool is_const : 1;
21 bool const_inline : 1;
22 union {
23 const u64 *data;
24 u64 inline_data;
25 };
26 RegBank bank;
27 u32 size;
28 };
29
30 struct ValueData {
31 AsmReg reg = AsmReg::make_invalid(); // only valid if fixed/locked
32 bool has_assignment = true;
33 bool owned;
34 ValLocalIdx local_idx;
35 u32 part;
36 ValueAssignment *assignment;
37 };
38
39 union {
40 ConstantData c;
41 ValueData v;
42 } state;
43
44public:
45 ValuePart() noexcept : state{ConstantData{.is_const = false}} {}
46
47 ValuePart(RegBank bank) noexcept
48 : state{
49 ConstantData{.is_const = false, .bank = bank}
50 } {
51 assert(bank.id() < Config::NUM_BANKS);
52 }
53
54 ValuePart(ValLocalIdx local_idx,
55 ValueAssignment *assignment,
56 u32 part,
57 bool owned) noexcept
58 : state{
59 .v = ValueData{
60 .owned = owned,
61 .local_idx = local_idx,
62 .part = part,
63 .assignment = assignment,
64 }
65 } {
66 assert(this->assignment().variable_ref() ||
67 state.v.assignment->references_left);
68 assert(!owned || state.v.assignment->references_left == 1);
69 }
70
71 ValuePart(const u64 *data, u32 size, RegBank bank) noexcept
72 : state{
73 .c = ConstantData{.is_const = true,
74 .const_inline = false,
75 .data = data,
76 .bank = bank,
77 .size = size}
78 } {
79 assert(data && "constant data must not be null");
80 assert(bank.id() < Config::NUM_BANKS);
81 }
82
83 ValuePart(const u64 val, u32 size, RegBank bank) noexcept
84 : state{
85 .c = ConstantData{.is_const = true,
86 .const_inline = true,
87 .inline_data = val,
88 .bank = bank,
89 .size = size}
90 } {
91 assert(size <= sizeof(val));
92 assert(bank.id() < Config::NUM_BANKS);
93 }
94
95 explicit ValuePart(const ValuePart &) = delete;
96
97 ValuePart(ValuePart &&other) noexcept : state{other.state} {
98 other.state.c = ConstantData{.is_const = false, .bank = bank()};
99 }
100
101 ~ValuePart() noexcept {
102 assert(!state.c.reg.valid() && "must call reset() on ValuePart explicitly");
103 }
104
105 ValuePart &operator=(const ValuePart &) = delete;
106
107 ValuePart &operator=(ValuePart &&other) noexcept {
108 if (this == &other) {
109 return *this;
110 }
111 assert(!state.c.reg.valid() && "must call reset() on ValuePart explicitly");
112 this->state = other.state;
113 other.state.c = ConstantData{.is_const = false, .bank = bank()};
114 return *this;
115 }
116
117 bool has_assignment() const noexcept { return state.v.has_assignment; }
118
119 bool is_const() const noexcept {
120 return !state.c.has_assignment && state.c.is_const;
121 }
122
123 bool is_owned() const noexcept {
124 assert(has_assignment());
125 return state.c.owned;
126 }
127
128 [[nodiscard]] AssignmentPartRef assignment() const noexcept {
129 assert(has_assignment());
130 return AssignmentPartRef{state.v.assignment, state.v.part};
131 }
132
133 /// If it is known that the value part has a register, this function can be
134 /// used to quickly access it
135 AsmReg cur_reg() const noexcept {
136 assert(state.v.reg.valid());
137 return state.v.reg;
138 }
139
140 /// Current register or none, even if the value is unlocked and could be
141 /// evicted by any other operation.
142 AsmReg cur_reg_unlocked() const noexcept {
143 if (state.v.reg.valid()) {
144 return state.v.reg;
145 }
146 if (has_assignment()) {
147 if (auto ap = assignment(); ap.register_valid()) {
148 return ap.get_reg();
149 }
150 }
151 return AsmReg::make_invalid();
152 }
153
154 /// Is the value part currently in the specified register?
155 bool is_in_reg(AsmReg reg) const noexcept {
156 if (has_reg()) {
157 return cur_reg() == reg;
158 }
159 if (has_assignment()) {
160 auto ap = assignment();
161 return ap.register_valid() && ap.get_reg() == reg;
162 }
163 return false;
164 }
165
166 bool has_reg() const noexcept { return state.v.reg.valid(); }
167
168private:
169 AsmReg alloc_reg_impl(CompilerBase *compiler,
170 u64 exclusion_mask,
171 bool reload) noexcept;
172 AsmReg alloc_specific_impl(CompilerBase *compiler,
173 AsmReg reg,
174 bool reload) noexcept;
175
176public:
177 /// Allocate and lock a register for the value part, *without* reloading the
178 /// value. Does nothing if a register is already allocated.
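  ///
  /// Possible usage sketch, assuming a hypothetical result ref res_ref for an
  /// uninitialized result part and the x86-64 ASM encoder macro shown in the
  /// alloc_try_reuse example below:
  ///   AsmReg res_reg = res_ref.alloc_reg();
  ///   ASM(MOV64ri, res_reg, 42);
  ///   res_ref.set_modified();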
179 AsmReg alloc_reg(CompilerBase *compiler, u64 exclusion_mask = 0) noexcept {
180 return alloc_reg_impl(compiler, exclusion_mask, /*reload=*/false);
181 }
182
183 /// Allocate register, but try to reuse the register from ref first. This
184 /// method is complicated and must be used carefully. If ref is locked in a
185 /// register and owns the register (can_salvage()), the ownership of the
186 /// register is transferred to this ValuePart without modifying the value.
187 /// Otherwise, a new register is allocated.
188 ///
189 /// Usage example:
190 /// AsmReg operand_reg = operand_ref.load_to_reg();
191 /// AsmReg result_reg = result_ref.alloc_try_reuse(operand_ref);
192 /// if (operand_reg == result_reg) {
193 /// // reuse successful
194 /// ASM(ADD64ri, result_reg, 1);
195 /// } else {
196 /// ASM(LEA64rm, result_reg, FE_MEM(FE_NOREG, 1, operand_reg, 1));
197 /// }
198 AsmReg alloc_try_reuse(CompilerBase *compiler, ValuePart &ref) noexcept {
199 assert(ref.has_reg());
200 if (!has_assignment() || !assignment().register_valid()) {
201 assert(!has_assignment() || !assignment().fixed_assignment());
202 if (ref.can_salvage()) {
203 set_value(compiler, std::move(ref));
204 if (has_assignment()) {
205 lock(compiler);
206 }
207 return cur_reg();
208 }
209 }
210 return alloc_reg(compiler);
211 }
212
213 /// Allocate and lock a specific register for the value part, spilling the
214 /// register if it is currently used (must not be fixed), *without* reloading
215 /// or copying the value into the new register. The value must not be locked.
216 /// An existing assignment register is discarded. Value part must not be a
217 /// fixed assignment.
218 void alloc_specific(CompilerBase *compiler, AsmReg reg) noexcept {
219 alloc_specific_impl(compiler, reg, false);
220 }
221
222 /// Allocate, fill, and lock a register for the value part, reloading from
223 /// the stack or materializing the constant if necessary. Requires that the
224 /// value is currently unlocked (i.e., has_reg() is false).
225 AsmReg load_to_reg(CompilerBase *compiler) noexcept {
226 return alloc_reg_impl(compiler, 0, /*reload=*/true);
227 }
228
229 /// Allocate, fill, and lock a specific register for the value part, spilling
230 /// the register if it is currently used (must not be fixed). The value is
231 /// moved (assignment updated) or reloaded to this register. Value part must
232 /// not be a fixed assignment.
233 ///
234 /// \warning Do not overwrite the register content as it is not saved
235 /// \note The target register or the current value part may not be fixed
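  ///
  /// Possible usage sketch, assuming a hypothetical arg_ref that must end up
  /// in a fixed ABI register (AsmReg::DI here is a hypothetical name for the
  /// first x86-64 SysV argument register):
  ///   arg_ref.load_to_specific(AsmReg::DI);
  ///   // ... emit the call ...
  ///   arg_ref.reset();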
236 void load_to_specific(CompilerBase *compiler, AsmReg reg) noexcept {
237 alloc_specific_impl(compiler, reg, true);
238 }
239
240 /// Copy value into a different register.
241 AsmReg reload_into_specific_fixed(CompilerBase *compiler,
242 AsmReg reg,
243 unsigned size = 0) noexcept;
244
245 /// For a locked value, get an unowned ValuePart referring to the register.
246 ValuePart get_unowned() noexcept {
247 assert(has_reg());
248 ValuePart res{bank()};
249 res.state.c =
250 ConstantData{.reg = cur_reg(), .owned = false, .is_const = false};
251 return res;
252 }
253
254 /// Move into a temporary register, reuse an existing register if possible.
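  ///
  /// Possible usage sketch for a destructive two-operand instruction, assuming
  /// hypothetical lhs_ref/rhs_reg operands and the x86-64 ASM encoder macro
  /// shown in the alloc_try_reuse example above:
  ///   ValuePartRef tmp = std::move(lhs_ref).into_temporary();
  ///   ASM(ADD64rr, tmp.cur_reg(), rhs_reg);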
255 ValuePart into_temporary(CompilerBase *compiler) && noexcept {
256 if (is_const()) {
257 if (state.c.const_inline) {
258 ValuePart res{state.c.inline_data, state.c.size, state.c.bank};
259 res.load_to_reg(compiler);
260 return res;
261 } else {
262 ValuePart res{state.c.data, state.c.size, state.c.bank};
263 res.load_to_reg(compiler);
264 return res;
265 }
266 }
267
268 // TODO: implement this. This needs size information to copy the value.
269 assert((has_assignment() || state.c.owned) &&
270 "into_temporary from unowned ValuePart not implemented");
271 ValuePart res{bank()};
272 res.set_value(compiler, std::move(*this));
273 if (!res.has_reg()) [[unlikely]] {
274 assert(res.is_const());
275 res.load_to_reg(compiler);
276 }
277 return res;
278 }
279
280 /// Move into a scratch register, reuse an existing register if possible.
281 ScratchReg into_scratch(CompilerBase *compiler) && noexcept {
282 // TODO: implement this. This needs size information to copy the value.
283 assert((has_assignment() || state.c.owned || state.c.is_const) &&
284 "into_scratch from unowned ValuePart not implemented");
285 ScratchReg res{compiler};
286 if (can_salvage()) {
287 res.alloc_specific(salvage(compiler));
288 } else {
289 reload_into_specific_fixed(compiler, res.alloc(bank()));
290 }
291 return res;
292 }
293
294 /// Extend integer value, reuse existing register if possible. Constants are
295 /// extended without allocating a register.
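  ///
  /// Possible usage sketch, assuming a hypothetical op_ref holding an 8-bit
  /// operand part that should be sign-extended to 64 bits (sizes are bit
  /// widths):
  ///   ValuePartRef wide = std::move(op_ref).into_extended(/*sign=*/true,
  ///                                                       /*from=*/8,
  ///                                                       /*to=*/64);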
296 ValuePart into_extended(CompilerBase *compiler,
297 bool sign,
298 u32 from,
299 u32 to) && noexcept {
300 assert(from < to && "invalid integer extension sizes");
301 if (is_const() && to <= 64) {
302 u64 val = const_data()[0];
303 u64 extended = sign ? util::sext(val, from) : util::zext(val, from);
304 return ValuePart{extended, (to + 7) / 8, state.c.bank};
305 }
306 ValuePart res{bank()};
307 Reg src_reg = has_reg() ? cur_reg() : load_to_reg(compiler);
308 if (can_salvage()) {
309 res.set_value(compiler, std::move(*this));
310 assert(src_reg == res.cur_reg());
311 } else {
312 res.alloc_reg(compiler);
313 }
314 compiler->derived()->generate_raw_intext(
315 res.cur_reg(), src_reg, sign, from, to);
316 return res;
317 }
318
319 void lock(CompilerBase *compiler) noexcept;
320 void unlock(CompilerBase *compiler) noexcept;
321
322 void set_modified() noexcept {
323 assert(has_reg() && has_assignment());
324 assignment().set_modified(true);
325 }
326
327 /// Set the value to the value of a different value part, possibly taking
328 /// ownership of allocated registers. If this value part has an assignment,
329 /// the value part will be unlocked.
330 void set_value(CompilerBase *compiler, ValuePart &&other) noexcept;
331
332 /// Set the value to the value of the scratch register, taking ownership of
333 /// the register.
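  ///
  /// Possible usage sketch from within a derived compiler, assuming a
  /// hypothetical result ref res_ref:
  ///   ScratchReg scratch{this};
  ///   AsmReg tmp = scratch.alloc(res_ref.bank());
  ///   // ... emit code that writes the result into tmp ...
  ///   res_ref.set_value(std::move(scratch));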
334 void set_value(CompilerBase *compiler, ScratchReg &&other) noexcept;
335
336 /// Set the value to the value of the specified register, possibly taking
337 /// ownership of the register. Intended for filling in arguments/call results
338 /// which inherently get stored to fixed registers. There must not be a
339 /// currently locked register.
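  ///
  /// Possible usage sketch, assuming a hypothetical ret_ref for a call result
  /// that the ABI places in a fixed register (AsmReg::AX here is a
  /// hypothetical name for the x86-64 return register):
  ///   ret_ref.set_value_reg(AsmReg::AX);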
340 void set_value_reg(CompilerBase *compiler, AsmReg reg) noexcept;
341
342 bool can_salvage() const noexcept {
343 if (!has_assignment()) {
344 return state.c.owned && state.c.reg.valid();
345 }
346
347 return state.v.owned && assignment().register_valid();
348 }
349
350private:
351 AsmReg salvage_keep_used(CompilerBase *compiler) noexcept;
352
353public:
354 // only call when can_salvage returns true and a register is known to be
355 // allocated
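  //
  // Possible usage sketch mirroring into_scratch: take over an operand's
  // register as a scratch register once the operand value itself is dead
  // (op_ref and scratch are hypothetical):
  //   if (op_ref.can_salvage()) {
  //     scratch.alloc_specific(op_ref.salvage());
  //   } else {
  //     scratch.alloc(op_ref.bank());
  //   }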
356 AsmReg salvage(CompilerBase *compiler) noexcept {
357 AsmReg reg = salvage_keep_used(compiler);
358 compiler->register_file.unmark_used(reg);
359 return reg;
360 }
361
362 ValLocalIdx local_idx() const noexcept {
363 assert(has_assignment());
364 return state.v.local_idx;
365 }
366
367 u32 part() const noexcept {
368 assert(has_assignment());
369 return state.v.part;
370 }
371
372 RegBank bank() const noexcept {
373 return !has_assignment() ? state.c.bank : assignment().bank();
374 }
375
376 u32 part_size() const noexcept {
377 return !has_assignment() ? state.c.size : assignment().part_size();
378 }
379
380 std::span<const u64> const_data() const noexcept {
381 assert(is_const());
382 if (state.c.const_inline) {
383 return {&state.c.inline_data, 1};
384 }
385 return {state.c.data, (state.c.size + 7) / 8};
386 }
387
388 /// Reset the reference to the value part
389 void reset(CompilerBase *compiler) noexcept;
390};
391
392template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
393typename CompilerBase<Adaptor, Derived, Config>::AsmReg
394 CompilerBase<Adaptor, Derived, Config>::ValuePart::alloc_reg_impl(
395 CompilerBase *compiler,
396 u64 exclusion_mask,
397 const bool reload) noexcept {
398 // The caller has no control over the selected register, so it must assume
399 // that this function evicts some register, which is not permitted when the
400 // value state must not change.
401 assert(compiler->may_change_value_state());
402 assert(!state.c.reg.valid());
403
404 RegBank bank;
405 if (has_assignment()) {
406 auto ap = assignment();
407 if (ap.register_valid()) {
408 lock(compiler);
409 // TODO: implement this if needed
410 assert((exclusion_mask & (1ull << state.v.reg.id())) == 0 &&
411 "moving registers in alloc_reg is unsupported");
412 return state.v.reg;
413 }
414
415 bank = ap.bank();
416 } else {
417 bank = state.c.bank;
418 }
419
420 Reg reg = compiler->select_reg(bank, exclusion_mask);
421 auto &reg_file = compiler->register_file;
422 reg_file.mark_clobbered(reg);
423 if (has_assignment()) {
424 reg_file.mark_used(reg, state.v.local_idx, state.v.part);
425 auto ap = assignment();
426 ap.set_reg(reg);
427 ap.set_register_valid(true);
428
429 // We must lock the value here; otherwise, load_from_stack could evict the
430 // register again.
431 lock(compiler);
432
433 if (reload) {
434 compiler->derived()->reload_to_reg(reg, ap);
435 } else {
436 assert(!ap.stack_valid() && "alloc_reg called on initialized value");
437 }
438 } else {
439 reg_file.mark_used(reg, INVALID_VAL_LOCAL_IDX, 0);
440 reg_file.mark_fixed(reg);
441 state.c.reg = reg;
442 state.c.owned = true;
443
444 if (reload) {
445 assert(is_const() && "cannot reload temporary value");
446 compiler->derived()->materialize_constant(
447 const_data().data(), state.c.bank, state.c.size, reg);
448 }
449 }
450
451 return reg;
452}
453
454template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
455typename CompilerBase<Adaptor, Derived, Config>::AsmReg
456 CompilerBase<Adaptor, Derived, Config>::ValuePart::alloc_specific_impl(
457 CompilerBase *compiler, AsmReg reg, const bool reload) noexcept {
458 assert(!state.c.reg.valid());
459
460 if (has_assignment()) {
461 auto ap = assignment();
462 assert(!ap.fixed_assignment());
463
464 if (ap.register_valid() && ap.get_reg() == reg) {
465 lock(compiler);
466 return ap.get_reg();
467 }
468 }
469
470 auto &reg_file = compiler->register_file;
471 if (reg_file.is_used(reg)) {
472 compiler->evict_reg(reg);
473 }
474
475 reg_file.mark_clobbered(reg);
476 if (has_assignment()) {
477 assert(compiler->may_change_value_state());
478
479 reg_file.mark_used(reg, state.v.local_idx, state.v.part);
480 auto ap = assignment();
481 auto old_reg = AsmReg::make_invalid();
482 if (ap.register_valid()) {
483 old_reg = ap.get_reg();
484 }
485
486 ap.set_reg(reg);
487 ap.set_register_valid(true);
488
489 // We must lock the value here; otherwise, load_from_stack could evict the
490 // register again.
491 lock(compiler);
492
493 if (reload) {
494 if (old_reg.valid()) {
495 compiler->derived()->mov(reg, old_reg, ap.part_size());
496 reg_file.unmark_used(old_reg);
497 } else {
498 compiler->derived()->reload_to_reg(reg, ap);
499 }
500 } else {
501 assert(!ap.stack_valid() && "alloc_specific with valid stack slot");
502 }
503 } else {
504 reg_file.mark_used(reg, INVALID_VAL_LOCAL_IDX, 0);
505 reg_file.mark_fixed(reg);
506
507 if (reload) {
508 if (state.c.reg.valid()) {
509 // TODO: size
510 compiler->derived()->mov(reg, state.c.reg, 8);
511 reg_file.unmark_fixed(state.c.reg);
512 reg_file.unmark_used(state.c.reg);
513 } else {
514 assert(is_const() && "cannot reload temporary value");
515 compiler->derived()->materialize_constant(
516 const_data().data(), state.c.bank, state.c.size, reg);
517 }
518 }
519
520 state.c.reg = reg;
521 state.c.owned = true;
522 }
523
524 return reg;
525}
526
527template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
528typename CompilerBase<Adaptor, Derived, Config>::AsmReg
529 CompilerBase<Adaptor, Derived, Config>::ValuePart::reload_into_specific_fixed(
530 CompilerBase *compiler,
531 AsmReg reg,
532 unsigned size) noexcept {
533 if (is_const()) {
534 compiler->derived()->materialize_constant(
535 const_data().data(), state.c.bank, state.c.size, reg);
536 return reg;
537 }
538 if (!has_assignment()) {
539 assert(has_reg());
540 assert(reg != cur_reg());
541 // TODO: value size
542 assert(size != 0);
543 compiler->derived()->mov(reg, cur_reg(), size);
544 return reg;
545 }
546
547 auto ap = assignment();
548 if (has_reg()) {
549 assert(cur_reg() != reg);
550 compiler->derived()->mov(reg, cur_reg(), ap.part_size());
551 } else if (ap.register_valid()) {
552 assert(ap.get_reg() != reg);
553
554 compiler->derived()->mov(reg, ap.get_reg(), ap.part_size());
555 } else {
556 assert(!ap.fixed_assignment());
557 compiler->derived()->reload_to_reg(reg, ap);
558 }
559
560 compiler->register_file.mark_clobbered(reg);
561 return reg;
562}
563
564template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
565void CompilerBase<Adaptor, Derived, Config>::ValuePart::lock(
566 CompilerBase *compiler) noexcept {
567 assert(has_assignment());
568 assert(!has_reg());
569 auto ap = assignment();
570 assert(ap.register_valid());
571
572 const auto reg = ap.get_reg();
573 compiler->register_file.inc_lock_count(reg);
574 state.v.reg = reg;
575}
576
577template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
578void CompilerBase<Adaptor, Derived, Config>::ValuePart::unlock(
579 CompilerBase *compiler) noexcept {
580 assert(has_assignment());
581 if (!state.v.reg.valid()) {
582 return;
583 }
584
585 compiler->register_file.dec_lock_count(state.v.reg);
586 state.v.reg = AsmReg::make_invalid();
587}
588
589template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
590void CompilerBase<Adaptor, Derived, Config>::ValuePart::set_value(
591 CompilerBase *compiler, ValuePart &&other) noexcept {
592 auto &reg_file = compiler->register_file;
593 if (!has_assignment()) {
594 assert(!is_const()); // probably don't want to allow mutating constants
595
596 // This is a temporary, which might currently have a register. We want to
597 // have a temporary register that holds the value at the end.
598 if (!other.has_assignment()) {
599 // When other is a temporary/constant, just take the value and drop our
600 // own register (if we have any).
601 reset(compiler);
602 *this = std::move(other);
603 return;
604 }
605
606 if (!other.can_salvage()) {
607 // We cannot take the register of other, so copy the value
608 AsmReg cur_reg = alloc_reg(compiler);
609 other.reload_into_specific_fixed(compiler, cur_reg);
610 other.reset(compiler);
611 return;
612 }
613
614 // We can take the register of other.
615 reset(compiler);
616
617 state.c.reg = other.salvage_keep_used(compiler);
618 state.c.owned = true;
619 reg_file.mark_fixed(state.c.reg);
620 reg_file.update_reg_assignment(state.c.reg, INVALID_VAL_LOCAL_IDX, 0);
621 return;
622 }
623
624 // Update the value of the assignment part
625 auto ap = assignment();
626 assert(!ap.variable_ref() && "cannot update variable ref");
627
628 if (ap.fixed_assignment() || !other.can_salvage()) {
629#ifndef NDEBUG
630 // alloc_reg has the assertion that stack_valid must be false to prevent
631 // accidental loss of information. set_value behaves more like an explicit
632 // assignment, so we permit this overwrite -- but need to disable the
633 // assertion.
634 ap.set_modified(true);
635#endif
636 // Source value owns no register or it is not reusable: copy value
637 AsmReg cur_reg = alloc_reg(compiler);
638 other.reload_into_specific_fixed(compiler, cur_reg, ap.part_size());
639 other.reset(compiler);
640 unlock(compiler);
641 ap.set_register_valid(true);
642 ap.set_modified(true);
643 return;
644 }
645
646 // Reuse register of other assignment
647 if (ap.register_valid()) {
648 // If we currently have a register, drop it
649 unlock(compiler);
650 auto cur_reg = ap.get_reg();
651 assert(!reg_file.is_fixed(cur_reg));
652 reg_file.unmark_used(cur_reg);
653 }
654
655 AsmReg new_reg = other.salvage_keep_used(compiler);
656 reg_file.update_reg_assignment(new_reg, local_idx(), part());
657 ap.set_reg(new_reg);
658 ap.set_register_valid(true);
659 ap.set_modified(true);
660}
661
662template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
663void CompilerBase<Adaptor, Derived, Config>::ValuePart::set_value(
664 CompilerBase *compiler, ScratchReg &&other) noexcept {
665 assert(compiler->may_change_value_state());
666
667 auto &reg_file = compiler->register_file;
668
669 // We could support this, but there shouldn't be a need for that.
670 assert(other.has_reg() && "cannot initialize with invalid register");
671 Reg value_reg = other.cur_reg();
672 assert(reg_file.is_fixed(value_reg));
673 assert(reg_file.is_used(value_reg));
674 assert(reg_file.is_clobbered(value_reg));
675 assert(!state.c.reg.valid() &&
676 "attempted to overwrite already initialized and locked ValuePartRef");
677
678 if (!has_assignment()) {
679 assert(!is_const() && "cannot mutate constant ValuePartRef");
680 state.c.reg = value_reg;
681 state.c.owned = true;
682 assert(reg_file.reg_local_idx(value_reg) == INVALID_VAL_LOCAL_IDX);
683 assert(reg_file.reg_part(value_reg) == 0);
684 other.force_set_reg(AsmReg::make_invalid());
685 return;
686 }
687
688 // Update the value of the assignment part
689 auto ap = assignment();
690 assert(!ap.variable_ref() && "cannot update variable ref");
691
692 if (ap.fixed_assignment()) {
693 // For fixed assignments, copy the value into the fixed register.
694 auto cur_reg = ap.get_reg();
695 assert(reg_file.is_used(cur_reg));
696 assert(reg_file.is_fixed(cur_reg));
697 assert(reg_file.reg_local_idx(cur_reg) == local_idx());
698 assert(ap.register_valid() && !ap.stack_valid() &&
699 "invalid state for fixed assignment");
700 assert(cur_reg != value_reg);
701 compiler->derived()->mov(cur_reg, value_reg, ap.part_size());
702 other.reset();
703 return;
704 }
705
706 // Otherwise, take the register.
707 assert(!ap.register_valid() && !ap.stack_valid() &&
708 "attempted to overwrite already initialized ValuePartRef");
709
710 // ScratchReg's reg is fixed and used => unfix, keep used, update assignment
711 reg_file.unmark_fixed(value_reg);
712 reg_file.update_reg_assignment(value_reg, local_idx(), part());
713 ap.set_reg(value_reg);
714 ap.set_register_valid(true);
715 ap.set_modified(true);
716 other.force_set_reg(AsmReg::make_invalid());
717}
718
719template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
720void CompilerBase<Adaptor, Derived, Config>::ValuePart::set_value_reg(
721 CompilerBase *compiler, AsmReg value_reg) noexcept {
722 assert(compiler->may_change_value_state());
723
724 auto &reg_file = compiler->register_file;
725
726 // We could support this, but there shouldn't be a need for that.
727 assert(value_reg.valid() && "cannot initialize with invalid register");
728 assert(!state.c.reg.valid() &&
729 "attempted to overwrite already initialized and locked ValuePartRef");
730
731 if (!has_assignment()) {
732 assert(!is_const() && "cannot mutate constant ValuePartRef");
733 state.c.reg = value_reg;
734 state.c.owned = true;
735 reg_file.mark_used(state.c.reg, INVALID_VAL_LOCAL_IDX, 0);
736 reg_file.mark_fixed(state.c.reg);
737 return;
738 }
739
740 // Update the value of the assignment part
741 auto ap = assignment();
742 assert(!ap.variable_ref() && "cannot update variable ref");
743
744 if (ap.fixed_assignment()) {
745 // For fixed assignments, copy the value into the fixed register.
746 auto cur_reg = ap.get_reg();
747 assert(reg_file.is_used(cur_reg));
748 assert(reg_file.is_fixed(cur_reg));
749 assert(reg_file.reg_local_idx(cur_reg) == local_idx());
750 // TODO: can this happen? If so, conditionally emit move.
751 assert(cur_reg != value_reg);
752 compiler->derived()->mov(cur_reg, value_reg, ap.part_size());
753 ap.set_register_valid(true);
754 ap.set_modified(true);
755 return;
756 }
757
758 // Otherwise, take the register.
759 assert(!ap.register_valid() && !ap.stack_valid() &&
760 "attempted to overwrite already initialized ValuePartRef");
761
762 reg_file.mark_used(value_reg, local_idx(), part());
763 reg_file.mark_clobbered(value_reg);
764 ap.set_reg(value_reg);
765 ap.set_register_valid(true);
766 ap.set_modified(true);
767}
768
769template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
770typename CompilerBase<Adaptor, Derived, Config>::AsmReg
771 CompilerBase<Adaptor, Derived, Config>::ValuePart::salvage_keep_used(
772 CompilerBase *compiler) noexcept {
773 assert(compiler->may_change_value_state());
774 assert(can_salvage());
775 if (!has_assignment()) {
776 AsmReg reg = state.c.reg;
777 compiler->register_file.unmark_fixed(reg);
778 state.c.reg = AsmReg::make_invalid();
779 return reg;
780 }
781
782 auto ap = assignment();
783 assert(ap.register_valid());
784 auto cur_reg = ap.get_reg();
785
786 unlock(compiler);
787 assert(ap.fixed_assignment() || !compiler->register_file.is_fixed(cur_reg));
788 if (ap.fixed_assignment()) {
789 compiler->register_file.dec_lock_count(cur_reg); // release fixed register
790 --compiler->assignments.cur_fixed_assignment_count[ap.bank().id()];
791 }
792
793 ap.set_register_valid(false);
794 ap.set_fixed_assignment(false);
795 return cur_reg;
796}
797
798template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
799void CompilerBase<Adaptor, Derived, Config>::ValuePart::reset(
800 CompilerBase *compiler) noexcept {
801 AsmReg reg = state.c.reg;
802 if (!reg.valid()) {
803 return;
804 }
805
806#ifndef NDEBUG
807 // In debug builds, touch assignment to catch cases where the assignment was
808 // already freed.
809 assert(!has_assignment() || assignment().modified() || true);
810#endif
811
812 if (state.c.owned) {
813 if (has_assignment()) {
814 AssignmentPartRef ap = assignment();
815 bool fixed = ap.fixed_assignment();
816 ap.set_register_valid(false);
817 ap.set_fixed_assignment(false);
818 compiler->register_file.dec_lock_count_must_zero(reg, fixed ? 2 : 1);
819 if (fixed) {
820 --compiler->assignments.cur_fixed_assignment_count[ap.bank().id()];
821 }
822 } else {
823 compiler->register_file.unmark_fixed(reg);
824 }
825 compiler->register_file.unmark_used(reg);
826 } else if (has_assignment()) {
827 compiler->register_file.dec_lock_count(reg);
828 }
829
830 state.c.reg = AsmReg::make_invalid();
831}
832
833template <IRAdaptor Adaptor, typename Derived, CompilerConfig Config>
834struct CompilerBase<Adaptor, Derived, Config>::ValuePartRef : ValuePart {
835 CompilerBase *compiler;
836
837 template <typename... Args>
838 ValuePartRef(CompilerBase *compiler, Args &&...args) noexcept
839 : ValuePart(std::forward<Args>(args)...), compiler(compiler) {}
840
841 explicit ValuePartRef(const ValuePartRef &) = delete;
842
843 ValuePartRef(ValuePartRef &&other) noexcept
844 : ValuePart(std::move(other)), compiler(other.compiler) {}
845
846 ~ValuePartRef() noexcept { reset(); }
847
848 ValuePartRef &operator=(const ValuePartRef &) = delete;
849
850 ValuePartRef &operator=(ValuePartRef &&other) noexcept {
851 if (this == &other) {
852 return *this;
853 }
854 reset();
855 ValuePart::operator=(std::move(other));
856 return *this;
857 }
858
859 ValuePartRef &operator=(ValuePart &&other) noexcept {
860 reset();
861 ValuePart::operator=(std::move(other));
862 return *this;
863 }
864
865 AsmReg alloc_reg(u64 exclusion_mask = 0) noexcept {
866 return ValuePart::alloc_reg(compiler, exclusion_mask);
867 }
868
869 AsmReg alloc_try_reuse(ValuePart &ref) noexcept {
870 return ValuePart::alloc_try_reuse(compiler, ref);
871 }
872
873 void alloc_specific(AsmReg reg) noexcept {
874 ValuePart::alloc_specific(compiler, reg);
875 }
876
877 AsmReg load_to_reg() noexcept { return ValuePart::load_to_reg(compiler); }
878
879 void load_to_specific(AsmReg reg) noexcept {
880 ValuePart::load_to_specific(compiler, reg);
881 }
882
883 AsmReg reload_into_specific_fixed(AsmReg reg, unsigned size = 0) noexcept {
884 return ValuePart::reload_into_specific_fixed(compiler, reg, size);
885 }
886
887 AsmReg reload_into_specific_fixed(CompilerBase *compiler,
888 AsmReg reg,
889 unsigned size = 0) noexcept {
890 return ValuePart::reload_into_specific_fixed(compiler, reg, size);
891 }
892
893 ValuePartRef get_unowned_ref() noexcept {
894 return ValuePartRef{compiler, ValuePart::get_unowned()};
895 }
896
897 ValuePartRef into_temporary() && noexcept {
898 return ValuePartRef{
899 compiler,
900 std::move(*static_cast<ValuePart *>(this)).into_temporary(compiler)};
901 }
902
903 ScratchReg into_scratch() && noexcept {
904 return std::move(*static_cast<ValuePart *>(this)).into_scratch(compiler);
905 }
906
907 ValuePartRef into_extended(bool sign, u32 from, u32 to) && noexcept {
908 return ValuePartRef{compiler,
909 std::move(*static_cast<ValuePart *>(this))
910 .into_extended(compiler, sign, from, to)};
911 }
912
913 void lock() noexcept { ValuePart::lock(compiler); }
914 void unlock() noexcept { ValuePart::unlock(compiler); }
915
916 void set_value(ValuePart &&other) noexcept {
917 ValuePart::set_value(compiler, std::move(other));
918 }
919
920 void set_value(ScratchReg &&other) noexcept {
921 ValuePart::set_value(compiler, std::move(other));
922 }
923
924 void set_value_reg(AsmReg value_reg) noexcept {
925 ValuePart::set_value_reg(compiler, value_reg);
926 }
927
928 AsmReg salvage() noexcept { return ValuePart::salvage(compiler); }
929
930 void reset() noexcept { ValuePart::reset(compiler); }
931};
932
933} // namespace tpde