// static_inst.cc (12499:b81688796004 -> 12510:b8203d3676fc)
/*
 * Copyright (c) 2010-2014, 2016-2018 ARM Limited
 * Copyright (c) 2013 Advanced Micro Devices, Inc.
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Stephen Hines
 */

#include "arch/arm/insts/static_inst.hh"

#include "arch/arm/faults.hh"
#include "base/condcodes.hh"
#include "base/cprintf.hh"
#include "base/loader/symtab.hh"
#include "cpu/reg_class.hh"

namespace ArmISA
{
// Shift Rm by an immediate value
int32_t
ArmStaticInst::shift_rm_imm(uint32_t base, uint32_t shamt,
                            uint32_t type, uint32_t cfval) const
{
    assert(shamt < 32);
    ArmShiftType shiftType;
    shiftType = (ArmShiftType)type;

    switch (shiftType)
    {
      case LSL:
        return base << shamt;
      case LSR:
        if (shamt == 0)
            return 0;
        else
            return base >> shamt;
      case ASR:
        if (shamt == 0)
            return (base >> 31) | -((base & (1 << 31)) >> 31);
        else
            return (base >> shamt) | -((base & (1 << 31)) >> shamt);
      case ROR:
        if (shamt == 0)
            return (cfval << 31) | (base >> 1); // RRX
        else
            return (base << (32 - shamt)) | (base >> shamt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

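// Shift a 64-bit register value for the AArch64 shifted-register
// data-processing forms. The shift amount is reduced modulo the operand
// width (32 or 64 bits) before the shift is applied.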
int64_t
ArmStaticInst::shiftReg64(uint64_t base, uint64_t shiftAmt,
                          ArmShiftType type, uint8_t width) const
{
    shiftAmt = shiftAmt % width;
    ArmShiftType shiftType;
    shiftType = (ArmShiftType)type;

    switch (shiftType)
    {
      case LSL:
        return base << shiftAmt;
      case LSR:
        if (shiftAmt == 0)
            return base;
        else
            return (base & mask(width)) >> shiftAmt;
      case ASR:
        if (shiftAmt == 0) {
            return base;
        } else {
            int sign_bit = bits(base, intWidth - 1);
            base >>= shiftAmt;
            base = sign_bit ? (base | ~mask(intWidth - shiftAmt)) : base;
            return base & mask(intWidth);
        }
      case ROR:
        if (shiftAmt == 0)
            return base;
        else
            return (base << (width - shiftAmt)) | (base >> shiftAmt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

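// Extend and shift a register value for the AArch64 extended-register
// forms: the low bits selected by the extend type are shifted left by
// shiftAmt, sign-extended if requested, and truncated to 'width' bits.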
int64_t
ArmStaticInst::extendReg64(uint64_t base, ArmExtendType type,
                           uint64_t shiftAmt, uint8_t width) const
{
    bool sign_extend = false;
    int len = 0;
    switch (type) {
      case UXTB:
        len = 8;
        break;
      case UXTH:
        len = 16;
        break;
      case UXTW:
        len = 32;
        break;
      case UXTX:
        len = 64;
        break;
      case SXTB:
        len = 8;
        sign_extend = true;
        break;
      case SXTH:
        len = 16;
        sign_extend = true;
        break;
      case SXTW:
        len = 32;
        sign_extend = true;
        break;
      case SXTX:
        len = 64;
        sign_extend = true;
        break;
    }
    len = len <= width - shiftAmt ? len : width - shiftAmt;
    uint64_t tmp = (uint64_t) bits(base, len - 1, 0) << shiftAmt;
    if (sign_extend) {
        int sign_bit = bits(tmp, len + shiftAmt - 1);
        tmp = sign_bit ? (tmp | ~mask(len + shiftAmt)) : tmp;
    }
    return tmp & mask(width);
}

// Shift Rm by Rs
int32_t
ArmStaticInst::shift_rm_rs(uint32_t base, uint32_t shamt,
                           uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    switch (shiftType)
    {
      case LSL:
        if (shamt >= 32)
            return 0;
        else
            return base << shamt;
      case LSR:
        if (shamt >= 32)
            return 0;
        else
            return base >> shamt;
      case ASR:
        if (shamt >= 32)
            return (base >> 31) | -((base & (1 << 31)) >> 31);
        else
            return (base >> shamt) | -((base & (1 << 31)) >> shamt);
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            return base;
        else
            return (base << (32 - shamt)) | (base >> shamt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}


// Generate C for a shift by immediate
bool
ArmStaticInst::shift_carry_imm(uint32_t base, uint32_t shamt,
                               uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    switch (shiftType)
    {
      case LSL:
        if (shamt == 0)
            return cfval;
        else
            return (base >> (32 - shamt)) & 1;
      case LSR:
        if (shamt == 0)
            return (base >> 31);
        else
            return (base >> (shamt - 1)) & 1;
      case ASR:
        if (shamt == 0)
            return (base >> 31);
        else
            return (base >> (shamt - 1)) & 1;
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            return (base & 1); // RRX
        else
            return (base >> (shamt - 1)) & 1;
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}


// Generate C for a shift by Rs
bool
ArmStaticInst::shift_carry_rs(uint32_t base, uint32_t shamt,
                              uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    if (shamt == 0)
        return cfval;

    switch (shiftType)
    {
      case LSL:
        if (shamt > 32)
            return 0;
        else
            return (base >> (32 - shamt)) & 1;
      case LSR:
        if (shamt > 32)
            return 0;
        else
            return (base >> (shamt - 1)) & 1;
      case ASR:
        if (shamt > 32)
            shamt = 32;
        return (base >> (shamt - 1)) & 1;
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            shamt = 32;
        return (base >> (shamt - 1)) & 1;
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

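// Disassembly helper: print an integer register using the AArch64
// w/x/sp/zr names when in aarch64 state, or the AArch32 pc/sp/fp/lr/rN
// names otherwise.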
void
ArmStaticInst::printIntReg(std::ostream &os, RegIndex reg_idx) const
{
    if (aarch64) {
        if (reg_idx == INTREG_UREG0)
            ccprintf(os, "ureg0");
        else if (reg_idx == INTREG_SPX)
            ccprintf(os, "%s%s", (intWidth == 32) ? "w" : "", "sp");
        else if (reg_idx == INTREG_X31)
            ccprintf(os, "%szr", (intWidth == 32) ? "w" : "x");
        else
            ccprintf(os, "%s%d", (intWidth == 32) ? "w" : "x", reg_idx);
    } else {
        switch (reg_idx) {
          case PCReg:
            ccprintf(os, "pc");
            break;
          case StackPointerReg:
            ccprintf(os, "sp");
            break;
          case FramePointerReg:
            ccprintf(os, "fp");
            break;
          case ReturnAddressReg:
            ccprintf(os, "lr");
            break;
          default:
            ccprintf(os, "r%d", reg_idx);
            break;
        }
    }
}

void
ArmStaticInst::printFloatReg(std::ostream &os, RegIndex reg_idx) const
{
    ccprintf(os, "f%d", reg_idx);
}

void
ArmStaticInst::printVecReg(std::ostream &os, RegIndex reg_idx) const
{
    ccprintf(os, "v%d", reg_idx);
}

void
ArmStaticInst::printCCReg(std::ostream &os, RegIndex reg_idx) const
{
    ccprintf(os, "cc_%s", ArmISA::ccRegName[reg_idx]);
}

void
ArmStaticInst::printMiscReg(std::ostream &os, RegIndex reg_idx) const
{
    assert(reg_idx < NUM_MISCREGS);
    ccprintf(os, "%s", ArmISA::miscRegName[reg_idx]);
}

void
ArmStaticInst::printMnemonic(std::ostream &os,
                             const std::string &suffix,
                             bool withPred,
                             bool withCond64,
                             ConditionCode cond64) const
{
    os << " " << mnemonic;
    if (withPred && !aarch64) {
        printCondition(os, machInst.condCode);
        os << suffix;
    } else if (withCond64) {
        os << ".";
        printCondition(os, cond64);
        os << suffix;
    }
    if (machInst.bigThumb)
        os << ".w";
    os << " ";
}

void
ArmStaticInst::printTarget(std::ostream &os, Addr target,
                           const SymbolTable *symtab) const
{
    Addr symbolAddr;
    std::string symbol;

    if (symtab && symtab->findNearestSymbol(target, symbol, symbolAddr)) {
        ccprintf(os, "<%s", symbol);
        if (symbolAddr != target)
            ccprintf(os, "+%d>", target - symbolAddr);
        else
            ccprintf(os, ">");
    } else {
        ccprintf(os, "%#x", target);
    }
}

void
ArmStaticInst::printCondition(std::ostream &os,
                              unsigned code,
                              bool noImplicit) const
{
    switch (code) {
      case COND_EQ:
        os << "eq";
        break;
      case COND_NE:
        os << "ne";
        break;
      case COND_CS:
        os << "cs";
        break;
      case COND_CC:
        os << "cc";
        break;
      case COND_MI:
        os << "mi";
        break;
      case COND_PL:
        os << "pl";
        break;
      case COND_VS:
        os << "vs";
        break;
      case COND_VC:
        os << "vc";
        break;
      case COND_HI:
        os << "hi";
        break;
      case COND_LS:
        os << "ls";
        break;
      case COND_GE:
        os << "ge";
        break;
      case COND_LT:
        os << "lt";
        break;
      case COND_GT:
        os << "gt";
        break;
      case COND_LE:
        os << "le";
        break;
      case COND_AL:
        // This one is implicit.
        if (noImplicit)
            os << "al";
        break;
      case COND_UC:
        // Unconditional.
        if (noImplicit)
            os << "uc";
        break;
      default:
        panic("Unrecognized condition code %d.\n", code);
    }
}

void
ArmStaticInst::printMemSymbol(std::ostream &os,
                              const SymbolTable *symtab,
                              const std::string &prefix,
                              const Addr addr,
                              const std::string &suffix) const
{
    Addr symbolAddr;
    std::string symbol;
    if (symtab && symtab->findNearestSymbol(addr, symbol, symbolAddr)) {
        ccprintf(os, "%s%s", prefix, symbol);
        if (symbolAddr != addr)
            ccprintf(os, "+%d", addr - symbolAddr);
        ccprintf(os, suffix);
    }
}

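// Print the optional shifted-register operand of an AArch32
// data-processing instruction. An immediate LSL #0 is omitted, an
// immediate ROR #0 is printed as RRX, and immediate LSR/ASR #0 encode a
// shift by 32.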
void
ArmStaticInst::printShiftOperand(std::ostream &os,
                                 IntRegIndex rm,
                                 bool immShift,
                                 uint32_t shiftAmt,
                                 IntRegIndex rs,
                                 ArmShiftType type) const
{
    bool firstOp = false;

    if (rm != INTREG_ZERO) {
        printIntReg(os, rm);
    }

    bool done = false;

    if ((type == LSR || type == ASR) && immShift && shiftAmt == 0)
        shiftAmt = 32;

    switch (type) {
      case LSL:
        if (immShift && shiftAmt == 0) {
            done = true;
            break;
        }
        if (!firstOp)
            os << ", ";
        os << "LSL";
        break;
      case LSR:
        if (!firstOp)
            os << ", ";
        os << "LSR";
        break;
      case ASR:
        if (!firstOp)
            os << ", ";
        os << "ASR";
        break;
      case ROR:
        if (immShift && shiftAmt == 0) {
            if (!firstOp)
                os << ", ";
            os << "RRX";
            done = true;
            break;
        }
        if (!firstOp)
            os << ", ";
        os << "ROR";
        break;
      default:
        panic("Tried to disassemble unrecognized shift type.\n");
    }
    if (!done) {
        if (!firstOp)
            os << " ";
        if (immShift)
            os << "#" << shiftAmt;
        else
            printIntReg(os, rs);
    }
}

void
ArmStaticInst::printExtendOperand(bool firstOperand, std::ostream &os,
                                  IntRegIndex rm, ArmExtendType type,
                                  int64_t shiftAmt) const
{
    if (!firstOperand)
        ccprintf(os, ", ");
    printIntReg(os, rm);
    if (type == UXTX && shiftAmt == 0)
        return;
    switch (type) {
      case UXTB: ccprintf(os, ", UXTB");
        break;
      case UXTH: ccprintf(os, ", UXTH");
        break;
      case UXTW: ccprintf(os, ", UXTW");
        break;
      case UXTX: ccprintf(os, ", LSL");
        break;
      case SXTB: ccprintf(os, ", SXTB");
        break;
      case SXTH: ccprintf(os, ", SXTH");
        break;
      case SXTW: ccprintf(os, ", SXTW");
        break;
      case SXTX: ccprintf(os, ", SXTW");
        break;
    }
    if (type == UXTX || shiftAmt)
        ccprintf(os, " #%d", shiftAmt);
}

void
ArmStaticInst::printDataInst(std::ostream &os, bool withImm,
        bool immShift, bool s, IntRegIndex rd, IntRegIndex rn,
        IntRegIndex rm, IntRegIndex rs, uint32_t shiftAmt,
        ArmShiftType type, uint64_t imm) const
{
    printMnemonic(os, s ? "s" : "");
    bool firstOp = true;

    // Destination
    if (rd != INTREG_ZERO) {
        firstOp = false;
        printIntReg(os, rd);
    }

    // Source 1.
    if (rn != INTREG_ZERO) {
        if (!firstOp)
            os << ", ";
        firstOp = false;
        printIntReg(os, rn);
    }

    if (!firstOp)
        os << ", ";
    if (withImm) {
        ccprintf(os, "#%ld", imm);
    } else {
        printShiftOperand(os, rm, immShift, shiftAmt, rs, type);
    }
}

std::string
ArmStaticInst::generateDisassembly(Addr pc,
                                   const SymbolTable *symtab) const
{
    std::stringstream ss;
    printMnemonic(ss);
    return ss.str();
}


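// Build the fault raised when an AdvSIMD/FP access is trapped to the
// given exception level (EC_TRAPPED_SIMD_FP).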
Fault
ArmStaticInst::advSIMDFPAccessTrap64(ExceptionLevel el) const
{
    switch (el) {
      case EL1:
        return std::make_shared<SupervisorTrap>(machInst, 0x1E00000,
                                                EC_TRAPPED_SIMD_FP);
      case EL2:
        return std::make_shared<HypervisorTrap>(machInst, 0x1E00000,
                                                EC_TRAPPED_SIMD_FP);
      case EL3:
        return std::make_shared<SecureMonitorTrap>(machInst, 0x1E00000,
                                                   EC_TRAPPED_SIMD_FP);

      default:
        panic("Illegal EL in advSIMDFPAccessTrap64\n");
    }
}


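// Check the AArch64 CPTR_EL2.TFP and CPTR_EL3.TFP controls and return the
// corresponding trap if FP/AdvSIMD accesses are being trapped to EL2 or
// EL3.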
Fault
ArmStaticInst::checkFPAdvSIMDTrap64(ThreadContext *tc, CPSR cpsr) const
{
    const ExceptionLevel el = (ExceptionLevel) (uint8_t)cpsr.el;

    if (ArmSystem::haveVirtualization(tc) && el <= EL2) {
        HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL2);
        if (cptrEnCheck.tfp)
            return advSIMDFPAccessTrap64(EL2);
    }

    if (ArmSystem::haveSecurity(tc)) {
        HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL3);
        if (cptrEnCheck.tfp)
            return advSIMDFPAccessTrap64(EL3);
    }

    return NoFault;
}

Fault
ArmStaticInst::checkFPAdvSIMDEnabled64(ThreadContext *tc,
                                       CPSR cpsr, CPACR cpacr) const
{
    const ExceptionLevel el = (ExceptionLevel) (uint8_t)cpsr.el;
    if ((el == EL0 && cpacr.fpen != 0x3) ||
        (el == EL1 && !(cpacr.fpen & 0x1)))
        return advSIMDFPAccessTrap64(EL1);

    return checkFPAdvSIMDTrap64(tc, cpsr);
}

Fault
ArmStaticInst::checkAdvSIMDOrFPEnabled32(ThreadContext *tc,
                                         CPSR cpsr, CPACR cpacr,
                                         NSACR nsacr, FPEXC fpexc,
                                         bool fpexc_check, bool advsimd) const
{
    const bool have_virtualization = ArmSystem::haveVirtualization(tc);
    const bool have_security = ArmSystem::haveSecurity(tc);
    const bool is_secure = inSecureState(tc);
    const ExceptionLevel cur_el = opModeToEL(currOpMode(tc));

    if (cur_el == EL0 && ELIs64(tc, EL1))
        return checkFPAdvSIMDEnabled64(tc, cpsr, cpacr);

    uint8_t cpacr_cp10 = cpacr.cp10;
    bool cpacr_asedis = cpacr.asedis;

    if (have_security && !ELIs64(tc, EL3) && !is_secure) {
        if (nsacr.nsasedis)
            cpacr_asedis = true;
        if (nsacr.cp10 == 0)
            cpacr_cp10 = 0;
    }

    if (cur_el != EL2) {
        if (advsimd && cpacr_asedis)
            return disabledFault();

        if ((cur_el == EL0 && cpacr_cp10 != 0x3) ||
            (cur_el != EL0 && !(cpacr_cp10 & 0x1)))
            return disabledFault();
    }

    if (fpexc_check && !fpexc.en)
        return disabledFault();

    // -- aarch32/exceptions/traps/AArch32.CheckFPAdvSIMDTrap --

    if (have_virtualization && !is_secure && ELIs64(tc, EL2))
        return checkFPAdvSIMDTrap64(tc, cpsr);

    if (have_virtualization && !is_secure) {
        HCPTR hcptr = tc->readMiscReg(MISCREG_HCPTR);
        bool hcptr_cp10 = hcptr.tcp10;
        bool hcptr_tase = hcptr.tase;

        if (have_security && !ELIs64(tc, EL3) && !is_secure) {
            if (nsacr.nsasedis)
                hcptr_tase = true;
            if (nsacr.cp10)
                hcptr_cp10 = true;
        }

        if ((advsimd && hcptr_tase) || hcptr_cp10) {
            const uint32_t iss = advsimd ? (1 << 5) : 0xA;
            if (cur_el == EL2) {
                return std::make_shared<UndefinedInstruction>(
                    machInst, iss,
                    EC_TRAPPED_HCPTR, mnemonic);
            } else {
                return std::make_shared<HypervisorTrap>(
                    machInst, iss,
                    EC_TRAPPED_HCPTR);
            }

        }
    }

    if (have_security && ELIs64(tc, EL3)) {
        HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL3);
        if (cptrEnCheck.tfp)
            return advSIMDFPAccessTrap64(EL3);
    }

    return NoFault;
}

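// Determine whether WFE/WFI should trap to tgtEl, based on
// SCTLR_EL1.{nTWE,nTWI}, HCR_EL2.{TWE,TWI} and SCR_EL3.{TWE,TWI}.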
inline bool
ArmStaticInst::isWFxTrapping(ThreadContext *tc,
                             ExceptionLevel tgtEl,
                             bool isWfe) const
{
    bool trap = false;
    SCTLR sctlr = ((SCTLR)tc->readMiscReg(MISCREG_SCTLR_EL1));
    HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2));
    SCR scr = ((SCR)tc->readMiscReg(MISCREG_SCR_EL3));

    switch (tgtEl) {
      case EL1:
        trap = isWfe? !sctlr.ntwe : !sctlr.ntwi;
        break;
      case EL2:
        trap = isWfe? hcr.twe : hcr.twi;
        break;
      case EL3:
        trap = isWfe? scr.twe : scr.twi;
        break;
      default:
        break;
    }

    return trap;
}

Fault
ArmStaticInst::checkForWFxTrap32(ThreadContext *tc,
                                 ExceptionLevel targetEL,
                                 bool isWfe) const
{
    // Check if the target exception level is implemented.
    assert(ArmSystem::haveEL(tc, targetEL));

    // Check for routing to AArch64: this happens if the
    // target exception level (where the trap will be handled)
    // is using AArch64.
    if (ELIs64(tc, targetEL)) {
        return checkForWFxTrap64(tc, targetEL, isWfe);
    }

    // Check if the processor needs to trap at the selected exception level.
    bool trap = isWFxTrapping(tc, targetEL, isWfe);

    if (trap) {
        uint32_t iss = isWfe? 0x1E00001 : /* WFE Instruction syndrome */
                              0x1E00000; /* WFI Instruction syndrome */
        switch (targetEL) {
          case EL1:
            return std::make_shared<UndefinedInstruction>(
                machInst, iss,
                EC_TRAPPED_WFI_WFE, mnemonic);
          case EL2:
            return std::make_shared<HypervisorTrap>(machInst, iss,
                                                    EC_TRAPPED_WFI_WFE);
          case EL3:
            return std::make_shared<SecureMonitorTrap>(machInst, iss,
                                                       EC_TRAPPED_WFI_WFE);
          default:
            panic("Unrecognized Exception Level: %d\n", targetEL);
        }
    }

    return NoFault;
}

Fault
ArmStaticInst::checkForWFxTrap64(ThreadContext *tc,
                                 ExceptionLevel targetEL,
                                 bool isWfe) const
{
    // Check if the target exception level is implemented.
    assert(ArmSystem::haveEL(tc, targetEL));

    // Check if the processor needs to trap at the selected exception level.
    bool trap = isWFxTrapping(tc, targetEL, isWfe);

    if (trap) {
        uint32_t iss = isWfe? 0x1E00001 : /* WFE Instruction syndrome */
                              0x1E00000; /* WFI Instruction syndrome */
        switch (targetEL) {
          case EL1:
            return std::make_shared<SupervisorTrap>(machInst, iss,
                                                    EC_TRAPPED_WFI_WFE);
          case EL2:
            return std::make_shared<HypervisorTrap>(machInst, iss,
                                                    EC_TRAPPED_WFI_WFE);
          case EL3:
            return std::make_shared<SecureMonitorTrap>(machInst, iss,
                                                       EC_TRAPPED_WFI_WFE);
          default:
            panic("Unrecognized Exception Level: %d\n", targetEL);
        }
    }

    return NoFault;
}

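// Check the WFE/WFI trap controls in priority order: EL1 (only when
// executing at EL0), then EL2 (non-secure, from EL0 or EL1), then EL3
// (from any exception level below EL3).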
Fault
ArmStaticInst::trapWFx(ThreadContext *tc,
                       CPSR cpsr, SCR scr,
                       bool isWfe) const
{
    Fault fault = NoFault;
    if (cpsr.el == EL0) {
        fault = checkForWFxTrap32(tc, EL1, isWfe);
    }

    if ((fault == NoFault) &&
        ArmSystem::haveEL(tc, EL2) && !inSecureState(scr, cpsr) &&
        ((cpsr.el == EL0) || (cpsr.el == EL1))) {

        fault = checkForWFxTrap32(tc, EL2, isWfe);
    }

    if ((fault == NoFault) &&
        ArmSystem::haveEL(tc, EL3) && cpsr.el != EL3) {
        fault = checkForWFxTrap32(tc, EL3, isWfe);
    }

    return fault;
}

Fault
ArmStaticInst::checkSETENDEnabled(ThreadContext *tc, CPSR cpsr) const
{
    bool setend_disabled(false);
    ExceptionLevel pstateEL = (ExceptionLevel)(uint8_t)(cpsr.el);

    if (pstateEL == EL2) {
        setend_disabled = ((SCTLR)tc->readMiscRegNoEffect(MISCREG_HSCTLR)).sed;
    } else {
        // Please note: the ARM ARM pseudocode distinguishes whether EL1
        // is aarch32 or aarch64:
        // if ELUsingAArch32(EL1) then SCTLR.SED else SCTLR[].SED;
        // Considering that SETEND is aarch32 only, ELUsingAArch32(EL1)
        // will always be true (hence using SCTLR.SED) except for
        // instructions executed at EL0 with an AArch64 EL1.
        // In this case SCTLR_EL1 will be used. In gem5 the register is
        // mapped to SCTLR_ns. We can safely use SCTLR and choose the
        // appropriate banked version.

        // Get the index of the banked version of SCTLR:
        // SCTLR_s or SCTLR_ns.
        auto banked_sctlr = snsBankedIndex(
            MISCREG_SCTLR, tc, !inSecureState(tc));

        // The SCTLR.SED bit enables/disables the use of the SETEND
        // instruction.
        setend_disabled = ((SCTLR)tc->readMiscRegNoEffect(banked_sctlr)).sed;
    }

    return setend_disabled ? undefinedFault32(tc, pstateEL) :
                             NoFault;
}

Fault
ArmStaticInst::undefinedFault32(ThreadContext *tc,
                                ExceptionLevel pstateEL) const
{
    // Even if we are running in aarch32, the fault might be dealt with by
    // the aarch64 ISA.
    if (generalExceptionsToAArch64(tc, pstateEL)) {
        return undefinedFault64(tc, pstateEL);
    } else {
        // Please note: according to the ARM ARM pseudocode we should also
        // handle the case when EL2 is aarch64 and HCR.TGE is 1.
        // However this case is already handled by the routeToHyp method in
        // the ArmFault class.
        return std::make_shared<UndefinedInstruction>(
            machInst, 0,
            EC_UNKNOWN, mnemonic);
    }
}

Fault
ArmStaticInst::undefinedFault64(ThreadContext *tc,
                                ExceptionLevel pstateEL) const
{
    switch (pstateEL) {
      case EL0:
      case EL1:
        return std::make_shared<SupervisorTrap>(machInst, 0, EC_UNKNOWN);
      case EL2:
        return std::make_shared<HypervisorTrap>(machInst, 0, EC_UNKNOWN);
      case EL3:
        return std::make_shared<SecureMonitorTrap>(machInst, 0, EC_UNKNOWN);
      default:
        panic("Unrecognized Exception Level: %d\n", pstateEL);
        break;
    }

    return NoFault;
}

static uint8_t
getRestoredITBits(ThreadContext *tc, CPSR spsr)
{
    // See: shared/functions/system/RestoredITBits in the ARM ARM

    const ExceptionLevel el = opModeToEL((OperatingMode) (uint8_t)spsr.mode);
    const uint8_t it = itState(spsr);

    if (!spsr.t || spsr.il)
        return 0;

    // The IT bits are forced to zero when they are set to a reserved
    // value.
    if (bits(it, 7, 4) != 0 && bits(it, 3, 0) == 0)
        return 0;

    const bool itd = el == EL2 ?
        ((SCTLR)tc->readMiscReg(MISCREG_HSCTLR)).itd :
        ((SCTLR)tc->readMiscReg(MISCREG_SCTLR)).itd;

    // The IT bits are forced to zero when returning to A32 state, or
    // when returning to an EL with the ITD bit set to 1, and the IT
    // bits are describing a multi-instruction block.
    if (itd && bits(it, 2, 0) != 0)
        return 0;

    return it;
}

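// Check whether an exception return to the state described by the SPSR
// would be illegal: bad mode, return to a higher or unimplemented
// exception level, register-width mismatch with the target EL, or a
// malformed AArch64 SPSR.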
static bool
illegalExceptionReturn(ThreadContext *tc, CPSR cpsr, CPSR spsr)
{
    const OperatingMode mode = (OperatingMode) (uint8_t)spsr.mode;
    if (badMode(mode))
        return true;

    const OperatingMode cur_mode = (OperatingMode) (uint8_t)cpsr.mode;
    const ExceptionLevel target_el = opModeToEL(mode);

    HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2));
    SCR scr = ((SCR)tc->readMiscReg(MISCREG_SCR_EL3));

    if (target_el > opModeToEL(cur_mode))
        return true;

    if (!ArmSystem::haveEL(tc, target_el))
        return true;

    if (target_el == EL1 && ArmSystem::haveEL(tc, EL2) && scr.ns && hcr.tge)
        return true;

    if (target_el == EL2 && ArmSystem::haveEL(tc, EL3) && !scr.ns)
        return true;

    bool spsr_mode_is_aarch32 = (spsr.width == 1);
    bool known, target_el_is_aarch32;
    std::tie(known, target_el_is_aarch32) = ELUsingAArch32K(tc, target_el);
    assert(known || (target_el == EL0 && ELIs64(tc, EL1)));

    if (known && (spsr_mode_is_aarch32 != target_el_is_aarch32))
        return true;

    if (!spsr.width) {
        // aarch64
        if (!ArmSystem::highestELIs64(tc))
            return true;
        if (spsr & 0x2)
            return true;
        if (target_el == EL0 && spsr.sp)
            return true;
    } else {
        // aarch32
        return badMode32(mode);
    }

    return false;
}

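// Construct the PSTATE (CPSR) value that an exception return would
// restore from the given SPSR, forcing PSTATE.IL when the return is
// illegal.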
CPSR
ArmStaticInst::getPSTATEFromPSR(ThreadContext *tc, CPSR cpsr, CPSR spsr) const
{
    CPSR new_cpsr = 0;

    // gem5 doesn't implement single-stepping, so force the SS bit to
    // 0.
    new_cpsr.ss = 0;

    if (illegalExceptionReturn(tc, cpsr, spsr)) {
        // If the SPSR specifies an illegal exception return,
        // then PSTATE.{M, nRW, EL, SP} are unchanged and PSTATE.IL
        // is set to 1.
        new_cpsr.il = 1;
        if (cpsr.width) {
            new_cpsr.mode = cpsr.mode;
        } else {
            new_cpsr.width = cpsr.width;
            new_cpsr.el = cpsr.el;
            new_cpsr.sp = cpsr.sp;
        }
    } else {
        new_cpsr.il = spsr.il;
        if (spsr.width && badMode32((OperatingMode)(uint8_t)spsr.mode)) {
            new_cpsr.il = 1;
        } else if (spsr.width) {
            new_cpsr.mode = spsr.mode;
        } else {
            new_cpsr.el = spsr.el;
            new_cpsr.sp = spsr.sp;
        }
    }

    new_cpsr.nz = spsr.nz;
    new_cpsr.c = spsr.c;
    new_cpsr.v = spsr.v;
    if (new_cpsr.width) {
        // aarch32
        const ITSTATE it = getRestoredITBits(tc, spsr);
        new_cpsr.q = spsr.q;
        new_cpsr.ge = spsr.ge;
        new_cpsr.e = spsr.e;
        new_cpsr.aif = spsr.aif;
        new_cpsr.t = spsr.t;
        new_cpsr.it2 = it.top6;
        new_cpsr.it1 = it.bottom2;
    } else {
        // aarch64
        new_cpsr.daif = spsr.daif;
    }

    return new_cpsr;
}

bool
ArmStaticInst::generalExceptionsToAArch64(ThreadContext *tc,
                                          ExceptionLevel pstateEL) const
{
    // Returns TRUE if exceptions normally routed to EL1 are being handled
    // at an Exception level using AArch64, because either EL1 is using
    // AArch64 or TGE is in force and EL2 is using AArch64.
    HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2));
    return (pstateEL == EL0 && !ELIs32(tc, EL1)) ||
           (ArmSystem::haveEL(tc, EL2) && !inSecureState(tc) &&
            !ELIs32(tc, EL2) && hcr.tge);
}


}