/*
 * Copyright (c) 2010-2014, 2016-2018 ARM Limited
 * Copyright (c) 2013 Advanced Micro Devices, Inc.
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Stephen Hines
 */

#include "arch/arm/insts/static_inst.hh"

#include "arch/arm/faults.hh"
#include "base/condcodes.hh"
#include "base/cprintf.hh"
#include "base/loader/symtab.hh"
#include "cpu/reg_class.hh"

namespace ArmISA
{
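// A note on the A32/T32 immediate-shift encodings handled below: an
// immediate shift amount of zero does not always mean "no shift".
// LSR #0 and ASR #0 encode a shift by 32, and ROR #0 encodes RRX
// (rotate right by one through the carry flag), which is why the
// shamt == 0 cases are treated specially in shift_rm_imm() and
// shift_carry_imm().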
// Shift Rm by an immediate value
int32_t
ArmStaticInst::shift_rm_imm(uint32_t base, uint32_t shamt,
                            uint32_t type, uint32_t cfval) const
{
    assert(shamt < 32);
    ArmShiftType shiftType;
    shiftType = (ArmShiftType)type;

    switch (shiftType)
    {
      case LSL:
        return base << shamt;
      case LSR:
        if (shamt == 0)
            return 0;
        else
            return base >> shamt;
      case ASR:
        if (shamt == 0)
            return (base >> 31) | -((base & (1 << 31)) >> 31);
        else
            return (base >> shamt) | -((base & (1 << 31)) >> shamt);
      case ROR:
        if (shamt == 0)
            return (cfval << 31) | (base >> 1); // RRX
        else
            return (base << (32 - shamt)) | (base >> shamt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

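// shiftReg64() implements the AArch64 shifted-register operand form:
// the shift amount is taken modulo the operand width and there are no
// RRX-style special cases, so a shift amount of zero simply returns
// the base value unchanged.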
int64_t
ArmStaticInst::shiftReg64(uint64_t base, uint64_t shiftAmt,
                          ArmShiftType type, uint8_t width) const
{
    shiftAmt = shiftAmt % width;
    ArmShiftType shiftType;
    shiftType = (ArmShiftType)type;

    switch (shiftType)
    {
      case LSL:
        return base << shiftAmt;
      case LSR:
        if (shiftAmt == 0)
            return base;
        else
            return (base & mask(width)) >> shiftAmt;
      case ASR:
        if (shiftAmt == 0) {
            return base;
        } else {
            int sign_bit = bits(base, intWidth - 1);
            base >>= shiftAmt;
            base = sign_bit ? (base | ~mask(intWidth - shiftAmt)) : base;
            return base & mask(intWidth);
        }
      case ROR:
        if (shiftAmt == 0)
            return base;
        else
            return (base << (width - shiftAmt)) | (base >> shiftAmt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

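// extendReg64() implements the AArch64 extended-register operand form:
// the low 8/16/32/64 bits of the source are extracted, optionally
// sign-extended, shifted left by shiftAmt, and truncated to the
// operand width.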
int64_t
ArmStaticInst::extendReg64(uint64_t base, ArmExtendType type,
                           uint64_t shiftAmt, uint8_t width) const
{
    bool sign_extend = false;
    int len = 0;
    switch (type) {
      case UXTB:
        len = 8;
        break;
      case UXTH:
        len = 16;
        break;
      case UXTW:
        len = 32;
        break;
      case UXTX:
        len = 64;
        break;
      case SXTB:
        len = 8;
        sign_extend = true;
        break;
      case SXTH:
        len = 16;
        sign_extend = true;
        break;
      case SXTW:
        len = 32;
        sign_extend = true;
        break;
      case SXTX:
        len = 64;
        sign_extend = true;
        break;
    }
    len = len <= width - shiftAmt ? len : width - shiftAmt;
    uint64_t tmp = (uint64_t) bits(base, len - 1, 0) << shiftAmt;
    if (sign_extend) {
        int sign_bit = bits(tmp, len + shiftAmt - 1);
        tmp = sign_bit ? (tmp | ~mask(len + shiftAmt)) : tmp;
    }
    return tmp & mask(width);
}

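// For register-specified shifts the shift amount comes from the bottom
// byte of Rs, so values of 32 and above are possible; the shamt >= 32
// branches below implement the architectural zeroing/sign-filling
// behaviour for those cases.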
// Shift Rm by Rs
int32_t
ArmStaticInst::shift_rm_rs(uint32_t base, uint32_t shamt,
                           uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    switch (shiftType)
    {
      case LSL:
        if (shamt >= 32)
            return 0;
        else
            return base << shamt;
      case LSR:
        if (shamt >= 32)
            return 0;
        else
            return base >> shamt;
      case ASR:
        if (shamt >= 32)
            return (base >> 31) | -((base & (1 << 31)) >> 31);
        else
            return (base >> shamt) | -((base & (1 << 31)) >> shamt);
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            return base;
        else
            return (base << (32 - shamt)) | (base >> shamt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}


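// The shift_carry_* helpers compute only the shifter carry-out (the
// value written to CPSR.C by flag-setting data-processing
// instructions); the shifted operand itself comes from shift_rm_imm()
// and shift_rm_rs() above.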
// Generate C for a shift by immediate
bool
ArmStaticInst::shift_carry_imm(uint32_t base, uint32_t shamt,
                               uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    switch (shiftType)
    {
      case LSL:
        if (shamt == 0)
            return cfval;
        else
            return (base >> (32 - shamt)) & 1;
      case LSR:
        if (shamt == 0)
            return (base >> 31);
        else
            return (base >> (shamt - 1)) & 1;
      case ASR:
        if (shamt == 0)
            return (base >> 31);
        else
            return (base >> (shamt - 1)) & 1;
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            return (base & 1); // RRX
        else
            return (base >> (shamt - 1)) & 1;
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}


// Generate C for a shift by Rs
bool
ArmStaticInst::shift_carry_rs(uint32_t base, uint32_t shamt,
                              uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    if (shamt == 0)
        return cfval;

    switch (shiftType)
    {
      case LSL:
        if (shamt > 32)
            return 0;
        else
            return (base >> (32 - shamt)) & 1;
      case LSR:
        if (shamt > 32)
            return 0;
        else
            return (base >> (shamt - 1)) & 1;
      case ASR:
        if (shamt > 32)
            shamt = 32;
        return (base >> (shamt - 1)) & 1;
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            shamt = 32;
        return (base >> (shamt - 1)) & 1;
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

void
ArmStaticInst::printIntReg(std::ostream &os, RegIndex reg_idx) const
{
    if (aarch64) {
        if (reg_idx == INTREG_UREG0)
            ccprintf(os, "ureg0");
        else if (reg_idx == INTREG_SPX)
            ccprintf(os, "%s%s", (intWidth == 32) ? "w" : "", "sp");
        else if (reg_idx == INTREG_X31)
            ccprintf(os, "%szr", (intWidth == 32) ? "w" : "x");
        else
            ccprintf(os, "%s%d", (intWidth == 32) ? "w" : "x", reg_idx);
    } else {
        switch (reg_idx) {
          case PCReg:
            ccprintf(os, "pc");
            break;
          case StackPointerReg:
            ccprintf(os, "sp");
            break;
          case FramePointerReg:
            ccprintf(os, "fp");
            break;
          case ReturnAddressReg:
            ccprintf(os, "lr");
            break;
          default:
            ccprintf(os, "r%d", reg_idx);
            break;
        }
    }
}

void
ArmStaticInst::printFloatReg(std::ostream &os, RegIndex reg_idx) const
{
    ccprintf(os, "f%d", reg_idx);
}

void
ArmStaticInst::printVecReg(std::ostream &os, RegIndex reg_idx) const
{
    ccprintf(os, "v%d", reg_idx);
}

void
ArmStaticInst::printCCReg(std::ostream &os, RegIndex reg_idx) const
{
    ccprintf(os, "cc_%s", ArmISA::ccRegName[reg_idx]);
}

void
ArmStaticInst::printMiscReg(std::ostream &os, RegIndex reg_idx) const
{
    assert(reg_idx < NUM_MISCREGS);
    ccprintf(os, "%s", ArmISA::miscRegName[reg_idx]);
}

void
ArmStaticInst::printMnemonic(std::ostream &os,
                             const std::string &suffix,
                             bool withPred,
                             bool withCond64,
                             ConditionCode cond64) const
{
    os << " " << mnemonic;
    if (withPred && !aarch64) {
        printCondition(os, machInst.condCode);
        os << suffix;
    } else if (withCond64) {
        os << ".";
        printCondition(os, cond64);
        os << suffix;
    }
    if (machInst.bigThumb)
        os << ".w";
    os << " ";
}

void
ArmStaticInst::printTarget(std::ostream &os, Addr target,
                           const SymbolTable *symtab) const
{
    Addr symbolAddr;
    std::string symbol;

    if (symtab && symtab->findNearestSymbol(target, symbol, symbolAddr)) {
        ccprintf(os, "<%s", symbol);
        if (symbolAddr != target)
            ccprintf(os, "+%d>", target - symbolAddr);
        else
            ccprintf(os, ">");
    } else {
        ccprintf(os, "%#x", target);
    }
}

void
ArmStaticInst::printCondition(std::ostream &os,
                              unsigned code,
                              bool noImplicit) const
{
    switch (code) {
      case COND_EQ:
        os << "eq";
        break;
      case COND_NE:
        os << "ne";
        break;
      case COND_CS:
        os << "cs";
        break;
      case COND_CC:
        os << "cc";
        break;
      case COND_MI:
        os << "mi";
        break;
      case COND_PL:
        os << "pl";
        break;
      case COND_VS:
        os << "vs";
        break;
      case COND_VC:
        os << "vc";
        break;
      case COND_HI:
        os << "hi";
        break;
      case COND_LS:
        os << "ls";
        break;
      case COND_GE:
        os << "ge";
        break;
      case COND_LT:
        os << "lt";
        break;
      case COND_GT:
        os << "gt";
        break;
      case COND_LE:
        os << "le";
        break;
      case COND_AL:
        // This one is implicit.
        if (noImplicit)
            os << "al";
        break;
      case COND_UC:
        // Unconditional.
        if (noImplicit)
            os << "uc";
        break;
      default:
        panic("Unrecognized condition code %d.\n", code);
    }
}

void
ArmStaticInst::printMemSymbol(std::ostream &os,
                              const SymbolTable *symtab,
                              const std::string &prefix,
                              const Addr addr,
                              const std::string &suffix) const
{
    Addr symbolAddr;
    std::string symbol;
    if (symtab && symtab->findNearestSymbol(addr, symbol, symbolAddr)) {
        ccprintf(os, "%s%s", prefix, symbol);
        if (symbolAddr != addr)
            ccprintf(os, "+%d", addr - symbolAddr);
        ccprintf(os, suffix);
    }
}

void
ArmStaticInst::printShiftOperand(std::ostream &os,
                                 IntRegIndex rm,
                                 bool immShift,
                                 uint32_t shiftAmt,
                                 IntRegIndex rs,
                                 ArmShiftType type) const
{
    bool firstOp = false;

    if (rm != INTREG_ZERO) {
        printIntReg(os, rm);
    }

    bool done = false;

    if ((type == LSR || type == ASR) && immShift && shiftAmt == 0)
        shiftAmt = 32;

    switch (type) {
      case LSL:
        if (immShift && shiftAmt == 0) {
            done = true;
            break;
        }
        if (!firstOp)
            os << ", ";
        os << "LSL";
        break;
      case LSR:
        if (!firstOp)
            os << ", ";
        os << "LSR";
        break;
      case ASR:
        if (!firstOp)
            os << ", ";
        os << "ASR";
        break;
      case ROR:
        if (immShift && shiftAmt == 0) {
            if (!firstOp)
                os << ", ";
            os << "RRX";
            done = true;
            break;
        }
        if (!firstOp)
            os << ", ";
        os << "ROR";
        break;
      default:
        panic("Tried to disassemble unrecognized shift type.\n");
    }
    if (!done) {
        if (!firstOp)
            os << " ";
        if (immShift)
            os << "#" << shiftAmt;
        else
            printIntReg(os, rs);
    }
}

void
ArmStaticInst::printExtendOperand(bool firstOperand, std::ostream &os,
                                  IntRegIndex rm, ArmExtendType type,
                                  int64_t shiftAmt) const
{
    if (!firstOperand)
        ccprintf(os, ", ");
    printIntReg(os, rm);
    if (type == UXTX && shiftAmt == 0)
        return;
    switch (type) {
      case UXTB: ccprintf(os, ", UXTB");
        break;
      case UXTH: ccprintf(os, ", UXTH");
        break;
      case UXTW: ccprintf(os, ", UXTW");
        break;
      case UXTX: ccprintf(os, ", LSL");
        break;
      case SXTB: ccprintf(os, ", SXTB");
        break;
      case SXTH: ccprintf(os, ", SXTH");
        break;
      case SXTW: ccprintf(os, ", SXTW");
        break;
      case SXTX: ccprintf(os, ", SXTX");
        break;
    }
    if (type == UXTX || shiftAmt)
        ccprintf(os, " #%d", shiftAmt);
}

void
ArmStaticInst::printDataInst(std::ostream &os, bool withImm,
        bool immShift, bool s, IntRegIndex rd, IntRegIndex rn,
        IntRegIndex rm, IntRegIndex rs, uint32_t shiftAmt,
        ArmShiftType type, uint64_t imm) const
{
    printMnemonic(os, s ? "s" : "");
    bool firstOp = true;

    // Destination
    if (rd != INTREG_ZERO) {
        firstOp = false;
        printIntReg(os, rd);
    }

    // Source 1.
    if (rn != INTREG_ZERO) {
        if (!firstOp)
            os << ", ";
        firstOp = false;
        printIntReg(os, rn);
    }

    if (!firstOp)
        os << ", ";
    if (withImm) {
        ccprintf(os, "#%ld", imm);
    } else {
        printShiftOperand(os, rm, immShift, shiftAmt, rs, type);
    }
}

std::string
ArmStaticInst::generateDisassembly(Addr pc,
                                   const SymbolTable *symtab) const
{
    std::stringstream ss;
    printMnemonic(ss);
    return ss.str();
}

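// An AArch32 BKPT is routed to an AArch64 handler when EL1 is using
// AArch64, or when a 64-bit, non-secure EL2 has HCR_EL2.TGE or
// MDCR_EL2.TDE set; otherwise it is reported as an AArch32 prefetch
// abort debug event.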
Fault
ArmStaticInst::softwareBreakpoint32(ExecContext *xc, uint16_t imm) const
{
    const auto tc = xc->tcBase();
    const HCR hcr = tc->readMiscReg(MISCREG_HCR_EL2);
    const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
    if ((ArmSystem::haveEL(tc, EL2) && !inSecureState(tc) &&
         !ELIs32(tc, EL2) && (hcr.tge == 1 || mdcr.tde == 1)) ||
         !ELIs32(tc, EL1)) {
        // Route to AArch64 Software Breakpoint
        return std::make_shared<SoftwareBreakpoint>(machInst, imm);
    } else {
        // Execute AArch32 Software Breakpoint
        return std::make_shared<PrefetchAbort>(readPC(xc),
                                               ArmFault::DebugEvent);
    }
}

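// The ISS value 0x1E00000 used for these traps sets CV and encodes
// COND as 0xE (always) in the trapped-access syndrome; only the fault
// type differs, selecting the exception level the trap is taken to.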
Fault
ArmStaticInst::advSIMDFPAccessTrap64(ExceptionLevel el) const
{
    switch (el) {
      case EL1:
        return std::make_shared<SupervisorTrap>(machInst, 0x1E00000,
                                                EC_TRAPPED_SIMD_FP);
      case EL2:
        return std::make_shared<HypervisorTrap>(machInst, 0x1E00000,
                                                EC_TRAPPED_SIMD_FP);
      case EL3:
        return std::make_shared<SecureMonitorTrap>(machInst, 0x1E00000,
                                                   EC_TRAPPED_SIMD_FP);

      default:
        panic("Illegal EL in advSIMDFPAccessTrap64\n");
    }
}


Fault
ArmStaticInst::checkFPAdvSIMDTrap64(ThreadContext *tc, CPSR cpsr) const
{
    if (ArmSystem::haveVirtualization(tc) && !inSecureState(tc)) {
        HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL2);
        if (cptrEnCheck.tfp)
            return advSIMDFPAccessTrap64(EL2);
    }

    if (ArmSystem::haveSecurity(tc)) {
        HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL3);
        if (cptrEnCheck.tfp)
            return advSIMDFPAccessTrap64(EL3);
    }

    return NoFault;
}

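// CPACR_EL1.FPEN gates EL0/EL1 accesses to the SIMD and FP registers:
// 0b11 permits both levels, 0b01 permits EL1 only, and 0b00/0b10 trap
// accesses from both EL0 and EL1 to EL1, which is what the two
// conditions below implement.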
Fault
ArmStaticInst::checkFPAdvSIMDEnabled64(ThreadContext *tc,
                                       CPSR cpsr, CPACR cpacr) const
{
    const ExceptionLevel el = (ExceptionLevel) (uint8_t)cpsr.el;
    if ((el == EL0 && cpacr.fpen != 0x3) ||
        (el == EL1 && !(cpacr.fpen & 0x1)))
        return advSIMDFPAccessTrap64(EL1);

    return checkFPAdvSIMDTrap64(tc, cpsr);
}

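// The AArch32 enable check below follows the architectural order:
// CPACR.cp10/ASEDIS (tightened by NSACR when EL3 is AArch32 and the
// access is non-secure), then FPEXC.EN, then the hypervisor
// HCPTR/CPTR_EL2 traps, and finally the CPTR_EL3.TFP trap when EL3 is
// using AArch64.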
Fault
ArmStaticInst::checkAdvSIMDOrFPEnabled32(ThreadContext *tc,
                                         CPSR cpsr, CPACR cpacr,
                                         NSACR nsacr, FPEXC fpexc,
                                         bool fpexc_check, bool advsimd) const
{
    const bool have_virtualization = ArmSystem::haveVirtualization(tc);
    const bool have_security = ArmSystem::haveSecurity(tc);
    const bool is_secure = inSecureState(tc);
    const ExceptionLevel cur_el = opModeToEL(currOpMode(tc));

    if (cur_el == EL0 && ELIs64(tc, EL1))
        return checkFPAdvSIMDEnabled64(tc, cpsr, cpacr);

    uint8_t cpacr_cp10 = cpacr.cp10;
    bool cpacr_asedis = cpacr.asedis;

    if (have_security && !ELIs64(tc, EL3) && !is_secure) {
        if (nsacr.nsasedis)
            cpacr_asedis = true;
        if (nsacr.cp10 == 0)
            cpacr_cp10 = 0;
    }

    if (cur_el != EL2) {
        if (advsimd && cpacr_asedis)
            return disabledFault();

        if ((cur_el == EL0 && cpacr_cp10 != 0x3) ||
            (cur_el != EL0 && !(cpacr_cp10 & 0x1)))
            return disabledFault();
    }

    if (fpexc_check && !fpexc.en)
        return disabledFault();

    // -- aarch32/exceptions/traps/AArch32.CheckFPAdvSIMDTrap --

    if (have_virtualization && !is_secure && ELIs64(tc, EL2))
        return checkFPAdvSIMDTrap64(tc, cpsr);

    if (have_virtualization && !is_secure) {
        HCPTR hcptr = tc->readMiscReg(MISCREG_HCPTR);
        bool hcptr_cp10 = hcptr.tcp10;
        bool hcptr_tase = hcptr.tase;

        if (have_security && !ELIs64(tc, EL3) && !is_secure) {
            if (nsacr.nsasedis)
                hcptr_tase = true;
            if (!nsacr.cp10)
                hcptr_cp10 = true;
        }

        if ((advsimd && hcptr_tase) || hcptr_cp10) {
            const uint32_t iss = advsimd ? (1 << 5) : 0xA;
            if (cur_el == EL2) {
                return std::make_shared<UndefinedInstruction>(
                    machInst, iss,
                    EC_TRAPPED_HCPTR, mnemonic);
            } else {
                return std::make_shared<HypervisorTrap>(
                    machInst, iss,
                    EC_TRAPPED_HCPTR);
            }

        }
    }

    if (have_security && ELIs64(tc, EL3)) {
        HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL3);
        if (cptrEnCheck.tfp)
            return advSIMDFPAccessTrap64(EL3);
    }

    return NoFault;
}

inline bool
ArmStaticInst::isWFxTrapping(ThreadContext *tc,
                             ExceptionLevel tgtEl,
                             bool isWfe) const
{
    bool trap = false;
    SCTLR sctlr = ((SCTLR)tc->readMiscReg(MISCREG_SCTLR_EL1));
    HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2));
    SCR scr = ((SCR)tc->readMiscReg(MISCREG_SCR_EL3));

    switch (tgtEl) {
      case EL1:
        trap = isWfe? !sctlr.ntwe : !sctlr.ntwi;
        break;
      case EL2:
        trap = isWfe? hcr.twe : hcr.twi;
        break;
      case EL3:
        trap = isWfe? scr.twe : scr.twi;
        break;
      default:
        break;
    }

    return trap;
}

Fault
ArmStaticInst::checkForWFxTrap32(ThreadContext *tc,
                                 ExceptionLevel targetEL,
                                 bool isWfe) const
{
    // Check if target exception level is implemented.
    assert(ArmSystem::haveEL(tc, targetEL));

    // Check for routing to AArch64: this happens if the
    // target exception level (where the trap will be handled)
    // is using aarch64
    if (ELIs64(tc, targetEL)) {
        return checkForWFxTrap64(tc, targetEL, isWfe);
    }

    // Check if processor needs to trap at selected exception level
    bool trap = isWFxTrapping(tc, targetEL, isWfe);

    if (trap) {
        uint32_t iss = isWfe? 0x1E00001 : /* WFE Instruction syndrome */
                              0x1E00000; /* WFI Instruction syndrome */
        switch (targetEL) {
          case EL1:
            return std::make_shared<UndefinedInstruction>(
                machInst, iss,
                EC_TRAPPED_WFI_WFE, mnemonic);
          case EL2:
            return std::make_shared<HypervisorTrap>(machInst, iss,
                                                    EC_TRAPPED_WFI_WFE);
          case EL3:
            return std::make_shared<SecureMonitorTrap>(machInst, iss,
                                                       EC_TRAPPED_WFI_WFE);
          default:
            panic("Unrecognized Exception Level: %d\n", targetEL);
        }
    }

    return NoFault;
}

Fault
ArmStaticInst::checkForWFxTrap64(ThreadContext *tc,
                                 ExceptionLevel targetEL,
                                 bool isWfe) const
{
    // Check if target exception level is implemented.
    assert(ArmSystem::haveEL(tc, targetEL));

    // Check if processor needs to trap at selected exception level
    bool trap = isWFxTrapping(tc, targetEL, isWfe);

    if (trap) {
        uint32_t iss = isWfe? 0x1E00001 : /* WFE Instruction syndrome */
                              0x1E00000; /* WFI Instruction syndrome */
        switch (targetEL) {
          case EL1:
            return std::make_shared<SupervisorTrap>(machInst, iss,
                                                    EC_TRAPPED_WFI_WFE);
          case EL2:
            return std::make_shared<HypervisorTrap>(machInst, iss,
                                                    EC_TRAPPED_WFI_WFE);
          case EL3:
            return std::make_shared<SecureMonitorTrap>(machInst, iss,
                                                       EC_TRAPPED_WFI_WFE);
          default:
            panic("Unrecognized Exception Level: %d\n", targetEL);
        }
    }

    return NoFault;
}

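// WFE/WFI traps are checked from the lowest applicable exception level
// upwards: SCTLR.nTWE/nTWI for EL1, HCR.TWE/TWI for a non-secure EL2,
// and SCR.TWE/TWI for EL3; the first trap found is the one taken.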
Fault
ArmStaticInst::trapWFx(ThreadContext *tc,
                       CPSR cpsr, SCR scr,
                       bool isWfe) const
{
    Fault fault = NoFault;
    if (cpsr.el == EL0) {
        fault = checkForWFxTrap32(tc, EL1, isWfe);
    }

    if ((fault == NoFault) &&
        ArmSystem::haveEL(tc, EL2) && !inSecureState(scr, cpsr) &&
        ((cpsr.el == EL0) || (cpsr.el == EL1))) {

        fault = checkForWFxTrap32(tc, EL2, isWfe);
    }

    if ((fault == NoFault) &&
        ArmSystem::haveEL(tc, EL3) && cpsr.el != EL3) {
        fault = checkForWFxTrap32(tc, EL3, isWfe);
    }

    return fault;
}

Fault
ArmStaticInst::checkSETENDEnabled(ThreadContext *tc, CPSR cpsr) const
{
    bool setend_disabled(false);
    ExceptionLevel pstateEL = (ExceptionLevel)(uint8_t)(cpsr.el);

    if (pstateEL == EL2) {
        setend_disabled = ((SCTLR)tc->readMiscRegNoEffect(MISCREG_HSCTLR)).sed;
    } else {
        // Please note: the ARM ARM pseudocode distinguishes whether EL1
        // is AArch32 or AArch64:
        // if ELUsingAArch32(EL1) then SCTLR.SED else SCTLR[].SED;
        // Considering that SETEND is AArch32 only, ELUsingAArch32(EL1)
        // will always be true (hence using SCTLR.SED) except for
        // instructions executed at EL0 under an AArch64 EL1.
        // In this case SCTLR_EL1 will be used. In gem5 the register is
        // mapped to SCTLR_ns. We can safely use SCTLR and choose the
        // appropriate bank version.

        // Get the index of the banked version of SCTLR:
        // SCTLR_s or SCTLR_ns.
        auto banked_sctlr = snsBankedIndex(
            MISCREG_SCTLR, tc, !inSecureState(tc));

        // SCTLR.SED == 1 disables the use of the SETEND instruction.
        setend_disabled = ((SCTLR)tc->readMiscRegNoEffect(banked_sctlr)).sed;
    }

    return setend_disabled ? undefinedFault32(tc, pstateEL) :
                             NoFault;
}

Fault
ArmStaticInst::undefinedFault32(ThreadContext *tc,
                                ExceptionLevel pstateEL) const
{
    // Even if we are running in aarch32, the fault might be dealt with in
    // aarch64 ISA.
    if (generalExceptionsToAArch64(tc, pstateEL)) {
        return undefinedFault64(tc, pstateEL);
    } else {
        // Please note: according to the ARM ARM pseudocode we should handle
        // the case when EL2 is aarch64 and HCR.TGE is 1 as well.
        // However this case is already handled by the routeToHyp method in
        // ArmFault class.
        return std::make_shared<UndefinedInstruction>(
            machInst, 0,
            EC_UNKNOWN, mnemonic);
    }
}

Fault
ArmStaticInst::undefinedFault64(ThreadContext *tc,
                                ExceptionLevel pstateEL) const
{
    switch (pstateEL) {
      case EL0:
      case EL1:
        return std::make_shared<SupervisorTrap>(machInst, 0, EC_UNKNOWN);
      case EL2:
        return std::make_shared<HypervisorTrap>(machInst, 0, EC_UNKNOWN);
      case EL3:
        return std::make_shared<SecureMonitorTrap>(machInst, 0, EC_UNKNOWN);
      default:
        panic("Unrecognized Exception Level: %d\n", pstateEL);
        break;
    }

    return NoFault;
}

static uint8_t
getRestoredITBits(ThreadContext *tc, CPSR spsr)
{
    // See: shared/functions/system/RestoredITBits in the ARM ARM

    const ExceptionLevel el = opModeToEL((OperatingMode) (uint8_t)spsr.mode);
    const uint8_t it = itState(spsr);

    if (!spsr.t || spsr.il)
        return 0;

    // The IT bits are forced to zero when they are set to a reserved
    // value.
    if (bits(it, 7, 4) != 0 && bits(it, 3, 0) == 0)
        return 0;

    const bool itd = el == EL2 ?
        ((SCTLR)tc->readMiscReg(MISCREG_HSCTLR)).itd :
        ((SCTLR)tc->readMiscReg(MISCREG_SCTLR)).itd;

    // The IT bits are forced to zero when returning to A32 state, or
    // when returning to an EL with the ITD bit set to 1, and the IT
    // bits are describing a multi-instruction block.
    if (itd && bits(it, 2, 0) != 0)
        return 0;

    return it;
}

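// illegalExceptionReturn() follows the AArch64 IllegalExceptionReturn()
// pseudocode: a return is illegal if the SPSR selects an unknown mode,
// an unimplemented or higher exception level, an exception level whose
// register width does not match the SPSR, or a reserved AArch64
// encoding such as returning to EL0 with SPSR.SP set.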
static bool
illegalExceptionReturn(ThreadContext *tc, CPSR cpsr, CPSR spsr)
{
    const OperatingMode mode = (OperatingMode) (uint8_t)spsr.mode;
    if (unknownMode(mode))
        return true;

    const OperatingMode cur_mode = (OperatingMode) (uint8_t)cpsr.mode;
    const ExceptionLevel target_el = opModeToEL(mode);

    HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2));
    SCR scr = ((SCR)tc->readMiscReg(MISCREG_SCR_EL3));

    if (target_el > opModeToEL(cur_mode))
        return true;

    if (!ArmSystem::haveEL(tc, target_el))
        return true;

    if (target_el == EL1 && ArmSystem::haveEL(tc, EL2) && scr.ns && hcr.tge)
        return true;

    if (target_el == EL2 && ArmSystem::haveEL(tc, EL3) && !scr.ns)
        return true;

    bool spsr_mode_is_aarch32 = (spsr.width == 1);
    bool known, target_el_is_aarch32;
    std::tie(known, target_el_is_aarch32) = ELUsingAArch32K(tc, target_el);
    assert(known || (target_el == EL0 && ELIs64(tc, EL1)));

    if (known && (spsr_mode_is_aarch32 != target_el_is_aarch32))
        return true;

    if (!spsr.width) {
        // aarch64
        if (!ArmSystem::highestELIs64(tc))
            return true;
        if (spsr & 0x2)
            return true;
        if (target_el == EL0 && spsr.sp)
            return true;
    } else {
        // aarch32
        return unknownMode32(mode);
    }

    return false;
}

CPSR
ArmStaticInst::getPSTATEFromPSR(ThreadContext *tc, CPSR cpsr, CPSR spsr) const
{
    CPSR new_cpsr = 0;

    // gem5 doesn't implement single-stepping, so force the SS bit to
    // 0.
    new_cpsr.ss = 0;

    if (illegalExceptionReturn(tc, cpsr, spsr)) {
        // If the SPSR specifies an illegal exception return,
        // then PSTATE.{M, nRW, EL, SP} are unchanged and PSTATE.IL
        // is set to 1.
        new_cpsr.il = 1;
        if (cpsr.width) {
            new_cpsr.mode = cpsr.mode;
        } else {
            new_cpsr.width = cpsr.width;
            new_cpsr.el = cpsr.el;
            new_cpsr.sp = cpsr.sp;
        }
    } else {
        new_cpsr.il = spsr.il;
        if (spsr.width && unknownMode32((OperatingMode)(uint8_t)spsr.mode)) {
            new_cpsr.il = 1;
        } else if (spsr.width) {
            new_cpsr.mode = spsr.mode;
        } else {
            new_cpsr.el = spsr.el;
            new_cpsr.sp = spsr.sp;
        }
    }

    new_cpsr.nz = spsr.nz;
    new_cpsr.c = spsr.c;
    new_cpsr.v = spsr.v;
    if (new_cpsr.width) {
        // aarch32
        const ITSTATE it = getRestoredITBits(tc, spsr);
        new_cpsr.q = spsr.q;
        new_cpsr.ge = spsr.ge;
        new_cpsr.e = spsr.e;
        new_cpsr.aif = spsr.aif;
        new_cpsr.t = spsr.t;
        new_cpsr.it2 = it.top6;
        new_cpsr.it1 = it.bottom2;
    } else {
        // aarch64
        new_cpsr.daif = spsr.daif;
    }

    return new_cpsr;
}

bool
ArmStaticInst::generalExceptionsToAArch64(ThreadContext *tc,
                                          ExceptionLevel pstateEL) const
{
    // Returns TRUE if exceptions normally routed to EL1 are being handled
    // at an Exception level using AArch64, because either EL1 is using
    // AArch64 or TGE is in force and EL2 is using AArch64.
    HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2));
    return (pstateEL == EL0 && !ELIs32(tc, EL1)) ||
        (ArmSystem::haveEL(tc, EL2) && !inSecureState(tc) &&
         !ELIs32(tc, EL2) && hcr.tge);
}


}