/*
 * Copyright (c) 2010-2014, 2016-2018 ARM Limited
 * Copyright (c) 2013 Advanced Micro Devices, Inc.
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Stephen Hines
 */

#include "arch/arm/insts/static_inst.hh"

#include "arch/arm/faults.hh"
#include "base/condcodes.hh"
#include "base/cprintf.hh"
#include "base/loader/symtab.hh"
#include "cpu/reg_class.hh"

namespace ArmISA
{
// Shift Rm by an immediate value.
// Implements the AArch32 immediate shifter operand: an immediate shift
// amount of 0 encodes the special cases LSR/ASR #32 and RRX (for ROR).
// cfval is the current carry flag, consumed only by the RRX case.
int32_t
ArmStaticInst::shift_rm_imm(uint32_t base, uint32_t shamt,
                            uint32_t type, uint32_t cfval) const
{
    assert(shamt < 32);
    ArmShiftType shiftType;
    shiftType = (ArmShiftType)type;

    switch (shiftType)
    {
      case LSL:
        return base << shamt;
      case LSR:
        // Encoded shamt of 0 means LSR #32: every bit shifted out.
        if (shamt == 0)
            return 0;
        else
            return base >> shamt;
      case ASR:
        // Encoded shamt of 0 means ASR #32: result is 32 copies of the
        // sign bit. The -(...) term replicates bit 31 into the high bits
        // that the (unsigned) right shift cleared.
        if (shamt == 0)
            return (base >> 31) | -((base & (1 << 31)) >> 31);
        else
            return (base >> shamt) | -((base & (1 << 31)) >> shamt);
      case ROR:
        if (shamt == 0)
            return (cfval << 31) | (base >> 1); // RRX
        else
            return (base << (32 - shamt)) | (base >> shamt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

// 64-bit register shift used by AArch64 shifted-register data processing.
// The shift amount is taken modulo the operand width (32 or 64), matching
// the architected behavior of register-specified shifts.
int64_t
ArmStaticInst::shiftReg64(uint64_t base, uint64_t shiftAmt,
                          ArmShiftType type, uint8_t width) const
{
    shiftAmt = shiftAmt % width;
    ArmShiftType shiftType;
    shiftType = (ArmShiftType)type;

    switch (shiftType)
    {
      case LSL:
        return base << shiftAmt;
      case LSR:
        if (shiftAmt == 0)
            return base;
        else
            return (base & mask(width)) >> shiftAmt;
      case ASR:
        if (shiftAmt == 0) {
            return base;
        } else {
            // Sign-extend manually: replicate the top bit of the operand
            // into the bits vacated by the logical shift, then truncate.
            // NOTE(review): this case uses intWidth (the instruction's
            // datasize member) where the other cases use the width
            // parameter — presumably callers always pass width == intWidth
            // here; confirm against call sites.
            int sign_bit = bits(base, intWidth - 1);
            base >>= shiftAmt;
            base = sign_bit ? (base | ~mask(intWidth - shiftAmt)) : base;
            return base & mask(intWidth);
        }
      case ROR:
        if (shiftAmt == 0)
            return base;
        else
            return (base << (width - shiftAmt)) | (base >> shiftAmt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

// Extended-register operand (ExtendReg): extract the low 8/16/32/64 bits
// of base, optionally sign-extend them, shift left by shiftAmt, and
// truncate the result to the destination width.
int64_t
ArmStaticInst::extendReg64(uint64_t base, ArmExtendType type,
                           uint64_t shiftAmt, uint8_t width) const
{
    bool sign_extend = false;
    int len = 0;
    switch (type) {
      case UXTB:
        len = 8;
        break;
      case UXTH:
        len = 16;
        break;
      case UXTW:
        len = 32;
        break;
      case UXTX:
        len = 64;
        break;
      case SXTB:
        len = 8;
        sign_extend = true;
        break;
      case SXTH:
        len = 16;
        sign_extend = true;
        break;
      case SXTW:
        len = 32;
        sign_extend = true;
        break;
      case SXTX:
        len = 64;
        sign_extend = true;
        break;
    }
    // Clamp the extracted field so that, after shifting, it still fits in
    // the destination width.
    len = len <= width - shiftAmt ? len : width - shiftAmt;
    uint64_t tmp = (uint64_t) bits(base, len - 1, 0) << shiftAmt;
    if (sign_extend) {
        // Replicate the (shifted) top bit of the extracted field upward.
        int sign_bit = bits(tmp, len + shiftAmt - 1);
        tmp = sign_bit ? (tmp | ~mask(len + shiftAmt)) : tmp;
    }
    return tmp & mask(width);
}

// Shift Rm by Rs (register-specified shift).
// Unlike the immediate form, a shift amount of 0 leaves the value
// unchanged, and amounts >= 32 are architecturally meaningful.
int32_t
ArmStaticInst::shift_rm_rs(uint32_t base, uint32_t shamt,
                           uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    switch (shiftType)
    {
      case LSL:
        if (shamt >= 32)
            return 0;
        else
            return base << shamt;
      case LSR:
        if (shamt >= 32)
            return 0;
        else
            return base >> shamt;
      case ASR:
        // Shifts of 32 or more fill the result with the sign bit.
        if (shamt >= 32)
            return (base >> 31) | -((base & (1 << 31)) >> 31);
        else
            return (base >> shamt) | -((base & (1 << 31)) >> shamt);
      case ROR:
        // Rotation is modulo 32; a multiple of 32 is a no-op.
        shamt = shamt & 0x1f;
        if (shamt == 0)
            return base;
        else
            return (base << (32 - shamt)) | (base >> shamt);
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}


// Generate C for a shift by immediate.
// Returns the carry-out of the shifter operand; cfval is the carry-in,
// returned unchanged when the shift does not produce a carry (LSL #0).
bool
ArmStaticInst::shift_carry_imm(uint32_t base, uint32_t shamt,
                               uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    switch (shiftType)
    {
      case LSL:
        if (shamt == 0)
            return cfval;
        else
            return (base >> (32 - shamt)) & 1;
      case LSR:
        // shamt == 0 encodes LSR #32: carry is the old bit 31.
        if (shamt == 0)
            return (base >> 31);
        else
            return (base >> (shamt - 1)) & 1;
      case ASR:
        // shamt == 0 encodes ASR #32: carry is the old bit 31.
        if (shamt == 0)
            return (base >> 31);
        else
            return (base >> (shamt - 1)) & 1;
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            return (base & 1); // RRX
        else
            return (base >> (shamt - 1)) & 1;
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}


// Generate C for a shift by Rs (register-specified shift amount).
// A shift amount of 0 leaves the carry flag unchanged.
bool
ArmStaticInst::shift_carry_rs(uint32_t base, uint32_t shamt,
                              uint32_t type, uint32_t cfval) const
{
    enum ArmShiftType shiftType;
    shiftType = (enum ArmShiftType) type;

    if (shamt == 0)
        return cfval;

    switch (shiftType)
    {
      case LSL:
        if (shamt > 32)
            return 0;
        else
            return (base >> (32 - shamt)) & 1;
      case LSR:
        if (shamt > 32)
            return 0;
        else
            return (base >> (shamt - 1)) & 1;
      case ASR:
        // Beyond 32 the carry saturates to the sign bit (bit 31).
        if (shamt > 32)
            shamt = 32;
        return (base >> (shamt - 1)) & 1;
      case ROR:
        shamt = shamt & 0x1f;
        if (shamt == 0)
            shamt = 32;
        return (base >> (shamt - 1)) & 1;
      default:
        ccprintf(std::cerr, "Unhandled shift type\n");
        exit(1);
        break;
    }
    return 0;
}

// Print an integer register using the disassembly conventions of the
// current state: w/x names (with sp/zr specials) for aarch64, the
// pc/sp/fp/lr aliases for aarch32.
void
ArmStaticInst::printIntReg(std::ostream &os, RegIndex reg_idx) const
{
    if (aarch64) {
        if (reg_idx == INTREG_UREG0)
            ccprintf(os, "ureg0");
        else if (reg_idx == INTREG_SPX)
            ccprintf(os, "%s%s", (intWidth == 32) ? "w" : "", "sp");
        else if (reg_idx == INTREG_X31)
            ccprintf(os, "%szr", (intWidth == 32) ? "w" : "x");
        else
            ccprintf(os, "%s%d", (intWidth == 32) ? "w" : "x", reg_idx);
    } else {
        switch (reg_idx) {
          case PCReg:
            ccprintf(os, "pc");
            break;
          case StackPointerReg:
            ccprintf(os, "sp");
            break;
          case FramePointerReg:
            ccprintf(os, "fp");
            break;
          case ReturnAddressReg:
            ccprintf(os, "lr");
            break;
          default:
            ccprintf(os, "r%d", reg_idx);
            break;
        }
    }
}
|
337void 338ArmStaticInst::printFloatReg(std::ostream &os, RegIndex reg_idx) const 339{ 340 ccprintf(os, "f%d", reg_idx); 341} 342 343void 344ArmStaticInst::printVecReg(std::ostream &os, RegIndex reg_idx) const 345{ 346 ccprintf(os, "v%d", reg_idx); 347} 348 349void 350ArmStaticInst::printCCReg(std::ostream &os, RegIndex reg_idx) const 351{ 352 ccprintf(os, "cc_%s", ArmISA::ccRegName[reg_idx]); 353} 354 355void 356ArmStaticInst::printMiscReg(std::ostream &os, RegIndex reg_idx) const 357{ 358 assert(reg_idx < NUM_MISCREGS); 359 ccprintf(os, "%s", ArmISA::miscRegName[reg_idx]); 360} 361 362void 363ArmStaticInst::printMnemonic(std::ostream &os, 364 const std::string &suffix, 365 bool withPred, 366 bool withCond64, 367 ConditionCode cond64) const 368{ 369 os << " " << mnemonic; 370 if (withPred && !aarch64) { 371 printCondition(os, machInst.condCode); 372 os << suffix; 373 } else if (withCond64) { 374 os << "."; 375 printCondition(os, cond64); 376 os << suffix; 377 } 378 if (machInst.bigThumb) 379 os << ".w"; 380 os << " "; 381} 382 383void 384ArmStaticInst::printTarget(std::ostream &os, Addr target, 385 const SymbolTable *symtab) const 386{ 387 Addr symbolAddr; 388 std::string symbol; 389 390 if (symtab && symtab->findNearestSymbol(target, symbol, symbolAddr)) { 391 ccprintf(os, "<%s", symbol); 392 if (symbolAddr != target) 393 ccprintf(os, "+%d>", target - symbolAddr); 394 else 395 ccprintf(os, ">"); 396 } else { 397 ccprintf(os, "%#x", target); 398 } 399} 400 401void 402ArmStaticInst::printCondition(std::ostream &os, 403 unsigned code, 404 bool noImplicit) const 405{ 406 switch (code) { 407 case COND_EQ: 408 os << "eq"; 409 break; 410 case COND_NE: 411 os << "ne"; 412 break; 413 case COND_CS: 414 os << "cs"; 415 break; 416 case COND_CC: 417 os << "cc"; 418 break; 419 case COND_MI: 420 os << "mi"; 421 break; 422 case COND_PL: 423 os << "pl"; 424 break; 425 case COND_VS: 426 os << "vs"; 427 break; 428 case COND_VC: 429 os << "vc"; 430 break; 431 case COND_HI: 432 os << 
"hi"; 433 break; 434 case COND_LS: 435 os << "ls"; 436 break; 437 case COND_GE: 438 os << "ge"; 439 break; 440 case COND_LT: 441 os << "lt"; 442 break; 443 case COND_GT: 444 os << "gt"; 445 break; 446 case COND_LE: 447 os << "le"; 448 break; 449 case COND_AL: 450 // This one is implicit. 451 if (noImplicit) 452 os << "al"; 453 break; 454 case COND_UC: 455 // Unconditional. 456 if (noImplicit) 457 os << "uc"; 458 break; 459 default: 460 panic("Unrecognized condition code %d.\n", code); 461 } 462} 463 464void 465ArmStaticInst::printMemSymbol(std::ostream &os, 466 const SymbolTable *symtab, 467 const std::string &prefix, 468 const Addr addr, 469 const std::string &suffix) const 470{ 471 Addr symbolAddr; 472 std::string symbol; 473 if (symtab && symtab->findNearestSymbol(addr, symbol, symbolAddr)) { 474 ccprintf(os, "%s%s", prefix, symbol); 475 if (symbolAddr != addr) 476 ccprintf(os, "+%d", addr - symbolAddr); 477 ccprintf(os, suffix); 478 } 479} 480 481void 482ArmStaticInst::printShiftOperand(std::ostream &os, 483 IntRegIndex rm, 484 bool immShift, 485 uint32_t shiftAmt, 486 IntRegIndex rs, 487 ArmShiftType type) const 488{ 489 bool firstOp = false; 490 491 if (rm != INTREG_ZERO) { 492 printIntReg(os, rm); 493 } 494 495 bool done = false; 496 497 if ((type == LSR || type == ASR) && immShift && shiftAmt == 0) 498 shiftAmt = 32; 499 500 switch (type) { 501 case LSL: 502 if (immShift && shiftAmt == 0) { 503 done = true; 504 break; 505 } 506 if (!firstOp) 507 os << ", "; 508 os << "LSL"; 509 break; 510 case LSR: 511 if (!firstOp) 512 os << ", "; 513 os << "LSR"; 514 break; 515 case ASR: 516 if (!firstOp) 517 os << ", "; 518 os << "ASR"; 519 break; 520 case ROR: 521 if (immShift && shiftAmt == 0) { 522 if (!firstOp) 523 os << ", "; 524 os << "RRX"; 525 done = true; 526 break; 527 } 528 if (!firstOp) 529 os << ", "; 530 os << "ROR"; 531 break; 532 default: 533 panic("Tried to disassemble unrecognized shift type.\n"); 534 } 535 if (!done) { 536 if (!firstOp) 537 os << " "; 
538 if (immShift) 539 os << "#" << shiftAmt; 540 else 541 printIntReg(os, rs); 542 } 543} 544 545void 546ArmStaticInst::printExtendOperand(bool firstOperand, std::ostream &os, 547 IntRegIndex rm, ArmExtendType type, 548 int64_t shiftAmt) const 549{ 550 if (!firstOperand) 551 ccprintf(os, ", "); 552 printIntReg(os, rm); 553 if (type == UXTX && shiftAmt == 0) 554 return; 555 switch (type) { 556 case UXTB: ccprintf(os, ", UXTB"); 557 break; 558 case UXTH: ccprintf(os, ", UXTH"); 559 break; 560 case UXTW: ccprintf(os, ", UXTW"); 561 break; 562 case UXTX: ccprintf(os, ", LSL"); 563 break; 564 case SXTB: ccprintf(os, ", SXTB"); 565 break; 566 case SXTH: ccprintf(os, ", SXTH"); 567 break; 568 case SXTW: ccprintf(os, ", SXTW"); 569 break; 570 case SXTX: ccprintf(os, ", SXTW"); 571 break; 572 } 573 if (type == UXTX || shiftAmt) 574 ccprintf(os, " #%d", shiftAmt); 575} 576 577void 578ArmStaticInst::printDataInst(std::ostream &os, bool withImm, 579 bool immShift, bool s, IntRegIndex rd, IntRegIndex rn, 580 IntRegIndex rm, IntRegIndex rs, uint32_t shiftAmt, 581 ArmShiftType type, uint64_t imm) const 582{ 583 printMnemonic(os, s ? "s" : ""); 584 bool firstOp = true; 585 586 // Destination 587 if (rd != INTREG_ZERO) { 588 firstOp = false; 589 printIntReg(os, rd); 590 } 591 592 // Source 1. 
593 if (rn != INTREG_ZERO) { 594 if (!firstOp) 595 os << ", "; 596 firstOp = false; 597 printIntReg(os, rn); 598 } 599 600 if (!firstOp) 601 os << ", "; 602 if (withImm) { 603 ccprintf(os, "#%ld", imm); 604 } else { 605 printShiftOperand(os, rm, immShift, shiftAmt, rs, type); 606 } 607} 608 609std::string 610ArmStaticInst::generateDisassembly(Addr pc, 611 const SymbolTable *symtab) const 612{ 613 std::stringstream ss; 614 printMnemonic(ss); 615 return ss.str(); 616} 617 618Fault 619ArmStaticInst::softwareBreakpoint32(ExecContext *xc, uint16_t imm) const 620{ 621 const auto tc = xc->tcBase(); 622 const HCR hcr = tc->readMiscReg(MISCREG_HCR_EL2); 623 const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2); 624 if ((ArmSystem::haveEL(tc, EL2) && !inSecureState(tc) && 625 !ELIs32(tc, EL2) && (hcr.tge == 1 || mdcr.tde == 1)) || 626 !ELIs32(tc, EL1)) { 627 // Route to AArch64 Software Breakpoint 628 return std::make_shared<SoftwareBreakpoint>(machInst, imm); 629 } else { 630 // Execute AArch32 Software Breakpoint 631 return std::make_shared<PrefetchAbort>(readPC(xc), 632 ArmFault::DebugEvent); 633 } 634} 635 636Fault 637ArmStaticInst::advSIMDFPAccessTrap64(ExceptionLevel el) const 638{ 639 switch (el) { 640 case EL1: 641 return std::make_shared<SupervisorTrap>(machInst, 0x1E00000, 642 EC_TRAPPED_SIMD_FP); 643 case EL2: 644 return std::make_shared<HypervisorTrap>(machInst, 0x1E00000, 645 EC_TRAPPED_SIMD_FP); 646 case EL3: 647 return std::make_shared<SecureMonitorTrap>(machInst, 0x1E00000, 648 EC_TRAPPED_SIMD_FP); 649 650 default: 651 panic("Illegal EL in advSIMDFPAccessTrap64\n"); 652 } 653} 654 655 656Fault 657ArmStaticInst::checkFPAdvSIMDTrap64(ThreadContext *tc, CPSR cpsr) const 658{ 659 if (ArmSystem::haveVirtualization(tc) && !inSecureState(tc)) { 660 HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL2); 661 if (cptrEnCheck.tfp) 662 return advSIMDFPAccessTrap64(EL2); 663 } 664 665 if (ArmSystem::haveSecurity(tc)) { 666 HCPTR cptrEnCheck = 
tc->readMiscReg(MISCREG_CPTR_EL3); 667 if (cptrEnCheck.tfp) 668 return advSIMDFPAccessTrap64(EL3); 669 } 670 671 return NoFault; 672} 673 674Fault 675ArmStaticInst::checkFPAdvSIMDEnabled64(ThreadContext *tc, 676 CPSR cpsr, CPACR cpacr) const 677{ 678 const ExceptionLevel el = (ExceptionLevel) (uint8_t)cpsr.el; 679 if ((el == EL0 && cpacr.fpen != 0x3) || 680 (el == EL1 && !(cpacr.fpen & 0x1))) 681 return advSIMDFPAccessTrap64(EL1); 682 683 return checkFPAdvSIMDTrap64(tc, cpsr); 684} 685 686Fault 687ArmStaticInst::checkAdvSIMDOrFPEnabled32(ThreadContext *tc, 688 CPSR cpsr, CPACR cpacr, 689 NSACR nsacr, FPEXC fpexc, 690 bool fpexc_check, bool advsimd) const 691{ 692 const bool have_virtualization = ArmSystem::haveVirtualization(tc); 693 const bool have_security = ArmSystem::haveSecurity(tc); 694 const bool is_secure = inSecureState(tc); 695 const ExceptionLevel cur_el = opModeToEL(currOpMode(tc)); 696 697 if (cur_el == EL0 && ELIs64(tc, EL1)) 698 return checkFPAdvSIMDEnabled64(tc, cpsr, cpacr); 699 700 uint8_t cpacr_cp10 = cpacr.cp10; 701 bool cpacr_asedis = cpacr.asedis; 702 703 if (have_security && !ELIs64(tc, EL3) && !is_secure) { 704 if (nsacr.nsasedis) 705 cpacr_asedis = true; 706 if (nsacr.cp10 == 0) 707 cpacr_cp10 = 0; 708 } 709 710 if (cur_el != EL2) { 711 if (advsimd && cpacr_asedis) 712 return disabledFault(); 713 714 if ((cur_el == EL0 && cpacr_cp10 != 0x3) || 715 (cur_el != EL0 && !(cpacr_cp10 & 0x1))) 716 return disabledFault(); 717 } 718 719 if (fpexc_check && !fpexc.en) 720 return disabledFault(); 721 722 // -- aarch32/exceptions/traps/AArch32.CheckFPAdvSIMDTrap -- 723 724 if (have_virtualization && !is_secure && ELIs64(tc, EL2)) 725 return checkFPAdvSIMDTrap64(tc, cpsr); 726 727 if (have_virtualization && !is_secure) { 728 HCPTR hcptr = tc->readMiscReg(MISCREG_HCPTR); 729 bool hcptr_cp10 = hcptr.tcp10; 730 bool hcptr_tase = hcptr.tase; 731 732 if (have_security && !ELIs64(tc, EL3) && !is_secure) { 733 if (nsacr.nsasedis) 734 hcptr_tase = true; 735 if 
(nsacr.cp10) 736 hcptr_cp10 = true; 737 } 738 739 if ((advsimd && hcptr_tase) || hcptr_cp10) { 740 const uint32_t iss = advsimd ? (1 << 5) : 0xA; 741 if (cur_el == EL2) { 742 return std::make_shared<UndefinedInstruction>( 743 machInst, iss, 744 EC_TRAPPED_HCPTR, mnemonic); 745 } else { 746 return std::make_shared<HypervisorTrap>( 747 machInst, iss, 748 EC_TRAPPED_HCPTR); 749 } 750 751 } 752 } 753 754 if (have_security && ELIs64(tc, EL3)) { 755 HCPTR cptrEnCheck = tc->readMiscReg(MISCREG_CPTR_EL3); 756 if (cptrEnCheck.tfp) 757 return advSIMDFPAccessTrap64(EL3); 758 } 759 760 return NoFault; 761} 762 763inline bool 764ArmStaticInst::isWFxTrapping(ThreadContext *tc, 765 ExceptionLevel tgtEl, 766 bool isWfe) const 767{ 768 bool trap = false; 769 SCTLR sctlr = ((SCTLR)tc->readMiscReg(MISCREG_SCTLR_EL1)); 770 HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2)); 771 SCR scr = ((SCR)tc->readMiscReg(MISCREG_SCR_EL3)); 772 773 switch (tgtEl) { 774 case EL1: 775 trap = isWfe? !sctlr.ntwe : !sctlr.ntwi; 776 break; 777 case EL2: 778 trap = isWfe? hcr.twe : hcr.twi; 779 break; 780 case EL3: 781 trap = isWfe? scr.twe : scr.twi; 782 break; 783 default: 784 break; 785 } 786 787 return trap; 788} 789 790Fault 791ArmStaticInst::checkForWFxTrap32(ThreadContext *tc, 792 ExceptionLevel targetEL, 793 bool isWfe) const 794{ 795 // Check if target exception level is implemented. 796 assert(ArmSystem::haveEL(tc, targetEL)); 797 798 // Check for routing to AArch64: this happens if the 799 // target exception level (where the trap will be handled) 800 // is using aarch64 801 if (ELIs64(tc, targetEL)) { 802 return checkForWFxTrap64(tc, targetEL, isWfe); 803 } 804 805 // Check if processor needs to trap at selected exception level 806 bool trap = isWFxTrapping(tc, targetEL, isWfe); 807 808 if (trap) { 809 uint32_t iss = isWfe? 
0x1E00001 : /* WFE Instruction syndrome */ 810 0x1E00000; /* WFI Instruction syndrome */ 811 switch (targetEL) { 812 case EL1: 813 return std::make_shared<UndefinedInstruction>( 814 machInst, iss, 815 EC_TRAPPED_WFI_WFE, mnemonic); 816 case EL2: 817 return std::make_shared<HypervisorTrap>(machInst, iss, 818 EC_TRAPPED_WFI_WFE); 819 case EL3: 820 return std::make_shared<SecureMonitorTrap>(machInst, iss, 821 EC_TRAPPED_WFI_WFE); 822 default: 823 panic("Unrecognized Exception Level: %d\n", targetEL); 824 } 825 } 826 827 return NoFault; 828} 829 830Fault 831ArmStaticInst::checkForWFxTrap64(ThreadContext *tc, 832 ExceptionLevel targetEL, 833 bool isWfe) const 834{ 835 // Check if target exception level is implemented. 836 assert(ArmSystem::haveEL(tc, targetEL)); 837 838 // Check if processor needs to trap at selected exception level 839 bool trap = isWFxTrapping(tc, targetEL, isWfe); 840 841 if (trap) { 842 uint32_t iss = isWfe? 0x1E00001 : /* WFE Instruction syndrome */ 843 0x1E00000; /* WFI Instruction syndrome */ 844 switch (targetEL) { 845 case EL1: 846 return std::make_shared<SupervisorTrap>(machInst, iss, 847 EC_TRAPPED_WFI_WFE); 848 case EL2: 849 return std::make_shared<HypervisorTrap>(machInst, iss, 850 EC_TRAPPED_WFI_WFE); 851 case EL3: 852 return std::make_shared<SecureMonitorTrap>(machInst, iss, 853 EC_TRAPPED_WFI_WFE); 854 default: 855 panic("Unrecognized Exception Level: %d\n", targetEL); 856 } 857 } 858 859 return NoFault; 860} 861 862Fault 863ArmStaticInst::trapWFx(ThreadContext *tc, 864 CPSR cpsr, SCR scr, 865 bool isWfe) const 866{ 867 Fault fault = NoFault; 868 if (cpsr.el == EL0) { 869 fault = checkForWFxTrap32(tc, EL1, isWfe); 870 } 871 872 if ((fault == NoFault) && 873 ArmSystem::haveEL(tc, EL2) && !inSecureState(scr, cpsr) && 874 ((cpsr.el == EL0) || (cpsr.el == EL1))) { 875 876 fault = checkForWFxTrap32(tc, EL2, isWfe); 877 } 878 879 if ((fault == NoFault) && 880 ArmSystem::haveEL(tc, EL3) && cpsr.el != EL3) { 881 fault = checkForWFxTrap32(tc, 
EL3, isWfe); 882 } 883 884 return fault; 885} 886 887Fault 888ArmStaticInst::checkSETENDEnabled(ThreadContext *tc, CPSR cpsr) const 889{ 890 bool setend_disabled(false); 891 ExceptionLevel pstateEL = (ExceptionLevel)(uint8_t)(cpsr.el); 892 893 if (pstateEL == EL2) { 894 setend_disabled = ((SCTLR)tc->readMiscRegNoEffect(MISCREG_HSCTLR)).sed; 895 } else { 896 // Please note: in the armarm pseudocode there is a distinction 897 // whether EL1 is aarch32 or aarch64: 898 // if ELUsingAArch32(EL1) then SCTLR.SED else SCTLR[].SED; 899 // Considering that SETEND is aarch32 only, ELUsingAArch32(EL1) 900 // will always be true (hence using SCTLR.SED) except for 901 // instruction executed at EL0, and with an AArch64 EL1. 902 // In this case SCTLR_EL1 will be used. In gem5 the register is 903 // mapped to SCTLR_ns. We can safely use SCTLR and choose the 904 // appropriate bank version. 905 906 // Get the index of the banked version of SCTLR: 907 // SCTLR_s or SCTLR_ns. 908 auto banked_sctlr = snsBankedIndex( 909 MISCREG_SCTLR, tc, !inSecureState(tc)); 910 911 // SCTLR.SED bit is enabling/disabling the ue of SETEND instruction. 912 setend_disabled = ((SCTLR)tc->readMiscRegNoEffect(banked_sctlr)).sed; 913 } 914 915 return setend_disabled ? undefinedFault32(tc, pstateEL) : 916 NoFault; 917} 918 919Fault 920ArmStaticInst::undefinedFault32(ThreadContext *tc, 921 ExceptionLevel pstateEL) const 922{ 923 // Even if we are running in aarch32, the fault might be dealt with in 924 // aarch64 ISA. 925 if (generalExceptionsToAArch64(tc, pstateEL)) { 926 return undefinedFault64(tc, pstateEL); 927 } else { 928 // Please note: according to the ARM ARM pseudocode we should handle 929 // the case when EL2 is aarch64 and HCR.TGE is 1 as well. 930 // However this case is already handled by the routeToHyp method in 931 // ArmFault class. 
932 return std::make_shared<UndefinedInstruction>( 933 machInst, 0, 934 EC_UNKNOWN, mnemonic); 935 } 936} 937 938Fault 939ArmStaticInst::undefinedFault64(ThreadContext *tc, 940 ExceptionLevel pstateEL) const 941{ 942 switch (pstateEL) { 943 case EL0: 944 case EL1: 945 return std::make_shared<SupervisorTrap>(machInst, 0, EC_UNKNOWN); 946 case EL2: 947 return std::make_shared<HypervisorTrap>(machInst, 0, EC_UNKNOWN); 948 case EL3: 949 return std::make_shared<SecureMonitorTrap>(machInst, 0, EC_UNKNOWN); 950 default: 951 panic("Unrecognized Exception Level: %d\n", pstateEL); 952 break; 953 } 954 955 return NoFault; 956} 957 958static uint8_t 959getRestoredITBits(ThreadContext *tc, CPSR spsr) 960{ 961 // See: shared/functions/system/RestoredITBits in the ARM ARM 962 963 const ExceptionLevel el = opModeToEL((OperatingMode) (uint8_t)spsr.mode); 964 const uint8_t it = itState(spsr); 965 966 if (!spsr.t || spsr.il) 967 return 0; 968 969 // The IT bits are forced to zero when they are set to a reserved 970 // value. 971 if (bits(it, 7, 4) != 0 && bits(it, 3, 0) == 0) 972 return 0; 973 974 const bool itd = el == EL2 ? 975 ((SCTLR)tc->readMiscReg(MISCREG_HSCTLR)).itd : 976 ((SCTLR)tc->readMiscReg(MISCREG_SCTLR)).itd; 977 978 // The IT bits are forced to zero when returning to A32 state, or 979 // when returning to an EL with the ITD bit set to 1, and the IT 980 // bits are describing a multi-instruction block. 
981 if (itd && bits(it, 2, 0) != 0) 982 return 0; 983 984 return it; 985} 986 987static bool 988illegalExceptionReturn(ThreadContext *tc, CPSR cpsr, CPSR spsr) 989{ 990 const OperatingMode mode = (OperatingMode) (uint8_t)spsr.mode; 991 if (unknownMode(mode)) 992 return true; 993 994 const OperatingMode cur_mode = (OperatingMode) (uint8_t)cpsr.mode; 995 const ExceptionLevel target_el = opModeToEL(mode); 996 997 HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2)); 998 SCR scr = ((SCR)tc->readMiscReg(MISCREG_SCR_EL3)); 999 1000 if (target_el > opModeToEL(cur_mode)) 1001 return true; 1002 1003 if (!ArmSystem::haveEL(tc, target_el)) 1004 return true; 1005 1006 if (target_el == EL1 && ArmSystem::haveEL(tc, EL2) && scr.ns && hcr.tge) 1007 return true; 1008 1009 if (target_el == EL2 && ArmSystem::haveEL(tc, EL3) && !scr.ns) 1010 return true; 1011 1012 bool spsr_mode_is_aarch32 = (spsr.width == 1); 1013 bool known, target_el_is_aarch32; 1014 std::tie(known, target_el_is_aarch32) = ELUsingAArch32K(tc, target_el); 1015 assert(known || (target_el == EL0 && ELIs64(tc, EL1))); 1016 1017 if (known && (spsr_mode_is_aarch32 != target_el_is_aarch32)) 1018 return true; 1019 1020 if (!spsr.width) { 1021 // aarch64 1022 if (!ArmSystem::highestELIs64(tc)) 1023 return true; 1024 if (spsr & 0x2) 1025 return true; 1026 if (target_el == EL0 && spsr.sp) 1027 return true; 1028 } else { 1029 // aarch32 1030 return unknownMode32(mode); 1031 } 1032 1033 return false; 1034} 1035 1036CPSR 1037ArmStaticInst::getPSTATEFromPSR(ThreadContext *tc, CPSR cpsr, CPSR spsr) const 1038{ 1039 CPSR new_cpsr = 0; 1040 1041 // gem5 doesn't implement single-stepping, so force the SS bit to 1042 // 0. 1043 new_cpsr.ss = 0; 1044 1045 if (illegalExceptionReturn(tc, cpsr, spsr)) { 1046 // If the SPSR specifies an illegal exception return, 1047 // then PSTATE.{M, nRW, EL, SP} are unchanged and PSTATE.IL 1048 // is set to 1. 
1049 new_cpsr.il = 1; 1050 if (cpsr.width) { 1051 new_cpsr.mode = cpsr.mode; 1052 } else { 1053 new_cpsr.width = cpsr.width; 1054 new_cpsr.el = cpsr.el; 1055 new_cpsr.sp = cpsr.sp; 1056 } 1057 } else { 1058 new_cpsr.il = spsr.il; 1059 if (spsr.width && unknownMode32((OperatingMode)(uint8_t)spsr.mode)) { 1060 new_cpsr.il = 1; 1061 } else if (spsr.width) { 1062 new_cpsr.mode = spsr.mode; 1063 } else { 1064 new_cpsr.el = spsr.el; 1065 new_cpsr.sp = spsr.sp; 1066 } 1067 } 1068 1069 new_cpsr.nz = spsr.nz; 1070 new_cpsr.c = spsr.c; 1071 new_cpsr.v = spsr.v; 1072 if (new_cpsr.width) { 1073 // aarch32 1074 const ITSTATE it = getRestoredITBits(tc, spsr); 1075 new_cpsr.q = spsr.q; 1076 new_cpsr.ge = spsr.ge; 1077 new_cpsr.e = spsr.e; 1078 new_cpsr.aif = spsr.aif; 1079 new_cpsr.t = spsr.t; 1080 new_cpsr.it2 = it.top6; 1081 new_cpsr.it1 = it.bottom2; 1082 } else { 1083 // aarch64 1084 new_cpsr.daif = spsr.daif; 1085 } 1086 1087 return new_cpsr; 1088} 1089 1090bool 1091ArmStaticInst::generalExceptionsToAArch64(ThreadContext *tc, 1092 ExceptionLevel pstateEL) const 1093{ 1094 // Returns TRUE if exceptions normally routed to EL1 are being handled 1095 // at an Exception level using AArch64, because either EL1 is using 1096 // AArch64 or TGE is in force and EL2 is using AArch64. 1097 HCR hcr = ((HCR)tc->readMiscReg(MISCREG_HCR_EL2)); 1098 return (pstateEL == EL0 && !ELIs32(tc, EL1)) || 1099 (ArmSystem::haveEL(tc, EL2) && !inSecureState(tc) && 1100 !ELIs32(tc, EL2) && hcr.tge); 1101} 1102 1103 1104}
|