/*
 * Copyright (c) 2010-2013, 2016, 2019 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2001-2005 The Regents of The University of Michigan
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 */

#ifndef __ARCH_ARM_TLB_HH__
#define __ARCH_ARM_TLB_HH__

#include "arch/arm/isa_traits.hh"
#include "arch/arm/pagetable.hh"
#include "arch/arm/utility.hh"
#include "arch/arm/vtophys.hh"
#include "arch/generic/tlb.hh"
#include "base/statistics.hh"
#include "mem/request.hh"
#include "params/ArmTLB.hh"
#include "sim/probe/pmu.hh"

class ThreadContext;

namespace ArmISA {

class TableWalker;
class Stage2LookUp;
class Stage2MMU;
class TLB;

class TlbTestInterface
{
  public:
    TlbTestInterface() {}
    virtual ~TlbTestInterface() {}

    /**
     * Check if a TLB translation should be forced to fail.
     *
     * @param req Request requiring a translation.
     * @param is_priv Access from a privileged mode (i.e., not EL0)
     * @param mode Access type
     * @param domain Domain type
     */
    virtual Fault translationCheck(const RequestPtr &req, bool is_priv,
                                   BaseTLB::Mode mode,
                                   TlbEntry::DomainType domain) = 0;
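
    /*
     * Illustrative sketch (not part of the gem5 API, hypothetical class
     * name): a test harness that never forces a failure could implement
     * both checks, this one and walkCheck() below, as pass-throughs:
     *
     *   struct PassThroughTlbTest : public TlbTestInterface
     *   {
     *       Fault translationCheck(const RequestPtr &req, bool is_priv,
     *                              BaseTLB::Mode mode,
     *                              TlbEntry::DomainType domain) override
     *       { return NoFault; }
     *
     *       Fault walkCheck(Addr pa, Addr size, Addr va, bool is_secure,
     *                       Addr is_priv, BaseTLB::Mode mode,
     *                       TlbEntry::DomainType domain,
     *                       LookupLevel lookup_level) override
     *       { return NoFault; }
     *   };
     */
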
    /**
     * Check if a page table walker access should be forced to fail.
     *
     * @param pa Physical address the walker is accessing
     * @param size Walker access size
     * @param va Virtual address that initiated the walk
     * @param is_secure Access from secure state
     * @param is_priv Access from a privileged mode (i.e., not EL0)
     * @param mode Access type
     * @param domain Domain type
     * @param lookup_level Page table walker level
     */
    virtual Fault walkCheck(Addr pa, Addr size, Addr va, bool is_secure,
                            Addr is_priv, BaseTLB::Mode mode,
                            TlbEntry::DomainType domain,
                            LookupLevel lookup_level) = 0;
};

class TLB : public BaseTLB
{
  public:
    enum ArmFlags {
        AlignmentMask = 0x7,

        AlignByte = 0x0,
        AlignHalfWord = 0x1,
        AlignWord = 0x2,
        AlignDoubleWord = 0x3,
        AlignQuadWord = 0x4,
        AlignOctWord = 0x5,

        AllowUnaligned = 0x8,
        // Priv code operating as if it wasn't
        UserMode = 0x10,
        // Because zero otherwise looks like a valid setting and may be used
        // accidentally, this bit must be non-zero to show it was used on
        // purpose.
        MustBeOne = 0x40
    };

    enum ArmTranslationType {
        NormalTran = 0,
        S1CTran = 0x1,
        HypMode = 0x2,
        // Secure code operating as if it wasn't (required by some Address
        // Translate operations)
        S1S2NsTran = 0x4,
        // Address translation instructions (e.g. AT S1E0R_Xt) need to be
        // handled in special ways during translation because they could
        // need to act like a different EL than the current EL. The
        // following flags are for these instructions.
        S1E0Tran = 0x8,
        S1E1Tran = 0x10,
        S1E2Tran = 0x20,
        S1E3Tran = 0x40,
        S12E0Tran = 0x80,
        S12E1Tran = 0x100
    };

    /**
     * Determine the EL to use for the purpose of a translation given
     * a specific translation type. If the translation type doesn't
     * specify an EL, we use the current EL.
     */
    static ExceptionLevel tranTypeEL(CPSR cpsr, ArmTranslationType type);
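
    // Illustrative sketch of the mapping implied by the comment above (the
    // actual switch lives in the implementation file):
    //
    //   tranTypeEL(cpsr, S1E2Tran);   // treated as EL2, whatever the
    //                                 // current EL is
    //   tranTypeEL(cpsr, NormalTran); // the EL encoded in the given CPSR
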
  protected:
    TlbEntry* table;     // the Page Table
    int size;            // TLB Size
    bool isStage2;       // Indicates this TLB is part of the second stage MMU
    bool stage2Req;      // Indicates whether a stage 2 lookup is also required
    // Indicates whether a stage 2 lookup of the table descriptors is
    // required. Certain address translation instructions will intercept
    // the IPA but the table descriptors still need to be translated by
    // stage 2.
    bool stage2DescReq;
    uint64_t _attr;      // Memory attributes for last accessed TLB entry
    bool directToStage2; // Indicates whether all translation requests should
                         // be routed directly to the stage 2 TLB

    TableWalker *tableWalker;
    TLB *stage2Tlb;
    Stage2MMU *stage2Mmu;

    TlbTestInterface *test;

    // Access Stats
    mutable Stats::Scalar instHits;
    mutable Stats::Scalar instMisses;
    mutable Stats::Scalar readHits;
    mutable Stats::Scalar readMisses;
    mutable Stats::Scalar writeHits;
    mutable Stats::Scalar writeMisses;
    mutable Stats::Scalar inserts;
    mutable Stats::Scalar flushTlb;
    mutable Stats::Scalar flushTlbMva;
    mutable Stats::Scalar flushTlbMvaAsid;
    mutable Stats::Scalar flushTlbAsid;
    mutable Stats::Scalar flushedEntries;
    mutable Stats::Scalar alignFaults;
    mutable Stats::Scalar prefetchFaults;
    mutable Stats::Scalar domainFaults;
    mutable Stats::Scalar permsFaults;

    Stats::Formula readAccesses;
    Stats::Formula writeAccesses;
    Stats::Formula instAccesses;
    Stats::Formula hits;
    Stats::Formula misses;
    Stats::Formula accesses;

    /** PMU probe for TLB refills */
    ProbePoints::PMUUPtr ppRefills;

    int rangeMRU; // On lookup, only move entries ahead when outside rangeMRU

  public:
    TLB(const ArmTLBParams *p);
    TLB(const Params *p, int _size, TableWalker *_walker);

    /** Lookup an entry in the TLB
     * @param vpn virtual address
     * @param asn context id/address space id to use
     * @param vmid The virtual machine ID used for stage 2 translation
     * @param secure if the lookup is secure
     * @param hyp if the lookup is done from hyp mode
     * @param functional if the lookup should not modify internal state
     *        (i.e., a functional lookup)
     * @param ignore_asn if the lookup should ignore the asn
     * @param target_el the exception level to match in the lookup
     * @return pointer to TLB entry if it exists
     */
    TlbEntry *lookup(Addr vpn, uint16_t asn, uint8_t vmid, bool hyp,
                     bool secure, bool functional,
                     bool ignore_asn, ExceptionLevel target_el);

    virtual ~TLB();

    void takeOverFrom(BaseTLB *otlb) override;

    /// setup all the back pointers
    void init() override;

    void setTestInterface(SimObject *ti);

    TableWalker *getTableWalker() { return tableWalker; }

    void setMMU(Stage2MMU *m, MasterID master_id);

    int getsize() const { return size; }

    void insert(Addr vaddr, TlbEntry &pte);

    Fault getTE(TlbEntry **te, const RequestPtr &req,
                ThreadContext *tc, Mode mode,
                Translation *translation, bool timing, bool functional,
                bool is_secure, ArmTranslationType tranType);

    Fault getResultTe(TlbEntry **te, const RequestPtr &req,
                      ThreadContext *tc, Mode mode,
                      Translation *translation, bool timing,
                      bool functional, TlbEntry *mergeTe);

    Fault checkPermissions(TlbEntry *te, const RequestPtr &req, Mode mode);
    Fault checkPermissions64(TlbEntry *te, const RequestPtr &req, Mode mode,
                             ThreadContext *tc);
    bool checkPAN(ThreadContext *tc, uint8_t ap, const RequestPtr &req,
                  Mode mode);

    /** Reset the entire TLB
     * @param secure_lookup if the operation affects the secure world
     */
    void flushAllSecurity(bool secure_lookup, ExceptionLevel target_el,
                          bool ignore_el = false);

    /** Remove all entries in the non secure world, depending on whether they
     *  were allocated in hyp mode or not
     */
    void flushAllNs(ExceptionLevel target_el, bool ignore_el = false);
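
    // Illustrative example (one possible caller, not the only one): a
    // TLBIALL-style maintenance operation from non-secure EL1 could be
    // modelled as
    //
    //   tlb->flushAllSecurity(false, EL1);
    //
    // where tlb is a pointer to this TLB; the secure variant passes
    // secure_lookup = true.
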
    /** Reset the entire TLB. Used for CPU switching to prevent stale
     *  translations after multiple switches
     */
    void flushAll() override
    {
        flushAllSecurity(false, EL0, true);
        flushAllSecurity(true, EL0, true);
    }

    /** Remove any entries that match both a va and asn
     * @param mva virtual address to flush
     * @param asn contextid/asn to flush on match
     * @param secure_lookup if the operation affects the secure world
     */
    void flushMvaAsid(Addr mva, uint64_t asn, bool secure_lookup,
                      ExceptionLevel target_el);

    /** Remove any entries that match the asn
     * @param asn contextid/asn to flush on match
     * @param secure_lookup if the operation affects the secure world
     */
    void flushAsid(uint64_t asn, bool secure_lookup,
                   ExceptionLevel target_el);

    /** Remove all entries that match the va regardless of asn
     * @param mva address to flush from cache
     * @param secure_lookup if the operation affects the secure world
     */
    void flushMva(Addr mva, bool secure_lookup, ExceptionLevel target_el);

    /**
     * Invalidate all entries in the stage 2 TLB that match the given ipa
     * and the current VMID
     * @param ipa the address to invalidate
     * @param secure_lookup if the operation affects the secure world
     */
    void flushIpaVmid(Addr ipa, bool secure_lookup, ExceptionLevel target_el);

    Fault trickBoxCheck(const RequestPtr &req, Mode mode,
                        TlbEntry::DomainType domain);

    Fault walkTrickBoxCheck(Addr pa, bool is_secure, Addr va, Addr sz,
                            bool is_exec, bool is_write,
                            TlbEntry::DomainType domain,
                            LookupLevel lookup_level);

    void printTlb() const;

    void demapPage(Addr vaddr, uint64_t asn) override
    {
        // needed for x86 only
        panic("demapPage() is not implemented.\n");
    }

    /**
     * Do a functional lookup on the TLB (for debugging)
     * and don't modify any internal state
     * @param tc thread context to get the context id from
     * @param vaddr virtual address to translate
     * @param paddr returned physical address
     * @return true if the translation was successful
     */
    bool translateFunctional(ThreadContext *tc, Addr vaddr, Addr &paddr);
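
    // For example (illustrative), a debugger-style VA-to-PA query using
    // the getDTBPtr() helper defined at the end of this file:
    //
    //   Addr paddr;
    //   bool valid = getDTBPtr(tc)->translateFunctional(tc, vaddr, paddr);
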
    /**
     * Do a functional lookup on the TLB (for checker cpu) that
     * behaves like a normal lookup without modifying any page table state.
     */
    Fault translateFunctional(const RequestPtr &req, ThreadContext *tc,
                              Mode mode, ArmTranslationType tranType);
    Fault
    translateFunctional(const RequestPtr &req,
                        ThreadContext *tc, Mode mode) override
    {
        return translateFunctional(req, tc, mode, NormalTran);
    }

    /** Accessor functions for memory attributes for last accessed TLB entry
     */
    void
    setAttr(uint64_t attr)
    {
        _attr = attr;
    }

    uint64_t
    getAttr() const
    {
        return _attr;
    }

    Fault translateFs(const RequestPtr &req, ThreadContext *tc, Mode mode,
                      Translation *translation, bool &delay, bool timing,
                      ArmTranslationType tranType, bool functional = false);
    Fault translateSe(const RequestPtr &req, ThreadContext *tc, Mode mode,
                      Translation *translation, bool &delay, bool timing);
    Fault translateAtomic(const RequestPtr &req, ThreadContext *tc, Mode mode,
                          ArmTranslationType tranType);
    Fault
    translateAtomic(const RequestPtr &req,
                    ThreadContext *tc, Mode mode) override
    {
        return translateAtomic(req, tc, mode, NormalTran);
    }
    void translateTiming(
            const RequestPtr &req, ThreadContext *tc,
            Translation *translation, Mode mode,
            ArmTranslationType tranType);
    void
    translateTiming(const RequestPtr &req, ThreadContext *tc,
                    Translation *translation, Mode mode) override
    {
        translateTiming(req, tc, translation, mode, NormalTran);
    }
    Fault translateComplete(const RequestPtr &req, ThreadContext *tc,
                            Translation *translation, Mode mode,
                            ArmTranslationType tranType, bool callFromS2);
    Fault finalizePhysical(
            const RequestPtr &req,
            ThreadContext *tc, Mode mode) const override;

    void drainResume() override;

    // Checkpointing
    void serialize(CheckpointOut &cp) const override;
    void unserialize(CheckpointIn &cp) override;

    void regStats() override;

    void regProbePoints() override;

    /**
     * Get the table walker port. This is used for migrating
     * port connections during a CPU takeOverFrom() call. For
     * architectures that do not have a table walker, NULL is
     * returned, hence the use of a pointer rather than a
     * reference. For ARM this method will always return a valid port
     * pointer.
     *
     * @return A pointer to the walker master port
     */
    Port *getTableWalkerPort() override;

    // Caching of misc register values.
    // Writing to a misc register needs to invalidate the cached copies;
    // translateFunctional/translateSe/translateFs check whether they are
    // valid and call updateMiscReg() if necessary.
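    //
    // Illustrative example (the exact call sites live in the ISA code,
    // not in this header): after writing a TLB-relevant register such as
    // SCTLR or TTBCR, the writer is expected to call
    //
    //   getITBPtr(tc)->invalidateMiscReg();
    //   getDTBPtr(tc)->invalidateMiscReg();
    //
    // so that the next translation re-reads the registers through
    // updateMiscReg().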
  protected:
    CPSR cpsr;
    bool aarch64;
    ExceptionLevel aarch64EL;
    SCTLR sctlr;
    SCR scr;
    bool isPriv;
    bool isSecure;
    bool isHyp;
    TTBCR ttbcr;
    uint16_t asid;
    uint8_t vmid;
    PRRR prrr;
    NMRR nmrr;
    HCR hcr;
    uint32_t dacr;
    bool miscRegValid;
    ContextID miscRegContext;
    ArmTranslationType curTranType;

    // Cached copies of system-level properties
    bool haveLPAE;
    bool haveVirtualization;
    bool haveLargeAsid64;

    AddrRange m5opRange;

    void updateMiscReg(ThreadContext *tc,
                       ArmTranslationType tranType = NormalTran);

  public:
    const Params *
    params() const
    {
        return dynamic_cast<const Params *>(_params);
    }
    inline void invalidateMiscReg() { miscRegValid = false; }

  private:
    /** Remove any entries that match both a va and asn
     * @param mva virtual address to flush
     * @param asn contextid/asn to flush on match
     * @param secure_lookup if the operation affects the secure world
     * @param ignore_asn if the flush should ignore the asn
     */
    void _flushMva(Addr mva, uint64_t asn, bool secure_lookup,
                   bool ignore_asn, ExceptionLevel target_el);

  public: /* Testing */
    Fault testTranslation(const RequestPtr &req, Mode mode,
                          TlbEntry::DomainType domain);
    Fault testWalk(Addr pa, Addr size, Addr va, bool is_secure, Mode mode,
                   TlbEntry::DomainType domain,
                   LookupLevel lookup_level);
};

template<typename T>
TLB *
getITBPtr(T *tc)
{
    auto tlb = static_cast<TLB *>(tc->getITBPtr());
    assert(tlb);
    return tlb;
}

template<typename T>
TLB *
getDTBPtr(T *tc)
{
    auto tlb = static_cast<TLB *>(tc->getDTBPtr());
    assert(tlb);
    return tlb;
}

} // namespace ArmISA

#endif // __ARCH_ARM_TLB_HH__