lsq_unit_impl.hh (10824:308771bd2647 → 11097:da477ae38907)
 /*
  * Copyright (c) 2010-2014 ARM Limited
  * Copyright (c) 2013 Advanced Micro Devices, Inc.
  * All rights reserved
  *
  * The license below extends only to copyright in the software and shall
  * not be construed as granting a license to any other intellectual

--- 439 unchanged lines hidden (view full) ---

         TheISA::handleLockedSnoop(tc, pkt, cacheBlockMask);
         cpu->thread[x]->noSquashFromTC = no_squash;
     }

     Addr invalidate_addr = pkt->getAddr() & cacheBlockMask;

     DynInstPtr ld_inst = loadQueue[load_idx];
     if (ld_inst) {
-        Addr load_addr = ld_inst->physEffAddr & cacheBlockMask;
+        Addr load_addr_low = ld_inst->physEffAddrLow & cacheBlockMask;
+        Addr load_addr_high = ld_inst->physEffAddrHigh & cacheBlockMask;
+
         // Check that this snoop didn't just invalidate our lock flag
-        if (ld_inst->effAddrValid() && load_addr == invalidate_addr &&
-            ld_inst->memReqFlags & Request::LLSC)
+        if (ld_inst->effAddrValid() && (load_addr_low == invalidate_addr
+            || load_addr_high == invalidate_addr)
+            && ld_inst->memReqFlags & Request::LLSC)
             TheISA::handleLockedSnoopHit(ld_inst.get());
     }

     // If this is the only load in the LSQ we don't care
     if (load_idx == loadTail)
         return;

     incrLdIdx(load_idx);

     bool force_squash = false;

     while (load_idx != loadTail) {
         DynInstPtr ld_inst = loadQueue[load_idx];

         if (!ld_inst->effAddrValid() || ld_inst->strictlyOrdered()) {
             incrLdIdx(load_idx);
             continue;
         }

-        Addr load_addr = ld_inst->physEffAddr & cacheBlockMask;
+        Addr load_addr_low = ld_inst->physEffAddrLow & cacheBlockMask;
+        Addr load_addr_high = ld_inst->physEffAddrHigh & cacheBlockMask;
+
         DPRINTF(LSQUnit, "-- inst [sn:%lli] load_addr: %#x to pktAddr:%#x\n",
-                ld_inst->seqNum, load_addr, invalidate_addr);
+                ld_inst->seqNum, load_addr_low, invalidate_addr);

-        if (load_addr == invalidate_addr || force_squash) {
+        if ((load_addr_low == invalidate_addr
+            || load_addr_high == invalidate_addr) || force_squash) {
             if (needsTSO) {
                 // If we have a TSO system, as all loads must be ordered with
                 // all other loads, this load as well as *all* subsequent loads
                 // need to be squashed to prevent possible load reordering.
                 force_squash = true;
             }
             if (ld_inst->possibleLoadViolation() || force_squash) {
                 DPRINTF(LSQUnit, "Conflicting load at addr %#x [sn:%lli]\n",

--- 835 unchanged lines hidden ---
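The change above exists because a load whose access crosses a cache-line boundary touches two blocks, so the snoop check has to compare the invalidated block against both of them. Below is a minimal standalone sketch of that comparison, assuming 64-byte blocks; the LoadEntry struct, snoopHitsLoad() helper and block_mask parameter are hypothetical stand-ins for the LSQ state, not gem5 interfaces, and only the physEffAddrLow / physEffAddrHigh naming follows the diff.

// Minimal sketch (not gem5 code): a line-crossing load must compare both the
// block of its first byte and the block of its last byte against the
// invalidated block, otherwise a snoop on the second block would be missed.
#include <cstdint>
#include <iostream>

using Addr = std::uint64_t;

struct LoadEntry {                  // hypothetical, simplified LSQ entry
    Addr physEffAddrLow;            // address of the first byte accessed
    Addr physEffAddrHigh;           // address of the last byte (differs when split)
    bool effAddrValid;
};

bool snoopHitsLoad(const LoadEntry &ld, Addr invalidate_addr, Addr block_mask)
{
    if (!ld.effAddrValid)
        return false;
    Addr load_addr_low  = ld.physEffAddrLow  & block_mask;
    Addr load_addr_high = ld.physEffAddrHigh & block_mask;
    // Checking only load_addr_low (the pre-change behaviour) misses an
    // invalidation that hits the second block of a split access.
    return load_addr_low == invalidate_addr || load_addr_high == invalidate_addr;
}

int main()
{
    const Addr block_mask = ~Addr(63);            // 64-byte cache blocks
    LoadEntry ld{0x103C, 0x1043, true};           // 8-byte load crossing into 0x1040
    Addr invalidate_addr = 0x1040 & block_mask;   // snoop hits the second block
    std::cout << snoopHitsLoad(ld, invalidate_addr, block_mask) << '\n';   // prints 1
}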
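The needsTSO branch in the final hunk carries the ordering argument: under total store order every load is ordered with every other load, so once one in-flight load conflicts with the snoop, all younger loads behind it have to be replayed as well. A minimal sketch of that policy over a toy load queue follows; QueuedLoad, loadsToReplay() and the conflictsWithSnoop flag are hypothetical simplifications, not the gem5 LSQ interfaces.

// Minimal sketch (not gem5 code): under TSO, the first load that conflicts
// with a snoop drags every younger load into the replay set as well.
#include <cstdint>
#include <iostream>
#include <vector>

using InstSeqNum = std::uint64_t;

struct QueuedLoad {
    InstSeqNum seqNum;
    bool conflictsWithSnoop;    // stands in for the block-address comparison
};

std::vector<InstSeqNum>
loadsToReplay(const std::vector<QueuedLoad> &queue, bool needsTSO)
{
    std::vector<InstSeqNum> replay;
    bool force_squash = false;
    for (const QueuedLoad &ld : queue) {            // walked oldest to youngest
        if (ld.conflictsWithSnoop || force_squash) {
            if (needsTSO) {
                // All loads are ordered with all other loads, so younger
                // loads cannot be allowed to keep values read out of order.
                force_squash = true;
            }
            replay.push_back(ld.seqNum);
        }
    }
    return replay;
}

int main()
{
    std::vector<QueuedLoad> q{{10, false}, {11, true}, {12, false}, {13, false}};
    for (InstSeqNum sn : loadsToReplay(q, /*needsTSO=*/true))
        std::cout << sn << ' ';                     // prints: 11 12 13
    std::cout << '\n';
}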