/**
 * Copyright (c) 2019 Metempsy Technology Consulting
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ivan Pizarro
 */

/** Implementation of the 'Proactive Instruction Fetch' prefetcher
 * Reference:
 * Ferdman, M., Kaynak, C., & Falsafi, B. (2011, December).
 * Proactive instruction fetch.
 * In Proceedings of the 44th Annual IEEE/ACM International Symposium
 * on Microarchitecture (pp. 152-162). ACM.
 */

#ifndef __MEM_CACHE_PREFETCH_PIF_HH__
#define __MEM_CACHE_PREFETCH_PIF_HH__

#include <deque>
#include <vector>

#include "mem/cache/prefetch/associative_set.hh"
#include "mem/cache/prefetch/queued.hh"

struct PIFPrefetcherParams;

class PIFPrefetcher : public QueuedPrefetcher
{
  private:
    /** Number of preceding and subsequent spatial addresses to compact */
    const unsigned int precSize;
    const unsigned int succSize;
    /** Number of entries used for the temporal compactor */
    const unsigned int maxCompactorEntries;
    /** Max number of entries to be used in the Stream Address Buffer */
    const unsigned int maxStreamAddressBufferEntries;

    /**
     * The compactor tracks retired instruction addresses, leveraging the
     * spatial and temporal locality among instructions for compaction. It
     * comprises the spatial and temporal compaction mechanisms.
     *
     * Taking advantage of the spatial locality across instruction blocks,
     * the spatial compactor combines instruction-block addresses that fall
     * within a 'spatial region', a group of adjacent instruction blocks.
     * When an instruction outside the current spatial region retires, the
     * existing spatial region is sent to the temporal compactor.
     *
     * The temporal compactor tracks a small number of the
     * most-recently-observed spatial region records.
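     *
     * The following is only an illustrative sketch of the compaction idea,
     * not the actual implementation; compactExample and its parameter
     * names are hypothetical, and the indexing convention is an assumption
     * made for the example.
     *
     * @code
     * #include <cstdint>
     * #include <vector>
     *
     * // Fold 'addr' into the prec/succ bit vectors of a spatial region
     * // anchored at 'trigger', measured in cache blocks away from the
     * // trigger block. Returns false if 'addr' is outside the region.
     * bool
     * compactExample(uint64_t addr, uint64_t trigger,
     *                unsigned log_blk_size,
     *                std::vector<bool> &prec, std::vector<bool> &succ)
     * {
     *     const uint64_t addr_blk = addr >> log_blk_size;
     *     const uint64_t trig_blk = trigger >> log_blk_size;
     *     if (addr_blk == trig_blk)
     *         return true;               // same block as the trigger
     *     if (addr_blk < trig_blk) {     // preceding block
     *         const uint64_t d = trig_blk - addr_blk;
     *         if (d > prec.size())
     *             return false;          // outside the spatial region
     *         prec[d - 1] = true;
     *     } else {                       // subsequent block
     *         const uint64_t d = addr_blk - trig_blk;
     *         if (d > succ.size())
     *             return false;
     *         succ[d - 1] = true;
     *     }
     *     return true;
     * }
     * @endcode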
     */
    struct CompactorEntry {
        Addr trigger;
        std::vector<bool> prec;
        std::vector<bool> succ;
        CompactorEntry() {}
        CompactorEntry(Addr, unsigned int, unsigned int);

        /**
         * Checks if a given address is in the same spatial region as the
         * one defined by the compactor entry.
         * @param addr Address to check if it's inside the spatial region
         * @param log_blk_size log_2(block size of the cache)
         * @param update if true, set the corresponding succ/prec entry
         * @return TRUE if they are in the same spatial region, FALSE
         *         otherwise
         */
        bool inSameSpatialRegion(Addr addr, unsigned int log_blk_size,
                                 bool update);
        /**
         * Checks if the provided address is contained in this spatial
         * region and if its corresponding bit vector entry is set
         * @param target address to check
         * @param log_blk_size log_2(block size of the cache)
         * @return TRUE if target has its bit set
         */
        bool hasAddress(Addr target, unsigned int log_blk_size) const;

        /**
         * Fills the provided vector with the predicted addresses using the
         * recorded bit vectors of the entry
         * @param log_blk_size log_2(block size of the cache)
         * @param addresses reference to the vector where the generated
         *        addresses are added
         */
        void getPredictedAddresses(unsigned int log_blk_size,
                                   std::vector<AddrPriority> &addresses) const;
      private:
        /**
         * Computes the distance, in cache blocks, from an address to the
         * trigger of the entry.
         * @param addr address to compute the distance from the trigger
         * @param log_blk_size log_2(block size of the cache)
         * @return distance in cache blocks from the address to the trigger
         */
        Addr distanceFromTrigger(Addr addr,
                                 unsigned int log_blk_size) const;
    };

    CompactorEntry spatialCompactor;
    std::deque<CompactorEntry> temporalCompactor;

    /**
     * The history buffer is a circular buffer that stores the sequence of
     * retired instructions in FIFO order.
     */
    std::vector<CompactorEntry> historyBuffer;
    unsigned int historyBufferTail;

    struct IndexEntry : public TaggedEntry
    {
        unsigned int historyIndex;
    };
    /**
     * The index table is a small cache-like structure that facilitates
     * fast search of the history buffer.
     */
    AssociativeSet<IndexEntry> index;
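
    // Illustrative sketch only, using simplified standard-library
    // containers and hypothetical names (Record, history, idx, lookup);
    // the real structures are the members declared in this class. It shows
    // how an index table keyed by trigger address can locate a previously
    // recorded stream in the history buffer and seed a stream buffer:
    //
    //   #include <cstdint>
    //   #include <deque>
    //   #include <unordered_map>
    //   #include <vector>
    //
    //   struct Record { uint64_t trigger; };   // stand-in for a region
    //   std::vector<Record> history;           // circular in practice
    //   std::unordered_map<uint64_t, std::size_t> idx; // trigger -> pos
    //
    //   void record(const Record &r)
    //   {
    //       idx[r.trigger] = history.size();
    //       history.push_back(r);
    //   }
    //
    //   // Replay up to 'window' records recorded from the entry whose
    //   // trigger matches 'addr' onwards.
    //   std::deque<const Record *> lookup(uint64_t addr, std::size_t window)
    //   {
    //       std::deque<const Record *> sab;
    //       auto it = idx.find(addr);
    //       if (it == idx.end())
    //           return sab;
    //       for (std::size_t i = it->second;
    //            i < history.size() && sab.size() < window; ++i) {
    //           sab.push_back(&history[i]);
    //       }
    //       return sab;
    //   }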

    /**
     * A Stream Address Buffer (SAB) tracks a window of consecutive
     * spatial regions. The SAB maintains a pointer to the sequence in the
     * history buffer, initially set to the pointer taken from the index
     * table.
     */
    std::deque<CompactorEntry*> streamAddressBuffer;

    /**
     * Updates the prefetcher structures when an instruction is retired
     * @param pc PC of the instruction being retired
     */
    void notifyRetiredInst(const Addr pc);

    /**
     * Probe Listener to handle probe events from the CPU
     */
    class PrefetchListenerPC : public ProbeListenerArgBase<Addr>
    {
      public:
        PrefetchListenerPC(PIFPrefetcher &_parent, ProbeManager *pm,
                           const std::string &name)
            : ProbeListenerArgBase(pm, name),
              parent(_parent) {}
        void notify(const Addr& pc) override;
      protected:
        PIFPrefetcher &parent;
    };

    /** Array of probe listeners */
    std::vector<PrefetchListenerPC *> listenersPC;

  public:
    PIFPrefetcher(const PIFPrefetcherParams *p);
    ~PIFPrefetcher() {}

    void calculatePrefetch(const PrefetchInfo &pfi,
                           std::vector<AddrPriority> &addresses);

    /**
     * Add a SimObject and a probe name to monitor retired instructions
     * @param obj The SimObject pointer to listen from
     * @param name The probe name
     */
    void addEventProbeRetiredInsts(SimObject *obj, const char *name);
};

#endif // __MEM_CACHE_PREFETCH_PIF_HH__
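
// Illustrative sketch only (assumed wiring, not taken from the source
// file): the probe listener declared above is expected to do little more
// than forward each retired-instruction PC to the prefetcher, and
// addEventProbeRetiredInsts() to register one such listener on the named
// probe point of the given SimObject (typically a CPU's retired-instruction
// probe):
//
//   void
//   PIFPrefetcher::PrefetchListenerPC::notify(const Addr &pc)
//   {
//       parent.notifyRetiredInst(pc);
//   }
//
//   void
//   PIFPrefetcher::addEventProbeRetiredInsts(SimObject *obj,
//                                            const char *name)
//   {
//       listenersPC.push_back(
//           new PrefetchListenerPC(*this, obj->getProbeManager(), name));
//   }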