access_map_pattern_matching.cc (13554:f16adb9b35cc) access_map_pattern_matching.cc (13700:56fa28e6fab4)
1/**
2 * Copyright (c) 2018 Metempsy Technology Consulting
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are
7 * met: redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer;

--- 18 unchanged lines hidden (view full) ---

27 *
28 * Authors: Javier Bueno
29 */
30
31#include "mem/cache/prefetch/access_map_pattern_matching.hh"
32
33#include "debug/HWPrefetch.hh"
34#include "mem/cache/prefetch/associative_set_impl.hh"
1/**
2 * Copyright (c) 2018 Metempsy Technology Consulting
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are
7 * met: redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer;

--- 18 unchanged lines hidden (view full) ---

27 *
28 * Authors: Javier Bueno
29 */
30
31#include "mem/cache/prefetch/access_map_pattern_matching.hh"
32
33#include "debug/HWPrefetch.hh"
34#include "mem/cache/prefetch/associative_set_impl.hh"
35#include "params/AccessMapPatternMatchingPrefetcher.hh"
35#include "params/AMPMPrefetcher.hh"
36#include "params/AccessMapPatternMatching.hh"
36
37
37AccessMapPatternMatchingPrefetcher::AccessMapPatternMatchingPrefetcher(
38 const AccessMapPatternMatchingPrefetcherParams *p)
39 : QueuedPrefetcher(p),
38AccessMapPatternMatching::AccessMapPatternMatching(
39 const AccessMapPatternMatchingParams *p)
40 : ClockedObject(p), blkSize(p->block_size), limitStride(p->limit_stride),
40 startDegree(p->start_degree), hotZoneSize(p->hot_zone_size),
41 highCoverageThreshold(p->high_coverage_threshold),
42 lowCoverageThreshold(p->low_coverage_threshold),
43 highAccuracyThreshold(p->high_accuracy_threshold),
44 lowAccuracyThreshold(p->low_accuracy_threshold),
45 highCacheHitThreshold(p->high_cache_hit_threshold),
46 lowCacheHitThreshold(p->low_cache_hit_threshold),
47 epochCycles(p->epoch_cycles),

--- 9 unchanged lines hidden (view full) ---

57 fatal_if(!isPowerOf2(hotZoneSize),
58 "the hot zone size must be a power of 2");
59 if (!epochEvent.scheduled()) {
60 schedule(epochEvent, clockEdge(epochCycles));
61 }
62}
63
64void
41 startDegree(p->start_degree), hotZoneSize(p->hot_zone_size),
42 highCoverageThreshold(p->high_coverage_threshold),
43 lowCoverageThreshold(p->low_coverage_threshold),
44 highAccuracyThreshold(p->high_accuracy_threshold),
45 lowAccuracyThreshold(p->low_accuracy_threshold),
46 highCacheHitThreshold(p->high_cache_hit_threshold),
47 lowCacheHitThreshold(p->low_cache_hit_threshold),
48 epochCycles(p->epoch_cycles),

--- 9 unchanged lines hidden (view full) ---

58 fatal_if(!isPowerOf2(hotZoneSize),
59 "the hot zone size must be a power of 2");
60 if (!epochEvent.scheduled()) {
61 schedule(epochEvent, clockEdge(epochCycles));
62 }
63}
64
65void
65AccessMapPatternMatchingPrefetcher::processEpochEvent()
66AccessMapPatternMatching::processEpochEvent()
66{
67 schedule(epochEvent, clockEdge(epochCycles));
68 double prefetch_accuracy =
69 ((double) numGoodPrefetches) / ((double) numTotalPrefetches);
70 double prefetch_coverage =
71 ((double) numGoodPrefetches) / ((double) numRawCacheMisses);
72 double cache_hit_ratio = ((double) numRawCacheHits) /
73 ((double) (numRawCacheHits + numRawCacheMisses));

--- 16 unchanged lines hidden (view full) ---

90 degree = std::min((unsigned) memory_bandwidth, usefulDegree);
91 // reset epoch stats
92 numGoodPrefetches = 0.0;
93 numTotalPrefetches = 0.0;
94 numRawCacheMisses = 0.0;
95 numRawCacheHits = 0.0;
96}
97
67{
68 schedule(epochEvent, clockEdge(epochCycles));
69 double prefetch_accuracy =
70 ((double) numGoodPrefetches) / ((double) numTotalPrefetches);
71 double prefetch_coverage =
72 ((double) numGoodPrefetches) / ((double) numRawCacheMisses);
73 double cache_hit_ratio = ((double) numRawCacheHits) /
74 ((double) (numRawCacheHits + numRawCacheMisses));

--- 16 unchanged lines hidden (view full) ---

91 degree = std::min((unsigned) memory_bandwidth, usefulDegree);
92 // reset epoch stats
93 numGoodPrefetches = 0.0;
94 numTotalPrefetches = 0.0;
95 numRawCacheMisses = 0.0;
96 numRawCacheHits = 0.0;
97}
98
98AccessMapPatternMatchingPrefetcher::AccessMapEntry *
99AccessMapPatternMatchingPrefetcher::getAccessMapEntry(Addr am_addr,
99AccessMapPatternMatching::AccessMapEntry *
100AccessMapPatternMatching::getAccessMapEntry(Addr am_addr,
100 bool is_secure)
101{
102 AccessMapEntry *am_entry = accessMapTable.findEntry(am_addr, is_secure);
103 if (am_entry != nullptr) {
104 accessMapTable.accessEntry(am_entry);
105 } else {
106 am_entry = accessMapTable.findVictim(am_addr);
107 assert(am_entry != nullptr);
108
109 accessMapTable.insertEntry(am_addr, is_secure, am_entry);
110 }
111 return am_entry;
112}
113
114void
101 bool is_secure)
102{
103 AccessMapEntry *am_entry = accessMapTable.findEntry(am_addr, is_secure);
104 if (am_entry != nullptr) {
105 accessMapTable.accessEntry(am_entry);
106 } else {
107 am_entry = accessMapTable.findVictim(am_addr);
108 assert(am_entry != nullptr);
109
110 accessMapTable.insertEntry(am_addr, is_secure, am_entry);
111 }
112 return am_entry;
113}
114
115void
115AccessMapPatternMatchingPrefetcher::setEntryState(AccessMapEntry &entry,
116AccessMapPatternMatching::setEntryState(AccessMapEntry &entry,
116 Addr block, enum AccessMapState state)
117{
118 enum AccessMapState old = entry.states[block];
119 entry.states[block] = state;
120
121 //do not update stats when initializing
122 if (state == AM_INIT) return;
123

--- 18 unchanged lines hidden (view full) ---

142 break;
143 default:
144 panic("Impossible path\n");
145 break;
146 }
147}
148
149void
117 Addr block, enum AccessMapState state)
118{
119 enum AccessMapState old = entry.states[block];
120 entry.states[block] = state;
121
122 //do not update stats when initializing
123 if (state == AM_INIT) return;
124

--- 18 unchanged lines hidden (view full) ---

143 break;
144 default:
145 panic("Impossible path\n");
146 break;
147 }
148}
149
150void
150AccessMapPatternMatchingPrefetcher::calculatePrefetch(const PrefetchInfo &pfi,
151 std::vector<AddrPriority> &addresses)
151AccessMapPatternMatching::calculatePrefetch(
152 const BasePrefetcher::PrefetchInfo &pfi,
153 std::vector<QueuedPrefetcher::AddrPriority> &addresses)
152{
153 assert(addresses.empty());
154 bool is_secure = pfi.isSecure();
155 Addr am_addr = pfi.getAddr() / hotZoneSize;
156 Addr current_block = (pfi.getAddr() % hotZoneSize) / blkSize;
157 uint64_t lines_per_zone = hotZoneSize / blkSize;
158
159 // Get the entries of the curent block (am_addr), the previous, and the

--- 29 unchanged lines hidden (view full) ---

189 * am_entry_prev->states => states[ 0 .. lines_per_zone-1]
190 * am_entry_curr->states => states[ lines_per_zone .. 2*lines_per_zone-1]
191 * am_entry_next->states => states[2*lines_per_zone .. 3*lines_per_zone-1]
192 */
193
194 // index of the current_block in the new vector
195 Addr states_current_block = current_block + lines_per_zone;
196 // consider strides 1..lines_per_zone/2
154{
155 assert(addresses.empty());
156 bool is_secure = pfi.isSecure();
157 Addr am_addr = pfi.getAddr() / hotZoneSize;
158 Addr current_block = (pfi.getAddr() % hotZoneSize) / blkSize;
159 uint64_t lines_per_zone = hotZoneSize / blkSize;
160
161 // Get the entries of the curent block (am_addr), the previous, and the

--- 29 unchanged lines hidden (view full) ---

191 * am_entry_prev->states => states[ 0 .. lines_per_zone-1]
192 * am_entry_curr->states => states[ lines_per_zone .. 2*lines_per_zone-1]
193 * am_entry_next->states => states[2*lines_per_zone .. 3*lines_per_zone-1]
194 */
195
196 // index of the current_block in the new vector
197 Addr states_current_block = current_block + lines_per_zone;
198 // consider strides 1..lines_per_zone/2
197 for (int stride = 1; stride < lines_per_zone/2; stride += 1) {
199 int max_stride = limitStride == 0 ? lines_per_zone / 2 : limitStride + 1;
200 for (int stride = 1; stride < max_stride; stride += 1) {
198 // Test accessed positive strides
199 if (checkCandidate(states, states_current_block, stride)) {
200 // candidate found, current_block - stride
201 Addr pf_addr;
202 if (stride > current_block) {
203 // The index (current_block - stride) falls in the range of
204 // the previous zone (am_entry_prev), adjust the address
205 // accordingly
206 Addr blk = states_current_block - stride;
207 pf_addr = (am_addr - 1) * hotZoneSize + blk * blkSize;
208 setEntryState(*am_entry_prev, blk, AM_PREFETCH);
209 } else {
210 // The index (current_block - stride) falls within
211 // am_entry_curr
212 Addr blk = current_block - stride;
213 pf_addr = am_addr * hotZoneSize + blk * blkSize;
214 setEntryState(*am_entry_curr, blk, AM_PREFETCH);
215 }
201 // Test accessed positive strides
202 if (checkCandidate(states, states_current_block, stride)) {
203 // candidate found, current_block - stride
204 Addr pf_addr;
205 if (stride > current_block) {
206 // The index (current_block - stride) falls in the range of
207 // the previous zone (am_entry_prev), adjust the address
208 // accordingly
209 Addr blk = states_current_block - stride;
210 pf_addr = (am_addr - 1) * hotZoneSize + blk * blkSize;
211 setEntryState(*am_entry_prev, blk, AM_PREFETCH);
212 } else {
213 // The index (current_block - stride) falls within
214 // am_entry_curr
215 Addr blk = current_block - stride;
216 pf_addr = am_addr * hotZoneSize + blk * blkSize;
217 setEntryState(*am_entry_curr, blk, AM_PREFETCH);
218 }
216 addresses.push_back(AddrPriority(pf_addr, 0));
219 addresses.push_back(QueuedPrefetcher::AddrPriority(pf_addr, 0));
217 if (addresses.size() == degree) {
218 break;
219 }
220 }
221
222 // Test accessed negative strides
223 if (checkCandidate(states, states_current_block, -stride)) {
224 // candidate found, current_block + stride

--- 7 unchanged lines hidden (view full) ---

232 setEntryState(*am_entry_next, blk, AM_PREFETCH);
233 } else {
234 // The index (current_block + stride) falls within
235 // am_entry_curr
236 Addr blk = current_block + stride;
237 pf_addr = am_addr * hotZoneSize + blk * blkSize;
238 setEntryState(*am_entry_curr, blk, AM_PREFETCH);
239 }
220 if (addresses.size() == degree) {
221 break;
222 }
223 }
224
225 // Test accessed negative strides
226 if (checkCandidate(states, states_current_block, -stride)) {
227 // candidate found, current_block + stride

--- 7 unchanged lines hidden (view full) ---

235 setEntryState(*am_entry_next, blk, AM_PREFETCH);
236 } else {
237 // The index (current_block + stride) falls within
238 // am_entry_curr
239 Addr blk = current_block + stride;
240 pf_addr = am_addr * hotZoneSize + blk * blkSize;
241 setEntryState(*am_entry_curr, blk, AM_PREFETCH);
242 }
240 addresses.push_back(AddrPriority(pf_addr, 0));
243 addresses.push_back(QueuedPrefetcher::AddrPriority(pf_addr, 0));
241 if (addresses.size() == degree) {
242 break;
243 }
244 }
245 }
246}
247
244 if (addresses.size() == degree) {
245 break;
246 }
247 }
248 }
249}
250
248AccessMapPatternMatchingPrefetcher*
249AccessMapPatternMatchingPrefetcherParams::create()
251AccessMapPatternMatching*
252AccessMapPatternMatchingParams::create()
250{
253{
251 return new AccessMapPatternMatchingPrefetcher(this);
254 return new AccessMapPatternMatching(this);
252}
255}
256
// AMPMPrefetcher: thin QueuedPrefetcher front-end for the AMPM algorithm.
// The actual access-map state machine lives in a separate
// AccessMapPatternMatching object supplied via the params; `ampm` is a
// reference bound to *p->ampm, so this prefetcher does not own that
// object (presumably owned by the params/config layer — verify in the .hh).
257AMPMPrefetcher::AMPMPrefetcher(const AMPMPrefetcherParams *p)
258    : QueuedPrefetcher(p), ampm(*p->ampm)
259{
260}
261
// Generate prefetch candidates for the access described by `pfi`.
// Pure delegation: all pattern-matching logic and state is in the shared
// AccessMapPatternMatching object, which appends (address, priority)
// pairs to `addresses`.
262void
263AMPMPrefetcher::calculatePrefetch(const PrefetchInfo &pfi,
264    std::vector<AddrPriority> &addresses)
265{
266    ampm.calculatePrefetch(pfi, addresses);
267}
268
// Params factory hook: construct the AMPMPrefetcher SimObject from its
// generated params class (same create() idiom used elsewhere in this file).
269AMPMPrefetcher*
270AMPMPrefetcherParams::create()
271{
272    return new AMPMPrefetcher(this);
273}