// amo.isa revision 12385:288c62455dde
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29// 30// Authors: Alec Roelke 31 32//////////////////////////////////////////////////////////////////// 33// 34// Atomic memory operation instructions 35// 36def template AtomicMemOpDeclare {{ 37 /** 38 * Static instruction class for an AtomicMemOp operation 39 */ 40 class %(class_name)s : public %(base_class)s 41 { 42 public: 43 // Constructor 44 %(class_name)s(ExtMachInst machInst); 45 46 protected: 47 48 class %(class_name)sLoad : public %(base_class)sMicro 49 { 50 public: 51 // Constructor 52 %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p); 53 54 Fault execute(ExecContext *, Trace::InstRecord *) const; 55 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 56 Fault completeAcc(PacketPtr, ExecContext *, 57 Trace::InstRecord *) const; 58 }; 59 60 class %(class_name)sStore : public %(base_class)sMicro 61 { 62 public: 63 // Constructor 64 %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p); 65 66 Fault execute(ExecContext *, Trace::InstRecord *) const; 67 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 68 Fault completeAcc(PacketPtr, ExecContext *, 69 Trace::InstRecord *) const; 70 }; 71 }; 72}}; 73 74def template LRSCConstructor {{ 75 %(class_name)s::%(class_name)s(ExtMachInst machInst): 76 %(base_class)s("%(mnemonic)s", machInst, %(op_class)s) 77 { 78 %(constructor)s; 79 if (AQ) 80 memAccessFlags = memAccessFlags | Request::ACQUIRE; 81 if (RL) 82 memAccessFlags = memAccessFlags | Request::RELEASE; 83 } 84}}; 85 86def template AtomicMemOpMacroConstructor {{ 87 %(class_name)s::%(class_name)s(ExtMachInst machInst) 88 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s) 89 { 90 %(constructor)s; 91 microops = {new %(class_name)sLoad(machInst, this), 92 new %(class_name)sStore(machInst, this)}; 93 } 94}}; 95 96def template AtomicMemOpLoadConstructor {{ 97 %(class_name)s::%(class_name)sLoad::%(class_name)sLoad( 98 ExtMachInst machInst, %(class_name)s *_p) 99 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s) 100 
{ 101 %(constructor)s; 102 flags[IsFirstMicroop] = true; 103 flags[IsDelayedCommit] = true; 104 if (AQ) 105 memAccessFlags = Request::ACQUIRE; 106 } 107}}; 108 109def template AtomicMemOpStoreConstructor {{ 110 %(class_name)s::%(class_name)sStore::%(class_name)sStore( 111 ExtMachInst machInst, %(class_name)s *_p) 112 : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s) 113 { 114 %(constructor)s; 115 flags[IsLastMicroop] = true; 116 flags[IsNonSpeculative] = true; 117 if (RL) 118 memAccessFlags = Request::RELEASE; 119 } 120}}; 121 122def template StoreCondExecute {{ 123 Fault %(class_name)s::execute(ExecContext *xc, 124 Trace::InstRecord *traceData) const 125 { 126 Addr EA; 127 Fault fault = NoFault; 128 uint64_t result; 129 130 %(op_decl)s; 131 %(op_rd)s; 132 %(ea_code)s; 133 134 if (fault == NoFault) { 135 %(memacc_code)s; 136 } 137 138 if (fault == NoFault) { 139 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags, 140 &result); 141 // RISC-V has the opposite convention gem5 has for success flags, 142 // so we invert the result here. 
143 result = !result; 144 } 145 146 if (fault == NoFault) { 147 %(postacc_code)s; 148 } 149 150 if (fault == NoFault) { 151 %(op_wb)s; 152 } 153 154 return fault; 155 } 156}}; 157 158def template AtomicMemOpLoadExecute {{ 159 Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc, 160 Trace::InstRecord *traceData) const 161 { 162 Addr EA; 163 Fault fault = NoFault; 164 165 %(op_decl)s; 166 %(op_rd)s; 167 %(ea_code)s; 168 169 if (fault == NoFault) { 170 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags); 171 } 172 173 if (fault == NoFault) { 174 %(code)s; 175 } 176 177 if (fault == NoFault) { 178 %(op_wb)s; 179 } 180 181 return fault; 182 } 183}}; 184 185def template AtomicMemOpStoreExecute {{ 186 Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc, 187 Trace::InstRecord *traceData) const 188 { 189 Addr EA; 190 Fault fault = NoFault; 191 192 %(op_decl)s; 193 %(op_rd)s; 194 %(ea_code)s; 195 196 if (fault == NoFault) { 197 %(code)s; 198 } 199 200 if (fault == NoFault) { 201 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags, 202 nullptr); 203 } 204 205 if (fault == NoFault) { 206 %(op_wb)s; 207 } 208 209 return fault; 210 } 211}}; 212 213def template AtomicMemOpLoadInitiateAcc {{ 214 Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc, 215 Trace::InstRecord *traceData) const 216 { 217 Addr EA; 218 Fault fault = NoFault; 219 220 %(op_src_decl)s; 221 %(op_rd)s; 222 %(ea_code)s; 223 224 if (fault == NoFault) { 225 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags); 226 } 227 228 return fault; 229 } 230}}; 231 232def template AtomicMemOpStoreInitiateAcc {{ 233 Fault %(class_name)s::%(class_name)sStore::initiateAcc( 234 ExecContext *xc, Trace::InstRecord *traceData) const 235 { 236 Addr EA; 237 Fault fault = NoFault; 238 239 %(op_decl)s; 240 %(op_rd)s; 241 %(ea_code)s; 242 243 if (fault == NoFault) { 244 %(code)s; 245 } 246 247 if (fault == NoFault) { 248 fault = writeMemTiming(xc, traceData, 
Mem, EA, memAccessFlags, 249 nullptr); 250 } 251 252 if (fault == NoFault) { 253 %(op_wb)s; 254 } 255 256 return fault; 257 } 258}}; 259 260def template StoreCondCompleteAcc {{ 261 Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc, 262 Trace::InstRecord *traceData) const 263 { 264 Fault fault = NoFault; 265 266 %(op_dest_decl)s; 267 268 // RISC-V has the opposite convention gem5 has for success flags, 269 // so we invert the result here. 270 uint64_t result = !pkt->req->getExtraData(); 271 272 if (fault == NoFault) { 273 %(postacc_code)s; 274 } 275 276 if (fault == NoFault) { 277 %(op_wb)s; 278 } 279 280 return fault; 281 } 282}}; 283 284def template AtomicMemOpLoadCompleteAcc {{ 285 Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt, 286 ExecContext *xc, Trace::InstRecord *traceData) const 287 { 288 Fault fault = NoFault; 289 290 %(op_decl)s; 291 %(op_rd)s; 292 293 getMem(pkt, Mem, traceData); 294 295 if (fault == NoFault) { 296 %(code)s; 297 } 298 299 if (fault == NoFault) { 300 %(op_wb)s; 301 } 302 303 return fault; 304 } 305}}; 306 307def template AtomicMemOpStoreCompleteAcc {{ 308 Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt, 309 ExecContext *xc, Trace::InstRecord *traceData) const 310 { 311 return NoFault; 312 } 313}}; 314 315def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}}, 316 mem_flags=[], inst_flags=[]) {{ 317 mem_flags = makeList(mem_flags) 318 inst_flags = makeList(inst_flags) 319 iop = InstObjParams(name, Name, 'LoadReserved', 320 {'ea_code': ea_code, 'memacc_code': memacc_code, 321 'postacc_code': postacc_code}, inst_flags) 322 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \ 323 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';' 324 325 header_output = LoadStoreDeclare.subst(iop) 326 decoder_output = LRSCConstructor.subst(iop) 327 decode_block = BasicDecode.subst(iop) 328 exec_output = LoadExecute.subst(iop) \ 329 + LoadInitiateAcc.subst(iop) 
\ 330 + LoadCompleteAcc.subst(iop) 331}}; 332 333def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}}, 334 mem_flags=[], inst_flags=[]) {{ 335 mem_flags = makeList(mem_flags) 336 inst_flags = makeList(inst_flags) 337 iop = InstObjParams(name, Name, 'StoreCond', 338 {'ea_code': ea_code, 'memacc_code': memacc_code, 339 'postacc_code': postacc_code}, inst_flags) 340 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \ 341 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';' 342 343 header_output = LoadStoreDeclare.subst(iop) 344 decoder_output = LRSCConstructor.subst(iop) 345 decode_block = BasicDecode.subst(iop) 346 exec_output = StoreCondExecute.subst(iop) \ 347 + StoreInitiateAcc.subst(iop) \ 348 + StoreCondCompleteAcc.subst(iop) 349}}; 350 351def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[], 352 store_flags=[], inst_flags=[]) {{ 353 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags) 354 header_output = AtomicMemOpDeclare.subst(macro_iop) 355 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop) 356 decode_block = BasicDecode.subst(macro_iop) 357 exec_output = '' 358 359 load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"] 360 load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro', 361 {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'}, 362 load_inst_flags) 363 decoder_output += AtomicMemOpLoadConstructor.subst(load_iop) 364 exec_output += AtomicMemOpLoadExecute.subst(load_iop) \ 365 + AtomicMemOpLoadInitiateAcc.subst(load_iop) \ 366 + AtomicMemOpLoadCompleteAcc.subst(load_iop) 367 368 store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"] 369 store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro', 370 {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'}, 371 store_inst_flags) 372 decoder_output += AtomicMemOpStoreConstructor.subst(store_iop) 373 exec_output += AtomicMemOpStoreExecute.subst(store_iop) \ 374 + 
AtomicMemOpStoreInitiateAcc.subst(store_iop) \ 375 + AtomicMemOpStoreCompleteAcc.subst(store_iop) 376}}; 377