amo.isa: revision 12323:55d08b81ff39 compared against revision 12385:288c62455dde.

The newer revision only removes code: the two eaComp() declarations in AtomicMemOpDeclare, the AtomicMemOpEACompExecute template, and the EACompExecute/AtomicMemOpEACompExecute subst() calls in the LoadReserved, StoreCond, and AtomicMemOp formats. The listing below shows the older revision once; lines deleted in 12385 are prefixed with "-", and all other lines are identical in both revisions.
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const;
-            Fault eaComp(ExecContext *, Trace::InstRecord *) const;
            Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const;
        };

        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const;
-            Fault eaComp(ExecContext *, Trace::InstRecord *) const;
            Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const;
        };
    };
}};

def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
                    new %(class_name)sStore(machInst, this)};
    }
}};

def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
            ExtMachInst machInst, %(class_name)s *_p)
        : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};

def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
            ExtMachInst machInst, %(class_name)s *_p)
        : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};

def template StoreCondExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadExecute {{
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreExecute {{
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

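An aside on the StoreCondExecute template above: writeMemAtomic() reports the store-conditional outcome using gem5's convention (non-zero extra data on success), while RISC-V's sc instructions write 0 to rd on success, hence the result = !result inversion; the inverted value is what a format's postacc_code sees as result. The snippet below is only a hedged sketch of how a store-conditional might be declared with the StoreCond format defined later in this file; the mnemonic sc_d, the operand names, and the flag choices are assumptions for illustration, not part of this changeset.

```
// Hypothetical StoreCond invocation (it would sit under the matching opcode
// case in the decoder). Mnemonic, operands, and flags are assumptions.
StoreCond::sc_d({{
    Mem = Rs2;       // substituted as %(memacc_code)s in StoreCondExecute
}}, {{
    Rd = result;     // substituted as %(postacc_code)s; result is 0 on success
}}, mem_flags=LLSC, inst_flags=IsStoreConditional);
```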
-def template AtomicMemOpEACompExecute {{
-    Fault
-    %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
-        Trace::InstRecord *traceData) const
-    {
-        Addr EA;
-        Fault fault = NoFault;
-
-        %(op_decl)s;
-        %(op_rd)s;
-        %(ea_code)s;
-
-        if (fault == NoFault) {
-            %(op_wb)s;
-            xc->setEA(EA);
-        }
-
-        return fault;
-    }
-}};
-
def template AtomicMemOpLoadInitiateAcc {{
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template AtomicMemOpStoreInitiateAcc {{
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondCompleteAcc {{
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadCompleteAcc {{
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreCompleteAcc {{
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
-        + EACompExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};

def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
-        + EACompExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};

def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
-        + AtomicMemOpEACompExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
-        + AtomicMemOpEACompExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};
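For context on how these formats are consumed: the RISC-V decoder declares the A-extension instructions with them, and each AtomicMemOp declaration expands into a macroop plus the two microops built by AtomicMemOpMacroConstructor (the load half carries IsFirstMicroop and IsDelayedCommit and honors AQ, the store half carries IsLastMicroop and IsNonSpeculative and honors RL). The snippet below is a hedged sketch of such declarations; the mnemonics, operand suffixes, and the surrounding decode nesting are assumptions for illustration and are not taken from this changeset.

```
// Hypothetical decoder-side declarations using the formats defined above;
// they would normally appear under the appropriate opcode cases in decoder.isa.
LoadReserved::lr_w({{
    Rd_sd = Mem_sw;              // memacc_code: sign-extend the loaded word
}}, mem_flags=LLSC);

AtomicMemOp::amoadd_w({{
    Rd_sd = Mem_sw;              // load_code: old memory value goes to rd
}}, {{
    Mem_sw = Rs2_sw + Rd_sd;     // store_code: write back old value + rs2
}}, {{EA = Rs1;}});
```

Note that ea_code defaults to {{EA = Rs1;}} for LoadReserved and StoreCond but has no default in AtomicMemOp, so it has to be passed explicitly there.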