1// Copyright (c) 2017-2018 ARM Limited 2// All rights reserved 3// 4// The license below extends only to copyright in the software and shall 5// not be construed as granting a license to any other intellectual 6// property including but not limited to intellectual property relating 7// to a hardware implementation of the functionality of the software 8// licensed hereunder. You may use the software subject to the license --- 75 unchanged lines hidden (view full) --- 84 return new Unknown64(machInst); 85 } 86 87 // Decodes SVE contiguous load instructions, scalar plus immediate form. 88 template <template <typename T1, typename T2> class Base> 89 StaticInstPtr 90 decodeSveContigLoadSIInsts(uint8_t dtype, ExtMachInst machInst, 91 IntRegIndex zt, IntRegIndex pg, IntRegIndex rn, |
// decodeSveContigLoadSIInsts: decodes SVE contiguous loads, scalar plus
// immediate form.  `dtype` selects the <memory element, register element>
// type pair used to instantiate the Base template; `nonFaulting` /
// `replicate` pick the mnemonic.  (Part of this function is hidden in
// this view.)
92 uint64_t imm, bool nonFaulting, |
93 bool replicate = false) 94 { |
// A load cannot be both non-faulting (LDNF1) and replicating (LD1R);
// the mnemonic is chosen accordingly.
95 assert(!(nonFaulting && replicate)); 96 const char* mn = replicate ? "ld1r" : (nonFaulting ? "ldnf1" : "ld1"); |
// dtype 0x0-0x3: byte memory elements widened to 8/16/32/64-bit register
// elements (remaining cases are hidden in this view).
97 switch (dtype) { 98 case 0x0: 99 return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 100 case 0x1: 101 return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 102 case 0x2: 103 return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 104 case 0x3: --- 98 unchanged lines hidden (view full) --- 203}}; 204 205output decoder {{ 206 207 StaticInstPtr 208 decodeSveGatherLoadVIInsts(uint8_t dtype, ExtMachInst machInst, 209 IntRegIndex zt, IntRegIndex pg, IntRegIndex zn, 210 uint64_t imm, bool esizeIs32, |
// decodeSveGatherLoadVIInsts: decodes SVE gather loads, vector plus
// immediate form.  `esizeIs32` selects 32- vs 64-bit vector elements,
// `firstFault` selects LDFF1* vs LD1*.  Each case instantiates
// SveIndexedMemVI<RegElemType, MemElemType, load microop, FFR writeback
// microop> with the matching element types.
211 bool firstFault) |
212 { |
// First-faulting gathers use the LDFF1 mnemonic family.
213 const char* mn = firstFault ? "ldff1" : "ld1"; |
// dtype encodes memory element size and signedness: 0x0/0x1 signed/
// unsigned byte, 0x2/0x3 signed/unsigned halfword, 0x4/0x5 signed/
// unsigned word, 0x7 doubleword.  Combinations impossible for 32-bit
// elements (0x4, 0x7) break out to Unknown64 below.
214 switch (dtype) { 215 case 0x0: 216 if (esizeIs32) { 217 return new SveIndexedMemVI<int32_t, int8_t, |
218 SveGatherLoadVIMicroop, 219 SveFirstFaultWritebackMicroop>( 220 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
221 } else { 222 return new SveIndexedMemVI<int64_t, int8_t, |
223 SveGatherLoadVIMicroop, 224 SveFirstFaultWritebackMicroop>( 225 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
226 } 227 case 0x1: 228 if (esizeIs32) { 229 return new SveIndexedMemVI<uint32_t, uint8_t, |
230 SveGatherLoadVIMicroop, 231 SveFirstFaultWritebackMicroop>( 232 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
233 } else { 234 return new SveIndexedMemVI<uint64_t, uint8_t, |
235 SveGatherLoadVIMicroop, 236 SveFirstFaultWritebackMicroop>( 237 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
238 } 239 case 0x2: 240 if (esizeIs32) { 241 return new SveIndexedMemVI<int32_t, int16_t, |
242 SveGatherLoadVIMicroop, 243 SveFirstFaultWritebackMicroop>( 244 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
245 } else { 246 return new SveIndexedMemVI<int64_t, int16_t, |
247 SveGatherLoadVIMicroop, 248 SveFirstFaultWritebackMicroop>( 249 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
250 } 251 case 0x3: 252 if (esizeIs32) { 253 return new SveIndexedMemVI<uint32_t, uint16_t, |
254 SveGatherLoadVIMicroop, 255 SveFirstFaultWritebackMicroop>( 256 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
257 } else { 258 return new SveIndexedMemVI<uint64_t, uint16_t, |
259 SveGatherLoadVIMicroop, 260 SveFirstFaultWritebackMicroop>( 261 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
// Sign-extending word loads (0x4) only exist for 64-bit elements.
262 } 263 case 0x4: 264 if (esizeIs32) { 265 break; 266 } else { 267 return new SveIndexedMemVI<int64_t, int32_t, |
268 SveGatherLoadVIMicroop, 269 SveFirstFaultWritebackMicroop>( 270 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
271 } 272 case 0x5: 273 if (esizeIs32) { 274 return new SveIndexedMemVI<uint32_t, uint32_t, |
275 SveGatherLoadVIMicroop, 276 SveFirstFaultWritebackMicroop>( 277 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
278 } else { 279 return new SveIndexedMemVI<uint64_t, uint32_t, |
280 SveGatherLoadVIMicroop, 281 SveFirstFaultWritebackMicroop>( 282 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
// Doubleword loads (0x7) only exist for 64-bit elements; note there is
// deliberately no case 0x6.
283 } 284 case 0x7: 285 if (esizeIs32) { 286 break; 287 } else { 288 return new SveIndexedMemVI<uint64_t, uint64_t, |
289 SveGatherLoadVIMicroop, 290 SveFirstFaultWritebackMicroop>( 291 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
// Any dtype/esize combination not matched above is undefined.
292 } 293 } 294 return new Unknown64(machInst); 295 } 296 297 StaticInstPtr 298 decodeSveGatherLoadSVInsts(uint8_t dtype, ExtMachInst machInst, 299 IntRegIndex zt, IntRegIndex pg, IntRegIndex rn, 300 IntRegIndex zm, bool esizeIs32, bool offsetIs32, 301 bool offsetIsSigned, bool offsetIsScaled, |
// decodeSveGatherLoadSVInsts: decodes SVE gather loads, scalar plus
// vector form.  `offsetIs32` / `offsetIsSigned` / `offsetIsScaled`
// describe how the per-element offsets held in zm are interpreted;
// `esizeIs32` selects 32- vs 64-bit elements and `firstFault` selects
// LDFF1* vs LD1*.  dtype encoding mirrors the vector-plus-immediate
// decoder above.
302 bool firstFault) |
303 { |
304 const char* mn = firstFault ? "ldff1" : "ld1"; |
305 switch (dtype) { 306 case 0x0: 307 if (esizeIs32) { 308 return new SveIndexedMemSV<int32_t, int8_t, |
309 SveGatherLoadSVMicroop, 310 SveFirstFaultWritebackMicroop>( |
311 mn, machInst, MemReadOp, zt, pg, rn, zm, |
312 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
313 } else { 314 return new SveIndexedMemSV<int64_t, int8_t, |
315 SveGatherLoadSVMicroop, 316 SveFirstFaultWritebackMicroop>( |
317 mn, machInst, MemReadOp, zt, pg, rn, zm, |
318 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
319 } 320 case 0x1: 321 if (esizeIs32) { 322 return new SveIndexedMemSV<uint32_t, uint8_t, |
323 SveGatherLoadSVMicroop, 324 SveFirstFaultWritebackMicroop>( |
325 mn, machInst, MemReadOp, zt, pg, rn, zm, |
326 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
327 } else { 328 return new SveIndexedMemSV<uint64_t, uint8_t, |
329 SveGatherLoadSVMicroop, 330 SveFirstFaultWritebackMicroop>( |
331 mn, machInst, MemReadOp, zt, pg, rn, zm, |
332 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
333 } 334 case 0x2: 335 if (esizeIs32) { 336 return new SveIndexedMemSV<int32_t, int16_t, |
337 SveGatherLoadSVMicroop, 338 SveFirstFaultWritebackMicroop>( |
339 mn, machInst, MemReadOp, zt, pg, rn, zm, |
340 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
341 } else { 342 return new SveIndexedMemSV<int64_t, int16_t, |
343 SveGatherLoadSVMicroop, 344 SveFirstFaultWritebackMicroop>( |
345 mn, machInst, MemReadOp, zt, pg, rn, zm, |
346 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
347 } 348 case 0x3: 349 if (esizeIs32) { 350 return new SveIndexedMemSV<uint32_t, uint16_t, |
351 SveGatherLoadSVMicroop, 352 SveFirstFaultWritebackMicroop>( |
353 mn, machInst, MemReadOp, zt, pg, rn, zm, |
354 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
355 } else { 356 return new SveIndexedMemSV<uint64_t, uint16_t, |
357 SveGatherLoadSVMicroop, 358 SveFirstFaultWritebackMicroop>( |
359 mn, machInst, MemReadOp, zt, pg, rn, zm, |
360 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
// dtype 0x4 (sign-extended word) and 0x7 (doubleword) are 64-bit
// element forms only.
361 } 362 case 0x4: 363 if (esizeIs32) { 364 break; 365 } else { 366 return new SveIndexedMemSV<int64_t, int32_t, |
367 SveGatherLoadSVMicroop, 368 SveFirstFaultWritebackMicroop>( |
369 mn, machInst, MemReadOp, zt, pg, rn, zm, |
370 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
371 } 372 case 0x5: 373 if (esizeIs32) { 374 return new SveIndexedMemSV<uint32_t, uint32_t, |
375 SveGatherLoadSVMicroop, 376 SveFirstFaultWritebackMicroop>( |
377 mn, machInst, MemReadOp, zt, pg, rn, zm, |
378 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
379 } else { 380 return new SveIndexedMemSV<uint64_t, uint32_t, |
381 SveGatherLoadSVMicroop, 382 SveFirstFaultWritebackMicroop>( |
383 mn, machInst, MemReadOp, zt, pg, rn, zm, |
384 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
385 } 386 case 0x7: 387 if (esizeIs32) { 388 break; 389 } else { 390 return new SveIndexedMemSV<uint64_t, uint64_t, |
391 SveGatherLoadSVMicroop, 392 SveFirstFaultWritebackMicroop>( |
393 mn, machInst, MemReadOp, zt, pg, rn, zm, |
// Fall-through / unmatched combinations decode to Unknown64.
394 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
395 } 396 } 397 return new Unknown64(machInst); 398 } 399 400 StaticInstPtr 401 decodeSveScatterStoreVIInsts(uint8_t msz, ExtMachInst machInst, 402 IntRegIndex zt, IntRegIndex pg, 403 IntRegIndex zn, uint64_t imm, 404 bool esizeIs32) 405 { 406 const char* mn = "st1"; 407 switch (msz) { 408 case 0x0: 409 if (esizeIs32) { 410 return new SveIndexedMemVI<uint32_t, uint8_t, |
// (Continuation of decodeSveScatterStoreVIInsts: SVE scatter stores,
// vector plus immediate form.)  `msz` encodes the memory element size
// (0x0 byte, 0x1 halfword, 0x2 word, 0x3 doubleword); stores are always
// unsigned/truncating, so only unsigned type pairs appear.  The trailing
// `false` constructor argument means "not first-faulting" — stores never
// use the FFR writeback path.
411 SveScatterStoreVIMicroop, 412 SveFirstFaultWritebackMicroop>( 413 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
414 } else { 415 return new SveIndexedMemVI<uint64_t, uint8_t, |
416 SveScatterStoreVIMicroop, 417 SveFirstFaultWritebackMicroop>( 418 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
419 } 420 case 0x1: 421 if (esizeIs32) { 422 return new SveIndexedMemVI<uint32_t, uint16_t, |
423 SveScatterStoreVIMicroop, 424 SveFirstFaultWritebackMicroop>( 425 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
426 } else { 427 return new SveIndexedMemVI<uint64_t, uint16_t, |
428 SveScatterStoreVIMicroop, 429 SveFirstFaultWritebackMicroop>( 430 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
431 } 432 case 0x2: 433 if (esizeIs32) { 434 return new SveIndexedMemVI<uint32_t, uint32_t, |
435 SveScatterStoreVIMicroop, 436 SveFirstFaultWritebackMicroop>( 437 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
438 } else { 439 return new SveIndexedMemVI<uint64_t, uint32_t, |
440 SveScatterStoreVIMicroop, 441 SveFirstFaultWritebackMicroop>( 442 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
// Doubleword stores (0x3) only exist for 64-bit elements.
443 } 444 case 0x3: 445 if (esizeIs32) { 446 break; 447 } else { 448 return new SveIndexedMemVI<uint64_t, uint64_t, |
449 SveScatterStoreVIMicroop, 450 SveFirstFaultWritebackMicroop>( 451 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
452 } 453 } 454 return new Unknown64(machInst); 455 } 456 457 StaticInstPtr 458 decodeSveScatterStoreSVInsts(uint8_t msz, ExtMachInst machInst, 459 IntRegIndex zt, IntRegIndex pg, 460 IntRegIndex rn, IntRegIndex zm, 461 bool esizeIs32, bool offsetIs32, 462 bool offsetIsSigned, bool offsetIsScaled) 463 { 464 const char* mn = "st1"; 465 switch (msz) { 466 case 0x0: 467 if (esizeIs32) { 468 return new SveIndexedMemSV<uint32_t, uint8_t, |
// (Continuation of decodeSveScatterStoreSVInsts: SVE scatter stores,
// scalar plus vector form.)  Offset interpretation flags mirror the
// gather-load scalar-plus-vector decoder; the trailing `false` means
// "not first-faulting" (stores never write back FFR).
469 SveScatterStoreSVMicroop, 470 SveFirstFaultWritebackMicroop>( |
471 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
472 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
473 } else { 474 return new SveIndexedMemSV<uint64_t, uint8_t, |
475 SveScatterStoreSVMicroop, 476 SveFirstFaultWritebackMicroop>( |
477 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
478 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
479 } 480 case 0x1: 481 if (esizeIs32) { 482 return new SveIndexedMemSV<uint32_t, uint16_t, |
483 SveScatterStoreSVMicroop, 484 SveFirstFaultWritebackMicroop>( |
485 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
486 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
487 } else { 488 return new SveIndexedMemSV<uint64_t, uint16_t, |
489 SveScatterStoreSVMicroop, 490 SveFirstFaultWritebackMicroop>( |
491 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
492 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
493 } 494 case 0x2: 495 if (esizeIs32) { 496 return new SveIndexedMemSV<uint32_t, uint32_t, |
497 SveScatterStoreSVMicroop, 498 SveFirstFaultWritebackMicroop>( |
499 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
500 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
501 } else { 502 return new SveIndexedMemSV<uint64_t, uint32_t, |
503 SveScatterStoreSVMicroop, 504 SveFirstFaultWritebackMicroop>( |
505 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
506 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
// Doubleword stores (0x3) only exist for 64-bit elements; anything
// unmatched decodes to Unknown64.
507 } 508 case 0x3: 509 if (esizeIs32) { 510 break; 511 } else { 512 return new SveIndexedMemSV<uint64_t, uint64_t, |
513 SveScatterStoreSVMicroop, 514 SveFirstFaultWritebackMicroop>( |
515 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
516 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
517 } 518 } 519 return new Unknown64(machInst); 520 } 521 522}}; 523 524 --- 11 unchanged lines hidden (view full) --- 536 ''' 537 538 def emitSveMemFillSpill(isPred): 539 global header_output, exec_output, decoders 540 eaCode = SPAlignmentCheckCode + ''' 541 int memAccessSize = %(memacc_size)s; 542 EA = XBase + ((int64_t) imm * %(memacc_size)s)''' % { 543 'memacc_size': 'eCount / 8' if isPred else 'eCount'} |
# Fill/spill (LDR/STR) transfers are unpredicated: the rdEn vector is
# left empty.  NOTE(review): this presumably means "no per-byte read
# disable" in the SveMem*FillSpill templates — confirm against them.
544 loadRdEnableCode = ''' 545 auto rdEn = std::vector<bool>(); 546 ''' |
# Predicate fill (LDR predicate) unpacks one predicate bit per element
# from each transferred byte; the vector form (hidden here) copies bytes
# directly.
547 if isPred: 548 loadMemAccCode = ''' 549 int index = 0; 550 uint8_t byte; 551 for (int i = 0; i < eCount / 8; i++) { 552 byte = memDataView[i]; 553 for (int j = 0; j < 8; j++, index++) { 554 PDest_x[index] = (byte >> j) & 1; --- 30 unchanged lines hidden (view full) --- 585 ''' 586 loadIop = InstObjParams('ldr', 587 'SveLdrPred' if isPred else 'SveLdrVec', 588 'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill', 589 {'tpl_header': '', 590 'tpl_args': '', 591 'memacc_code': loadMemAccCode, 592 'ea_code' : sveEnabledCheckCode + eaCode, |
# Fill/spill loads have no first-fault/non-fault variant, hence the
# empty fault_code snippet.
593 'rden_code' : loadRdEnableCode, 594 'fault_code' : '', |
595 'fa_code' : ''}, 596 ['IsMemRef', 'IsLoad']) 597 storeIop = InstObjParams('str', 598 'SveStrPred' if isPred else 'SveStrVec', 599 'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill', 600 {'tpl_header': '', 601 'tpl_args': '', 602 'wren_code': storeWrEnableCode, --- 66 unchanged lines hidden (view full) --- 669 ('uint32_t', 'uint32_t'), 670 ('uint64_t', 'uint32_t'), 671 ('uint64_t', 'uint64_t'), 672 ) 673 674 # Generates definitions for SVE contiguous loads 675 def emitSveContigMemInsts(offsetIsImm): 676 global header_output, exec_output, decoders |
677 # First-faulting instructions only have a scalar plus scalar form, 678 # while non-faulting instructions have only a scalar plus immediate form, so 679 # `offsetIsImm` is used to determine which class of instructions is 680 # generated 681 firstFaulting = not offsetIsImm |
# EA: base register plus either an immediate scaled by the vector's byte
# footprint, or a scalar offset scaled by the memory element size.
682 tplHeader = 'template <class RegElemType, class MemElemType>' 683 tplArgs = '<RegElemType, MemElemType>' 684 eaCode = SPAlignmentCheckCode + ''' 685 int memAccessSize = eCount * sizeof(MemElemType); 686 EA = XBase + ''' 687 if offsetIsImm: 688 eaCode += '((int64_t) this->imm * eCount * sizeof(MemElemType))' 689 else: 690 eaCode += '(XOffset * sizeof(MemElemType));' |
# Per-byte read-enable mask: bytes of inactive (predicate-false)
# elements are disabled.
691 loadRdEnableCode = ''' 692 auto rdEn = std::vector<bool>(sizeof(MemElemType) * eCount, true); 693 for (int i = 0; i < eCount; i++) { 694 if (!GpOp_x[i]) { 695 for (int j = 0; j < sizeof(MemElemType); j++) { 696 rdEn[sizeof(MemElemType) * i + j] = false; 697 } 698 } 699 } 700 ''' |
# Normal loads: inactive elements are zeroed in the destination.
701 loadMemAccCode = ''' 702 for (int i = 0; i < eCount; i++) { 703 if (GpOp_x[i]) { 704 AA64FpDest_x[i] = memDataView[i]; 705 } else { 706 AA64FpDest_x[i] = 0; 707 } 708 } --- 8 unchanged lines hidden (view full) --- 717 wrEn[sizeof(MemElemType) * i + j] = false; 718 } 719 } 720 } 721 ''' 722 storeWrEnableCode = ''' 723 auto wrEn = std::vector<bool>(sizeof(MemElemType) * eCount, true); 724 ''' |
# Faulting-load variants (LDFF1/LDNF1): elements are only written when
# both the predicate and the (read-back) FFR bit are set.  faultCode
# turns a mid-vector fault into an FFR update instead of an architectural
# fault: elements at/after the faulting one get FFR cleared, and the
# fault is suppressed when the first active element is not the faulting
# one (LDNF1 additionally forces this path via nonFaultingCode's
# 'true ||' prefix).  NOTE(review): 'fautlingLoadmemAccCode' is a
# misspelling of 'faultingLoadMemAccCode' — used consistently below, so
# harmless, but worth renaming in a follow-up.
725 ffrReadBackCode = ''' 726 auto& firstFaultReg = Ffr;''' 727 fautlingLoadmemAccCode = ''' 728 for (int i = 0; i < eCount; i++) { 729 if (GpOp_x[i] && firstFaultReg[i * sizeof(RegElemType)]) { 730 AA64FpDest_x[i] = memDataView[i]; 731 } else { 732 AA64FpDest_x[i] = 0; 733 } 734 } 735 ''' 736 nonFaultingCode = 'true ||' 737 faultCode = ''' 738 Addr fault_addr; 739 if (fault == NoFault || getFaultVAddr(fault, fault_addr)) { 740 unsigned fault_elem_index; 741 if (fault != NoFault) { 742 assert(fault_addr >= EA); 743 fault_elem_index = (fault_addr - EA) / sizeof(MemElemType); 744 } else { 745 fault_elem_index = eCount + 1; 746 } 747 int first_active_index; 748 for (first_active_index = 0; 749 first_active_index < eCount && !(GpOp_x[first_active_index]); 750 first_active_index++); 751 if (%s first_active_index < fault_elem_index) { 752 for (int i = 0; i < eCount; i++) { 753 for (int j = 0; j < sizeof(RegElemType); j++) { 754 if (i < fault_elem_index) { 755 Ffr_ub[i * sizeof(RegElemType) + j] = FfrAux_x[i]; 756 } else { 757 Ffr_ub[i * sizeof(RegElemType) + j] = 0; 758 } 759 } 760 } 761 fault = NoFault; 762 if (first_active_index >= fault_elem_index) { 763 // non-faulting load needs this 764 xc->setMemAccPredicate(false); 765 } 766 } 767 } 768 ''' % ('' if firstFaulting else nonFaultingCode) 769 |
# Instruction descriptors: plain load, plain store, and the faulting
# load (LDFF1 for scalar+scalar, LDNF1 for scalar+immediate).
770 loadIop = InstObjParams('ld1', 771 'SveContigLoadSI' if offsetIsImm else 'SveContigLoadSS', 772 'SveContigMemSI' if offsetIsImm else 'SveContigMemSS', 773 {'tpl_header': tplHeader, 774 'tpl_args': tplArgs, |
775 'rden_code' : loadRdEnableCode, |
776 'memacc_code': loadMemAccCode, 777 'ea_code' : sveEnabledCheckCode + eaCode, |
778 'fault_code' : '', |
779 'fa_code' : ''}, 780 ['IsMemRef', 'IsLoad']) 781 storeIop = InstObjParams('st1', 782 'SveContigStoreSI' if offsetIsImm else 'SveContigStoreSS', 783 'SveContigMemSI' if offsetIsImm else 'SveContigMemSS', 784 {'tpl_header': tplHeader, 785 'tpl_args': tplArgs, 786 'wren_code': storeWrEnableCode, 787 'memacc_code': storeMemAccCode, 788 'ea_code' : sveEnabledCheckCode + eaCode, 789 'fa_code' : ''}, 790 ['IsMemRef', 'IsStore']) |
# The faulting load's memacc snippet is prefixed with the FFR read-back
# so the per-element FFR gate is in scope.
791 faultIop = InstObjParams('ldff1' if firstFaulting else 'ldnf1', 792 'SveContigFFLoadSS' if firstFaulting else 'SveContigNFLoadSI', 793 'SveContigMemSS' if firstFaulting else 'SveContigMemSI', 794 {'tpl_header': tplHeader, 795 'tpl_args': tplArgs, 796 'rden_code' : loadRdEnableCode, 797 'memacc_code': fautlingLoadmemAccCode, 798 'ea_code' : sveEnabledCheckCode + eaCode, 799 'fault_code' : faultCode, 800 'fa_code' : ''}, 801 ['IsMemRef', 'IsLoad']) 802 faultIop.snippets['memacc_code'] = (ffrReadBackCode + 803 faultIop.snippets['memacc_code']) |
# Emit declarations and exec templates for all three descriptors.
804 if offsetIsImm: 805 header_output += SveContigMemSIOpDeclare.subst(loadIop) 806 header_output += SveContigMemSIOpDeclare.subst(storeIop) |
807 header_output += SveContigMemSIOpDeclare.subst(faultIop) |
808 else: 809 header_output += SveContigMemSSOpDeclare.subst(loadIop) 810 header_output += SveContigMemSSOpDeclare.subst(storeIop) |
811 header_output += SveContigMemSSOpDeclare.subst(faultIop) |
812 exec_output += ( 813 SveContigLoadExecute.subst(loadIop) + 814 SveContigLoadInitiateAcc.subst(loadIop) + 815 SveContigLoadCompleteAcc.subst(loadIop) + 816 SveContigStoreExecute.subst(storeIop) + 817 SveContigStoreInitiateAcc.subst(storeIop) + |
818 SveContigStoreCompleteAcc.subst(storeIop) + 819 SveContigLoadExecute.subst(faultIop) + 820 SveContigLoadInitiateAcc.subst(faultIop) + 821 SveContigLoadCompleteAcc.subst(faultIop)) 822 |
# Explicit template instantiations for every (reg, mem) element type
# pair, including the faulting-load class.
823 for args in loadTplArgs: 824 substDict = {'tpl_args': '<%s>' % ', '.join(args), 825 'class_name': 'SveContigLoadSI' if offsetIsImm 826 else 'SveContigLoadSS'} 827 exec_output += SveContigMemExecDeclare.subst(substDict) 828 for args in storeTplArgs: 829 substDict = {'tpl_args': '<%s>' % ', '.join(args), 830 'class_name': 'SveContigStoreSI' if offsetIsImm 831 else 'SveContigStoreSS'} 832 exec_output += SveContigMemExecDeclare.subst(substDict) |
833 for args in loadTplArgs: 834 substDict = {'tpl_args': '<%s>' % ', '.join(args), 835 'class_name': 'SveContigFFLoadSS' if firstFaulting 836 else 'SveContigNFLoadSI'} 837 exec_output += SveContigMemExecDeclare.subst(substDict) |
838 |
839 |
# Load-and-replicate generator (partly hidden here), followed by the
# generator for gather/scatter transfer microops: one microop per
# element, with the per-element offset taken from the source vector
# (scalar plus vector form) or from an immediate (vector plus immediate
# form).  NOTE(review): the offset-handling code visible below (sext,
# scaling) belongs to the scalar-plus-vector EA computation — the
# enclosing function's signature is hidden in this view.
840 # Generates definitions for SVE load-and-replicate instructions 841 def emitSveLoadAndRepl(): 842 global header_output, exec_output, decoders 843 tplHeader = 'template <class RegElemType, class MemElemType>' 844 tplArgs = '<RegElemType, MemElemType>' 845 eaCode = SPAlignmentCheckCode + ''' 846 EA = XBase + imm * sizeof(MemElemType);''' 847 memAccCode = ''' --- 48 unchanged lines hidden (view full) --- 896 if (offsetIsSigned) { 897 offset = sext<32>(offset); 898 } 899 if (offsetIsScaled) { 900 offset *= sizeof(MemElemType); 901 } 902 EA = XBase + offset''' 903 loadMemAccCode = ''' |
904 AA64FpDest_x[elemIndex] = memData; |
905 ''' 906 storeMemAccCode = ''' 907 memData = AA64FpDest_x[elemIndex]; 908 ''' |
# PUreg0 is used as per-element fault-status scratch state, later
# consumed by the FFR writeback microop.
909 predCheckCode = 'GpOp_x[index]' 910 faultStatusSetCode = 'PUreg0_x[elemIndex] = 1;' 911 faultStatusResetCode = 'PUreg0_x[elemIndex] = 0;' |
912 loadIop = InstObjParams('ld1', 913 ('SveGatherLoadVIMicroop' 914 if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM 915 else 'SveGatherLoadSVMicroop'), 916 'MicroOp', 917 {'tpl_header': tplHeader, 918 'tpl_args': tplArgs, 919 'memacc_code': loadMemAccCode, 920 'ea_code' : sveEnabledCheckCode + eaCode, |
921 'fault_status_set_code' : faultStatusSetCode, 922 'fault_status_reset_code' : faultStatusResetCode, |
923 'pred_check_code' : predCheckCode, 924 'fa_code' : ''}, 925 ['IsMicroop', 'IsMemRef', 'IsLoad']) 926 storeIop = InstObjParams('st1', 927 ('SveScatterStoreVIMicroop' 928 if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM 929 else 'SveScatterStoreSVMicroop'), 930 'MicroOp', --- 31 unchanged lines hidden (view full) --- 962 'class_name': ( 963 'SveScatterStoreVIMicroop' 964 if indexed_addr_form == \ 965 IndexedAddrForm.VEC_PLUS_IMM 966 else 'SveScatterStoreSVMicroop')} 967 # TODO: this should become SveMemExecDeclare 968 exec_output += SveContigMemExecDeclare.subst(substDict) 969 |
# FFR writeback microop for first-faulting gather loads: appended after
# the per-element transfer microops, it folds the per-element fault
# status (PUreg0, set/reset by the transfer microops above) into FFR —
# forwarding FfrAux for non-faulting elements and clearing FFR bytes for
# faulting ones.  Instantiated for each register element type in
# firstFaultTplArgs.
970 firstFaultTplArgs = ('int32_t', 'int64_t', 'uint32_t', 'uint64_t') 971 972 def emitSveFirstFaultWritebackMicroop(): 973 global header_output, exec_output, decoders 974 tplHeader = 'template <class RegElemType>' 975 tplArgs = '<RegElemType>' 976 faultStatusCheckCode = 'PUreg0_x[index]' 977 firstFaultResetCode = ''' 978 for(int j = 0; j < sizeof(RegElemType); j++) { 979 Ffr_ub[index * sizeof(RegElemType) + j] = 0; 980 } 981 ''' 982 firstFaultForwardCode = ''' 983 for(int j = 0; j < sizeof(RegElemType); j++) { 984 Ffr_ub[index * sizeof(RegElemType) + j] = FfrAux_x[index]; 985 } 986 ''' 987 iop = InstObjParams('ldff1', 988 'SveFirstFaultWritebackMicroop', 989 'MicroOp', 990 {'tpl_header': tplHeader, 991 'tpl_args': tplArgs, 992 'fault_status_check_code' : faultStatusCheckCode, 993 'first_fault_reset_code' : firstFaultResetCode, 994 'first_fault_forward_code' : firstFaultForwardCode}, 995 ['IsMicroop']) 996 header_output += SveFirstFaultWritebackMicroopDeclare.subst(iop) 997 exec_output += SveFirstFaultWritebackMicroopExecute.subst(iop) 998 for args in firstFaultTplArgs: 999 substDict = {'targs': args, 1000 'class_name' : 'SveFirstFaultWritebackMicroop' } 1001 exec_output += SveOpExecDeclare.subst(substDict) 1002 |
1003 # Generates definitions for the first microop of SVE gather loads, required 1004 # to propagate the source vector register to the transfer microops 1005 def emitSveGatherLoadCpySrcVecMicroop(): 1006 global header_output, exec_output, decoders 1007 code = sveEnabledCheckCode + ''' 1008 unsigned eCount = ArmStaticInst::getCurSveVecLen<uint8_t>( 1009 xc->tcBase()); 1010 for (unsigned i = 0; i < eCount; i++) { --- 4 unchanged lines hidden (view full) --- 1015 'MicroOp', 1016 {'code': code}, 1017 ['IsMicroop']) 1018 header_output += SveGatherLoadCpySrcVecMicroopDeclare.subst(iop) 1019 exec_output += SveGatherLoadCpySrcVecMicroopExecute.subst(iop) 1020 1021 # LD1[S]{B,H,W,D} (scalar plus immediate) 1022 # ST1[S]{B,H,W,D} (scalar plus immediate) |
1023 # LDNF1[S]{B,H,W,D} (scalar plus immediate) |
1024 emitSveContigMemInsts(True) 1025 # LD1[S]{B,H,W,D} (scalar plus scalar) 1026 # ST1[S]{B,H,W,D} (scalar plus scalar) |
1027 # LDFF1[S]{B,H,W,D} (scalar plus scalar) |
1028 emitSveContigMemInsts(False) 1029 1030 # LD1R[S]{B,H,W,D} 1031 emitSveLoadAndRepl() 1032 1033 # LDR (predicate), STR (predicate) 1034 emitSveMemFillSpill(True) 1035 # LDR (vector), STR (vector) 1036 emitSveMemFillSpill(False) 1037 1038 # LD1[S]{B,H,W,D} (vector plus immediate) 1039 # ST1[S]{B,H,W,D} (vector plus immediate) |
1040 # LDFF1[S]{B,H,W,D} (vector plus immediate) |
1041 emitSveIndexedMemMicroops(IndexedAddrForm.VEC_PLUS_IMM) 1042 # LD1[S]{B,H,W,D} (scalar plus vector) 1043 # ST1[S]{B,H,W,D} (scalar plus vector) |
1044 # LDFF1[S]{B,H,W,D} (scalar plus vector) |
1045 emitSveIndexedMemMicroops(IndexedAddrForm.SCA_PLUS_VEC) 1046 |
1047 # FFR writeback microop for gather loads 1048 emitSveFirstFaultWritebackMicroop() 1049 |
1050 # Source vector copy microop for gather loads 1051 emitSveGatherLoadCpySrcVecMicroop() |
1052}}; |