sve_mem.isa (14028:44edf7dbe672) | sve_mem.isa (14091:090449e74135) |
---|---|
1// Copyright (c) 2017-2018 ARM Limited 2// All rights reserved 3// 4// The license below extends only to copyright in the software and shall 5// not be construed as granting a license to any other intellectual 6// property including but not limited to intellectual property relating 7// to a hardware implementation of the functionality of the software 8// licensed hereunder. You may use the software subject to the license --- 75 unchanged lines hidden (view full) --- 84 return new Unknown64(machInst); 85 } 86 87 // Decodes SVE contiguous load instructions, scalar plus immediate form. 88 template <template <typename T1, typename T2> class Base> 89 StaticInstPtr 90 decodeSveContigLoadSIInsts(uint8_t dtype, ExtMachInst machInst, 91 IntRegIndex zt, IntRegIndex pg, IntRegIndex rn, | 1// Copyright (c) 2017-2018 ARM Limited 2// All rights reserved 3// 4// The license below extends only to copyright in the software and shall 5// not be construed as granting a license to any other intellectual 6// property including but not limited to intellectual property relating 7// to a hardware implementation of the functionality of the software 8// licensed hereunder. You may use the software subject to the license --- 75 unchanged lines hidden (view full) --- 84 return new Unknown64(machInst); 85 } 86 87 // Decodes SVE contiguous load instructions, scalar plus immediate form. 88 template <template <typename T1, typename T2> class Base> 89 StaticInstPtr 90 decodeSveContigLoadSIInsts(uint8_t dtype, ExtMachInst machInst, 91 IntRegIndex zt, IntRegIndex pg, IntRegIndex rn, |
92 uint64_t imm, bool firstFaulting, | 92 uint64_t imm, bool nonFaulting, |
93 bool replicate = false) 94 { | 93 bool replicate = false) 94 { |
95 assert(!(replicate && firstFaulting)); 96 97 const char* mn = replicate ? "ld1r" : 98 (firstFaulting ? "ldff1" : "ld1"); | 95 assert(!(nonFaulting && replicate)); 96 const char* mn = replicate ? "ld1r" : (nonFaulting ? "ldnf1" : "ld1"); |
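The dtype switch that follows is this file's recurring dispatch idiom: a 4-bit dtype field selects a (register element, memory element) type pair, and a template template parameter lets one decoder body instantiate the right specialization. A minimal standalone sketch of the idiom — `FakeLoad` and `decodeSketch` are illustrative names, not gem5 code:

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical stand-in for the Base class template taken by
// decodeSveContigLoadSIInsts.
template <typename RegElem, typename MemElem>
struct FakeLoad {
    static void describe(const char *mn) {
        std::cout << mn << ": " << sizeof(MemElem) << "-byte memory elements "
                  << "widened to " << sizeof(RegElem)
                  << "-byte register elements\n";
    }
};

// The dtype field selects the (register, memory) element type pair.
template <template <typename, typename> class Base>
void decodeSketch(uint8_t dtype, const char *mn)
{
    switch (dtype) {
      case 0x0: Base<uint8_t,  uint8_t>::describe(mn); break;
      case 0x1: Base<uint16_t, uint8_t>::describe(mn); break;
      case 0x2: Base<uint32_t, uint8_t>::describe(mn); break;
      // ... remaining dtype values continue the same pattern ...
      default:  std::cout << "unallocated encoding\n"; break;
    }
}

int main()
{
    decodeSketch<FakeLoad>(0x1, "ldnf1");  // e.g. LDNF1B into .H elements
}
```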
99 switch (dtype) { 100 case 0x0: 101 return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 102 case 0x1: 103 return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 104 case 0x2: 105 return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 106 case 0x3: --- 98 unchanged lines hidden (view full) --- 205}}; 206 207output decoder {{ 208 209 StaticInstPtr 210 decodeSveGatherLoadVIInsts(uint8_t dtype, ExtMachInst machInst, 211 IntRegIndex zt, IntRegIndex pg, IntRegIndex zn, 212 uint64_t imm, bool esizeIs32, | 97 switch (dtype) { 98 case 0x0: 99 return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 100 case 0x1: 101 return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 102 case 0x2: 103 return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, imm); 104 case 0x3: --- 98 unchanged lines hidden (view full) --- 203}}; 204 205output decoder {{ 206 207 StaticInstPtr 208 decodeSveGatherLoadVIInsts(uint8_t dtype, ExtMachInst machInst, 209 IntRegIndex zt, IntRegIndex pg, IntRegIndex zn, 210 uint64_t imm, bool esizeIs32, |
213 bool firstFaulting) | 211 bool firstFault) |
214 { | 212 { |
215 const char* mn = firstFaulting ? "ldff1" : "ld1"; | 213 const char* mn = firstFault ? "ldff1" : "ld1"; |
216 switch (dtype) { 217 case 0x0: 218 if (esizeIs32) { 219 return new SveIndexedMemVI<int32_t, int8_t, | 214 switch (dtype) { 215 case 0x0: 216 if (esizeIs32) { 217 return new SveIndexedMemVI<int32_t, int8_t, |
220 SveGatherLoadVIMicroop>( 221 mn, machInst, MemReadOp, zt, pg, zn, imm); | 218 SveGatherLoadVIMicroop, 219 SveFirstFaultWritebackMicroop>( 220 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
222 } else { 223 return new SveIndexedMemVI<int64_t, int8_t, | 221 } else { 222 return new SveIndexedMemVI<int64_t, int8_t, |
224 SveGatherLoadVIMicroop>( 225 mn, machInst, MemReadOp, zt, pg, zn, imm); | 223 SveGatherLoadVIMicroop, 224 SveFirstFaultWritebackMicroop>( 225 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
226 } 227 case 0x1: 228 if (esizeIs32) { 229 return new SveIndexedMemVI<uint32_t, uint8_t, | 226 } 227 case 0x1: 228 if (esizeIs32) { 229 return new SveIndexedMemVI<uint32_t, uint8_t, |
230 SveGatherLoadVIMicroop>( 231 mn, machInst, MemReadOp, zt, pg, zn, imm); | 230 SveGatherLoadVIMicroop, 231 SveFirstFaultWritebackMicroop>( 232 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
232 } else { 233 return new SveIndexedMemVI<uint64_t, uint8_t, | 233 } else { 234 return new SveIndexedMemVI<uint64_t, uint8_t, |
234 SveGatherLoadVIMicroop>( 235 mn, machInst, MemReadOp, zt, pg, zn, imm); | 235 SveGatherLoadVIMicroop, 236 SveFirstFaultWritebackMicroop>( 237 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
236 } 237 case 0x2: 238 if (esizeIs32) { 239 return new SveIndexedMemVI<int32_t, int16_t, | 238 } 239 case 0x2: 240 if (esizeIs32) { 241 return new SveIndexedMemVI<int32_t, int16_t, |
240 SveGatherLoadVIMicroop>( 241 mn, machInst, MemReadOp, zt, pg, zn, imm); | 242 SveGatherLoadVIMicroop, 243 SveFirstFaultWritebackMicroop>( 244 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
242 } else { 243 return new SveIndexedMemVI<int64_t, int16_t, | 245 } else { 246 return new SveIndexedMemVI<int64_t, int16_t, |
244 SveGatherLoadVIMicroop>( 245 mn, machInst, MemReadOp, zt, pg, zn, imm); | 247 SveGatherLoadVIMicroop, 248 SveFirstFaultWritebackMicroop>( 249 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
246 } 247 case 0x3: 248 if (esizeIs32) { 249 return new SveIndexedMemVI<uint32_t, uint16_t, | 250 } 251 case 0x3: 252 if (esizeIs32) { 253 return new SveIndexedMemVI<uint32_t, uint16_t, |
250 SveGatherLoadVIMicroop>( 251 mn, machInst, MemReadOp, zt, pg, zn, imm); | 254 SveGatherLoadVIMicroop, 255 SveFirstFaultWritebackMicroop>( 256 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
252 } else { 253 return new SveIndexedMemVI<uint64_t, uint16_t, | 257 } else { 258 return new SveIndexedMemVI<uint64_t, uint16_t, |
254 SveGatherLoadVIMicroop>( 255 mn, machInst, MemReadOp, zt, pg, zn, imm); | 259 SveGatherLoadVIMicroop, 260 SveFirstFaultWritebackMicroop>( 261 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
256 } 257 case 0x4: 258 if (esizeIs32) { 259 break; 260 } else { 261 return new SveIndexedMemVI<int64_t, int32_t, | 262 } 263 case 0x4: 264 if (esizeIs32) { 265 break; 266 } else { 267 return new SveIndexedMemVI<int64_t, int32_t, |
262 SveGatherLoadVIMicroop>( 263 mn, machInst, MemReadOp, zt, pg, zn, imm); | 268 SveGatherLoadVIMicroop, 269 SveFirstFaultWritebackMicroop>( 270 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
264 } 265 case 0x5: 266 if (esizeIs32) { 267 return new SveIndexedMemVI<uint32_t, uint32_t, | 271 } 272 case 0x5: 273 if (esizeIs32) { 274 return new SveIndexedMemVI<uint32_t, uint32_t, |
268 SveGatherLoadVIMicroop>( 269 mn, machInst, MemReadOp, zt, pg, zn, imm); | 275 SveGatherLoadVIMicroop, 276 SveFirstFaultWritebackMicroop>( 277 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
270 } else { 271 return new SveIndexedMemVI<uint64_t, uint32_t, | 278 } else { 279 return new SveIndexedMemVI<uint64_t, uint32_t, |
272 SveGatherLoadVIMicroop>( 273 mn, machInst, MemReadOp, zt, pg, zn, imm); | 280 SveGatherLoadVIMicroop, 281 SveFirstFaultWritebackMicroop>( 282 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
274 } 275 case 0x7: 276 if (esizeIs32) { 277 break; 278 } else { 279 return new SveIndexedMemVI<uint64_t, uint64_t, | 283 } 284 case 0x7: 285 if (esizeIs32) { 286 break; 287 } else { 288 return new SveIndexedMemVI<uint64_t, uint64_t, |
280 SveGatherLoadVIMicroop>( 281 mn, machInst, MemReadOp, zt, pg, zn, imm); | 289 SveGatherLoadVIMicroop, 290 SveFirstFaultWritebackMicroop>( 291 mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault); |
282 } 283 } 284 return new Unknown64(machInst); 285 } 286 287 StaticInstPtr 288 decodeSveGatherLoadSVInsts(uint8_t dtype, ExtMachInst machInst, 289 IntRegIndex zt, IntRegIndex pg, IntRegIndex rn, 290 IntRegIndex zm, bool esizeIs32, bool offsetIs32, 291 bool offsetIsSigned, bool offsetIsScaled, | 292 } 293 } 294 return new Unknown64(machInst); 295 } 296 297 StaticInstPtr 298 decodeSveGatherLoadSVInsts(uint8_t dtype, ExtMachInst machInst, 299 IntRegIndex zt, IntRegIndex pg, IntRegIndex rn, 300 IntRegIndex zm, bool esizeIs32, bool offsetIs32, 301 bool offsetIsSigned, bool offsetIsScaled, |
292 bool firstFaulting) | 302 bool firstFault) |
293 { | 303 { |
294 const char* mn = firstFaulting ? "ldff1" : "ld1"; | 304 const char* mn = firstFault ? "ldff1" : "ld1"; |
295 switch (dtype) { 296 case 0x0: 297 if (esizeIs32) { 298 return new SveIndexedMemSV<int32_t, int8_t, | 305 switch (dtype) { 306 case 0x0: 307 if (esizeIs32) { 308 return new SveIndexedMemSV<int32_t, int8_t, |
299 SveGatherLoadSVMicroop>( | 309 SveGatherLoadSVMicroop, 310 SveFirstFaultWritebackMicroop>( |
300 mn, machInst, MemReadOp, zt, pg, rn, zm, | 311 mn, machInst, MemReadOp, zt, pg, rn, zm, |
301 offsetIs32, offsetIsSigned, offsetIsScaled); | 312 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
302 } else { 303 return new SveIndexedMemSV<int64_t, int8_t, | 313 } else { 314 return new SveIndexedMemSV<int64_t, int8_t, |
304 SveGatherLoadSVMicroop>( | 315 SveGatherLoadSVMicroop, 316 SveFirstFaultWritebackMicroop>( |
305 mn, machInst, MemReadOp, zt, pg, rn, zm, | 317 mn, machInst, MemReadOp, zt, pg, rn, zm, |
306 offsetIs32, offsetIsSigned, offsetIsScaled); | 318 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
307 } 308 case 0x1: 309 if (esizeIs32) { 310 return new SveIndexedMemSV<uint32_t, uint8_t, | 319 } 320 case 0x1: 321 if (esizeIs32) { 322 return new SveIndexedMemSV<uint32_t, uint8_t, |
311 SveGatherLoadSVMicroop>( | 323 SveGatherLoadSVMicroop, 324 SveFirstFaultWritebackMicroop>( |
312 mn, machInst, MemReadOp, zt, pg, rn, zm, | 325 mn, machInst, MemReadOp, zt, pg, rn, zm, |
313 offsetIs32, offsetIsSigned, offsetIsScaled); | 326 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
314 } else { 315 return new SveIndexedMemSV<uint64_t, uint8_t, | 327 } else { 328 return new SveIndexedMemSV<uint64_t, uint8_t, |
316 SveGatherLoadSVMicroop>( | 329 SveGatherLoadSVMicroop, 330 SveFirstFaultWritebackMicroop>( |
317 mn, machInst, MemReadOp, zt, pg, rn, zm, | 331 mn, machInst, MemReadOp, zt, pg, rn, zm, |
318 offsetIs32, offsetIsSigned, offsetIsScaled); | 332 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
319 } 320 case 0x2: 321 if (esizeIs32) { 322 return new SveIndexedMemSV<int32_t, int16_t, | 333 } 334 case 0x2: 335 if (esizeIs32) { 336 return new SveIndexedMemSV<int32_t, int16_t, |
323 SveGatherLoadSVMicroop>( | 337 SveGatherLoadSVMicroop, 338 SveFirstFaultWritebackMicroop>( |
324 mn, machInst, MemReadOp, zt, pg, rn, zm, | 339 mn, machInst, MemReadOp, zt, pg, rn, zm, |
325 offsetIs32, offsetIsSigned, offsetIsScaled); | 340 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
326 } else { 327 return new SveIndexedMemSV<int64_t, int16_t, | 341 } else { 342 return new SveIndexedMemSV<int64_t, int16_t, |
328 SveGatherLoadSVMicroop>( | 343 SveGatherLoadSVMicroop, 344 SveFirstFaultWritebackMicroop>( |
329 mn, machInst, MemReadOp, zt, pg, rn, zm, | 345 mn, machInst, MemReadOp, zt, pg, rn, zm, |
330 offsetIs32, offsetIsSigned, offsetIsScaled); | 346 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
331 } 332 case 0x3: 333 if (esizeIs32) { 334 return new SveIndexedMemSV<uint32_t, uint16_t, | 347 } 348 case 0x3: 349 if (esizeIs32) { 350 return new SveIndexedMemSV<uint32_t, uint16_t, |
335 SveGatherLoadSVMicroop>( | 351 SveGatherLoadSVMicroop, 352 SveFirstFaultWritebackMicroop>( |
336 mn, machInst, MemReadOp, zt, pg, rn, zm, | 353 mn, machInst, MemReadOp, zt, pg, rn, zm, |
337 offsetIs32, offsetIsSigned, offsetIsScaled); | 354 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
338 } else { 339 return new SveIndexedMemSV<uint64_t, uint16_t, | 355 } else { 356 return new SveIndexedMemSV<uint64_t, uint16_t, |
340 SveGatherLoadSVMicroop>( | 357 SveGatherLoadSVMicroop, 358 SveFirstFaultWritebackMicroop>( |
341 mn, machInst, MemReadOp, zt, pg, rn, zm, | 359 mn, machInst, MemReadOp, zt, pg, rn, zm, |
342 offsetIs32, offsetIsSigned, offsetIsScaled); | 360 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
343 } 344 case 0x4: 345 if (esizeIs32) { 346 break; 347 } else { 348 return new SveIndexedMemSV<int64_t, int32_t, | 361 } 362 case 0x4: 363 if (esizeIs32) { 364 break; 365 } else { 366 return new SveIndexedMemSV<int64_t, int32_t, |
349 SveGatherLoadSVMicroop>( | 367 SveGatherLoadSVMicroop, 368 SveFirstFaultWritebackMicroop>( |
350 mn, machInst, MemReadOp, zt, pg, rn, zm, | 369 mn, machInst, MemReadOp, zt, pg, rn, zm, |
351 offsetIs32, offsetIsSigned, offsetIsScaled); | 370 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
352 } 353 case 0x5: 354 if (esizeIs32) { 355 return new SveIndexedMemSV<uint32_t, uint32_t, | 371 } 372 case 0x5: 373 if (esizeIs32) { 374 return new SveIndexedMemSV<uint32_t, uint32_t, |
356 SveGatherLoadSVMicroop>( | 375 SveGatherLoadSVMicroop, 376 SveFirstFaultWritebackMicroop>( |
357 mn, machInst, MemReadOp, zt, pg, rn, zm, | 377 mn, machInst, MemReadOp, zt, pg, rn, zm, |
358 offsetIs32, offsetIsSigned, offsetIsScaled); | 378 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
359 } else { 360 return new SveIndexedMemSV<uint64_t, uint32_t, | 379 } else { 380 return new SveIndexedMemSV<uint64_t, uint32_t, |
361 SveGatherLoadSVMicroop>( | 381 SveGatherLoadSVMicroop, 382 SveFirstFaultWritebackMicroop>( |
362 mn, machInst, MemReadOp, zt, pg, rn, zm, | 383 mn, machInst, MemReadOp, zt, pg, rn, zm, |
363 offsetIs32, offsetIsSigned, offsetIsScaled); | 384 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
364 } 365 case 0x7: 366 if (esizeIs32) { 367 break; 368 } else { 369 return new SveIndexedMemSV<uint64_t, uint64_t, | 385 } 386 case 0x7: 387 if (esizeIs32) { 388 break; 389 } else { 390 return new SveIndexedMemSV<uint64_t, uint64_t, |
370 SveGatherLoadSVMicroop>( | 391 SveGatherLoadSVMicroop, 392 SveFirstFaultWritebackMicroop>( |
371 mn, machInst, MemReadOp, zt, pg, rn, zm, | 393 mn, machInst, MemReadOp, zt, pg, rn, zm, |
372 offsetIs32, offsetIsSigned, offsetIsScaled); | 394 offsetIs32, offsetIsSigned, offsetIsScaled, firstFault); |
373 } 374 } 375 return new Unknown64(machInst); 376 } 377 378 StaticInstPtr 379 decodeSveScatterStoreVIInsts(uint8_t msz, ExtMachInst machInst, 380 IntRegIndex zt, IntRegIndex pg, 381 IntRegIndex zn, uint64_t imm, 382 bool esizeIs32) 383 { 384 const char* mn = "st1"; 385 switch (msz) { 386 case 0x0: 387 if (esizeIs32) { 388 return new SveIndexedMemVI<uint32_t, uint8_t, | 395 } 396 } 397 return new Unknown64(machInst); 398 } 399 400 StaticInstPtr 401 decodeSveScatterStoreVIInsts(uint8_t msz, ExtMachInst machInst, 402 IntRegIndex zt, IntRegIndex pg, 403 IntRegIndex zn, uint64_t imm, 404 bool esizeIs32) 405 { 406 const char* mn = "st1"; 407 switch (msz) { 408 case 0x0: 409 if (esizeIs32) { 410 return new SveIndexedMemVI<uint32_t, uint8_t, |
389 SveScatterStoreVIMicroop>( 390 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 411 SveScatterStoreVIMicroop, 412 SveFirstFaultWritebackMicroop>( 413 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
391 } else { 392 return new SveIndexedMemVI<uint64_t, uint8_t, | 414 } else { 415 return new SveIndexedMemVI<uint64_t, uint8_t, |
393 SveScatterStoreVIMicroop>( 394 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 416 SveScatterStoreVIMicroop, 417 SveFirstFaultWritebackMicroop>( 418 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
395 } 396 case 0x1: 397 if (esizeIs32) { 398 return new SveIndexedMemVI<uint32_t, uint16_t, | 419 } 420 case 0x1: 421 if (esizeIs32) { 422 return new SveIndexedMemVI<uint32_t, uint16_t, |
399 SveScatterStoreVIMicroop>( 400 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 423 SveScatterStoreVIMicroop, 424 SveFirstFaultWritebackMicroop>( 425 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
401 } else { 402 return new SveIndexedMemVI<uint64_t, uint16_t, | 426 } else { 427 return new SveIndexedMemVI<uint64_t, uint16_t, |
403 SveScatterStoreVIMicroop>( 404 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 428 SveScatterStoreVIMicroop, 429 SveFirstFaultWritebackMicroop>( 430 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
405 } 406 case 0x2: 407 if (esizeIs32) { 408 return new SveIndexedMemVI<uint32_t, uint32_t, | 431 } 432 case 0x2: 433 if (esizeIs32) { 434 return new SveIndexedMemVI<uint32_t, uint32_t, |
409 SveScatterStoreVIMicroop>( 410 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 435 SveScatterStoreVIMicroop, 436 SveFirstFaultWritebackMicroop>( 437 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
411 } else { 412 return new SveIndexedMemVI<uint64_t, uint32_t, | 438 } else { 439 return new SveIndexedMemVI<uint64_t, uint32_t, |
413 SveScatterStoreVIMicroop>( 414 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 440 SveScatterStoreVIMicroop, 441 SveFirstFaultWritebackMicroop>( 442 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
415 } 416 case 0x3: 417 if (esizeIs32) { 418 break; 419 } else { 420 return new SveIndexedMemVI<uint64_t, uint64_t, | 443 } 444 case 0x3: 445 if (esizeIs32) { 446 break; 447 } else { 448 return new SveIndexedMemVI<uint64_t, uint64_t, |
421 SveScatterStoreVIMicroop>( 422 mn, machInst, MemWriteOp, zt, pg, zn, imm); | 449 SveScatterStoreVIMicroop, 450 SveFirstFaultWritebackMicroop>( 451 mn, machInst, MemWriteOp, zt, pg, zn, imm, false); |
423 } 424 } 425 return new Unknown64(machInst); 426 } 427 428 StaticInstPtr 429 decodeSveScatterStoreSVInsts(uint8_t msz, ExtMachInst machInst, 430 IntRegIndex zt, IntRegIndex pg, 431 IntRegIndex rn, IntRegIndex zm, 432 bool esizeIs32, bool offsetIs32, 433 bool offsetIsSigned, bool offsetIsScaled) 434 { 435 const char* mn = "st1"; 436 switch (msz) { 437 case 0x0: 438 if (esizeIs32) { 439 return new SveIndexedMemSV<uint32_t, uint8_t, | 452 } 453 } 454 return new Unknown64(machInst); 455 } 456 457 StaticInstPtr 458 decodeSveScatterStoreSVInsts(uint8_t msz, ExtMachInst machInst, 459 IntRegIndex zt, IntRegIndex pg, 460 IntRegIndex rn, IntRegIndex zm, 461 bool esizeIs32, bool offsetIs32, 462 bool offsetIsSigned, bool offsetIsScaled) 463 { 464 const char* mn = "st1"; 465 switch (msz) { 466 case 0x0: 467 if (esizeIs32) { 468 return new SveIndexedMemSV<uint32_t, uint8_t, |
440 SveScatterStoreSVMicroop>( | 469 SveScatterStoreSVMicroop, 470 SveFirstFaultWritebackMicroop>( |
441 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 471 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
442 offsetIs32, offsetIsSigned, offsetIsScaled); | 472 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
443 } else { 444 return new SveIndexedMemSV<uint64_t, uint8_t, | 473 } else { 474 return new SveIndexedMemSV<uint64_t, uint8_t, |
445 SveScatterStoreSVMicroop>( | 475 SveScatterStoreSVMicroop, 476 SveFirstFaultWritebackMicroop>( |
446 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 477 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
447 offsetIs32, offsetIsSigned, offsetIsScaled); | 478 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
448 } 449 case 0x1: 450 if (esizeIs32) { 451 return new SveIndexedMemSV<uint32_t, uint16_t, | 479 } 480 case 0x1: 481 if (esizeIs32) { 482 return new SveIndexedMemSV<uint32_t, uint16_t, |
452 SveScatterStoreSVMicroop>( | 483 SveScatterStoreSVMicroop, 484 SveFirstFaultWritebackMicroop>( |
453 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 485 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
454 offsetIs32, offsetIsSigned, offsetIsScaled); | 486 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
455 } else { 456 return new SveIndexedMemSV<uint64_t, uint16_t, | 487 } else { 488 return new SveIndexedMemSV<uint64_t, uint16_t, |
457 SveScatterStoreSVMicroop>( | 489 SveScatterStoreSVMicroop, 490 SveFirstFaultWritebackMicroop>( |
458 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 491 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
459 offsetIs32, offsetIsSigned, offsetIsScaled); | 492 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
460 } 461 case 0x2: 462 if (esizeIs32) { 463 return new SveIndexedMemSV<uint32_t, uint32_t, | 493 } 494 case 0x2: 495 if (esizeIs32) { 496 return new SveIndexedMemSV<uint32_t, uint32_t, |
464 SveScatterStoreSVMicroop>( | 497 SveScatterStoreSVMicroop, 498 SveFirstFaultWritebackMicroop>( |
465 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 499 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
466 offsetIs32, offsetIsSigned, offsetIsScaled); | 500 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
467 } else { 468 return new SveIndexedMemSV<uint64_t, uint32_t, | 501 } else { 502 return new SveIndexedMemSV<uint64_t, uint32_t, |
469 SveScatterStoreSVMicroop>( | 503 SveScatterStoreSVMicroop, 504 SveFirstFaultWritebackMicroop>( |
470 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 505 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
471 offsetIs32, offsetIsSigned, offsetIsScaled); | 506 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
472 } 473 case 0x3: 474 if (esizeIs32) { 475 break; 476 } else { 477 return new SveIndexedMemSV<uint64_t, uint64_t, | 507 } 508 case 0x3: 509 if (esizeIs32) { 510 break; 511 } else { 512 return new SveIndexedMemSV<uint64_t, uint64_t, |
478 SveScatterStoreSVMicroop>( | 513 SveScatterStoreSVMicroop, 514 SveFirstFaultWritebackMicroop>( |
479 mn, machInst, MemWriteOp, zt, pg, rn, zm, | 515 mn, machInst, MemWriteOp, zt, pg, rn, zm, |
480 offsetIs32, offsetIsSigned, offsetIsScaled); | 516 offsetIs32, offsetIsSigned, offsetIsScaled, false); |
481 } 482 } 483 return new Unknown64(machInst); 484 } 485 486}}; 487 488 --- 11 unchanged lines hidden (view full) --- 500 ''' 501 502 def emitSveMemFillSpill(isPred): 503 global header_output, exec_output, decoders 504 eaCode = SPAlignmentCheckCode + ''' 505 int memAccessSize = %(memacc_size)s; 506 EA = XBase + ((int64_t) imm * %(memacc_size)s)''' % { 507 'memacc_size': 'eCount / 8' if isPred else 'eCount'} | 517 } 518 } 519 return new Unknown64(machInst); 520 } 521 522}}; 523 524 --- 11 unchanged lines hidden (view full) --- 536 ''' 537 538 def emitSveMemFillSpill(isPred): 539 global header_output, exec_output, decoders 540 eaCode = SPAlignmentCheckCode + ''' 541 int memAccessSize = %(memacc_size)s; 542 EA = XBase + ((int64_t) imm * %(memacc_size)s)''' % { 543 'memacc_size': 'eCount / 8' if isPred else 'eCount'} |
544 loadRdEnableCode = ''' 545 auto rdEn = std::vector<bool>(); 546 ''' |
|
508 if isPred: 509 loadMemAccCode = ''' 510 int index = 0; 511 uint8_t byte; 512 for (int i = 0; i < eCount / 8; i++) { 513 byte = memDataView[i]; 514 for (int j = 0; j < 8; j++, index++) { 515 PDest_x[index] = (byte >> j) & 1; --- 30 unchanged lines hidden (view full) --- 546 ''' 547 loadIop = InstObjParams('ldr', 548 'SveLdrPred' if isPred else 'SveLdrVec', 549 'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill', 550 {'tpl_header': '', 551 'tpl_args': '', 552 'memacc_code': loadMemAccCode, 553 'ea_code' : sveEnabledCheckCode + eaCode, | 547 if isPred: 548 loadMemAccCode = ''' 549 int index = 0; 550 uint8_t byte; 551 for (int i = 0; i < eCount / 8; i++) { 552 byte = memDataView[i]; 553 for (int j = 0; j < 8; j++, index++) { 554 PDest_x[index] = (byte >> j) & 1; --- 30 unchanged lines hidden (view full) --- 585 ''' 586 loadIop = InstObjParams('ldr', 587 'SveLdrPred' if isPred else 'SveLdrVec', 588 'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill', 589 {'tpl_header': '', 590 'tpl_args': '', 591 'memacc_code': loadMemAccCode, 592 'ea_code' : sveEnabledCheckCode + eaCode, |
593 'rden_code' : loadRdEnableCode, 594 'fault_code' : '', |
|
554 'fa_code' : ''}, 555 ['IsMemRef', 'IsLoad']) 556 storeIop = InstObjParams('str', 557 'SveStrPred' if isPred else 'SveStrVec', 558 'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill', 559 {'tpl_header': '', 560 'tpl_args': '', 561 'wren_code': storeWrEnableCode, --- 66 unchanged lines hidden (view full) --- 628 ('uint32_t', 'uint32_t'), 629 ('uint64_t', 'uint32_t'), 630 ('uint64_t', 'uint64_t'), 631 ) 632 633 # Generates definitions for SVE contiguous loads 634 def emitSveContigMemInsts(offsetIsImm): 635 global header_output, exec_output, decoders | 595 'fa_code' : ''}, 596 ['IsMemRef', 'IsLoad']) 597 storeIop = InstObjParams('str', 598 'SveStrPred' if isPred else 'SveStrVec', 599 'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill', 600 {'tpl_header': '', 601 'tpl_args': '', 602 'wren_code': storeWrEnableCode, --- 66 unchanged lines hidden (view full) --- 669 ('uint32_t', 'uint32_t'), 670 ('uint64_t', 'uint32_t'), 671 ('uint64_t', 'uint64_t'), 672 ) 673 674 # Generates definitions for SVE contiguous loads 675 def emitSveContigMemInsts(offsetIsImm): 676 global header_output, exec_output, decoders |
677 # First-faulting instructions only have a scalar plus scalar form, 678 # while non-faulting instructions have only a scalar plus immediate form, 679 # so `offsetIsImm` is used to determine which class of instructions is 680 # generated 681 firstFaulting = not offsetIsImm |
|
636 tplHeader = 'template <class RegElemType, class MemElemType>' 637 tplArgs = '<RegElemType, MemElemType>' 638 eaCode = SPAlignmentCheckCode + ''' 639 int memAccessSize = eCount * sizeof(MemElemType); 640 EA = XBase + ''' 641 if offsetIsImm: 642 eaCode += '((int64_t) this->imm * eCount * sizeof(MemElemType))' 643 else: 644 eaCode += '(XOffset * sizeof(MemElemType));' | 682 tplHeader = 'template <class RegElemType, class MemElemType>' 683 tplArgs = '<RegElemType, MemElemType>' 684 eaCode = SPAlignmentCheckCode + ''' 685 int memAccessSize = eCount * sizeof(MemElemType); 686 EA = XBase + ''' 687 if offsetIsImm: 688 eaCode += '((int64_t) this->imm * eCount * sizeof(MemElemType))' 689 else: 690 eaCode += '(XOffset * sizeof(MemElemType));' |
691 loadRdEnableCode = ''' 692 auto rdEn = std::vector<bool>(sizeof(MemElemType) * eCount, true); 693 for (int i = 0; i < eCount; i++) { 694 if (!GpOp_x[i]) { 695 for (int j = 0; j < sizeof(MemElemType); j++) { 696 rdEn[sizeof(MemElemType) * i + j] = false; 697 } 698 } 699 } 700 ''' |
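loadRdEnableCode above widens the per-element governing predicate into a per-byte read-enable mask, one flag per byte of each memory element. The same logic as a self-contained function — `buildRdEn` is a hypothetical name; the byte granularity mirrors the snippet, not a documented gem5 interface:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Expand a per-element governing predicate into a per-byte enable mask
// (mirrors loadRdEnableCode above).
std::vector<bool> buildRdEn(const std::vector<bool> &pred,
                            std::size_t elemSize)
{
    std::vector<bool> rdEn(pred.size() * elemSize, true);
    for (std::size_t i = 0; i < pred.size(); ++i) {
        if (!pred[i]) {
            for (std::size_t j = 0; j < elemSize; ++j)
                rdEn[elemSize * i + j] = false;  // inactive element: bytes off
        }
    }
    return rdEn;
}

int main()
{
    // Four 32-bit elements with the second inactive: bytes 4..7 disabled.
    auto rdEn = buildRdEn({true, false, true, true}, sizeof(std::uint32_t));
    (void)rdEn;
}
```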
|
645 loadMemAccCode = ''' 646 for (int i = 0; i < eCount; i++) { 647 if (GpOp_x[i]) { 648 AA64FpDest_x[i] = memDataView[i]; 649 } else { 650 AA64FpDest_x[i] = 0; 651 } 652 } --- 8 unchanged lines hidden (view full) --- 661 wrEn[sizeof(MemElemType) * i + j] = false; 662 } 663 } 664 } 665 ''' 666 storeWrEnableCode = ''' 667 auto wrEn = std::vector<bool>(sizeof(MemElemType) * eCount, true); 668 ''' | 701 loadMemAccCode = ''' 702 for (int i = 0; i < eCount; i++) { 703 if (GpOp_x[i]) { 704 AA64FpDest_x[i] = memDataView[i]; 705 } else { 706 AA64FpDest_x[i] = 0; 707 } 708 } --- 8 unchanged lines hidden (view full) --- 717 wrEn[sizeof(MemElemType) * i + j] = false; 718 } 719 } 720 } 721 ''' 722 storeWrEnableCode = ''' 723 auto wrEn = std::vector<bool>(sizeof(MemElemType) * eCount, true); 724 ''' |
725 ffrReadBackCode = ''' 726 auto& firstFaultReg = Ffr;''' 727 faultingLoadMemAccCode = ''' 728 for (int i = 0; i < eCount; i++) { 729 if (GpOp_x[i] && firstFaultReg[i * sizeof(RegElemType)]) { 730 AA64FpDest_x[i] = memDataView[i]; 731 } else { 732 AA64FpDest_x[i] = 0; 733 } 734 } 735 ''' 736 nonFaultingCode = 'true ||' 737 faultCode = ''' 738 Addr fault_addr; 739 if (fault == NoFault || getFaultVAddr(fault, fault_addr)) { 740 unsigned fault_elem_index; 741 if (fault != NoFault) { 742 assert(fault_addr >= EA); 743 fault_elem_index = (fault_addr - EA) / sizeof(MemElemType); 744 } else { 745 fault_elem_index = eCount + 1; 746 } 747 int first_active_index; 748 for (first_active_index = 0; 749 first_active_index < eCount && !(GpOp_x[first_active_index]); 750 first_active_index++); 751 if (%s first_active_index < fault_elem_index) { 752 for (int i = 0; i < eCount; i++) { 753 for (int j = 0; j < sizeof(RegElemType); j++) { 754 if (i < fault_elem_index) { 755 Ffr_ub[i * sizeof(RegElemType) + j] = FfrAux_x[i]; 756 } else { 757 Ffr_ub[i * sizeof(RegElemType) + j] = 0; 758 } 759 } 760 } 761 fault = NoFault; 762 if (first_active_index >= fault_elem_index) { 763 // non-faulting load needs this 764 xc->setMemAccPredicate(false); 765 } 766 } 767 } 768 ''' % ('' if firstFaulting else nonFaultingCode) 769 |
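faultCode above encodes the FF/NF policy: a fault is architecturally taken only when it lands on the first active element of a first-faulting (LDFF1) load; otherwise — and always for non-faulting LDNF1, via the prepended `true ||` — the fault is suppressed and the FFR is trimmed so that the faulting element and everything after it read as inactive. A compact model of that decision, assuming an element-granular FFR rather than gem5's byte-granular one (`mustTakeFault` is a hypothetical helper):

```cpp
#include <cstddef>
#include <vector>

// FF/NF fault policy. Returns true if the fault must be taken
// architecturally; otherwise trims the FFR and suppresses it.
bool mustTakeFault(std::vector<bool> &ffr, const std::vector<bool> &pred,
                   std::size_t faultIdx, bool firstFault)
{
    std::size_t firstActive = 0;
    while (firstActive < pred.size() && !pred[firstActive])
        ++firstActive;
    if (firstFault && firstActive >= faultIdx)
        return true;    // LDFF1 still faults on its first active element
    for (std::size_t i = faultIdx; i < ffr.size(); ++i)
        ffr[i] = false; // faulting element and later ones become inactive
    return false;       // fault suppressed; partial result is architectural
}
```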
|
669 loadIop = InstObjParams('ld1', 670 'SveContigLoadSI' if offsetIsImm else 'SveContigLoadSS', 671 'SveContigMemSI' if offsetIsImm else 'SveContigMemSS', 672 {'tpl_header': tplHeader, 673 'tpl_args': tplArgs, | 770 loadIop = InstObjParams('ld1', 771 'SveContigLoadSI' if offsetIsImm else 'SveContigLoadSS', 772 'SveContigMemSI' if offsetIsImm else 'SveContigMemSS', 773 {'tpl_header': tplHeader, 774 'tpl_args': tplArgs, |
775 'rden_code' : loadRdEnableCode, |
|
674 'memacc_code': loadMemAccCode, 675 'ea_code' : sveEnabledCheckCode + eaCode, | 776 'memacc_code': loadMemAccCode, 777 'ea_code' : sveEnabledCheckCode + eaCode, |
778 'fault_code' : '', |
|
676 'fa_code' : ''}, 677 ['IsMemRef', 'IsLoad']) 678 storeIop = InstObjParams('st1', 679 'SveContigStoreSI' if offsetIsImm else 'SveContigStoreSS', 680 'SveContigMemSI' if offsetIsImm else 'SveContigMemSS', 681 {'tpl_header': tplHeader, 682 'tpl_args': tplArgs, 683 'wren_code': storeWrEnableCode, 684 'memacc_code': storeMemAccCode, 685 'ea_code' : sveEnabledCheckCode + eaCode, 686 'fa_code' : ''}, 687 ['IsMemRef', 'IsStore']) | 779 'fa_code' : ''}, 780 ['IsMemRef', 'IsLoad']) 781 storeIop = InstObjParams('st1', 782 'SveContigStoreSI' if offsetIsImm else 'SveContigStoreSS', 783 'SveContigMemSI' if offsetIsImm else 'SveContigMemSS', 784 {'tpl_header': tplHeader, 785 'tpl_args': tplArgs, 786 'wren_code': storeWrEnableCode, 787 'memacc_code': storeMemAccCode, 788 'ea_code' : sveEnabledCheckCode + eaCode, 789 'fa_code' : ''}, 790 ['IsMemRef', 'IsStore']) |
791 faultIop = InstObjParams('ldff1' if firstFaulting else 'ldnf1', 792 'SveContigFFLoadSS' if firstFaulting else 'SveContigNFLoadSI', 793 'SveContigMemSS' if firstFaulting else 'SveContigMemSI', 794 {'tpl_header': tplHeader, 795 'tpl_args': tplArgs, 796 'rden_code' : loadRdEnableCode, 797 'memacc_code': faultingLoadMemAccCode, 798 'ea_code' : sveEnabledCheckCode + eaCode, 799 'fault_code' : faultCode, 800 'fa_code' : ''}, 801 ['IsMemRef', 'IsLoad']) 802 faultIop.snippets['memacc_code'] = (ffrReadBackCode + 803 faultIop.snippets['memacc_code']) |
|
688 if offsetIsImm: 689 header_output += SveContigMemSIOpDeclare.subst(loadIop) 690 header_output += SveContigMemSIOpDeclare.subst(storeIop) | 804 if offsetIsImm: 805 header_output += SveContigMemSIOpDeclare.subst(loadIop) 806 header_output += SveContigMemSIOpDeclare.subst(storeIop) |
807 header_output += SveContigMemSIOpDeclare.subst(faultIop) |
|
691 else: 692 header_output += SveContigMemSSOpDeclare.subst(loadIop) 693 header_output += SveContigMemSSOpDeclare.subst(storeIop) | 808 else: 809 header_output += SveContigMemSSOpDeclare.subst(loadIop) 810 header_output += SveContigMemSSOpDeclare.subst(storeIop) |
811 header_output += SveContigMemSSOpDeclare.subst(faultIop) |
|
694 exec_output += ( 695 SveContigLoadExecute.subst(loadIop) + 696 SveContigLoadInitiateAcc.subst(loadIop) + 697 SveContigLoadCompleteAcc.subst(loadIop) + 698 SveContigStoreExecute.subst(storeIop) + 699 SveContigStoreInitiateAcc.subst(storeIop) + | 812 exec_output += ( 813 SveContigLoadExecute.subst(loadIop) + 814 SveContigLoadInitiateAcc.subst(loadIop) + 815 SveContigLoadCompleteAcc.subst(loadIop) + 816 SveContigStoreExecute.subst(storeIop) + 817 SveContigStoreInitiateAcc.subst(storeIop) + |
700 SveContigStoreCompleteAcc.subst(storeIop)) | 818 SveContigStoreCompleteAcc.subst(storeIop) + 819 SveContigLoadExecute.subst(faultIop) + 820 SveContigLoadInitiateAcc.subst(faultIop) + 821 SveContigLoadCompleteAcc.subst(faultIop)) 822 |
701 for args in loadTplArgs: 702 substDict = {'tpl_args': '<%s>' % ', '.join(args), 703 'class_name': 'SveContigLoadSI' if offsetIsImm 704 else 'SveContigLoadSS'} 705 exec_output += SveContigMemExecDeclare.subst(substDict) 706 for args in storeTplArgs: 707 substDict = {'tpl_args': '<%s>' % ', '.join(args), 708 'class_name': 'SveContigStoreSI' if offsetIsImm 709 else 'SveContigStoreSS'} 710 exec_output += SveContigMemExecDeclare.subst(substDict) | 823 for args in loadTplArgs: 824 substDict = {'tpl_args': '<%s>' % ', '.join(args), 825 'class_name': 'SveContigLoadSI' if offsetIsImm 826 else 'SveContigLoadSS'} 827 exec_output += SveContigMemExecDeclare.subst(substDict) 828 for args in storeTplArgs: 829 substDict = {'tpl_args': '<%s>' % ', '.join(args), 830 'class_name': 'SveContigStoreSI' if offsetIsImm 831 else 'SveContigStoreSS'} 832 exec_output += SveContigMemExecDeclare.subst(substDict) |
833 for args in loadTplArgs: 834 substDict = {'tpl_args': '<%s>' % ', '.join(args), 835 'class_name': 'SveContigFFLoadSS' if firstFaulting 836 else 'SveContigNFLoadSI'} 837 exec_output += SveContigMemExecDeclare.subst(substDict) |
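Each SveContigMemExecDeclare substitution in these loops stamps out explicit instantiations of the templated execute methods for one element-type pair, so the definitions land in a single translation unit. The underlying C++ mechanism in miniature (`Demo` is illustrative only):

```cpp
#include <iostream>

template <class T>
struct Demo {
    void run() const { std::cout << "elem size " << sizeof(T) << "\n"; }
};

// Explicit instantiation: the definition of Demo<int> is emitted here, in
// this translation unit, for other units to link against.
template struct Demo<int>;

int main() { Demo<int>{}.run(); }
```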
|
711 | 838 |
839 |
|
712 # Generates definitions for SVE load-and-replicate instructions 713 def emitSveLoadAndRepl(): 714 global header_output, exec_output, decoders 715 tplHeader = 'template <class RegElemType, class MemElemType>' 716 tplArgs = '<RegElemType, MemElemType>' 717 eaCode = SPAlignmentCheckCode + ''' 718 EA = XBase + imm * sizeof(MemElemType);''' 719 memAccCode = ''' --- 48 unchanged lines hidden (view full) --- 768 if (offsetIsSigned) { 769 offset = sext<32>(offset); 770 } 771 if (offsetIsScaled) { 772 offset *= sizeof(MemElemType); 773 } 774 EA = XBase + offset''' 775 loadMemAccCode = ''' | 840 # Generates definitions for SVE load-and-replicate instructions 841 def emitSveLoadAndRepl(): 842 global header_output, exec_output, decoders 843 tplHeader = 'template <class RegElemType, class MemElemType>' 844 tplArgs = '<RegElemType, MemElemType>' 845 eaCode = SPAlignmentCheckCode + ''' 846 EA = XBase + imm * sizeof(MemElemType);''' 847 memAccCode = ''' --- 48 unchanged lines hidden (view full) --- 896 if (offsetIsSigned) { 897 offset = sext<32>(offset); 898 } 899 if (offsetIsScaled) { 900 offset *= sizeof(MemElemType); 901 } 902 EA = XBase + offset''' 903 loadMemAccCode = ''' |
776 if (GpOp_x[elemIndex]) { 777 AA64FpDest_x[elemIndex] = memData; 778 } else { 779 AA64FpDest_x[elemIndex] = 0; 780 } | 904 AA64FpDest_x[elemIndex] = memData; |
781 ''' 782 storeMemAccCode = ''' 783 memData = AA64FpDest_x[elemIndex]; 784 ''' | 905 ''' 906 storeMemAccCode = ''' 907 memData = AA64FpDest_x[elemIndex]; 908 ''' |
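The eaCode shown above for the scalar-plus-vector form sign- or zero-extends each 32-bit offset element and optionally scales it by the memory element size. The same computation as a free function; this sketch folds the sign-extension under the 32-bit case and assumes the hidden preceding lines mask the offset when offsetIs32 is set (`gatherOffset` is a hypothetical name):

```cpp
#include <cstdint>

// Per-element offset for scalar-plus-vector addressing (mirrors eaCode).
std::uint64_t gatherOffset(std::uint64_t offset, bool offsetIs32,
                           bool offsetIsSigned, bool offsetIsScaled,
                           std::uint64_t memElemSize)
{
    if (offsetIs32) {
        offset &= 0xffffffffULL;
        if (offsetIsSigned && (offset & 0x80000000ULL))
            offset |= 0xffffffff00000000ULL;  // sext<32>
    }
    if (offsetIsScaled)
        offset *= memElemSize;                // scale by memory element size
    return offset;                            // EA = XBase + offset
}

int main()
{
    // -4 as a signed 32-bit index into 8-byte elements yields -32.
    (void)gatherOffset(0xfffffffcULL, true, true, true, 8);
}
```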
785 predCheckCode = 'GpOp_x[elemIndex]' | 909 predCheckCode = 'GpOp_x[index]' 910 faultStatusSetCode = 'PUreg0_x[elemIndex] = 1;' 911 faultStatusResetCode = 'PUreg0_x[elemIndex] = 0;' |
786 loadIop = InstObjParams('ld1', 787 ('SveGatherLoadVIMicroop' 788 if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM 789 else 'SveGatherLoadSVMicroop'), 790 'MicroOp', 791 {'tpl_header': tplHeader, 792 'tpl_args': tplArgs, 793 'memacc_code': loadMemAccCode, 794 'ea_code' : sveEnabledCheckCode + eaCode, | 912 loadIop = InstObjParams('ld1', 913 ('SveGatherLoadVIMicroop' 914 if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM 915 else 'SveGatherLoadSVMicroop'), 916 'MicroOp', 917 {'tpl_header': tplHeader, 918 'tpl_args': tplArgs, 919 'memacc_code': loadMemAccCode, 920 'ea_code' : sveEnabledCheckCode + eaCode, |
921 'fault_status_set_code' : faultStatusSetCode, 922 'fault_status_reset_code' : faultStatusResetCode, |
|
795 'pred_check_code' : predCheckCode, 796 'fa_code' : ''}, 797 ['IsMicroop', 'IsMemRef', 'IsLoad']) 798 storeIop = InstObjParams('st1', 799 ('SveScatterStoreVIMicroop' 800 if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM 801 else 'SveScatterStoreSVMicroop'), 802 'MicroOp', --- 31 unchanged lines hidden (view full) --- 834 'class_name': ( 835 'SveScatterStoreVIMicroop' 836 if indexed_addr_form == \ 837 IndexedAddrForm.VEC_PLUS_IMM 838 else 'SveScatterStoreSVMicroop')} 839 # TODO: this should become SveMemExecDeclare 840 exec_output += SveContigMemExecDeclare.subst(substDict) 841 | 923 'pred_check_code' : predCheckCode, 924 'fa_code' : ''}, 925 ['IsMicroop', 'IsMemRef', 'IsLoad']) 926 storeIop = InstObjParams('st1', 927 ('SveScatterStoreVIMicroop' 928 if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM 929 else 'SveScatterStoreSVMicroop'), 930 'MicroOp', --- 31 unchanged lines hidden (view full) --- 962 'class_name': ( 963 'SveScatterStoreVIMicroop' 964 if indexed_addr_form == \ 965 IndexedAddrForm.VEC_PLUS_IMM 966 else 'SveScatterStoreSVMicroop')} 967 # TODO: this should become SveMemExecDeclare 968 exec_output += SveContigMemExecDeclare.subst(substDict) 969 |
970 firstFaultTplArgs = ('int32_t', 'int64_t', 'uint32_t', 'uint64_t') 971 972 def emitSveFirstFaultWritebackMicroop(): 973 global header_output, exec_output, decoders 974 tplHeader = 'template <class RegElemType>' 975 tplArgs = '<RegElemType>' 976 faultStatusCheckCode = 'PUreg0_x[index]' 977 firstFaultResetCode = ''' 978 for(int j = 0; j < sizeof(RegElemType); j++) { 979 Ffr_ub[index * sizeof(RegElemType) + j] = 0; 980 } 981 ''' 982 firstFaultForwardCode = ''' 983 for(int j = 0; j < sizeof(RegElemType); j++) { 984 Ffr_ub[index * sizeof(RegElemType) + j] = FfrAux_x[index]; 985 } 986 ''' 987 iop = InstObjParams('ldff1', 988 'SveFirstFaultWritebackMicroop', 989 'MicroOp', 990 {'tpl_header': tplHeader, 991 'tpl_args': tplArgs, 992 'fault_status_check_code' : faultStatusCheckCode, 993 'first_fault_reset_code' : firstFaultResetCode, 994 'first_fault_forward_code' : firstFaultForwardCode}, 995 ['IsMicroop']) 996 header_output += SveFirstFaultWritebackMicroopDeclare.subst(iop) 997 exec_output += SveFirstFaultWritebackMicroopExecute.subst(iop) 998 for args in firstFaultTplArgs: 999 substDict = {'targs': args, 1000 'class_name' : 'SveFirstFaultWritebackMicroop' } 1001 exec_output += SveOpExecDeclare.subst(substDict) 1002 |
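The writeback microop above commits the per-element fault status recorded by the transfer microops (in PUreg0) into the byte-granular FFR: surviving elements forward their saved FFR bits, faulted or skipped ones are cleared. A simplified, element-granular model (`writebackFfr` is a hypothetical name):

```cpp
#include <cstddef>
#include <vector>

// Commit per-element fault status into the FFR: elements whose transfer
// survived forward their saved FFR bits, the rest are cleared.
void writebackFfr(std::vector<bool> &ffr, const std::vector<bool> &ffrAux,
                  const std::vector<bool> &faultStatus)
{
    for (std::size_t i = 0; i < ffr.size(); ++i)
        ffr[i] = faultStatus[i] ? ffrAux[i] : false;
}

int main()
{
    std::vector<bool> ffr{true, true, true, true};
    writebackFfr(ffr, {true, true, true, true}, {true, true, false, false});
    // ffr is now {1, 1, 0, 0}: elements 2..3 read as inactive afterwards.
}
```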
|
842 # Generates definitions for the first microop of SVE gather loads, required 843 # to propagate the source vector register to the transfer microops 844 def emitSveGatherLoadCpySrcVecMicroop(): 845 global header_output, exec_output, decoders 846 code = sveEnabledCheckCode + ''' 847 unsigned eCount = ArmStaticInst::getCurSveVecLen<uint8_t>( 848 xc->tcBase()); 849 for (unsigned i = 0; i < eCount; i++) { --- 4 unchanged lines hidden (view full) --- 854 'MicroOp', 855 {'code': code}, 856 ['IsMicroop']) 857 header_output += SveGatherLoadCpySrcVecMicroopDeclare.subst(iop) 858 exec_output += SveGatherLoadCpySrcVecMicroopExecute.subst(iop) 859 860 # LD1[S]{B,H,W,D} (scalar plus immediate) 861 # ST1[S]{B,H,W,D} (scalar plus immediate) | 1003 # Generates definitions for the first microop of SVE gather loads, required 1004 # to propagate the source vector register to the transfer microops 1005 def emitSveGatherLoadCpySrcVecMicroop(): 1006 global header_output, exec_output, decoders 1007 code = sveEnabledCheckCode + ''' 1008 unsigned eCount = ArmStaticInst::getCurSveVecLen<uint8_t>( 1009 xc->tcBase()); 1010 for (unsigned i = 0; i < eCount; i++) { --- 4 unchanged lines hidden (view full) --- 1015 'MicroOp', 1016 {'code': code}, 1017 ['IsMicroop']) 1018 header_output += SveGatherLoadCpySrcVecMicroopDeclare.subst(iop) 1019 exec_output += SveGatherLoadCpySrcVecMicroopExecute.subst(iop) 1020 1021 # LD1[S]{B,H,W,D} (scalar plus immediate) 1022 # ST1[S]{B,H,W,D} (scalar plus immediate) |
1023 # LDNF1[S]{B,H,W,D} (scalar plus immediate) |
|
862 emitSveContigMemInsts(True) 863 # LD1[S]{B,H,W,D} (scalar plus scalar) 864 # ST1[S]{B,H,W,D} (scalar plus scalar) | 1024 emitSveContigMemInsts(True) 1025 # LD1[S]{B,H,W,D} (scalar plus scalar) 1026 # ST1[S]{B,H,W,D} (scalar plus scalar) |
1027 # LDFF1[S]{B,H,W,D} (scalar plus scalar) |
|
865 emitSveContigMemInsts(False) 866 867 # LD1R[S]{B,H,W,D} 868 emitSveLoadAndRepl() 869 870 # LDR (predicate), STR (predicate) 871 emitSveMemFillSpill(True) 872 # LDR (vector), STR (vector) 873 emitSveMemFillSpill(False) 874 875 # LD1[S]{B,H,W,D} (vector plus immediate) 876 # ST1[S]{B,H,W,D} (vector plus immediate) | 1028 emitSveContigMemInsts(False) 1029 1030 # LD1R[S]{B,H,W,D} 1031 emitSveLoadAndRepl() 1032 1033 # LDR (predicate), STR (predicate) 1034 emitSveMemFillSpill(True) 1035 # LDR (vector), STR (vector) 1036 emitSveMemFillSpill(False) 1037 1038 # LD1[S]{B,H,W,D} (vector plus immediate) 1039 # ST1[S]{B,H,W,D} (vector plus immediate) |
1040 # LDFF1[S]{B,H,W,D} (vector plus immediate) |
|
877 emitSveIndexedMemMicroops(IndexedAddrForm.VEC_PLUS_IMM) 878 # LD1[S]{B,H,W,D} (scalar plus vector) 879 # ST1[S]{B,H,W,D} (scalar plus vector) | 1041 emitSveIndexedMemMicroops(IndexedAddrForm.VEC_PLUS_IMM) 1042 # LD1[S]{B,H,W,D} (scalar plus vector) 1043 # ST1[S]{B,H,W,D} (scalar plus vector) |
1044 # LDFF1[S]{B,H,W,D} (scalar plus vector) |
|
880 emitSveIndexedMemMicroops(IndexedAddrForm.SCA_PLUS_VEC) 881 | 1045 emitSveIndexedMemMicroops(IndexedAddrForm.SCA_PLUS_VEC) 1046 |
1047 # FFR writeback microop for gather loads 1048 emitSveFirstFaultWritebackMicroop() 1049 |
|
882 # Source vector copy microop for gather loads 883 emitSveGatherLoadCpySrcVecMicroop() | 1050 # Source vector copy microop for gather loads 1051 emitSveGatherLoadCpySrcVecMicroop() |
884 | |
885}}; | 1052}}; |