// Copyright (c) 2017-2018 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Giacomo Gabrielli

// @file Definition of SVE memory access instructions.

output header {{

    // Decodes SVE contiguous load instructions, scalar plus scalar form.
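    // The 4-bit dtype field encodes both the memory element size and the
    // register element size, and hence whether the load zero-extends
    // (unsigned type pairs) or sign-extends (signed type pairs); the
    // <RegElemType, MemElemType> pairs below follow the LD1/LDFF1 dtype
    // table of the SVE specification. The scalar plus immediate form
    // below shares the same encoding.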
    template <template <typename T1, typename T2> class Base>
    StaticInstPtr
    decodeSveContigLoadSSInsts(uint8_t dtype, ExtMachInst machInst,
                               IntRegIndex zt, IntRegIndex pg, IntRegIndex rn,
                               IntRegIndex rm, bool firstFaulting)
    {
        const char* mn = firstFaulting ? "ldff1" : "ld1";
        switch (dtype) {
          case 0x0:
            return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x1:
            return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x2:
            return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x3:
            return new Base<uint64_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x4:
            return new Base<int64_t, int32_t>(mn, machInst, zt, pg, rn, rm);
          case 0x5:
            return new Base<uint16_t, uint16_t>(mn, machInst, zt, pg, rn, rm);
          case 0x6:
            return new Base<uint32_t, uint16_t>(mn, machInst, zt, pg, rn, rm);
          case 0x7:
            return new Base<uint64_t, uint16_t>(mn, machInst, zt, pg, rn, rm);
          case 0x8:
            return new Base<int64_t, int16_t>(mn, machInst, zt, pg, rn, rm);
          case 0x9:
            return new Base<int32_t, int16_t>(mn, machInst, zt, pg, rn, rm);
          case 0xa:
            return new Base<uint32_t, uint32_t>(mn, machInst, zt, pg, rn, rm);
          case 0xb:
            return new Base<uint64_t, uint32_t>(mn, machInst, zt, pg, rn, rm);
          case 0xc:
            return new Base<int64_t, int8_t>(mn, machInst, zt, pg, rn, rm);
          case 0xd:
            return new Base<int32_t, int8_t>(mn, machInst, zt, pg, rn, rm);
          case 0xe:
            return new Base<int16_t, int8_t>(mn, machInst, zt, pg, rn, rm);
          case 0xf:
            return new Base<uint64_t, uint64_t>(mn, machInst, zt, pg, rn, rm);
        }
        return new Unknown64(machInst);
    }

    // Decodes SVE contiguous load instructions, scalar plus immediate form.
    template <template <typename T1, typename T2> class Base>
    StaticInstPtr
    decodeSveContigLoadSIInsts(uint8_t dtype, ExtMachInst machInst,
                               IntRegIndex zt, IntRegIndex pg, IntRegIndex rn,
                               uint64_t imm, bool nonFaulting,
                               bool replicate = false)
    {
        assert(!(nonFaulting && replicate));
        const char* mn = replicate ? "ld1r" : (nonFaulting ? "ldnf1" : "ld1");
        switch (dtype) {
          case 0x0:
            return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x1:
            return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x2:
            return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x3:
            return new Base<uint64_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x4:
            return new Base<int64_t, int32_t>(mn, machInst, zt, pg, rn, imm);
          case 0x5:
            return new Base<uint16_t, uint16_t>(mn, machInst, zt, pg, rn, imm);
          case 0x6:
            return new Base<uint32_t, uint16_t>(mn, machInst, zt, pg, rn, imm);
          case 0x7:
            return new Base<uint64_t, uint16_t>(mn, machInst, zt, pg, rn, imm);
          case 0x8:
            return new Base<int64_t, int16_t>(mn, machInst, zt, pg, rn, imm);
          case 0x9:
            return new Base<int32_t, int16_t>(mn, machInst, zt, pg, rn, imm);
          case 0xa:
            return new Base<uint32_t, uint32_t>(mn, machInst, zt, pg, rn, imm);
          case 0xb:
            return new Base<uint64_t, uint32_t>(mn, machInst, zt, pg, rn, imm);
          case 0xc:
            return new Base<int64_t, int8_t>(mn, machInst, zt, pg, rn, imm);
          case 0xd:
            return new Base<int32_t, int8_t>(mn, machInst, zt, pg, rn, imm);
          case 0xe:
            return new Base<int16_t, int8_t>(mn, machInst, zt, pg, rn, imm);
          case 0xf:
            return new Base<uint64_t, uint64_t>(mn, machInst, zt, pg, rn, imm);
        }
        return new Unknown64(machInst);
    }

    // Decodes SVE contiguous store instructions, scalar plus scalar form.
    template <template <typename T1, typename T2> class Base>
    StaticInstPtr
    decodeSveContigStoreSSInsts(uint8_t dtype, ExtMachInst machInst,
                                IntRegIndex zt, IntRegIndex pg, IntRegIndex rn,
                                IntRegIndex rm)
    {
        const char* mn = "st1";
        switch (dtype) {
          case 0x0:
            return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x1:
            return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x2:
            return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x3:
            return new Base<uint64_t, uint8_t>(mn, machInst, zt, pg, rn, rm);
          case 0x5:
            return new Base<uint16_t, uint16_t>(mn, machInst, zt, pg, rn, rm);
          case 0x6:
            return new Base<uint32_t, uint16_t>(mn, machInst, zt, pg, rn, rm);
          case 0x7:
            return new Base<uint64_t, uint16_t>(mn, machInst, zt, pg, rn, rm);
          case 0xa:
            return new Base<uint32_t, uint32_t>(mn, machInst, zt, pg, rn, rm);
          case 0xb:
            return new Base<uint64_t, uint32_t>(mn, machInst, zt, pg, rn, rm);
          case 0xf:
            return new Base<uint64_t, uint64_t>(mn, machInst, zt, pg, rn, rm);
        }
        return new Unknown64(machInst);
    }

    // Decodes SVE contiguous store instructions, scalar plus immediate form.
    template <template <typename T1, typename T2> class Base>
    StaticInstPtr
    decodeSveContigStoreSIInsts(uint8_t dtype, ExtMachInst machInst,
                                IntRegIndex zt, IntRegIndex pg, IntRegIndex rn,
                                int8_t imm)
    {
        const char* mn = "st1";
        switch (dtype) {
          case 0x0:
            return new Base<uint8_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x1:
            return new Base<uint16_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x2:
            return new Base<uint32_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x3:
            return new Base<uint64_t, uint8_t>(mn, machInst, zt, pg, rn, imm);
          case 0x5:
            return new Base<uint16_t, uint16_t>(mn, machInst, zt, pg, rn, imm);
          case 0x6:
            return new Base<uint32_t, uint16_t>(mn, machInst, zt, pg, rn, imm);
          case 0x7:
            return new Base<uint64_t, uint16_t>(mn, machInst, zt, pg, rn, imm);
          case 0xa:
            return new Base<uint32_t, uint32_t>(mn, machInst, zt, pg, rn, imm);
          case 0xb:
            return new Base<uint64_t, uint32_t>(mn, machInst, zt, pg, rn, imm);
          case 0xf:
            return new Base<uint64_t, uint64_t>(mn, machInst, zt, pg, rn, imm);
        }
        return new Unknown64(machInst);
    }

    // NOTE: SVE load-and-replicate instructions are decoded with
    // decodeSveContigLoadSIInsts(...).

}};

output decoder {{

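    // Decodes SVE gather load instructions, vector plus immediate form.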
    StaticInstPtr
    decodeSveGatherLoadVIInsts(uint8_t dtype, ExtMachInst machInst,
                               IntRegIndex zt, IntRegIndex pg, IntRegIndex zn,
                               uint64_t imm, bool esizeIs32,
                               bool firstFault)
    {
        const char* mn = firstFault ? "ldff1" : "ld1";
        switch (dtype) {
          case 0x0:
            if (esizeIs32) {
                return new SveIndexedMemVI<int32_t, int8_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            } else {
                return new SveIndexedMemVI<int64_t, int8_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
          case 0x1:
            if (esizeIs32) {
                return new SveIndexedMemVI<uint32_t, uint8_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            } else {
                return new SveIndexedMemVI<uint64_t, uint8_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
          case 0x2:
            if (esizeIs32) {
                return new SveIndexedMemVI<int32_t, int16_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            } else {
                return new SveIndexedMemVI<int64_t, int16_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
          case 0x3:
            if (esizeIs32) {
                return new SveIndexedMemVI<uint32_t, uint16_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            } else {
                return new SveIndexedMemVI<uint64_t, uint16_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
          case 0x4:
            if (esizeIs32) {
                break;
            } else {
                return new SveIndexedMemVI<int64_t, int32_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
          case 0x5:
            if (esizeIs32) {
                return new SveIndexedMemVI<uint32_t, uint32_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            } else {
                return new SveIndexedMemVI<uint64_t, uint32_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
          case 0x7:
            if (esizeIs32) {
                break;
            } else {
                return new SveIndexedMemVI<uint64_t, uint64_t,
                                           SveGatherLoadVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, zn, imm, firstFault);
            }
        }
        return new Unknown64(machInst);
    }

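    // Decodes SVE gather load instructions, scalar plus vector form.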
    StaticInstPtr
    decodeSveGatherLoadSVInsts(uint8_t dtype, ExtMachInst machInst,
                               IntRegIndex zt, IntRegIndex pg, IntRegIndex rn,
                               IntRegIndex zm, bool esizeIs32, bool offsetIs32,
                               bool offsetIsSigned, bool offsetIsScaled,
                               bool firstFault)
    {
        const char* mn = firstFault ? "ldff1" : "ld1";
        switch (dtype) {
          case 0x0:
            if (esizeIs32) {
                return new SveIndexedMemSV<int32_t, int8_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            } else {
                return new SveIndexedMemSV<int64_t, int8_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
          case 0x1:
            if (esizeIs32) {
                return new SveIndexedMemSV<uint32_t, uint8_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            } else {
                return new SveIndexedMemSV<uint64_t, uint8_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
          case 0x2:
            if (esizeIs32) {
                return new SveIndexedMemSV<int32_t, int16_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            } else {
                return new SveIndexedMemSV<int64_t, int16_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
          case 0x3:
            if (esizeIs32) {
                return new SveIndexedMemSV<uint32_t, uint16_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            } else {
                return new SveIndexedMemSV<uint64_t, uint16_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
          case 0x4:
            if (esizeIs32) {
                break;
            } else {
                return new SveIndexedMemSV<int64_t, int32_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
          case 0x5:
            if (esizeIs32) {
                return new SveIndexedMemSV<uint32_t, uint32_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            } else {
                return new SveIndexedMemSV<uint64_t, uint32_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
          case 0x7:
            if (esizeIs32) {
                break;
            } else {
                return new SveIndexedMemSV<uint64_t, uint64_t,
                                           SveGatherLoadSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemReadOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, firstFault);
            }
        }
        return new Unknown64(machInst);
    }

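    // Decodes SVE scatter store instructions, vector plus immediate form.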
    StaticInstPtr
    decodeSveScatterStoreVIInsts(uint8_t msz, ExtMachInst machInst,
                                 IntRegIndex zt, IntRegIndex pg,
                                 IntRegIndex zn, uint64_t imm,
                                 bool esizeIs32)
    {
        const char* mn = "st1";
        switch (msz) {
          case 0x0:
            if (esizeIs32) {
                return new SveIndexedMemVI<uint32_t, uint8_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            } else {
                return new SveIndexedMemVI<uint64_t, uint8_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            }
          case 0x1:
            if (esizeIs32) {
                return new SveIndexedMemVI<uint32_t, uint16_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            } else {
                return new SveIndexedMemVI<uint64_t, uint16_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            }
          case 0x2:
            if (esizeIs32) {
                return new SveIndexedMemVI<uint32_t, uint32_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            } else {
                return new SveIndexedMemVI<uint64_t, uint32_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            }
          case 0x3:
            if (esizeIs32) {
                break;
            } else {
                return new SveIndexedMemVI<uint64_t, uint64_t,
                                           SveScatterStoreVIMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, zn, imm, false);
            }
        }
        return new Unknown64(machInst);
    }

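    // Decodes SVE scatter store instructions, scalar plus vector form.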
    StaticInstPtr
    decodeSveScatterStoreSVInsts(uint8_t msz, ExtMachInst machInst,
                                 IntRegIndex zt, IntRegIndex pg,
                                 IntRegIndex rn, IntRegIndex zm,
                                 bool esizeIs32, bool offsetIs32,
                                 bool offsetIsSigned, bool offsetIsScaled)
    {
        const char* mn = "st1";
        switch (msz) {
          case 0x0:
            if (esizeIs32) {
                return new SveIndexedMemSV<uint32_t, uint8_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            } else {
                return new SveIndexedMemSV<uint64_t, uint8_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            }
          case 0x1:
            if (esizeIs32) {
                return new SveIndexedMemSV<uint32_t, uint16_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            } else {
                return new SveIndexedMemSV<uint64_t, uint16_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            }
          case 0x2:
            if (esizeIs32) {
                return new SveIndexedMemSV<uint32_t, uint32_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            } else {
                return new SveIndexedMemSV<uint64_t, uint32_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            }
          case 0x3:
            if (esizeIs32) {
                break;
            } else {
                return new SveIndexedMemSV<uint64_t, uint64_t,
                                           SveScatterStoreSVMicroop,
                                           SveFirstFaultWritebackMicroop>(
                    mn, machInst, MemWriteOp, zt, pg, rn, zm,
                    offsetIs32, offsetIsSigned, offsetIsScaled, false);
            }
        }
        return new Unknown64(machInst);
    }

}};


let {{

    header_output = ''
    exec_output = ''
    decoders = { 'Generic': {} }

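    # C++ fragment prepended to the effective address computation of the
    # SVE memory instructions with a scalar base register: if the base is
    # SP, it is misaligned (any of the low four bits set), and SP alignment
    # checking is enabled, an SP alignment fault is raised.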
    SPAlignmentCheckCode = '''
        if (this->baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''

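    # Generates definitions for SVE fill/spill instructions, i.e. LDR and
    # STR for predicate registers (isPred == True) or vector registers
    # (isPred == False)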
    def emitSveMemFillSpill(isPred):
        global header_output, exec_output, decoders
        eaCode = SPAlignmentCheckCode + '''
        int memAccessSize = %(memacc_size)s;
        EA = XBase + ((int64_t) imm * %(memacc_size)s)''' % {
            'memacc_size': 'eCount / 8' if isPred else 'eCount'}
        loadRdEnableCode = '''
        auto rdEn = std::vector<bool>();
        '''
        if isPred:
            loadMemAccCode = '''
            int index = 0;
            uint8_t byte;
            for (int i = 0; i < eCount / 8; i++) {
                byte = memDataView[i];
                for (int j = 0; j < 8; j++, index++) {
                    PDest_x[index] = (byte >> j) & 1;
                }
            }
            '''
            storeMemAccCode = '''
            int index = 0;
            uint8_t byte;
            for (int i = 0; i < eCount / 8; i++) {
                byte = 0;
                for (int j = 0; j < 8; j++, index++) {
                    byte |= PDest_x[index] << j;
                }
                memDataView[i] = byte;
            }
            '''
            storeWrEnableCode = '''
            auto wrEn = std::vector<bool>(eCount / 8, true);
            '''
        else:
            loadMemAccCode = '''
            for (int i = 0; i < eCount; i++) {
                AA64FpDest_x[i] = memDataView[i];
            }
            '''
            storeMemAccCode = '''
            for (int i = 0; i < eCount; i++) {
                memDataView[i] = AA64FpDest_x[i];
            }
            '''
            storeWrEnableCode = '''
            auto wrEn = std::vector<bool>(sizeof(MemElemType) * eCount, true);
            '''
        loadIop = InstObjParams('ldr',
            'SveLdrPred' if isPred else 'SveLdrVec',
            'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill',
            {'tpl_header': '',
             'tpl_args': '',
             'memacc_code': loadMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'rden_code' : loadRdEnableCode,
             'fault_code' : '',
             'fa_code' : ''},
            ['IsMemRef', 'IsLoad'])
        storeIop = InstObjParams('str',
            'SveStrPred' if isPred else 'SveStrVec',
            'SveMemPredFillSpill' if isPred else 'SveMemVecFillSpill',
            {'tpl_header': '',
             'tpl_args': '',
             'wren_code': storeWrEnableCode,
             'memacc_code': storeMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'fa_code' : ''},
            ['IsMemRef', 'IsStore'])
        header_output += SveMemFillSpillOpDeclare.subst(loadIop)
        header_output += SveMemFillSpillOpDeclare.subst(storeIop)
        exec_output += (
            SveContigLoadExecute.subst(loadIop) +
            SveContigLoadInitiateAcc.subst(loadIop) +
            SveContigLoadCompleteAcc.subst(loadIop) +
            SveContigStoreExecute.subst(storeIop) +
            SveContigStoreInitiateAcc.subst(storeIop) +
            SveContigStoreCompleteAcc.subst(storeIop))

    loadTplArgs = (
        ('uint8_t', 'uint8_t'),
        ('uint16_t', 'uint8_t'),
        ('uint32_t', 'uint8_t'),
        ('uint64_t', 'uint8_t'),
        ('int64_t', 'int32_t'),
        ('uint16_t', 'uint16_t'),
        ('uint32_t', 'uint16_t'),
        ('uint64_t', 'uint16_t'),
        ('int64_t', 'int16_t'),
        ('int32_t', 'int16_t'),
        ('uint32_t', 'uint32_t'),
        ('uint64_t', 'uint32_t'),
        ('int64_t', 'int8_t'),
        ('int32_t', 'int8_t'),
        ('int16_t', 'int8_t'),
        ('uint64_t', 'uint64_t'),
    )

    storeTplArgs = (
        ('uint8_t', 'uint8_t'),
        ('uint16_t', 'uint8_t'),
        ('uint32_t', 'uint8_t'),
        ('uint64_t', 'uint8_t'),
        ('uint16_t', 'uint16_t'),
        ('uint32_t', 'uint16_t'),
        ('uint64_t', 'uint16_t'),
        ('uint32_t', 'uint32_t'),
        ('uint64_t', 'uint32_t'),
        ('uint64_t', 'uint64_t'),
    )

    gatherLoadTplArgs = (
        ('int32_t', 'int8_t'),
        ('int64_t', 'int8_t'),
        ('uint32_t', 'uint8_t'),
        ('uint64_t', 'uint8_t'),
        ('int32_t', 'int16_t'),
        ('int64_t', 'int16_t'),
        ('uint32_t', 'uint16_t'),
        ('uint64_t', 'uint16_t'),
        ('int64_t', 'int32_t'),
        ('uint32_t', 'uint32_t'),
        ('uint64_t', 'uint32_t'),
        ('uint64_t', 'uint64_t'),
    )

    scatterStoreTplArgs = (
        ('uint32_t', 'uint8_t'),
        ('uint64_t', 'uint8_t'),
        ('uint32_t', 'uint16_t'),
        ('uint64_t', 'uint16_t'),
        ('uint32_t', 'uint32_t'),
        ('uint64_t', 'uint32_t'),
        ('uint64_t', 'uint64_t'),
    )

    # Generates definitions for SVE contiguous loads and stores
    def emitSveContigMemInsts(offsetIsImm):
        global header_output, exec_output, decoders
        # First-faulting instructions exist only in the scalar plus scalar
        # form, while non-faulting instructions exist only in the scalar
        # plus immediate form, so `offsetIsImm` also selects which class of
        # faulting instructions is generated
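        # (e.g. LDFF1B Z0.B, P0/Z, [X0, X1] is scalar plus scalar, while
        # LDNF1B Z0.B, P0/Z, [X0, #1, MUL VL] is scalar plus immediate)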
        firstFaulting = not offsetIsImm
        tplHeader = 'template <class RegElemType, class MemElemType>'
        tplArgs = '<RegElemType, MemElemType>'
        eaCode = SPAlignmentCheckCode + '''
        int memAccessSize = eCount * sizeof(MemElemType);
        EA = XBase + '''
        if offsetIsImm:
            eaCode += '((int64_t) this->imm * eCount * sizeof(MemElemType))'
        else:
            eaCode += '(XOffset * sizeof(MemElemType));'
        loadRdEnableCode = '''
        auto rdEn = std::vector<bool>(sizeof(MemElemType) * eCount, true);
        for (int i = 0; i < eCount; i++) {
            if (!GpOp_x[i]) {
                for (int j = 0; j < sizeof(MemElemType); j++) {
                    rdEn[sizeof(MemElemType) * i + j] = false;
                }
            }
        }
        '''
        loadMemAccCode = '''
        for (int i = 0; i < eCount; i++) {
            if (GpOp_x[i]) {
                AA64FpDest_x[i] = memDataView[i];
            } else {
                AA64FpDest_x[i] = 0;
            }
        }
        '''
        storeMemAccCode = '''
        for (int i = 0; i < eCount; i++) {
            if (GpOp_x[i]) {
                memDataView[i] = AA64FpDest_x[i];
            } else {
                memDataView[i] = 0;
                for (int j = 0; j < sizeof(MemElemType); j++) {
                    wrEn[sizeof(MemElemType) * i + j] = false;
                }
            }
        }
        '''
        storeWrEnableCode = '''
        auto wrEn = std::vector<bool>(sizeof(MemElemType) * eCount, true);
        '''
        ffrReadBackCode = '''
        auto& firstFaultReg = Ffr;'''
        faultingLoadMemAccCode = '''
        for (int i = 0; i < eCount; i++) {
            if (GpOp_x[i] && firstFaultReg[i * sizeof(RegElemType)]) {
                AA64FpDest_x[i] = memDataView[i];
            } else {
                AA64FpDest_x[i] = 0;
            }
        }
        '''
        nonFaultingCode = 'true ||'
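        # C++ fragment handling faults on first-faulting and non-faulting
        # loads: a fault is taken architecturally only if it hits the first
        # active element (never, for non-faulting loads: the unconditional
        # 'true ||' above short-circuits the check). A suppressed fault
        # clears the FFR from the faulting element onwards, and
        # setMemAccPredicate(false) cancels the access when even the first
        # active element would fault.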
        faultCode = '''
        Addr fault_addr;
        if (fault == NoFault || getFaultVAddr(fault, fault_addr)) {
            unsigned fault_elem_index;
            if (fault != NoFault) {
                assert(fault_addr >= EA);
                fault_elem_index = (fault_addr - EA) / sizeof(MemElemType);
            } else {
                fault_elem_index = eCount + 1;
            }
            int first_active_index;
            for (first_active_index = 0;
                 first_active_index < eCount && !(GpOp_x[first_active_index]);
                 first_active_index++);
            if (%s first_active_index < fault_elem_index) {
                for (int i = 0; i < eCount; i++) {
                    for (int j = 0; j < sizeof(RegElemType); j++) {
                        if (i < fault_elem_index) {
                            Ffr_ub[i * sizeof(RegElemType) + j] = FfrAux_x[i];
                        } else {
                            Ffr_ub[i * sizeof(RegElemType) + j] = 0;
                        }
                    }
                }
                fault = NoFault;
                if (first_active_index >= fault_elem_index) {
                    // non-faulting load needs this
                    xc->setMemAccPredicate(false);
                }
            }
        }
        ''' % ('' if firstFaulting else nonFaultingCode)

        loadIop = InstObjParams('ld1',
            'SveContigLoadSI' if offsetIsImm else 'SveContigLoadSS',
            'SveContigMemSI' if offsetIsImm else 'SveContigMemSS',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'rden_code' : loadRdEnableCode,
             'memacc_code': loadMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'fault_code' : '',
             'fa_code' : ''},
            ['IsMemRef', 'IsLoad'])
        storeIop = InstObjParams('st1',
            'SveContigStoreSI' if offsetIsImm else 'SveContigStoreSS',
            'SveContigMemSI' if offsetIsImm else 'SveContigMemSS',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'wren_code': storeWrEnableCode,
             'memacc_code': storeMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'fa_code' : ''},
            ['IsMemRef', 'IsStore'])
        faultIop = InstObjParams('ldff1' if firstFaulting else 'ldnf1',
            'SveContigFFLoadSS' if firstFaulting else 'SveContigNFLoadSI',
            'SveContigMemSS' if firstFaulting else 'SveContigMemSI',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'rden_code' : loadRdEnableCode,
             'memacc_code': faultingLoadMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'fault_code' : faultCode,
             'fa_code' : ''},
            ['IsMemRef', 'IsLoad'])
        faultIop.snippets['memacc_code'] = (ffrReadBackCode +
                                           faultIop.snippets['memacc_code'])
        if offsetIsImm:
            header_output += SveContigMemSIOpDeclare.subst(loadIop)
            header_output += SveContigMemSIOpDeclare.subst(storeIop)
            header_output += SveContigMemSIOpDeclare.subst(faultIop)
        else:
            header_output += SveContigMemSSOpDeclare.subst(loadIop)
            header_output += SveContigMemSSOpDeclare.subst(storeIop)
            header_output += SveContigMemSSOpDeclare.subst(faultIop)
        exec_output += (
            SveContigLoadExecute.subst(loadIop) +
            SveContigLoadInitiateAcc.subst(loadIop) +
            SveContigLoadCompleteAcc.subst(loadIop) +
            SveContigStoreExecute.subst(storeIop) +
            SveContigStoreInitiateAcc.subst(storeIop) +
            SveContigStoreCompleteAcc.subst(storeIop) +
            SveContigLoadExecute.subst(faultIop) +
            SveContigLoadInitiateAcc.subst(faultIop) +
            SveContigLoadCompleteAcc.subst(faultIop))

        for args in loadTplArgs:
            substDict = {'tpl_args': '<%s>' % ', '.join(args),
                         'class_name': 'SveContigLoadSI' if offsetIsImm
                                       else 'SveContigLoadSS'}
            exec_output += SveContigMemExecDeclare.subst(substDict)
        for args in storeTplArgs:
            substDict = {'tpl_args': '<%s>' % ', '.join(args),
                         'class_name': 'SveContigStoreSI' if offsetIsImm
                                       else 'SveContigStoreSS'}
            exec_output += SveContigMemExecDeclare.subst(substDict)
        for args in loadTplArgs:
            substDict = {'tpl_args': '<%s>' % ', '.join(args),
                         'class_name': 'SveContigFFLoadSS' if firstFaulting
                                       else 'SveContigNFLoadSI'}
            exec_output += SveContigMemExecDeclare.subst(substDict)


    # Generates definitions for SVE load-and-replicate instructions
    def emitSveLoadAndRepl():
        global header_output, exec_output, decoders
        tplHeader = 'template <class RegElemType, class MemElemType>'
        tplArgs = '<RegElemType, MemElemType>'
        eaCode = SPAlignmentCheckCode + '''
        EA = XBase + imm * sizeof(MemElemType);'''
        memAccCode = '''
        for (int i = 0; i < eCount; i++) {
            if (GpOp_x[i]) {
                AA64FpDest_x[i] = memData;
            } else {
                AA64FpDest_x[i] = 0;
            }
        }
        '''
        iop = InstObjParams('ld1r',
            'SveLoadAndRepl',
            'SveContigMemSI',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'memacc_code': memAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'fa_code' : ''},
            ['IsMemRef', 'IsLoad'])
        header_output += SveContigMemSIOpDeclare.subst(iop)
        exec_output += (
            SveLoadAndReplExecute.subst(iop) +
            SveLoadAndReplInitiateAcc.subst(iop) +
            SveLoadAndReplCompleteAcc.subst(iop))
        for args in loadTplArgs:
            substDict = {'tpl_args': '<%s>' % ', '.join(args),
                         'class_name': 'SveLoadAndRepl'}
            exec_output += SveContigMemExecDeclare.subst(substDict)

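    # Addressing forms for SVE indexed (gather/scatter) memory accesses:
    # a vector base plus an immediate offset, or a scalar base plus a
    # vector of offsets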
    class IndexedAddrForm:
        VEC_PLUS_IMM = 0
        SCA_PLUS_VEC = 1

    # Generates definitions for the transfer microops of SVE indexed memory
    # operations (gather loads, scatter stores)
    def emitSveIndexedMemMicroops(indexed_addr_form):
        assert indexed_addr_form in (IndexedAddrForm.VEC_PLUS_IMM,
                                     IndexedAddrForm.SCA_PLUS_VEC)
        global header_output, exec_output, decoders
        tplHeader = 'template <class RegElemType, class MemElemType>'
        tplArgs = '<RegElemType, MemElemType>'
        if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM:
            eaCode = '''
        EA = AA64FpBase_x[elemIndex] + imm * sizeof(MemElemType)'''
        else:
            eaCode = '''
        uint64_t offset = AA64FpOffset_x[elemIndex];
        if (offsetIs32) {
            offset &= (1ULL << 32) - 1;
        }
        if (offsetIsSigned) {
            offset = sext<32>(offset);
        }
        if (offsetIsScaled) {
            offset *= sizeof(MemElemType);
        }
        EA = XBase + offset'''
        loadMemAccCode = '''
            AA64FpDest_x[elemIndex] = memData;
        '''
        storeMemAccCode = '''
            memData = AA64FpDest_x[elemIndex];
        '''
        predCheckCode = 'GpOp_x[index]'
        faultStatusSetCode = 'PUreg0_x[elemIndex] = 1;'
        faultStatusResetCode = 'PUreg0_x[elemIndex] = 0;'
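        # PUreg0 is used as scratch storage to record the per-element
        # fault status; the first-fault writeback microop reads it back
        # to update the FFR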
        loadIop = InstObjParams('ld1',
            ('SveGatherLoadVIMicroop'
             if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM
             else 'SveGatherLoadSVMicroop'),
            'MicroOp',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'memacc_code': loadMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'fault_status_set_code' : faultStatusSetCode,
             'fault_status_reset_code' : faultStatusResetCode,
             'pred_check_code' : predCheckCode,
             'fa_code' : ''},
            ['IsMicroop', 'IsMemRef', 'IsLoad'])
        storeIop = InstObjParams('st1',
            ('SveScatterStoreVIMicroop'
             if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM
             else 'SveScatterStoreSVMicroop'),
            'MicroOp',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'memacc_code': storeMemAccCode,
             'ea_code' : sveEnabledCheckCode + eaCode,
             'pred_check_code' : predCheckCode,
             'fa_code' : ''},
            ['IsMicroop', 'IsMemRef', 'IsStore'])
        if indexed_addr_form == IndexedAddrForm.VEC_PLUS_IMM:
            header_output += SveIndexedMemVIMicroopDeclare.subst(loadIop)
            header_output += SveIndexedMemVIMicroopDeclare.subst(storeIop)
        else:
            header_output += SveIndexedMemSVMicroopDeclare.subst(loadIop)
            header_output += SveIndexedMemSVMicroopDeclare.subst(storeIop)
        exec_output += (
            SveGatherLoadMicroopExecute.subst(loadIop) +
            SveGatherLoadMicroopInitiateAcc.subst(loadIop) +
            SveGatherLoadMicroopCompleteAcc.subst(loadIop) +
            SveScatterStoreMicroopExecute.subst(storeIop) +
            SveScatterStoreMicroopInitiateAcc.subst(storeIop) +
            SveScatterStoreMicroopCompleteAcc.subst(storeIop))
        for args in gatherLoadTplArgs:
            substDict = {'tpl_args': '<%s>' % ', '.join(args),
                         'class_name': (
                             'SveGatherLoadVIMicroop'
                             if indexed_addr_form == \
                                 IndexedAddrForm.VEC_PLUS_IMM
                             else 'SveGatherLoadSVMicroop')}
            # TODO: this should become SveMemExecDeclare
            exec_output += SveContigMemExecDeclare.subst(substDict)
        for args in scatterStoreTplArgs:
            substDict = {'tpl_args': '<%s>' % ', '.join(args),
                         'class_name': (
                             'SveScatterStoreVIMicroop'
                             if indexed_addr_form == \
                                 IndexedAddrForm.VEC_PLUS_IMM
                             else 'SveScatterStoreSVMicroop')}
            # TODO: this should become SveMemExecDeclare
            exec_output += SveContigMemExecDeclare.subst(substDict)

    firstFaultTplArgs = ('int32_t', 'int64_t', 'uint32_t', 'uint64_t')

    def emitSveFirstFaultWritebackMicroop():
        global header_output, exec_output, decoders
        tplHeader = 'template <class RegElemType>'
        tplArgs = '<RegElemType>'
        faultStatusCheckCode = 'PUreg0_x[index]'
        firstFaultResetCode = '''
        for (int j = 0; j < sizeof(RegElemType); j++) {
            Ffr_ub[index * sizeof(RegElemType) + j] = 0;
        }
        '''
        firstFaultForwardCode = '''
        for (int j = 0; j < sizeof(RegElemType); j++) {
            Ffr_ub[index * sizeof(RegElemType) + j] = FfrAux_x[index];
        }
        '''
        iop = InstObjParams('ldff1',
            'SveFirstFaultWritebackMicroop',
            'MicroOp',
            {'tpl_header': tplHeader,
             'tpl_args': tplArgs,
             'fault_status_check_code' : faultStatusCheckCode,
             'first_fault_reset_code' : firstFaultResetCode,
             'first_fault_forward_code' : firstFaultForwardCode},
            ['IsMicroop'])
        header_output += SveFirstFaultWritebackMicroopDeclare.subst(iop)
        exec_output += SveFirstFaultWritebackMicroopExecute.subst(iop)
        for args in firstFaultTplArgs:
            substDict = {'targs': args,
                         'class_name' : 'SveFirstFaultWritebackMicroop' }
            exec_output += SveOpExecDeclare.subst(substDict)

    # Generates definitions for the first microop of SVE gather loads,
    # required to propagate the source vector register to the transfer
    # microops
    def emitSveGatherLoadCpySrcVecMicroop():
        global header_output, exec_output, decoders
        code = sveEnabledCheckCode + '''
        unsigned eCount = ArmStaticInst::getCurSveVecLen<uint8_t>(
                xc->tcBase());
        for (unsigned i = 0; i < eCount; i++) {
            AA64FpUreg0_ub[i] = AA64FpOp1_ub[i];
        }'''
        iop = InstObjParams('ld1',
            'SveGatherLoadCpySrcVecMicroop',
            'MicroOp',
            {'code': code},
            ['IsMicroop'])
        header_output += SveGatherLoadCpySrcVecMicroopDeclare.subst(iop)
        exec_output += SveGatherLoadCpySrcVecMicroopExecute.subst(iop)

    # LD1[S]{B,H,W,D} (scalar plus immediate)
    # ST1[S]{B,H,W,D} (scalar plus immediate)
    # LDNF1[S]{B,H,W,D} (scalar plus immediate)
    emitSveContigMemInsts(True)
    # LD1[S]{B,H,W,D} (scalar plus scalar)
    # ST1[S]{B,H,W,D} (scalar plus scalar)
    # LDFF1[S]{B,H,W,D} (scalar plus scalar)
    emitSveContigMemInsts(False)

    # LD1R[S]{B,H,W,D}
    emitSveLoadAndRepl()

    # LDR (predicate), STR (predicate)
    emitSveMemFillSpill(True)
    # LDR (vector), STR (vector)
    emitSveMemFillSpill(False)

    # LD1[S]{B,H,W,D} (vector plus immediate)
    # ST1[S]{B,H,W,D} (vector plus immediate)
    # LDFF1[S]{B,H,W,D} (vector plus immediate)
    emitSveIndexedMemMicroops(IndexedAddrForm.VEC_PLUS_IMM)
    # LD1[S]{B,H,W,D} (scalar plus vector)
    # ST1[S]{B,H,W,D} (scalar plus vector)
    # LDFF1[S]{B,H,W,D} (scalar plus vector)
    emitSveIndexedMemMicroops(IndexedAddrForm.SCA_PLUS_VEC)

    # FFR writeback microop for gather loads
    emitSveFirstFaultWritebackMicroop()

    # Source vector copy microop for gather loads
    emitSveGatherLoadCpySrcVecMicroop()
}};