// Copyright (c) 2017-2018 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Giacomo Gabrielli
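// The 'def template' blocks in this file are expanded by the gem5 ISA
// parser: each '%(name)s' placeholder is replaced with text supplied by
// the decoder formats that instantiate the template. As a minimal,
// purely illustrative sketch -- the class and base-class names below are
// hypothetical stand-ins, not actual decoder substitutions -- expanding
// SveMemFillSpillOpDeclare could yield something like:
//
//     class SveLdrPredExample : public SveMemPredFillSpill
//     {
//       protected:
//         typedef uint8_t TPElem;
//         typedef uint8_t RegElemType;
//         typedef uint8_t MemElemType;
//
//       public:
//         SveLdrPredExample(ExtMachInst machInst, IntRegIndex _dest,
//                           IntRegIndex _base, uint64_t _imm)
//             : SveMemPredFillSpill("ldr", machInst, MemReadOp,
//                                   _dest, _base, _imm)
//         { /* generated constructor code */ }
//         ...
//     };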
def template SveMemFillSpillOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef uint8_t TPElem;
        typedef uint8_t RegElemType;
        typedef uint8_t MemElemType;

      public:
        %(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;

        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }
    };
}};

def template SveContigMemSSOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            IntRegIndex _offset)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _offset)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;

        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }
    };
}};

def template SveContigMemSIOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            uint64_t _imm)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;

        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }
    };
}};

def template SveContigMemExecDeclare {{
    template
    Fault %(class_name)s%(tpl_args)s::execute(
        ExecContext *, Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::initiateAcc(
        ExecContext *, Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::completeAcc(
        PacketPtr, ExecContext *, Trace::InstRecord *) const;
}};

def template SveContigLoadExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        if (fault == NoFault) {
            fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(),
                memAccessSize, this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigLoadInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = xc->initiateMemRead(EA, memAccessSize,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveContigLoadCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
            pkt->getSize());

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
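// The execute/initiateAcc/completeAcc triple above follows gem5's usual
// split between atomic and timing memory accesses: atomic-mode CPUs call
// execute(), which performs the whole access in place, while timing-mode
// CPUs call initiateAcc() to compute the effective address and issue the
// request, and later completeAcc() with the response packet to extract
// the data and write back the destination register. A rough sketch of
// the timing-mode call sequence (simplified; the real calls are made by
// the CPU model, and the variable names here are illustrative only):
//
//     inst->initiateAcc(xc, trace);       // EA + xc->initiateMemRead()
//     // ... memory system returns a PacketPtr pkt ...
//     inst->completeAcc(pkt, xc, trace);  // memcpy from pkt, writeback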
def template SveContigStoreExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize,
                EA, this->memAccessFlags, NULL, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigStoreInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize,
                EA, this->memAccessFlags, NULL, wrEn);
        }

        return fault;
    }
}};

def template SveContigStoreCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveLoadAndReplExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveLoadAndReplInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveLoadAndReplCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData;
        getMem(pkt, memData, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
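// In the contiguous store templates above, 'wrEn' is declared by the
// %(wren_code)s substitution as a per-byte write-enable vector, so that
// elements whose governing predicate bit is clear do not write memory.
// A minimal sketch of what that substituted code is assumed to look like
// (the authoritative text lives with the decoder-side generator, not in
// this file):
//
//     auto wrEn = std::vector<bool>(sizeof(MemElemType) * eCount, true);
//     for (int i = 0; i < eCount; i++) {
//         if (!GpOp_x[i]) {  // inactive element: mask off its bytes
//             for (int j = 0; j < sizeof(MemElemType); j++)
//                 wrEn[sizeof(MemElemType) * i + j] = false;
//         }
//     }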
def template SveIndexedMemVIMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        uint64_t imm;

        int elemIndex;
        int numElems;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
            IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), imm(_imm),
              elemIndex(_elemIndex), numElems(_numElems),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                  ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination register
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;

        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer register
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
            }
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveIndexedMemSVMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        IntRegIndex offset;

        bool offsetIs32;
        bool offsetIsSigned;
        bool offsetIsScaled;

        int elemIndex;
        int numElems;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
            IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32,
            bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
            int _numElems)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), offset(_offset),
              offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
              offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
              numElems(_numElems),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                  ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination register
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;

        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer and base registers
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveGatherLoadMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (%(pred_check_code)s) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (%(pred_check_code)s) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
        } else {
            xc->setMemAccPredicate(false);
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData = 0;
        if (%(pred_check_code)s) {
            getMem(pkt, memData, traceData);
        }

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
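// The gather-load micro-ops above and the scatter-store micro-ops below
// implement one vector element each; elemIndex/numElems locate a
// micro-op within that per-element sequence. Since each load micro-op
// writes only a single element of the destination register, the first
// micro-op pins the destination (setNumPinnedWrites(numElems - 1)) so
// that all partial writes are merged into the same physical register by
// the renamer, and a micro-op whose governing predicate element is
// inactive squashes its access through setMemAccPredicate(false) rather
// than issuing a memory request.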
def template SveScatterStoreMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        if (%(pred_check_code)s) {
            fault = writeMemAtomic(xc, traceData, memData, EA,
                this->memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        if (%(pred_check_code)s) {
            fault = writeMemTiming(xc, traceData, memData, EA,
                this->memAccessFlags, NULL);
        } else {
            xc->setPredicate(false);
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveGatherLoadCpySrcVecMicroopDeclare {{
    class SveGatherLoadCpySrcVecMicroop : public MicroOp
    {
      protected:
        IntRegIndex op1;

        StaticInst *macroOp;

      public:
        SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
            IntRegIndex _op1, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop src vec cpy)");
            return ss.str();
        }
    };
}};

def template SveGatherLoadCpySrcVecMicroopExecute {{
    Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
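// SveGatherLoadCpySrcVecMicroop runs ahead of the per-element transfer
// micro-ops; its %(code)s body is expected to copy the source (offset)
// vector into a scratch register so that the later element transfers
// read a stable copy even when the gather's destination register
// overlaps the offset register. This reading is inferred from the
// "(uop src vec cpy)" disassembly suffix; the copy code itself is
// supplied by the decoder-side generator, not by this file.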