isa.cc: revision 12604:0cf3d5d3bd3e (old) vs. revision 12605:16476b32138d (new)
1/*
2 * Copyright (c) 2010-2016 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software

--- 28 unchanged lines hidden (view full) ---

37 * Authors: Gabe Black
38 * Ali Saidi
39 */
40
41#include "arch/arm/isa.hh"
42#include "arch/arm/pmu.hh"
43#include "arch/arm/system.hh"
44#include "arch/arm/tlb.hh"
45#include "arch/arm/tlbi_op.hh"
46#include "cpu/base.hh"
47#include "cpu/checker/cpu.hh"
48#include "debug/Arm.hh"
49#include "debug/MiscRegs.hh"
50#include "dev/arm/generic_timer.hh"
51#include "params/ArmISA.hh"
52#include "sim/faults.hh"
53#include "sim/stat_control.hh"

--- 625 unchanged lines hidden (view full) ---

679 misc_reg, lower, upper, v);
680 } else {
681 miscRegs[lower] = v;
682 DPRINTF(MiscRegs, "Writing to misc reg %d (%d) : %#x\n",
683 misc_reg, lower, v);
684 }
685}
686
686namespace {
687
688template<typename T>
689TLB *
690getITBPtr(T *tc)
691{
692 auto tlb = dynamic_cast<TLB *>(tc->getITBPtr());
693 assert(tlb);
694 return tlb;
695}
696
697template<typename T>
698TLB *
699getDTBPtr(T *tc)
700{
701 auto tlb = dynamic_cast<TLB *>(tc->getDTBPtr());
702 assert(tlb);
703 return tlb;
704}
705
706} // anonymous namespace
707
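The 12605 side of the hunks below stops flushing the TLBs directly and instead builds small invalidation objects that are either applied to the local thread context or, for the Inner Shareable variants, broadcast to every context. Those classes come from the newly included arch/arm/tlbi_op.hh, which is not part of this diff; the sketch below only illustrates the interface the call sites appear to assume (a constructor taking the target EL and a secure flag, operator() on one context, broadcast() over the system). Member and parameter names are invented for the example, not taken from the real header.

namespace ArmISA {

// Illustrative sketch only -- not the actual tlbi_op.hh.
class TLBIOp
{
  public:
    TLBIOp(ExceptionLevel _targetEL, bool _secure)
      : secureLookup(_secure), targetEL(_targetEL)
    {}
    virtual ~TLBIOp() {}

    // Perform the invalidation on a single thread context.
    virtual void operator()(ThreadContext *tc) = 0;

    // Apply the invalidation to every thread context in the system,
    // which is how the *IS (Inner Shareable) encodings are handled.
    void
    broadcast(ThreadContext *tc)
    {
        System *sys = tc->getSystemPtr();
        for (int x = 0; x < sys->numContexts(); x++)
            (*this)(sys->getThreadContext(x));
    }

  protected:
    bool secureLookup;
    ExceptionLevel targetEL;
};

// TLB Invalidate All, constructed as e.g. TLBIALL(EL1, secure) below.
class TLBIALL : public TLBIOp
{
  public:
    TLBIALL(ExceptionLevel _targetEL, bool _secure)
      : TLBIOp(_targetEL, _secure)
    {}
    void operator()(ThreadContext *tc) override;
};

} // namespace ArmISA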
708void
709ISA::setMiscReg(int misc_reg, const MiscReg &val, ThreadContext *tc)
710{
711
712 MiscReg newVal = val;
713 bool secure_lookup;
714 bool hyp;
715 uint8_t target_el;
716 uint16_t asid;
717 SCR scr;
718
719 if (misc_reg == MISCREG_CPSR) {
720 updateRegMap(val);
721
722
723 CPSR old_cpsr = miscRegs[MISCREG_CPSR];
724 int old_mode = old_cpsr.mode;

--- 289 unchanged lines hidden (view full) ---

1014 case MISCREG_ID_AA64ISAR1_EL1:
1015 case MISCREG_ID_AA64MMFR0_EL1:
1016 case MISCREG_ID_AA64MMFR1_EL1:
1017 case MISCREG_ID_AA64PFR0_EL1:
1018 case MISCREG_ID_AA64PFR1_EL1:
1019 // ID registers are constants.
1020 return;
1021
1022 // TLBI all entries, EL0&1 inner shareable (ignored)
1023 case MISCREG_TLBIALLIS:
998 // TLB Invalidate All
1024 case MISCREG_TLBIALL: // TLBI all entries, EL0&1,
1025 assert32(tc);
1026 target_el = 1; // el 0 and 1 are handled together
1027 scr = readMiscReg(MISCREG_SCR, tc);
1028 secure_lookup = haveSecurity && !scr.ns;
1029 tlbiALL(tc, secure_lookup, target_el);
1030 return;
1031 // TLBI all entries, EL0&1, instruction side
1000 {
1001 assert32(tc);
1002 scr = readMiscReg(MISCREG_SCR, tc);
1003
1004 TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
1005 tlbiOp(tc);
1006 return;
1007 }
1008 // TLB Invalidate All, Inner Shareable
1009 case MISCREG_TLBIALLIS:
1010 {
1011 assert32(tc);
1012 scr = readMiscReg(MISCREG_SCR, tc);
1013
1014 TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
1015 tlbiOp.broadcast(tc);
1016 return;
1017 }
1018 // Instruction TLB Invalidate All
1032 case MISCREG_ITLBIALL:
1033 assert32(tc);
1034 target_el = 1; // el 0 and 1 are handled together
1035 scr = readMiscReg(MISCREG_SCR, tc);
1036 secure_lookup = haveSecurity && !scr.ns;
1037 getITBPtr(tc)->flushAllSecurity(secure_lookup, target_el);
1038 return;
1039 // TLBI all entries, EL0&1, data side
1020 {
1021 assert32(tc);
1022 scr = readMiscReg(MISCREG_SCR, tc);
1023
1024 ITLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
1025 tlbiOp(tc);
1026 return;
1027 }
1028 // Data TLB Invalidate All
1040 case MISCREG_DTLBIALL:
1041 assert32(tc);
1042 target_el = 1; // el 0 and 1 are handled together
1043 scr = readMiscReg(MISCREG_SCR, tc);
1044 secure_lookup = haveSecurity && !scr.ns;
1045 getDTBPtr(tc)->flushAllSecurity(secure_lookup, target_el);
1046 return;
1047 // TLBI based on VA, EL0&1 inner shareable (ignored)
1048 case MISCREG_TLBIMVAL:
1049 case MISCREG_TLBIMVALIS:
1030 {
1031 assert32(tc);
1032 scr = readMiscReg(MISCREG_SCR, tc);
1033
1034 DTLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
1035 tlbiOp(tc);
1036 return;
1037 }
1038 // TLB Invalidate by VA
1039 // mcr tlbimval(is) is invalidating all matching entries
1040 // regardless of the level of lookup, since in gem5 we cache
1041 // in the tlb the last level of lookup only.
1053 case MISCREG_TLBIMVA:
1043 case MISCREG_TLBIMVAL:
1044 {
1045 assert32(tc);
1046 scr = readMiscReg(MISCREG_SCR, tc);
1047
1048 TLBIMVA tlbiOp(EL1,
1049 haveSecurity && !scr.ns,
1050 mbits(newVal, 31, 12),
1051 bits(newVal, 7,0));
1052
1053 tlbiOp(tc);
1054 return;
1055 }
1056 // TLB Invalidate by VA, Inner Shareable
1054 case MISCREG_TLBIMVAIS:
1055 assert32(tc);
1056 target_el = 1; // el 0 and 1 are handled together
1057 scr = readMiscReg(MISCREG_SCR, tc);
1058 secure_lookup = haveSecurity && !scr.ns;
1059 tlbiVA(tc, mbits(newVal, 31, 12), bits(newVal, 7,0),
1060 secure_lookup, target_el);
1061 return;
1062 // TLBI by ASID, EL0&1, inner shareable
1063 case MISCREG_TLBIASIDIS:
1058 case MISCREG_TLBIMVALIS:
1059 {
1060 assert32(tc);
1061 scr = readMiscReg(MISCREG_SCR, tc);
1062
1063 TLBIMVA tlbiOp(EL1,
1064 haveSecurity && !scr.ns,
1065 mbits(newVal, 31, 12),
1066 bits(newVal, 7,0));
1067
1068 tlbiOp.broadcast(tc);
1069 return;
1070 }
1071 // TLB Invalidate by ASID match
1064 case MISCREG_TLBIASID:
1065 assert32(tc);
1066 target_el = 1; // el 0 and 1 are handled together
1067 scr = readMiscReg(MISCREG_SCR, tc);
1068 secure_lookup = haveSecurity && !scr.ns;
1069 asid = bits(newVal, 7,0);
1070 tlbiASID(tc, asid, secure_lookup, target_el);
1071 return;
1072 // TLBI by address, EL0&1, inner shareable (ignored)
1073 case MISCREG_TLBIMVAAL:
1074 case MISCREG_TLBIMVAALIS:
1073 {
1074 assert32(tc);
1075 scr = readMiscReg(MISCREG_SCR, tc);
1076
1077 TLBIASID tlbiOp(EL1,
1078 haveSecurity && !scr.ns,
1079 bits(newVal, 7,0));
1080
1081 tlbiOp(tc);
1082 return;
1083 }
1084 // TLB Invalidate by ASID match, Inner Shareable
1085 case MISCREG_TLBIASIDIS:
1086 {
1087 assert32(tc);
1088 scr = readMiscReg(MISCREG_SCR, tc);
1089
1090 TLBIASID tlbiOp(EL1,
1091 haveSecurity && !scr.ns,
1092 bits(newVal, 7,0));
1093
1094 tlbiOp.broadcast(tc);
1095 return;
1096 }
1097 // mcr tlbimvaal(is) is invalidating all matching entries
1098 // regardless of the level of lookup, since in gem5 we cache
1099 // in the tlb the last level of lookup only.
1100 // TLB Invalidate by VA, All ASID
1078 case MISCREG_TLBIMVAA:
1102 case MISCREG_TLBIMVAAL:
1103 {
1104 assert32(tc);
1105 scr = readMiscReg(MISCREG_SCR, tc);
1106
1107 TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
1108 mbits(newVal, 31,12), false);
1109
1110 tlbiOp(tc);
1111 return;
1112 }
1113 // TLB Invalidate by VA, All ASID, Inner Shareable
1079 case MISCREG_TLBIMVAAIS:
1080 assert32(tc);
1081 target_el = 1; // el 0 and 1 are handled together
1082 scr = readMiscReg(MISCREG_SCR, tc);
1083 secure_lookup = haveSecurity && !scr.ns;
1084 hyp = 0;
1085 tlbiMVA(tc, mbits(newVal, 31,12), secure_lookup, hyp, target_el);
1086 return;
1087 // TLBI by address, EL2, hypervisor mode
1088 case MISCREG_TLBIMVALH:
1089 case MISCREG_TLBIMVALHIS:
1115 case MISCREG_TLBIMVAALIS:
1116 {
1117 assert32(tc);
1118 scr = readMiscReg(MISCREG_SCR, tc);
1119
1120 TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
1121 mbits(newVal, 31,12), false);
1122
1123 tlbiOp.broadcast(tc);
1124 return;
1125 }
1126 // mcr tlbimvalh(is) is invalidating all matching entries
1127 // regardless of the level of lookup, since in gem5 we cache
1128 // in the tlb the last level of lookup only.
1129 // TLB Invalidate by VA, Hyp mode
1093 case MISCREG_TLBIMVAH:
1131 case MISCREG_TLBIMVALH:
1132 {
1133 assert32(tc);
1134 scr = readMiscReg(MISCREG_SCR, tc);
1135
1136 TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
1137 mbits(newVal, 31,12), true);
1138
1139 tlbiOp(tc);
1140 return;
1141 }
1142 // TLB Invalidate by VA, Hyp mode, Inner Shareable
1094 case MISCREG_TLBIMVAHIS:
1095 assert32(tc);
1096 target_el = 1; // aarch32, use hyp bit
1097 scr = readMiscReg(MISCREG_SCR, tc);
1098 secure_lookup = haveSecurity && !scr.ns;
1099 hyp = 1;
1100 tlbiMVA(tc, mbits(newVal, 31,12), secure_lookup, hyp, target_el);
1101 return;
1102 case MISCREG_TLBIIPAS2L:
1103 case MISCREG_TLBIIPAS2LIS:
1144 case MISCREG_TLBIMVALHIS:
1145 {
1146 assert32(tc);
1147 scr = readMiscReg(MISCREG_SCR, tc);
1148
1149 TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
1150 mbits(newVal, 31,12), true);
1151
1152 tlbiOp.broadcast(tc);
1153 return;
1154 }
1155 // mcr tlbiipas2l(is) is invalidating all matching entries
1156 // regardless of the level of lookup, since in gem5 we cache
1157 // in the tlb the last level of lookup only.
1158 // TLB Invalidate by Intermediate Physical Address, Stage 2
1107 case MISCREG_TLBIIPAS2:
1160 case MISCREG_TLBIIPAS2L:
1161 {
1162 assert32(tc);
1163 scr = readMiscReg(MISCREG_SCR, tc);
1164
1165 TLBIIPA tlbiOp(EL1,
1166 haveSecurity && !scr.ns,
1167 static_cast<Addr>(bits(newVal, 35, 0)) << 12);
1168
1169 tlbiOp(tc);
1170 return;
1171 }
1172 // TLB Invalidate by Intermediate Physical Address, Stage 2,
1173 // Inner Shareable
1108 case MISCREG_TLBIIPAS2IS:
1109 assert32(tc);
1110 target_el = 1; // EL 0 and 1 are handled together
1111 scr = readMiscReg(MISCREG_SCR, tc);
1112 secure_lookup = haveSecurity && !scr.ns;
1113 tlbiIPA(tc, newVal, secure_lookup, target_el);
1114 return;
1115 // TLBI by address and asid, EL0&1, instruction side only
1175 case MISCREG_TLBIIPAS2LIS:
1176 {
1177 assert32(tc);
1178 scr = readMiscReg(MISCREG_SCR, tc);
1179
1180 TLBIIPA tlbiOp(EL1,
1181 haveSecurity && !scr.ns,
1182 static_cast<Addr>(bits(newVal, 35, 0)) << 12);
1183
1184 tlbiOp.broadcast(tc);
1185 return;
1186 }
1187 // Instruction TLB Invalidate by VA
1116 case MISCREG_ITLBIMVA:
1117 assert32(tc);
1118 target_el = 1; // el 0 and 1 are handled together
1119 scr = readMiscReg(MISCREG_SCR, tc);
1120 secure_lookup = haveSecurity && !scr.ns;
1121 getITBPtr(tc)->flushMvaAsid(mbits(newVal, 31, 12),
1122 bits(newVal, 7,0), secure_lookup, target_el);
1123 return;
1124 // TLBI by address and asid, EL0&1, data side only
1189 {
1190 assert32(tc);
1191 scr = readMiscReg(MISCREG_SCR, tc);
1192
1193 ITLBIMVA tlbiOp(EL1,
1194 haveSecurity && !scr.ns,
1195 mbits(newVal, 31, 12),
1196 bits(newVal, 7,0));
1197
1198 tlbiOp(tc);
1199 return;
1200 }
1201 // Data TLB Invalidate by VA
1125 case MISCREG_DTLBIMVA:
1126 assert32(tc);
1127 target_el = 1; // el 0 and 1 are handled together
1128 scr = readMiscReg(MISCREG_SCR, tc);
1129 secure_lookup = haveSecurity && !scr.ns;
1130 getDTBPtr(tc)->flushMvaAsid(mbits(newVal, 31, 12),
1131 bits(newVal, 7,0), secure_lookup, target_el);
1132 return;
1133 // TLBI by ASID, EL0&1, instruction side only
1203 {
1204 assert32(tc);
1205 scr = readMiscReg(MISCREG_SCR, tc);
1206
1207 DTLBIMVA tlbiOp(EL1,
1208 haveSecurity && !scr.ns,
1209 mbits(newVal, 31, 12),
1210 bits(newVal, 7,0));
1211
1212 tlbiOp(tc);
1213 return;
1214 }
1215 // Instruction TLB Invalidate by ASID match
1134 case MISCREG_ITLBIASID:
1135 assert32(tc);
1136 target_el = 1; // el 0 and 1 are handled together
1137 scr = readMiscReg(MISCREG_SCR, tc);
1138 secure_lookup = haveSecurity && !scr.ns;
1139 getITBPtr(tc)->flushAsid(bits(newVal, 7,0), secure_lookup,
1140 target_el);
1141 return;
1142 // TLBI by ASID, EL0&1, data side only
1217 {
1218 assert32(tc);
1219 scr = readMiscReg(MISCREG_SCR, tc);
1220
1221 ITLBIASID tlbiOp(EL1,
1222 haveSecurity && !scr.ns,
1223 bits(newVal, 7,0));
1224
1225 tlbiOp(tc);
1226 return;
1227 }
1228 // Data TLB Invalidate by ASID match
1143 case MISCREG_DTLBIASID:
1144 assert32(tc);
1145 target_el = 1; // el 0 and 1 are handled together
1146 scr = readMiscReg(MISCREG_SCR, tc);
1147 secure_lookup = haveSecurity && !scr.ns;
1148 getDTBPtr(tc)->flushAsid(bits(newVal, 7,0), secure_lookup,
1149 target_el);
1150 return;
1151 // Invalidate entire Non-secure Hyp/Non-Hyp Unified TLB
1230 {
1231 assert32(tc);
1232 scr = readMiscReg(MISCREG_SCR, tc);
1233
1234 DTLBIASID tlbiOp(EL1,
1235 haveSecurity && !scr.ns,
1236 bits(newVal, 7,0));
1237
1238 tlbiOp(tc);
1239 return;
1240 }
1241 // TLB Invalidate All, Non-Secure Non-Hyp
1152 case MISCREG_TLBIALLNSNH:
1243 {
1244 assert32(tc);
1245
1246 TLBIALLN tlbiOp(EL1, false);
1247 tlbiOp(tc);
1248 return;
1249 }
1250 // TLB Invalidate All, Non-Secure Non-Hyp, Inner Shareable
1153 case MISCREG_TLBIALLNSNHIS:
1154 assert32(tc);
1155 target_el = 1; // el 0 and 1 are handled together
1156 hyp = 0;
1157 tlbiALLN(tc, hyp, target_el);
1158 return;
1159 // TLBI all entries, EL2, hyp,
1252 {
1253 assert32(tc);
1254
1255 TLBIALLN tlbiOp(EL1, false);
1256 tlbiOp.broadcast(tc);
1257 return;
1258 }
1259 // TLB Invalidate All, Hyp mode
1160 case MISCREG_TLBIALLH:
1261 {
1262 assert32(tc);
1263
1264 TLBIALLN tlbiOp(EL1, true);
1265 tlbiOp(tc);
1266 return;
1267 }
1268 // TLB Invalidate All, Hyp mode, Inner Shareable
1161 case MISCREG_TLBIALLHIS:
1162 assert32(tc);
1163 target_el = 1; // aarch32, use hyp bit
1164 hyp = 1;
1165 tlbiALLN(tc, hyp, target_el);
1166 return;
1167 // AArch64 TLBI: invalidate all entries EL3
1168 case MISCREG_TLBI_ALLE3IS:
1270 {
1271 assert32(tc);
1272
1273 TLBIALLN tlbiOp(EL1, true);
1274 tlbiOp.broadcast(tc);
1275 return;
1276 }
1277 // AArch64 TLB Invalidate All, EL3
1169 case MISCREG_TLBI_ALLE3:
1170 assert64(tc);
1171 target_el = 3;
1172 secure_lookup = true;
1173 tlbiALL(tc, secure_lookup, target_el);
1174 return;
1279 {
1280 assert64(tc);
1281
1282 TLBIALL tlbiOp(EL3, true);
1283 tlbiOp(tc);
1284 return;
1285 }
1286 // AArch64 TLB Invalidate All, EL3, Inner Shareable
1287 case MISCREG_TLBI_ALLE3IS:
1288 {
1289 assert64(tc);
1290
1291 TLBIALL tlbiOp(EL3, true);
1292 tlbiOp.broadcast(tc);
1293 return;
1294 }
1175 // @todo: uncomment this to enable Virtualization
1176 // case MISCREG_TLBI_ALLE2IS:
1177 // case MISCREG_TLBI_ALLE2:
1178 // TLBI all entries, EL0&1
1179 case MISCREG_TLBI_ALLE1IS:
1298 // AArch64 TLB Invalidate All, EL1
1180 case MISCREG_TLBI_ALLE1:
1181 // AArch64 TLBI: invalidate all entries, stage 1, current VMID
1182 case MISCREG_TLBI_VMALLE1IS:
1183 case MISCREG_TLBI_VMALLE1:
1184 // AArch64 TLBI: invalidate all entries, stages 1 & 2, current VMID
1185 case MISCREG_TLBI_VMALLS12E1IS:
1186 case MISCREG_TLBI_VMALLS12E1:
1187 // @todo: handle VMID and stage 2 to enable Virtualization
1188 assert64(tc);
1189 target_el = 1; // el 0 and 1 are handled together
1190 scr = readMiscReg(MISCREG_SCR, tc);
1191 secure_lookup = haveSecurity && !scr.ns;
1192 tlbiALL(tc, secure_lookup, target_el);
1193 return;
1194 // AArch64 TLBI: invalidate by VA and ASID, stage 1, current VMID
1303 {
1304 assert64(tc);
1305 scr = readMiscReg(MISCREG_SCR, tc);
1306
1307 TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
1308 tlbiOp(tc);
1309 return;
1310 }
1311 // AArch64 TLB Invalidate All, EL1, Inner Shareable
1312 case MISCREG_TLBI_ALLE1IS:
1313 case MISCREG_TLBI_VMALLE1IS:
1314 case MISCREG_TLBI_VMALLS12E1IS:
1315 // @todo: handle VMID and stage 2 to enable Virtualization
1316 {
1317 assert64(tc);
1318 scr = readMiscReg(MISCREG_SCR, tc);
1319
1320 TLBIALL tlbiOp(EL1, haveSecurity && !scr.ns);
1321 tlbiOp.broadcast(tc);
1322 return;
1323 }
1324 // VAEx(IS) and VALEx(IS) are the same because TLBs
1325 // only store entries
1326 // from the last level of translation table walks
1327 // @todo: handle VMID to enable Virtualization
1198 // TLBI all entries, EL0&1
1199 case MISCREG_TLBI_VAE3IS_Xt:
1328 // AArch64 TLB Invalidate by VA, EL3
1200 case MISCREG_TLBI_VAE3_Xt:
1201 // TLBI by VA, EL3 regime stage 1, last level walk
1202 case MISCREG_TLBI_VALE3IS_Xt:
1203 case MISCREG_TLBI_VALE3_Xt:
1204 assert64(tc);
1205 target_el = 3;
1206 asid = 0xbeef; // does not matter, tlbi is global
1207 secure_lookup = true;
1208 tlbiVA(tc, ((Addr) bits(newVal, 43, 0)) << 12,
1209 asid, secure_lookup, target_el);
1210 return;
1211 // TLBI by VA, EL2
1212 case MISCREG_TLBI_VAE2IS_Xt:
1331 {
1332 assert64(tc);
1333
1334 TLBIMVA tlbiOp(EL3, true,
1335 static_cast<Addr>(bits(newVal, 43, 0)) << 12,
1336 0xbeef);
1337 tlbiOp(tc);
1338 return;
1339 }
1340 // AArch64 TLB Invalidate by VA, EL3, Inner Shareable
1341 case MISCREG_TLBI_VAE3IS_Xt:
1342 case MISCREG_TLBI_VALE3IS_Xt:
1343 {
1344 assert64(tc);
1345
1346 TLBIMVA tlbiOp(EL3, true,
1347 static_cast<Addr>(bits(newVal, 43, 0)) << 12,
1348 0xbeef);
1349
1350 tlbiOp.broadcast(tc);
1351 return;
1352 }
1353 // AArch64 TLB Invalidate by VA, EL2
1213 case MISCREG_TLBI_VAE2_Xt:
1214 // TLBI by VA, EL2, stage1 last level walk
1215 case MISCREG_TLBI_VALE2IS_Xt:
1216 case MISCREG_TLBI_VALE2_Xt:
1217 assert64(tc);
1218 target_el = 2;
1219 asid = 0xbeef; // does not matter, tlbi is global
1220 scr = readMiscReg(MISCREG_SCR, tc);
1221 secure_lookup = haveSecurity && !scr.ns;
1222 tlbiVA(tc, ((Addr) bits(newVal, 43, 0)) << 12,
1223 asid, secure_lookup, target_el);
1224 return;
1225 // TLBI by VA EL1 & 0, stage1, ASID, current VMID
1226 case MISCREG_TLBI_VAE1IS_Xt:
1356 {
1357 assert64(tc);
1358 scr = readMiscReg(MISCREG_SCR, tc);
1359
1360 TLBIMVA tlbiOp(EL2, haveSecurity && !scr.ns,
1361 static_cast<Addr>(bits(newVal, 43, 0)) << 12,
1362 0xbeef);
1363 tlbiOp(tc);
1364 return;
1365 }
1366 // AArch64 TLB Invalidate by VA, EL2, Inner Shareable
1367 case MISCREG_TLBI_VAE2IS_Xt:
1368 case MISCREG_TLBI_VALE2IS_Xt:
1369 {
1370 assert64(tc);
1371 scr = readMiscReg(MISCREG_SCR, tc);
1372
1373 TLBIMVA tlbiOp(EL2, haveSecurity && !scr.ns,
1374 static_cast<Addr>(bits(newVal, 43, 0)) << 12,
1375 0xbeef);
1376
1377 tlbiOp.broadcast(tc);
1378 return;
1379 }
1380 // AArch64 TLB Invalidate by VA, EL1
1227 case MISCREG_TLBI_VAE1_Xt:
1228 case MISCREG_TLBI_VALE1IS_Xt:
1229 case MISCREG_TLBI_VALE1_Xt:
1230 assert64(tc);
1231 asid = bits(newVal, 63, 48);
1232 target_el = 1; // el 0 and 1 are handled together
1233 scr = readMiscReg(MISCREG_SCR, tc);
1234 secure_lookup = haveSecurity && !scr.ns;
1235 tlbiVA(tc, ((Addr) bits(newVal, 43, 0)) << 12,
1236 asid, secure_lookup, target_el);
1237 return;
1238 // AArch64 TLBI: invalidate by ASID, stage 1, current VMID
1383 {
1384 assert64(tc);
1385 scr = readMiscReg(MISCREG_SCR, tc);
1386 auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
1387 bits(newVal, 55, 48);
1388
1389 TLBIMVA tlbiOp(EL1, haveSecurity && !scr.ns,
1390 static_cast<Addr>(bits(newVal, 43, 0)) << 12,
1391 asid);
1392
1393 tlbiOp(tc);
1394 return;
1395 }
1396 // AArch64 TLB Invalidate by VA, EL1, Inner Shareable
1397 case MISCREG_TLBI_VAE1IS_Xt:
1398 case MISCREG_TLBI_VALE1IS_Xt:
1399 {
1400 assert64(tc);
1401 scr = readMiscReg(MISCREG_SCR, tc);
1402 auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
1403 bits(newVal, 55, 48);
1404
1405 TLBIMVA tlbiOp(EL1, haveSecurity && !scr.ns,
1406 static_cast<Addr>(bits(newVal, 43, 0)) << 12,
1407 asid);
1408
1409 tlbiOp.broadcast(tc);
1410 return;
1411 }
1412 // AArch64 TLB Invalidate by ASID, EL1
1413 // @todo: handle VMID to enable Virtualization
1240 case MISCREG_TLBI_ASIDE1IS_Xt:
1241 case MISCREG_TLBI_ASIDE1_Xt:
1242 assert64(tc);
1243 target_el = 1; // el 0 and 1 are handled together
1244 scr = readMiscReg(MISCREG_SCR, tc);
1245 secure_lookup = haveSecurity && !scr.ns;
1246 asid = bits(newVal, 63, 48);
1247 tlbiASID(tc, asid, secure_lookup, target_el);
1248 return;
1249 // AArch64 TLBI: invalidate by VA, ASID, stage 1, current VMID
1415 {
1416 assert64(tc);
1417 scr = readMiscReg(MISCREG_SCR, tc);
1418 auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
1419 bits(newVal, 55, 48);
1420
1421 TLBIASID tlbiOp(EL1, haveSecurity && !scr.ns, asid);
1422 tlbiOp(tc);
1423 return;
1424 }
1425 // AArch64 TLB Invalidate by ASID, EL1, Inner Shareable
1426 case MISCREG_TLBI_ASIDE1IS_Xt:
1427 {
1428 assert64(tc);
1429 scr = readMiscReg(MISCREG_SCR, tc);
1430 auto asid = haveLargeAsid64 ? bits(newVal, 63, 48) :
1431 bits(newVal, 55, 48);
1432
1433 TLBIASID tlbiOp(EL1, haveSecurity && !scr.ns, asid);
1434 tlbiOp.broadcast(tc);
1435 return;
1436 }
1250 // VAAE1(IS) and VAALE1(IS) are the same because TLBs only store
1251 // entries from the last level of translation table walks
1252 // @todo: handle VMID to enable Virtualization
1253 case MISCREG_TLBI_VAAE1IS_Xt:
1439 // AArch64 TLB Invalidate by VA, All ASID, EL1
1254 case MISCREG_TLBI_VAAE1_Xt:
1255 case MISCREG_TLBI_VAALE1IS_Xt:
1256 case MISCREG_TLBI_VAALE1_Xt:
1257 assert64(tc);
1258 target_el = 1; // el 0 and 1 are handled together
1259 scr = readMiscReg(MISCREG_SCR, tc);
1260 secure_lookup = haveSecurity && !scr.ns;
1261 tlbiMVA(tc,
1262 ((Addr)bits(newVal, 43, 0)) << 12,
1263 secure_lookup, false, target_el);
1264 return;
1265 // AArch64 TLBI: invalidate by IPA, stage 2, current VMID
1266 case MISCREG_TLBI_IPAS2LE1IS_Xt:
1442 {
1443 assert64(tc);
1444 scr = readMiscReg(MISCREG_SCR, tc);
1445
1446 TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
1447 static_cast<Addr>(bits(newVal, 43, 0)) << 12, false);
1448
1449 tlbiOp(tc);
1450 return;
1451 }
1452 // AArch64 TLB Invalidate by VA, All ASID, EL1, Inner Shareable
1453 case MISCREG_TLBI_VAAE1IS_Xt:
1454 case MISCREG_TLBI_VAALE1IS_Xt:
1455 {
1456 assert64(tc);
1457 scr = readMiscReg(MISCREG_SCR, tc);
1458
1459 TLBIMVAA tlbiOp(EL1, haveSecurity && !scr.ns,
1460 static_cast<Addr>(bits(newVal, 43, 0)) << 12, false);
1461
1462 tlbiOp.broadcast(tc);
1463 return;
1464 }
1465 // AArch64 TLB Invalidate by Intermediate Physical Address,
1466 // Stage 2, EL1
1467 case MISCREG_TLBI_IPAS2E1_Xt:
1267 case MISCREG_TLBI_IPAS2LE1_Xt:
1469 {
1470 assert64(tc);
1471 scr = readMiscReg(MISCREG_SCR, tc);
1472
1473 TLBIIPA tlbiOp(EL1, haveSecurity && !scr.ns,
1474 static_cast<Addr>(bits(newVal, 35, 0)) << 12);
1475
1476 tlbiOp(tc);
1477 return;
1478 }
1479 // AArch64 TLB Invalidate by Intermediate Physical Address,
1480 // Stage 2, EL1, Inner Shareable
1268 case MISCREG_TLBI_IPAS2E1IS_Xt:
1269 case MISCREG_TLBI_IPAS2E1_Xt:
1270 assert64(tc);
1271 target_el = 1; // EL 0 and 1 are handled together
1272 scr = readMiscReg(MISCREG_SCR, tc);
1273 secure_lookup = haveSecurity && !scr.ns;
1274 tlbiIPA(tc, newVal, secure_lookup, target_el);
1275 return;
1482 case MISCREG_TLBI_IPAS2LE1IS_Xt:
1483 {
1484 assert64(tc);
1485 scr = readMiscReg(MISCREG_SCR, tc);
1486
1487 TLBIIPA tlbiOp(EL1, haveSecurity && !scr.ns,
1488 static_cast<Addr>(bits(newVal, 35, 0)) << 12);
1489
1490 tlbiOp.broadcast(tc);
1491 return;
1492 }
1493 case MISCREG_ACTLR:
1494 warn("Not doing anything for write of miscreg ACTLR\n");
1495 break;
1496
1497 case MISCREG_PMXEVTYPER_PMCCFILTR:
1498 case MISCREG_PMINTENSET_EL1 ... MISCREG_PMOVSSET_EL0:
1499 case MISCREG_PMEVCNTR0_EL0 ... MISCREG_PMEVTYPER5_EL0:
1500 case MISCREG_PMCR ... MISCREG_PMOVSSET:

--- 429 unchanged lines hidden (view full) ---

1930 case MISCREG_CNTVOFF_EL2 ... MISCREG_CNTPS_CVAL_EL1:
1931 getGenericTimer(tc).setMiscReg(misc_reg, newVal);
1932 break;
1933 }
1934 }
1935 setMiscRegNoEffect(misc_reg, newVal);
1936}
1937
1721void
1722ISA::tlbiVA(ThreadContext *tc, Addr va, uint16_t asid,
1723 bool secure_lookup, uint8_t target_el)
1724{
1725 if (!haveLargeAsid64)
1726 asid &= mask(8);
1727 System *sys = tc->getSystemPtr();
1728 for (int x = 0; x < sys->numContexts(); x++) {
1729 ThreadContext *oc = sys->getThreadContext(x);
1730 getITBPtr(oc)->flushMvaAsid(va, asid,
1731 secure_lookup, target_el);
1732 getDTBPtr(oc)->flushMvaAsid(va, asid,
1733 secure_lookup, target_el);
1734
1735 CheckerCPU *checker = oc->getCheckerCpuPtr();
1736 if (checker) {
1737 getITBPtr(checker)->flushMvaAsid(
1738 va, asid, secure_lookup, target_el);
1739 getDTBPtr(checker)->flushMvaAsid(
1740 va, asid, secure_lookup, target_el);
1741 }
1742 }
1743}
1744
1745void
1746ISA::tlbiALL(ThreadContext *tc, bool secure_lookup, uint8_t target_el)
1747{
1748 System *sys = tc->getSystemPtr();
1749 for (int x = 0; x < sys->numContexts(); x++) {
1750 ThreadContext *oc = sys->getThreadContext(x);
1751 getITBPtr(oc)->flushAllSecurity(secure_lookup, target_el);
1752 getDTBPtr(oc)->flushAllSecurity(secure_lookup, target_el);
1753
1754 // If CheckerCPU is connected, need to notify it of a flush
1755 CheckerCPU *checker = oc->getCheckerCpuPtr();
1756 if (checker) {
1757 getITBPtr(checker)->flushAllSecurity(secure_lookup,
1758 target_el);
1759 getDTBPtr(checker)->flushAllSecurity(secure_lookup,
1760 target_el);
1761 }
1762 }
1763}
1764
1765void
1766ISA::tlbiALLN(ThreadContext *tc, bool hyp, uint8_t target_el)
1767{
1768 System *sys = tc->getSystemPtr();
1769 for (int x = 0; x < sys->numContexts(); x++) {
1770 ThreadContext *oc = sys->getThreadContext(x);
1771 getITBPtr(oc)->flushAllNs(hyp, target_el);
1772 getDTBPtr(oc)->flushAllNs(hyp, target_el);
1773
1774 CheckerCPU *checker = oc->getCheckerCpuPtr();
1775 if (checker) {
1776 getITBPtr(checker)->flushAllNs(hyp, target_el);
1777 getDTBPtr(checker)->flushAllNs(hyp, target_el);
1778 }
1779 }
1780}
1781
1782void
1783ISA::tlbiMVA(ThreadContext *tc, Addr va, bool secure_lookup, bool hyp,
1784 uint8_t target_el)
1785{
1786 System *sys = tc->getSystemPtr();
1787 for (int x = 0; x < sys->numContexts(); x++) {
1788 ThreadContext *oc = sys->getThreadContext(x);
1789 getITBPtr(oc)->flushMva(va, secure_lookup, hyp, target_el);
1790 getDTBPtr(oc)->flushMva(va, secure_lookup, hyp, target_el);
1791
1792 CheckerCPU *checker = oc->getCheckerCpuPtr();
1793 if (checker) {
1794 getITBPtr(checker)->flushMva(va, secure_lookup, hyp, target_el);
1795 getDTBPtr(checker)->flushMva(va, secure_lookup, hyp, target_el);
1796 }
1797 }
1798}
1799
1800void
1801ISA::tlbiIPA(ThreadContext *tc, MiscReg newVal, bool secure_lookup,
1802 uint8_t target_el)
1803{
1804 System *sys = tc->getSystemPtr();
1805 for (auto x = 0; x < sys->numContexts(); x++) {
1806 tc = sys->getThreadContext(x);
1807 Addr ipa = ((Addr) bits(newVal, 35, 0)) << 12;
1808 getITBPtr(tc)->flushIpaVmid(ipa,
1809 secure_lookup, false, target_el);
1810 getDTBPtr(tc)->flushIpaVmid(ipa,
1811 secure_lookup, false, target_el);
1812
1813 CheckerCPU *checker = tc->getCheckerCpuPtr();
1814 if (checker) {
1815 getITBPtr(checker)->flushIpaVmid(ipa,
1816 secure_lookup, false, target_el);
1817 getDTBPtr(checker)->flushIpaVmid(ipa,
1818 secure_lookup, false, target_el);
1819 }
1820 }
1821}
1822
1823void
1824ISA::tlbiASID(ThreadContext *tc, uint16_t asid, bool secure_lookup,
1825 uint8_t target_el)
1826{
1827 if (!haveLargeAsid64)
1828 asid &= mask(8);
1829
1830 System *sys = tc->getSystemPtr();
1831 for (auto x = 0; x < sys->numContexts(); x++) {
1832 tc = sys->getThreadContext(x);
1833 getITBPtr(tc)->flushAsid(asid, secure_lookup, target_el);
1834 getDTBPtr(tc)->flushAsid(asid, secure_lookup, target_el);
1835 CheckerCPU *checker = tc->getCheckerCpuPtr();
1836 if (checker) {
1837 getITBPtr(checker)->flushAsid(asid, secure_lookup, target_el);
1838 getDTBPtr(checker)->flushAsid(asid, secure_lookup, target_el);
1839 }
1840 }
1841}
1842
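All of the ISA::tlbi* helpers removed above share one pattern: walk every thread context in the system, flush the matching entries from both the instruction and data TLBs, and repeat the flush on the checker CPU's TLBs when one is attached. Under the new scheme that per-context work presumably lives with the individual operations; the patch only shows their call sites, so the following is just a sketch of how the TLBIALL case could carry the same logic over. The body is inferred from ISA::tlbiALL() above, not taken from the real tlbi_op.cc, and it assumes getITBPtr()/getDTBPtr() helpers equivalent to the ones in the anonymous namespace earlier in the file.

// Sketch only: per-context apply for TLBIALL, mirroring the removed
// ISA::tlbiALL() body. broadcast() (see the earlier sketch) would call
// this once per thread context for the Inner Shareable encodings.
void
TLBIALL::operator()(ThreadContext *tc)
{
    getITBPtr(tc)->flushAllSecurity(secureLookup, targetEL);
    getDTBPtr(tc)->flushAllSecurity(secureLookup, targetEL);

    // If a CheckerCPU is connected, its TLBs need the same flush.
    CheckerCPU *checker = tc->getCheckerCpuPtr();
    if (checker) {
        getITBPtr(checker)->flushAllSecurity(secureLookup, targetEL);
        getDTBPtr(checker)->flushAllSecurity(secureLookup, targetEL);
    }
}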
1938BaseISADevice &
1939ISA::getGenericTimer(ThreadContext *tc)
1940{
1941 // We only need to create an ISA interface the first time we try
1942 // to access the timer.
1943 if (timer)
1944 return *timer.get();
1945

--- 18 unchanged lines hidden ---