cpt_upgrader.py: diff between revisions 10930:ddc3d96d6313 (old) and 11077:fae097742b7e (new)

--- cpt_upgrader.py (10930:ddc3d96d6313)
+++ cpt_upgrader.py (11077:fae097742b7e)
 #!/usr/bin/env python
 
-# Copyright (c) 2012-2013 ARM Limited
+# Copyright (c) 2012-2013,2015 ARM Limited
 # All rights reserved
 #
 # The license below extends only to copyright in the software and shall
 # not be construed as granting a license to any other intellectual
 # property including but not limited to intellectual property relating
 # to a hardware implementation of the functionality of the software
 # licensed hereunder.  You may use the software subject to the license
 # terms below provided that you ensure that this notice is replicated
[... 19 unchanged lines hidden ...]
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
 # Authors: Ali Saidi
+#          Curtis Dunham
 #
 
 # This python code is used to migrate checkpoints that were created in one
 # version of the simulator to a newer version. As features are added or bugs
 # are fixed, some of the state that needs to be checkpointed can change. If
 # you have many historic checkpoints that you use, manually editing them to
 # fix them is both time consuming and error-prone.
 
 # This script provides a way to migrate checkpoints to the newer repository in
-# a programatic way. It can be imported into another script or used on the
+# a programmatic way. It can be imported into another script or used on the
 # command line. From the command line the script will either migrate every
 # checkpoint it finds recursively (-r option) or a single checkpoint. When a
-# change is made to the gem5 repository that breaks previous checkpoints a
-# from_N() method should be implemented here and the gem5CheckpointVersion
-# variable in src/sim/serialize.hh should be incremented. For each version
-# between the checkpoint's current version and the new version the from_N()
-# method will be run, passing in a ConfigParser object which contains the open
-# file. As these operations can be isa specific the method can verify the isa
-# and use regexes to find the correct sections that need to be updated.
+# change is made to the gem5 repository that breaks previous checkpoints an
+# upgrade() method should be implemented in its own .py file and placed in
+# src/util/cpt_upgraders/. For each upgrader whose tag is not present in
+# the checkpoint tag list, the upgrade() method will be run, passing in a
+# ConfigParser object which contains the open file. As these operations can
+# be isa specific the method can verify the isa and use regexes to find the
+# correct sections that need to be updated.
 
 
 import ConfigParser
-import sys, os
+import glob, types, sys, os
 import os.path as osp
 
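As an aside for readers new to the plugin scheme that the new header comment describes, a minimal upgrader file might look like the sketch below. The file name, the depends value, and the legacy_version line are illustrative placeholders; the body simply reuses the IDE dmaAborted fix from the legacy from_6() migration that is removed later in this diff.

# Hypothetical src/util/cpt_upgraders/example-widget.py; the tag defaults to
# the file name without ".py" ("example-widget" here).

def upgrader(cpt):
    # 'cpt' is the ConfigParser wrapping the open checkpoint; edit it in place.
    for sec in cpt.sections():
        # curSector is only present in IDE disk sections, so key on it.
        if cpt.has_option(sec, 'curSector'):
            cpt.set(sec, 'dmaAborted', 'false')

# Optional: only run after these tags have been applied (placeholder name).
depends = 'example-prereq-tag'

# Optional: position in the old linear version chain, if this upgrader
# replaces one of the legacy from_N() migrations.
#legacy_version = 7

The loader in the new Upgrader class accepts depends as either a single tag string or a list of tags.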
-# An example of a translator
-def from_0(cpt):
-    if cpt.get('root','isa') == 'arm':
-        for sec in cpt.sections():
-            import re
-            # Search for all the execution contexts
-            if re.search('.*sys.*\.cpu.*\.x.\..*', sec):
-                # Update each one
-                mr = cpt.get(sec, 'miscRegs').split()
-                #mr.insert(21,0)
-                #mr.insert(26,0)
-                cpt.set(sec, 'miscRegs', ' '.join(str(x) for x in mr))
+verbose_print = False
 
-# The backing store supporting the memories in the system has changed
-# in that it is now stored globally per address range. As a result the
-# actual storage is separate from the memory controllers themselves.
-def from_1(cpt):
-    for sec in cpt.sections():
-        import re
-        # Search for a physical memory
-        if re.search('.*sys.*\.physmem$', sec):
-            # Add the number of stores attribute to the global physmem
-            cpt.set(sec, 'nbr_of_stores', '1')
-
-            # Get the filename and size as this is moving to the
-            # specific backing store
-            mem_filename = cpt.get(sec, 'filename')
-            mem_size = cpt.get(sec, '_size')
-            cpt.remove_option(sec, 'filename')
-            cpt.remove_option(sec, '_size')
-
-            # Get the name so that we can create the new section
-            system_name = str(sec).split('.')[0]
-            section_name = system_name + '.physmem.store0'
-            cpt.add_section(section_name)
-            cpt.set(section_name, 'store_id', '0')
-            cpt.set(section_name, 'range_size', mem_size)
-            cpt.set(section_name, 'filename', mem_filename)
-        elif re.search('.*sys.*\.\w*mem$', sec):
-            # Due to the lack of information about a start address,
-            # this migration only works if there is a single memory in
-            # the system, thus starting at 0
-            raise ValueError("more than one memory detected (" + sec + ")")
-
-def from_2(cpt):
-    for sec in cpt.sections():
-        import re
-        # Search for a CPUs
-        if re.search('.*sys.*cpu', sec):
-            try:
-                junk = cpt.get(sec, 'instCnt')
-                cpt.set(sec, '_pid', '0')
-            except ConfigParser.NoOptionError:
-                pass
-
-# The ISA is now a separate SimObject, which means that we serialize
-# it in a separate section instead of as a part of the ThreadContext.
-def from_3(cpt):
-    isa = cpt.get('root','isa')
-    isa_fields = {
-        "alpha" : ( "fpcr", "uniq", "lock_flag", "lock_addr", "ipr" ),
-        "arm" : ( "miscRegs" ),
-        "sparc" : ( "asi", "tick", "fprs", "gsr", "softint", "tick_cmpr",
-                    "stick", "stick_cmpr", "tpc", "tnpc", "tstate", "tt",
-                    "tba", "pstate", "tl", "pil", "cwp", "gl", "hpstate",
-                    "htstate", "hintp", "htba", "hstick_cmpr",
-                    "strandStatusReg", "fsr", "priContext", "secContext",
-                    "partId", "lsuCtrlReg", "scratchPad",
-                    "cpu_mondo_head", "cpu_mondo_tail",
-                    "dev_mondo_head", "dev_mondo_tail",
-                    "res_error_head", "res_error_tail",
-                    "nres_error_head", "nres_error_tail",
-                    "tick_intr_sched",
-                    "cpu", "tc_num", "tick_cmp", "stick_cmp", "hstick_cmp"),
-        "x86" : ( "regVal" ),
-        }
-
-    isa_fields = isa_fields.get(isa, [])
-    isa_sections = []
-    for sec in cpt.sections():
-        import re
-
-        re_cpu_match = re.match('^(.*sys.*\.cpu[^.]*)\.xc\.(.+)$', sec)
-        # Search for all the execution contexts
-        if not re_cpu_match:
-            continue
-
-        if re_cpu_match.group(2) != "0":
-            # This shouldn't happen as we didn't support checkpointing
-            # of in-order and O3 CPUs.
-            raise ValueError("Don't know how to migrate multi-threaded CPUs "
-                             "from version 1")
-
-        isa_section = []
-        for fspec in isa_fields:
-            for (key, value) in cpt.items(sec, raw=True):
-                if key in isa_fields:
-                    isa_section.append((key, value))
-
-        name = "%s.isa" % re_cpu_match.group(1)
-        isa_sections.append((name, isa_section))
-
-        for (key, value) in isa_section:
-            cpt.remove_option(sec, key)
-
-    for (sec, options) in isa_sections:
-        # Some intermediate versions of gem5 have empty ISA sections
-        # (after we made the ISA a SimObject, but before we started to
-        # serialize into a separate ISA section).
-        if not cpt.has_section(sec):
-            cpt.add_section(sec)
-        else:
-            if cpt.items(sec):
-                raise ValueError("Unexpected populated ISA section in old "
-                                 "checkpoint")
-
-        for (key, value) in options:
-            cpt.set(sec, key, value)
-
-# Version 5 of the checkpoint format removes the MISCREG_CPSR_MODE
-# register from the ARM register file.
-def from_4(cpt):
-    if cpt.get('root','isa') == 'arm':
-        for sec in cpt.sections():
-            import re
-            # Search for all ISA sections
-            if re.search('.*sys.*\.cpu.*\.isa', sec):
-                mr = cpt.get(sec, 'miscRegs').split()
-                # Remove MISCREG_CPSR_MODE
-                del mr[137]
-                cpt.set(sec, 'miscRegs', ' '.join(str(x) for x in mr))
-
-# Version 6 of the checkpoint format adds tlb to x86 checkpoints
-def from_5(cpt):
-    if cpt.get('root','isa') == 'x86':
-        for sec in cpt.sections():
-            import re
-            # Search for all ISA sections
-            if re.search('.*sys.*\.cpu.*\.dtb$', sec):
-                cpt.set(sec, '_size', '0')
-                cpt.set(sec, 'lruSeq', '0')
-
-            if re.search('.*sys.*\.cpu.*\.itb$', sec):
-                cpt.set(sec, '_size', '0')
-                cpt.set(sec, 'lruSeq', '0')
-    else:
-        print "ISA is not x86"
-
-# Version 7 of the checkpoint adds support for the IDE dmaAbort flag
-def from_6(cpt):
-    # Update IDE disk devices with dmaAborted
-    for sec in cpt.sections():
-        # curSector only exists in IDE devices, so key on that attribute
-        if cpt.has_option(sec, "curSector"):
-            cpt.set(sec, "dmaAborted", "false")
-
-# Version 8 of the checkpoint adds an ARM MISCREG
-def from_7(cpt):
-    if cpt.get('root','isa') == 'arm':
-        for sec in cpt.sections():
-            import re
-            # Search for all ISA sections
-            if re.search('.*sys.*\.cpu.*\.isa', sec):
-                mr = cpt.get(sec, 'miscRegs').split()
-                if len(mr) == 161:
-                    print "MISCREG_TEEHBR already seems to be inserted."
-                else:
-                    # Add dummy value for MISCREG_TEEHBR
-                    mr.insert(51,0);
-                    cpt.set(sec, 'miscRegs', ' '.join(str(x) for x in mr))
-
-# Version 9 of the checkpoint adds an all ARMv8 state
-def from_8(cpt):
-    if cpt.get('root','isa') != 'arm':
+def verboseprint(*args):
+    if not verbose_print:
         return
-    import re
-    print "Warning: The size of the FP register file has changed. "\
-          "To get similar results you need to adjust the number of "\
-          "physical registers in the CPU you're restoring into by "\
-          "NNNN."
-    # Find the CPU context's and upgrade their registers
-    for sec in cpt.sections():
-        re_xc_match = re.match('^.*?sys.*?\.cpu(\d+)*\.xc\.*', sec)
-        if not re_xc_match:
-            continue
+    for arg in args:
+        print arg,
+    print
 
-        # Update floating point regs
-        fpr = cpt.get(sec, 'floatRegs.i').split()
-        # v8 has 128 normal fp and 32 special fp regs compared
-        # to v7's 64 normal fp and 8 special fp regs.
-        # Insert the extra normal fp registers at end of v7 normal fp regs
-        for x in xrange(64):
-            fpr.insert(64, "0")
-        # Append the extra special registers
-        for x in xrange(24):
-            fpr.append("0")
-        cpt.set(sec, 'floatRegs.i', ' '.join(str(x) for x in fpr))
+class Upgrader:
+    tag_set = set()
+    by_tag = {}
+    legacy = {}
+    def __init__(self, filename):
+        self.filename = filename
+        execfile(filename, {}, self.__dict__)
 
-        ir = cpt.get(sec, 'intRegs').split()
-        # Add in v8 int reg state
-        # Splice in R13_HYP
-        ir.insert(20, "0")
-        # Splice in INTREG_DUMMY and SP0 - SP3
-        ir.extend(["0", "0", "0", "0", "0"])
-        cpt.set(sec, 'intRegs', ' '.join(str(x) for x in ir))
+        if not hasattr(self, 'tag'):
+            self.tag = osp.basename(filename)[:-3]
+        if not hasattr(self, 'depends'):
+            self.depends = []
+        elif isinstance(self.depends, str):
+            self.depends = [self.depends]
 
-    # Update the cpu interrupt field
-    for sec in cpt.sections():
-        re_int_match = re.match("^.*?sys.*?\.cpu(\d+)*$", sec)
-        if not re_int_match:
-            continue
+        if not hasattr(self, 'upgrader'):
+            print "Error: no upgrader method for", self.tag
+            sys.exit(1)
+        elif not isinstance(self.upgrader, types.FunctionType):
+            print "Error: 'upgrader' for %s is %s, not function", \
+                self.tag, type(self)
+            sys.exit(1)
 
-        irqs = cpt.get(sec, "interrupts").split()
-        irqs.append("false")
-        irqs.append("false")
-        cpt.set(sec, "interrupts", ' '.join(str(x) for x in irqs))
+        if hasattr(self, 'legacy_version'):
+            Upgrader.legacy[self.legacy_version] = self
 
-    # Update the per cpu interrupt structure
-    for sec in cpt.sections():
-        re_int_match = re.match("^.*?sys.*?\.cpu(\d+)*\.interrupts$", sec)
-        if not re_int_match:
-            continue
+        Upgrader.by_tag[self.tag] = self
+        Upgrader.tag_set.add(self.tag)
 
-        irqs = cpt.get(sec, "interrupts").split()
-        irqs.append("false")
-        irqs.append("false")
-        cpt.set(sec, "interrupts", ' '.join(str(x) for x in irqs))
+    def ready(self, tags):
+        for dep in self.depends:
+            if dep not in tags:
+                return False
+        return True
 
-    # Update the misc regs and add in new isa specific fields
-    for sec in cpt.sections():
-        re_isa_match = re.match("^.*?sys.*?\.cpu(\d+)*\.isa$", sec)
-        if not re_isa_match:
-            continue
+    def upgrade(self, cpt):
+        (self.upgrader)(cpt)
+        verboseprint("applied upgrade for", self.tag)
 
-        cpt.set(sec, 'haveSecurity', 'false')
-        cpt.set(sec, 'haveLPAE', 'false')
-        cpt.set(sec, 'haveVirtualization', 'false')
-        cpt.set(sec, 'haveLargeAsid64', 'false')
-        cpt.set(sec, 'physAddrRange64', '40')
+    @staticmethod
+    def get(tag):
+        return Upgrader.by_tag[tag]
 
-        # splice in the new misc registers, ~200 -> 605 registers,
-        # ordering does not remain consistent
-        mr_old = cpt.get(sec, 'miscRegs').split()
-        mr_new = [ '0' for x in xrange(605) ]
+    @staticmethod
+    def load_all():
+        util_dir = osp.dirname(osp.abspath(__file__))
 
-        # map old v7 miscRegs to new v8 miscRegs
-        mr_new[0] = mr_old[0] # CPSR
-        mr_new[16] = mr_old[1] # CPSR_Q
-        mr_new[1] = mr_old[2] # SPSR
-        mr_new[2] = mr_old[3] # SPSR_FIQ
-        mr_new[3] = mr_old[4] # SPSR_IRQ
-        mr_new[4] = mr_old[5] # SPSR_SVC
-        mr_new[5] = mr_old[6] # SPSR_MON
-        mr_new[8] = mr_old[7] # SPSR_UND
-        mr_new[6] = mr_old[8] # SPSR_ABT
-        mr_new[432] = mr_old[9] # FPSR
-        mr_new[10] = mr_old[10] # FPSID
-        mr_new[11] = mr_old[11] # FPSCR
-        mr_new[18] = mr_old[12] # FPSCR_QC
-        mr_new[17] = mr_old[13] # FPSCR_EXC
-        mr_new[14] = mr_old[14] # FPEXC
-        mr_new[13] = mr_old[15] # MVFR0
-        mr_new[12] = mr_old[16] # MVFR1
-        mr_new[28] = mr_old[17] # SCTLR_RST,
-        mr_new[29] = mr_old[18] # SEV_MAILBOX,
-        mr_new[30] = mr_old[19] # DBGDIDR
-        mr_new[31] = mr_old[20] # DBGDSCR_INT,
-        mr_new[33] = mr_old[21] # DBGDTRRX_INT,
-        mr_new[34] = mr_old[22] # DBGTRTX_INT,
-        mr_new[35] = mr_old[23] # DBGWFAR,
-        mr_new[36] = mr_old[24] # DBGVCR,
-        #mr_new[] = mr_old[25] # DBGECR -> UNUSED,
-        #mr_new[] = mr_old[26] # DBGDSCCR -> UNUSED,
-        #mr_new[] = mr_old[27] # DBGSMCR -> UNUSED,
-        mr_new[37] = mr_old[28] # DBGDTRRX_EXT,
-        mr_new[38] = mr_old[29] # DBGDSCR_EXT,
-        mr_new[39] = mr_old[30] # DBGDTRTX_EXT,
-        #mr_new[] = mr_old[31] # DBGDRCR -> UNUSED,
-        mr_new[41] = mr_old[32] # DBGBVR,
-        mr_new[47] = mr_old[33] # DBGBCR,
-        #mr_new[] = mr_old[34] # DBGBVR_M -> UNUSED,
-        #mr_new[] = mr_old[35] # DBGBCR_M -> UNUSED,
-        mr_new[61] = mr_old[36] # DBGDRAR,
-        #mr_new[] = mr_old[37] # DBGBXVR_M -> UNUSED,
-        mr_new[64] = mr_old[38] # DBGOSLAR,
-        #mr_new[] = mr_old[39] # DBGOSSRR -> UNUSED,
-        mr_new[66] = mr_old[40] # DBGOSDLR,
-        mr_new[67] = mr_old[41] # DBGPRCR,
-        #mr_new[] = mr_old[42] # DBGPRSR -> UNUSED,
-        mr_new[68] = mr_old[43] # DBGDSAR,
-        #mr_new[] = mr_old[44] # DBGITCTRL -> UNUSED,
-        mr_new[69] = mr_old[45] # DBGCLAIMSET,
-        mr_new[70] = mr_old[46] # DBGCLAIMCLR,
-        mr_new[71] = mr_old[47] # DBGAUTHSTATUS,
-        mr_new[72] = mr_old[48] # DBGDEVID2,
-        mr_new[73] = mr_old[49] # DBGDEVID1,
-        mr_new[74] = mr_old[50] # DBGDEVID,
-        mr_new[77] = mr_old[51] # TEEHBR,
-        mr_new[109] = mr_old[52] # v7 SCTLR -> aarc32 SCTLR_NS
-        mr_new[189] = mr_old[53] # DCCISW,
-        mr_new[188] = mr_old[54] # DCCIMVAC,
-        mr_new[183] = mr_old[55] # DCCMVAC,
-        mr_new[271] = mr_old[56] # v7 CONTEXTIDR -> aarch32 CONTEXTIDR_NS,
-        mr_new[274] = mr_old[57] # v7 TPIDRURW -> aarch32 TPIDRURW_NS,
-        mr_new[277] = mr_old[58] # v7 TPIDRURO -> aarch32 TPIDRURO_NS,
-        mr_new[280] = mr_old[59] # v7 TPIDRPRW -> aarch32 TPIDRPRW_NS,
-        mr_new[170] = mr_old[60] # CP15ISB,
-        mr_new[185] = mr_old[61] # CP15DSB,
-        mr_new[186] = mr_old[62] # CP15DMB,
-        mr_new[114] = mr_old[63] # CPACR,
-        mr_new[101] = mr_old[64] # CLIDR,
-        mr_new[100] = mr_old[65] # CCSIDR,
-        mr_new[104] = mr_old[66] # v7 CSSELR -> aarch32 CSSELR_NS,
-        mr_new[163] = mr_old[67] # ICIALLUIS,
-        mr_new[168] = mr_old[68] # ICIALLU,
-        mr_new[169] = mr_old[69] # ICIMVAU,
-        mr_new[172] = mr_old[70] # BPIMVA,
-        mr_new[164] = mr_old[71] # BPIALLIS,
-        mr_new[171] = mr_old[72] # BPIALL,
-        mr_new[80] = mr_old[73] # MIDR,
-        mr_new[126] = mr_old[74] # v7 TTBR0 -> aarch32 TTBR0_NS,
-        mr_new[129] = mr_old[75] # v7 TTBR1 -> aarch32 TTBR1_NS,
-        mr_new[83] = mr_old[76] # TLBTR,
-        mr_new[137] = mr_old[77] # v7 DACR -> aarch32 DACR_NS,
-        mr_new[192] = mr_old[78] # TLBIALLIS,
-        mr_new[193] = mr_old[79] # TLBIMVAIS,
-        mr_new[194] = mr_old[80] # TLBIASIDIS,
-        mr_new[195] = mr_old[81] # TLBIMVAAIS,
-        mr_new[198] = mr_old[82] # ITLBIALL,
-        mr_new[199] = mr_old[83] # ITLBIMVA,
-        mr_new[200] = mr_old[84] # ITLBIASID,
-        mr_new[201] = mr_old[85] # DTLBIALL,
-        mr_new[202] = mr_old[86] # DTLBIMVA,
-        mr_new[203] = mr_old[87] # DTLBIASID,
-        mr_new[204] = mr_old[88] # TLBIALL,
-        mr_new[205] = mr_old[89] # TLBIMVA,
-        mr_new[206] = mr_old[90] # TLBIASID,
-        mr_new[207] = mr_old[91] # TLBIMVAA,
-        mr_new[140] = mr_old[92] # v7 DFSR -> aarch32 DFSR_NS,
-        mr_new[143] = mr_old[93] # v7 IFSR -> aarch32 IFSR_NS,
-        mr_new[155] = mr_old[94] # v7 DFAR -> aarch32 DFAR_NS,
-        mr_new[158] = mr_old[95] # v7 IFAR -> aarch32 IFAR_NS,
-        mr_new[84] = mr_old[96] # MPIDR,
-        mr_new[241] = mr_old[97] # v7 PRRR -> aarch32 PRRR_NS,
-        mr_new[247] = mr_old[98] # v7 NMRR -> aarch32 NMRR_NS,
-        mr_new[131] = mr_old[99] # TTBCR,
-        mr_new[86] = mr_old[100] # ID_PFR0,
-        mr_new[81] = mr_old[101] # CTR,
-        mr_new[115] = mr_old[102] # SCR,
-        # Set the non-secure bit
-        scr = int(mr_new[115])
-        scr = scr | 0x1
-        mr_new[115] = str(scr)
-        ###
-        mr_new[116] = mr_old[103] # SDER,
-        mr_new[165] = mr_old[104] # PAR,
-        mr_new[175] = mr_old[105] # V2PCWPR -> ATS1CPR,
-        mr_new[176] = mr_old[106] # V2PCWPW -> ATS1CPW,
-        mr_new[177] = mr_old[107] # V2PCWUR -> ATS1CUR,
-        mr_new[178] = mr_old[108] # V2PCWUW -> ATS1CUW,
-        mr_new[179] = mr_old[109] # V2POWPR -> ATS12NSOPR,
-        mr_new[180] = mr_old[110] # V2POWPW -> ATS12NSOPW,
-        mr_new[181] = mr_old[111] # V2POWUR -> ATS12NSOUR,
-        mr_new[182] = mr_old[112] # V2POWUW -> ATS12NWOUW,
-        mr_new[90] = mr_old[113] # ID_MMFR0,
-        mr_new[92] = mr_old[114] # ID_MMFR2,
-        mr_new[93] = mr_old[115] # ID_MMFR3,
-        mr_new[112] = mr_old[116] # v7 ACTLR -> aarch32 ACTLR_NS
-        mr_new[222] = mr_old[117] # PMCR,
-        mr_new[230] = mr_old[118] # PMCCNTR,
-        mr_new[223] = mr_old[119] # PMCNTENSET,
-        mr_new[224] = mr_old[120] # PMCNTENCLR,
-        mr_new[225] = mr_old[121] # PMOVSR,
-        mr_new[226] = mr_old[122] # PMSWINC,
-        mr_new[227] = mr_old[123] # PMSELR,
-        mr_new[228] = mr_old[124] # PMCEID0,
-        mr_new[229] = mr_old[125] # PMCEID1,
-        mr_new[231] = mr_old[126] # PMXEVTYPER,
-        mr_new[233] = mr_old[127] # PMXEVCNTR,
-        mr_new[234] = mr_old[128] # PMUSERENR,
-        mr_new[235] = mr_old[129] # PMINTENSET,
-        mr_new[236] = mr_old[130] # PMINTENCLR,
-        mr_new[94] = mr_old[131] # ID_ISAR0,
-        mr_new[95] = mr_old[132] # ID_ISAR1,
-        mr_new[96] = mr_old[133] # ID_ISAR2,
-        mr_new[97] = mr_old[134] # ID_ISAR3,
-        mr_new[98] = mr_old[135] # ID_ISAR4,
-        mr_new[99] = mr_old[136] # ID_ISAR5,
-        mr_new[20] = mr_old[137] # LOCKFLAG,
-        mr_new[19] = mr_old[138] # LOCKADDR,
-        mr_new[87] = mr_old[139] # ID_PFR1,
-        # Set up the processor features register
-        pfr = int(mr_new[87])
-        pfr = pfr | 0x1011
-        mr_new[87] = str(pfr)
-        ###
-        mr_new[238] = mr_old[140] # L2CTLR,
-        mr_new[82] = mr_old[141] # TCMTR
-        mr_new[88] = mr_old[142] # ID_DFR0,
-        mr_new[89] = mr_old[143] # ID_AFR0,
-        mr_new[91] = mr_old[144] # ID_MMFR1,
-        mr_new[102] = mr_old[145] # AIDR,
-        mr_new[146] = mr_old[146] # v7 ADFSR -> aarch32 ADFSR_NS,
-        mr_new[148] = mr_old[147] # AIFSR,
-        mr_new[173] = mr_old[148] # DCIMVAC,
-        mr_new[174] = mr_old[149] # DCISW,
-        mr_new[184] = mr_old[150] # MCCSW -> DCCSW,
-        mr_new[187] = mr_old[151] # DCCMVAU,
-        mr_new[117] = mr_old[152] # NSACR,
-        mr_new[262] = mr_old[153] # VBAR,
-        mr_new[265] = mr_old[154] # MVBAR,
-        mr_new[267] = mr_old[155] # ISR,
-        mr_new[269] = mr_old[156] # FCEIDR -> FCSEIDR,
-        #mr_new[] = mr_old[157] # L2LATENCY -> UNUSED,
-        #mr_new[] = mr_old[158] # CRN15 -> UNUSED,
-        mr_new[599] = mr_old[159] # NOP
-        mr_new[600] = mr_old[160] # RAZ,
+        for py in glob.glob(util_dir + '/cpt_upgraders/*.py'):
+            Upgrader(py)
 
-        # Set the new miscRegs structure
-        cpt.set(sec, 'miscRegs', ' '.join(str(x) for x in mr_new))
+        # make linear dependences for legacy versions
+        i = 3
+        while i in Upgrader.legacy:
+            Upgrader.legacy[i].depends = [Upgrader.legacy[i-1].tag]
+            i = i + 1
 
-    cpu_prefix = {}
-    # Add in state for ITB/DTB
-    for sec in cpt.sections():
-        re_tlb_match = re.match('(^.*?sys.*?\.cpu(\d+)*)\.(dtb|itb)$', sec)
-        if not re_tlb_match:
-            continue
-
-        cpu_prefix[re_tlb_match.group(1)] = True # Save off prefix to add
-        # Set the non-secure bit (bit 9) to 1 for attributes
-        attr = int(cpt.get(sec, '_attr'))
-        attr = attr | 0x200
-        cpt.set(sec, '_attr', str(attr))
-        cpt.set(sec, 'haveLPAE', 'false')
-        cpt.set(sec, 'directToStage2', 'false')
-        cpt.set(sec, 'stage2Req', 'false')
-        cpt.set(sec, 'bootUncacheability', 'true')
-
-    # Add in extra state for the new TLB Entries
-    for sec in cpt.sections():
-        re_tlbentry_match = re.match('(^.*?sys.*?\.cpu(\d+)*)\.(dtb|itb).TlbEntry\d+$', sec)
-        if not re_tlbentry_match:
-            continue
-
-        # Add in the new entries
-        cpt.set(sec, 'longDescFormat', 'false')
-        cpt.set(sec, 'vmid', '0')
-        cpt.set(sec, 'isHyp', 'false')
-        valid = cpt.get(sec, 'valid')
-        if valid == 'true':
-            cpt.set(sec, 'ns', 'true')
-            cpt.set(sec, 'nstid', 'true')
-            cpt.set(sec, 'pxn', 'true')
-            cpt.set(sec, 'hap', '3')
-            # All v7 code used 2 level page tables
-            cpt.set(sec, 'lookupLevel', '2')
-            attr = int(cpt.get(sec, 'attributes'))
-            # set the non-secure bit (bit 9) to 1
-            # as no previous v7 code used secure code
-            attr = attr | 0x200
-            cpt.set(sec, 'attributes', str(attr))
-        else:
-            cpt.set(sec, 'ns', 'false')
-            cpt.set(sec, 'nstid', 'false')
-            cpt.set(sec, 'pxn', 'false')
-            cpt.set(sec, 'hap', '0')
-            cpt.set(sec, 'lookupLevel', '0')
-        cpt.set(sec, 'outerShareable', 'false')
-
-    # Add d/istage2_mmu and d/istage2_mmu.stage2_tlb
-    for key in cpu_prefix:
-        for suffix in ['.istage2_mmu', '.dstage2_mmu']:
-            new_sec = key + suffix
-            cpt.add_section(new_sec)
-            new_sec = key + suffix + ".stage2_tlb"
-            cpt.add_section(new_sec)
-            # Fill in tlb info with some defaults
-            cpt.set(new_sec, '_attr', '0')
-            cpt.set(new_sec, 'haveLPAE', 'false')
-            cpt.set(new_sec, 'directToStage2', 'false')
-            cpt.set(new_sec, 'stage2Req', 'false')
-            cpt.set(new_sec, 'bootUncacheability', 'false')
-            cpt.set(new_sec, 'num_entries', '0')
-
-# Version 10 adds block_size_bytes to system.ruby
-def from_9(cpt):
-    for sec in cpt.sections():
-        if sec == 'system.ruby':
-            # Use Gem5's default of 64; this should be changed if the to be
-            # upgraded checkpoints were not taken with block-size 64!
-            cpt.set(sec, 'block_size_bytes', '64')
-
-# Checkpoint version 11 (0xB) adds the perfLevel variable in the clock domain
-# and voltage domain simObjects used for DVFS and is serialized and
-# unserialized.
-def from_A(cpt):
-    for sec in cpt.sections():
-        import re
-
-        if re.match('^.*sys.*[._]clk_domain$', sec):
-            # Make _perfLevel equal to 0 which means best performance
-            cpt.set(sec, '_perfLevel', ' '.join('0'))
-        elif re.match('^.*sys.*[._]voltage_domain$', sec):
-            # Make _perfLevel equal to 0 which means best performance
-            cpt.set(sec, '_perfLevel', ' '.join('0'))
-        else:
-            continue
-
-# The change between versions C and D is the addition of support for multiple
-# event queues, so for old checkpoints we must specify that there's only one.
-def from_B(cpt):
-    cpt.set('Globals', 'numMainEventQueues', '1')
-
-# Checkpoint version D uses condition code registers for the ARM
-# architecture; previously the integer register file was used for these
-# registers. To upgrade, we move those 5 integer registers to the ccRegs
-# register file.
-def from_C(cpt):
-    if cpt.get('root','isa') == 'arm':
-        for sec in cpt.sections():
-            import re
-
-            re_cpu_match = re.match('^(.*sys.*\.cpu[^.]*)\.xc\.(.+)$', sec)
-            # Search for all the execution contexts
-            if not re_cpu_match:
-                continue
-
-            items = []
-            for (item,value) in cpt.items(sec):
-                items.append(item)
-            if 'ccRegs' not in items:
-                intRegs = cpt.get(sec, 'intRegs').split()
-
-                ccRegs = intRegs[38:43]
-                del intRegs[38:43]
-
-                ccRegs.append('0') # CCREG_ZERO
-
-                cpt.set(sec, 'intRegs', ' '.join(intRegs))
-                cpt.set(sec, 'ccRegs', ' '.join(ccRegs))
-
-# Checkpoint version E adds the ARM CONTEXTIDR_EL2 miscreg.
-def from_D(cpt):
-    if cpt.get('root','isa') == 'arm':
-        for sec in cpt.sections():
-            import re
-            # Search for all ISA sections
-            if re.search('.*sys.*\.cpu.*\.isa$', sec):
-                miscRegs = cpt.get(sec, 'miscRegs').split()
-                # CONTEXTIDR_EL2 defaults to 0b11111100000000000001
-                miscRegs[599:599] = [0xFC001]
-                cpt.set(sec, 'miscRegs', ' '.join(str(x) for x in miscRegs))
-
-# Checkpoint version F renames an internal member of Process class.
-def from_E(cpt):
-    import re
-    for sec in cpt.sections():
-        fdm = 'FdMap'
-        fde = 'FDEntry'
-        if re.match('.*\.%s.*' % fdm, sec):
-            rename = re.sub(fdm, fde, sec)
-            split = re.split(fde, rename)
-
-            # rename the section and add the 'mode' field
-            rename_section(cpt, sec, rename)
-            cpt.set(rename, 'mode', "0") # no proper value to set :(
-
-            # add in entries 257 to 1023
-            if split[1] == "0":
-                for x in range(257, 1024):
-                    seq = (split[0], fde, "%s" % x)
-                    section = "".join(seq)
-                    cpt.add_section(section)
-                    cpt.set(section, 'fd', '-1')
-
-
-migrations = []
-migrations.append(from_0)
-migrations.append(from_1)
-migrations.append(from_2)
-migrations.append(from_3)
-migrations.append(from_4)
-migrations.append(from_5)
-migrations.append(from_6)
-migrations.append(from_7)
-migrations.append(from_8)
-migrations.append(from_9)
-migrations.append(from_A)
-migrations.append(from_B)
-migrations.append(from_C)
-migrations.append(from_D)
-migrations.append(from_E)
-
-# http://stackoverflow.com/questions/15069127/python-configparser-module-\
-# rename-a-section
-def rename_section(cp, section_from, section_to):
-    items = cp.items(section_from)
-    cp.add_section(section_to)
-    for item in items:
-        cp.set(section_to, item[0], item[1])
-    cp.remove_section(section_from)
-
-verbose_print = False
-
-def verboseprint(*args):
-    if not verbose_print:
-        return
-    for arg in args:
-        print arg,
-    print
-
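For reference, the two forms of version metadata that the rewritten process_file() below has to cope with might look roughly like this inside a checkpoint file (section contents abridged; the version number and tag names are examples only):

# Old-style checkpoint: a single linear version number
[root]
cpt_ver=14

# New-style checkpoint: a set of version tags
[Globals]
version_tags=example-tag-a example-tag-b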
 def process_file(path, **kwargs):
     if not osp.isfile(path):
         import errno
         raise IOError(errno.ENOENT, "No such file", path)
 
     verboseprint("Processing file %s...." % path)
 
     if kwargs.get('backup', True):
[... 5 unchanged lines hidden ...]
     # gem5 is case sensitive with parameters
     cpt.optionxform = str
 
     # Read the current data
     cpt_file = file(path, 'r')
     cpt.readfp(cpt_file)
     cpt_file.close()
 
+    change = False
+
     # Make sure we know what we're starting from
-    if not cpt.has_option('root','cpt_ver'):
-        raise LookupError("cannot determine version of checkpoint")
+    if cpt.has_option('root','cpt_ver'):
+        cpt_ver = cpt.getint('root','cpt_ver')
 
-    cpt_ver = cpt.getint('root','cpt_ver')
+        # Legacy linear checkpoint version
+        # convert to list of tags before proceeding
+        tags = set([])
+        for i in xrange(2, cpt_ver+1):
+            tags.add(Upgrader.legacy[i].tag)
+        verboseprint("performed legacy version -> tags conversion")
+        change = True
 
-    # If the current checkpoint is longer than the migrations list, we have a problem
-    # and someone didn't update this file
-    if cpt_ver > len(migrations):
-        raise ValueError("upgrade script is too old and needs updating")
+        cpt.remove_option('root', 'cpt_ver')
+    elif cpt.has_option('Globals','version_tags'):
+        tags = set((''.join(cpt.get('Globals','version_tags'))).split())
+    else:
+        print "fatal: no version information in checkpoint"
+        exit(1)
 
-    verboseprint("\t...file is at version %#x" % cpt_ver)
+    verboseprint("has tags", ' '.join(tags))
+    # If the current checkpoint has a tag we don't know about, we have
+    # a divergence that (in general) must be addressed by (e.g.) merging
+    # simulator support for its changes.
+    unknown_tags = tags - Upgrader.tag_set
+    if unknown_tags:
+        print "warning: upgrade script does not recognize the following "\
+              "tags in this checkpoint:", ' '.join(unknown_tags)
 
-    if cpt_ver == len(migrations):
-        verboseprint("\t...nothing to do")
+    # Apply migrations for tags not in checkpoint, respecting dependences
+    to_apply = Upgrader.tag_set - tags
+    while to_apply:
+        ready = set([ t for t in to_apply if Upgrader.get(t).ready(tags) ])
+        if not ready:
+            print "could not apply these upgrades:", ' '.join(to_apply)
+            print "upgrade dependences impossible to resolve; aborting"
+            exit(1)
+
+        for tag in ready:
+            Upgrader.get(tag).upgrade(cpt)
+            tags.add(tag)
+            change = True
+
+        to_apply -= ready
+
+    if not change:
+        verboseprint("...nothing to do")
         return
 
-    # Walk through every function from now until the end fixing the checkpoint
-    for v in xrange(cpt_ver,len(migrations)):
-        verboseprint("\t...migrating to version %#x" % (v + 1))
-        migrations[v](cpt)
-        cpt.set('root','cpt_ver', str(v + 1))
+    cpt.set('Globals', 'version_tags', ' '.join(tags))
 
     # Write the old data back
-    verboseprint("\t...completed")
+    verboseprint("...completed")
     cpt.write(file(path, 'w'))
 
 if __name__ == '__main__':
-    from optparse import OptionParser
+    from optparse import OptionParser, SUPPRESS_HELP
     parser = OptionParser("usage: %prog [options] <filename or directory>")
     parser.add_option("-r", "--recurse", action="store_true",
                       help="Recurse through all subdirectories modifying "\
                            "each checkpoint that is found")
     parser.add_option("-N", "--no-backup", action="store_false",
                       dest="backup", default=True,
                       help="Do not back up each checkpoint before modifying it")
     parser.add_option("-v", "--verbose", action="store_true",
                       help="Print out debugging information as the upgrader runs")
+    parser.add_option("--get-cc-file", action="store_true",
+                      # used during build; generate src/sim/tags.cc and exit
+                      help=SUPPRESS_HELP)
 
     (options, args) = parser.parse_args()
-    if len(args) != 1:
+    verbose_print = options.verbose
+
+    Upgrader.load_all()
+
+    if options.get_cc_file:
+        print "// this file is auto-generated by util/cpt_upgrader.py"
+        print "#include <string>"
+        print "#include <set>"
+        print
+        print "std::set<std::string> version_tags = {"
+        for tag in Upgrader.tag_set:
+            print "  \"%s\"," % tag
+        print "};"
+        exit(0)
+    elif len(args) != 1:
         parser.error("You must specify a checkpoint file to modify or a "\
                      "directory of checkpoints to recursively update")
 
-    verbose_print = options.verbose
-
     # Deal with shell variables and ~
     path = osp.expandvars(osp.expanduser(args[0]))
 
     # Process a single file if we have it
     if osp.isfile(path):
         process_file(path, **vars(options))
     # Process an entire directory
     elif osp.isdir(path):
[... 18 unchanged lines hidden ...]
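Since the header comment notes that the script can be imported into another script as well as run on the command line with -r/-N/-v, a minimal driver might look like the sketch below; it assumes util/ is on the Python path, and the checkpoint path is a placeholder.

# Hypothetical driver script using the new tag-based upgrader.
import cpt_upgrader

# Discover the upgrader plugins shipped in util/cpt_upgraders/.
cpt_upgrader.Upgrader.load_all()

# Upgrade one checkpoint in place, keeping a backup copy first.
cpt_upgrader.process_file('m5out/cpt.1000000/m5.cpt', backup=True)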