cleanup + faster

bongbui321 2024-03-12 01:01:58 -04:00
parent f5f88b9be2
commit 48523af093
2 changed files with 18 additions and 67 deletions


@@ -1375,10 +1375,10 @@ class firehose(metaclass=LogBase):
         slot_b_status = not slot_a_status
         fpartitions = {}
         try:
-            for lun in self.luns:
-                lunname = "Lun" + str(lun)
+            for lun_a in self.luns:
+                lunname = "Lun" + str(lun_a)
                 fpartitions[lunname] = []
-                header_data_a, guid_gpt_a = self.get_gpt(lun, int(0), int(0), int(0))
+                header_data_a, guid_gpt_a = self.get_gpt(lun_a, int(0), int(0), int(0))
                 if guid_gpt_a is None:
                     break
                 else:
@@ -1386,11 +1386,16 @@ class firehose(metaclass=LogBase):
                         slot = partitionname_a.lower()[-2:]
                         if slot == "_a":
                             partitionname_b = partitionname_a[:-1] + "b"
-                            resp = self.detect_partition(arguments=None, partitionname=partitionname_b, send_full=True)
-                            if not resp[0]:
-                                self.error(f"Cannot find partition {partitionname_b}")
-                                return False
-                            _, lun_b, header_data_b, guid_gpt_b = resp
+                            if partitionname_b in guid_gpt_a.partentries:
+                                lun_b = lun_a
+                                header_data_b = header_data_a
+                                guid_gpt_b = guid_gpt_a
+                            else:
+                                resp = self.detect_partition(arguments=None, partitionname=partitionname_b, send_full=True)
+                                if not resp[0]:
+                                    self.error(f"Cannot find partition {partitionname_b}")
+                                    return False
+                                _, lun_b, header_data_b, guid_gpt_b = resp
                             part_a = guid_gpt_a.partentries[partitionname_a]
                             part_b = guid_gpt_b.partentries[partitionname_b]
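Note on the same-LUN branch above: binding header_data_b to header_data_a makes both names refer to the same buffer object, so a later in-place patch through either name is visible through the other. The snippet below is a standalone illustration of that aliasing, assuming the GPT header is held in a mutable bytearray; it is an illustration only, not code from this repo.

    # Standalone illustration of buffer aliasing; not repo code.
    header_data_a = bytearray(b"\x00" * 16)   # stand-in for a GPT header buffer
    header_data_b = header_data_a             # same object when both slots share a LUN

    header_data_a[4:8] = b"\xde\xad\xbe\xef"  # in-place patch of slot A's entry
    assert header_data_b[4:8] == b"\xde\xad\xbe\xef"  # visible through the alias too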
@@ -1403,26 +1408,24 @@
                                 slot_a_status, slot_b_status,
                                 is_boot
                             )
+                            assert(poffset_b != poffset_a)
                             header_data_a[poffset_a : poffset_a+len(pdata_a)] = pdata_a
                             new_header_a = guid_gpt_a.fix_gpt_crc(header_data_a)
-                            if lun == lun_b:
-                                assert(poffset_b != poffset_a)
-                                header_data_b = new_header_a
                             header_data_b[poffset_b:poffset_b + len(pdata_b)] = pdata_b
                             new_header_b = guid_gpt_b.fix_gpt_crc(header_data_b)
                             if new_header_a is not None:
                                 start_sector_patch_a = poffset_a // self.cfg.SECTOR_SIZE_IN_BYTES
                                 byte_offset_patch_a = poffset_a % self.cfg.SECTOR_SIZE_IN_BYTES
-                                cmd_patch_multiple(lun, start_sector_patch_a, byte_offset_patch_a, pdata_a)
+                                cmd_patch_multiple(lun_a, start_sector_patch_a, byte_offset_patch_a, pdata_a)
                                 # header will be updated in partitionname_b if in same lun
-                                if lun != lun_b:
+                                if lun_a != lun_b:
                                     headeroffset_a = guid_gpt_a.header.current_lba * guid_gpt_a.sectorsize
                                     start_sector_hdr_a = guid_gpt_a.header.current_lba
                                     pheader_a = new_header_a[headeroffset_a : headeroffset_a+guid_gpt_a.header.header_size]
-                                    cmd_patch_multiple(lun, start_sector_hdr_a, 0, pheader_a)
+                                    cmd_patch_multiple(lun_a, start_sector_hdr_a, 0, pheader_a)
                             if new_header_b is not None:
                                 start_sector_patch_b = poffset_b // self.cfg.SECTOR_SIZE_IN_BYTES
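The patch commands above address flash by sector index plus an in-sector byte offset, so the absolute offset of a partition entry is split with floor division and modulo against the sector size. A standalone example of that arithmetic; the 4096-byte sector size and the entry offset are assumed values for illustration only, and self.cfg.SECTOR_SIZE_IN_BYTES depends on the target.

    # Standalone arithmetic example; sector size and offset are assumed values.
    SECTOR_SIZE_IN_BYTES = 4096
    poffset_a = 0x6000 + 3 * 128                                # e.g. entry 3 of an entry array starting at 0x6000
    start_sector_patch_a = poffset_a // SECTOR_SIZE_IN_BYTES    # -> 6
    byte_offset_patch_a = poffset_a % SECTOR_SIZE_IN_BYTES      # -> 384
    assert start_sector_patch_a * SECTOR_SIZE_IN_BYTES + byte_offset_patch_a == poffset_a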


@@ -496,58 +496,6 @@ class gpt(metaclass=LogBase):
         res = self.print_gptfile(os.path.join("TestFiles", "gpt_sm8180x.bin"))
         assert res, "GPT Partition wasn't decoded properly"
-    def patch(self, data:bytes, partitionname="boot", active: bool = True):
-        def set_flags(flags, active):
-            new_flags = flags
-            if active:
-                new_flags |= AB_PARTITION_ATTR_SLOT_ACTIVE << (AB_FLAG_OFFSET*8)
-            else:
-                new_flags &= ~(AB_PARTITION_ATTR_SLOT_ACTIVE << (AB_FLAG_OFFSET*8))
-            return new_flags
-        try:
-            rf = BytesIO(data)
-            for sectorsize in [512, 4096]:
-                result = self.parse(data, sectorsize)
-                if result:
-                    for rname_a in self.partentries:
-                        if partitionname.lower() == rname_a.lower():
-                            rname_b = rname_a[:-1] + "b"
-                            if rname_b not in self.partentries:
-                                return None, None, None, None
-                            slot_a_state = active
-                            slot_b_state = not active
-                            partition_a = self.partentries[rname_a]
-                            partition_b = self.partentries[rname_b]
-                            rf.seek(partition_a.entryoffset)
-                            sdata_a = rf.read(self.header.part_entry_size)
-                            partentry_a = self.gpt_partition(sdata_a)
-                            partentry_a.flags = set_flags(partentry_a.flags, slot_a_state)
-                            rf.seek(partition_b.entryoffset)
-                            sdata_b = rf.read(self.header.part_entry_size)
-                            partentry_b = self.gpt_partition(sdata_b)
-                            partentry_b.flags = set_flags(partentry_b.flags, slot_b_state)
-                            print(f"[Before] pa: {partentry_a.type}, pb: {partentry_b.type}")
-                            old_a = partentry_a.type
-                            old_b = partentry_b.type
-                            partentry_a.type, partentry_b.type = partentry_b.type, partentry_a.type
-                            assert(partentry_a.type == old_b)
-                            assert(partentry_b.type == old_a)
-                            print(f"[After] pa: {partentry_a.type}, pb: {partentry_b.type}")
-                            pdata_a = partentry_a.create()
-                            pdata_b = partentry_b.create()
-                            return pdata_a, pdata_b, partition_a.entryoffset, partition_b.entryoffset
-                    break
-            return None, None, None, None
-        except Exception as e:
-            self.error(str(e))
-            return None, None, None, None
     def fix_gpt_crc(self, data):
         partentry_size = self.header.num_part_entries * self.header.part_entry_size
         partentry_offset = self.header.part_entry_start_lba * self.sectorsize
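For reference, the deleted patch() helper flipped the per-slot "active" attribute by shifting a flag constant into the attribute byte of the partition entry's flags field. Below is a minimal standalone sketch of that set/clear pattern; the constant values are placeholders chosen for illustration, not the repository's definitions.

    # Minimal sketch of the set/clear pattern from the removed set_flags(); values are assumed.
    AB_FLAG_OFFSET = 6                     # placeholder: byte offset of the A/B attribute byte
    AB_PARTITION_ATTR_SLOT_ACTIVE = 0x3F   # placeholder: "slot active" attribute bits

    def set_flags(flags: int, active: bool) -> int:
        # Shift the attribute into its byte position, then OR it in or mask it out.
        mask = AB_PARTITION_ATTR_SLOT_ACTIVE << (AB_FLAG_OFFSET * 8)
        return flags | mask if active else flags & ~mask

    flags = set_flags(0, True)
    assert flags >> (AB_FLAG_OFFSET * 8) == AB_PARTITION_ATTR_SLOT_ACTIVE
    assert set_flags(flags, False) == 0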