diff --git a/scripts/extract-partitions.py b/scripts/extract-partitions.py
new file mode 100755
index 0000000..9803155
--- /dev/null
+++ b/scripts/extract-partitions.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+"""Dump partitions of a connected LG device over the LAF protocol."""
+from collections import OrderedDict
+from contextlib import closing, contextmanager
+import argparse, logging, os, struct
+import lglaf
+
+_logger = logging.getLogger(__name__)
+
+def read_uint32(data, offset):
+    # LAF headers are little-endian; '<I' = unsigned 32-bit LE.
+    return struct.unpack_from('<I', data, offset)[0]
+
+# NOTE(review): the span between "'<I" above and "offset >=" below was lost
+# to markup stripping in this copy of the diff.  It contained (at least)
+# read_partitions(), laf_open_ro(), laf_read(), MAX_BLOCK_SIZE and the
+# argparse parser, all of which are referenced by the surviving code —
+# restore them from the upstream commit before applying this patch.
+
+def dump_file(comm, remote_path, local_path, size):
+    # NOTE(review): signature and resume preamble reconstructed from the
+    # call site in dump_partitions() and the "already retrieved" log line
+    # below — confirm against upstream.  `size` is in bytes here.
+    try:
+        offset = os.path.getsize(local_path)
+    except OSError:
+        offset = 0
+
+    # Nothing to do when the local copy already covers the partition.
+    if offset >= size:
+        if offset > size:
+            # warn() is a deprecated alias of warning(); values are bytes,
+            # so scale to KiB to match the "%dK" label.
+            _logger.warning("%s: unexpected size %dK > %dK",
+                    local_path, offset // 1024, size // 1024)
+        else:
+            _logger.info("%s: already retrieved %dK",
+                    local_path, size // 1024)
+        return
+
+    # Read offsets must be a multiple of 512 bytes, enforce this
+    BLOCK_SIZE = 512
+    offset = BLOCK_SIZE * (offset // BLOCK_SIZE)
+
+    with laf_open_ro(comm, remote_path) as fd_num:
+        # `offset` is a byte count; the original logged offset * BLOCK_SIZE,
+        # over-scaling the resume position by 512x.
+        _logger.debug("Opened fd %d for %s (size %dK, offset %dK)",
+                fd_num, remote_path, size // 1024, offset // 1024)
+        with open(local_path, 'ab') as f:
+            # Drop any unaligned tail so the append position matches the
+            # (block-aligned) resume offset.  The original did
+            # f.seek(BLOCK_SIZE * offset), which both over-scaled by 512
+            # and was a no-op anyway: 'a' mode appends regardless of seeks,
+            # so up to 511 re-read bytes were appended as duplicates.
+            f.truncate(offset)
+            while offset < size:
+                chunksize = min(size - offset, BLOCK_SIZE * MAX_BLOCK_SIZE)
+                data = laf_read(comm, fd_num, offset // BLOCK_SIZE, chunksize)
+                f.write(data)
+                offset += chunksize
+
+def dump_partitions(comm, outdir, max_size):
+    """Dump every partition no larger than max_size KiB into outdir."""
+    parts = read_partitions(comm)
+    for name, size in parts.items():
+        if size > max_size:
+            # Lazy %-args like every other logging call (was eager "%" here).
+            _logger.info("Ignoring large partition %s of size %dK", name, size)
+            continue
+        out_path = os.path.join(outdir, "%s.bin" % name)
+        # Partition sizes are reported in KiB; dump_file wants bytes.
+        dump_file(comm, "/dev/block/%s" % name, out_path, 1024 * size)
+
+def main():
+    args = parser.parse_args()
+    logging.basicConfig(format='%(asctime)s %(name)s: %(levelname)s: %(message)s',
+            level=logging.DEBUG if args.debug else logging.INFO)
+
+    # Best-effort mkdir: "already exists" is fine (and this targets py2 too,
+    # which has no exist_ok); a truly unusable outdir fails on first open.
+    try: os.makedirs(args.outdir)
+    except OSError: pass
+
+    comm = lglaf.autodetect_device()
+    with closing(comm):
+        lglaf.try_hello(comm)
+        dump_partitions(comm, args.outdir, args.max_size)
+
+if __name__ == '__main__':
+    main()