  1. #!/usr/bin/env python
  2. #
  3. # SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
  4. # SPDX-License-Identifier: Apache-2.0
from __future__ import division

import argparse
import hashlib
import json
import os
import struct
from functools import partial
from types import TracebackType
from typing import Dict, List, Optional, Type

from future.utils import iteritems
  14. def round_up_int_div(n: int, d: int) -> int:
  15. # equivalent to math.ceil(n / d)
  16. return (n + d - 1) // d
  17. class UF2Writer(object):
  18. # The UF2 format is described here: https://github.com/microsoft/uf2
  19. UF2_BLOCK_SIZE = 512
  20. UF2_DATA_SIZE = 476 # max value of CHUNK_SIZE reduced by optional parts. Currently, MD5_PART only.
  21. UF2_MD5_PART_SIZE = 24
  22. UF2_FIRST_MAGIC = 0x0A324655
  23. UF2_SECOND_MAGIC = 0x9E5D5157
  24. UF2_FINAL_MAGIC = 0x0AB16F30
  25. UF2_FLAG_FAMILYID_PRESENT = 0x00002000
  26. UF2_FLAG_MD5_PRESENT = 0x00004000
  27. def __init__(self, chip_id: int, output_file: os.PathLike, chunk_size: int) -> None:
  28. self.chip_id = chip_id
  29. self.CHUNK_SIZE = self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE if chunk_size is None else chunk_size
  30. self.f = open(output_file, 'wb')
  31. def __enter__(self) -> 'UF2Writer':
  32. return self
  33. def __exit__(self, exc_type: str, exc_val: int, exc_tb: List) -> None:
  34. if self.f:
  35. self.f.close()
  36. @staticmethod
  37. def _to_uint32(num: int) -> bytes:
  38. return struct.pack('<I', num)
  39. def _write_block(self, addr: int, chunk: bytes, len_chunk: int, block_no: int, blocks: int) -> None:
  40. assert len_chunk > 0
  41. assert len_chunk <= self.CHUNK_SIZE
  42. assert block_no < blocks
  43. block = self._to_uint32(self.UF2_FIRST_MAGIC)
  44. block += self._to_uint32(self.UF2_SECOND_MAGIC)
  45. block += self._to_uint32(self.UF2_FLAG_FAMILYID_PRESENT | self.UF2_FLAG_MD5_PRESENT)
  46. block += self._to_uint32(addr)
  47. block += self._to_uint32(len_chunk)
  48. block += self._to_uint32(block_no)
  49. block += self._to_uint32(blocks)
  50. block += self._to_uint32(self.chip_id)
  51. block += chunk
  52. md5_part = self._to_uint32(addr)
  53. md5_part += self._to_uint32(len_chunk)
  54. md5_part += hashlib.md5(chunk).digest()
  55. assert(len(md5_part) == self.UF2_MD5_PART_SIZE)
  56. block += md5_part
  57. block += b'\x00' * (self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE - len_chunk)
  58. block += self._to_uint32(self.UF2_FINAL_MAGIC)
  59. assert len(block) == self.UF2_BLOCK_SIZE
  60. self.f.write(block)
  61. def add_file(self, addr: int, f_path: os.PathLike) -> None:
  62. blocks = round_up_int_div(os.path.getsize(f_path), self.CHUNK_SIZE)
  63. with open(f_path, 'rb') as fin:
  64. a = addr
  65. for i, chunk in enumerate(iter(partial(fin.read, self.CHUNK_SIZE), b'')):
  66. len_chunk = len(chunk)
  67. self._write_block(a, chunk, len_chunk, i, blocks)
  68. a += len_chunk
  69. def action_write(args: Dict) -> None:
  70. with UF2Writer(args['chip_id'], args['output_file'], args['chunk_size']) as writer:
  71. for addr, f in args['files']:
  72. print('Adding {} at {:#x}'.format(f, addr))
  73. writer.add_file(addr, f)
  74. print('"{}" has been written.'.format(args['output_file']))
  75. def main() -> None:
  76. parser = argparse.ArgumentParser()
  77. def four_byte_aligned(integer: int) -> bool:
  78. return integer & 3 == 0
  79. def parse_chunk_size(string: str) -> int:
  80. num = int(string, 0)
  81. if not four_byte_aligned(num):
  82. raise argparse.ArgumentTypeError('Chunk size should be a 4-byte aligned number')
  83. return num
  84. def parse_chip_id(string: str) -> int:
  85. num = int(string, 16)
  86. if num < 0 or num > 0xFFFFFFFF:
  87. raise argparse.ArgumentTypeError('Chip ID should be a 4-byte unsigned integer')
  88. return num
  89. # Provision to add "info" command
  90. subparsers = parser.add_subparsers(dest='command')
  91. write_parser = subparsers.add_parser('write')
  92. write_parser.add_argument('-o', '--output-file',
  93. help='Filename for storing the output UF2 image',
  94. required=True)
  95. write_parser.add_argument('--chip-id',
  96. required=True,
  97. type=parse_chip_id,
  98. help='Hexa-decimal chip identificator')
  99. write_parser.add_argument('--chunk-size',
  100. required=False,
  101. type=parse_chunk_size,
  102. default=None,
  103. help='Specify the used data part of the 512 byte UF2 block. A common value is 256. By '
  104. 'default the largest possible value will be used.')
  105. write_parser.add_argument('--json',
  106. help='Optional file for loading "flash_files" dictionary with <address> <file> items')
  107. write_parser.add_argument('--bin',
  108. help='Use only a subset of binaries from the JSON file, e.g. "partition_table '
  109. 'bootloader app"',
  110. nargs='*')
  111. write_parser.add_argument('files',
  112. metavar='<address> <file>', help='Add <file> at <address>',
  113. nargs='*')
  114. args = parser.parse_args()
  115. def check_file(file_name: str) -> str:
  116. if not os.path.isfile(file_name):
  117. raise RuntimeError('{} is not a regular file!'.format(file_name))
  118. return file_name
  119. def parse_addr(string: str) -> int:
  120. num = int(string, 0)
  121. if not four_byte_aligned(num):
  122. raise RuntimeError('{} is not a 4-byte aligned valid address'.format(string))
  123. return num
  124. files = []
  125. if args.files:
  126. files += [(parse_addr(addr), check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])]
  127. if args.json:
  128. json_dir = os.path.dirname(os.path.abspath(args.json))
  129. def process_json_file(path: str) -> str:
  130. '''
  131. The input path is relative to json_dir. This function makes it relative to the current working
  132. directory.
  133. '''
  134. return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir))
  135. with open(args.json) as f:
  136. json_content = json.load(f)
  137. if args.bin:
  138. try:
  139. bin_selection = [json_content[b] for b in args.bin]
  140. flash_dic = dict((x['offset'], x['file']) for x in bin_selection)
  141. except KeyError:
  142. print('Invalid binary was selected.')
  143. valid = [k if all(x in v for x in ('offset', 'file')) else None for k, v in iteritems(json_content)]
  144. print('Valid ones:', ' '.join(x for x in valid if x))
  145. exit(1)
  146. else:
  147. flash_dic = json_content['flash_files']
  148. files += [(parse_addr(addr), process_json_file(f_name)) for addr, f_name in iteritems(flash_dic)]
  149. files = sorted([(addr, f_name) for addr, f_name in iteritems(dict(files))],
  150. key=lambda x: x[0]) # remove possible duplicates and sort based on the address
  151. cmd_args = {'output_file': args.output_file,
  152. 'files': files,
  153. 'chip_id': args.chip_id,
  154. 'chunk_size': args.chunk_size,
  155. }
  156. {'write': action_write
  157. }[args.command](cmd_args)
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()