mkuf2.py

#!/usr/bin/env python
#
# Copyright 2020 Espressif Systems (Shanghai) CO LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import division

import argparse
import hashlib
import json
import os
import struct
from functools import partial

from future.utils import iteritems

try:
    from itertools import izip as zip
except ImportError:
    # Python 3
    pass


def round_up_int_div(n, d):
    # equivalent to math.ceil(n / d)
    return (n + d - 1) // d


class UF2Writer(object):

    # The UF2 format is described here: https://github.com/microsoft/uf2
    UF2_BLOCK_SIZE = 512
    UF2_DATA_SIZE = 476  # max value of CHUNK_SIZE reduced by optional parts. Currently, MD5_PART only.
    UF2_MD5_PART_SIZE = 24
    UF2_FIRST_MAGIC = 0x0A324655
    UF2_SECOND_MAGIC = 0x9E5D5157
    UF2_FINAL_MAGIC = 0x0AB16F30
    UF2_FLAG_FAMILYID_PRESENT = 0x00002000
    UF2_FLAG_MD5_PRESENT = 0x00004000
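
    # Layout of each 512-byte block produced by _write_block below:
    #   32-byte header : FIRST_MAGIC, SECOND_MAGIC, flags, target address, payload length,
    #                    block number, total number of blocks, chip (family) ID
    #   476-byte data  : the payload chunk, followed by the 24-byte MD5 part (address, length,
    #                    MD5 digest of the chunk) and zero padding
    #   4-byte trailer : FINAL_MAGIC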

    def __init__(self, chip_id, output_file, chunk_size):
        self.chip_id = chip_id
        # If no chunk size is given, use the largest payload that still leaves room for the MD5 part
        self.CHUNK_SIZE = self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE if chunk_size is None else chunk_size
        self.f = open(output_file, 'wb')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.f:
            self.f.close()

    @staticmethod
    def _to_uint32(num):
        return struct.pack('<I', num)

    def _write_block(self, addr, chunk, len_chunk, block_no, blocks):
        assert len_chunk > 0
        assert len_chunk <= self.CHUNK_SIZE
        assert block_no < blocks
        # 32-byte header
        block = self._to_uint32(self.UF2_FIRST_MAGIC)
        block += self._to_uint32(self.UF2_SECOND_MAGIC)
        block += self._to_uint32(self.UF2_FLAG_FAMILYID_PRESENT | self.UF2_FLAG_MD5_PRESENT)
        block += self._to_uint32(addr)
        block += self._to_uint32(len_chunk)
        block += self._to_uint32(block_no)
        block += self._to_uint32(blocks)
        block += self._to_uint32(self.chip_id)
        # data area: payload chunk, MD5 part, zero padding
        block += chunk
        md5_part = self._to_uint32(addr)
        md5_part += self._to_uint32(len_chunk)
        md5_part += hashlib.md5(chunk).digest()
        assert len(md5_part) == self.UF2_MD5_PART_SIZE
        block += md5_part
        block += b'\x00' * (self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE - len_chunk)
        block += self._to_uint32(self.UF2_FINAL_MAGIC)
        assert len(block) == self.UF2_BLOCK_SIZE
        self.f.write(block)

    def add_file(self, addr, f_path):
        blocks = round_up_int_div(os.path.getsize(f_path), self.CHUNK_SIZE)
        with open(f_path, 'rb') as fin:
            a = addr
            for i, chunk in enumerate(iter(partial(fin.read, self.CHUNK_SIZE), b'')):
                len_chunk = len(chunk)
                self._write_block(a, chunk, len_chunk, i, blocks)
                a += len_chunk
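

# A minimal read-back sketch (an illustration, not part of this tool; 'out.uf2' is a placeholder
# name): every 512-byte block starts with a 32-byte little-endian header that can be checked
# with struct.unpack:
#
#     with open('out.uf2', 'rb') as f:
#         while True:
#             block = f.read(UF2Writer.UF2_BLOCK_SIZE)
#             if not block:
#                 break
#             magic0, magic1, flags, addr, size, no, total, chip = struct.unpack('<8I', block[:32])
#             assert magic0 == UF2Writer.UF2_FIRST_MAGIC and magic1 == UF2Writer.UF2_SECOND_MAGIC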


def action_write(args):
    with UF2Writer(args['chip_id'], args['output_file'], args['chunk_size']) as writer:
        for addr, f in args['files']:
            print('Adding {} at {:#x}'.format(f, addr))
            writer.add_file(addr, f)
    print('"{}" has been written.'.format(args['output_file']))


def main():
    parser = argparse.ArgumentParser()

    def four_byte_aligned(integer):
        return integer & 3 == 0

    def parse_chunk_size(string):
        num = int(string, 0)
        if not four_byte_aligned(num):
            raise argparse.ArgumentTypeError('Chunk size should be a 4-byte aligned number')
        return num

    def parse_chip_id(string):
        num = int(string, 16)
        if num < 0 or num > 0xFFFFFFFF:
            raise argparse.ArgumentTypeError('Chip ID should be a 4-byte unsigned integer')
        return num

    # Provision to add "info" command
    subparsers = parser.add_subparsers(dest='command')

    write_parser = subparsers.add_parser('write')
    write_parser.add_argument('-o', '--output-file',
                              help='Filename for storing the output UF2 image',
                              required=True)
    write_parser.add_argument('--chip-id',
                              required=True,
                              type=parse_chip_id,
                              help='Hexadecimal chip identifier')
    write_parser.add_argument('--chunk-size',
                              required=False,
                              type=parse_chunk_size,
                              default=None,
                              help='Specify the used data part of the 512 byte UF2 block. A common value is 256. By '
                                   'default the largest possible value will be used.')
    write_parser.add_argument('--json',
                              help='Optional file for loading "flash_files" dictionary with <address> <file> items')
    write_parser.add_argument('--bin',
                              help='Use only a subset of binaries from the JSON file, e.g. "partition_table '
                                   'bootloader app"',
                              nargs='*')
    write_parser.add_argument('files',
                              metavar='<address> <file>', help='Add <file> at <address>',
                              nargs='*')

    args = parser.parse_args()

    def check_file(file_name):
        if not os.path.isfile(file_name):
            raise RuntimeError('{} is not a regular file!'.format(file_name))
        return file_name

    def parse_addr(string):
        num = int(string, 0)
        if not four_byte_aligned(num):
            raise RuntimeError('{} is not a 4-byte aligned valid address'.format(string))
        return num

    files = []
    if args.files:
        files += [(parse_addr(addr), check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])]

    if args.json:
        json_dir = os.path.dirname(os.path.abspath(args.json))

        def process_json_file(path):
            '''
            The input path is relative to json_dir. This function makes it relative to the current working
            directory.
            '''
            return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir))

        with open(args.json) as f:
            json_content = json.load(f)

        if args.bin:
            try:
                bin_selection = [json_content[b] for b in args.bin]
                flash_dic = dict((x['offset'], x['file']) for x in bin_selection)
            except KeyError:
                print('Invalid binary was selected.')
                valid = [k if all(x in v for x in ('offset', 'file')) else None for k, v in iteritems(json_content)]
                print('Valid ones:', ' '.join(x for x in valid if x))
                exit(1)
        else:
            flash_dic = json_content['flash_files']

        files += [(parse_addr(addr), process_json_file(f_name)) for addr, f_name in iteritems(flash_dic)]

    files = sorted([(addr, f_name) for addr, f_name in iteritems(dict(files))],
                   key=lambda x: x[0])  # remove possible duplicates and sort based on the address

    cmd_args = {'output_file': args.output_file,
                'files': files,
                'chip_id': args.chip_id,
                'chunk_size': args.chunk_size,
                }

    {'write': action_write
     }[args.command](cmd_args)


if __name__ == '__main__':
    main()
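
# Example invocations (illustrative only; the file names, addresses and chip ID below are
# placeholders, not values prescribed by this script):
#
#     mkuf2.py write -o flash.uf2 --chip-id 0x1234abcd 0x1000 bootloader.bin 0x10000 app.bin
#
# or, taking the address/file pairs from a JSON file and restricting them to selected entries:
#
#     mkuf2.py write -o flash.uf2 --chip-id 0x1234abcd --json flash_args.json --bin bootloader app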