@@ -1,106 +1,96 @@
 #!/usr/bin/env python
 #
-# Copyright 2018 Espressif Systems (Shanghai) PTE LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# SPDX-FileCopyrightText: 2018-2022 Espressif Systems (Shanghai) CO LTD
+# SPDX-License-Identifier: Apache-2.0
 #
 
 from __future__ import print_function
 
 import argparse
+import csv
 import distutils.dir_util
 import os
 import sys
-from io import open
 
 from future.moves.itertools import zip_longest
 
 try:
-    sys.path.insert(0, os.getenv('IDF_PATH') + '/components/nvs_flash/nvs_partition_generator/')
+    idf_path = os.environ['IDF_PATH']
+    sys.path.insert(0, idf_path + '/components/nvs_flash/nvs_partition_generator/')
     import nvs_partition_gen
 except Exception as e:
     print(e)
     sys.exit('Please check IDF_PATH')
 
 
-def verify_values_exist(input_values_file, values_file_data, key_count_in_values_file, line_no=1):
+def verify_values_exist(input_values_file, keys_in_values_file):
     """ Verify all keys have corresponding values in values file
     """
-    if len(values_file_data) != key_count_in_values_file:
-        raise SystemExit('\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n'
-                         % (str(input_values_file), str(line_no)))
+    with open(input_values_file, 'r') as values_file:
+        values_file_reader = csv.reader(values_file, delimiter=',')
+        next(values_file_reader)
+
+        for line_num, line in enumerate(values_file_reader, start=2):
+            key_count_in_values_file = len(keys_in_values_file)
+            if len(line) != key_count_in_values_file:
+                raise SystemExit('\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n'
+                                 % (str(input_values_file), str(line_num)))
 
 
-def verify_keys_exist(values_file_keys, config_file_data):
+def verify_keys_exist(values_file_keys, input_config_file):
     """ Verify all keys from config file are present in values file
     """
     keys_missing = []
 
-    for line_no, config_data in enumerate(config_file_data,1):
-        if not isinstance(config_data, str):
-            config_data = config_data.encode('utf-8')
-        config_data_line = config_data.strip().split(',')
-        if 'namespace' not in config_data_line:
-            if values_file_keys:
-                if config_data_line[0] == values_file_keys[0]:
-                    del values_file_keys[0]
+    with open(input_config_file,'r') as config_file:
+        config_file_reader = csv.reader(config_file, delimiter=',')
+
+        for line_num, line in enumerate(config_file_reader, start=1):
+            if 'namespace' not in line:
+                if values_file_keys:
+                    if line[0] == values_file_keys[0]:
+                        del values_file_keys[0]
+                    else:
+                        keys_missing.append([line_num, line[0]])
                 else:
-                    keys_missing.append([config_data_line[0], line_no])
-            else:
-                keys_missing.append([config_data_line[0], line_no])
+                    keys_missing.append([line_num, line[0]])
 
-    if keys_missing:
-        for key, line_no in keys_missing:
-            print('Key:`', str(key), '` at line no:', str(line_no),
-                  ' in config file is not found in values file.')
-        raise SystemExit(1)
+        if keys_missing:
+            for line_num, key in keys_missing:
+                print('Key:`', str(key), '` at line no:', str(line_num),
+                      ' in config file is not found in values file.')
+            raise SystemExit(1)
 
 
-def verify_datatype_encoding(input_config_file, config_file_data):
+def verify_datatype_encoding(input_config_file):
     """ Verify datatype and encodings from config file is valid
     """
-    valid_encodings = ['string', 'binary', 'hex2bin','u8', 'i8', 'u16', 'u32', 'i32','base64']
-    valid_datatypes = ['file','data','namespace']
-    line_no = 0
-
-    for data in config_file_data:
-        line_no += 1
-        if not isinstance(data, str):
-            data = data.encode('utf-8')
-        line = data.strip().split(',')
-        if line[1] not in valid_datatypes:
-            raise SystemExit('Error: config file: %s has invalid datatype at line no:%s\n'
-                             % (str(input_config_file), str(line_no)))
-        if 'namespace' not in line:
-            if line[2] not in valid_encodings:
-                raise SystemExit('Error: config file: %s has invalid encoding at line no:%s\n'
-                                 % (str(input_config_file), str(line_no)))
-
-
-def verify_file_data_count(cfg_file_data, keys_repeat):
+    valid_encodings = {'string', 'binary', 'hex2bin','u8', 'i8', 'u16', 'u32', 'i32','base64'}
+    valid_datatypes = {'file','data','namespace'}
+
+    with open(input_config_file,'r') as config_file:
+        config_file_reader = csv.reader(config_file, delimiter=',')
+
+        for line_num, line in enumerate(config_file_reader, start=1):
+            if line[1] not in valid_datatypes:
+                raise SystemExit('Error: config file: %s has invalid datatype at line no:%s\n`'
+                                 % (str(input_config_file), str(line_num)))
+            if 'namespace' not in line and line[2] not in valid_encodings:
+                raise SystemExit('Error: config file: %s has invalid encoding at line no:%s\n`'
+                                 % (str(input_config_file), str(line_num)))
+
+
+def verify_file_data_count(input_config_file, keys_repeat):
     """ Verify count of data on each line in config file is equal to 3
     (as format must be: <key,type and encoding>)
     """
-    line_no = 0
+    with open(input_config_file, 'r') as config_file:
+        config_file_reader = csv.reader(config_file, delimiter=',')
 
-    for data in cfg_file_data:
-        line_no += 1
-        if not isinstance(data, str):
-            data = data.encode('utf-8')
-        line = data.strip().split(',')
-        if len(line) != 3 and line[0] not in keys_repeat:
-            raise SystemExit('Error: data missing in config file at line no:%s <format needed:key,type,encoding>\n'
-                             % str(line_no))
+        for line_num, line in enumerate(config_file_reader, start=1):
+            if len(line) != 3 and line[0] not in keys_repeat:
+                raise SystemExit('Error: data missing in config file at line no:%s <format needed:key,type,encoding>\n'
+                                 % str(line_num))
 
 
 def verify_data_in_file(input_config_file, input_values_file, config_file_keys, keys_in_values_file, keys_repeat):
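
Note: the helpers rewritten above hand tokenizing over to csv.reader and track line numbers with enumerate() instead of readline()/strip().split(','). A minimal, self-contained sketch of that pattern (the file name and fields below are invented for illustration, not taken from this script):

    import csv

    # Check that every data row has as many fields as the header row.
    with open('values.csv', 'r') as f:                    # hypothetical input file
        reader = csv.reader(f, delimiter=',')
        header = next(reader)                             # e.g. ['id', 'serial_no']
        for line_num, row in enumerate(reader, start=2):  # data rows start at line 2
            if len(row) != len(header):
                raise SystemExit('line %d has %d fields, expected %d'
                                 % (line_num, len(row), len(header)))
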
@@ -111,36 +101,16 @@ def verify_data_in_file(input_config_file, input_values_file, config_file_keys,
     Verify each key has corresponding value in values file
     """
     try:
-        values_file_keys = []
-        values_file_line = None
+        verify_file_data_count(input_config_file, keys_repeat)
+
+        verify_datatype_encoding(input_config_file)
 
         # Get keys from values file present in config files
         values_file_keys = get_keys(keys_in_values_file, config_file_keys)
 
-        with open(input_config_file, 'r', newline='\n') as cfg_file:
-            cfg_file_data = cfg_file.readlines()
-            verify_file_data_count(cfg_file_data, keys_repeat)
-            verify_datatype_encoding(input_config_file, cfg_file_data)
-            verify_keys_exist(values_file_keys, cfg_file_data)
+        verify_keys_exist(values_file_keys, input_config_file)
 
-        with open(input_values_file, 'r', newline='\n') as values_file:
-            key_count_in_values_file = len(keys_in_values_file)
-            lineno = 0
-            # Read first keys(header) line
-            values_file_data = values_file.readline()
-            lineno += 1
-            while values_file_data:
-                # Read values line
-                values_file_line = values_file.readline()
-                if not isinstance(values_file_line, str):
-                    values_file_line = values_file_line.encode('utf-8')
-
-                values_file_data = values_file_line.strip().split(',')
-
-                lineno += 1
-                if len(values_file_data) == 1 and '' in values_file_data:
-                    break
-                verify_values_exist(input_values_file, values_file_data, key_count_in_values_file, line_no=lineno)
+        verify_values_exist(input_values_file, keys_in_values_file)
 
     except Exception as err:
         print(err)
@@ -150,11 +120,7 @@ def verify_data_in_file(input_config_file, input_values_file, config_file_keys,
 def get_keys(keys_in_values_file, config_file_keys):
     """ Get keys from values file present in config file
     """
-    values_file_keys = []
-    for key in keys_in_values_file:
-        if key in config_file_keys:
-            values_file_keys.append(key)
-
+    values_file_keys = [key for key in keys_in_values_file if key in config_file_keys]
     return values_file_keys
 
 
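
Note: the list comprehension introduced in get_keys() is a direct equivalent of the removed loop; it preserves the order of keys_in_values_file while keeping only keys that also appear in config_file_keys. For example (invented data):

    keys_in_values_file = ['id', 'cert', 'serial_no']
    config_file_keys = ['serial_no', 'cert']
    values_file_keys = [key for key in keys_in_values_file if key in config_file_keys]
    # -> ['cert', 'serial_no']  (values-file order, not config-file order)
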
@@ -164,35 +130,31 @@ def add_config_data_per_namespace(input_config_file):
     config_data_to_write = []
     config_data_per_namespace = []
 
-    with open(input_config_file, 'r', newline='\n') as cfg_file:
-        config_data = cfg_file.readlines()
-
-    # `config_data_per_namespace` is added to `config_data_to_write` list after reading next namespace
-    for data in config_data:
-        if not isinstance(data, str):
-            data = data.encode('utf-8')
-        cfg_data = data.strip().split(',')
-        if 'REPEAT' in cfg_data:
-            cfg_data.remove('REPEAT')
-        if 'namespace' in cfg_data:
-            if config_data_per_namespace:
-                config_data_to_write.append(config_data_per_namespace)
-                config_data_per_namespace = []
-                config_data_per_namespace.append(cfg_data)
+    with open(input_config_file,'r') as csv_config_file:
+        config_file_reader = csv.reader(csv_config_file, delimiter=',')
+
+        # `config_data_per_namespace` is added to `config_data_to_write` list after reading next namespace
+        for config_data in config_file_reader:
+            if 'REPEAT' in config_data:
+                config_data.remove('REPEAT')
+            if 'namespace' in config_data:
+                if config_data_per_namespace:
+                    config_data_to_write.append(config_data_per_namespace)
+                    config_data_per_namespace = []
+                    config_data_per_namespace.append(config_data)
+                else:
+                    config_data_per_namespace.append(config_data)
             else:
-                config_data_per_namespace.append(cfg_data)
-        else:
-            config_data_per_namespace.append(cfg_data)
+                config_data_per_namespace.append(config_data)
 
-    # `config_data_per_namespace` is added to `config_data_to_write` list as EOF is reached
-    if (not config_data_to_write) or (config_data_to_write and config_data_per_namespace):
-        config_data_to_write.append(config_data_per_namespace)
+        # `config_data_per_namespace` is added to `config_data_to_write` list as EOF is reached
+        if (not config_data_to_write) or (config_data_to_write and config_data_per_namespace):
+            config_data_to_write.append(config_data_per_namespace)
 
     return config_data_to_write
 
 
-def get_fileid_val(file_identifier, keys_in_config_file, keys_in_values_file,
-                   values_data_line, key_value_data, fileid_value):
+def get_fileid_val(file_identifier, key_value_data, fileid_value):
     """ Get file identifier value
     """
     file_id_found = False
@@ -213,36 +175,26 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file):
     """
     header = ['key', 'type', 'encoding', 'value']
     data_to_write = []
-    newline = u'\n'
-
-    target_csv_file = open(output_csv_file, 'w', newline=None)
-
-    line_to_write = u','.join(header)
-    target_csv_file.write(line_to_write)
-    target_csv_file.write(newline)
-    for namespace_config_data in config_data_to_write:
-        for data in namespace_config_data:
-            data_to_write = data[:]
-            if 'namespace' in data:
-                data_to_write.append('')
-                line_to_write = u','.join(data_to_write)
-                target_csv_file.write(line_to_write)
-                target_csv_file.write(newline)
-            else:
-                key = data[0]
-                while key not in key_value_pair[0]:
-                    del key_value_pair[0]
-                if key in key_value_pair[0]:
-                    value = key_value_pair[0][1]
-                    data_to_write.append(value)
-                    del key_value_pair[0]
-                line_to_write = u','.join(data_to_write)
-                target_csv_file.write(line_to_write)
-                target_csv_file.write(newline)
-
-    # Set index to start of file
-    target_csv_file.seek(0)
-    target_csv_file.close()
+
+    with open(output_csv_file, 'w', newline='') as target_csv_file:
+        output_file_writer = csv.writer(target_csv_file, delimiter=',')
+        output_file_writer.writerow(header)
+
+        for namespace_config_data in config_data_to_write:
+            for data in namespace_config_data:
+                data_to_write = data[:]
+                if 'namespace' in data:
+                    data_to_write.append('')
+                    output_file_writer.writerow(data_to_write)
+                else:
+                    key = data[0]
+                    while key not in key_value_pair[0]:
+                        del key_value_pair[0]
+                    if key in key_value_pair[0]:
+                        value = key_value_pair[0][1]
+                        data_to_write.append(value)
+                        del key_value_pair[0]
+                    output_file_writer.writerow(data_to_write)
 
 
 def create_dir(filetype, output_dir_path):
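
Note: add_data_to_file() now emits rows through csv.writer rather than joining strings by hand. The output file is opened with newline='', which the csv module documentation recommends so that the writer's own line endings are not doubled by the platform's newline translation. A small sketch under those assumptions (file name and rows are illustrative only):

    import csv

    rows = [['key', 'type', 'encoding', 'value'],        # header used by the generator
            ['serial_no', 'data', 'string', 'SN-001']]   # made-up sample row

    with open('single.csv', 'w', newline='') as f:       # newline='' matters for csv.writer
        writer = csv.writer(f, delimiter=',')
        for row in rows:
            writer.writerow(row)
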
@@ -256,72 +208,46 @@ def create_dir(filetype, output_dir_path):
 
 
 def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename):
-    key_val_pair = []
-    key_repeated = []
-    line = None
-    newline = u'\n'
-    with open(csv_file, 'r', newline=None) as read_from, open(target_filename,'w', newline=None) as write_to:
-        headers = read_from.readline()
-        values = read_from.readline()
-        write_to.write(headers)
-        write_to.write(values)
-        if not isinstance(values, str):
-            values = values.encode('utf-8')
-        values = values.strip().split(',')
-        total_keys_values = list(zip_longest(keys, values))
+    with open(csv_file, 'r') as read_from, open(target_filename,'w', newline='') as write_to:
+        csv_file_reader = csv.reader(read_from, delimiter=',')
+        headers = next(csv_file_reader)
+        values = next(csv_file_reader)
+        csv_file_writer = csv.writer(write_to, delimiter=',')
+        csv_file_writer.writerow(headers)
+        csv_file_writer.writerow(values)
 
         # read new data, add value if key has repeat tag, write to new file
-        line = read_from.readline()
-        if not isinstance(line, str):
-            line = line.encode('utf-8')
-        row = line.strip().split(',')
-        while row:
-            index = -1
+        for row in csv_file_reader:
             key_val_new = list(zip_longest(keys, row))
-            key_val_pair = total_keys_values[:]
+            key_val_pair = list(zip_longest(keys, values))
             key_repeated = total_keys_repeat[:]
+            index = 0
             while key_val_new and key_repeated:
-                index = index + 1
                 # if key has repeat tag, get its corresponding value, write to file
                 if key_val_new[0][0] == key_repeated[0]:
-                    val = key_val_pair[0][1]
-                    row[index] = val
+                    row[index] = key_val_pair[0][1]
                     del key_repeated[0]
                 del key_val_new[0]
                 del key_val_pair[0]
-
-            line_to_write = u','.join(row)
-            write_to.write(line_to_write)
-            write_to.write(newline)
-
-            # Read next line
-            line = read_from.readline()
-            if not isinstance(line, str):
-                line = line.encode('utf-8')
-            row = line.strip().split(',')
-            if len(row) == 1 and '' in row:
-                break
+                index += 1
+            csv_file_writer.writerow(row)
 
     return target_filename
 
 
-def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys_repeat, is_encr=False):
+def create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=False):
     file_identifier_value = '0'
     csv_str = 'csv'
     bin_str = 'bin'
-    line = None
     set_output_keyfile = False
 
     # Add config data per namespace to `config_data_to_write` list
     config_data_to_write = add_config_data_per_namespace(args.conf)
 
     try:
-        with open(args.values, 'r', newline=None) as csv_values_file:
-            # first line must be keys in file
-            line = csv_values_file.readline()
-            if not isinstance(line, str):
-                line = line.encode('utf-8')
-            keys = line.strip().split(',')
+        with open(args.values, 'r') as csv_values_file:
+            values_file_reader = csv.reader(csv_values_file, delimiter=',')
+            keys = next(values_file_reader)
 
         filename, file_ext = os.path.splitext(args.values)
         target_filename = filename + '_created' + file_ext
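
Note: set_repeat_value() pairs the header keys with each data row via zip_longest, so a row that is shorter than the header still produces (key, None) pairs instead of being silently truncated. A tiny sketch of that pairing (data invented; the script gets zip_longest from future.moves.itertools for Python 2 compatibility, stdlib itertools is used here):

    from itertools import zip_longest

    keys = ['id', 'cert', 'serial_no']
    row = ['1', 'Y2VydA==']              # deliberately one field short
    print(list(zip_longest(keys, row)))
    # [('id', '1'), ('cert', 'Y2VydA=='), ('serial_no', None)]
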
@@ -330,91 +256,77 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys
         else:
             target_values_file = args.values
 
-        csv_values_file = open(target_values_file, 'r', newline=None)
-
-        # Read header line
-        csv_values_file.readline()
+        with open(target_values_file, 'r') as csv_values_file:
+            values_file_reader = csv.reader(csv_values_file, delimiter=',')
+            next(values_file_reader)
 
-        # Create new directory(if doesn't exist) to store csv file generated
-        output_csv_target_dir = create_dir(csv_str, args.outdir)
-        # Create new directory(if doesn't exist) to store bin file generated
-        output_bin_target_dir = create_dir(bin_str, args.outdir)
-        if args.keygen:
-            set_output_keyfile = True
+            # Create new directory(if doesn't exist) to store csv file generated
+            output_csv_target_dir = create_dir(csv_str, args.outdir)
+            # Create new directory(if doesn't exist) to store bin file generated
+            output_bin_target_dir = create_dir(bin_str, args.outdir)
+            if args.keygen:
+                set_output_keyfile = True
 
-        line = csv_values_file.readline()
-        if not isinstance(line, str):
-            line = line.encode('utf-8')
-        values_data_line = line.strip().split(',')
+            for values_data_line in values_file_reader:
+                key_value_data = list(zip_longest(keys_in_values_file, values_data_line))
 
-        while values_data_line:
-            key_value_data = list(zip_longest(keys_in_values_file, values_data_line))
+                # Get file identifier value from values file
+                file_identifier_value = get_fileid_val(args.fileid, key_value_data, file_identifier_value)
 
-            # Get file identifier value from values file
-            file_identifier_value = get_fileid_val(args.fileid, keys_in_config_file,
-                                                   keys_in_values_file, values_data_line, key_value_data,
-                                                   file_identifier_value)
+                key_value_pair = key_value_data[:]
 
-            key_value_pair = key_value_data[:]
+                # Verify if output csv file does not exist
+                csv_filename = args.prefix + '-' + file_identifier_value + '.' + csv_str
+                output_csv_file = output_csv_target_dir + csv_filename
+                if os.path.isfile(output_csv_file):
+                    raise SystemExit('Target csv file: %s already exists.`' % output_csv_file)
 
-            # Verify if output csv file does not exist
-            csv_filename = args.prefix + '-' + file_identifier_value + '.' + csv_str
-            output_csv_file = output_csv_target_dir + csv_filename
-            if os.path.isfile(output_csv_file):
-                raise SystemExit('Target csv file: %s already exists.`' % output_csv_file)
+                # Add values corresponding to each key to csv intermediate file
+                add_data_to_file(config_data_to_write, key_value_pair, output_csv_file)
+                print('\nCreated CSV file: ===>', output_csv_file)
 
-            # Add values corresponding to each key to csv intermediate file
-            add_data_to_file(config_data_to_write, key_value_pair, output_csv_file)
-            print('\nCreated CSV file: ===>', output_csv_file)
+                # Verify if output bin file does not exist
+                bin_filename = args.prefix + '-' + file_identifier_value + '.' + bin_str
+                output_bin_file = output_bin_target_dir + bin_filename
+                if os.path.isfile(output_bin_file):
+                    raise SystemExit('Target binary file: %s already exists.`' % output_bin_file)
 
-            # Verify if output bin file does not exist
-            bin_filename = args.prefix + '-' + file_identifier_value + '.' + bin_str
-            output_bin_file = output_bin_target_dir + bin_filename
-            if os.path.isfile(output_bin_file):
-                raise SystemExit('Target binary file: %s already exists.`' % output_bin_file)
+                args.input = output_csv_file
+                args.output = os.path.join(bin_str, bin_filename)
+                if set_output_keyfile:
+                    args.keyfile = 'keys-' + args.prefix + '-' + file_identifier_value
 
-            args.input = output_csv_file
-            args.output = os.path.join(bin_str, bin_filename)
-            if set_output_keyfile:
-                args.keyfile = 'keys-' + args.prefix + '-' + file_identifier_value
-
-            if is_encr:
-                nvs_partition_gen.encrypt(args)
-            else:
-                nvs_partition_gen.generate(args)
-
-            # Read next line
-            line = csv_values_file.readline()
-            if not isinstance(line, str):
-                line = line.encode('utf-8')
-            values_data_line = line.strip().split(',')
-            if len(values_data_line) == 1 and '' in values_data_line:
-                break
+                if is_encr:
+                    nvs_partition_gen.encrypt(args)
+                else:
+                    nvs_partition_gen.generate(args)
 
-        print('\nFiles generated in %s ...' % args.outdir)
+            print('\nFiles generated in %s ...' % args.outdir)
 
     except Exception as e:
         print(e)
         exit(1)
-    finally:
-        csv_values_file.close()
 
 
-def verify_empty_lines_exist(file_name, input_file_data):
-    for data in input_file_data:
-        if not isinstance(data, str):
-            data = data.encode('utf-8')
-        cfg_data = data.strip().split(',')
+def verify_empty_lines_exist(args, input_file):
+    input_file_reader = csv.reader(input_file, delimiter=',')
+    for file_data in input_file_reader:
+        for data in file_data:
+            if len(data.strip()) == 0:
+                raise SystemExit('Error: config file: %s cannot have empty lines. ' % args.conf)
+            else:
+                break
+        if not file_data:
+            raise SystemExit('Error: config file: %s cannot have empty lines.' % args.conf)
 
-        if len(cfg_data) == 1 and '' in cfg_data:
-            raise SystemExit('Error: file: %s cannot have empty lines. ' % file_name)
+    input_file.seek(0)
+    return input_file_reader
 
 
 def verify_file_format(args):
     keys_in_config_file = []
     keys_in_values_file = []
     keys_repeat = []
-    file_data_keys = None
 
     # Verify config file is not empty
     if os.stat(args.conf).st_size == 0:
@@ -425,41 +337,25 @@ def verify_file_format(args):
         raise SystemExit('Error: values file: %s is empty.' % args.values)
 
     # Verify config file does not have empty lines
-    with open(args.conf, 'r', newline='\n') as csv_config_file:
+    with open(args.conf, 'r') as csv_config_file:
         try:
-            file_data = csv_config_file.readlines()
-            verify_empty_lines_exist(args.conf, file_data)
-
-            csv_config_file.seek(0)
+            config_file_reader = verify_empty_lines_exist(args, csv_config_file)
             # Extract keys from config file
-            for data in file_data:
-                if not isinstance(data, str):
-                    data = data.encode('utf-8')
-                line_data = data.strip().split(',')
-                if 'namespace' not in line_data:
-                    keys_in_config_file.append(line_data[0])
-                if 'REPEAT' in line_data:
-                    keys_repeat.append(line_data[0])
+            for config_data in config_file_reader:
+                if 'namespace' not in config_data:
+                    keys_in_config_file.append(config_data[0])
+                if 'REPEAT' in config_data:
+                    keys_repeat.append(config_data[0])
+
         except Exception as e:
             print(e)
 
     # Verify values file does not have empty lines
-    with open(args.values, 'r', newline='\n') as csv_values_file:
+    with open(args.values, 'r') as csv_values_file:
         try:
-            # Extract keys from values file (first line of file)
-            file_data = [csv_values_file.readline()]
-
-            file_data_keys = file_data[0]
-            if not isinstance(file_data_keys, str):
-                file_data_keys = file_data_keys.encode('utf-8')
-
-            keys_in_values_file = file_data_keys.strip().split(',')
-
-            while file_data:
-                verify_empty_lines_exist(args.values, file_data)
-                file_data = [csv_values_file.readline()]
-                if '' in file_data:
-                    break
+            values_file_reader = verify_empty_lines_exist(args, csv_values_file)
+            # Extract keys from values file
+            keys_in_values_file = next(values_file_reader)
 
         except Exception as e:
             print(e)
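
Note: verify_empty_lines_exist() consumes the reader once to look for blank fields, rewinds the underlying file with seek(0), and returns the same csv.reader object; since csv.reader pulls lines lazily from the file handle, the caller can iterate it again from the top. A minimal sketch of that behaviour (file name is hypothetical):

    import csv

    with open('config.csv', 'r') as f:
        reader = csv.reader(f, delimiter=',')
        first_pass = list(reader)    # exhausts the reader
        f.seek(0)                    # rewind the file object
        second_pass = list(reader)   # the same reader yields the rows again
        assert first_pass == second_pass
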
@@ -475,11 +371,6 @@ def verify_file_format(args):
 
 
 def generate(args):
-    keys_in_config_file = []
-    keys_in_values_file = []
-    keys_repeat = []
-    encryption_enabled = False
-
     args.outdir = os.path.join(args.outdir, '')
     # Verify input config and values file format
     keys_in_config_file, keys_in_values_file, keys_repeat = verify_file_format(args)
@@ -488,13 +379,12 @@ def generate(args):
     verify_data_in_file(args.conf, args.values, keys_in_config_file,
                         keys_in_values_file, keys_repeat)
 
+    encryption_enabled = False
     if (args.keygen or args.inputkey):
         encryption_enabled = True
         print('\nGenerating encrypted NVS binary images...')
-
     # Create intermediate csv file
-    create_intermediate_csv(args, keys_in_config_file, keys_in_values_file,
-                            keys_repeat, is_encr=encryption_enabled)
+    create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=encryption_enabled)
 
 
 def generate_key(args):