#!/usr/bin/env python
#
# SPDX-FileCopyrightText: 2018-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
#
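# Overview (summary inferred from the code below; the CSV snippets are
# illustrative and use hypothetical key names):
#
# This utility expands a master "config" CSV describing the NVS layout and a
# "values" CSV holding one data row per device into one intermediate CSV and
# one NVS partition binary per row, using esp_idf_nvs_partition_gen.
#
# config CSV, one <key,type,encoding> line per entry (type 'namespace' starts
# a new namespace; an optional trailing REPEAT tag marks a repeated value):
#   app,namespace,
#   serial_no,data,string
#   firmware_ver,data,string,REPEAT
#
# values CSV, a header row of keys followed by one row per device:
#   serial_no,firmware_ver
#   SN-001,v1.0
#   SN-002,
#
# Keys tagged REPEAT take the value from the first data row on every following
# row (see set_repeat_value()).
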
import argparse
import csv
import os
from itertools import zip_longest
from pathlib import Path

import esp_idf_nvs_partition_gen.nvs_partition_gen as nvs_partition_gen


def create_temp_files(args):
    """ Create blank-line-stripped working copies of the config and values files
    """
    new_filenames = []
    for filename in [args.conf, args.values]:
        name, ext = os.path.splitext(filename)
        new_filename = name + '_tmp' + ext
        strip_blank_lines(filename, new_filename)
        new_filenames.append(new_filename)
    return new_filenames


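# Note (inferred from the code in this file): the *_tmp working copies are
# written next to the original input files and are not removed afterwards.

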
def strip_blank_lines(input_filename, output_filename):
    """ Copy the input file to the output file, dropping blank lines
    """
    with open(input_filename, 'r') as read_from, open(output_filename, 'w', newline='') as write_to:
        for line in read_from:
            if not line.isspace():
                write_to.write(line)


def verify_values_exist(input_values_file, keys_in_values_file):
    """ Verify that every line in the values file has a value for each key
    """
    with open(input_values_file, 'r') as values_file:
        values_file_reader = csv.reader(values_file, delimiter=',')
        next(values_file_reader)

        for line_num, line in enumerate(values_file_reader, start=2):
            key_count_in_values_file = len(keys_in_values_file)
            if len(line) != key_count_in_values_file:
                raise SystemExit('\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n'
                                 % (str(input_values_file), str(line_num)))


def verify_keys_exist(values_file_keys, input_config_file):
    """ Verify that all keys from the config file are present in the values file
    """
    keys_missing = []

    with open(input_config_file, 'r') as config_file:
        config_file_reader = csv.reader(config_file, delimiter=',')

        for line_num, line in enumerate(config_file_reader, start=1):
            if 'namespace' not in line:
                if values_file_keys:
                    if line[0] == values_file_keys[0]:
                        del values_file_keys[0]
                    else:
                        keys_missing.append([line_num, line[0]])
                else:
                    keys_missing.append([line_num, line[0]])

    if keys_missing:
        for line_num, key in keys_missing:
            print('Key:`', str(key), '` at line no:', str(line_num),
                  ' in config file is not found in values file.')
        raise SystemExit(1)


def verify_datatype_encoding(input_config_file):
    """ Verify that the datatype and encoding fields in the config file are valid
    """
    valid_encodings = {'string', 'binary', 'hex2bin', 'u8', 'i8', 'u16', 'u32', 'i32', 'u64', 'i64', 'base64'}
    valid_datatypes = {'file', 'data', 'namespace'}

    with open(input_config_file, 'r') as config_file:
        config_file_reader = csv.reader(config_file, delimiter=',')

        for line_num, line in enumerate(config_file_reader, start=1):
            if line[1] not in valid_datatypes:
                raise SystemExit('Error: config file: %s has invalid datatype at line no:%s\n'
                                 % (str(input_config_file), str(line_num)))
            if 'namespace' not in line and line[2] not in valid_encodings:
                raise SystemExit('Error: config file: %s has invalid encoding at line no:%s\n'
                                 % (str(input_config_file), str(line_num)))


def verify_file_data_count(input_config_file, keys_repeat):
    """ Verify that each line in the config file has exactly 3 fields
    (required format: <key,type,encoding>)
    """
    with open(input_config_file, 'r') as config_file:
        config_file_reader = csv.reader(config_file, delimiter=',')

        for line_num, line in enumerate(config_file_reader, start=1):
            if len(line) != 3 and line[0] not in keys_repeat:
                raise SystemExit('Error: data missing in config file at line no:%s <format needed:key,type,encoding>\n'
                                 % str(line_num))


def verify_data_in_file(input_config_file, input_values_file, config_file_keys, keys_in_values_file, keys_repeat):
    """ Verify that each line in the config file has exactly 3 fields \
    (required format: <key,type,encoding>),
    that the datatype and encoding fields in the config file are valid,
    that all keys from the config file are present in the values file and \
    that each key has a corresponding value in the values file
    """
    try:
        verify_file_data_count(input_config_file, keys_repeat)

        verify_datatype_encoding(input_config_file)

        # Get keys from values file present in config file
        values_file_keys = get_keys(keys_in_values_file, config_file_keys)

        verify_keys_exist(values_file_keys, input_config_file)

        verify_values_exist(input_values_file, keys_in_values_file)

    except Exception as err:
        print(err)
        exit(1)


def get_keys(keys_in_values_file, config_file_keys):
    """ Get keys from values file present in config file
    """
    values_file_keys = [key for key in keys_in_values_file if key in config_file_keys]

    return values_file_keys


def add_config_data_per_namespace(input_config_file):
    """ Add config data per namespace to `config_data_to_write` list
    """
    config_data_to_write = []
    config_data_per_namespace = []

    with open(input_config_file, 'r') as csv_config_file:
        config_file_reader = csv.reader(csv_config_file, delimiter=',')

        # `config_data_per_namespace` is appended to `config_data_to_write` when the next namespace is read
        for config_data in config_file_reader:
            if 'REPEAT' in config_data:
                config_data.remove('REPEAT')
            if 'namespace' in config_data:
                if config_data_per_namespace:
                    config_data_to_write.append(config_data_per_namespace)
                    config_data_per_namespace = []
                    config_data_per_namespace.append(config_data)
                else:
                    config_data_per_namespace.append(config_data)
            else:
                config_data_per_namespace.append(config_data)

    # `config_data_per_namespace` is appended to `config_data_to_write` when EOF is reached
    if (not config_data_to_write) or (config_data_to_write and config_data_per_namespace):
        config_data_to_write.append(config_data_per_namespace)

    return config_data_to_write


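# Illustrative result (hypothetical keys): for a config file containing
#   app,namespace,
#   serial_no,data,string
# the returned list is [[['app', 'namespace', ''], ['serial_no', 'data', 'string']]],
# i.e. one sub-list of config rows per namespace, with any REPEAT tag stripped.

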
def get_fileid_val(file_identifier, key_value_data, fileid_value):
    """ Get file identifier value
    """
    file_id_found = False

    for key in key_value_data:
        if file_identifier and not file_id_found and file_identifier in key:
            fileid_value = key[1]
            file_id_found = True

    if not file_id_found:
        fileid_value = str(int(fileid_value) + 1)

    return fileid_value


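# Naming note (inferred): when --fileid names a key from the values file, each
# output file is suffixed with that key's value for the row (e.g. a hypothetical
# serial_no column giving <prefix>-SN-001.csv); without --fileid, a running
# counter starting at 1 is used instead.

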
def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file):
    """ Add data to csv target file
    """
    header = ['key', 'type', 'encoding', 'value']
    data_to_write = []

    with open(output_csv_file, 'w', newline='') as target_csv_file:
        output_file_writer = csv.writer(target_csv_file, delimiter=',')
        output_file_writer.writerow(header)

        for namespace_config_data in config_data_to_write:
            for data in namespace_config_data:
                data_to_write = data[:]
                if 'namespace' in data:
                    data_to_write.append('')
                    output_file_writer.writerow(data_to_write)
                else:
                    key = data[0]
                    while key not in key_value_pair[0]:
                        del key_value_pair[0]
                    if key in key_value_pair[0]:
                        value = key_value_pair[0][1]
                        data_to_write.append(value)
                        del key_value_pair[0]
                        output_file_writer.writerow(data_to_write)


def create_dir(filetype, output_dir_path):
    """ Create a new directory (if it doesn't exist) to store the generated file
    """
    output_target_dir = os.path.join(output_dir_path, filetype, '')
    if not os.path.isdir(output_target_dir):
        Path(output_target_dir).mkdir(parents=True)

    return output_target_dir


def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename):
    """ Create a copy of the values file in which keys tagged REPEAT reuse
    the value from the first data row on every subsequent row
    """
    with open(csv_file, 'r') as read_from, open(target_filename, 'w', newline='') as write_to:
        csv_file_reader = csv.reader(read_from, delimiter=',')
        headers = next(csv_file_reader)
        values = next(csv_file_reader)
        csv_file_writer = csv.writer(write_to, delimiter=',')
        csv_file_writer.writerow(headers)
        csv_file_writer.writerow(values)

        # read each remaining row, substitute the value for keys with the REPEAT tag, write to the new file
        for row in csv_file_reader:
            key_val_new = list(zip_longest(keys, row))
            key_val_pair = list(zip_longest(keys, values))
            key_repeated = total_keys_repeat[:]
            index = 0
            while key_val_new and key_repeated:
                # if the key has the REPEAT tag, take its value from the first data row
                if key_val_new[0][0] == key_repeated[0]:
                    row[index] = key_val_pair[0][1]
                    del key_repeated[0]
                del key_val_new[0]
                del key_val_pair[0]
                index += 1
            csv_file_writer.writerow(row)

    return target_filename


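# Illustrative REPEAT expansion (hypothetical keys): with firmware_ver tagged
# REPEAT in the config file, a values file of
#   serial_no,firmware_ver
#   SN-001,v1.0
#   SN-002,
# is rewritten to <values>_created.csv with the second data row as SN-002,v1.0.

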
def create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=False):
    """ Create an intermediate csv file and an NVS partition binary for every data row in the values file
    """
    file_identifier_value = '0'
    # Add config data per namespace to `config_data_to_write` list
    config_data_to_write = add_config_data_per_namespace(args.conf)

    try:
        with open(args.values, 'r') as csv_values_file:
            values_file_reader = csv.reader(csv_values_file, delimiter=',')
            keys = next(values_file_reader)

        filename, file_ext = os.path.splitext(args.values)
        target_filename = filename + '_created' + file_ext
        if keys_repeat:
            target_values_file = set_repeat_value(keys_repeat, keys, args.values, target_filename)
        else:
            target_values_file = args.values

        with open(target_values_file, 'r') as csv_values_file:
            values_file_reader = csv.reader(csv_values_file, delimiter=',')
            next(values_file_reader)

            # Create new directory (if it doesn't exist) to store the generated csv files
            output_csv_target_dir = create_dir('csv', args.outdir)
            # Create new directory (if it doesn't exist) to store the generated bin files
            output_bin_target_dir = create_dir('bin', args.outdir)

            for values_data_line in values_file_reader:
                key_value_data = list(zip_longest(keys_in_values_file, values_data_line))

                # Get file identifier value from values file
                file_identifier_value = get_fileid_val(args.fileid, key_value_data, file_identifier_value)

                key_value_pair = key_value_data[:]

                # Verify that the output csv file does not already exist
                csv_filename = args.prefix + '-' + file_identifier_value + '.' + 'csv'
                output_csv_file = output_csv_target_dir + csv_filename
                if os.path.isfile(output_csv_file):
                    raise SystemExit('Target csv file: %s already exists.' % output_csv_file)

                # Add values corresponding to each key to csv intermediate file
                add_data_to_file(config_data_to_write, key_value_pair, output_csv_file)
                print('\nCreated CSV file: ===>', output_csv_file)

                # Verify that the output bin file does not already exist
                bin_filename = args.prefix + '-' + file_identifier_value + '.' + 'bin'
                output_bin_file = output_bin_target_dir + bin_filename
                if os.path.isfile(output_bin_file):
                    raise SystemExit('Target binary file: %s already exists.' % output_bin_file)

                args.input = output_csv_file
                args.output = os.path.join('bin', bin_filename)
                if args.keygen:
                    args.keyfile = 'keys-' + args.prefix + '-' + file_identifier_value

                if is_encr:
                    nvs_partition_gen.encrypt(args)
                else:
                    nvs_partition_gen.generate(args)

            print('\nFiles generated in %s ...' % args.outdir)

    except Exception as e:
        print(e)
        exit(1)


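# Resulting layout under --outdir (derived from create_dir() and the filename
# construction above; <fileid> is the per-row file identifier value):
#   <outdir>/csv/<prefix>-<fileid>.csv   intermediate per-device csv
#   <outdir>/bin/<prefix>-<fileid>.bin   generated NVS partition binary
# With --keygen, the key file name keys-<prefix>-<fileid> is passed on to
# nvs_partition_gen for each row.

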
def verify_file_format(args):
    """ Verify the format of the input config and values files and collect their keys
    """
    keys_in_config_file = []
    keys_in_values_file = []
    keys_repeat = []

    # Verify files have the csv extension
    conf_name, conf_extension = os.path.splitext(args.conf)
    if conf_extension != '.csv':
        raise SystemExit('Error: config file: %s does not have the .csv extension.' % args.conf)
    values_name, values_extension = os.path.splitext(args.values)
    if values_extension != '.csv':
        raise SystemExit('Error: values file: %s does not have the .csv extension.' % args.values)

    # Verify files are not empty
    if os.stat(args.conf).st_size == 0:
        raise SystemExit('Error: config file: %s is empty.' % args.conf)
    if os.stat(args.values).st_size == 0:
        raise SystemExit('Error: values file: %s is empty.' % args.values)

    # Extract keys from config file
    with open(args.conf, 'r') as config_file:
        config_file_reader = csv.reader(config_file, delimiter=',')
        for config_data in config_file_reader:
            if 'namespace' not in config_data:
                keys_in_config_file.append(config_data[0])
            if 'REPEAT' in config_data:
                keys_repeat.append(config_data[0])

    # Extract keys from values file
    with open(args.values, 'r') as values_file:
        values_file_reader = csv.reader(values_file, delimiter=',')
        keys_in_values_file = next(values_file_reader)

    # Verify the file identifier exists in the values file
    if args.fileid:
        if args.fileid not in keys_in_values_file:
            raise SystemExit('Error: target_file_identifier: %s does not exist in values file.\n' % args.fileid)
    else:
        args.fileid = 1

    return keys_in_config_file, keys_in_values_file, keys_repeat


def generate(args):
    """ Generate per-device intermediate csv files and NVS partition binaries
    """
    # Create work files with no blank lines
    args.conf, args.values = create_temp_files(args)

    # Verify input config and values file format
    keys_in_config_file, keys_in_values_file, keys_repeat = verify_file_format(args)

    # Verify data in the input_config_file and input_values_file
    verify_data_in_file(args.conf, args.values, keys_in_config_file,
                        keys_in_values_file, keys_repeat)

    encryption_enabled = False
    if args.keygen or args.inputkey:
        encryption_enabled = True
        print('\nGenerating encrypted NVS binary images...')
    # Create intermediate csv files and the corresponding binaries
    create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=encryption_enabled)


def generate_key(args):
    nvs_partition_gen.generate_key(args)


def main():
    try:
        parser = argparse.ArgumentParser(description='\nESP Manufacturing Utility', formatter_class=argparse.RawTextHelpFormatter)
        subparser = parser.add_subparsers(title='Commands',
                                          dest='command',
                                          help='\nRun mfg_gen.py {command} -h for additional help\n\n')

        parser_gen = subparser.add_parser('generate',
                                          help='Generate NVS partition',
                                          formatter_class=argparse.RawTextHelpFormatter)
        parser_gen.set_defaults(func=generate)
        parser_gen.add_argument('conf',
                                default=None,
                                help='Path to configuration csv file to parse')
        parser_gen.add_argument('values',
                                default=None,
                                help='Path to values csv file to parse')
        parser_gen.add_argument('prefix',
                                default=None,
                                help='Unique name used as a prefix for each output filename')
        parser_gen.add_argument('size',
                                default=None,
                                help='Size of NVS partition in bytes\
                                \n(must be multiple of 4096)')
        parser_gen.add_argument('--fileid',
                                default=None,
                                help='''Unique file identifier (any key in the values file) \
                                \nused as the filename suffix (Default: numeric value (1,2,3...))''')
        parser_gen.add_argument('--version',
                                choices=[1, 2],
                                default=2,
                                type=int,
                                help='''Set multipage blob version.\
                                \nVersion 1 - Multipage blob support disabled.\
                                \nVersion 2 - Multipage blob support enabled.\
                                \nDefault: Version 2''')
        parser_gen.add_argument('--keygen',
                                action='store_true',
                                help='Generates key for encrypting NVS partition')
        parser_gen.add_argument('--keyfile',
                                default=None,
                                help=argparse.SUPPRESS)
        parser_gen.add_argument('--inputkey',
                                default=None,
                                help='File having key for encrypting NVS partition')
        parser_gen.add_argument('--outdir',
                                default=os.getcwd(),
                                help='Output directory to store files created\
                                \n(Default: current directory)')
        parser_gen.add_argument('--key_protect_hmac',
                                action='store_true',
                                help='''If set, the NVS encryption key protection scheme based on the HMAC\
                                \nperipheral is used; else the default scheme based on Flash Encryption\
                                \nis used''')
        parser_gen.add_argument('--kp_hmac_keygen',
                                action='store_true',
                                help='Generate the HMAC key for the HMAC-based encryption scheme')
        parser_gen.add_argument('--kp_hmac_keyfile',
                                default=None,
                                help='Path to output HMAC key file')
        parser_gen.add_argument('--kp_hmac_inputkey',
                                default=None,
                                help='File having the HMAC key for generating the NVS encryption keys')
        parser_gen.add_argument('--input',
                                default=None,
                                help=argparse.SUPPRESS)
        parser_gen.add_argument('--output',
                                default=None,
                                help=argparse.SUPPRESS)
        parser_gen_key = subparser.add_parser('generate-key',
                                              help='Generate keys for encryption',
                                              formatter_class=argparse.RawTextHelpFormatter)
        parser_gen_key.set_defaults(func=generate_key)
        parser_gen_key.add_argument('--keyfile',
                                    default=None,
                                    help='Path to output encryption keys file')
        parser_gen_key.add_argument('--outdir',
                                    default=os.getcwd(),
                                    help='Output directory to store files created.\
                                    \n(Default: current directory)')
        parser_gen_key.add_argument('--key_protect_hmac',
                                    action='store_true',
                                    help='''If set, the NVS encryption key protection scheme based on the HMAC\
                                    \nperipheral is used; else the default scheme based on Flash Encryption\
                                    \nis used''')
        parser_gen_key.add_argument('--kp_hmac_keygen',
                                    action='store_true',
                                    help='Generate the HMAC key for the HMAC-based encryption scheme')
        parser_gen_key.add_argument('--kp_hmac_keyfile',
                                    default=None,
                                    help='Path to output HMAC key file')
        parser_gen_key.add_argument('--kp_hmac_inputkey',
                                    default=None,
                                    help='File having the HMAC key for generating the NVS encryption keys')

        args = parser.parse_args()
        args.func(args)

    except ValueError as err:
        print(err)
    except Exception as e:
        print(e)


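# Example invocation (hypothetical file names; 12288 = 3 * 4096 satisfies the
# size constraint documented above):
#
#   python mfg_gen.py generate config.csv values.csv Sample 12288
#
# Add --keygen (and/or --inputkey <keyfile>) to produce encrypted partitions.

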
if __name__ == '__main__':
    main()