mfg_util: Update file handling to maintain line endings across various OS
Fixes: https://github.com/espressif/esp-idf/issues/4888
commit 4b63e016c4 (parent 820621687c)
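The change replaces the `csv.reader`/`csv.writer` based file handling with explicit `readline()`/`readlines()`/`write()` calls and passes the `newline` argument to `open()`, so line-ending translation is controlled explicitly instead of depending on the host OS. A minimal sketch of the two `newline` values the patch relies on (the file name is illustrative, not part of the tool):

```python
from io import open  # the same import the script uses; on Python 3 this is the builtin open

# A file written with Windows-style line endings.
with open('demo.csv', 'wb') as f:
    f.write(b'key,type,encoding,value\r\n')

# newline=None (universal newlines): '\r', '\n' and '\r\n' all come back as '\n' on read.
with open('demo.csv', 'r', newline=None) as f:
    assert f.readline() == u'key,type,encoding,value\n'

# newline='\n': no translation, so the '\r' survives -- which is why the patch
# always calls .strip() before .split(',') on lines read this way.
with open('demo.csv', 'r', newline='\n') as f:
    line = f.readline()
    assert line.endswith(u'\r\n')
    assert line.strip().split(',') == [u'key', u'type', u'encoding', u'value']
```

On the write side the patch opens files with `newline=None` and writes `u"\n"` explicitly, which the text layer translates to the platform's `os.linesep`, so generated files keep native endings while reads stay tolerant of any ending.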
@@ -16,11 +16,10 @@
 #
 
 from __future__ import print_function
 from builtins import range
 from future.moves.itertools import zip_longest
 from io import open
 import sys
 import os
 import csv
 import argparse
 import distutils.dir_util
@@ -32,83 +31,74 @@ except Exception as e:
     sys.exit("Please check IDF_PATH")
 
 
-def verify_values_exist(input_values_file, keys_in_values_file):
+def verify_values_exist(input_values_file, values_file_data, key_count_in_values_file, line_no=1):
     """ Verify all keys have corresponding values in values file
     """
-    line_no = 1
-    key_count_in_values_file = len(keys_in_values_file)
-
-    values_file = open(input_values_file, 'r')
-    values_file_reader = csv.reader(values_file, delimiter=',')
-    next(values_file_reader)
-
-    for values_data in values_file_reader:
-        line_no += 1
-        if len(values_data) != key_count_in_values_file:
-            raise SystemExit("\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n"
-                             % (str(input_values_file), str(line_no)))
+    if len(values_file_data) != key_count_in_values_file:
+        raise SystemExit("\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n"
+                         % (str(input_values_file), str(line_no)))
 
 
-def verify_keys_exist(values_file_keys, input_config_file):
+def verify_keys_exist(values_file_keys, config_file_data):
     """ Verify all keys from config file are present in values file
     """
     keys_missing = []
 
-    config_file = open(input_config_file,'r')
-    config_file_reader = csv.reader(config_file, delimiter=',')
-    for line_no, config_data in enumerate(config_file_reader,1):
-        if 'namespace' not in config_data:
+    for line_no, config_data in enumerate(config_file_data,1):
+        if not isinstance(config_data, str):
+            config_data = config_data.encode('utf-8')
+        config_data_line = config_data.strip().split(',')
+        if 'namespace' not in config_data_line:
             if values_file_keys:
-                if config_data[0] == values_file_keys[0]:
+                if config_data_line[0] == values_file_keys[0]:
                     del values_file_keys[0]
                 else:
-                    keys_missing.append([config_data[0], line_no])
+                    keys_missing.append([config_data_line[0], line_no])
             else:
-                keys_missing.append([config_data[0], line_no])
 
    if keys_missing:
        for key, line_no in keys_missing:
            print("Key:`", str(key), "` at line no:", str(line_no),
                  " in config file is not found in values file.")
-        config_file.close()
        raise SystemExit(1)
 
-    config_file.close()
-
 
-def verify_datatype_encoding(input_config_file):
+def verify_datatype_encoding(input_config_file, config_file_data):
     """ Verify datatype and encodings from config file is valid
     """
     valid_encodings = ["string", "binary", "hex2bin","u8", "i8", "u16", "u32", "i32","base64"]
     valid_datatypes = ["file","data","namespace"]
     line_no = 0
 
-    config_file = open(input_config_file,'r')
-    config_file_reader = csv.reader(config_file, delimiter=',')
-    for config_data in config_file_reader:
+    for data in config_file_data:
         line_no += 1
-        if config_data[1] not in valid_datatypes:
-            raise SystemExit("Error: config file: %s has invalid datatype at line no:%s\n`"
+        if not isinstance(data, str):
+            data = data.encode('utf-8')
+        line = data.strip().split(',')
+        if line[1] not in valid_datatypes:
+            raise SystemExit("Error: config file: %s has invalid datatype at line no:%s\n"
                             % (str(input_config_file), str(line_no)))
-        if 'namespace' not in config_data:
-            if config_data[2] not in valid_encodings:
-                raise SystemExit("Error: config file: %s has invalid encoding at line no:%s\n`"
+        if 'namespace' not in line:
+            if line[2] not in valid_encodings:
+                raise SystemExit("Error: config file: %s has invalid encoding at line no:%s\n"
                                 % (str(input_config_file), str(line_no)))
 
 
-def verify_file_data_count(input_config_file, keys_repeat):
+def verify_file_data_count(cfg_file_data, keys_repeat):
     """ Verify count of data on each line in config file is equal to 3
     (as format must be: <key,type and encoding>)
     """
     line_no = 0
-    config_file = open(input_config_file, 'r')
-    config_file_reader = csv.reader(config_file, delimiter=',')
-    for line in config_file_reader:
+
+    for data in cfg_file_data:
         line_no += 1
+        if not isinstance(data, str):
+            data = data.encode('utf-8')
+        line = data.strip().split(',')
         if len(line) != 3 and line[0] not in keys_repeat:
             raise SystemExit("Error: data missing in config file at line no:%s <format needed:key,type,encoding>\n"
                              % str(line_no))
-    config_file.close()
 
 
 def verify_data_in_file(input_config_file, input_values_file, config_file_keys, keys_in_values_file, keys_repeat):
@@ -120,17 +110,35 @@ def verify_data_in_file(input_config_file, input_values_file, config_file_keys,
     """
     try:
         values_file_keys = []
+        values_file_line = None
 
-        verify_file_data_count(input_config_file, keys_repeat)
-
-        verify_datatype_encoding(input_config_file)
-
         # Get keys from values file present in config files
         values_file_keys = get_keys(keys_in_values_file, config_file_keys)
 
-        verify_keys_exist(values_file_keys, input_config_file)
+        with open(input_config_file, 'r', newline='\n') as cfg_file:
+            cfg_file_data = cfg_file.readlines()
+            verify_file_data_count(cfg_file_data, keys_repeat)
+            verify_datatype_encoding(input_config_file, cfg_file_data)
+            verify_keys_exist(values_file_keys, cfg_file_data)
 
-        verify_values_exist(input_values_file, keys_in_values_file)
+        with open(input_values_file, 'r', newline='\n') as values_file:
+            key_count_in_values_file = len(keys_in_values_file)
+            lineno = 0
+            # Read first keys(header) line
+            values_file_data = values_file.readline()
+            lineno += 1
+            while values_file_data:
+                # Read values line
+                values_file_line = values_file.readline()
+                if not isinstance(values_file_line, str):
+                    values_file_line = values_file_line.encode('utf-8')
+
+                values_file_data = values_file_line.strip().split(',')
+
+                lineno += 1
+                if len(values_file_data) == 1 and '' in values_file_data:
+                    break
+                verify_values_exist(input_values_file, values_file_data, key_count_in_values_file, line_no=lineno)
 
     except Exception as err:
         print(err)
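The rewritten verify_data_in_file reads the header once, then walks the values file line by line and stops when a line strips down to [''] (a blank line). A standalone sketch of that loop shape, with an illustrative file name and data:

```python
from io import open

# Illustrative values file: the first line holds the keys, following lines hold values.
with open('sample_values.csv', 'w', newline='\n') as f:
    f.write(u'ssid,password\nmyssid,mypassword\n')

with open('sample_values.csv', 'r', newline='\n') as values_file:
    keys = values_file.readline().strip().split(',')   # header: ['ssid', 'password']
    line = values_file.readline()
    while line:
        row = line.strip().split(',')
        if len(row) == 1 and '' in row:
            break                                       # a blank line splits to [''], treat it as the end
        print(dict(zip(keys, row)))                     # {'ssid': 'myssid', 'password': 'mypassword'}
        line = values_file.readline()
```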
@@ -141,9 +149,9 @@ def get_keys(keys_in_values_file, config_file_keys):
     """ Get keys from values file present in config file
     """
     values_file_keys = []
-    for key in range(len(keys_in_values_file)):
-        if keys_in_values_file[key] in config_file_keys:
-            values_file_keys.append(keys_in_values_file[key])
+    for key in keys_in_values_file:
+        if key in config_file_keys:
+            values_file_keys.append(key)
 
     return values_file_keys
 
@@ -154,29 +162,30 @@ def add_config_data_per_namespace(input_config_file):
     config_data_to_write = []
     config_data_per_namespace = []
 
-    csv_config_file = open(input_config_file,'r')
-    config_file_reader = csv.reader(csv_config_file, delimiter=',')
+    with open(input_config_file, 'r', newline='\n') as cfg_file:
+        config_data = cfg_file.readlines()
 
     # `config_data_per_namespace` is added to `config_data_to_write` list after reading next namespace
-    for config_data in config_file_reader:
-        if 'REPEAT' in config_data:
-            config_data.remove('REPEAT')
-        if 'namespace' in config_data:
+    for data in config_data:
+        if not isinstance(data, str):
+            data = data.encode('utf-8')
+        cfg_data = data.strip().split(',')
+        if 'REPEAT' in cfg_data:
+            cfg_data.remove('REPEAT')
+        if 'namespace' in cfg_data:
             if config_data_per_namespace:
                 config_data_to_write.append(config_data_per_namespace)
                 config_data_per_namespace = []
-                config_data_per_namespace.append(config_data)
+                config_data_per_namespace.append(cfg_data)
             else:
-                config_data_per_namespace.append(config_data)
+                config_data_per_namespace.append(cfg_data)
         else:
-            config_data_per_namespace.append(config_data)
+            config_data_per_namespace.append(cfg_data)
 
     # `config_data_per_namespace` is added to `config_data_to_write` list as EOF is reached
     if (not config_data_to_write) or (config_data_to_write and config_data_per_namespace):
         config_data_to_write.append(config_data_per_namespace)
 
-    csv_config_file.close()
-
     return config_data_to_write
 
 
@@ -202,18 +211,21 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file):
     """
     header = ['key', 'type', 'encoding', 'value']
     data_to_write = []
+    newline = u"\n"
 
-    target_csv_file = open(output_csv_file, 'w')
-
-    output_file_writer = csv.writer(target_csv_file, delimiter=',')
-    output_file_writer.writerow(header)
+    target_csv_file = open(output_csv_file, 'w', newline=None)
 
+    line_to_write = u",".join(header)
+    target_csv_file.write(line_to_write)
+    target_csv_file.write(newline)
     for namespace_config_data in config_data_to_write:
         for data in namespace_config_data:
             data_to_write = data[:]
             if 'namespace' in data:
                 data_to_write.append('')
-                output_file_writer.writerow(data_to_write)
+                line_to_write = u",".join(data_to_write)
+                target_csv_file.write(line_to_write)
+                target_csv_file.write(newline)
             else:
                 key = data[0]
                 while key not in key_value_pair[0]:
@@ -222,7 +234,9 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file):
                 value = key_value_pair[0][1]
                 data_to_write.append(value)
                 del key_value_pair[0]
-                output_file_writer.writerow(data_to_write)
+                line_to_write = u",".join(data_to_write)
+                target_csv_file.write(line_to_write)
+                target_csv_file.write(newline)
 
     # Set index to start of file
     target_csv_file.seek(0)
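Here the csv.writer calls are replaced by u",".join(...) plus an explicit write of the newline string. One practical difference worth noting: csv.writer's default dialect terminates every row with '\r\n', while the manual join writes exactly the terminator you hand it (Python 3 shown for brevity; a plain join also does no quoting, which only matters if a field itself contains a comma):

```python
import csv
import io

row = [u'ssid', u'data', u'string', u'myssid']

# csv.writer: the default (excel) dialect ends each row with '\r\n'.
buf = io.StringIO()
csv.writer(buf, delimiter=',').writerow(row)
assert buf.getvalue() == u'ssid,data,string,myssid\r\n'

# Manual join + explicit terminator, as the patch now does: the row ends with exactly '\n'.
buf = io.StringIO()
buf.write(u",".join(row))
buf.write(u"\n")
assert buf.getvalue() == u'ssid,data,string,myssid\n'
```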
@@ -242,17 +256,24 @@ def create_dir(filetype, output_dir_path):
 def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename):
     key_val_pair = []
     key_repeated = []
-    with open(csv_file, 'r') as read_from, open(target_filename,'w') as write_to:
-        csv_file_reader = csv.reader(read_from, delimiter=',')
-        headers = next(csv_file_reader)
-        values = next(csv_file_reader)
-        csv_file_writer = csv.writer(write_to, delimiter=',')
-        csv_file_writer.writerow(headers)
-        csv_file_writer.writerow(values)
+    line = None
+    newline = u"\n"
+    with open(csv_file, 'r', newline=None) as read_from, open(target_filename,'w', newline=None) as write_to:
+        headers = read_from.readline()
+        values = read_from.readline()
+        write_to.write(headers)
+        write_to.write(values)
+        if not isinstance(values, str):
+            values = values.encode('utf-8')
+        values = values.strip().split(',')
         total_keys_values = list(zip_longest(keys, values))
 
         # read new data, add value if key has repeat tag, write to new file
-        for row in csv_file_reader:
+        line = read_from.readline()
+        if not isinstance(line, str):
+            line = line.encode('utf-8')
+        row = line.strip().split(',')
+        while row:
             index = -1
             key_val_new = list(zip_longest(keys, row))
             key_val_pair = total_keys_values[:]
@@ -266,7 +287,18 @@ def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename):
                     del key_repeated[0]
                 del key_val_new[0]
                 del key_val_pair[0]
-            csv_file_writer.writerow(row)
+
+            line_to_write = u",".join(row)
+            write_to.write(line_to_write)
+            write_to.write(newline)
+
+            # Read next line
+            line = read_from.readline()
+            if not isinstance(line, str):
+                line = line.encode('utf-8')
+            row = line.strip().split(',')
+            if len(row) == 1 and '' in row:
+                break
 
     return target_filename
 
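Both the old and the new set_repeat_value pair the key list with each row through zip_longest, which pads the shorter sequence with None rather than silently dropping trailing keys. A short illustration with made-up keys (the script imports it from future.moves.itertools for Python 2 compatibility; on Python 3 it is itertools.zip_longest):

```python
from itertools import zip_longest

keys = ['ssid', 'password', 'device_id']
row = ['myssid', 'mypassword']            # one value missing at the end

# The shorter side is padded with None, so every key still shows up in the pairing.
print(list(zip_longest(keys, row)))
# [('ssid', 'myssid'), ('password', 'mypassword'), ('device_id', None)]
```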
@@ -275,15 +307,19 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys
     file_identifier_value = '0'
     csv_str = 'csv'
     bin_str = 'bin'
+    line = None
     set_output_keyfile = False
 
     # Add config data per namespace to `config_data_to_write` list
     config_data_to_write = add_config_data_per_namespace(args.conf)
 
     try:
-        with open(args.values, 'r') as csv_values_file:
-            values_file_reader = csv.reader(csv_values_file, delimiter=',')
-            keys = next(values_file_reader)
+        with open(args.values, 'r', newline=None) as csv_values_file:
+            # first line must be keys in file
+            line = csv_values_file.readline()
+            if not isinstance(line, str):
+                line = line.encode('utf-8')
+            keys = line.strip().split(',')
 
         filename, file_ext = os.path.splitext(args.values)
         target_filename = filename + "_created" + file_ext
@@ -292,10 +328,10 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys
         else:
             target_values_file = args.values
 
-        csv_values_file = open(target_values_file, 'r')
+        csv_values_file = open(target_values_file, 'r', newline=None)
 
-        values_file_reader = csv.reader(csv_values_file, delimiter=',')
-        next(values_file_reader)
+        # Read header line
+        csv_values_file.readline()
 
         # Create new directory(if doesn't exist) to store csv file generated
         output_csv_target_dir = create_dir(csv_str, args.outdir)
@@ -304,7 +340,12 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys
         if args.keygen:
             set_output_keyfile = True
 
-        for values_data_line in values_file_reader:
+        line = csv_values_file.readline()
+        if not isinstance(line, str):
+            line = line.encode('utf-8')
+        values_data_line = line.strip().split(',')
+
+        while values_data_line:
             key_value_data = list(zip_longest(keys_in_values_file, values_data_line))
 
             # Get file identifier value from values file
@@ -340,6 +381,14 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys
             else:
                 nvs_partition_gen.generate(args)
 
+            # Read next line
+            line = csv_values_file.readline()
+            if not isinstance(line, str):
+                line = line.encode('utf-8')
+            values_data_line = line.strip().split(',')
+            if len(values_data_line) == 1 and '' in values_data_line:
+                break
+
         print("\nFiles generated in %s ..." % args.outdir)
 
     except Exception as e:
@@ -349,25 +398,21 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys
         csv_values_file.close()
 
 
-def verify_empty_lines_exist(args, input_file):
-    input_file_reader = csv.reader(input_file, delimiter=',')
-    for file_data in input_file_reader:
-        for data in file_data:
-            if len(data.strip()) == 0:
-                raise SystemExit("Error: config file: %s cannot have empty lines. " % args.conf)
-            else:
-                break
-        if not file_data:
-            raise SystemExit("Error: config file: %s cannot have empty lines." % args.conf)
+def verify_empty_lines_exist(file_name, input_file_data):
+    for data in input_file_data:
+        if not isinstance(data, str):
+            data = data.encode('utf-8')
+        cfg_data = data.strip().split(',')
 
-    input_file.seek(0)
-    return input_file_reader
+        if len(cfg_data) == 1 and '' in cfg_data:
+            raise SystemExit("Error: file: %s cannot have empty lines. " % file_name)
 
 
 def verify_file_format(args):
     keys_in_config_file = []
     keys_in_values_file = []
     keys_repeat = []
+    file_data_keys = None
 
     # Verify config file is not empty
     if os.stat(args.conf).st_size == 0:
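verify_empty_lines_exist now takes a file name plus the already-read lines instead of an open handle, so each caller decides how the file is opened and can reuse the same lines for parsing afterwards (as verify_file_format does below with readlines() and seek(0)). A self-contained sketch of that calling pattern; the helper body is copied from the hunk above, while the file name and contents are illustrative:

```python
from io import open


def verify_empty_lines_exist(file_name, input_file_data):
    # Same check as the refactored helper: a blank line strips and splits down to [''].
    for data in input_file_data:
        if not isinstance(data, str):
            data = data.encode('utf-8')
        cfg_data = data.strip().split(',')

        if len(cfg_data) == 1 and '' in cfg_data:
            raise SystemExit("Error: file: %s cannot have empty lines. " % file_name)


# Illustrative config file: key,type,encoding per line.
with open('sample_config.csv', 'w', newline='\n') as f:
    f.write(u'app,namespace,\nssid,data,string\npassword,data,string\n')

# Read once, validate, then reuse the very same lines for key extraction.
with open('sample_config.csv', 'r', newline='\n') as cfg_file:
    file_data = cfg_file.readlines()
    verify_empty_lines_exist('sample_config.csv', file_data)
    keys = [line.strip().split(',')[0] for line in file_data if 'namespace' not in line]
    print(keys)  # ['ssid', 'password']
```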
@@ -378,25 +423,41 @@ def verify_file_format(args):
         raise SystemExit("Error: values file: %s is empty." % args.values)
 
     # Verify config file does not have empty lines
-    with open(args.conf, 'r') as csv_config_file:
+    with open(args.conf, 'r', newline='\n') as csv_config_file:
         try:
-            config_file_reader = verify_empty_lines_exist(args, csv_config_file)
-
-            # Extract keys from config file
-            for config_data in config_file_reader:
-                if 'namespace' not in config_data:
-                    keys_in_config_file.append(config_data[0])
-                if 'REPEAT' in config_data:
-                    keys_repeat.append(config_data[0])
+            file_data = csv_config_file.readlines()
+            verify_empty_lines_exist(args.conf, file_data)
+
+            csv_config_file.seek(0)
+            # Extract keys from config file
+            for data in file_data:
+                if not isinstance(data, str):
+                    data = data.encode('utf-8')
+                line_data = data.strip().split(',')
+                if 'namespace' not in line_data:
+                    keys_in_config_file.append(line_data[0])
+                if 'REPEAT' in line_data:
+                    keys_repeat.append(line_data[0])
         except Exception as e:
             print(e)
 
     # Verify values file does not have empty lines
-    with open(args.values, 'r') as csv_values_file:
+    with open(args.values, 'r', newline='\n') as csv_values_file:
         try:
-            values_file_reader = verify_empty_lines_exist(args, csv_values_file)
-            # Extract keys from values file
-            keys_in_values_file = next(values_file_reader)
-
+            # Extract keys from values file (first line of file)
+            file_data = [csv_values_file.readline()]
+
+            file_data_keys = file_data[0]
+            if not isinstance(file_data_keys, str):
+                file_data_keys = file_data_keys.encode('utf-8')
+
+            keys_in_values_file = file_data_keys.strip().split(',')
+
+            while file_data:
+                verify_empty_lines_exist(args.values, file_data)
+                file_data = [csv_values_file.readline()]
+                if '' in file_data:
+                    break
+
         except Exception as e:
             print(e)
@@ -428,6 +489,7 @@ def generate(args):
     if (args.keygen or args.inputkey):
         encryption_enabled = True
         print("\nGenerating encrypted NVS binary images...")
 
     # Create intermediate csv file
     create_intermediate_csv(args, keys_in_config_file, keys_in_values_file,
                             keys_repeat, is_encr=encryption_enabled)