This commit is contained in:
刘可亮
2024-06-04 19:00:30 +08:00
parent 990c72f5be
commit 0a13af6a1d
1668 changed files with 342810 additions and 37726 deletions

View File

@@ -17,7 +17,7 @@ if PKG_USING_SQLITE
config PKG_SQLITE_SQL_MAX_LEN
int "SQL statements max length"
default 1024
default 512
config PKG_SQLITE_DB_NAME_MAX_LEN
int "Database filename(fullpath) length"
@@ -26,8 +26,11 @@ if PKG_USING_SQLITE
config PKG_SQLITE_DAO_EXAMPLE
bool "Enable example"
default y
default n
config PKG_SQLITE_RECIPE_EXAMPLE
bool "Enable recipeData test example"
default n
choice
prompt "version"
help

View File

@@ -58,6 +58,7 @@ function hmm()
_hline "ctarget|ct" "" "cd to target board directory."
_hline "godir|gd" "[keyword]" "Go/jump to selected directory."
_hline "list" "" "List all SDK defconfig."
_hline "list_module" "" "List all enabled modules."
_hline "i" "" "Get current project's information."
_hline "buildall" "" "Build all the *defconfig in target/configs"
_hline "rebuildall" "" "Clean and build all the *defconfig in target/configs"
@@ -249,14 +250,14 @@ build_one_solution()
BUILD_CNT=`expr $BUILD_CNT + 1`
SUCCESS=`grep "Luban-Lite is built successfully" $LOG_FILE -wc`
WAR_CNT=`grep "warning:" $LOG_FILE -i | grep "is shorter than expected" -vc`
WAR_CNT=`grep -E "warning:|pinmux conflicts" $LOG_FILE -i | grep "is shorter than expected" -vc`
if [ $SUCCESS -ne 0 ]; then
printf "%2s. %-40s is OK. Warning: %s \n" \
$BUILD_CNT $SOLUTION_NAME $WAR_CNT >> $RESULT_FILE
if [ $WAR_CNT -gt 0 ]; then
echo [$SOLUTION_NAME]: >> $WARNING_FILE
grep "warning:" $LOG_FILE -i | grep "is shorter than expected" -v >> $WARNING_FILE
grep -E "warning:|pinmux conflicts" $LOG_FILE -i | grep "is shorter than expected" -v >> $WARNING_FILE
echo >> $WARNING_FILE
fi
return 0
@@ -536,6 +537,11 @@ function aicupg()
scons --aicupg -C $SDK_PRJ_TOP_DIR
}
# List all modules enabled in the current defconfig by delegating to the
# SCons '--list-module' command in the SDK project top directory.
function list_module()
{
    # Quote the path so an SDK checkout in a directory containing spaces
    # does not break word-splitting.
    scons --list-module -C "$SDK_PRJ_TOP_DIR"
}
function _lunch_check()
{
[[ -z ${SDK_PRJ_TOP_DIR} ]] && {

View File

@@ -764,6 +764,109 @@ def list_mem_cmd(aic_root, prj_chip, prj_board, prj_kernel, prj_app, prj_defconf
exit(0)
# cmd-option: list module
def list_module_cmd(aic_root, prj_chip, prj_board, prj_kernel, prj_app, prj_defconfig):
AddOption('--list-module', dest='list_module', action='store_true',
default=False, help='list all opened module info')
list_module = GetOption('list_module')
if list_module:
prj_out_dir = os.path.join(aic_root, 'target/configs/' + prj_defconfig)
prj_name = prj_defconfig.replace('_defconfig', '')
print('Load modules information from ' + prj_name)
list_tit = ["Module", "Version", "Device"]
list_item = []
with open(prj_out_dir, 'r') as file:
# get opened modules name and version
pattern = 'CONFIG_AIC_.*_DRV_V.*=y'
for f in file:
match = re.search(pattern, f)
if match:
matched_str = f.split('_DRV_V')
mod = matched_str[0].split('CONFIG_AIC_')[1]
ver = matched_str[1].split('=y\n')[0]
version = 'V' + ver[0] + '.' + ver[1]
list_item.append([mod, version])
file.seek(0)
# get opened device
pattern = r'CONFIG_AIC_USING_[^_]+\d+=y'
list_items_dev = [[] for _ in range(len(list_item))]
items_mod = [x[0] for x in list_item]
for f in file:
match = re.search(pattern, f)
if not match:
continue
dev_str = f.split('CONFIG_AIC_USING_')[1].split('=y\n')[0]
for i in range(len(items_mod)):
match = re.search(items_mod[i], dev_str)
if not match:
continue
list_items_dev[i].append(dev_str)
for i, item in enumerate(list_items_dev):
list_item[i] += (item,)
# Sort in ascending order based on the first value of each element
sorted_list_items = sorted(list_item, key=lambda x: x[0])
file.close()
items_mod = [x[0] for x in sorted_list_items]
items_ver = [x[1] for x in sorted_list_items]
items_dev = [x[2] for x in sorted_list_items]
len_mod = max(len(s) for s in items_mod)
len_ver = max(len(s) for s in items_ver)
# step: Interval between each title
step = 1
len_total = 0
min_len_item = max(len(s) for s in list_tit)
max_len_dev_strs = 0
len_dev_strs = 0
num_dev = 0
# get the length of the longest bit array in a two-dimensional array
# and the longest element of the longest bit array
for i in range(len(items_dev)):
num_dev = len(items_dev[i])
for dev_str in items_dev[i]:
len_dev_strs += len(dev_str) + step
if (len_dev_strs > max_len_dev_strs):
max_len_dev_strs = len_dev_strs
max_num_dev = num_dev
len_dev_strs = 0
len_dev = max_len_dev_strs - step
list_len = [len_mod, len_ver, len_dev]
num_tit = len(list_len)
for i in range(num_tit):
if list_len[i] < min_len_item:
list_len[i] = min_len_item
len_total += list_len[i]
print("=" * (len_total + (num_tit - 1) * step))
for i in range(num_tit):
print("{:<{}}".format(list_tit[i], list_len[i])),
print("")
for i in range(num_tit):
print("-" * list_len[i]),
for i in range(len(items_mod)):
print("")
print("{:<{}}{:<{}}".format(items_mod[i], list_len[0] + step,
items_ver[i], list_len[1])),
for item in items_dev[i]:
if len(item):
print("" + item),
print("")
print("=" * (len_total + (num_tit - 1) * step))
exit(0)
# cmd-option: distclean
def distclean_cmd(aic_root, prj_chip, prj_board, prj_kernel, prj_app, prj_defconfig):
AddOption('--distclean', dest='distclean', action='store_true',
@@ -1510,6 +1613,9 @@ def get_prj_config(aic_root):
# cmd-option: list mem
list_mem_cmd(aic_root, PRJ_CHIP, PRJ_BOARD, PRJ_KERNEL, PRJ_APP, PRJ_DEFCONFIG_NAME)
# cmd-option: list module
list_module_cmd(aic_root, PRJ_CHIP, PRJ_BOARD, PRJ_KERNEL, PRJ_APP, PRJ_DEFCONFIG_NAME)
# cmd-option: list size
list_size_cmd(aic_root, PRJ_CHIP, PRJ_BOARD, PRJ_KERNEL, PRJ_APP, PRJ_DEFCONFIG_NAME)

View File

@@ -8,76 +8,295 @@ import os
import sys
import argparse
def calc_size(maplines, outfile, outvar, sectname):
linecnt = len(maplines)
section = " ." + sectname
if linecnt != 0:
# Go to Memory Configuration
idx = 0
while True:
if "Memory Configuration" in maplines[idx]:
break
idx = idx + 1
line_for_file = ""
size_stat = {}
TYPE_SECTION_TOTAL = 0
TYPE_SECTION_FILE = 1
TYPE_SECTION_FUNC = 2
while idx < linecnt:
if maplines[idx].startswith(section):
line_for_file = maplines[idx].strip()
if " " not in line_for_file:
idx += 1
line_for_file = line_for_file + " " + maplines[idx].strip()
items = line_for_file.split()
# line_for_file = sectname + "," + ",".join(items)
line_for_file = "{},{},{},{}".format(sectname, items[0],
int(items[2], 16), items[3])
outvar.write(line_for_file + "\n")
fname = items[3]
varsize = int(items[2], 16)
if fname not in size_stat:
size_stat[fname] = varsize
def parse_column_info(line, line_next, col_fmt):
info = {}
info['type'] = -1
info['break'] = False
info['line'] = line.replace('\r', '').replace('\n', '')
line_size = len(line)
line_nsize = len(line_next)
if line_size < col_fmt[3] and line_nsize < col_fmt[3]:
return info
if line_size >= col_fmt[3]:
# All information in one line, not break in two lines
sect = line[col_fmt[0]:col_fmt[1]].strip()
addr = line[col_fmt[1]:col_fmt[2]].strip().lower()
size = line[col_fmt[2]:col_fmt[3]].strip().lower()
attr = line[col_fmt[3]:].strip()
if addr.startswith('0x') and size.startswith('0x'):
if len(sect) <= 0:
# unknown line
return info
else:
info['sect'] = sect
info['addr'] = addr
info['size'] = size
info['attr'] = attr
if line[0].isspace():
info['type'] = TYPE_SECTION_FILE
else:
size_stat[fname] = size_stat[fname] + varsize
idx += 1
info['type'] = TYPE_SECTION_TOTAL
elif len(sect) == 0 and len(size) == 0 and addr.startswith('0x'):
info['sect'] = sect
info['addr'] = addr
info['size'] = size
info['attr'] = attr
info['type'] = TYPE_SECTION_FUNC
else:
# Information maybe break in two lines, need to check
items = line.strip().split()
if len(items) > 1:
# Unknown line
return info
sect2 = line_next[col_fmt[0]:col_fmt[1]].strip()
if len(sect2) > 0:
# Not one line break into two case
return info
sect = line.strip()
addr = line_next[col_fmt[1]:col_fmt[2]].strip().lower()
size = line_next[col_fmt[2]:col_fmt[3]].strip().lower()
attr = line_next[col_fmt[3]:].strip()
if addr.startswith('0x') and size.startswith('0x'):
info['sect'] = sect
info['addr'] = addr
info['size'] = size
info['attr'] = attr
info['break'] = True
info['line'] += line_next.replace('\r', '').replace('\n', '')
if line[0].isspace():
info['type'] = TYPE_SECTION_FILE
else:
info['type'] = TYPE_SECTION_TOTAL
return info
total_size = 0
for linkedfile in size_stat:
outfile.write("{},{},{}\n".format(sectname, size_stat[linkedfile], linkedfile))
total_size += size_stat[linkedfile]
return total_size
def get_linked_size(maplines):
linecnt = len(maplines)
sect_total = False
if linecnt == 0:
return None
if __name__ == "__main__":
# Goto Linker script and memory map
idx = 0
while True:
if idx >= linecnt:
print('Map file not include Linker script and memory map')
return None
if 'Linker script and memory map' in maplines[idx]:
break
idx = idx + 1
# Find first section
line = ''
c1_addr = 'not found'
c2_len = 'not found'
while True:
if idx >= linecnt:
print('Cannot find the section start in map file')
return None
line = maplines[idx]
if line[0].isspace() is False:
cols = line.split()
if len(cols) < 3:
# Not section start
idx = idx + 1
continue
c1_addr = cols[1].lower()
c2_len = cols[2].lower()
if c1_addr.startswith('0x') and c2_len.startswith('0x'):
# Found the first section
break
idx = idx + 1
# Column format
c0_start = 0
c1_start = line.find(c1_addr)
if c1_start <= 0:
print('Parse column format error')
c2_start = c1_start + len(c1_addr)
line_left = line[c2_start:]
c3_start = line_left.find(c2_len)
if c3_start <= 0:
print('Parse column format error2')
c3_start += c2_start
c3_start += len(c2_len)
cols = (c0_start, c1_start, c2_start, c3_start)
cur_sect = 'unknown'
stat = {}
while True:
if (idx + 1) >= linecnt:
break
line = maplines[idx]
line_next = maplines[idx + 1]
info = parse_column_info(line, line_next, cols)
if info['type'] == TYPE_SECTION_TOTAL:
cur_sect = info['sect'].strip()
if cur_sect not in stat:
stat[cur_sect] = {}
stat[cur_sect]['addr'] = info['addr']
stat[cur_sect]['size'] = int(info['size'], 16)
stat[cur_sect]['detail'] = {}
else:
print('It make me confused, one section should not begin twice.')
sys.exit(1)
if info['break']:
idx += 1
elif info['type'] == TYPE_SECTION_FILE:
linkedfile = info['attr'].strip()
if len(linkedfile) == 0:
linkedfile = 'unknown/' + info['sect'].strip()
else:
abspath = os.path.abspath('/' + linkedfile)
linkedfile = abspath[1:]
newsize = int(info['size'], 16)
if linkedfile not in stat[cur_sect]['detail']:
stat[cur_sect]['detail'][linkedfile] = {}
stat[cur_sect]['detail'][linkedfile]['size'] = newsize
stat[cur_sect]['detail'][linkedfile]['isfile'] = True
else:
stat[cur_sect]['detail'][linkedfile]['size'] += newsize
# stat by directory
dirname = os.path.dirname(linkedfile)
while len(dirname) > 0:
if dirname not in stat[cur_sect]['detail']:
stat[cur_sect]['detail'][dirname] = {}
stat[cur_sect]['detail'][dirname]['size'] = newsize
stat[cur_sect]['detail'][dirname]['isfile'] = False
else:
stat[cur_sect]['detail'][dirname]['size'] += newsize
dirname = os.path.dirname(dirname)
if dirname == '/':
break
if info['break']:
idx += 1
else:
if info['break']:
idx += 1
idx += 1
return stat
def check_is_skip_section(s):
    """Return True for linker sections excluded from size statistics
    (debug/annotation sections that occupy no runtime space)."""
    # str.startswith accepts a tuple of prefixes, so one call covers
    # every skip prefix.
    return s.startswith(('.note', '.debug', '.comment'))
def gen_csv_summary(csv_sm, stat):
    """Write a per-section size summary CSV.

    Args:
        csv_sm: output CSV file path.
        stat: mapping of section name -> {'size': int, 'detail': {...}};
              the optional 'unknown' detail entry holds space that could
              not be attributed to any linked file.

    Output columns: Section,Size,Unused plus a closing Total row.
    """
    total = 0
    total_u = 0
    # 'with' guarantees the file is closed even if a write raises.
    with open(csv_sm, 'w+') as f_by_sm:
        f_by_sm.write('Section,Size,Unused\n')
        for s in stat:
            # Skip debug/annotation sections and empty sections.
            if check_is_skip_section(s) or stat[s]['size'] <= 0:
                continue
            unused = 0
            if 'unknown' in stat[s]['detail']:
                unused = stat[s]['detail']['unknown']['size']
            f_by_sm.write('{},{},{}\n'.format(s, stat[s]['size'], unused))
            total += stat[s]['size']
            total_u += unused
        f_by_sm.write('Total,{},{}\n'.format(total, total_u))
def gen_csv_detail(filename, stat, dir_only):
    """Write a per-folder/per-file size breakdown CSV.

    Args:
        filename: output CSV path.
        stat: mapping section -> {'size': int,
                                  'detail': {path: {'size': int,
                                                    'isfile': bool}}}.
        dir_only: when True, rows for individual files are suppressed so
                  only directory-level aggregates are written.
    """
    # Sections that actually contribute to the image, in stat order; the
    # same filter was previously re-evaluated in every loop below.
    sects = [s for s in stat
             if not check_is_skip_section(s) and stat[s]['size'] > 0]
    with open(filename, 'w+') as f_detail:
        # Header row: one column per contributing section.
        f_detail.write('Folder/File,Summary')
        for s in sects:
            f_detail.write(',{}'.format(s))
        f_detail.write('\n')
        # Total row.
        f_detail.write('Total,{}'.format(sum(stat[s]['size'] for s in sects)))
        for s in sects:
            f_detail.write(',{}'.format(stat[s]['size']))
        f_detail.write('\n')
        # Collect every folder/file mentioned by any contributing section
        # (plain loop instead of a side-effect list comprehension).
        items = []
        for s in sects:
            for key in stat[s]['detail']:
                if key not in items:
                    items.append(key)
        items.sort()
        for i in items:
            sumval = 0
            vals = []
            # NOTE: as in the original, 'isfile' keeps the flag from the
            # last section that mentions this item.
            isfile = False
            for s in sects:
                itemsize = 0
                if i in stat[s]['detail']:
                    itemsize = stat[s]['detail'][i]['size']
                    isfile = stat[s]['detail'][i]['isfile']
                sumval += itemsize
                vals.append(itemsize)
            if dir_only and isfile:
                # Skip plain files when only directory totals are wanted.
                continue
            f_detail.write('{}'.format(i))
            f_detail.write(',{}'.format(sumval))
            for v in vals:
                f_detail.write(',{}'.format(v))
            f_detail.write('\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--map", type=str,
help="elf's map file, e.g. d21x.map")
parser.add_argument('-m', '--map', type=str,
help='elf\'s map file, e.g. d21x.map')
args = parser.parse_args()
if args.map == None:
if args.map is None:
print('Error, option --map is required.')
print('e.g.:')
print(' {} -m d21x.map'.format(sys.argv[0]))
sys.exit(1)
mapfile = args.map
lines = []
with open(mapfile, "r+") as fm:
with open(mapfile, 'r+') as fm:
lines = fm.readlines()
fvar_name = "{}.var.csv".format(mapfile.replace('map', 'size'))
ff_name = "{}.csv".format(mapfile.replace('map', 'size'))
outvar = open(fvar_name, "w+")
outfile = open(ff_name, "w+")
stat = get_linked_size(lines)
outfile.write("Section,Size,File\n")
outvar.write("Section,Symbol,Size,File\n")
section = ['text', 'rodata','data', 'bss']
total_size = 0
for s in section:
total_size += calc_size(lines, outfile, outvar, s)
outvar.write("all,,{},Total size\n".format(total_size))
outfile.write("all,{},Total size\n".format(total_size))
outvar.close()
outfile.close()
csv_sm = '{}.csv'.format(mapfile.replace('map', 'summary'))
gen_csv_summary(csv_sm, stat)
csv_detail = '{}.csv'.format(mapfile.replace('map', 'detail'))
gen_csv_detail(csv_detail, stat, False)
csv_detail = '{}.csv'.format(mapfile.replace('map', 'dironly'))
gen_csv_detail(csv_detail, stat, True)

View File

@@ -23,6 +23,7 @@ def mkimage_get_resource_size(srcdir, cluster_siz):
total_size += size
return total_size
def mkimage_get_part_size(outfile):
imgname = os.path.basename(outfile)
partlist = os.path.join(os.path.dirname(outfile), 'partition_file_list.h')
@@ -33,13 +34,14 @@ def mkimage_get_part_size(outfile):
lines = f.readlines()
for ln in lines:
name = ln.split(',')[1].replace('"', '').replace('*', '')
if imgname == re.sub(".sparse", "", name) or imgname in re.sub(".sparse", "", name):
if imgname == re.sub(".sparse", "", name):
size = int(ln.split(',')[2])
return size
print('Image {} is not used in any partition'.format(imgname))
print('please check your project\'s image_cfg.json');
print('please check your project\'s image_cfg.json')
return size
def run_cmd(cmdstr):
# print(cmdstr)
cmd = cmdstr.split(' ')
@@ -47,8 +49,9 @@ def run_cmd(cmdstr):
if ret.returncode != 0:
sys.exit(1)
def gen_fatfs(tooldir, srcdir, outimg, imgsiz, sector_siz, cluster):
sector_cnt = int(imgsiz / int(sector_siz))
sector_cnt = int(imgsiz / sector_siz)
if platform.system() == 'Linux':
truncate = 'truncate -s {} {}'.format(imgsiz, outimg)
run_cmd(truncate)
@@ -82,6 +85,7 @@ def gen_fatfs(tooldir, srcdir, outimg, imgsiz, sector_siz, cluster):
img2simg = '{}img2simg.exe {} {}.sparse 1024'.format(tooldir, outimg, outimg)
run_cmd(img2simg)
def round_pow2(x):
cnt = 0
shift = 64
@@ -98,40 +102,105 @@ def round_pow2(x):
value = 1 << last_one
return value
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-a", "--auto", action='store_true',
help="auto size of FAT image")
parser.add_argument("-f", "--fullpart", action='store_true',
help="image size of is partition size")
help="image size is partition size")
parser.add_argument("-i", "--inputdir", type=str,
help="input directory")
parser.add_argument("-o", "--outfile", type=str,
help="output file")
parser.add_argument("-g", "--imgsize", type=str,
help="sector size")
help="image size")
parser.add_argument("-s", "--sector", type=str,
help="sector size")
parser.add_argument("-c", "--cluster", type=str,
help="cluster size")
parser.add_argument("-t", "--tooldir", type=str,
help="tool directory")
parser.add_argument("-r", "--raw", action='store_true',
help="Don't strip FAT image, keep raw image")
args = parser.parse_args()
cluster = int(args.cluster)
# cluster should be pow of 2
cluster = round_pow2(cluster)
sector_siz = int(args.sector)
calc_mini = True
strip_siz = True
if args.auto:
cluster_siz = cluster * int(args.sector)
imgsiz = mkimage_get_resource_size(args.inputdir, cluster_siz)
# No need to strip size in auto mode
strip_siz = False
if args.raw:
# No need to strip size if user select raw image
strip_siz = False
if calc_mini:
cluster_siz = cluster * sector_siz
data_siz = mkimage_get_resource_size(args.inputdir, cluster_siz)
# Size should alignment with cluster size
imgsiz = cluster_siz * int(round((imgsiz + cluster_siz - 1) / cluster_siz))
# Add additional 512KB free space
imgsiz += 512 * 1024
data_siz = cluster_siz * int(((data_siz + cluster_siz - 1) / cluster_siz))
if args.auto:
data_clus_cnt = 2 + data_siz / cluster_siz
data_region_sz = data_clus_cnt * cluster_siz
if data_clus_cnt < 4096:
# FAT12
# - BPB_RsvdSecCnt should be 1
# - BPB_RootEntCnt max 512
# - FATsz: 2 * FAT
# - DATA Region: 2 cluster + DATA
fat_siz = (data_region_sz / cluster_siz) * 12 / 8
rsvd_siz = 1 * sector_siz
root_ent_cnt = 512 * 32
elif data_clus_cnt < 65525:
# FAT16
# - BPB_RsvdSecCnt should be 1
# - BPB_RootEntCnt max 512
# - FATsz: 2 * FAT
# - DATA Region: 2 cluster + DATA
fat_siz = (data_region_sz / cluster_siz) * 16 / 8
rsvd_siz = 1 * sector_siz
root_ent_cnt = 512 * 32
imgsiz = rsvd_siz + 2 * fat_siz + root_ent_cnt + data_region_sz
else:
# FAT32
# - BPB_RsvdSecCnt fixed 32
# - BPB_RootEntCnt fixed 0
# - FATsz: 2 * FAT
# - DATA Region: 2 cluster + DATA
fat_siz = data_region_sz / cluster_siz * 4
rsvd_siz = 32 * sector_siz
root_ent_cnt = 0
fat_siz = sector_siz * int((fat_siz + sector_siz - 1) / sector_siz)
imgsiz = rsvd_siz + 2 * fat_siz + root_ent_cnt + data_region_sz
# Round to cluster alignment
imgsiz = cluster_siz * int(((imgsiz + cluster_siz - 1) / cluster_siz))
elif args.fullpart:
imgsiz = mkimage_get_part_size(args.outfile)
else:
imgsiz = int(args.imgsize)
gen_fatfs(args.tooldir, args.inputdir, args.outfile, imgsiz, args.sector, cluster)
gen_fatfs(args.tooldir, args.inputdir, args.outfile, imgsiz, sector_siz, cluster)
if strip_siz:
clus_cnt = imgsiz / cluster_siz
if clus_cnt < 65536:
# FAT16/FAT12, assume it is FAT16, and evaluate the valid data size
fat_siz = (clus_cnt * 16) / 8
rsvd_siz = 1 * sector_siz
root_ent_cnt = 512 * 32
else:
# FAT32, evaluate the valid data size
fat_siz = (clus_cnt * 32) / 8
rsvd_siz = 32 * sector_siz
root_ent_cnt = 0
minimal_siz = rsvd_siz + 2 * fat_siz + root_ent_cnt + 2 * cluster_siz + data_siz
# Round to cluster alignment
minimal_siz = cluster_siz * int(((minimal_siz + cluster_siz - 1) / cluster_siz))
if platform.system() == 'Linux':
truncate = 'truncate -s {} {}'.format(minimal_siz, args.outfile)
run_cmd(truncate)
elif platform.system() == 'Windows':
truncate = '{}truncate.exe -s {} {}'.format(args.tooldir, minimal_siz, args.outfile)
run_cmd(truncate)

Binary file not shown.

View File

@@ -21,6 +21,24 @@ META_ALIGNED_SIZE = 512
BURNER = False
VERBOSE = False
# ANSI escape sequences used to colorize build-time console messages.
COLOR_BEGIN = "\033["
COLOR_RED = COLOR_BEGIN + "41;37m"
COLOR_YELLOW = COLOR_BEGIN + "43;30m"
COLOR_WHITE = COLOR_BEGIN + "47;30m"
COLOR_END = "\033[0m"


def pr_err(string):
    """Print *string* as an error: '***' prefix on a red banner."""
    print("%s%s%s" % (COLOR_RED, '*** ' + string, COLOR_END))


def pr_info(string):
    """Print *string* as information: '>>>' prefix on a white banner."""
    print("%s%s%s" % (COLOR_WHITE, '>>> ' + string, COLOR_END))


def pr_warn(string):
    """Print *string* as a warning: '!!!' prefix on a yellow banner."""
    print("%s%s%s" % (COLOR_YELLOW, '!!! ' + string, COLOR_END))
def parse_image_cfg(cfgfile):
""" Load image configuration file
@@ -828,6 +846,12 @@ def int_to_uint8_bytes(n):
return n.to_bytes(1, byteorder='little', signed=False)
def int_to_uint16_bytes(n):
    """ Int value to uint16 little-endian bytes

    The original docstring said "uint8" (copy-paste from the uint8
    helper) although the conversion is 2 bytes wide.
    Raises OverflowError when n does not fit in an unsigned 16-bit value.
    """
    return n.to_bytes(2, byteorder='little', signed=False)
def int_from_uint32_bytes(s):
""" Int value from uint32 bytes
"""
@@ -949,7 +973,27 @@ def img_gen_fwc_meta(name, part, offset, size, crc, ram, attr, filename):
PAGE_TABLE_MAX_ENTRY = 101
"""
struct nand_page_table_head {
char magic[4]; /* AICP: AIC Page table */
u32 entry_cnt;
u16 page_size;
u8 pad[10]; /* Padding it to fit size 20 bytes */
};
struct nand_page_table_entry {
u32 pageaddr1;
u32 pageaddr2;
u32 checksum2;
u32 reserved;
u32 checksum1;
};
struct nand_page_table {
struct nand_page_table_head head;
struct nand_page_table_entry entry[PAGE_TABLE_MAX_ENTRY];
};
"""
def img_gen_page_table(binfile, cfg, datadir):
""" Generate page table data
Args:
@@ -966,9 +1010,13 @@ def img_gen_page_table(binfile, cfg, datadir):
block_size = int(re.sub(r"[^0-9]", "", item["block"]))
spl_file = cfg["image"]["target"]["spl"]["file"]
filesize = round_up(cfg["image"]["target"]["spl"]["filesize"], DATA_ALIGNED_SIZE);
filesize = round_up(cfg["image"]["target"]["spl"]["filesize"], DATA_ALIGNED_SIZE)
page_per_blk = block_size // page_size
page_cnt = filesize // (page_size * 1024)
if (page_cnt + 1 > (2 * PAGE_TABLE_MAX_ENTRY)):
print("SPL too large, more than 400K.")
sys.exit(1)
path = get_file_path(spl_file, datadir)
if path is None:
sys.exit(1)
@@ -977,26 +1025,25 @@ def img_gen_page_table(binfile, cfg, datadir):
entry_page = page_cnt + 1
buff = str_to_nbytes("AICP", 4)
buff = buff + int_to_uint32_bytes(entry_page) # The first SPL hold start 65 page
buff = buff + int_to_uint8_bytes(page_size)
buff = buff + gen_bytes(0xFF, 11)
buff = buff + int_to_uint32_bytes(entry_page)
buff = buff + int_to_uint16_bytes(page_size * 1024)
buff = buff + gen_bytes(0xFF, 10)
with open(path, "rb") as fwcfile:
pageaddr1 = 0
pageaddr2 = PAGE_TABLE_MAX_ENTRY
offset2 = (pageaddr2) * (page_size * 1024)
fwcfile.seek(offset2, 0)
bindata = fwcfile.read(step)
checksum2 = aic_calc_checksum(bindata, page_size * 1024)
if (pageaddr1 < PAGE_TABLE_MAX_ENTRY):
buff = buff + int_to_uint32_bytes(pageaddr1)
else:
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
if (pageaddr2 < (2 * PAGE_TABLE_MAX_ENTRY) and pageaddr2 <= (page_cnt + 1)):
offset2 = (pageaddr2 - 1) * (page_size * 1024)
fwcfile.seek(offset2, 0)
bindata = fwcfile.read(step)
checksum2 = aic_calc_checksum(bindata, page_size * 1024)
buff = buff + int_to_uint32_bytes(pageaddr2)
buff = buff + int_to_uint32_bytes(checksum2)
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
@@ -1010,31 +1057,21 @@ def img_gen_page_table(binfile, cfg, datadir):
else:
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
for i in range(1, page_cnt + 1):
for i in range(1, PAGE_TABLE_MAX_ENTRY):
pageaddr1 = i
pageaddr2 = PAGE_TABLE_MAX_ENTRY + i
offset1 = (pageaddr1 - 1) * (page_size * 1024)
offset2 = (pageaddr2 - 1) * (page_size * 1024)
fwcfile.seek(offset1, 0)
bindata = fwcfile.read(step)
checksum1 = aic_calc_checksum(bindata, page_size * 1024)
fwcfile.seek(offset2, 0)
bindata = fwcfile.read(step)
checksum2 = aic_calc_checksum(bindata, page_size * 1024)
if (page_cnt + 1 > PAGE_TABLE_MAX_ENTRY):
print("SPL too large")
sys.exit(1)
if (pageaddr1 < PAGE_TABLE_MAX_ENTRY):
if (pageaddr1 < PAGE_TABLE_MAX_ENTRY and pageaddr1 <= (page_cnt + 1)):
buff = buff + int_to_uint32_bytes(pageaddr1)
else:
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
if (pageaddr2 < (2 * PAGE_TABLE_MAX_ENTRY) and pageaddr2 <= (page_cnt + 1)):
offset2 = (pageaddr2 - 1) * (page_size * 1024)
fwcfile.seek(offset2, 0)
bindata = fwcfile.read(step)
checksum2 = aic_calc_checksum(bindata, page_size * 1024)
buff = buff + int_to_uint32_bytes(pageaddr2)
buff = buff + int_to_uint32_bytes(checksum2)
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
@@ -1044,9 +1081,15 @@ def img_gen_page_table(binfile, cfg, datadir):
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
if (pageaddr1 < PAGE_TABLE_MAX_ENTRY):
offset1 = (pageaddr1 - 1) * (page_size * 1024)
fwcfile.seek(offset1, 0)
bindata = fwcfile.read(step)
checksum1 = aic_calc_checksum(bindata, page_size * 1024)
buff = buff + int_to_uint32_bytes(checksum1)
else:
buff = buff + int_to_uint32_bytes(0xFFFFFFFF)
buff = buff + gen_bytes(0xFF, page_size * 1024 - len(buff))
checksum = aic_calc_checksum(buff, page_size * 1024)
buff = buff[0:36] + int_to_uint32_bytes(checksum) + buff[40:]
@@ -1104,6 +1147,12 @@ def img_write_fwc_meta_section(imgfile, cfg, sect, meta_off, file_off, datadir):
partitions = cfg[media_type]["partitions"]
for fwc in fwcset:
file_size = fwcset[fwc]["filesize"]
if sect == "target":
part_size = fwcset[fwc]["part_size"]
if file_size > part_size:
print("{} file_size: {} is over much than part_size: {}"
.format(fwcset[fwc]["file"], hex(file_size), hex(part_size)))
return (-1, -1)
if file_size <= 0:
continue
imgfile.seek(meta_off, 0)
@@ -1797,6 +1846,142 @@ def fixup_spinand_ubi_fwc_name(cfg, paramstr, orgitem):
cfg["image"]["info"]["media"]["array_organization"] = [orgitem]
def build_pinmux_check():
# FPGA-type boards may not have an aicboot key, in which case the pinmux
# conflict checking exited directly.
if cfg["temporary"].get("aicboot", 1) == 1:
return 0
cwd = os.getcwd()
image_path = cfg["temporary"]["aicboot"]["bootloader.aic"]["keydir"]
target_path = image_path.replace('images', 'target')
precess_path = os.path.join(cwd, 'output', image_path, '.pinmux.i')
if not os.path.exists(precess_path):
return 0
if (cfg["image"]["info"].get("product.backup")):
prduct = cfg["image"]["info"]["product.backup"].replace("_", "-")
rel_pinmux_path = os.path.join('target',
cfg["image"]["info"]["platform"],
prduct, 'pinmux.c')
else:
prduct = cfg["image"]["info"]["product"].replace("_", "-")
rel_pinmux_path = os.path.join('target',
cfg["image"]["info"]["platform"],
prduct, 'pinmux.c')
pinmux_path = os.path.join(cwd, rel_pinmux_path)
root_path = target_path.replace(os.path.join(cwd, 'output'), '')
defconfig_name = root_path.replace('target', '').replace(os.path.sep, '') + '_defconfig'
defconfig_path = os.path.join(cwd, 'target', 'configs', defconfig_name)
list_preproc_pins = []
list_conflict_pins = []
dict_pinmux = {}
# Get all configured pins and multiplexed functions in the pre-processed file pinmux.i
with open(precess_path, 'r') as file:
pin_pattern = r'\{(\d+),\s*([^,]+),\s*(\d+),\s*("[^"]+"|[^,]+)\}'
for f in file:
match = re.search(pin_pattern, f)
if match:
list_preproc_pins.append([match.groups()[0], match.groups()[3]])
file.close()
# Get the dictionary key as pin_name and the value as an array containing
# all the currently multiplexed functions.
# Tips: When the length of the value in the dictionary is greater than 1,
# it indicates that the pin is multiplexed with multiple functions.
for row in list_preproc_pins:
if row[1] not in dict_pinmux:
dict_pinmux[row[1]] = [row[0]]
else:
dict_pinmux[row[1]].append(row[0])
for pin_name, pin_func in dict_pinmux.items():
if len(pin_func) > 1:
list_conflict_pins.append(pin_name)
if not list_conflict_pins:
return 0
# Print macro definitions based on pins of conflict
pr_warn("Current pinmux conflicts! The conflicting pin:")
lines_num = 0
max_pin_name = max(len(s) for s in list_conflict_pins)
pin_name_total_len = max_pin_name + 2
enabled_macro = {}
with open(defconfig_path, 'r') as file:
matched_num = 0
for f in file:
for i in range(len(list_conflict_pins)):
match = re.search(list_conflict_pins[i], f)
if not match:
continue
matched_num += 1
if matched_num == 1:
print("\n{:<{}}".format('PIN', pin_name_total_len), end='')
print('MACROS (' + defconfig_name + ')')
print("{:<{}}".format(list_conflict_pins[i].replace("\"",
"") + ': ', pin_name_total_len), end='')
print(f.split('=')[0])
key_pin_name = f.split('=')[1].split('\n')[0]
val_macro = f.split('=')[0].replace('CONFIG_', '')
if key_pin_name in enabled_macro:
enabled_macro[key_pin_name].append(val_macro)
else:
enabled_macro[key_pin_name] = [val_macro]
file.close()
print("\n{:<{}}".format('PIN', pin_name_total_len), end='')
print('LINES (' + rel_pinmux_path + ')')
# Print the line number of conflicting pins in pinmux.c file
with open(pinmux_path, 'r') as file:
lines = file.readlines()
total_lines = len(str(len(lines))) + 2
file.seek(0)
for i in range(len(list_conflict_pins)):
print("{:<{}}".format(list_conflict_pins[i].replace("\"",
"") + ': ', pin_name_total_len), end='')
pin_func = dict_pinmux.get(list_conflict_pins[i])
matched_num = 0
for f in file:
lines_num += 1
match = re.search(list_conflict_pins[i], f)
if not match:
continue
fun = f.split('{')[1].split(',')[0]
if fun in pin_func:
matched_num += 1
if matched_num > 1:
print(' ' * pin_name_total_len, end='')
line_str = str(lines_num) + ': '
print("{:<{}}".format(line_str, total_lines), end='')
print(f.replace(' ', ''), end='')
file.seek(0)
lines_num = 0
# Search backwards from the macro to the line where the pin
# function configuration is
if list_conflict_pins[i] in enabled_macro:
for pin_name_index in enabled_macro[list_conflict_pins[i]]:
lines_num_macro = 0
for f in file:
lines_num_macro += 1
match = re.search(pin_name_index + '}', f)
line_str = str(lines_num_macro) + ': '
if not match:
continue
print(' ' * pin_name_total_len, end='')
print("{:<{}}".format(line_str, total_lines), end='')
print(f.replace(' ', ''), end='')
file.seek(0)
lines_num_macro = 0
file.close()
def build_firmware_image(cfg, datadir, outdir):
""" Build firmware image
Args:
@@ -1895,8 +2080,10 @@ def build_firmware_image(cfg, datadir, outdir):
generate_bootcfg(bcfgfile, cfg)
bcfgfile.flush()
build_pinmux_check()
return 0
if __name__ == "__main__":
default_bin_root = os.path.dirname(sys.argv[0])
if sys.platform.startswith("win"):

View File

@@ -56,6 +56,8 @@ for /f %%i in ('dir /b "%SDK_PRJ_TOP_DIR%\target\configs\*_defconfig"') do (
if !errorlevel! equ 0 (
rem Scan the warning information in log
find "warning: " %LOG_DIR%\!SOLUTION!.log | find "warning: " /i | find "is shorter than expected" /v /c > %LOG_DIR%\warning.tmp
find "pinmux conflicts" %LOG_DIR%\!SOLUTION!.log >> %LOG_DIR%\warning.tmp
for /F %%j in ('type %LOG_DIR%\warning.tmp') do set war_cnt=%%j
echo !CNT_FMT!. !SOLUTION_FMT! is OK. Warning: !war_cnt!

View File

@@ -17,6 +17,7 @@ echo cout/co : cd to build output directory.
echo cbuild/cb : cd to build root directory.
echo ctarget/ct : cd to target board directory.
echo list : List all SDK defconfig.
echo list_module : List all enabled modules.
echo i : Get current project's information.
echo buildall : Build all the *defconfig in target/configs
echo rebuildall : Clean and build all the *defconfig in target/configs

View File

@@ -0,0 +1,6 @@
@echo off
rem SPDX-License-Identifier: GPL-2.0+
rem
rem Copyright (C) 2024 ArtInChip Technology Co., Ltd
scons --list-module -C %SDK_PRJ_TOP_DIR%

169
tools/scripts/simg_dump.py Executable file
View File

@@ -0,0 +1,169 @@
#! /usr/bin/env python
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import getopt, posixpath, signal, struct, sys
def usage(argv0):
    """Print command-line help and exit with status 2 (bad usage)."""
    print("""
Usage: %s [-v] sparse_image_file ...
-v verbose output
""" % ( argv0 ))
    sys.exit(2)


def main():
    """Dump the header and (with -v) the chunk list of Android sparse
    image files named on the command line.

    Exits 0 on success, 2 on usage errors. Format errors in an input
    file are reported and the remaining files are still processed.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    me = posixpath.basename(sys.argv[0])

    # Parse the command line
    verbose = 0                     # -v
    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   "v",
                                   ["verbose"])
    except getopt.GetoptError as e:
        # Py3-compatible form; the original 'except E, e' is Python-2-only
        # syntax even though the file imports print_function.
        print(e)
        usage(me)

    for o, a in opts:
        if o in ("-v", "--verbose"):
            verbose += 1
        else:
            print("Unrecognized option \"%s\"" % (o))
            usage(me)

    if len(args) == 0:
        print("No sparse_image_file specified")
        usage(me)

    for path in args:
        # 'with' ensures each image is closed (the original leaked the
        # file handle).
        with open(path, 'rb') as FH:
            header_bin = FH.read(28)
            # Sparse image header: magic, versions, header sizes, block
            # size, block/chunk counts, checksum.
            header = struct.unpack("<I4H4I", header_bin)

            magic = header[0]
            major_version = header[1]
            minor_version = header[2]
            file_hdr_sz = header[3]
            chunk_hdr_sz = header[4]
            blk_sz = header[5]
            total_blks = header[6]
            total_chunks = header[7]
            image_checksum = header[8]

            if magic != 0xED26FF3A:
                print("%s: %s: Magic should be 0xED26FF3A but is 0x%08X"
                      % (me, path, magic))
                continue
            if major_version != 1 or minor_version != 0:
                print("%s: %s: I only know about version 1.0, but this is version %u.%u"
                      % (me, path, major_version, minor_version))
                continue
            if file_hdr_sz != 28:
                print("%s: %s: The file header size was expected to be 28, but is %u."
                      % (me, path, file_hdr_sz))
                continue
            if chunk_hdr_sz != 12:
                print("%s: %s: The chunk header size was expected to be 12, but is %u."
                      % (me, path, chunk_hdr_sz))
                continue

            print("%s: Total of %u %u-byte output blocks in %u input chunks."
                  % (path, total_blks, blk_sz, total_chunks))
            if image_checksum != 0:
                print("checksum=0x%08X" % (image_checksum))

            if not verbose:
                continue

            print(" input_bytes output_blocks")
            print("chunk offset number offset number")
            offset = 0
            # range (not py2-only xrange); chunks are numbered from 1.
            for i in range(1, total_chunks + 1):
                header_bin = FH.read(12)
                # Chunk header: type, reserved, output blocks, total bytes.
                header = struct.unpack("<2H2I", header_bin)
                chunk_type = header[0]
                reserved1 = header[1]
                chunk_sz = header[2]
                total_sz = header[3]
                data_sz = total_sz - 12
                print("%4u %10u %10u %7u %7u" % (i, FH.tell(), data_sz, offset, chunk_sz),
                      end=" ")

                if chunk_type == 0xCAC1:
                    # Raw chunk: data_sz bytes copied verbatim.
                    if data_sz != (chunk_sz * blk_sz):
                        print("Raw chunk input size (%u) does not match output size (%u)"
                              % (data_sz, chunk_sz * blk_sz))
                        break
                    else:
                        print("Raw data", end="")
                        FH.read(data_sz)
                elif chunk_type == 0xCAC2:
                    # Fill chunk: one 4-byte pattern repeated.
                    if data_sz != 4:
                        print("Fill chunk should have 4 bytes of fill, but this has %u"
                              % (data_sz), end="")
                        break
                    else:
                        fill_bin = FH.read(4)
                        fill = struct.unpack("<I", fill_bin)
                        print("Fill with 0x%08X" % (fill))
                elif chunk_type == 0xCAC3:
                    # Don't-care chunk: no input data.
                    if data_sz != 0:
                        print("Don't care chunk input size is non-zero (%u)" % (data_sz))
                        break
                    else:
                        print("Don't care", end="")
                elif chunk_type == 0xCAC4:
                    # CRC chunk.
                    if data_sz != 4:
                        print("CRC32 chunk should have 4 bytes of CRC, but this has %u"
                              % (data_sz), end="")
                        break
                    else:
                        crc_bin = FH.read(4)
                        # BUG FIX: the original unpacked the not-yet-defined
                        # name 'crc' instead of the bytes just read, raising
                        # NameError whenever a CRC chunk was present.
                        crc = struct.unpack("<I", crc_bin)
                        print("Unverified CRC32 0x%08X" % (crc))
                else:
                    print("Unknown chunk type 0x%04X" % (chunk_type), end="")
                    break

                if verbose > 1:
                    # Dump the raw chunk header bytes as well.
                    header = struct.unpack("<12B", header_bin)
                    print(" (%02X%02X %02X%02X %02X%02X%02X%02X %02X%02X%02X%02X)"
                          % (header[0], header[1], header[2], header[3],
                             header[4], header[5], header[6], header[7],
                             header[8], header[9], header[10], header[11]))
                else:
                    print()

                offset += chunk_sz

            print(" %10u %7u End" % (FH.tell(), offset))

            if total_blks != offset:
                print("The header said we should have %u output blocks, but we saw %u"
                      % (total_blks, offset))

            junk_len = len(FH.read())
            if junk_len:
                print("There were %u bytes of extra data at the end of the file."
                      % (junk_len))

    sys.exit(0)


if __name__ == "__main__":
    main()