first
440
rt-thread/tools/WCS.py
Normal file
@@ -0,0 +1,440 @@
import re
import pprint
import os
from subprocess import check_output
from optparse import OptionParser

# Constants
rtl_ext_end = ".dfinish"
rtl_ext = None  # e.g. '.c.270r.dfinish'. The number '270' changes with the gcc version and is auto-detected by find_rtl_ext()
dir = r'.'  # Working directory
su_ext = '.su'
obj_ext = '.o'
manual_ext = '.msu'
read_elf_path = "arm-none-eabi-readelf.exe"  # You may need to enter the full path here
stdout_encoding = "utf-8"  # System dependent


class Printable:
    def __repr__(self):
        return "<" + type(self).__name__ + "> " + pprint.pformat(vars(self), indent=4, width=1)


class Symbol(Printable):
    pass


def read_symbols(file):
    def to_symbol(read_elf_line):
        v = read_elf_line.split()

        s2 = Symbol()
        s2.value = int(v[1], 16)
        s2.size = int(v[2])
        s2.type = v[3]
        s2.binding = v[4]
        if len(v) >= 8:
            s2.name = v[7]
        else:
            s2.name = ""

        return s2

    output = check_output([read_elf_path, "-s", "-W", file]).decode(stdout_encoding)
    lines = output.splitlines()[3:]
    return [to_symbol(line) for line in lines]


def read_obj(tu, call_graph):
    """
    Reads the file tu.o and gets the binding (global or local) for each function
    :param tu: name of the translation unit (e.g. for main.c, this would be 'main')
    :param call_graph: an object used to store information about each function, results go here
    """
    symbols = read_symbols(tu[0:tu.rindex(".")] + obj_ext)

    for s in symbols:

        if s.type == 'FUNC':
            if s.binding == 'GLOBAL':
                # Check for multiple declarations
                if s.name in call_graph['globals'] or s.name in call_graph['locals']:
                    raise Exception('Multiple declarations of {}'.format(s.name))
                call_graph['globals'][s.name] = {'tu': tu, 'name': s.name, 'binding': s.binding}
            elif s.binding == 'LOCAL':
                # Check for multiple declarations
                if s.name in call_graph['locals'] and tu in call_graph['locals'][s.name]:
                    raise Exception('Multiple declarations of {}'.format(s.name))

                if s.name not in call_graph['locals']:
                    call_graph['locals'][s.name] = {}

                call_graph['locals'][s.name][tu] = {'tu': tu, 'name': s.name, 'binding': s.binding}
            elif s.binding == 'WEAK':
                if s.name in call_graph['weak']:
                    raise Exception('Multiple declarations of {}'.format(s.name))
                call_graph['weak'][s.name] = {'tu': tu, 'name': s.name, 'binding': s.binding}
            else:
                raise Exception('Error Unknown Binding "{}" for symbol: {}'.format(s.binding, s.name))


def find_fxn(tu, fxn, call_graph):
    """
    Looks up the dictionary associated with the function.
    :param tu: The translation unit in which to look for local functions
    :param fxn: The function name
    :param call_graph: an object used to store information about each function
    :return: the dictionary for the given function or None
    """

    if fxn in call_graph['globals']:
        return call_graph['globals'][fxn]
    else:
        try:
            return call_graph['locals'][fxn][tu]
        except KeyError:
            return None


def find_demangled_fxn(tu, fxn, call_graph):
    """
    Looks up the dictionary associated with the function by its demangled name.
    :param tu: The translation unit in which to look for local functions
    :param fxn: The function name
    :param call_graph: an object used to store information about each function
    :return: the dictionary for the given function or None
    """
    for f in call_graph['globals'].values():
        if 'demangledName' in f:
            if f['demangledName'] == fxn:
                return f
    for f in call_graph['locals'].values():
        if tu in f:
            if 'demangledName' in f[tu]:
                if f[tu]['demangledName'] == fxn:
                    return f[tu]
    return None


def read_rtl(tu, call_graph):
    """
    Reads an RTL file, finds the callees of each function and whether it makes calls via function pointers.
    :param tu: the translation unit
    :param call_graph: an object used to store information about each function, results go here
    """

    # Construct A Call Graph
    function = re.compile(r'^;; Function (.*) \((\S+), funcdef_no=\d+(, [a-z_]+=\d+)*\)( \([a-z ]+\))?$')
    static_call = re.compile(r'^.*\(call.*"(.*)".*$')
    other_call = re.compile(r'^.*call .*$')

    for line_ in open(tu + rtl_ext).readlines():
        m = function.match(line_)
        if m:
            fxn_name = m.group(2)
            fxn_dict2 = find_fxn(tu, fxn_name, call_graph)
            if not fxn_dict2:
                pprint.pprint(call_graph)
                raise Exception("Error locating function {} in {}".format(fxn_name, tu))

            fxn_dict2['demangledName'] = m.group(1)
            fxn_dict2['calls'] = set()
            fxn_dict2['has_ptr_call'] = False
            continue

        m = static_call.match(line_)
        if m:
            fxn_dict2['calls'].add(m.group(1))
            # print("Call: {0} -> {1}".format(current_fxn, m.group(1)))
            continue

        m = other_call.match(line_)
        if m:
            fxn_dict2['has_ptr_call'] = True
            continue


def read_su(tu, call_graph):
    """
    Reads the 'local_stack' for each function. Local stack ignores stack used by callees.
    :param tu: the translation unit
    :param call_graph: an object used to store information about each function, results go here
    :return:
    """

    su_line = re.compile(r'^([^ :]+):([\d]+):([\d]+):(.+)\t(\d+)\t(\S+)$')
    i = 1

    for line in open(tu[0:tu.rindex(".")] + su_ext).readlines():
        m = su_line.match(line)
        if m:
            fxn = m.group(4)
            fxn_dict2 = find_demangled_fxn(tu, fxn, call_graph)
            fxn_dict2['local_stack'] = int(m.group(5))
        else:
            print("error parsing line {} in file {}".format(i, tu))
        i += 1


def read_manual(file, call_graph):
    """
    Reads the manual stack usage files.
    :param file: the file name
    :param call_graph: an object used to store information about each function, results go here
    """

    for line in open(file).readlines():
        fxn, stack_sz = line.split()
        if fxn in call_graph:
            raise Exception("Redeclared Function {}".format(fxn))
        call_graph['globals'][fxn] = {'wcs': int(stack_sz),
                                      'calls': set(),
                                      'has_ptr_call': False,
                                      'local_stack': int(stack_sz),
                                      'is_manual': True,
                                      'name': fxn,
                                      'tu': '#MANUAL',
                                      'binding': 'GLOBAL'}


def validate_all_data(call_graph):
    """
    Checks that every entry in the call graph has the following fields:
    .calls, .has_ptr_call, .local_stack, .name, .tu
    """

    def validate_dict(d):
        if not ('calls' in d and 'has_ptr_call' in d and 'local_stack' in d
                and 'name' in d and 'tu' in d):
            print("Error data is missing in fxn dictionary {}".format(d))

    # Loop through every global and local function
    # and validate each entry
    for fxn_dict2 in call_graph['globals'].values():
        validate_dict(fxn_dict2)

    for l_dict in call_graph['locals'].values():
        for fxn_dict2 in l_dict.values():
            validate_dict(fxn_dict2)


def resolve_all_calls(call_graph):
    def resolve_calls(fxn_dict2):
        fxn_dict2['r_calls'] = []
        fxn_dict2['unresolved_calls'] = set()

        for call in fxn_dict2['calls']:
            call_dict = find_fxn(fxn_dict2['tu'], call, call_graph)
            if call_dict:
                fxn_dict2['r_calls'].append(call_dict)
            else:
                fxn_dict2['unresolved_calls'].add(call)

    # Loop through every global and local function
    # and resolve each call, save results in r_calls
    for fxn_dict in call_graph['globals'].values():
        resolve_calls(fxn_dict)

    for l_dict in call_graph['locals'].values():
        for fxn_dict in l_dict.values():
            resolve_calls(fxn_dict)


def calc_all_wcs(call_graph):
    def calc_wcs(fxn_dict2, call_graph1, parents):
        """
        Calculates the worst case stack for a fxn that is declared (or called from) in a given file.
        :param parents: This function gets called recursively through the call graph. If a function is recursive,
        it will already be in the parents stack, and everything between the top of the stack and the matching
        entry is part of the recursion.
        :return:
        """

        # If the wcs is already known, there is nothing to do
        if 'wcs' in fxn_dict2:
            return

        # Check for pointer calls
        if fxn_dict2['has_ptr_call']:
            fxn_dict2['wcs'] = 'unbounded'
            return

        # Check for recursion
        if fxn_dict2 in parents:
            fxn_dict2['wcs'] = 'unbounded'
            return

        # Calculate WCS
        call_max = 0
        for call_dict in fxn_dict2['r_calls']:

            # Calculate the WCS for the called function
            parents.append(fxn_dict2)
            calc_wcs(call_dict, call_graph1, parents)
            parents.pop()

            # If the called function is unbounded, so is this function
            if call_dict['wcs'] == 'unbounded':
                fxn_dict2['wcs'] = 'unbounded'
                return

            # Keep track of the call with the largest stack use
            call_max = max(call_max, call_dict['wcs'])

            # Propagate Unresolved Calls
            for unresolved_call in call_dict['unresolved_calls']:
                fxn_dict2['unresolved_calls'].add(unresolved_call)

        fxn_dict2['wcs'] = call_max + fxn_dict2['local_stack']

    # Loop through every global and local function
    # and calculate the WCS of each one
    for fxn_dict in call_graph['globals'].values():
        calc_wcs(fxn_dict, call_graph, [])

    for l_dict in call_graph['locals'].values():
        for fxn_dict in l_dict.values():
            calc_wcs(fxn_dict, call_graph, [])


def print_all_fxns(call_graph):

    def print_fxn(row_format, fxn_dict2):
        unresolved = fxn_dict2['unresolved_calls']
        stack = str(fxn_dict2['wcs'])
        if unresolved:
            unresolved_str = '({})'.format(', '.join(unresolved))
            if stack != 'unbounded':
                stack = "unbounded:" + stack
        else:
            unresolved_str = ''

        print(row_format.format(fxn_dict2['tu'], fxn_dict2['demangledName'], stack, unresolved_str))

    def get_order(val):
        if val == 'unbounded':
            return 1
        else:
            return -val

    # Loop through every global and local function
    # and collect them into a single list
    d_list = []
    for fxn_dict in call_graph['globals'].values():
        d_list.append(fxn_dict)

    for l_dict in call_graph['locals'].values():
        for fxn_dict in l_dict.values():
            d_list.append(fxn_dict)

    d_list.sort(key=lambda item: get_order(item['wcs']))

    # Calculate table width
    tu_width = max(max([len(d['tu']) for d in d_list]), 16)
    name_width = max(max([len(d['name']) for d in d_list]), 13)
    row_format = "{:<" + str(tu_width + 2) + "} {:<" + str(name_width + 2) + "} {:>14} {:<17}"

    # Print out the table
    print("")
    print(row_format.format('Translation Unit', 'Function Name', 'Stack', 'Unresolved Dependencies'))
    for d in d_list:
        print_fxn(row_format, d)


def find_rtl_ext():
    # Find the rtl_extension
    global rtl_ext

    for root, directories, filenames in os.walk('.'):
        for f in filenames:
            if f.endswith(rtl_ext_end):
                rtl_ext = f[f[:-len(rtl_ext_end)].rindex("."):]
                print("rtl_ext = " + rtl_ext)
                return

    print("Could not find any files ending with '.dfinish'. Check that the script is being run from the correct "
          "directory. Check that the code was compiled with the correct flags")
    exit(-1)


def find_files():
    tu = []
    manual = []
    all_files = []
    for root, directories, filenames in os.walk(dir):
        for filename in filenames:
            all_files.append(os.path.join(root, filename))

    files = [f for f in all_files if os.path.isfile(f) and f.endswith(rtl_ext)]
    for f in files:
        base = f[0:-len(rtl_ext)]
        short_base = base[0:base.rindex(".")]
        if short_base + su_ext in all_files and short_base + obj_ext in all_files:
            tu.append(base)
            print('Reading: {}{}, {}{}, {}{}'.format(base, rtl_ext, short_base, su_ext, short_base, obj_ext))

    files = [f for f in all_files if os.path.isfile(f) and f.endswith(manual_ext)]
    for f in files:
        manual.append(f)
        print('Reading: {}'.format(f))

    # Print some diagnostic messages
    if not tu:
        print("Could not find any translation units to analyse")
        exit(-1)

    return tu, manual


def main():

    # Find the appropriate RTL extension
    find_rtl_ext()

    # Find all input files
    call_graph = {'locals': {}, 'globals': {}, 'weak': {}}
    tu_list, manual_list = find_files()

    # Read the input files
    for tu in tu_list:
        read_obj(tu, call_graph)  # This must be first

    for fxn in call_graph['weak'].values():
        if fxn['name'] not in call_graph['globals'].keys():
            call_graph['globals'][fxn['name']] = fxn

    for tu in tu_list:
        read_rtl(tu, call_graph)
    for tu in tu_list:
        read_su(tu, call_graph)

    # Read manual files
    for m in manual_list:
        read_manual(m, call_graph)

    # Validate Data
    validate_all_data(call_graph)

    # Resolve All Function Calls
    resolve_all_calls(call_graph)

    # Calculate Worst Case Stack For Each Function
    calc_all_wcs(call_graph)

    # Print A Nice Message With Each Function and the WCS
    print_all_fxns(call_graph)


def ThreadStackStaticAnalysis(env):
    print('Start thread stack static analysis...')

    import rtconfig
    global read_elf_path  # make the toolchain readelf path visible to read_symbols()
    read_elf_path = rtconfig.EXEC_PATH + r'\readelf.exe'
    main()

    print('\nThread stack static analysis done!')
    return
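Note: WCS.py only post-processes build artifacts; it assumes the project was compiled with GCC's -fstack-usage and -fdump-rtl-dfinish options, so that a .su, .o and *.dfinish file sit next to each translation unit. A minimal usage sketch (the readelf name below is an assumption for a typical arm-none-eabi toolchain, not part of this commit):

    import WCS

    WCS.read_elf_path = "arm-none-eabi-readelf"   # point at the local toolchain's readelf
    WCS.main()                                    # scans '.', builds the call graph, prints the WCS table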
3
rt-thread/tools/as.sh
Normal file
@@ -0,0 +1,3 @@
#!/bin/sh

astyle --style=allman --indent=spaces=4 --pad-oper --pad-header --unpad-paren --suffix=none --align-pointer=name --lineend=linux --convert-tabs --verbose $1
1088
rt-thread/tools/building.py
Normal file
File diff suppressed because it is too large
137
rt-thread/tools/cdk.py
Normal file
@@ -0,0 +1,137 @@
#
# File      : cdk.py
# This file is part of RT-Thread RTOS
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Change Logs:
# Date           Author       Notes
# 2017-10-16     Tanek        Add CDK IDE support
#

import os
import sys
import string

import xml.etree.ElementTree as etree
from xml.etree.ElementTree import SubElement
from utils import _make_path_relative
from utils import xml_indent

def SDKAddGroup(ProjectFiles, parent, name, files, project_path):
    # don't add an empty group
    if len(files) == 0:
        return

    group = SubElement(parent, 'VirtualDirectory', attrib={'Name': name})

    for f in files:
        fn = f.rfile()
        name = fn.name
        path = os.path.dirname(fn.abspath)

        basename = os.path.basename(path)
        path = _make_path_relative(project_path, path)
        elm_attr_name = os.path.join(path, name)

        file = SubElement(group, 'File', attrib={'Name': elm_attr_name})

    return group

def _CDKProject(tree, target, script):

    project_path = os.path.dirname(os.path.abspath(target))

    root = tree.getroot()
    out = open(target, 'w')
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')

    CPPPATH = []
    CPPDEFINES = []
    LINKFLAGS = ''
    CCFLAGS = ''
    LIBS = []
    ProjectFiles = []

    for child in root:
        if child.tag == 'VirtualDirectory':
            root.remove(child)

    for group in script:
        group_tree = SDKAddGroup(ProjectFiles, root, group['name'], group['src'], project_path)

        # get each include path
        if 'CPPPATH' in group and group['CPPPATH']:
            CPPPATH += group['CPPPATH']

        # get each group's definitions
        if 'CPPDEFINES' in group and group['CPPDEFINES']:
            CPPDEFINES += group['CPPDEFINES']

        # get each group's cc flags
        if 'CCFLAGS' in group and group['CCFLAGS']:
            if CCFLAGS:
                CCFLAGS += ' ' + group['CCFLAGS']
            else:
                CCFLAGS += group['CCFLAGS']

        # get each group's link flags
        if 'LINKFLAGS' in group and group['LINKFLAGS']:
            if LINKFLAGS:
                LINKFLAGS += ' ' + group['LINKFLAGS']
            else:
                LINKFLAGS += group['LINKFLAGS']

        # todo: cdk add lib
        if 'LIBS' in group and group['LIBS']:
            LIBS += group['LIBS']

    # write include path, definitions and link flags
    text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in CPPPATH])
    IncludePath = tree.find('BuildConfigs/BuildConfig/Compiler/IncludePath')
    IncludePath.text = text
    IncludePath = tree.find('BuildConfigs/BuildConfig/Asm/IncludePath')
    IncludePath.text = text

    Define = tree.find('BuildConfigs/BuildConfig/Compiler/Define')
    Define.text = '; '.join(set(CPPDEFINES))

    CC_Misc = tree.find('BuildConfigs/BuildConfig/Compiler/OtherFlags')
    CC_Misc.text = CCFLAGS

    LK_Misc = tree.find('BuildConfigs/BuildConfig/Linker/OtherFlags')
    LK_Misc.text = LINKFLAGS

    LibName = tree.find('BuildConfigs/BuildConfig/Linker/LibName')
    if LibName.text:
        LibName.text = LibName.text + ';' + ';'.join(LIBS)
    else:
        LibName.text = ';'.join(LIBS)

    xml_indent(root)
    out.write(etree.tostring(root, encoding='utf-8'))
    out.close()

def CDKProject(target, script):
    template_tree = etree.parse('template.cdkproj')

    _CDKProject(template_tree, target, script)
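Note: the `script` argument passed to CDKProject() is the usual SCons project group list; a sketch of the keys _CDKProject() actually reads (hedged — in a real build the 'src' entries are SCons File nodes, and template.cdkproj must already exist in the working directory; the values shown are placeholders):

    group = {
        'name': 'Kernel',                            # VirtualDirectory name in the .cdkproj
        'src': [],                                   # SCons File nodes for this group
        'CPPPATH': ['rt-thread/include'],            # include paths, merged across all groups
        'CPPDEFINES': ['RT_USING_COMPONENTS_INIT'],  # preprocessor definitions
        'CCFLAGS': '', 'LINKFLAGS': '', 'LIBS': [],  # optional extra compiler/linker flags and libraries
    }
    # CDKProject('project.cdkproj', [group]) then fills template.cdkproj with these values.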
183
rt-thread/tools/ci/bsp_buildings.py
Normal file
@@ -0,0 +1,183 @@
import os
import shutil
import re
import multiprocessing


def add_summary(text):
    """
    add a summary line to the github action run.
    """
    os.system(f'echo "{text}" >> $GITHUB_STEP_SUMMARY ;')


def run_cmd(cmd, output_info=True):
    """
    run a command and return its output lines and exit status.
    """
    print('\033[1;32m' + cmd + '\033[0m')

    output_str_list = []
    res = 0

    if output_info:
        res = os.system(cmd + " > output.txt 2>&1")
    else:
        res = os.system(cmd + " > /dev/null 2>output.txt")

    with open("output.txt", "r") as file:
        output_str_list = file.readlines()

    for line in output_str_list:
        print(line, end='')

    os.remove("output.txt")

    return output_str_list, res


def build_bsp(bsp, scons_args=''):
    """
    build a bsp.

    cd {rtt_root}
    scons -C bsp/{bsp} --pyconfig-silent > /dev/null

    cd {rtt_root}/bsp/{bsp}
    pkgs --update > /dev/null
    pkgs --list

    cd {rtt_root}
    scons -C bsp/{bsp} -j{nproc} {scons_args}

    cd {rtt_root}/bsp/{bsp}
    scons -c > /dev/null
    rm -rf packages

    """
    success = True
    os.chdir(rtt_root)
    if os.path.exists(f"{rtt_root}/bsp/{bsp}/Kconfig"):
        os.chdir(rtt_root)
        run_cmd(f'scons -C bsp/{bsp} --pyconfig-silent', output_info=False)

        os.chdir(f'{rtt_root}/bsp/{bsp}')
        run_cmd('pkgs --update', output_info=False)
        run_cmd('pkgs --list')

    nproc = multiprocessing.cpu_count()
    os.chdir(rtt_root)
    cmd = f'scons -C bsp/{bsp} -j{nproc} {scons_args}'
    __, res = run_cmd(cmd, output_info=False)

    if res != 0:
        success = False

    os.chdir(f'{rtt_root}/bsp/{bsp}')
    run_cmd('scons -c', output_info=False)

    pkg_dir = os.path.join(rtt_root, 'bsp', bsp, 'packages')
    shutil.rmtree(pkg_dir, ignore_errors=True)

    return success


def append_file(source_file, destination_file):
    """
    append one file to another file.
    """
    with open(source_file, 'r') as source:
        with open(destination_file, 'a') as destination:
            for line in source:
                destination.write(line)


def check_scons_args(file_path):
    args = []
    with open(file_path, 'r') as file:
        for line in file:
            match = re.search(r'#\s*scons:\s*(.*)', line)
            if match:
                args.append(match.group(1).strip())
    return ' '.join(args)


def build_bsp_attachconfig(bsp, attach_file):
    """
    build a bsp with an attach config.

    cp bsp/{bsp}/.config bsp/{bsp}/.config.origin
    cat .ci/attachconfig/{attach_file} >> bsp/{bsp}/.config

    build_bsp()

    cp bsp/{bsp}/.config.origin bsp/{bsp}/.config
    rm bsp/{bsp}/.config.origin

    """
    config_file = os.path.join(rtt_root, 'bsp', bsp, '.config')
    config_backup = config_file + '.origin'
    shutil.copyfile(config_file, config_backup)

    attachconfig_dir = os.path.join(rtt_root, 'bsp', bsp, '.ci/attachconfig')
    attach_path = os.path.join(attachconfig_dir, attach_file)

    append_file(attach_path, config_file)

    scons_args = check_scons_args(attach_path)

    res = build_bsp(bsp, scons_args)

    shutil.copyfile(config_backup, config_file)
    os.remove(config_backup)

    return res


if __name__ == "__main__":
    """
    build all bsps and their attach configs.

    1. build every bsp listed in SRTT_BSP.
    2. build every bsp again with each attach config.

    """
    failed = 0
    count = 0

    rtt_root = os.getcwd()
    srtt_bsp = os.getenv('SRTT_BSP').split(',')

    for bsp in srtt_bsp:
        count += 1
        print(f"::group::Compiling BSP: =={count}=== {bsp} ====")
        res = build_bsp(bsp)
        if not res:
            print(f"::error::build {bsp} failed")
            add_summary(f"- ❌ build {bsp} failed.")
            failed += 1
        else:
            add_summary(f'- ✅ build {bsp} success.')
        print("::endgroup::")

        attach_dir = os.path.join(rtt_root, 'bsp', bsp, '.ci/attachconfig')
        attach_list = []
        for root, dirs, files in os.walk(attach_dir):
            for file in files:
                file_path = os.path.join(root, file)
                relative_path = os.path.relpath(file_path, attach_dir)
                attach_list.append(relative_path)

        for attach_file in attach_list:
            count += 1
            print(f"::group::\tCompiling BSP: =={count}=== {bsp} {attach_file}===")
            res = build_bsp_attachconfig(bsp, attach_file)
            if not res:
                print(f"::error::build {bsp} {attach_file} failed.")
                add_summary(f'\t- ❌ build {attach_file} failed.')
                failed += 1
            else:
                add_summary(f'\t- ✅ build {attach_file} success.')
            print("::endgroup::")

    exit(failed)
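Note: the script is driven entirely by the SRTT_BSP environment variable and expects to be started from the rt-thread root, where the bsp/<name> directories live; scons and the pkgs tool must be on PATH. A hedged local-run sketch (the BSP name is a placeholder):

    import os, runpy

    os.environ['SRTT_BSP'] = 'qemu-vexpress-a9'   # comma-separated list of bsp/<name> entries
    runpy.run_path('tools/ci/bsp_buildings.py', run_name='__main__')  # exits with the number of failed builds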
98
rt-thread/tools/ci/compile_bsp_with_drivers.py
Normal file
@@ -0,0 +1,98 @@
#
# Copyright (c) 2006-2023, RT-Thread Development Team
#
# SPDX-License-Identifier: Apache-2.0
#
# Change Logs:
# Date           Author       Notes
# 2023-06-27     dejavudwh    the first version
#

import subprocess
import logging
import os

CONFIG_BSP_USING_X = ["CONFIG_BSP_USING_UART", "CONFIG_BSP_USING_I2C", "CONFIG_BSP_USING_SPI", "CONFIG_BSP_USING_ADC", "CONFIG_BSP_USING_DAC"]

def init_logger():
    log_format = "[%(filename)s %(lineno)d %(levelname)s] %(message)s "
    date_format = '%Y-%m-%d %H:%M:%S %a '
    logging.basicConfig(level=logging.INFO,
                        format=log_format,
                        datefmt=date_format,
                        )

def diff():
    result = subprocess.run(['git', 'diff', '--name-only', 'HEAD', 'origin/master', '--diff-filter=ACMR', '--no-renames', '--full-index'], stdout=subprocess.PIPE)
    file_list = result.stdout.decode().strip().split('\n')
    logging.info(file_list)
    bsp_paths = set()
    for file in file_list:
        if "bsp/" in file:
            logging.info("Modified file: {}".format(file))
            bsp_paths.add(file)

    dirs = set()
    for dir in bsp_paths:
        dir = os.path.dirname(dir)
        while "bsp/" in dir:
            files = os.listdir(dir)
            if ".config" in files and "rt-thread.elf" not in files and not dir.endswith("bsp"):
                logging.info("Found bsp path: {}".format(dir))
                dirs.add(dir)
                break
            new_dir = os.path.dirname(dir)
            dir = new_dir

    return dirs

def check_config_in_line(line):
    for config in CONFIG_BSP_USING_X:
        if config in line and '#' in line:
            logging.info("Found in {}".format(line))
            return config

    return ""

def check_config_in_file(file_path):
    configs = set()
    found = False
    try:
        with open(file_path, 'r') as file:
            for line in file:
                line = line.strip()
                if found:
                    res = check_config_in_line(line)
                    if res:
                        configs.add(res)
                elif "On-chip Peripheral Drivers" in line:
                    logging.info("Found On-chip Peripheral Drivers")
                    found = True
    except FileNotFoundError:
        logging.error("The .config file does not exist for this BSP, please recheck the file directory!")

    return configs

def modify_config(file_path, configs):
    with open(file_path + "/rtconfig.h", 'a') as file:
        for item in configs:
            define1 = item.replace("CONFIG_BSP", "BSP")
            define2 = item.replace("CONFIG_BSP", "RT")
            file.write("#define " + define1 + "\n")
            file.write("#define " + define2 + "\n")

def recompile_bsp(dir):
    logging.info("recompile bsp: {}".format(dir))
    os.system("scons -C " + dir)

if __name__ == '__main__':
    init_logger()
    recompile_bsp_dirs = diff()
    for dir in recompile_bsp_dirs:
        dot_config_path = dir + "/" + ".config"
        configs = check_config_in_file(dot_config_path)
        logging.info("add config:")
        logging.info(configs)
        logging.info("Add configurations and recompile!")
        modify_config(dir, configs)
        recompile_bsp(dir)
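Note: modify_config() only rewrites option names. For each commented-out driver option found below the "On-chip Peripheral Drivers" line of .config (typically a "# CONFIG_BSP_USING_XXX is not set" line), it appends two macros to the BSP's rtconfig.h. A small sketch of the transformation it performs:

    item = "CONFIG_BSP_USING_UART"               # as listed in CONFIG_BSP_USING_X
    print(item.replace("CONFIG_BSP", "BSP"))     # -> BSP_USING_UART
    print(item.replace("CONFIG_BSP", "RT"))      # -> RT_USING_UART
    # i.e. '#define BSP_USING_UART' and '#define RT_USING_UART' get appended to <bsp>/rtconfig.h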
81
rt-thread/tools/ci/cpp_check.py
Normal file
@@ -0,0 +1,81 @@
#
# Copyright (c) 2006-2023, RT-Thread Development Team
#
# SPDX-License-Identifier: Apache-2.0
#
# Change Logs:
# Date           Author       Notes
# 2023-05-16     dejavudwh    the first version
#

import click
import logging
import subprocess
import sys
import format_ignore

class CPPCheck:
    def __init__(self, file_list):
        self.file_list = file_list

    def check(self):
        file_list_filtered = [file for file in self.file_list if file.endswith(('.c', '.cpp', '.cc', '.cxx'))]
        logging.info("Start to static code analysis.")
        check_result = True
        for file in file_list_filtered:
            result = subprocess.run(
                [
                    'cppcheck',
                    '-DRT_ASSERT(x)=',
                    '-DRTM_EXPORT(x)=',
                    '-Drt_list_for_each_entry(a,b,c)=a=(void*)b;',
                    '-I include',
                    '-I thread/components/finsh',
                    # it's okay because CI will do the real compilation to check this
                    '--suppress=syntaxError',
                    '--enable=warning,performance,portability',
                    '--inline-suppr',
                    '--error-exitcode=1',
                    '--force',
                    file
                ],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            logging.info(result.stdout.decode())
            logging.info(result.stderr.decode())
            if result.stderr:
                check_result = False
        return check_result

@click.group()
@click.pass_context
def cli(ctx):
    pass

@cli.command()
def check():
    """
    static code analysis(cppcheck).
    """
    format_ignore.init_logger()
    # get modified files list
    checkout = format_ignore.CheckOut()
    file_list = checkout.get_new_file()
    if file_list is None:
        logging.error("checkout files fail")
        sys.exit(1)

    # use cppcheck
    cpp_check = CPPCheck(file_list)
    cpp_check_result = cpp_check.check()

    if not cpp_check_result:
        logging.error("static code analysis(cppcheck) fail.")
        sys.exit(1)
    logging.info("check success.")
    sys.exit(0)


if __name__ == '__main__':
    cli()
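Note: the CPPCheck class can also be exercised outside the click CLI; a minimal sketch (the file path is a placeholder and cppcheck must be installed locally):

    import logging
    from cpp_check import CPPCheck

    logging.basicConfig(level=logging.INFO)
    ok = CPPCheck(['bsp/some_board/drivers/drv_uart.c']).check()
    print('clean' if ok else 'cppcheck reported issues')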
292
rt-thread/tools/ci/file_check.py
Normal file
@@ -0,0 +1,292 @@
#
# Copyright (c) 2006-2022, RT-Thread Development Team
#
# SPDX-License-Identifier: Apache-2.0
#
# Change Logs:
# Date           Author       Notes
# 2021-04-01     LiuKang      the first version
#

import os
import re
import sys
import click
import yaml
import chardet
import logging
import datetime


def init_logger():
    log_format = "[%(filename)s %(lineno)d %(levelname)s] %(message)s "
    date_format = '%Y-%m-%d %H:%M:%S %a '
    logging.basicConfig(level=logging.INFO,
                        format=log_format,
                        datefmt=date_format,
                        )


class CheckOut:
    def __init__(self, rtt_repo, rtt_branch):
        self.root = os.getcwd()
        self.rtt_repo = rtt_repo
        self.rtt_branch = rtt_branch

    def __exclude_file(self, file_path):
        dir_number = file_path.split('/')
        ignore_path = file_path

        # gets the file path depth.
        for i in dir_number:
            # current directory.
            dir_name = os.path.dirname(ignore_path)
            ignore_path = dir_name
            # judge the ignore file exists in the current directory.
            ignore_file_path = os.path.join(dir_name, ".ignore_format.yml")
            if not os.path.exists(ignore_file_path):
                continue
            try:
                with open(ignore_file_path) as f:
                    ignore_config = yaml.safe_load(f.read())
                    file_ignore = ignore_config.get("file_path", [])
                    dir_ignore = ignore_config.get("dir_path", [])
            except Exception as e:
                logging.error(e)
                continue
            logging.debug("ignore file path: {}".format(ignore_file_path))
            logging.debug("file_ignore: {}".format(file_ignore))
            logging.debug("dir_ignore: {}".format(dir_ignore))
            try:
                # judge file_path in the ignore file.
                for file in file_ignore:
                    if file is not None:
                        file_real_path = os.path.join(dir_name, file)
                        if file_real_path == file_path:
                            logging.info("ignore file path: {}".format(file_real_path))
                            return 0

                file_dir_path = os.path.dirname(file_path)
                for _dir in dir_ignore:
                    if _dir is not None:
                        dir_real_path = os.path.join(dir_name, _dir)
                        if file_dir_path.startswith(dir_real_path):
                            logging.info("ignore dir path: {}".format(dir_real_path))
                            return 0
            except Exception as e:
                logging.error(e)
                continue

        return 1

    def get_new_file(self):
        file_list = list()
        try:
            os.system('git remote add rtt_repo {}'.format(self.rtt_repo))
            os.system('git fetch rtt_repo')
            os.system('git merge rtt_repo/{}'.format(self.rtt_branch))
            os.system('git reset rtt_repo/{} --soft'.format(self.rtt_branch))
            os.system('git status > git.txt')
        except Exception as e:
            logging.error(e)
            return None
        try:
            with open('git.txt', 'r') as f:
                file_lines = f.readlines()
        except Exception as e:
            logging.error(e)
            return None
        file_path = ''
        for line in file_lines:
            if 'new file' in line:
                file_path = line.split('new file:')[1].strip()
                logging.info('new file -> {}'.format(file_path))
            elif 'deleted' in line:
                logging.info('deleted file -> {}'.format(line.split('deleted:')[1].strip()))
            elif 'modified' in line:
                file_path = line.split('modified:')[1].strip()
                logging.info('modified file -> {}'.format(file_path))
            else:
                continue

            result = self.__exclude_file(file_path)
            if result != 0:
                file_list.append(file_path)

        return file_list


class FormatCheck:
    def __init__(self, file_list):
        self.file_list = file_list

    def __check_rt_errorcode(self, line):
        pattern = re.compile(r'return\s+(RT_ERROR|RT_ETIMEOUT|RT_EFULL|RT_EEMPTY|RT_ENOMEM|RT_ENOSYS|RT_EBUSY|RT_EIO|RT_EINTR|RT_EINVAL|RT_ENOENT|RT_ENOSPC|RT_EPERM|RT_ETRAP|RT_EFAULT)')
        match = pattern.search(line)
        if match:
            return False
        else:
            return True

    def __check_file(self, file_lines, file_path):
        line_num = 0
        check_result = True
        for line in file_lines:
            line_num += 1
            # check line start
            line_start = line.replace(' ', '')
            # find tab
            if line_start.startswith('\t'):
                logging.error("{} line[{}]: please use spaces instead of a tab at the start of this line.".format(file_path, line_num))
                check_result = False
            # check line end
            line_end = line.split('\n')[0]
            if line_end.endswith(' ') or line_end.endswith('\t'):
                logging.error("{} line[{}]: please delete the extra space at the end of this line.".format(file_path, line_num))
                check_result = False
            if self.__check_rt_errorcode(line) == False:
                logging.error("{} line[{}]: the RT-Thread error code should return a negative value, e.g. return -RT_ERROR".format(file_path, line_num))
                check_result = False
        return check_result

    def check(self):
        logging.info("Start to check files format.")
        if len(self.file_list) == 0:
            logging.warning("There are no files to check format.")
            return True
        encoding_check_result = True
        format_check_fail_files = 0
        for file_path in self.file_list:
            code = ''
            if file_path.endswith(".c") or file_path.endswith(".h"):
                try:
                    with open(file_path, 'rb') as f:
                        file = f.read()
                        # get file encoding
                        chardet_report = chardet.detect(file)
                        code = chardet_report['encoding']
                        confidence = chardet_report['confidence']
                except Exception as e:
                    logging.error(e)
            else:
                continue

            if code != 'utf-8' and code != 'ascii' and confidence > 0.8:
                logging.error("[{0}]: encoding {1} is not utf-8, please format it.".format(file_path, code))
                encoding_check_result = False
            else:
                logging.info('[{0}]: encoding check success.'.format(file_path))

            with open(file_path, 'r', encoding="utf-8") as f:
                file_lines = f.readlines()
            if not self.__check_file(file_lines, file_path):
                format_check_fail_files += 1

        if (not encoding_check_result) or (format_check_fail_files != 0):
            logging.error("files format check fail.")
            return False

        logging.info("files format check success.")

        return True


class LicenseCheck:
    def __init__(self, file_list):
        self.file_list = file_list

    def check(self):
        current_year = datetime.date.today().year
        logging.info("current year: {}".format(current_year))
        if len(self.file_list) == 0:
            logging.warning("There are no files to check license.")
            return 0
        logging.info("Start to check files license.")
        check_result = True
        for file_path in self.file_list:
            if file_path.endswith(".c") or file_path.endswith(".h"):
                try:
                    with open(file_path, 'r') as f:
                        file = f.readlines()
                except Exception as e:
                    logging.error(e)
            else:
                continue

            if 'Copyright' in file[1] and 'SPDX-License-Identifier: Apache-2.0' in file[3]:
                try:
                    license_year = re.search(r'2006-\d{4}', file[1]).group()
                    true_year = '2006-{}'.format(current_year)
                    if license_year != true_year:
                        logging.warning("[{0}]: license year: {1} does not match the expected {2}, please update.".format(file_path,
                                                                                                                           license_year,
                                                                                                                           true_year))

                    else:
                        logging.info("[{0}]: license check success.".format(file_path))
                except Exception as e:
                    logging.error(e)

            else:
                logging.error("[{0}]: license check fail.".format(file_path))
                check_result = False

        return check_result


@click.group()
@click.pass_context
def cli(ctx):
    pass


@cli.command()
@click.option(
    '--license',
    "check_license",
    required=False,
    type=click.BOOL,
    flag_value=True,
    help="Enable File license check.",
)
@click.argument(
    'repo',
    nargs=1,
    type=click.STRING,
    default='https://github.com/RT-Thread/rt-thread',
)
@click.argument(
    'branch',
    nargs=1,
    type=click.STRING,
    default='master',
)
def check(check_license, repo, branch):
    """
    check files license and format.
    """
    init_logger()
    # get modified files list
    checkout = CheckOut(repo, branch)
    file_list = checkout.get_new_file()
    if file_list is None:
        logging.error("checkout files fail")
        sys.exit(1)

    # check modified files format
    format_check = FormatCheck(file_list)
    format_check_result = format_check.check()
    license_check_result = True
    if check_license:
        license_check = LicenseCheck(file_list)
        license_check_result = license_check.check()

    if not format_check_result or not license_check_result:
        logging.error("file format check or license check fail.")
        sys.exit(1)
    logging.info("check success.")
    sys.exit(0)


if __name__ == '__main__':
    cli()
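Note: LicenseCheck only looks at two fixed positions, so a header passes when the Copyright line is the second line of the file and the SPDX tag is the fourth. A sketch of a passing header, expressed as the readlines() list the checker sees (the exact comment framing is an assumption; the year range is whatever '2006-<current year>' is at check time):

    header = [
        "/*\n",
        " * Copyright (c) 2006-2024, RT-Thread Development Team\n",   # file[1]: 'Copyright' + '2006-YYYY'
        " *\n",
        " * SPDX-License-Identifier: Apache-2.0\n",                   # file[3]: SPDX identifier
        " */\n",
    ]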
84
rt-thread/tools/ci/format_ignore.py
Normal file
@@ -0,0 +1,84 @@
#
# Copyright (c) 2006-2023, RT-Thread Development Team
#
# SPDX-License-Identifier: Apache-2.0
#
# Change Logs:
# Date           Author       Notes
# 2023-05-16     dejavudwh    the first version
#

import yaml
import logging
import os
import subprocess

def init_logger():
    log_format = "[%(filename)s %(lineno)d %(levelname)s] %(message)s "
    date_format = '%Y-%m-%d %H:%M:%S %a '
    logging.basicConfig(level=logging.INFO,
                        format=log_format,
                        datefmt=date_format,
                        )

class CheckOut:
    def __init__(self):
        pass

    def __exclude_file(self, file_path):
        dir_number = file_path.split('/')
        ignore_path = file_path

        # gets the file path depth.
        for i in dir_number:
            # current directory.
            dir_name = os.path.dirname(ignore_path)
            ignore_path = dir_name
            # judge the ignore file exists in the current directory.
            ignore_file_path = os.path.join(dir_name, ".ignore_format.yml")
            if not os.path.exists(ignore_file_path):
                continue
            try:
                with open(ignore_file_path) as f:
                    ignore_config = yaml.safe_load(f.read())
                    file_ignore = ignore_config.get("file_path", [])
                    dir_ignore = ignore_config.get("dir_path", [])
            except Exception as e:
                logging.error(e)
                continue
            logging.debug("ignore file path: {}".format(ignore_file_path))
            logging.debug("file_ignore: {}".format(file_ignore))
            logging.debug("dir_ignore: {}".format(dir_ignore))
            try:
                # judge file_path in the ignore file.
                for file in file_ignore:
                    if file is not None:
                        file_real_path = os.path.join(dir_name, file)
                        if file_real_path == file_path:
                            logging.info("ignore file path: {}".format(file_real_path))
                            return 0

                file_dir_path = os.path.dirname(file_path)
                for _dir in dir_ignore:
                    if _dir is not None:
                        dir_real_path = os.path.join(dir_name, _dir)
                        if file_dir_path.startswith(dir_real_path):
                            logging.info("ignore dir path: {}".format(dir_real_path))
                            return 0
            except Exception as e:
                logging.error(e)
                continue

        return 1

    def get_new_file(self):
        result = subprocess.run(['git', 'diff', '--name-only', 'HEAD', 'origin/master', '--diff-filter=ACMR', '--no-renames', '--full-index'], stdout=subprocess.PIPE)
        file_list = result.stdout.decode().strip().split('\n')
        new_files = []
        for line in file_list:
            logging.info("modified file -> {}".format(line))
            result = self.__exclude_file(line)
            if result != 0:
                new_files.append(line)

        return new_files
69
rt-thread/tools/clang-analyze.py
Normal file
@@ -0,0 +1,69 @@
"""
Tool-specific initialization for the Clang static analyzer

There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""

__revision__ = "tools/clang-analyze.py 2013-09-06 grissiom"

import os
import os.path

import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Tool
import SCons.Util

import rtconfig

def generate(env):
    assert(rtconfig.CROSS_TOOL == 'clang-analyze')
    # let gnu_tools setup a basic env (learnt from SCons/Tools/mingw.py)
    gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas', 'm4']
    for tool in gnu_tools:
        SCons.Tool.Tool(tool)(env)

    # then we could stand on the shoulders of giants
    env['CC'] = 'ccc-analyzer'
    env['CXX'] = 'c++-analyzer'
    env['AS'] = 'true'
    env['AR'] = 'true'
    env['LINK'] = 'true'

    env['CFLAGS'] = ['-fsyntax-only', '-Wall', '-Wno-invalid-source-encoding', '-m32']
    env['LINKFLAGS'] = '-Wl,--gc-sections'
    env['ARFLAGS'] = '-rc'

    # only check, don't compile. ccc-analyzer uses CCC_CC as the CC.
    # -fsyntax-only will give us some additional warning messages
    env['ENV']['CCC_CC'] = 'clang'
    env['ENV']['CCC_CXX'] = 'clang++'

    # setup the output dir and format
    env['ENV']['CCC_ANALYZER_HTML'] = './build/'
    env['ENV']['CCC_ANALYZER_OUTPUT_FORMAT'] = 'html'

    # Some settings from the platform also have to be overridden:
    env['OBJSUFFIX'] = '.o'
    env['LIBPREFIX'] = 'lib'
    env['LIBSUFFIX'] = '.a'

    if rtconfig.EXEC_PATH:
        if not os.path.exists(rtconfig.EXEC_PATH):
            print()
            print('warning: rtconfig.EXEC_PATH(%s) does not exist.' % rtconfig.EXEC_PATH)
            print()
            return
        env.AppendENVPath('PATH', rtconfig.EXEC_PATH)

def exists(env):
    return env.Detect(['ccc-analyzer', 'c++-analyzer'])

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
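Note: generate() is only usable when the build was configured for this tool; a sketch of the rtconfig values it relies on (the path is a placeholder):

    # rtconfig.py (excerpt)
    CROSS_TOOL = 'clang-analyze'   # generate() asserts exactly this value
    EXEC_PATH = '/usr/libexec'     # prepended to PATH so ccc-analyzer / c++-analyzer can be found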
221
rt-thread/tools/cmake.py
Normal file
@@ -0,0 +1,221 @@
"""
Utils for CMake
Author: https://github.com/klivelinux
"""

import os
import sys
import re
import utils
import rtconfig
from utils import _make_path_relative
from collections import defaultdict


def GenerateCFiles(env, project, project_name):
    """
    Generate CMakeLists.txt files
    """
    info = utils.ProjectInfo(env)

    PROJECT_NAME = project_name if project_name != "project" else "rtthread"

    tool_path_conv = defaultdict(lambda: {"name": "", "path": ""})
    tool_path_conv_helper = lambda tool: {"name": tool, "path": os.path.join(rtconfig.EXEC_PATH, tool).replace('\\', "/")}

    tool_path_conv["CMAKE_C_COMPILER"] = tool_path_conv_helper(rtconfig.CC)
    if 'CXX' in dir(rtconfig):
        tool_path_conv["CMAKE_CXX_COMPILER"] = tool_path_conv_helper(rtconfig.CXX)
    tool_path_conv["CMAKE_ASM_COMPILER"] = tool_path_conv_helper(rtconfig.AS)
    tool_path_conv["CMAKE_AR"] = tool_path_conv_helper(rtconfig.AR)
    tool_path_conv["CMAKE_LINKER"] = tool_path_conv_helper(rtconfig.LINK)
    if rtconfig.PLATFORM in ['gcc']:
        tool_path_conv["CMAKE_SIZE"] = tool_path_conv_helper(rtconfig.SIZE)
        tool_path_conv["CMAKE_OBJDUMP"] = tool_path_conv_helper(rtconfig.OBJDUMP)
        tool_path_conv["CMAKE_OBJCOPY"] = tool_path_conv_helper(rtconfig.OBJCPY)
    elif rtconfig.PLATFORM in ['armcc', 'armclang']:
        tool_path_conv["CMAKE_FROMELF"] = tool_path_conv_helper(rtconfig.FROMELF)

    CC = tool_path_conv["CMAKE_C_COMPILER"]["path"]
    CXX = tool_path_conv["CMAKE_CXX_COMPILER"]["path"]
    AS = tool_path_conv["CMAKE_ASM_COMPILER"]["path"]
    AR = tool_path_conv["CMAKE_AR"]["path"]
    LINK = tool_path_conv["CMAKE_LINKER"]["path"]
    SIZE = tool_path_conv["CMAKE_SIZE"]["path"]
    OBJDUMP = tool_path_conv["CMAKE_OBJDUMP"]["path"]
    OBJCOPY = tool_path_conv["CMAKE_OBJCOPY"]["path"]
    FROMELF = tool_path_conv["CMAKE_FROMELF"]["path"]

    CFLAGS = rtconfig.CFLAGS.replace('\\', "/").replace('\"', "\\\"")
    if 'CXXFLAGS' in dir(rtconfig):
        CXXFLAGS = rtconfig.CXXFLAGS.replace('\\', "/").replace('\"', "\\\"")
    else:
        CXXFLAGS = CFLAGS
    AFLAGS = rtconfig.AFLAGS.replace('\\', "/").replace('\"', "\\\"")
    LFLAGS = env['LINKFLAGS'].replace('\\', "/").replace('\"', "\\\"")

    POST_ACTION = rtconfig.POST_ACTION
    # replace the tool name with the cmake variable
    for cmake_var, each_tool in tool_path_conv.items():
        tool_name = each_tool['name']
        if tool_name == "": continue
        if "win32" in sys.platform:
            while f"{tool_name}.exe" in POST_ACTION:  # find the tool with the `.exe` suffix first
                POST_ACTION = POST_ACTION.replace(tool_name, "string_to_replace")
        while tool_name in POST_ACTION:
            POST_ACTION = POST_ACTION.replace(tool_name, "string_to_replace")
        while "string_to_replace" in POST_ACTION:
            POST_ACTION = POST_ACTION.replace("string_to_replace", f"${{{cmake_var}}}")
    # replace `$TARGET` with `${CMAKE_PROJECT_NAME}.elf`
    while "$TARGET" in POST_ACTION:
        POST_ACTION = POST_ACTION.replace("$TARGET", "${CMAKE_PROJECT_NAME}.elf")
    # add COMMAND before each command
    POST_ACTION = POST_ACTION.split('\n')
    POST_ACTION = [each_line.strip() for each_line in POST_ACTION]
    POST_ACTION = [f"\tCOMMAND {each_line}" for each_line in POST_ACTION if each_line != '']
    POST_ACTION = "\n".join(POST_ACTION)

    if "win32" in sys.platform:
        CC += ".exe"
        if CXX != '':
            CXX += ".exe"
        AS += ".exe"
        AR += ".exe"
        LINK += ".exe"
        if rtconfig.PLATFORM in ['gcc']:
            SIZE += ".exe"
            OBJDUMP += ".exe"
            OBJCOPY += ".exe"
        elif rtconfig.PLATFORM in ['armcc', 'armclang']:
            FROMELF += ".exe"

    if not os.path.exists(CC) or not os.path.exists(AS) or not os.path.exists(AR) or not os.path.exists(LINK):
        print("'Cannot find the toolchain directory, please check RTT_CC and RTT_EXEC_PATH'")
        sys.exit(-1)

    with open("CMakeLists.txt", "w") as cm_file:
        cm_file.write("CMAKE_MINIMUM_REQUIRED(VERSION 3.10)\n\n")

        cm_file.write("SET(CMAKE_SYSTEM_NAME Generic)\n")
        cm_file.write("SET(CMAKE_SYSTEM_PROCESSOR " + rtconfig.CPU + ")\n")
        cm_file.write("#SET(CMAKE_VERBOSE_MAKEFILE ON)\n\n")
        cm_file.write("SET(CMAKE_EXPORT_COMPILE_COMMANDS ON)\n\n")

        cm_file.write("SET(CMAKE_C_COMPILER \"" + CC + "\")\n")
        cm_file.write("SET(CMAKE_ASM_COMPILER \"" + AS + "\")\n")
        cm_file.write("SET(CMAKE_C_FLAGS \"" + CFLAGS + "\")\n")
        cm_file.write("SET(CMAKE_ASM_FLAGS \"" + AFLAGS + "\")\n")
        cm_file.write("SET(CMAKE_C_COMPILER_WORKS TRUE)\n\n")

        if CXX != '':
            cm_file.write("SET(CMAKE_CXX_COMPILER \"" + CXX + "\")\n")
            cm_file.write("SET(CMAKE_CXX_FLAGS \"" + CXXFLAGS + "\")\n")
            cm_file.write("SET(CMAKE_CXX_COMPILER_WORKS TRUE)\n\n")

        if rtconfig.PLATFORM in ['gcc']:
            cm_file.write("SET(CMAKE_OBJCOPY \"" + OBJCOPY + "\")\n")
            cm_file.write("SET(CMAKE_SIZE \"" + SIZE + "\")\n\n")
        elif rtconfig.PLATFORM in ['armcc', 'armclang']:
            cm_file.write("SET(CMAKE_FROMELF \"" + FROMELF + "\")\n\n")

        LINKER_FLAGS = ''
        LINKER_LIBS = ''
        if rtconfig.PLATFORM in ['gcc']:
            LINKER_FLAGS += '-T'
        elif rtconfig.PLATFORM in ['armcc', 'armclang']:
            LINKER_FLAGS += '--scatter'
        for group in project:
            if 'LIBPATH' in group.keys():
                for f in group['LIBPATH']:
                    LINKER_LIBS += ' --userlibpath ' + f.replace("\\", "/")
        for group in project:
            if 'LIBS' in group.keys():
                for f in group['LIBS']:
                    LINKER_LIBS += ' ' + f.replace("\\", "/") + '.lib'
        cm_file.write("SET(CMAKE_EXE_LINKER_FLAGS \"" + re.sub(LINKER_FLAGS + r'(\s*)', LINKER_FLAGS + ' ${CMAKE_SOURCE_DIR}/', LFLAGS) + LINKER_LIBS + "\")\n\n")

        # get the c/cpp standard version from the compilation flags
        # versions with a letter in the `-std` param are not supported yet
        pattern = re.compile(r'-std=[\w+]+')
        c_standard = 11
        if '-std=' in CFLAGS:
            c_standard = re.search(pattern, CFLAGS).group(0)
            c_standard = "".join([each for each in c_standard if each.isdigit()])
        else:
            print(f"Cannot find the param of the c standard in build flag, set to default {c_standard}")
        cm_file.write(f"SET(CMAKE_C_STANDARD {c_standard})\n")

        if CXX != '':
            cpp_standard = 17
            if '-std=' in CXXFLAGS:
                cpp_standard = re.search(pattern, CXXFLAGS).group(0)
                cpp_standard = "".join([each for each in cpp_standard if each.isdigit()])
            else:
                print(f"Cannot find the param of the cpp standard in build flag, set to default {cpp_standard}")
            cm_file.write(f"SET(CMAKE_CXX_STANDARD {cpp_standard})\n")

        cm_file.write('\n')

        cm_file.write(f"PROJECT({PROJECT_NAME} C {'CXX' if CXX != '' else ''} ASM)\n")

        cm_file.write('\n')

        cm_file.write("INCLUDE_DIRECTORIES(\n")
        for i in info['CPPPATH']:
            # use relative path
            path = _make_path_relative(os.getcwd(), i)
            cm_file.write("\t" + path.replace("\\", "/") + "\n")
        cm_file.write(")\n\n")

        cm_file.write("ADD_DEFINITIONS(\n")
        for i in info['CPPDEFINES']:
            cm_file.write("\t-D" + i + "\n")
        cm_file.write(")\n\n")

        cm_file.write("SET(PROJECT_SOURCES\n")
        for group in project:
            for f in group['src']:
                # use relative path
                path = _make_path_relative(os.getcwd(), os.path.normpath(f.rfile().abspath))
                cm_file.write("\t" + path.replace("\\", "/") + "\n")
        cm_file.write(")\n\n")

        if rtconfig.PLATFORM in ['gcc']:
            cm_file.write("LINK_DIRECTORIES(\n")
            for group in project:
                if 'LIBPATH' in group.keys():
                    for f in group['LIBPATH']:
                        cm_file.write("\t" + f.replace("\\", "/") + "\n")
            cm_file.write(")\n\n")

            cm_file.write("LINK_LIBRARIES(\n")
            for group in project:
                if 'LIBS' in group.keys():
                    for f in group['LIBS']:
                        cm_file.write("\t" + "{}\n".format(f.replace("\\", "/")))
            cm_file.write(")\n\n")

        cm_file.write("ADD_EXECUTABLE(${CMAKE_PROJECT_NAME}.elf ${PROJECT_SOURCES})\n")
        cm_file.write("ADD_CUSTOM_COMMAND(TARGET ${CMAKE_PROJECT_NAME}.elf POST_BUILD \n" + POST_ACTION + '\n)\n')

        # auto include `custom.cmake` for user custom settings
        custom_cmake = \
'''
# if custom.cmake exists, include it
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/custom.cmake)
    include(${CMAKE_CURRENT_SOURCE_DIR}/custom.cmake)
endif()
'''
        custom_cmake = custom_cmake.split('\n')
        custom_cmake = [each.strip() for each in custom_cmake]
        custom_cmake = "\n".join(custom_cmake)
        cm_file.write(custom_cmake)

    return


def CMakeProject(env, project, project_name):
    print('Update setting files for CMakeLists.txt...')
    GenerateCFiles(env, project, project_name)
    print('Done!')

    return
140
rt-thread/tools/codeblocks.py
Normal file
@@ -0,0 +1,140 @@
|
||||
#
# File : codeblocks.py
# This file is part of RT-Thread RTOS
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Change Logs:
# Date           Author       Notes
# 2015-01-20     Bernard      Add copyright information
#

import os
import sys
import string
import building

import xml.etree.ElementTree as etree
from xml.etree.ElementTree import SubElement
from utils import _make_path_relative
from utils import xml_indent

import utils

fs_encoding = sys.getfilesystemencoding()

def CB_AddHeadFiles(program, elem, project_path):
    utils.source_ext = []
    utils.source_ext = ["h"]
    for item in program:
        utils.walk_children(item)
    utils.source_list.sort()
    # print utils.source_list

    for f in utils.source_list:
        path = _make_path_relative(project_path, f)
        Unit = SubElement(elem, 'Unit')
        Unit.set('filename', path.decode(fs_encoding))

def CB_AddCFiles(ProjectFiles, parent, gname, files, project_path):
    for f in files:
        fn = f.rfile()
        name = fn.name
        path = os.path.dirname(fn.abspath)

        path = _make_path_relative(project_path, path)
        path = os.path.join(path, name)

        Unit = SubElement(parent, 'Unit')
        Unit.set('filename', path.decode(fs_encoding))
        Option = SubElement(Unit, 'Option')
        Option.set('compilerVar', "CC")

def CBProject(target, script, program):
    project_path = os.path.dirname(os.path.abspath(target))

    if os.path.isfile('template.cbp'):
        tree = etree.parse('template.cbp')
    else:
        tree = etree.parse(os.path.join(os.path.dirname(__file__), 'template.cbp'))

    root = tree.getroot()

    out = open(target, 'w')
    out.write('<?xml version="1.0" encoding="UTF-8" standalone="yes" ?>\n')

    ProjectFiles = []

    # SECTION 1. add "*.c|*.h" files group
    for elem in tree.iter(tag='Project'):
        # print elem.tag, elem.attrib
        break
    # add c files
    for group in script:
        group_xml = CB_AddCFiles(ProjectFiles, elem, group['name'], group['src'], project_path)
    # add h files
    CB_AddHeadFiles(program, elem, project_path)

    # SECTION 2.
    # write head include path
    if 'CPPPATH' in building.Env:
        cpp_path = building.Env['CPPPATH']
        paths = set()
        for path in cpp_path:
            inc = _make_path_relative(project_path, os.path.normpath(path))
            paths.add(inc) #.replace('\\', '/')

        paths = [i for i in paths]
        paths.sort()
        # write include path, definitions
        for elem in tree.iter(tag='Compiler'):
            break
        for path in paths:
            Add = SubElement(elem, 'Add')
            Add.set('directory', path)

        for macro in building.Env.get('CPPDEFINES', []):
            Add = SubElement(elem, 'Add')
            for d in macro:
                Add.set('option', "-D"+d)

    # write link flags
    '''
    # write lib dependence
    if 'LIBS' in building.Env:
        for elem in tree.iter(tag='Tool'):
            if elem.attrib['Name'] == 'VCLinkerTool':
                break
        libs_with_extention = [i+'.lib' for i in building.Env['LIBS']]
        libs = ' '.join(libs_with_extention)
        elem.set('AdditionalDependencies', libs)

    # write lib include path
    if 'LIBPATH' in building.Env:
        lib_path = building.Env['LIBPATH']
        paths = set()
        for path in lib_path:
            inc = _make_path_relative(project_path, os.path.normpath(path))
            paths.add(inc) #.replace('\\', '/')

        paths = [i for i in paths]
        paths.sort()
        lib_paths = ';'.join(paths)
        elem.set('AdditionalLibraryDirectories', lib_paths)
    '''
    xml_indent(root)
    out.write(etree.tostring(root, encoding='utf-8'))
    out.close()
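# Usage sketch (illustrative, not part of this commit): CBProject() takes the
# output .cbp path, the list of source groups, and the SCons program nodes.
# The wrapper name below is hypothetical.
def _example_codeblocks_target(program, project):
    from codeblocks import CBProject
    CBProject('project.cbp', project, program)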
215
rt-thread/tools/codelite.py
Normal file
@@ -0,0 +1,215 @@
|
||||
#
|
||||
# File : codelite.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2020, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2020-10-14 LiuMin Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import string
|
||||
import building
|
||||
import rtconfig
|
||||
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
|
||||
import utils
|
||||
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
def CLSetCFlags(root, flags):
|
||||
node = root.find('Settings').find('Configuration').find('Compiler')
|
||||
node.attrib['C_Options'] = flags
|
||||
|
||||
def CLSetCxxFlags(root, flags):
|
||||
node = root.find('Settings').find('Configuration').find('Compiler')
|
||||
node.attrib['Options'] = flags
|
||||
|
||||
def CLSetAsFlags(root, flags):
|
||||
node = root.find('Settings').find('Configuration').find('Compiler')
|
||||
node.attrib['Assembler'] = flags
|
||||
|
||||
def CLAddIncludePath(root, path):
|
||||
node = root.find('Settings').find('Configuration').find('Compiler')
|
||||
node = SubElement(node, 'IncludePath')
|
||||
node.attrib['Value'] = path
|
||||
|
||||
def CLAddPreprocessor(root, value):
|
||||
node = root.find('Settings').find('Configuration').find('Compiler')
|
||||
node = SubElement(node, 'Preprocessor')
|
||||
node.attrib['Value'] = value
|
||||
|
||||
|
||||
def CLSetLdFlags(root, flags):
|
||||
node = root.find('Settings').find('Configuration').find('Linker')
|
||||
node.attrib['Options'] = flags
|
||||
|
||||
def CLAddLibrary_path(root, path):
|
||||
node = root.find('Settings').find('Configuration').find('Linker')
|
||||
node = SubElement(node, 'LibraryPath')
|
||||
node.attrib['Value'] = path
|
||||
|
||||
def CLAddLibrary(root, lib):
|
||||
node = root.find('Settings').find('Configuration').find('Linker')
|
||||
node = SubElement(node, 'Library')
|
||||
node.attrib['Value'] = lib
|
||||
|
||||
def CLAddFile(root, file_path):
|
||||
file_path = file_path.replace('\\', '/')
|
||||
|
||||
dir_list = file_path.split('/')
|
||||
dir_list.pop()
|
||||
if not len(dir_list):
|
||||
dir_list.append(os.path.abspath('.').replace('\\', '/').split('/')[-1])
|
||||
|
||||
parent = root
|
||||
for dir_name in dir_list:
|
||||
if dir_name == '..':
|
||||
continue
|
||||
|
||||
node = None
|
||||
nodes = parent.findall('VirtualDirectory')
|
||||
for iter in nodes:
|
||||
if iter.attrib['Name'] == dir_name:
|
||||
node = iter
|
||||
break
|
||||
if node is None:
|
||||
node = SubElement(parent, 'VirtualDirectory')
|
||||
node.attrib['Name'] = dir_name
|
||||
parent = node
|
||||
|
||||
if parent != root:
|
||||
node = SubElement(parent, 'File')
|
||||
node.attrib['Name'] = file_path
|
||||
|
||||
def CLAddHeaderFiles(parent, program, project_path):
|
||||
utils.source_ext = []
|
||||
utils.source_ext = ["h"]
|
||||
for item in program:
|
||||
utils.walk_children(item)
|
||||
utils.source_list.sort()
|
||||
|
||||
for f in utils.source_list:
|
||||
path = _make_path_relative(project_path, f)
|
||||
CLAddFile(parent, path)
|
||||
|
||||
def CLAddCFiles(parent, files, project_path):
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
CLAddFile(parent, path)
|
||||
|
||||
|
||||
|
||||
def CLGenWorkspace(project_name, project_path):
|
||||
if os.path.isfile('codelite_template.workspace'):
|
||||
tree = etree.parse('codelite_template.workspace')
|
||||
else:
|
||||
tree = etree.parse(os.path.join(os.path.dirname(__file__), 'codelite_template.workspace'))
|
||||
|
||||
root = tree.getroot()
|
||||
root.attrib['Name'] = project_name
|
||||
|
||||
node = root.find('Project')
|
||||
node.attrib['Name'] = project_name
|
||||
node.attrib['Path'] = project_name + '.project'
|
||||
|
||||
node = root.find('BuildMatrix').find('WorkspaceConfiguration').find('Project')
|
||||
node.attrib['Name'] = project_name
|
||||
|
||||
out = open(project_name + '.workspace', 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8'))
|
||||
out.close()
|
||||
|
||||
def TargetCodelite(script, program):
|
||||
project_name = os.path.abspath('.').replace('\\', '/').split('/')[-1]
|
||||
#project_name.replace('-', '_')
|
||||
project_path = os.path.abspath('.')
|
||||
CLGenWorkspace(project_name, project_path)
|
||||
|
||||
if os.path.isfile('codelite_template.project'):
|
||||
tree = etree.parse('codelite_template.project')
|
||||
else:
|
||||
tree = etree.parse(os.path.join(os.path.dirname(__file__), 'codelite_template.project'))
|
||||
|
||||
root = tree.getroot()
|
||||
root.attrib['Name'] = project_name
|
||||
|
||||
out = open(project_name + '.project', 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
|
||||
# add files
|
||||
for group in script:
|
||||
CLAddCFiles(root, group['src'], project_path)
|
||||
# add header file
|
||||
CLAddHeaderFiles(root, program, project_path)
|
||||
|
||||
# SECTION 2.
|
||||
# write head include path
|
||||
|
||||
if 'CPPPATH' in building.Env:
|
||||
cpp_path = building.Env['CPPPATH']
|
||||
paths = set()
|
||||
for path in cpp_path:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc) #.replace('\\', '/')
|
||||
|
||||
paths = [i for i in paths]
|
||||
paths.sort()
|
||||
|
||||
# write include path, definitions
|
||||
for elem in tree.iter(tag='Compiler'):
|
||||
break
|
||||
|
||||
for path in paths:
|
||||
CLAddIncludePath(root, path)
|
||||
|
||||
|
||||
#print building.Env.get('LIBPATH', [])
|
||||
#print building.Env.get('LIBS', [])
|
||||
|
||||
CLSetCFlags(root, building.Env.get('CFLAGS', []))
|
||||
CLSetCxxFlags(root, building.Env.get('CFLAGS', []))
|
||||
|
||||
asflags = building.Env.get('ASFLAGS', [])
|
||||
asflags = asflags.replace('-ffunction-sections', '')
|
||||
asflags = asflags.replace('-fdata-sections', '')
|
||||
asflags = asflags.replace('-x', '')
|
||||
asflags = asflags.replace('-Wa,', '')
|
||||
asflags = asflags.replace('assembler-with-cpp', '')
|
||||
CLSetAsFlags(root, asflags)
|
||||
CLSetLdFlags(root, building.Env.get('LINKFLAGS', []))
|
||||
|
||||
for macro in building.Env.get('CPPDEFINES', []):
|
||||
for d in macro:
|
||||
CLAddPreprocessor(root, d)
|
||||
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8'))
|
||||
out.close()
|
61
rt-thread/tools/codelite_template.project
Normal file
@@ -0,0 +1,61 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<CodeLite_Project Name="project" Version="11000" InternalType="">
|
||||
<Description/>
|
||||
<Dependencies/>
|
||||
<Settings Type="Executable">
|
||||
<GlobalSettings>
|
||||
<Compiler Options="" C_Options="" Assembler="">
|
||||
<IncludePath Value="."/>
|
||||
</Compiler>
|
||||
<Linker Options="">
|
||||
<LibraryPath Value="."/>
|
||||
</Linker>
|
||||
<ResourceCompiler Options=""/>
|
||||
</GlobalSettings>
|
||||
<Configuration Name="Debug" CompilerType="Cross GCC ( arm-none-eabi )" DebuggerType="GNU gdb debugger" Type="Executable" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
|
||||
<Compiler Options="" C_Options="" Assembler="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" PCHFlags="" PCHFlagsPolicy="0">
|
||||
</Compiler>
|
||||
<Linker Options="" Required="yes">
|
||||
</Linker>
|
||||
<ResourceCompiler Options="" Required="no"/>
|
||||
<General OutputFile="$(IntermediateDirectory)/$(ProjectName).elf" IntermediateDirectory="$(ConfigurationName)" Command="$(OutputFile)" CommandArguments="" UseSeparateDebugArgs="no" DebugArguments="" WorkingDirectory="" PauseExecWhenProcTerminates="yes" IsGUIProgram="no" IsEnabled="yes"/>
|
||||
<BuildSystem Name="Default"/>
|
||||
<Environment EnvVarSetName="<Use Defaults>" DbgSetName="<Use Defaults>">
|
||||
<![CDATA[]]>
|
||||
</Environment>
|
||||
<Debugger IsRemote="yes" RemoteHostName="127.0.0.1" RemoteHostPort="2331" DebuggerPath="" IsExtended="no">
|
||||
<DebuggerSearchPaths/>
|
||||
<PostConnectCommands>monitor reset
|
||||
monitor halt
|
||||
load</PostConnectCommands>
|
||||
<StartupCommands/>
|
||||
</Debugger>
|
||||
<PreBuild/>
|
||||
<PostBuild>
|
||||
<Command Enabled="yes">arm-none-eabi-objcopy -O ihex $(IntermediateDirectory)/$(ProjectName).elf $(IntermediateDirectory)/$(ProjectName).hex</Command>
|
||||
<Command Enabled="yes">arm-none-eabi-objcopy -I ihex -O binary $(IntermediateDirectory)/$(ProjectName).hex $(IntermediateDirectory)/$(ProjectName).bin</Command>
|
||||
<Command Enabled="yes">arm-none-eabi-size $(IntermediateDirectory)/$(ProjectName).elf</Command>
|
||||
</PostBuild>
|
||||
<CustomBuild Enabled="no">
|
||||
<RebuildCommand/>
|
||||
<CleanCommand/>
|
||||
<BuildCommand/>
|
||||
<PreprocessFileCommand/>
|
||||
<SingleFileCommand/>
|
||||
<MakefileGenerationCommand/>
|
||||
<ThirdPartyToolName/>
|
||||
<WorkingDirectory/>
|
||||
</CustomBuild>
|
||||
<AdditionalRules>
|
||||
<CustomPostBuild/>
|
||||
<CustomPreBuild/>
|
||||
</AdditionalRules>
|
||||
<Completion EnableCpp11="no" EnableCpp14="no">
|
||||
<ClangCmpFlagsC/>
|
||||
<ClangCmpFlags/>
|
||||
<ClangPP/>
|
||||
<SearchPaths/>
|
||||
</Completion>
|
||||
</Configuration>
|
||||
</Settings>
|
||||
</CodeLite_Project>
|
10
rt-thread/tools/codelite_template.workspace
Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<CodeLite_Workspace Name="project" Database="" Version="10000">
  <Project Name="project" Path="project.project" Active="Yes"/>
  <BuildMatrix>
    <WorkspaceConfiguration Name="Debug" Selected="yes">
      <Environment/>
      <Project Name="project" ConfigName="Debug"/>
    </WorkspaceConfiguration>
  </BuildMatrix>
</CodeLite_Workspace>
61
rt-thread/tools/cscope.py
Normal file
@@ -0,0 +1,61 @@
#
# File : cscope.py
# This file is part of RT-Thread RTOS
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Change Logs:
# Date           Author       Notes
# 2015-01-20     Bernard      Add copyright information
#

import os

def _get_src(project):
    li = []
    for group in project:
        for f in group['src']:
            li.append(os.path.normpath(f.rfile().abspath))
    return li

def _get_header_dir(dirp):
    li = []
    for root, dirs, files in os.walk(dirp):
        for d in dirs:
            fpath = os.path.join(root, d)
            li.extend(_get_header_dir(fpath))

        for f in files:
            if f[-2:] == '.h':
                fpath = os.path.join(root, f)
                li.append(os.path.normpath(fpath))
    return li

def _get_header(project):
    li = []
    for g in project:
        for d in g.get('CPPPATH', []):
            li.extend(_get_header_dir(d))
    return li

def CscopeDatabase(project):
    files = set(_get_src(project) + _get_header(project))
    with open('cscope.files', 'w') as db:
        db.write('-k\n-q\n')
        db.write('\n'.join(files))
        db.flush() # let cscope see the full content
        os.system('cscope -b')
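# Illustrative note (not part of this commit): CscopeDatabase() produces a
# 'cscope.files' list in the form below ('-k' kernel mode, '-q' quick index,
# then one file path per line) before running 'cscope -b'. The two paths
# shown are hypothetical.
_example_cscope_files = (
    "-k\n"
    "-q\n"
    "/path/to/bsp/main.c\n"
    "/path/to/rt-thread/include/rtthread.h\n"
)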
582
rt-thread/tools/eclipse.py
Normal file
@@ -0,0 +1,582 @@
|
||||
#
|
||||
# Copyright (c) 2006-2022, RT-Thread Development Team
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2019-03-21 Bernard the first version
|
||||
# 2019-04-15 armink fix project update error
|
||||
#
|
||||
|
||||
import glob
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
|
||||
import rt_studio
|
||||
from building import *
|
||||
from utils import *
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
|
||||
MODULE_VER_NUM = 6
|
||||
|
||||
source_pattern = ['*.c', '*.cpp', '*.cxx', '*.s', '*.S', '*.asm','*.cmd']
|
||||
|
||||
|
||||
def OSPath(path):
|
||||
import platform
|
||||
|
||||
if type(path) == type('str'):
|
||||
if platform.system() == 'Windows':
|
||||
return path.replace('/', '\\')
|
||||
else:
|
||||
return path.replace('\\', '/')
|
||||
else:
|
||||
if platform.system() == 'Windows':
|
||||
return [item.replace('/', '\\') for item in path]
|
||||
else:
|
||||
return [item.replace('\\', '/') for item in path]
|
||||
|
||||
|
||||
# collect the build source code path and parent path
|
||||
def CollectPaths(paths):
|
||||
all_paths = []
|
||||
|
||||
def ParentPaths(path):
|
||||
ret = os.path.dirname(path)
|
||||
if ret == path or ret == '':
|
||||
return []
|
||||
|
||||
return [ret] + ParentPaths(ret)
|
||||
|
||||
for path in paths:
|
||||
# path = os.path.abspath(path)
|
||||
path = path.replace('\\', '/')
|
||||
all_paths = all_paths + [path] + ParentPaths(path)
|
||||
|
||||
cwd = os.getcwd()
|
||||
for path in os.listdir(cwd):
|
||||
temp_path = cwd.replace('\\', '/') + '/' + path
|
||||
if os.path.isdir(temp_path):
|
||||
all_paths = all_paths + [temp_path]
|
||||
|
||||
all_paths = list(set(all_paths))
|
||||
return sorted(all_paths)
|
||||
|
||||
|
||||
'''
|
||||
Collect all files under the given paths
|
||||
'''
|
||||
|
||||
|
||||
def CollectFiles(paths, pattern):
|
||||
files = []
|
||||
for path in paths:
|
||||
if type(pattern) == type(''):
|
||||
files = files + glob.glob(path + '/' + pattern)
|
||||
else:
|
||||
for item in pattern:
|
||||
# print('--> %s' % (path + '/' + item))
|
||||
files = files + glob.glob(path + '/' + item)
|
||||
|
||||
return sorted(files)
|
||||
|
||||
|
||||
def CollectAllFilesinPath(path, pattern):
|
||||
files = []
|
||||
|
||||
for item in pattern:
|
||||
files += glob.glob(path + '/' + item)
|
||||
|
||||
list = os.listdir(path)
|
||||
if len(list):
|
||||
for item in list:
|
||||
if item.startswith('.'):
|
||||
continue
|
||||
if item == 'bsp':
|
||||
continue
|
||||
|
||||
if os.path.isdir(os.path.join(path, item)):
|
||||
files = files + CollectAllFilesinPath(os.path.join(path, item), pattern)
|
||||
return files
|
||||
|
||||
|
||||
'''
|
||||
Exclude files from infiles
|
||||
'''
|
||||
|
||||
|
||||
def ExcludeFiles(infiles, files):
|
||||
in_files = set([OSPath(file) for file in infiles])
|
||||
exl_files = set([OSPath(file) for file in files])
|
||||
|
||||
exl_files = in_files - exl_files
|
||||
|
||||
return exl_files
|
||||
|
||||
|
||||
# calculate the exclude paths for the project
|
||||
def ExcludePaths(rootpath, paths):
|
||||
ret = []
|
||||
|
||||
files = os.listdir(OSPath(rootpath))
|
||||
for file in files:
|
||||
if file.startswith('.'):
|
||||
continue
|
||||
|
||||
fullname = os.path.join(OSPath(rootpath), file)
|
||||
|
||||
if os.path.isdir(fullname):
|
||||
# print(fullname)
|
||||
if not fullname in paths:
|
||||
ret = ret + [fullname]
|
||||
else:
|
||||
ret = ret + ExcludePaths(fullname, paths)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
rtt_path_prefix = '"${workspace_loc://${ProjName}//'
|
||||
|
||||
|
||||
def ConverToRttEclipsePathFormat(path):
|
||||
return rtt_path_prefix + path + '}"'
|
||||
|
||||
|
||||
def IsRttEclipsePathFormat(path):
|
||||
if path.startswith(rtt_path_prefix):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
# all libs added by scons should end with five spaces as a flag
|
||||
rtt_lib_flag = 5 * " "
|
||||
|
||||
|
||||
def ConverToRttEclipseLibFormat(lib):
|
||||
return str(lib) + str(rtt_lib_flag)
|
||||
|
||||
|
||||
def IsRttEclipseLibFormat(path):
|
||||
if path.endswith(rtt_lib_flag):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def IsCppProject():
|
||||
return GetDepend('RT_USING_CPLUSPLUS')
|
||||
|
||||
|
||||
def HandleToolOption(tools, env, project, reset):
|
||||
is_cpp_prj = IsCppProject()
|
||||
BSP_ROOT = os.path.abspath(env['BSP_ROOT'])
|
||||
|
||||
CPPDEFINES = project['CPPDEFINES']
|
||||
paths = [ConverToRttEclipsePathFormat(RelativeProjectPath(env, os.path.normpath(i)).replace('\\', '/')) for i in project['CPPPATH']]
|
||||
|
||||
compile_include_paths_options = []
|
||||
compile_include_files_options = []
|
||||
compile_defs_options = []
|
||||
linker_scriptfile_option = None
|
||||
linker_script_option = None
|
||||
linker_nostart_option = None
|
||||
linker_libs_option = None
|
||||
linker_paths_option = None
|
||||
|
||||
linker_newlib_nano_option = None
|
||||
|
||||
for tool in tools:
|
||||
|
||||
if tool.get('id').find('compile') != -1:
|
||||
options = tool.findall('option')
|
||||
# find all compile options
|
||||
for option in options:
|
||||
option_id = option.get('id')
|
||||
if ('compiler.include.paths' in option_id) or ('compiler.option.includepaths' in option_id) or ('compiler.tasking.include' in option_id):
|
||||
compile_include_paths_options += [option]
|
||||
elif option.get('id').find('compiler.include.files') != -1 or option.get('id').find('compiler.option.includefiles') != -1 :
|
||||
compile_include_files_options += [option]
|
||||
elif option.get('id').find('compiler.defs') != -1 or option.get('id').find('compiler.option.definedsymbols') != -1:
|
||||
compile_defs_options += [option]
|
||||
|
||||
if tool.get('id').find('linker') != -1:
|
||||
options = tool.findall('option')
|
||||
# find all linker options
|
||||
for option in options:
|
||||
# the project type and option type must equal
|
||||
if is_cpp_prj != (option.get('id').find('cpp.linker') != -1):
|
||||
continue
|
||||
|
||||
if option.get('id').find('linker.scriptfile') != -1:
|
||||
linker_scriptfile_option = option
|
||||
elif option.get('id').find('linker.option.script') != -1:
|
||||
linker_script_option = option
|
||||
elif option.get('id').find('linker.nostart') != -1:
|
||||
linker_nostart_option = option
|
||||
elif option.get('id').find('linker.libs') != -1:
|
||||
linker_libs_option = option
|
||||
elif option.get('id').find('linker.paths') != -1 and 'LIBPATH' in env:
|
||||
linker_paths_option = option
|
||||
elif option.get('id').find('linker.usenewlibnano') != -1:
|
||||
linker_newlib_nano_option = option
|
||||
|
||||
# change the include path
|
||||
for option in compile_include_paths_options:
|
||||
# find all of paths in this project
|
||||
include_paths = option.findall('listOptionValue')
|
||||
for item in include_paths:
|
||||
if reset is True or IsRttEclipsePathFormat(item.get('value')) :
|
||||
# clean old configuration
|
||||
option.remove(item)
|
||||
# print('c.compiler.include.paths')
|
||||
paths = sorted(paths)
|
||||
for item in paths:
|
||||
SubElement(option, 'listOptionValue', {'builtIn': 'false', 'value': item})
|
||||
# change the include files (default) or definitions
|
||||
for option in compile_include_files_options:
|
||||
# add '_REENT_SMALL' to CPPDEFINES when --specs=nano.specs is selected
|
||||
if linker_newlib_nano_option is not None and linker_newlib_nano_option.get('value') == 'true' and '_REENT_SMALL' not in CPPDEFINES:
|
||||
CPPDEFINES += ['_REENT_SMALL']
|
||||
|
||||
file_header = '''
|
||||
#ifndef RTCONFIG_PREINC_H__
|
||||
#define RTCONFIG_PREINC_H__
|
||||
|
||||
/* Automatically generated file; DO NOT EDIT. */
|
||||
/* RT-Thread pre-include file */
|
||||
|
||||
'''
|
||||
file_tail = '\n#endif /*RTCONFIG_PREINC_H__*/\n'
|
||||
rtt_pre_inc_item = '"${workspace_loc:/${ProjName}/rtconfig_preinc.h}"'
|
||||
# save the CPPDEFINES in to rtconfig_preinc.h
|
||||
with open('rtconfig_preinc.h', mode = 'w+') as f:
|
||||
f.write(file_header)
|
||||
for cppdef in CPPDEFINES:
|
||||
f.write("#define " + cppdef.replace('=', ' ') + '\n')
|
||||
f.write(file_tail)
|
||||
# change the c.compiler.include.files
|
||||
files = option.findall('listOptionValue')
|
||||
find_ok = False
|
||||
for item in files:
|
||||
if item.get('value') == rtt_pre_inc_item:
|
||||
find_ok = True
|
||||
break
|
||||
if find_ok is False:
|
||||
SubElement(option, 'listOptionValue', {'builtIn': 'false', 'value': rtt_pre_inc_item})
|
||||
if len(compile_include_files_options) == 0:
|
||||
for option in compile_defs_options:
|
||||
defs = option.findall('listOptionValue')
|
||||
project_defs = []
|
||||
for item in defs:
|
||||
if reset is True:
|
||||
# clean all old configuration
|
||||
option.remove(item)
|
||||
else:
|
||||
project_defs += [item.get('value')]
|
||||
if len(project_defs) > 0:
|
||||
cproject_defs = set(CPPDEFINES) - set(project_defs)
|
||||
else:
|
||||
cproject_defs = CPPDEFINES
|
||||
|
||||
# print('c.compiler.defs')
|
||||
cproject_defs = sorted(cproject_defs)
|
||||
for item in cproject_defs:
|
||||
SubElement(option, 'listOptionValue', {'builtIn': 'false', 'value': item})
|
||||
|
||||
# update linker script config
|
||||
if linker_scriptfile_option is not None :
|
||||
option = linker_scriptfile_option
|
||||
linker_script = 'link.lds'
|
||||
items = env['LINKFLAGS'].split(' ')
|
||||
if '-T' in items:
|
||||
linker_script = items[items.index('-T') + 1]
|
||||
linker_script = ConverToRttEclipsePathFormat(linker_script)
|
||||
|
||||
listOptionValue = option.find('listOptionValue')
|
||||
if listOptionValue != None:
|
||||
if reset is True or IsRttEclipsePathFormat(listOptionValue.get('value')):
|
||||
listOptionValue.set('value', linker_script)
|
||||
else:
|
||||
SubElement(option, 'listOptionValue', {'builtIn': 'false', 'value': linker_script})
|
||||
# scriptfile in stm32cubeIDE
|
||||
if linker_script_option is not None :
|
||||
option = linker_script_option
|
||||
items = env['LINKFLAGS'].split(' ')
|
||||
if '-T' in items:
|
||||
linker_script = ConverToRttEclipsePathFormat(items[items.index('-T') + 1]).strip('"')
|
||||
option.set('value', linker_script)
|
||||
# update nostartfiles config
|
||||
if linker_nostart_option is not None :
|
||||
option = linker_nostart_option
|
||||
if env['LINKFLAGS'].find('-nostartfiles') != -1:
|
||||
option.set('value', 'true')
|
||||
else:
|
||||
option.set('value', 'false')
|
||||
# update libs
|
||||
if linker_libs_option is not None:
|
||||
option = linker_libs_option
|
||||
# remove old libs
|
||||
for item in option.findall('listOptionValue'):
|
||||
if IsRttEclipseLibFormat(item.get("value")):
|
||||
option.remove(item)
|
||||
|
||||
# add new libs
|
||||
if 'LIBS' in env:
|
||||
for lib in env['LIBS']:
|
||||
lib_name = os.path.basename(str(lib))
|
||||
if lib_name.endswith('.a'):
|
||||
if lib_name.startswith('lib'):
|
||||
lib = lib_name[3:].split('.')[0]
|
||||
else:
|
||||
lib = ':' + lib_name
|
||||
formatedLib = ConverToRttEclipseLibFormat(lib)
|
||||
SubElement(option, 'listOptionValue', {
|
||||
'builtIn': 'false', 'value': formatedLib})
|
||||
|
||||
# update lib paths
|
||||
if linker_paths_option is not None:
|
||||
option = linker_paths_option
|
||||
# remove old lib paths
|
||||
for item in option.findall('listOptionValue'):
|
||||
if IsRttEclipsePathFormat(item.get('value')):
|
||||
# clean old configuration
|
||||
option.remove(item)
|
||||
# add new lib paths
|
||||
for path in env['LIBPATH']:
|
||||
SubElement(option, 'listOptionValue', {'builtIn': 'false', 'value': ConverToRttEclipsePathFormat(RelativeProjectPath(env, path).replace('\\', '/'))})
|
||||
|
||||
return
|
||||
|
||||
|
||||
def UpdateProjectStructure(env, prj_name):
|
||||
bsp_root = env['BSP_ROOT']
|
||||
rtt_root = env['RTT_ROOT']
|
||||
|
||||
project = etree.parse('.project')
|
||||
root = project.getroot()
|
||||
|
||||
if rtt_root.startswith(bsp_root):
|
||||
linkedResources = root.find('linkedResources')
|
||||
if linkedResources == None:
|
||||
linkedResources = SubElement(root, 'linkedResources')
|
||||
|
||||
links = linkedResources.findall('link')
|
||||
# delete all RT-Thread folder links
|
||||
for link in links:
|
||||
if link.find('name').text.startswith('rt-thread'):
|
||||
linkedResources.remove(link)
|
||||
|
||||
if prj_name:
|
||||
name = root.find('name')
|
||||
if name == None:
|
||||
name = SubElement(root, 'name')
|
||||
name.text = prj_name
|
||||
|
||||
out = open('.project', 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8').decode('utf-8'))
|
||||
out.close()
|
||||
|
||||
return
|
||||
|
||||
|
||||
def GenExcluding(env, project):
|
||||
rtt_root = os.path.abspath(env['RTT_ROOT'])
|
||||
bsp_root = os.path.abspath(env['BSP_ROOT'])
|
||||
coll_dirs = CollectPaths(project['DIRS'])
|
||||
all_paths_temp = [OSPath(path) for path in coll_dirs]
|
||||
all_paths = []
|
||||
|
||||
# add used path
|
||||
for path in all_paths_temp:
|
||||
if path.startswith(rtt_root) or path.startswith(bsp_root):
|
||||
all_paths.append(path)
|
||||
|
||||
if bsp_root.startswith(rtt_root):
|
||||
# bsp folder is in the RT-Thread root folder, such as the RT-Thread source code on GitHub
|
||||
exclude_paths = ExcludePaths(rtt_root, all_paths)
|
||||
elif rtt_root.startswith(bsp_root):
|
||||
# RT-Thread root folder is in the bsp folder, such as project folder which generate by 'scons --dist' cmd
|
||||
check_path = []
|
||||
exclude_paths = []
|
||||
# analyze the primary folder which relative to BSP_ROOT and in all_paths
|
||||
for path in all_paths:
|
||||
if path.startswith(bsp_root):
|
||||
folders = RelativeProjectPath(env, path).split('\\')
|
||||
if folders[0] != '.' and '\\' + folders[0] not in check_path:
|
||||
check_path += ['\\' + folders[0]]
|
||||
# exclude the folders which are already managed by scons
|
||||
for path in check_path:
|
||||
exclude_paths += ExcludePaths(bsp_root + path, all_paths)
|
||||
else:
|
||||
exclude_paths = ExcludePaths(rtt_root, all_paths)
|
||||
exclude_paths += ExcludePaths(bsp_root, all_paths)
|
||||
|
||||
paths = exclude_paths
|
||||
exclude_paths = []
|
||||
# drop folders that contain no source files (matched by source_pattern)
|
||||
for path in paths:
|
||||
# add bsp and libcpu folders without collecting their source files (too many files)
|
||||
if path.endswith('rt-thread\\bsp') or path.endswith('rt-thread\\libcpu'):
|
||||
exclude_paths += [path]
|
||||
continue
|
||||
|
||||
set = CollectAllFilesinPath(path, source_pattern)
|
||||
if len(set):
|
||||
exclude_paths += [path]
|
||||
|
||||
exclude_paths = [RelativeProjectPath(env, path).replace('\\', '/') for path in exclude_paths]
|
||||
|
||||
all_files = CollectFiles(all_paths, source_pattern)
|
||||
src_files = project['FILES']
|
||||
|
||||
exclude_files = ExcludeFiles(all_files, src_files)
|
||||
exclude_files = [RelativeProjectPath(env, file).replace('\\', '/') for file in exclude_files]
|
||||
|
||||
env['ExPaths'] = exclude_paths
|
||||
env['ExFiles'] = exclude_files
|
||||
|
||||
return exclude_paths + exclude_files
|
||||
|
||||
|
||||
def RelativeProjectPath(env, path):
|
||||
project_root = os.path.abspath(env['BSP_ROOT'])
|
||||
rtt_root = os.path.abspath(env['RTT_ROOT'])
|
||||
|
||||
if path.startswith(project_root):
|
||||
return _make_path_relative(project_root, path)
|
||||
|
||||
if path.startswith(rtt_root):
|
||||
return 'rt-thread/' + _make_path_relative(rtt_root, path)
|
||||
|
||||
# TODO add others folder
|
||||
print('ERROR: the ' + path + ' not support')
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def HandleExcludingOption(entry, sourceEntries, excluding):
|
||||
old_excluding = []
|
||||
if entry != None:
|
||||
exclud = entry.get('excluding')
|
||||
if exclud != None:
|
||||
old_excluding = entry.get('excluding').split('|')
|
||||
sourceEntries.remove(entry)
|
||||
|
||||
value = ''
|
||||
for item in old_excluding:
|
||||
if item.startswith('//'):
|
||||
old_excluding.remove(item)
|
||||
else:
|
||||
if value == '':
|
||||
value = item
|
||||
else:
|
||||
value += '|' + item
|
||||
|
||||
for item in excluding:
|
||||
# add special excluding path prefix for RT-Thread
|
||||
item = '//' + item
|
||||
if value == '':
|
||||
value = item
|
||||
else:
|
||||
value += '|' + item
|
||||
|
||||
SubElement(sourceEntries, 'entry', {'excluding': value, 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED', 'kind':'sourcePath', 'name':""})
|
||||
|
||||
|
||||
def UpdateCproject(env, project, excluding, reset, prj_name):
|
||||
excluding = sorted(excluding)
|
||||
|
||||
cproject = etree.parse('.cproject')
|
||||
|
||||
root = cproject.getroot()
|
||||
cconfigurations = root.findall('storageModule/cconfiguration')
|
||||
for cconfiguration in cconfigurations:
|
||||
tools = cconfiguration.findall('storageModule/configuration/folderInfo/toolChain/tool')
|
||||
HandleToolOption(tools, env, project, reset)
|
||||
|
||||
sourceEntries = cconfiguration.find('storageModule/configuration/sourceEntries')
|
||||
if sourceEntries != None:
|
||||
entry = sourceEntries.find('entry')
|
||||
HandleExcludingOption(entry, sourceEntries, excluding)
|
||||
# update refreshScope
|
||||
if prj_name:
|
||||
prj_name = '/' + prj_name
|
||||
configurations = root.findall('storageModule/configuration')
|
||||
for configuration in configurations:
|
||||
resource = configuration.find('resource')
|
||||
configuration.remove(resource)
|
||||
SubElement(configuration, 'resource', {'resourceType': "PROJECT", 'workspacePath': prj_name})
|
||||
|
||||
# write back to .cproject
|
||||
out = open('.cproject', 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8" standalone="no"?>\n')
|
||||
out.write('<?fileVersion 4.0.0?>')
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8').decode('utf-8'))
|
||||
out.close()
|
||||
|
||||
|
||||
def TargetEclipse(env, reset=False, prj_name=None):
|
||||
global source_pattern
|
||||
|
||||
print('Update eclipse setting...')
|
||||
|
||||
# generate cproject file
|
||||
if not os.path.exists('.cproject'):
|
||||
if rt_studio.gen_cproject_file(os.path.abspath(".cproject")) is False:
|
||||
print('Fail!')
|
||||
return
|
||||
|
||||
# generate project file
|
||||
if not os.path.exists('.project'):
|
||||
if rt_studio.gen_project_file(os.path.abspath(".project")) is False:
|
||||
print('Fail!')
|
||||
return
|
||||
|
||||
# generate projcfg.ini file
|
||||
if not os.path.exists('.settings/projcfg.ini'):
|
||||
# search for a file with the .uvprojx or .uvproj suffix
|
||||
file = ""
|
||||
items = os.listdir(".")
|
||||
if len(items) > 0:
|
||||
for item in items:
|
||||
if item.endswith(".uvprojx") or item.endswith(".uvproj"):
|
||||
file = os.path.abspath(item)
|
||||
break
|
||||
chip_name = rt_studio.get_mcu_info(file)
|
||||
if rt_studio.gen_projcfg_ini_file(chip_name, prj_name, os.path.abspath(".settings/projcfg.ini")) is False:
|
||||
print('Fail!')
|
||||
return
|
||||
|
||||
# enable lower-case .s files to be compiled in eclipse cdt
|
||||
if not os.path.exists('.settings/org.eclipse.core.runtime.prefs'):
|
||||
if rt_studio.gen_org_eclipse_core_runtime_prefs(
|
||||
os.path.abspath(".settings/org.eclipse.core.runtime.prefs")) is False:
|
||||
print('Fail!')
|
||||
return
|
||||
|
||||
# add clean2 target to fix issues when there are too many files
|
||||
if not os.path.exists('makefile.targets'):
|
||||
if rt_studio.gen_makefile_targets(os.path.abspath("makefile.targets")) is False:
|
||||
print('Fail!')
|
||||
return
|
||||
|
||||
project = ProjectInfo(env)
|
||||
|
||||
# update the project file structure info on '.project' file
|
||||
UpdateProjectStructure(env, prj_name)
|
||||
|
||||
# generate the exclude paths and files
|
||||
excluding = GenExcluding(env, project)
|
||||
|
||||
# update the project configuration on '.cproject' file
|
||||
UpdateCproject(env, project, excluding, reset, prj_name)
|
||||
|
||||
print('done!')
|
||||
|
||||
return
|
55
rt-thread/tools/env.py
Normal file
@@ -0,0 +1,55 @@
#! /usr/bin/env python
#coding=utf-8
#
# Copyright (c) 2024, RT-Thread Development Team
#
# SPDX-License-Identifier: GPL-2.0
#
# Change Logs:
# Date           Author       Notes
# 2024-04-20     Bernard      the first version

import os
import json
import platform

def GetEnvPath():
    env = os.environ.get('ENV_ROOT')

    if env is None:
        if platform.system() == 'Windows':
            return os.path.join(os.environ['USERPROFILE'], '.env')
        else:
            return os.path.join(os.environ['HOME'], '.env')

    return env

def GetSDKPackage():
    env = GetEnvPath()

    if env:
        return os.path.join(env, 'tools', 'packages')

    return None

def GetSDKPath(name):
    env = GetEnvPath()

    if env:
        # read packages.json under env/tools/packages
        with open(os.path.join(env, 'tools', 'packages', 'pkgs.json'), 'r', encoding='utf-8') as f:
            # packages_json = f.read()
            packages = json.load(f)

        for item in packages:
            package_path = os.path.join(GetEnvPath(), 'packages', item['path'], 'package.json')
            # read package['path']/package.json under env/packages
            with open(package_path, 'r', encoding='utf-8') as f:
                # package_json = f.read()
                package = json.load(f)

                if package['name'] == name:
                    return os.path.join(GetSDKPackage(), package['name'] + '-' + item['ver'])

    # the named package was not found
    return None
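# Usage sketch (illustrative, not part of this commit): resolving an SDK
# package installed under the env root. The package name below is only an
# example and may not exist in pkgs.json.
def _example_find_sdk():
    import env
    sdk_root = env.GetSDKPath('arm-none-eabi-gcc')
    if sdk_root:
        print('SDK found at: ' + sdk_root)
    else:
        print('SDK package not found under ' + str(env.GetSDKPackage()))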
58
rt-thread/tools/esp_idf.py
Normal file
@@ -0,0 +1,58 @@
import os
import re
import utils
from utils import _make_path_relative

def GenerateCFiles(env, project):
    """
    Generate CMakeLists.txt files
    """
    info = utils.ProjectInfo(env)
    init_export = []

    main_component_dir = os.path.join(os.getcwd(), 'main')
    cm_file = open(os.path.join(main_component_dir, 'CMakeLists.txt'), 'w')
    if cm_file:
        cm_file.write("idf_component_register(\n")

        cm_file.write("\tSRCS\n")
        for group in project:
            for f in group['src']:
                path = _make_path_relative(main_component_dir, os.path.normpath(f.rfile().abspath))
                cm_file.write("\t" + path.replace("\\", "/") + "\n")
                src = open(f.rfile().abspath, 'r')
                for line in src.readlines():
                    if re.match('INIT_(BOARD|PREV|DEVICE|COMPONENT|ENV|APP)_EXPORT\(.+\)', line):
                        init_export.append(re.search('\(.+\)', line).group(0)[1:-1])
                src.close()

        cm_file.write("\n")

        cm_file.write("\tINCLUDE_DIRS\n")
        for i in info['CPPPATH']:
            path = _make_path_relative(main_component_dir, i)
            cm_file.write("\t" + path.replace("\\", "/") + "\n")
        cm_file.write(")\n\n")

        n = len(init_export)
        if n:
            cm_file.write("target_link_libraries(${COMPONENT_LIB}\n")
            for i in range(n):
                cm_file.write("\tINTERFACE \"-u __rt_init_" + init_export[i] + "\"\n")
            cm_file.write(")\n")
        cm_file.close()

    cm_file = open('CMakeLists.txt', 'w')
    if cm_file:
        cm_file.write("cmake_minimum_required(VERSION 3.16)\n")
        cm_file.write("set(COMPONENTS esptool_py main)\n")
        cm_file.write("include($ENV{IDF_PATH}/tools/cmake/project.cmake)\n")
        freertos_root = os.getcwd().replace('\\', '/') + '/packages/FreeRTOS_Wrapper-latest/FreeRTOS'
        cm_file.write("set(freertos_root " + freertos_root + ')\n')
        cm_file.write("project(rtthread)\n")
        cm_file.close()

def ESPIDFProject(env, project):
    print('Update setting files for CMakeLists.txt...')
    GenerateCFiles(env, project)
    print('Done!')
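# Illustrative check (not part of this commit) of the INIT_*_EXPORT scan used
# in GenerateCFiles(): a matching source line yields the symbol that is later
# kept alive with "-u __rt_init_<symbol>". The sample line is hypothetical.
def _example_init_export_scan():
    import re
    line = 'INIT_BOARD_EXPORT(rti_board_end);'
    if re.match(r'INIT_(BOARD|PREV|DEVICE|COMPONENT|ENV|APP)_EXPORT\(.+\)', line):
        symbol = re.search(r'\(.+\)', line).group(0)[1:-1]
        return symbol  # -> 'rti_board_end'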
285
rt-thread/tools/gcc.py
Normal file
@@ -0,0 +1,285 @@
|
||||
#
|
||||
# File : gcc.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2018, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2018-05-22 Bernard The first version
|
||||
# 2023-11-03 idings return file path in GetHeader
|
||||
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
import subprocess
|
||||
|
||||
def GetGCCRoot(rtconfig):
|
||||
exec_path = rtconfig.EXEC_PATH
|
||||
prefix = rtconfig.PREFIX
|
||||
|
||||
if prefix.endswith('-'):
|
||||
prefix = prefix[:-1]
|
||||
|
||||
if exec_path == '/usr/bin':
|
||||
root_path = os.path.join('/usr/lib', prefix)
|
||||
else:
|
||||
root_path = os.path.join(exec_path, '..', prefix)
|
||||
|
||||
return root_path
|
||||
|
||||
# https://stackoverflow.com/questions/4980819/what-are-the-gcc-default-include-directories
|
||||
# https://stackoverflow.com/questions/53937211/how-can-i-parse-gcc-output-by-regex-to-get-default-include-paths
|
||||
def match_pattern(pattern, input, start = 0, stop = -1, flags = 0):
|
||||
length = len(input)
|
||||
|
||||
if length == 0:
|
||||
return None
|
||||
|
||||
end_it = max(0, length - 1)
|
||||
|
||||
if start >= end_it:
|
||||
return None
|
||||
|
||||
if stop<0:
|
||||
stop = length
|
||||
|
||||
if stop <= start:
|
||||
return None
|
||||
|
||||
for it in range(max(0, start), min(stop, length)):
|
||||
elem = input[it]
|
||||
match = re.match(pattern, elem, flags)
|
||||
if match:
|
||||
return it
|
||||
|
||||
def GetGccDefaultSearchDirs(rtconfig):
|
||||
start_pattern = r' *#include <\.\.\.> search starts here: *'
|
||||
end_pattern = r' *End of search list\. *'
|
||||
|
||||
gcc_cmd = os.path.join(rtconfig.EXEC_PATH, rtconfig.CC)
|
||||
device_flags = rtconfig.DEVICE.split()
|
||||
command = [gcc_cmd] + device_flags + ['-xc', '-E', '-v', os.devnull]
|
||||
|
||||
# if gcc_cmd is not accessible, return an empty list
|
||||
if not os.access(gcc_cmd, os.X_OK):
|
||||
return []
|
||||
|
||||
if(platform.system() == 'Windows'):
|
||||
child = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
else:
|
||||
child = subprocess.Popen(' '.join(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
|
||||
stdout = child.communicate()
|
||||
stdout_string = (b''.join(stdout)).decode()
|
||||
lines = stdout_string.splitlines()
|
||||
|
||||
start_it = match_pattern(start_pattern, lines)
|
||||
if start_it == None:
|
||||
return []
|
||||
|
||||
end_it = match_pattern(end_pattern, lines, start_it)
|
||||
if end_it == None:
|
||||
return []
|
||||
|
||||
# there are no paths between them
|
||||
if (end_it - start_it) == 1:
|
||||
return []
|
||||
|
||||
return lines[start_it + 1 : end_it]
|
||||
|
||||
def GetHeader(rtconfig, filename):
|
||||
include_dirs = GetGccDefaultSearchDirs(rtconfig)
|
||||
for directory in include_dirs:
|
||||
fn = os.path.join(directory, filename).strip()
|
||||
if os.path.isfile(fn):
|
||||
return fn
|
||||
|
||||
# fall back to the fixed method if autodetection fails
|
||||
root = GetGCCRoot(rtconfig)
|
||||
fn = os.path.join(root, 'include', filename)
|
||||
if os.path.isfile(fn):
|
||||
return fn
|
||||
|
||||
# Usually the cross compiling gcc toolchain has directory as:
|
||||
#
|
||||
# bin
|
||||
# lib
|
||||
# share
|
||||
# arm-none-eabi
|
||||
# bin
|
||||
# include
|
||||
# lib
|
||||
# share
|
||||
prefix = rtconfig.PREFIX
|
||||
if prefix.endswith('-'):
|
||||
prefix = prefix[:-1]
|
||||
|
||||
fn = os.path.join(root, prefix, 'include', filename)
|
||||
if os.path.isfile(fn):
|
||||
return fn
|
||||
|
||||
return None
|
||||
|
||||
# GCC like means the toolchains which are compatible with GCC
|
||||
def GetGCCLikePLATFORM():
|
||||
return ['gcc', 'armclang', 'llvm-arm']
|
||||
|
||||
def GetPicoLibcVersion(rtconfig):
|
||||
version = None
|
||||
try:
|
||||
rtconfig.PREFIX
|
||||
except:
|
||||
return version
|
||||
|
||||
# get version from picolibc.h
|
||||
fn = GetHeader(rtconfig, 'picolibc.h')
|
||||
|
||||
if fn:
|
||||
f = open(fn, 'r')
|
||||
if f:
|
||||
for line in f:
|
||||
if line.find('__PICOLIBC_VERSION__') != -1 and line.find('"') != -1:
|
||||
version = re.search(r'\"([^"]+)\"', line).groups()[0]
|
||||
f.close()
|
||||
|
||||
return version
|
||||
|
||||
def GetNewLibVersion(rtconfig):
|
||||
version = None
|
||||
|
||||
try:
|
||||
rtconfig.PREFIX
|
||||
except:
|
||||
return version
|
||||
|
||||
# if find picolibc.h, use picolibc
|
||||
fn = GetHeader(rtconfig, 'picolibc.h')
|
||||
if fn:
|
||||
return version
|
||||
|
||||
# get version from _newlib_version.h file
|
||||
fn = GetHeader(rtconfig, '_newlib_version.h')
|
||||
|
||||
# get version from newlib.h
|
||||
if not fn:
|
||||
fn = GetHeader(rtconfig, 'newlib.h')
|
||||
|
||||
if fn:
|
||||
f = open(fn, 'r')
|
||||
for line in f:
|
||||
if line.find('_NEWLIB_VERSION') != -1 and line.find('"') != -1:
|
||||
version = re.search(r'\"([^"]+)\"', line).groups()[0]
|
||||
f.close()
|
||||
|
||||
return version
|
||||
|
||||
# FIXME: there is no musl version or musl macros can be found officially
|
||||
def GetMuslVersion(rtconfig):
|
||||
version = None
|
||||
|
||||
try:
|
||||
rtconfig.PREFIX
|
||||
except:
|
||||
return version
|
||||
|
||||
if 'musl' in rtconfig.PREFIX:
|
||||
version = 'unknown'
|
||||
return version
|
||||
|
||||
def GCCResult(rtconfig, str):
|
||||
result = ''
|
||||
|
||||
def checkAndGetResult(pattern, string):
|
||||
if re.search(pattern, string):
|
||||
return re.search(pattern, string).group(0)
|
||||
return None
|
||||
|
||||
gcc_cmd = os.path.join(rtconfig.EXEC_PATH, rtconfig.CC)
|
||||
|
||||
# use temp file to get more information
|
||||
f = open('__tmp.c', 'w')
|
||||
if f:
|
||||
f.write(str)
|
||||
f.close()
|
||||
|
||||
# '-fdirectives-only',
|
||||
if(platform.system() == 'Windows'):
|
||||
child = subprocess.Popen([gcc_cmd, '-E', '-P', '__tmp.c'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
else:
|
||||
child = subprocess.Popen(gcc_cmd + ' -E -P __tmp.c', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
|
||||
stdout, stderr = child.communicate()
|
||||
|
||||
# print(stdout)
|
||||
if stderr != '' and stderr != b'':
|
||||
print(stderr)
|
||||
|
||||
have_fdset = 0
|
||||
have_sigaction = 0
|
||||
have_sigevent = 0
|
||||
have_siginfo = 0
|
||||
have_sigval = 0
|
||||
version = None
|
||||
stdc = '1989'
|
||||
posix_thread = 0
|
||||
|
||||
for line in stdout.split(b'\n'):
|
||||
line = line.decode()
|
||||
if re.search('fd_set', line):
|
||||
have_fdset = 1
|
||||
|
||||
# check for signal types
|
||||
if re.search('struct[ \t]+sigaction', line):
|
||||
have_sigaction = 1
|
||||
if re.search('struct[ \t]+sigevent', line):
|
||||
have_sigevent = 1
|
||||
if re.search('siginfo_t', line):
|
||||
have_siginfo = 1
|
||||
if re.search('union[ \t]+sigval', line):
|
||||
have_sigval = 1
|
||||
|
||||
if re.search(r'char\* version', line):
|
||||
version = re.search(r'"([^"]+)"', line).groups()[0]
|
||||
|
||||
if re.findall(r'iso_c_visible = \d+', line):
|
||||
stdc = re.findall(r'\d+', line)[0]
|
||||
|
||||
if re.findall('pthread_create', line):
|
||||
posix_thread = 1
|
||||
|
||||
if have_fdset:
|
||||
result += '#define HAVE_FDSET 1\n'
|
||||
|
||||
if have_sigaction:
|
||||
result += '#define HAVE_SIGACTION 1\n'
|
||||
if have_sigevent:
|
||||
result += '#define HAVE_SIGEVENT 1\n'
|
||||
if have_siginfo:
|
||||
result += '#define HAVE_SIGINFO 1\n'
|
||||
if have_sigval:
|
||||
result += '#define HAVE_SIGVAL 1\n'
|
||||
|
||||
if version:
|
||||
result += '#define GCC_VERSION_STR "%s"\n' % version
|
||||
|
||||
result += '#define STDC "%s"\n' % stdc
|
||||
|
||||
if posix_thread:
|
||||
result += '#define LIBC_POSIX_THREADS 1\n'
|
||||
|
||||
os.remove('__tmp.c')
|
||||
return result
|
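# Usage sketch (illustrative, not part of this commit): probing the toolchain
# C library through the helpers above; 'rtconfig' is the BSP's rtconfig.py
# module, passed in by the build scripts.
def _example_probe_libc(rtconfig):
    import gcc
    newlib = gcc.GetNewLibVersion(rtconfig)
    picolibc = gcc.GetPicoLibcVersion(rtconfig)
    print('newlib: %s, picolibc: %s' % (newlib, picolibc))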
214
rt-thread/tools/iar.py
Normal file
@@ -0,0 +1,214 @@
|
||||
#
|
||||
# File : iar.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import string
|
||||
import utils
|
||||
|
||||
from SCons.Script import *
|
||||
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
iar_workspace = '''<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
|
||||
<workspace>
|
||||
<project>
|
||||
<path>$WS_DIR$\%s</path>
|
||||
</project>
|
||||
<batchBuild/>
|
||||
</workspace>
|
||||
|
||||
|
||||
'''
|
||||
|
||||
def IARAddGroup(parent, name, files, project_path):
|
||||
group = SubElement(parent, 'group')
|
||||
group_name = SubElement(group, 'name')
|
||||
group_name.text = name
|
||||
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
basename = os.path.basename(path)
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
|
||||
file = SubElement(group, 'file')
|
||||
file_name = SubElement(file, 'name')
|
||||
|
||||
if os.path.isabs(path):
|
||||
file_name.text = path # path.decode(fs_encoding)
|
||||
else:
|
||||
file_name.text = '$PROJ_DIR$\\' + path # ('$PROJ_DIR$\\' + path).decode(fs_encoding)
|
||||
|
||||
def IARWorkspace(target):
|
||||
# make a workspace
|
||||
workspace = target.replace('.ewp', '.eww')
|
||||
out = open(workspace, 'w')
|
||||
xml = iar_workspace % target
|
||||
out.write(xml)
|
||||
out.close()
|
||||
|
||||
def IARProject(target, script):
|
||||
project_path = os.path.dirname(os.path.abspath(target))
|
||||
|
||||
tree = etree.parse('template.ewp')
|
||||
root = tree.getroot()
|
||||
|
||||
out = open(target, 'w')
|
||||
|
||||
CPPPATH = []
|
||||
CPPDEFINES = []
|
||||
LOCAL_CPPDEFINES = []
|
||||
LINKFLAGS = ''
|
||||
CFLAGS = ''
|
||||
Libs = []
|
||||
lib_prefix = ['lib', '']
|
||||
lib_suffix = ['.a', '.o', '']
|
||||
|
||||
def searchLib(group):
|
||||
for path_item in group['LIBPATH']:
|
||||
for prefix_item in lib_prefix:
|
||||
for suffix_item in lib_suffix:
|
||||
lib_full_path = os.path.join(path_item, prefix_item + item + suffix_item)
|
||||
if os.path.isfile(lib_full_path):
|
||||
return lib_full_path
|
||||
else:
|
||||
return ''
|
||||
|
||||
# add group
|
||||
for group in script:
|
||||
IARAddGroup(root, group['name'], group['src'], project_path)
|
||||
|
||||
# get each include path
|
||||
if 'CPPPATH' in group and group['CPPPATH']:
|
||||
CPPPATH += group['CPPPATH']
|
||||
|
||||
# get each group's definitions
|
||||
if 'CPPDEFINES' in group and group['CPPDEFINES']:
|
||||
CPPDEFINES += group['CPPDEFINES']
|
||||
|
||||
if 'LOCAL_CPPDEFINES' in group and group['LOCAL_CPPDEFINES']:
|
||||
LOCAL_CPPDEFINES += group['LOCAL_CPPDEFINES']
|
||||
|
||||
# get each group's link flags
|
||||
if 'LINKFLAGS' in group and group['LINKFLAGS']:
|
||||
LINKFLAGS += group['LINKFLAGS']
|
||||
|
||||
if 'LIBS' in group and group['LIBS']:
|
||||
for item in group['LIBS']:
|
||||
lib_path = searchLib(group)
|
||||
if lib_path != '':
|
||||
lib_path = _make_path_relative(project_path, lib_path)
|
||||
Libs += [lib_path]
|
||||
# print('found lib isfile: ' + lib_path)
|
||||
else:
|
||||
print('not found LIB: ' + item)
|
||||
|
||||
# make relative path
|
||||
paths = set()
|
||||
for path in CPPPATH:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc) #.replace('\\', '/')
|
||||
|
||||
# setting options
|
||||
options = tree.findall('configuration/settings/data/option')
|
||||
for option in options:
|
||||
# print option.text
|
||||
name = option.find('name')
|
||||
|
||||
if name.text == 'CCIncludePath2' or name.text == 'newCCIncludePaths':
|
||||
for path in paths:
|
||||
state = SubElement(option, 'state')
|
||||
if os.path.isabs(path) or path.startswith('$'):
|
||||
state.text = path
|
||||
else:
|
||||
state.text = '$PROJ_DIR$\\' + path
|
||||
|
||||
if name.text == 'CCDefines':
|
||||
for define in CPPDEFINES:
|
||||
state = SubElement(option, 'state')
|
||||
state.text = define
|
||||
|
||||
for define in LOCAL_CPPDEFINES:
|
||||
state = SubElement(option, 'state')
|
||||
state.text = define
|
||||
|
||||
if name.text == 'IlinkAdditionalLibs':
|
||||
for path in Libs:
|
||||
state = SubElement(option, 'state')
|
||||
if os.path.isabs(path) or path.startswith('$'):
|
||||
path = path.decode(fs_encoding)
|
||||
else:
|
||||
path = ('$PROJ_DIR$\\' + path).decode(fs_encoding)
|
||||
state.text = path
|
||||
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8').decode())
|
||||
out.close()
|
||||
|
||||
IARWorkspace(target)
|
||||
|
||||
def IARPath():
|
||||
import rtconfig
|
||||
|
||||
# backup environ
|
||||
old_environ = os.environ
|
||||
os.environ['RTT_CC'] = 'iar'
|
||||
utils.ReloadModule(rtconfig)
|
||||
|
||||
# get iar path
|
||||
path = rtconfig.EXEC_PATH
|
||||
|
||||
# restore environ
|
||||
os.environ = old_environ
|
||||
utils.ReloadModule(rtconfig)
|
||||
|
||||
return path
|
||||
|
||||
def IARVersion():
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
path = IARPath()
|
||||
|
||||
if os.path.exists(path):
|
||||
cmd = os.path.join(path, 'iccarm.exe')
|
||||
else:
|
||||
return "0.0"
|
||||
|
||||
child = subprocess.Popen([cmd, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
stdout, stderr = child.communicate()
|
||||
if not isinstance(stdout, str):
|
||||
stdout = str(stdout, 'utf8') # Patch for Python 3
|
||||
# example stdout: IAR ANSI C/C++ Compiler V8.20.1.14183/W32 for ARM
|
||||
iar_version = re.search('[\d\.]+', stdout).group(0)
|
||||
return iar_version
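# A hedged usage sketch (not part of the original file; the module name 'iar' is assumed
# for illustration). IARVersion() falls back to "0.0" when iccarm.exe cannot be located
# under the path reported by rtconfig.EXEC_PATH:
#
#     import iar
#     version = iar.IARVersion()
#     if version != "0.0":
#         print('IAR compiler detected, version ' + version)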
|
191
rt-thread/tools/install_env.py
Normal file
@@ -0,0 +1,191 @@
|
||||
#! /usr/bin/env python
|
||||
#coding=utf-8
|
||||
|
||||
#
|
||||
# File : env.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2024, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2024-04-05 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import platform
|
||||
import shutil
|
||||
import zipfile
|
||||
|
||||
PKG_URL = 'https://github.com/RT-Thread/packages.git'
|
||||
SDK_URL = 'https://github.com/RT-Thread/sdk.git'
|
||||
ENV_URL = 'https://github.com/RT-Thread/env.git'
|
||||
|
||||
def help_info():
|
||||
print("**********************************************************************************\n"
|
||||
"* Help infomation:\n"
|
||||
"* Git tool install step.\n"
|
||||
"* If your system is linux, you can use command below to install git.\n"
|
||||
"* $ sudo yum install git\n"
|
||||
"* $ sudo apt-get install git\n"
|
||||
"* If your system is windows, you should download git software(msysGit).\n"
|
||||
"* Download path: http://git-scm.com/download/win\n"
|
||||
"* After you install it, be sure to add the git command execution PATH \n"
|
||||
"* to your system PATH.\n"
|
||||
"* Usually, git command PATH is $YOUR_INSTALL_DIR\\Git\\bin\n"
|
||||
"* If your system is OSX, please download git and install it.\n"
|
||||
"* Download path: http://git-scm.com/download/mac\n"
|
||||
"**********************************************************************************\n")
|
||||
|
||||
def touch_env():
|
||||
if sys.platform != 'win32':
|
||||
home_dir = os.environ['HOME']
|
||||
else:
|
||||
home_dir = os.environ['USERPROFILE']
|
||||
|
||||
pkg_url = os.getenv('RTT_PACKAGE_URL') or PKG_URL
|
||||
sdk_url = os.getenv('RTT_SDK_URL') or SDK_URL
|
||||
env_url = os.getenv('RTT_ENV_URL') or ENV_URL
|
||||
if len(sys.argv) == 2 and sys.argv[1] == '--gitee':
|
||||
# "Install RT-Thread Environment from Gitee"
|
||||
sdk_url = 'https://gitee.com/RT-Thread-Mirror/sdk.git'
|
||||
pkg_url = 'https://gitee.com/RT-Thread-Mirror/packages.git'
|
||||
env_url = 'https://gitee.com/RT-Thread-Mirror/env.git'
|
||||
|
||||
# make .env and other directories
|
||||
env_dir = os.path.join(home_dir, '.env')
|
||||
if not os.path.exists(env_dir):
|
||||
os.mkdir(env_dir)
|
||||
os.mkdir(os.path.join(env_dir, 'local_pkgs'))
|
||||
os.mkdir(os.path.join(env_dir, 'packages'))
|
||||
os.mkdir(os.path.join(env_dir, 'tools'))
|
||||
kconfig = open(os.path.join(env_dir, 'packages', 'Kconfig'), 'w')
|
||||
kconfig.close()
|
||||
|
||||
# git clone packages
|
||||
if not os.path.exists(os.path.join(env_dir, 'packages', 'packages')):
|
||||
try:
|
||||
ret = os.system('git clone %s %s' % (pkg_url, os.path.join(env_dir, 'packages', 'packages')))
|
||||
if ret != 0:
|
||||
shutil.rmtree(os.path.join(env_dir, 'packages', 'packages'))
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/packages.git\".\n"
|
||||
"* This error may have been caused by not found a git tool or network error.\n"
|
||||
"* If the git tool is not installed, install the git tool first.\n"
|
||||
"* If the git utility is installed, check whether the git command is added to \n"
|
||||
"* the system PATH.\n"
|
||||
"* This error may cause the RT-Thread packages to not work properly.\n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
else:
|
||||
kconfig = open(os.path.join(env_dir, 'packages', 'Kconfig'), 'w')
|
||||
kconfig.write('source "$PKGS_DIR/packages/Kconfig"')
|
||||
kconfig.close()
|
||||
except:
|
||||
print("**********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/packages.git\". \n"
|
||||
"* This error may have been caused by not found a git tool or git tool not in \n"
|
||||
"* the system PATH. \n"
|
||||
"* This error may cause the RT-Thread packages to not work properly. \n"
|
||||
"**********************************************************************************\n")
|
||||
help_info()
|
||||
|
||||
# git clone env scripts
|
||||
if not os.path.exists(os.path.join(env_dir, 'tools', 'scripts')):
|
||||
try:
|
||||
ret = os.system('git clone %s %s' % (env_url, os.path.join(env_dir, 'tools', 'scripts')))
|
||||
if ret != 0:
|
||||
shutil.rmtree(os.path.join(env_dir, 'tools', 'scripts'))
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/env.git\".\n"
|
||||
"* This error may have been caused by not found a git tool or network error.\n"
|
||||
"* If the git tool is not installed, install the git tool first.\n"
|
||||
"* If the git utility is installed, check whether the git command is added \n"
|
||||
"* to the system PATH.\n"
|
||||
"* This error may cause script tools to fail to work properly.\n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
except:
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/env.git\". \n"
|
||||
"* This error may have been caused by not found a git tool or git tool not in \n"
|
||||
"* the system PATH. \n"
|
||||
"* This error may cause script tools to fail to work properly. \n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
|
||||
# git clone sdk
|
||||
if not os.path.exists(os.path.join(env_dir, 'packages', 'sdk')):
|
||||
try:
|
||||
ret = os.system('git clone %s %s' % (sdk_url, os.path.join(env_dir, 'packages', 'sdk')))
|
||||
if ret != 0:
|
||||
shutil.rmtree(os.path.join(env_dir, 'packages', 'sdk'))
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/sdk.git\".\n"
|
||||
"* This error may have been caused by not found a git tool or network error.\n"
|
||||
"* If the git tool is not installed, install the git tool first.\n"
|
||||
"* If the git utility is installed, check whether the git command is added \n"
|
||||
"* to the system PATH.\n"
|
||||
"* This error may cause the RT-Thread SDK to not work properly.\n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
except:
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"https://github.com/RT-Thread/sdk.git\".\n"
|
||||
"* This error may have been caused by not found a git tool or git tool not in \n"
|
||||
"* the system PATH. \n"
|
||||
"* This error may cause the RT-Thread SDK to not work properly. \n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
|
||||
# try to create an empty .config file
|
||||
if not os.path.exists(os.path.join(env_dir, 'tools', '.config')):
|
||||
kconfig = open(os.path.join(env_dir, 'tools', '.config'), 'w')
|
||||
kconfig.close()
|
||||
|
||||
# copy env.sh or env.ps1, Kconfig
|
||||
shutil.copy(os.path.join(env_dir, 'tools', 'scripts', 'Kconfig'), os.path.join(home_dir, '.env', 'tools'))
|
||||
if sys.platform != 'win32':
|
||||
shutil.copy(os.path.join(env_dir, 'tools', 'scripts', 'env.sh'), os.path.join(home_dir, '.env', 'env.sh'))
|
||||
else:
|
||||
shutil.copy(os.path.join(env_dir, 'tools', 'scripts', 'env.ps1'), os.path.join(home_dir, '.env', 'env.ps1'))
|
||||
# unzip kconfig-mconf.zip
|
||||
zip_file = os.path.join(env_dir, 'tools', 'scripts', 'kconfig-mconf.zip')
|
||||
if os.path.exists(zip_file):
|
||||
zip_file_dir = os.path.join(env_dir, 'tools', 'bin')
|
||||
if os.path.exists(zip_file_dir):
|
||||
shutil.rmtree(zip_file_dir)
|
||||
zip_file_obj = zipfile.ZipFile(zip_file, 'r')
|
||||
for file in zip_file_obj.namelist():
|
||||
zip_file_obj.extract(file, zip_file_dir)
|
||||
zip_file_obj.close()
|
||||
|
||||
def main():
|
||||
print("Install RT-Thread Environment")
|
||||
|
||||
touch_env()
|
||||
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
497
rt-thread/tools/keil.py
Normal file
@@ -0,0 +1,497 @@
|
||||
#
|
||||
# File : keil.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import string
|
||||
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
def _get_filetype(fn):
|
||||
if fn.rfind('.cpp') != -1 or fn.rfind('.cxx') != -1:
|
||||
return 8
|
||||
|
||||
if fn.rfind('.c') != -1 or fn.rfind('.C') != -1:
|
||||
return 1
|
||||
|
||||
# assemble file type
|
||||
if fn.rfind('.s') != -1 or fn.rfind('.S') != -1:
|
||||
return 2
|
||||
|
||||
# header type
|
||||
if fn.rfind('.h') != -1:
|
||||
return 5
|
||||
|
||||
if fn.rfind('.lib') != -1:
|
||||
return 4
|
||||
|
||||
if fn.rfind('.o') != -1:
|
||||
return 3
|
||||
|
||||
# other filetype
|
||||
return 5
|
||||
|
||||
def MDK4AddGroupForFN(ProjectFiles, parent, name, filename, project_path):
|
||||
group = SubElement(parent, 'Group')
|
||||
group_name = SubElement(group, 'GroupName')
|
||||
group_name.text = name
|
||||
|
||||
name = os.path.basename(filename)
|
||||
path = os.path.dirname (filename)
|
||||
|
||||
basename = os.path.basename(path)
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
files = SubElement(group, 'Files')
|
||||
file = SubElement(files, 'File')
|
||||
file_name = SubElement(file, 'FileName')
|
||||
name = os.path.basename(path)
|
||||
|
||||
if name.find('.cpp') != -1:
|
||||
obj_name = name.replace('.cpp', '.o')
|
||||
elif name.find('.c') != -1:
|
||||
obj_name = name.replace('.c', '.o')
|
||||
elif name.find('.s') != -1:
|
||||
obj_name = name.replace('.s', '.o')
|
||||
elif name.find('.S') != -1:
|
||||
obj_name = name.replace('.S', '.o')
|
||||
else:
|
||||
obj_name = name
|
||||
|
||||
if ProjectFiles.count(obj_name):
|
||||
name = basename + '_' + name
|
||||
ProjectFiles.append(obj_name)
|
||||
try: # python 2
|
||||
file_name.text = name.decode(fs_encoding)
|
||||
except: # python 3
|
||||
file_name.text = name
|
||||
file_type = SubElement(file, 'FileType')
|
||||
file_type.text = '%d' % _get_filetype(name)
|
||||
file_path = SubElement(file, 'FilePath')
|
||||
try: # python 2
|
||||
file_path.text = path.decode(fs_encoding)
|
||||
except: # python 3
|
||||
file_path.text = path
|
||||
|
||||
|
||||
return group
|
||||
|
||||
def MDK4AddLibToGroup(ProjectFiles, group, name, filename, project_path):
|
||||
name = os.path.basename(filename)
|
||||
path = os.path.dirname (filename)
|
||||
|
||||
basename = os.path.basename(path)
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
files = SubElement(group, 'Files')
|
||||
file = SubElement(files, 'File')
|
||||
file_name = SubElement(file, 'FileName')
|
||||
name = os.path.basename(path)
|
||||
|
||||
if name.find('.cpp') != -1:
|
||||
obj_name = name.replace('.cpp', '.o')
|
||||
elif name.find('.c') != -1:
|
||||
obj_name = name.replace('.c', '.o')
|
||||
elif name.find('.s') != -1:
|
||||
obj_name = name.replace('.s', '.o')
|
||||
elif name.find('.S') != -1:
|
||||
obj_name = name.replace('.S', '.o')
|
||||
else:
|
||||
obj_name = name
|
||||
|
||||
if ProjectFiles.count(obj_name):
|
||||
name = basename + '_' + name
|
||||
ProjectFiles.append(obj_name)
|
||||
try:
|
||||
file_name.text = name.decode(fs_encoding)
|
||||
except:
|
||||
file_name.text = name
|
||||
file_type = SubElement(file, 'FileType')
|
||||
file_type.text = '%d' % _get_filetype(name)
|
||||
file_path = SubElement(file, 'FilePath')
|
||||
|
||||
try:
|
||||
file_path.text = path.decode(fs_encoding)
|
||||
except:
|
||||
file_path.text = path
|
||||
|
||||
return group
|
||||
|
||||
def MDK4AddGroup(ProjectFiles, parent, name, files, project_path, group_scons):
|
||||
# don't add an empty group
|
||||
if len(files) == 0:
|
||||
return
|
||||
|
||||
group = SubElement(parent, 'Group')
|
||||
group_name = SubElement(group, 'GroupName')
|
||||
group_name.text = name
|
||||
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
|
||||
basename = os.path.basename(path)
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
|
||||
files = SubElement(group, 'Files')
|
||||
file = SubElement(files, 'File')
|
||||
file_name = SubElement(file, 'FileName')
|
||||
name = os.path.basename(path)
|
||||
|
||||
if name.find('.cpp') != -1:
|
||||
obj_name = name.replace('.cpp', '.o')
|
||||
elif name.find('.c') != -1:
|
||||
obj_name = name.replace('.c', '.o')
|
||||
elif name.find('.s') != -1:
|
||||
obj_name = name.replace('.s', '.o')
|
||||
elif name.find('.S') != -1:
|
||||
obj_name = name.replace('.S', '.o')
else:
obj_name = name # fall back for other file types, matching MDK4AddGroupForFN/MDK4AddLibToGroup
|
||||
|
||||
if ProjectFiles.count(obj_name):
|
||||
name = basename + '_' + name
|
||||
ProjectFiles.append(obj_name)
|
||||
file_name.text = name # name.decode(fs_encoding)
|
||||
file_type = SubElement(file, 'FileType')
|
||||
file_type.text = '%d' % _get_filetype(name)
|
||||
file_path = SubElement(file, 'FilePath')
|
||||
file_path.text = path # path.decode(fs_encoding)
|
||||
|
||||
# for local LOCAL_CFLAGS/LOCAL_CXXFLAGS/LOCAL_CCFLAGS/LOCAL_CPPPATH/LOCAL_CPPDEFINES
|
||||
MiscControls_text = ' '
|
||||
if file_type.text == '1' and 'LOCAL_CFLAGS' in group_scons:
|
||||
MiscControls_text = MiscControls_text + group_scons['LOCAL_CFLAGS']
|
||||
elif file_type.text == '8' and 'LOCAL_CXXFLAGS' in group_scons:
|
||||
MiscControls_text = MiscControls_text + group_scons['LOCAL_CXXFLAGS']
|
||||
if 'LOCAL_CCFLAGS' in group_scons:
|
||||
MiscControls_text = MiscControls_text + group_scons['LOCAL_CCFLAGS']
|
||||
if MiscControls_text != ' ' or ('LOCAL_CPPDEFINES' in group_scons):
|
||||
FileOption = SubElement(file, 'FileOption')
|
||||
FileArmAds = SubElement(FileOption, 'FileArmAds')
|
||||
Cads = SubElement(FileArmAds, 'Cads')
|
||||
VariousControls = SubElement(Cads, 'VariousControls')
|
||||
MiscControls = SubElement(VariousControls, 'MiscControls')
|
||||
MiscControls.text = MiscControls_text
|
||||
Define = SubElement(VariousControls, 'Define')
|
||||
if 'LOCAL_CPPDEFINES' in group_scons:
|
||||
Define.text = ', '.join(set(group_scons['LOCAL_CPPDEFINES']))
|
||||
else:
|
||||
Define.text = ' '
|
||||
Undefine = SubElement(VariousControls, 'Undefine')
|
||||
Undefine.text = ' '
|
||||
IncludePath = SubElement(VariousControls, 'IncludePath')
|
||||
if 'LOCAL_CPPPATH' in group_scons:
|
||||
IncludePath.text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in group_scons['LOCAL_CPPPATH']])
|
||||
else:
|
||||
IncludePath.text = ' '
|
||||
|
||||
return group
|
||||
|
||||
# The common part of making MDK4/5 project
|
||||
def MDK45Project(tree, target, script):
|
||||
project_path = os.path.dirname(os.path.abspath(target))
|
||||
|
||||
root = tree.getroot()
|
||||
out = open(target, 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8" standalone="no" ?>\n')
|
||||
|
||||
CPPPATH = []
|
||||
CPPDEFINES = []
|
||||
LINKFLAGS = ''
|
||||
CXXFLAGS = ''
|
||||
CCFLAGS = ''
|
||||
CFLAGS = ''
|
||||
ProjectFiles = []
|
||||
|
||||
# add group
|
||||
groups = tree.find('Targets/Target/Groups')
|
||||
if groups is None:
|
||||
groups = SubElement(tree.find('Targets/Target'), 'Groups')
|
||||
groups.clear() # clean old groups
|
||||
for group in script:
|
||||
group_tree = MDK4AddGroup(ProjectFiles, groups, group['name'], group['src'], project_path, group)
|
||||
|
||||
# get each include path
|
||||
if 'CPPPATH' in group and group['CPPPATH']:
|
||||
CPPPATH += group['CPPPATH'] # both branches of the old if/else were identical
|
||||
|
||||
# get each group's definitions
|
||||
if 'CPPDEFINES' in group and group['CPPDEFINES']:
|
||||
if CPPDEFINES:
|
||||
CPPDEFINES += group['CPPDEFINES']
|
||||
else:
|
||||
CPPDEFINES = group['CPPDEFINES']
|
||||
|
||||
# get each group's link flags
|
||||
if 'LINKFLAGS' in group and group['LINKFLAGS']:
|
||||
if LINKFLAGS:
|
||||
LINKFLAGS += ' ' + group['LINKFLAGS']
|
||||
else:
|
||||
LINKFLAGS += group['LINKFLAGS']
|
||||
|
||||
# get each group's CXXFLAGS flags
|
||||
if 'CXXFLAGS' in group and group['CXXFLAGS']:
|
||||
if CXXFLAGS:
|
||||
CXXFLAGS += ' ' + group['CXXFLAGS']
|
||||
else:
|
||||
CXXFLAGS += group['CXXFLAGS']
|
||||
|
||||
# get each group's CCFLAGS flags
|
||||
if 'CCFLAGS' in group and group['CCFLAGS']:
|
||||
if CCFLAGS:
|
||||
CCFLAGS += ' ' + group['CCFLAGS']
|
||||
else:
|
||||
CCFLAGS += group['CCFLAGS']
|
||||
|
||||
# get each group's CFLAGS flags
|
||||
if 'CFLAGS' in group and group['CFLAGS']:
|
||||
if CFLAGS:
|
||||
CFLAGS += ' ' + group['CFLAGS']
|
||||
else:
|
||||
CFLAGS += group['CFLAGS']
|
||||
|
||||
# get each group's LIBS flags
|
||||
if 'LIBS' in group and group['LIBS']:
|
||||
for item in group['LIBS']:
|
||||
lib_path = ''
|
||||
for path_item in group['LIBPATH']:
|
||||
full_path = os.path.join(path_item, item + '.lib')
|
||||
if os.path.isfile(full_path): # has this library
|
||||
lib_path = full_path
|
||||
break
|
||||
|
||||
if lib_path != '':
|
||||
if group_tree != None:
|
||||
MDK4AddLibToGroup(ProjectFiles, group_tree, group['name'], lib_path, project_path)
|
||||
else:
|
||||
group_tree = MDK4AddGroupForFN(ProjectFiles, groups, group['name'], lib_path, project_path)
|
||||
|
||||
# write include path, definitions and link flags
|
||||
IncludePath = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/VariousControls/IncludePath')
|
||||
IncludePath.text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in set(CPPPATH)])
|
||||
|
||||
Define = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/VariousControls/Define')
|
||||
Define.text = ', '.join(set(CPPDEFINES))
|
||||
|
||||
if 'c99' in CXXFLAGS or 'c99' in CCFLAGS or 'c99' in CFLAGS:
|
||||
uC99 = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/uC99')
|
||||
uC99.text = '1'
|
||||
|
||||
if 'gnu' in CXXFLAGS or 'gnu' in CCFLAGS or 'gnu' in CFLAGS:
|
||||
uGnu = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/uGnu')
|
||||
uGnu.text = '1'
|
||||
|
||||
Misc = tree.find('Targets/Target/TargetOption/TargetArmAds/LDads/Misc')
|
||||
Misc.text = LINKFLAGS
|
||||
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8').decode())
|
||||
out.close()
|
||||
|
||||
def MDK4Project(target, script):
|
||||
|
||||
if os.path.isfile('template.uvproj') is False:
|
||||
print ('Warning: The template project file [template.uvproj] was not found!')
|
||||
return
|
||||
|
||||
template_tree = etree.parse('template.uvproj')
|
||||
|
||||
MDK45Project(template_tree, target, script)
|
||||
|
||||
# remove project.uvopt file
|
||||
project_uvopt = os.path.abspath(target).replace('uvproj', 'uvopt')
|
||||
if os.path.isfile(project_uvopt):
|
||||
os.unlink(project_uvopt)
|
||||
|
||||
# copy uvopt file
|
||||
if os.path.exists('template.uvopt'):
|
||||
import shutil
|
||||
shutil.copy2('template.uvopt', '{}.uvopt'.format(os.path.splitext(target)[0]))
|
||||
|
||||
def MDK5Project(target, script):
|
||||
|
||||
if os.path.isfile('template.uvprojx') is False:
|
||||
print ('Warning: The template project file [template.uvprojx] was not found!')
|
||||
return
|
||||
|
||||
template_tree = etree.parse('template.uvprojx')
|
||||
|
||||
MDK45Project(template_tree, target, script)
|
||||
|
||||
# remove project.uvopt file
|
||||
project_uvopt = os.path.abspath(target).replace('uvprojx', 'uvoptx')
|
||||
if os.path.isfile(project_uvopt):
|
||||
os.unlink(project_uvopt)
|
||||
# copy uvopt file
|
||||
if os.path.exists('template.uvoptx'):
|
||||
import shutil
|
||||
shutil.copy2('template.uvoptx', '{}.uvoptx'.format(os.path.splitext(target)[0]))
|
||||
|
||||
def MDK2Project(target, script):
|
||||
template = open('template.Uv2', "r")
|
||||
lines = template.readlines()
|
||||
|
||||
project = open(target, "w")
|
||||
project_path = os.path.dirname(os.path.abspath(target))
|
||||
|
||||
line_index = 5
|
||||
# write group
|
||||
for group in script:
|
||||
lines.insert(line_index, 'Group (%s)\r\n' % group['name'])
|
||||
line_index += 1
|
||||
|
||||
lines.insert(line_index, '\r\n')
|
||||
line_index += 1
|
||||
|
||||
# write file
|
||||
|
||||
ProjectFiles = []
|
||||
CPPPATH = []
|
||||
CPPDEFINES = []
|
||||
LINKFLAGS = ''
|
||||
CFLAGS = ''
|
||||
|
||||
# number of groups
|
||||
group_index = 1
|
||||
for group in script:
|
||||
# print group['name']
|
||||
|
||||
# get each include path
|
||||
if 'CPPPATH' in group and group['CPPPATH']:
|
||||
CPPPATH += group['CPPPATH'] # both branches of the old if/else were identical
|
||||
|
||||
# get each group's definitions
|
||||
if 'CPPDEFINES' in group and group['CPPDEFINES']:
|
||||
if CPPDEFINES:
|
||||
CPPDEFINES += group['CPPDEFINES']
|
||||
else:
|
||||
CPPDEFINES = group['CPPDEFINES']
|
||||
|
||||
# get each group's link flags
|
||||
if 'LINKFLAGS' in group and group['LINKFLAGS']:
|
||||
if LINKFLAGS:
|
||||
LINKFLAGS += ' ' + group['LINKFLAGS']
|
||||
else:
|
||||
LINKFLAGS += group['LINKFLAGS']
|
||||
|
||||
# generate file items
|
||||
for node in group['src']:
|
||||
fn = node.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
basename = os.path.basename(path)
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
if ProjectFiles.count(name):
|
||||
name = basename + '_' + name
|
||||
ProjectFiles.append(name)
|
||||
lines.insert(line_index, 'File %d,%d,<%s><%s>\r\n'
|
||||
% (group_index, _get_filetype(name), path, name))
|
||||
line_index += 1
|
||||
|
||||
group_index = group_index + 1
|
||||
|
||||
lines.insert(line_index, '\r\n')
|
||||
line_index += 1
|
||||
|
||||
# remove repeat path
|
||||
paths = set()
|
||||
for path in CPPPATH:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc) #.replace('\\', '/')
|
||||
|
||||
paths = [i for i in paths]
|
||||
CPPPATH = ';'.join(paths) # string.join() does not exist in Python 3
|
||||
|
||||
definitions = [i for i in set(CPPDEFINES)]
|
||||
CPPDEFINES = ', '.join(definitions) # string.join() does not exist in Python 3
|
||||
|
||||
while line_index < len(lines):
|
||||
if lines[line_index].startswith(' ADSCINCD '):
|
||||
lines[line_index] = ' ADSCINCD (' + CPPPATH + ')\r\n'
|
||||
|
||||
if lines[line_index].startswith(' ADSLDMC ('):
|
||||
lines[line_index] = ' ADSLDMC (' + LINKFLAGS + ')\r\n'
|
||||
|
||||
if lines[line_index].startswith(' ADSCDEFN ('):
|
||||
lines[line_index] = ' ADSCDEFN (' + CPPDEFINES + ')\r\n'
|
||||
|
||||
line_index += 1
|
||||
|
||||
# write project
|
||||
for line in lines:
|
||||
project.write(line)
|
||||
|
||||
project.close()
|
||||
|
||||
def ARMCC_Version():
|
||||
import rtconfig
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
path = rtconfig.EXEC_PATH
|
||||
if(rtconfig.PLATFORM == 'armcc'):
|
||||
path = os.path.join(path, 'armcc.exe')
|
||||
elif(rtconfig.PLATFORM == 'armclang'):
|
||||
path = os.path.join(path, 'armlink.exe')
|
||||
|
||||
if os.path.exists(path):
|
||||
cmd = path
|
||||
else:
|
||||
return "0.0"
|
||||
|
||||
child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
stdout, stderr = child.communicate()
|
||||
|
||||
'''
|
||||
example stdout:
|
||||
Product: MDK Plus 5.24
|
||||
Component: ARM Compiler 5.06 update 5 (build 528)
|
||||
Tool: armcc [4d3621]
|
||||
|
||||
return version: MDK Plus 5.24/ARM Compiler 5.06 update 5 (build 528)/armcc [4d3621]
|
||||
'''
|
||||
if not isinstance(stdout, str):
|
||||
stdout = str(stdout, 'utf8') # Patch for Python 3
|
||||
version_Product = re.search(r'Product: (.+)', stdout).group(1)
|
||||
version_Product = version_Product[:-1]
|
||||
version_Component = re.search(r'Component: (.*)', stdout).group(1)
|
||||
version_Component = version_Component[:-1]
|
||||
version_Tool = re.search(r'Tool: (.*)', stdout).group(1)
|
||||
version_Tool = version_Tool[:-1]
|
||||
version_str_format = '%s/%s/%s'
|
||||
version_str = version_str_format % (version_Product, version_Component, version_Tool)
|
||||
return version_str
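# A hedged usage sketch (illustration only, not from the original source). ARMCC_Version()
# returns "0.0" when no armcc.exe/armlink.exe is found under rtconfig.EXEC_PATH, so a build
# script might log the toolchain like this:
#
#     from keil import ARMCC_Version
#     version = ARMCC_Version()
#     if version != "0.0":
#         print('Keil MDK toolchain: ' + version)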
|
58
rt-thread/tools/llvm_arm.py
Normal file
@@ -0,0 +1,58 @@
|
||||
#
|
||||
# File : llvm_arm.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2018, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2023-05-17 Flybreak The first version
|
||||
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
|
||||
def GetLLVM_ARMRoot(rtconfig):
|
||||
exec_path = rtconfig.EXEC_PATH
|
||||
lib_path = 'lib/clang-runtimes/arm-none-eabi'
|
||||
root_path = os.path.join(exec_path, '..', lib_path)
|
||||
|
||||
return root_path
|
||||
|
||||
def CheckHeader(rtconfig, filename):
|
||||
root = GetLLVM_ARMRoot(rtconfig)
|
||||
if os.path.isdir(root):
|
||||
for config in os.listdir(root):
|
||||
fn = os.path.join(root, config, 'include', filename)
|
||||
if os.path.isfile(fn):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def GetPicoLibcVersion(rtconfig):
|
||||
version = None
|
||||
root = GetLLVM_ARMRoot(rtconfig)
|
||||
if CheckHeader(rtconfig, 'picolibc.h'): # get version from picolibc.h file
|
||||
for config in os.listdir(root):
|
||||
fn = os.path.join(root, config, 'include', 'picolibc.h')
|
||||
f = open(fn, 'r')
|
||||
if f:
|
||||
for line in f:
|
||||
if line.find('__PICOLIBC_VERSION__') != -1 and line.find('"') != -1:
|
||||
version = re.search(r'\"([^"]+)\"', line).groups()[0]
|
||||
f.close()
|
||||
return version
|
||||
return version
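# A hedged usage sketch (illustration only). The rtconfig module passed in is expected to
# expose EXEC_PATH pointing at the LLVM Embedded Toolchain for Arm installation:
#
#     import rtconfig
#     from llvm_arm import GetPicoLibcVersion
#     ver = GetPicoLibcVersion(rtconfig)
#     if ver:
#         print('picolibc version: ' + ver)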
|
134
rt-thread/tools/makefile.py
Normal file
@@ -0,0 +1,134 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from utils import *
|
||||
from utils import _make_path_relative
|
||||
import rtconfig
|
||||
|
||||
makefile = '''phony := all
|
||||
all:
|
||||
|
||||
include config.mk
|
||||
|
||||
ifneq ($(MAKE_LIB),1)
|
||||
TARGET := rtthread.elf
|
||||
include src.mk
|
||||
endif
|
||||
|
||||
$(if $(strip $(RTT_ROOT)),,$(error RTT_ROOT not defined))
|
||||
|
||||
include $(RTT_ROOT)/tools/rtthread.mk
|
||||
'''
|
||||
|
||||
def TargetMakefile(env):
|
||||
project = ProjectInfo(env)
|
||||
|
||||
BSP_ROOT = os.path.abspath(env['BSP_ROOT'])
|
||||
RTT_ROOT = os.path.abspath(env['RTT_ROOT'])
|
||||
|
||||
match_bsp = False
|
||||
if BSP_ROOT.startswith(RTT_ROOT):
|
||||
match_bsp = True
|
||||
|
||||
make = open('config.mk', 'w')
|
||||
|
||||
make.write('BSP_ROOT ?= %s\n' % BSP_ROOT.replace('\\', '/'))
|
||||
make.write('RTT_ROOT ?= %s\n' % RTT_ROOT.replace('\\', '/'))
|
||||
make.write('\n')
|
||||
|
||||
cross = os.path.abspath(rtconfig.EXEC_PATH)
|
||||
cross = os.path.join(cross, rtconfig.PREFIX)
|
||||
make.write('CROSS_COMPILE ?=%s' % cross.replace('\\', '\\\\'))
|
||||
make.write('\n')
|
||||
make.write('\n')
|
||||
|
||||
make.write('CFLAGS :=%s' % (rtconfig.CFLAGS))
|
||||
make.write('\n')
|
||||
make.write('AFLAGS :=%s' % (rtconfig.AFLAGS))
|
||||
make.write('\n')
|
||||
make.write('LFLAGS :=%s' % (rtconfig.LFLAGS))
|
||||
make.write('\n')
|
||||
if 'CXXFLAGS' in dir(rtconfig):
|
||||
make.write('CXXFLAGS :=%s' % (rtconfig.CXXFLAGS))
|
||||
make.write('\n')
|
||||
if ('LIBS' in env):
|
||||
make.write('EXTERN_LIB := ')
|
||||
for tlib in env['LIBS']:
|
||||
make.write('-l%s ' % (tlib))
|
||||
if ('LIBPATH' in env):
|
||||
for tlibpath in env['LIBPATH']:
|
||||
make.write('-L%s ' % (tlibpath))
|
||||
make.write('\n')
|
||||
|
||||
make.write('\n')
|
||||
|
||||
Files = project['FILES']
|
||||
Headers = project['HEADERS']
|
||||
CPPDEFINES = project['CPPDEFINES']
|
||||
|
||||
paths = [os.path.normpath(i) for i in project['CPPPATH']]
|
||||
CPPPATH = []
|
||||
for path in paths:
|
||||
fn = os.path.normpath(path)
|
||||
if match_bsp:
|
||||
if fn.startswith(BSP_ROOT):
|
||||
fn = '$(BSP_ROOT)' + fn.replace(BSP_ROOT, '')
|
||||
elif fn.startswith(RTT_ROOT):
|
||||
fn = '$(RTT_ROOT)' + fn.replace(RTT_ROOT, '')
|
||||
else:
|
||||
if fn.startswith(RTT_ROOT):
|
||||
fn = '$(RTT_ROOT)' + fn.replace(RTT_ROOT, '')
|
||||
elif fn.startswith(BSP_ROOT):
|
||||
fn = '$(BSP_ROOT)' + fn.replace(BSP_ROOT, '')
|
||||
|
||||
CPPPATH.append(fn)
|
||||
|
||||
path = ''
|
||||
paths = CPPPATH
|
||||
for item in paths:
|
||||
path += '\t-I%s \\\n' % item
|
||||
|
||||
make.write('CPPPATHS :=')
|
||||
if path[0] == '\t': path = path[1:]
|
||||
length = len(path)
|
||||
if path[length - 2] == '\\': path = path[:length - 2]
|
||||
make.write(path)
|
||||
make.write('\n')
|
||||
make.write('\n')
|
||||
|
||||
defines = ''
|
||||
for item in project['CPPDEFINES']:
|
||||
defines += ' -D%s' % item
|
||||
make.write('DEFINES :=')
|
||||
make.write(defines)
|
||||
make.write('\n')
|
||||
|
||||
files = Files
|
||||
Files = []
|
||||
for file in files:
|
||||
fn = os.path.normpath(file)
|
||||
if match_bsp:
|
||||
if fn.startswith(BSP_ROOT):
|
||||
fn = '$(BSP_ROOT)' + fn.replace(BSP_ROOT, '')
|
||||
elif fn.startswith(RTT_ROOT):
|
||||
fn = '$(RTT_ROOT)' + fn.replace(RTT_ROOT, '')
|
||||
else:
|
||||
if fn.startswith(RTT_ROOT):
|
||||
fn = '$(RTT_ROOT)' + fn.replace(RTT_ROOT, '')
|
||||
elif fn.startswith(BSP_ROOT):
|
||||
fn = '$(BSP_ROOT)' + fn.replace(BSP_ROOT, '')
|
||||
|
||||
Files.append(fn)
|
||||
# print(fn)
|
||||
|
||||
src = open('src.mk', 'w')
|
||||
files = Files
|
||||
src.write('SRC_FILES :=\n')
|
||||
for item in files:
|
||||
src.write('SRC_FILES +=%s\n' % item.replace('\\', '/'))
|
||||
|
||||
make = open('Makefile', 'w')
|
||||
make.write(makefile)
|
||||
make.close()
|
||||
|
||||
return
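# A hedged note (illustration only): after TargetMakefile(env) has written Makefile,
# config.mk and src.mk into the BSP directory, a plain GNU make build is expected to work,
# typically driven by something like:
#
#     scons --target=makefile
#     make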
|
392
rt-thread/tools/menukconfig.py
Normal file
@@ -0,0 +1,392 @@
|
||||
#
|
||||
# File : menuconfig.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2018, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2017-12-29 Bernard The first version
|
||||
# 2018-07-31 weety Support pyconfig
|
||||
# 2019-07-13 armink Support guiconfig
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import shutil
|
||||
import hashlib
|
||||
import operator
|
||||
|
||||
# test kconfiglib is installed
|
||||
try:
|
||||
import kconfiglib
|
||||
except ImportError as e:
|
||||
print("Failed to import kconfiglib: " + str(e))
|
||||
print("")
|
||||
print("You may need to install it using:")
|
||||
print(" pip install kconfiglib")
|
||||
print("")
|
||||
sys.exit(1)
|
||||
|
||||
DEFAULT_RTT_PACKAGE_URL = 'https://github.com/RT-Thread/packages.git'
|
||||
# you can change the package url by defining RTT_PACKAGE_URL, ex:
|
||||
# export RTT_PACKAGE_URL=https://github.com/Varanda-Labs/packages.git
|
||||
|
||||
# make rtconfig.h from .config
|
||||
|
||||
def is_pkg_special_config(config_str):
|
||||
'''Judge whether a config name is CONFIG_PKG_XX_PATH or CONFIG_PKG_XX_VER.'''
|
||||
|
||||
if type(config_str) == type('a'):
|
||||
if config_str.startswith("PKG_") and (config_str.endswith('_PATH') or config_str.endswith('_VER')):
|
||||
return True
|
||||
return False
|
||||
|
||||
def mk_rtconfig(filename):
|
||||
try:
|
||||
config = open(filename, 'r')
|
||||
except:
|
||||
print('Failed to open config file: %s' % filename)
|
||||
return
|
||||
|
||||
rtconfig = open('rtconfig.h', 'w')
|
||||
rtconfig.write('#ifndef RT_CONFIG_H__\n')
|
||||
rtconfig.write('#define RT_CONFIG_H__\n\n')
|
||||
|
||||
empty_line = 1
|
||||
|
||||
for line in config:
|
||||
line = line.lstrip(' ').replace('\n', '').replace('\r', '')
|
||||
|
||||
if len(line) == 0:
|
||||
continue
|
||||
|
||||
if line[0] == '#':
|
||||
if len(line) == 1:
|
||||
if empty_line:
|
||||
continue
|
||||
|
||||
rtconfig.write('\n')
|
||||
empty_line = 1
|
||||
continue
|
||||
|
||||
if line.startswith('# CONFIG_'):
|
||||
line = ' ' + line[9:]
|
||||
else:
|
||||
line = line[1:]
|
||||
rtconfig.write('/*%s */\n' % line)
|
||||
|
||||
empty_line = 0
|
||||
else:
|
||||
empty_line = 0
|
||||
setting = line.split('=')
|
||||
if len(setting) >= 2:
|
||||
if setting[0].startswith('CONFIG_'):
|
||||
setting[0] = setting[0][7:]
|
||||
|
||||
# remove CONFIG_PKG_XX_PATH or CONFIG_PKG_XX_VER
|
||||
if is_pkg_special_config(setting[0]):
|
||||
continue
|
||||
|
||||
if setting[1] == 'y':
|
||||
rtconfig.write('#define %s\n' % setting[0])
|
||||
else:
|
||||
rtconfig.write('#define %s %s\n' % (setting[0], re.findall(r"^.*?=(.*)$",line)[0]))
|
||||
|
||||
if os.path.isfile('rtconfig_project.h'):
|
||||
rtconfig.write('#include "rtconfig_project.h"\n')
|
||||
|
||||
rtconfig.write('\n')
|
||||
rtconfig.write('#endif\n')
|
||||
rtconfig.close()
|
||||
|
||||
|
||||
def get_file_md5(file):
|
||||
MD5 = hashlib.new('md5')
|
||||
with open(file, 'r') as fp:
|
||||
MD5.update(fp.read().encode('utf8'))
|
||||
fp_md5 = MD5.hexdigest()
|
||||
return fp_md5
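# A hedged sketch of how this helper is used further below (illustration only):
# menuconfig()/guiconfig() compare .config against .config.old and only regenerate
# rtconfig.h when the two digests differ, e.g.
#
#     if get_file_md5('.config') != get_file_md5('.config.old'):
#         mk_rtconfig('.config')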
|
||||
|
||||
def config():
|
||||
mk_rtconfig('.config')
|
||||
|
||||
def get_env_dir():
|
||||
if os.environ.get('ENV_ROOT'):
|
||||
return os.environ.get('ENV_ROOT')
|
||||
|
||||
if sys.platform == 'win32':
|
||||
home_dir = os.environ['USERPROFILE']
|
||||
env_dir = os.path.join(home_dir, '.env')
|
||||
else:
|
||||
home_dir = os.environ['HOME']
|
||||
env_dir = os.path.join(home_dir, '.env')
|
||||
|
||||
if not os.path.exists(env_dir):
|
||||
return None
|
||||
|
||||
return env_dir
|
||||
|
||||
def help_info():
|
||||
print("**********************************************************************************\n"
|
||||
"* Help infomation:\n"
|
||||
"* Git tool install step.\n"
|
||||
"* If your system is linux, you can use command below to install git.\n"
|
||||
"* $ sudo yum install git\n"
|
||||
"* $ sudo apt-get install git\n"
|
||||
"* If your system is windows, you should download git software(msysGit).\n"
|
||||
"* Download path: http://git-scm.com/download/win\n"
|
||||
"* After you install it, be sure to add the git command execution PATH \n"
|
||||
"* to your system PATH.\n"
|
||||
"* Usually, git command PATH is $YOUR_INSTALL_DIR\\Git\\bin\n"
|
||||
"* If your system is OSX, please download git and install it.\n"
|
||||
"* Download path: http://git-scm.com/download/mac\n"
|
||||
"**********************************************************************************\n")
|
||||
|
||||
def touch_env():
|
||||
if sys.platform != 'win32':
|
||||
home_dir = os.environ['HOME']
|
||||
else:
|
||||
home_dir = os.environ['USERPROFILE']
|
||||
|
||||
package_url = os.getenv('RTT_PACKAGE_URL') or DEFAULT_RTT_PACKAGE_URL
|
||||
|
||||
env_dir = os.path.join(home_dir, '.env')
|
||||
if not os.path.exists(env_dir):
|
||||
os.mkdir(env_dir)
|
||||
os.mkdir(os.path.join(env_dir, 'local_pkgs'))
|
||||
os.mkdir(os.path.join(env_dir, 'packages'))
|
||||
os.mkdir(os.path.join(env_dir, 'tools'))
|
||||
kconfig = open(os.path.join(env_dir, 'packages', 'Kconfig'), 'w')
|
||||
kconfig.close()
|
||||
|
||||
if not os.path.exists(os.path.join(env_dir, 'packages', 'packages')):
|
||||
try:
|
||||
ret = os.system('git clone %s %s' % (package_url, os.path.join(env_dir, 'packages', 'packages')))
|
||||
if ret != 0:
|
||||
shutil.rmtree(os.path.join(env_dir, 'packages', 'packages'))
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/packages.git\".\n"
|
||||
"* This error may have been caused by not found a git tool or network error.\n"
|
||||
"* If the git tool is not installed, install the git tool first.\n"
|
||||
"* If the git utility is installed, check whether the git command is added to \n"
|
||||
"* the system PATH.\n"
|
||||
"* This error may cause the RT-Thread packages to not work properly.\n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
else:
|
||||
kconfig = open(os.path.join(env_dir, 'packages', 'Kconfig'), 'w')
|
||||
kconfig.write('source "$PKGS_DIR/packages/Kconfig"')
|
||||
kconfig.close()
|
||||
except:
|
||||
print("**********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/packages.git\". \n"
|
||||
"* This error may have been caused by not found a git tool or git tool not in \n"
|
||||
"* the system PATH. \n"
|
||||
"* This error may cause the RT-Thread packages to not work properly. \n"
|
||||
"**********************************************************************************\n")
|
||||
help_info()
|
||||
|
||||
if not os.path.exists(os.path.join(env_dir, 'tools', 'scripts')):
|
||||
try:
|
||||
ret = os.system('git clone https://github.com/RT-Thread/env.git %s' % os.path.join(env_dir, 'tools', 'scripts'))
|
||||
if ret != 0:
|
||||
shutil.rmtree(os.path.join(env_dir, 'tools', 'scripts'))
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/env.git\".\n"
|
||||
"* This error may have been caused by not found a git tool or network error.\n"
|
||||
"* If the git tool is not installed, install the git tool first.\n"
|
||||
"* If the git utility is installed, check whether the git command is added \n"
|
||||
"* to the system PATH.\n"
|
||||
"* This error may cause script tools to fail to work properly.\n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
except:
|
||||
print("********************************************************************************\n"
|
||||
"* Warnning:\n"
|
||||
"* Run command error for \"git clone https://github.com/RT-Thread/env.git\". \n"
|
||||
"* This error may have been caused by not found a git tool or git tool not in \n"
|
||||
"* the system PATH. \n"
|
||||
"* This error may cause script tools to fail to work properly. \n"
|
||||
"********************************************************************************\n")
|
||||
help_info()
|
||||
|
||||
if sys.platform != 'win32':
|
||||
env_sh = open(os.path.join(env_dir, 'env.sh'), 'w')
|
||||
env_sh.write('export PATH=~/.env/tools/scripts:$PATH')
|
||||
|
||||
# if fish config exists, generate env.fish
|
||||
if os.path.exists(os.path.join(home_dir, '.config', 'fish', 'config.fish')):
|
||||
env_fish = open(os.path.join(env_dir, 'env.fish'), 'w')
|
||||
env_fish.write('set -gx PATH ~/.env/tools/scripts $PATH')
|
||||
else:
|
||||
if os.path.exists(os.path.join(env_dir, 'tools', 'scripts')):
|
||||
os.environ["PATH"] = os.path.join(env_dir, 'tools', 'scripts') + ';' + os.environ["PATH"]
|
||||
|
||||
# Exclude utestcases
|
||||
def exclude_utestcases(RTT_ROOT):
|
||||
if os.path.isfile(os.path.join(RTT_ROOT, 'examples/utest/testcases/Kconfig')):
|
||||
return
|
||||
|
||||
if not os.path.isfile(os.path.join(RTT_ROOT, 'Kconfig')):
|
||||
return
|
||||
|
||||
with open(os.path.join(RTT_ROOT, 'Kconfig'), 'r') as f:
|
||||
data = f.readlines()
|
||||
with open(os.path.join(RTT_ROOT, 'Kconfig'), 'w') as f:
|
||||
for line in data:
|
||||
if line.find('examples/utest/testcases/Kconfig') == -1:
|
||||
f.write(line)
|
||||
|
||||
# fix locale for kconfiglib
|
||||
def kconfiglib_fix_locale():
|
||||
import os
|
||||
import locale
|
||||
|
||||
# Get the list of supported locales
|
||||
supported_locales = set(locale.locale_alias.keys())
|
||||
|
||||
# Check if LANG is set and its value is not in the supported locales
|
||||
if 'LANG' in os.environ and os.environ['LANG'] not in supported_locales:
|
||||
os.environ['LANG'] = 'C'
|
||||
|
||||
# menuconfig for Linux and Windows
|
||||
def menuconfig(RTT_ROOT):
|
||||
import menuconfig
|
||||
|
||||
# Exclude utestcases
|
||||
exclude_utestcases(RTT_ROOT)
|
||||
|
||||
if sys.platform != 'win32':
|
||||
touch_env()
|
||||
|
||||
env_dir = get_env_dir()
|
||||
if isinstance(env_dir, str):
|
||||
os.environ['PKGS_ROOT'] = os.path.join(env_dir, 'packages')
|
||||
|
||||
fn = '.config'
|
||||
fn_old = '.config.old'
|
||||
|
||||
sys.argv = ['menuconfig', 'Kconfig']
|
||||
|
||||
# fix vscode console
|
||||
kconfiglib_fix_locale()
|
||||
|
||||
menuconfig._main()
|
||||
|
||||
if os.path.isfile(fn):
|
||||
if os.path.isfile(fn_old):
|
||||
diff_eq = operator.eq(get_file_md5(fn), get_file_md5(fn_old))
|
||||
else:
|
||||
diff_eq = False
|
||||
else:
|
||||
sys.exit(-1)
|
||||
|
||||
# make rtconfig.h
|
||||
if diff_eq == False:
|
||||
shutil.copyfile(fn, fn_old)
|
||||
mk_rtconfig(fn)
|
||||
|
||||
# guiconfig for windows and linux
|
||||
def guiconfig(RTT_ROOT):
|
||||
import guiconfig
|
||||
|
||||
# Exclude utestcases
|
||||
exclude_utestcases(RTT_ROOT)
|
||||
|
||||
if sys.platform != 'win32':
|
||||
touch_env()
|
||||
|
||||
env_dir = get_env_dir()
|
||||
if isinstance(env_dir, str):
|
||||
os.environ['PKGS_ROOT'] = os.path.join(env_dir, 'packages')
|
||||
|
||||
fn = '.config'
|
||||
fn_old = '.config.old'
|
||||
|
||||
sys.argv = ['guiconfig', 'Kconfig']
|
||||
guiconfig._main()
|
||||
|
||||
if os.path.isfile(fn):
|
||||
if os.path.isfile(fn_old):
|
||||
diff_eq = operator.eq(get_file_md5(fn), get_file_md5(fn_old))
|
||||
else:
|
||||
diff_eq = False
|
||||
else:
|
||||
sys.exit(-1)
|
||||
|
||||
# make rtconfig.h
|
||||
if diff_eq == False:
|
||||
shutil.copyfile(fn, fn_old)
|
||||
mk_rtconfig(fn)
|
||||
|
||||
|
||||
# guiconfig for windows and linux
|
||||
def guiconfig_silent(RTT_ROOT):
|
||||
import defconfig
|
||||
|
||||
# Exclude utestcases
|
||||
exclude_utestcases(RTT_ROOT)
|
||||
|
||||
if sys.platform != 'win32':
|
||||
touch_env()
|
||||
|
||||
env_dir = get_env_dir()
|
||||
if isinstance(env_dir, str):
|
||||
os.environ['PKGS_ROOT'] = os.path.join(env_dir, 'packages')
|
||||
|
||||
fn = '.config'
|
||||
|
||||
sys.argv = ['defconfig', '--kconfig', 'Kconfig', '.config']
|
||||
defconfig.main()
|
||||
|
||||
# silent mode, force to make rtconfig.h
|
||||
mk_rtconfig(fn)
|
||||
|
||||
|
||||
def genconfig() :
|
||||
from SCons.Script import SCons
|
||||
|
||||
PreProcessor = SCons.cpp.PreProcessor()
|
||||
|
||||
try:
|
||||
f = open('rtconfig.h', 'r')
|
||||
contents = f.read()
|
||||
f.close()
|
||||
except :
|
||||
print("Open rtconfig.h file failed.")
|
||||
|
||||
PreProcessor.process_contents(contents)
|
||||
options = PreProcessor.cpp_namespace
|
||||
|
||||
try:
|
||||
f = open('.config', 'w')
|
||||
for (opt, value) in options.items():
|
||||
if type(value) == type(1):
|
||||
f.write("CONFIG_%s=%d\n" % (opt, value))
|
||||
|
||||
if type(value) == type('') and value == '':
|
||||
f.write("CONFIG_%s=y\n" % opt)
|
||||
elif type(value) == type('str'):
|
||||
f.write("CONFIG_%s=%s\n" % (opt, value))
|
||||
|
||||
print("Generate .config done!")
|
||||
f.close()
|
||||
except:
|
||||
print("Generate .config file failed.")
|
250
rt-thread/tools/mkdist.py
Normal file
@@ -0,0 +1,250 @@
|
||||
#
|
||||
# File : mkdist.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2018, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2017-10-04 Bernard The first version
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import shutil
|
||||
from shutil import ignore_patterns
|
||||
from SCons.Script import *
|
||||
|
||||
def do_copy_file(src, dst):
|
||||
# check source file
|
||||
if not os.path.exists(src):
|
||||
return
|
||||
|
||||
path = os.path.dirname(dst)
|
||||
# mkdir if path not exist
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
|
||||
shutil.copy2(src, dst)
|
||||
|
||||
def do_copy_folder(src_dir, dst_dir, ignore=None):
|
||||
import shutil
|
||||
# check source directory
|
||||
if not os.path.exists(src_dir):
|
||||
return
|
||||
|
||||
try:
|
||||
if os.path.exists(dst_dir):
|
||||
shutil.rmtree(dst_dir)
|
||||
except:
|
||||
print('Failed to delete folder: %s.' % dst_dir)
|
||||
return
|
||||
|
||||
shutil.copytree(src_dir, dst_dir, ignore = ignore)
|
||||
|
||||
source_ext = ['c', 'h', 's', 'S', 'cpp', 'xpm']
|
||||
source_list = []
|
||||
|
||||
def walk_children(child):
|
||||
global source_list
|
||||
global source_ext
|
||||
|
||||
# print child
|
||||
full_path = child.rfile().abspath
|
||||
file_type = full_path.rsplit('.',1)[1]
|
||||
#print file_type
|
||||
if file_type in source_ext:
|
||||
if full_path not in source_list:
|
||||
source_list.append(full_path)
|
||||
|
||||
children = child.all_children()
|
||||
if children != []:
|
||||
for item in children:
|
||||
walk_children(item)
|
||||
|
||||
def walk_kconfig(RTT_ROOT, source_list):
|
||||
for parent, dirnames, filenames in os.walk(RTT_ROOT):
|
||||
if 'bsp' in parent:
|
||||
continue
|
||||
if '.git' in parent:
|
||||
continue
|
||||
if 'tools' in parent:
|
||||
continue
|
||||
|
||||
if 'Kconfig' in filenames:
|
||||
pathfile = os.path.join(parent, 'Kconfig')
|
||||
source_list.append(pathfile)
|
||||
if 'KConfig' in filenames:
|
||||
pathfile = os.path.join(parent, 'KConfig')
|
||||
source_list.append(pathfile)
|
||||
|
||||
def bsp_copy_files(bsp_root, dist_dir):
|
||||
# copy BSP files
|
||||
do_copy_folder(os.path.join(bsp_root), dist_dir,
|
||||
ignore_patterns('build', 'dist', '*.pyc', '*.old', '*.map', 'rtthread.bin', '.sconsign.dblite', '*.elf', '*.axf', 'cconfig.h'))
|
||||
|
||||
def bsp_update_sconstruct(dist_dir):
|
||||
with open(os.path.join(dist_dir, 'SConstruct'), 'r') as f:
|
||||
data = f.readlines()
|
||||
with open(os.path.join(dist_dir, 'SConstruct'), 'w') as f:
|
||||
for line in data:
|
||||
if line.find('RTT_ROOT') != -1:
|
||||
if line.find('sys.path') != -1:
|
||||
f.write('# set RTT_ROOT\n')
|
||||
f.write('if not os.getenv("RTT_ROOT"): \n RTT_ROOT="rt-thread"\n\n')
|
||||
f.write(line)
|
||||
|
||||
def bsp_update_kconfig_testcases(dist_dir):
|
||||
# delete testcases in rt-thread/Kconfig
|
||||
if not os.path.isfile(os.path.join(dist_dir, 'rt-thread/Kconfig')):
|
||||
return
|
||||
|
||||
with open(os.path.join(dist_dir, 'rt-thread/Kconfig'), 'r') as f:
|
||||
data = f.readlines()
|
||||
with open(os.path.join(dist_dir, 'rt-thread/Kconfig'), 'w') as f:
|
||||
for line in data:
|
||||
if line.find('examples/utest/testcases/Kconfig') == -1:
|
||||
f.write(line)
|
||||
|
||||
def bsp_update_kconfig(dist_dir):
|
||||
# change RTT_ROOT in Kconfig
|
||||
if not os.path.isfile(os.path.join(dist_dir, 'Kconfig')):
|
||||
return
|
||||
|
||||
with open(os.path.join(dist_dir, 'Kconfig'), 'r') as f:
|
||||
data = f.readlines()
|
||||
with open(os.path.join(dist_dir, 'Kconfig'), 'w') as f:
|
||||
for line in data:
|
||||
if line.find('RTT_DIR') != -1 and line.find(':=') != -1:
|
||||
line = 'RTT_DIR := rt-thread\n'
|
||||
f.write(line)
|
||||
|
||||
def bsp_update_kconfig_library(dist_dir):
|
||||
# change RTT_ROOT in Kconfig
|
||||
if not os.path.isfile(os.path.join(dist_dir, 'Kconfig')):
|
||||
return
|
||||
|
||||
with open(os.path.join(dist_dir, 'Kconfig'), 'r') as f:
|
||||
data = f.readlines()
|
||||
with open(os.path.join(dist_dir, 'Kconfig'), 'w') as f:
|
||||
for line in data:
|
||||
if line.find('source') != -1 and line.find('../libraries') != -1:
|
||||
line = line.replace('../libraries', 'libraries')
|
||||
f.write(line)
|
||||
|
||||
# change board/kconfig path
|
||||
if not os.path.isfile(os.path.join(dist_dir, 'board/Kconfig')):
|
||||
return
|
||||
|
||||
with open(os.path.join(dist_dir, 'board/Kconfig'), 'r') as f:
|
||||
data = f.readlines()
|
||||
with open(os.path.join(dist_dir, 'board/Kconfig'), 'w') as f:
|
||||
for line in data:
|
||||
if line.find('source') != -1 and line.find('../libraries') != -1:
|
||||
line = line.replace('../libraries', 'libraries')
|
||||
f.write(line)
|
||||
|
||||
def zip_dist(dist_dir, dist_name):
|
||||
import zipfile
|
||||
|
||||
zip_filename = dist_dir # os.path.join() with a single argument was a no-op
|
||||
zip = zipfile.ZipFile(zip_filename + '.zip', 'w')
|
||||
pre_len = len(os.path.dirname(dist_dir))
|
||||
|
||||
for parent, dirnames, filenames in os.walk(dist_dir):
|
||||
for filename in filenames:
|
||||
pathfile = os.path.join(parent, filename)
|
||||
arcname = pathfile[pre_len:].strip(os.path.sep)
|
||||
zip.write(pathfile, arcname)
|
||||
|
||||
zip.close()
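# A hedged usage sketch (not part of the original file): zip_dist() archives the whole
# dist directory into '<dist_dir>.zip' alongside it, mirroring the call made in MkDist():
#
#     zip_dist(os.path.join(BSP_ROOT, 'dist', 'my_project'), 'my_project')
#     # -> produces <BSP_ROOT>/dist/my_project.zip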
|
||||
|
||||
def MkDist(program, BSP_ROOT, RTT_ROOT, Env, project_name, project_path):
|
||||
print('make distribution....')
|
||||
|
||||
if project_path == None:
|
||||
dist_dir = os.path.join(BSP_ROOT, 'dist', project_name)
|
||||
else:
|
||||
dist_dir = project_path
|
||||
|
||||
rtt_dir_path = os.path.join(dist_dir, 'rt-thread')
|
||||
|
||||
# copy BSP files
|
||||
print('=> %s' % os.path.basename(BSP_ROOT))
|
||||
bsp_copy_files(BSP_ROOT, dist_dir)
|
||||
|
||||
# do bsp special dist handle
|
||||
if 'dist_handle' in Env:
|
||||
print("=> start dist handle")
|
||||
dist_handle = Env['dist_handle']
|
||||
dist_handle(BSP_ROOT, dist_dir)
|
||||
|
||||
# copy tools directory
|
||||
print('=> components')
|
||||
do_copy_folder(os.path.join(RTT_ROOT, 'components'), os.path.join(rtt_dir_path, 'components'))
|
||||
|
||||
# skip documentation directory
|
||||
# skip examples
|
||||
|
||||
# copy include directory
|
||||
print('=> include')
|
||||
do_copy_folder(os.path.join(RTT_ROOT, 'include'), os.path.join(rtt_dir_path, 'include'))
|
||||
|
||||
# copy all libcpu/ARCH directory
|
||||
print('=> libcpu')
|
||||
import rtconfig
|
||||
do_copy_folder(os.path.join(RTT_ROOT, 'libcpu', rtconfig.ARCH), os.path.join(rtt_dir_path, 'libcpu', rtconfig.ARCH))
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'libcpu', 'Kconfig'), os.path.join(rtt_dir_path, 'libcpu', 'Kconfig'))
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'libcpu', 'SConscript'), os.path.join(rtt_dir_path, 'libcpu', 'SConscript'))
|
||||
|
||||
# copy src directory
|
||||
print('=> src')
|
||||
do_copy_folder(os.path.join(RTT_ROOT, 'src'), os.path.join(rtt_dir_path, 'src'))
|
||||
|
||||
# copy tools directory
|
||||
print('=> tools')
|
||||
do_copy_folder(os.path.join(RTT_ROOT, 'tools'), os.path.join(rtt_dir_path, 'tools'), ignore_patterns('*.pyc'))
|
||||
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'Kconfig'), os.path.join(rtt_dir_path, 'Kconfig'))
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'AUTHORS'), os.path.join(rtt_dir_path, 'AUTHORS'))
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'COPYING'), os.path.join(rtt_dir_path, 'COPYING'))
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'README.md'), os.path.join(rtt_dir_path, 'README.md'))
|
||||
do_copy_file(os.path.join(RTT_ROOT, 'README_zh.md'), os.path.join(rtt_dir_path, 'README_zh.md'))
|
||||
|
||||
print('Update configuration files...')
|
||||
# change RTT_ROOT in SConstruct
|
||||
bsp_update_sconstruct(dist_dir)
|
||||
# change RTT_ROOT in Kconfig
|
||||
bsp_update_kconfig(dist_dir)
|
||||
bsp_update_kconfig_library(dist_dir)
|
||||
# delete testcases in Kconfig
|
||||
bsp_update_kconfig_testcases(dist_dir)
|
||||
|
||||
target_project_type = GetOption('target')
|
||||
if target_project_type:
|
||||
child = subprocess.Popen('scons --target={} --project-name="{}"'.format(target_project_type, project_name), cwd=dist_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
stdout, stderr = child.communicate()
|
||||
if child.returncode == 0:
|
||||
print(stdout)
|
||||
else:
|
||||
print(stderr)
|
||||
else:
|
||||
print('Suggestion: use the command scons --dist [--target=xxx] [--project-name="xxx"] [--project-path="xxx"]')
|
||||
|
||||
# make zip package
|
||||
if project_path == None:
|
||||
zip_dist(dist_dir, project_name)
|
||||
|
||||
print('dist project finished successfully!')
|
267
rt-thread/tools/mkromfs.py
Normal file
@@ -0,0 +1,267 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
import struct
|
||||
from collections import namedtuple
|
||||
import io
|
||||
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('rootdir', type=str, help='the path to rootfs')
|
||||
parser.add_argument('output', type=argparse.FileType('wb'), nargs='?', help='output file name')
|
||||
parser.add_argument('--dump', action='store_true', help='dump the fs hierarchy')
|
||||
parser.add_argument('--binary', action='store_true', help='output binary file')
|
||||
parser.add_argument('--addr', default='0', help='set the base address of the binary file, default to 0.')
|
||||
|
||||
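The parser above is the tool's whole command-line surface: a root directory, an optional output file, and the `--dump/--binary/--addr` switches. A small sketch of driving it from another script; the root directory, output names and base address are illustrative assumptions:

    # Hypothetical driver script; paths and addresses are made up.
    import subprocess
    import sys

    rootfs = 'packages/ramdisk'
    # C array output (default mode), e.g. for compiling the image into the firmware
    subprocess.check_call([sys.executable, 'tools/mkromfs.py', rootfs, 'romfs_data.c'])
    # raw binary image, with dirent addresses rebased to 0x08060000
    subprocess.check_call([sys.executable, 'tools/mkromfs.py', '--binary', '--addr', '0x08060000',
                           rootfs, 'romfs.bin'])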
class File(object):
|
||||
def __init__(self, name):
|
||||
self._name = name
|
||||
self._data = open(name, 'rb').read()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def c_name(self):
|
||||
return '_' + self._name.replace('.', '_')
|
||||
|
||||
@property
|
||||
def bin_name(self):
|
||||
# Pad to 4 bytes boundary with \0
|
||||
pad_len = 4
|
||||
bn = self._name + '\0' * (pad_len - len(self._name) % pad_len)
|
||||
return bn
|
||||
|
||||
def c_data(self, prefix=''):
|
||||
'''Get the C code representation of the file content.'''
|
||||
head = 'static const rt_uint8_t %s[] = {\n' % \
|
||||
(prefix + self.c_name)
|
||||
tail = '\n};'
|
||||
|
||||
if self.entry_size == 0:
|
||||
return ''
|
||||
if len(self._data) > 0 and type(self._data[0]) == int:
|
||||
return head + ','.join(('0x%02x' % i for i in self._data)) + tail
|
||||
else:
|
||||
return head + ','.join(('0x%02x' % ord(i) for i in self._data)) + tail
|
||||
|
||||
@property
|
||||
def entry_size(self):
|
||||
return len(self._data)
|
||||
|
||||
def bin_data(self, base_addr=0x0):
|
||||
return bytes(self._data)
|
||||
|
||||
def dump(self, indent=0):
|
||||
print('%s%s' % (' ' * indent, self._name))
|
||||
|
||||
class Folder(object):
|
||||
bin_fmt = struct.Struct('IIII')
|
||||
bin_item = namedtuple('dirent', 'type, name, data, size')
|
||||
|
||||
def __init__(self, name):
|
||||
self._name = name
|
||||
self._children = []
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def c_name(self):
|
||||
# add _ to avoid conflict with C key words.
|
||||
return '_' + self._name
|
||||
|
||||
@property
|
||||
def bin_name(self):
|
||||
# Pad to 4 bytes boundary with \0
|
||||
pad_len = 4
|
||||
bn = self._name + '\0' * (pad_len - len(self._name) % pad_len)
|
||||
return bn
|
||||
|
||||
def walk(self):
|
||||
# os.listdir will return unicode list if the argument is unicode.
|
||||
# TODO: take care of the unicode names
|
||||
for ent in os.listdir(u'.'):
|
||||
if os.path.isdir(ent):
|
||||
cwd = os.getcwd()
|
||||
d = Folder(ent)
|
||||
# depth-first
|
||||
os.chdir(os.path.join(cwd, ent))
|
||||
d.walk()
|
||||
# restore the cwd
|
||||
os.chdir(cwd)
|
||||
self._children.append(d)
|
||||
else:
|
||||
self._children.append(File(ent))
|
||||
|
||||
def sort(self):
|
||||
def _sort(x, y):
|
||||
if x.name == y.name:
|
||||
return 0
|
||||
elif x.name > y.name:
|
||||
return 1
|
||||
else:
|
||||
return -1
|
||||
from functools import cmp_to_key
|
||||
self._children.sort(key=cmp_to_key(_sort))
|
||||
|
||||
# sort recursively
|
||||
for c in self._children:
|
||||
if isinstance(c, Folder):
|
||||
c.sort()
|
||||
|
||||
def dump(self, indent=0):
|
||||
print('%s%s' % (' ' * indent, self._name))
|
||||
for c in self._children:
|
||||
c.dump(indent + 1)
|
||||
|
||||
def c_data(self, prefix=''):
|
||||
'''Get the C code representation of the folder.

It is recursive.'''
|
||||
# make the current dirent
|
||||
# static is good. Only root dirent is global visible.
|
||||
if self.entry_size == 0:
|
||||
return ''
|
||||
|
||||
dhead = 'static const struct romfs_dirent %s[] = {\n' % (prefix + self.c_name)
|
||||
dtail = '\n};'
|
||||
body_fmt = ' {{{type}, "{name}", (rt_uint8_t *){data}, sizeof({data})/sizeof({data}[0])}}'
|
||||
body_fmt0= ' {{{type}, "{name}", RT_NULL, 0}}'
|
||||
# prefix of children
|
||||
cpf = prefix+self.c_name
|
||||
body_li = []
|
||||
payload_li = []
|
||||
for c in self._children:
|
||||
entry_size = c.entry_size
|
||||
if isinstance(c, File):
|
||||
tp = 'ROMFS_DIRENT_FILE'
|
||||
elif isinstance(c, Folder):
|
||||
tp = 'ROMFS_DIRENT_DIR'
|
||||
else:
|
||||
assert False, 'Unknown instance: %s' % str(c)
|
||||
if entry_size == 0:
|
||||
body_li.append(body_fmt0.format(type=tp, name = c.name))
|
||||
else:
|
||||
body_li.append(body_fmt.format(type=tp,
|
||||
name=c.name,
|
||||
data=cpf+c.c_name))
|
||||
payload_li.append(c.c_data(prefix=cpf))
|
||||
|
||||
# All the data we need is defined in the payload, so append the
# dirent to it. This also keeps the depth-first policy used throughout this code.
|
||||
payload_li.append(dhead + ',\n'.join(body_li) + dtail)
|
||||
|
||||
return '\n\n'.join(payload_li)
|
||||
|
||||
@property
|
||||
def entry_size(self):
|
||||
return len(self._children)
|
||||
|
||||
def bin_data(self, base_addr=0x0):
|
||||
'''Return StringIO object'''
|
||||
# The binary layout is different from the C code layout. We put the
|
||||
# dirent before the payload in this mode. But the idea is still simple:
|
||||
# Depth-First.
|
||||
|
||||
#{
|
||||
# rt_uint32_t type;
|
||||
# const char *name;
|
||||
# const rt_uint8_t *data;
|
||||
# rt_size_t size;
|
||||
#}
|
||||
d_li = []
|
||||
# payload base
|
||||
p_base = base_addr + self.bin_fmt.size * self.entry_size
|
||||
# running length: tracks how many bytes have been laid out so far
|
||||
v_len = p_base
|
||||
# payload
|
||||
p_li = []
|
||||
for c in self._children:
|
||||
if isinstance(c, File):
|
||||
# ROMFS_DIRENT_FILE
|
||||
tp = 0
|
||||
elif isinstance(c, Folder):
|
||||
# ROMFS_DIRENT_DIR
|
||||
tp = 1
|
||||
else:
|
||||
assert False, 'Unknown instance: %s' % str(c)
|
||||
|
||||
name = bytes(c.bin_name.encode('utf-8'))
|
||||
name_addr = v_len
|
||||
v_len += len(name)
|
||||
|
||||
data = c.bin_data(base_addr=v_len)
|
||||
data_addr = v_len
|
||||
# pad the data to 4 bytes boundary
|
||||
pad_len = 4
|
||||
if len(data) % pad_len != 0:
|
||||
data += ('\0' * (pad_len - len(data) % pad_len)).encode('utf-8')
|
||||
v_len += len(data)
|
||||
|
||||
d_li.append(self.bin_fmt.pack(*self.bin_item(
|
||||
type=tp,
|
||||
name=name_addr,
|
||||
data=data_addr,
|
||||
size=c.entry_size)))
|
||||
|
||||
p_li.extend((name, data))
|
||||
|
||||
return bytes().join(d_li) + bytes().join(p_li)
|
||||
|
||||
def get_c_data(tree):
|
||||
# Handle the root dirent specially.
|
||||
root_dirent_fmt = '''/* Generated by mkromfs. Edit with caution. */
|
||||
#include <rtthread.h>
|
||||
#include <dfs_romfs.h>
|
||||
|
||||
{data}
|
||||
|
||||
const struct romfs_dirent {name} = {{
|
||||
ROMFS_DIRENT_DIR, "/", (rt_uint8_t *){rootdirent}, sizeof({rootdirent})/sizeof({rootdirent}[0])
|
||||
}};
|
||||
'''
|
||||
|
||||
return root_dirent_fmt.format(name='romfs_root',
|
||||
rootdirent=tree.c_name,
|
||||
data=tree.c_data())
|
||||
|
||||
def get_bin_data(tree, base_addr):
|
||||
v_len = base_addr + Folder.bin_fmt.size
|
||||
name = bytes('/\0\0\0'.encode("utf-8"))
|
||||
name_addr = v_len
|
||||
v_len += len(name)
|
||||
data_addr = v_len
|
||||
# root entry
|
||||
data = Folder.bin_fmt.pack(*Folder.bin_item(type=1,
|
||||
name=name_addr,
|
||||
data=data_addr,
|
||||
size=tree.entry_size))
|
||||
return data + name + tree.bin_data(v_len)
|
||||
|
||||
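Since each dirent is packed as four native 32-bit fields (`type, name, data, size`, per `Folder.bin_fmt = struct.Struct('IIII')`), an image generated with `--binary --addr 0` can be sanity-checked by unpacking the root entry. A minimal sketch under that assumption; the helper name and file name are illustrative:

    # Hypothetical verification helper for an image produced with "--binary --addr 0".
    import struct

    def dump_root_dirent(path):
        fmt = struct.Struct('IIII')              # same layout as Folder.bin_fmt: type, name, data, size
        with open(path, 'rb') as f:
            image = f.read()
        tp, name_off, data_off, size = fmt.unpack_from(image, 0)
        # offsets are absolute addresses; with --addr 0 they index straight into the file
        name = image[name_off:image.index(b'\0', name_off)].decode('utf-8')
        print('type=%d name=%r children=%d payload@0x%x' % (tp, name, size, data_off))

    # dump_root_dirent('romfs.bin')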
if __name__ == '__main__':
|
||||
args = parser.parse_args()
|
||||
|
||||
os.chdir(args.rootdir)
|
||||
|
||||
tree = Folder('romfs_root')
|
||||
tree.walk()
|
||||
tree.sort()
|
||||
|
||||
if args.dump:
|
||||
tree.dump()
|
||||
|
||||
if args.binary:
|
||||
data = get_bin_data(tree, int(args.addr, 16))
|
||||
else:
|
||||
data = get_c_data(tree).encode()
|
||||
|
||||
output = args.output
|
||||
if not output:
|
||||
output = sys.stdout
|
||||
|
||||
output.write(data)
|
143
rt-thread/tools/options.py
Normal file
@@ -0,0 +1,143 @@
|
||||
#
|
||||
# File : options.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2022-04-20 WuGensheng Add Options to SCons
|
||||
#
|
||||
|
||||
from SCons.Script import AddOption
|
||||
import platform
|
||||
|
||||
def AddOptions():
|
||||
''' ===== Add generic options to SCons ===== '''
|
||||
AddOption('--dist',
|
||||
dest = 'make-dist',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'make distribution')
|
||||
AddOption('--dist-ide', '--dist-rtstudio',
|
||||
dest = 'make-dist-ide',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'make distribution for RT-Thread Studio IDE')
|
||||
AddOption('--project-path',
|
||||
dest = 'project-path',
|
||||
type = 'string',
|
||||
default = None,
|
||||
help = 'set project output path')
|
||||
AddOption('--project-name',
|
||||
dest = 'project-name',
|
||||
type = 'string',
|
||||
default = "project",
|
||||
help = 'set project name')
|
||||
AddOption('--cscope',
|
||||
dest = 'cscope',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'Build Cscope cross reference database. Requires cscope installed.')
|
||||
AddOption('--clang-analyzer',
|
||||
dest = 'clang-analyzer',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'Perform static analyze with Clang-analyzer. ' + \
|
||||
'Requires Clang installed.' + \
|
||||
'It is recommended to use with scan-build like this:' + \
|
||||
'`scan-build scons --clang-analyzer`' + \
|
||||
'If things goes well, scan-build will instruct you to invoke scan-view.')
|
||||
AddOption('--buildlib',
|
||||
dest = 'buildlib',
|
||||
type = 'string',
|
||||
help = 'building library of a component')
|
||||
AddOption('--cleanlib',
|
||||
dest = 'cleanlib',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'clean up the library by --buildlib')
|
||||
AddOption('--target',
|
||||
dest = 'target',
|
||||
type = 'string',
|
||||
help = 'set target project: mdk/mdk4/mdk5/iar/vs/vsc/ua/cdk/ses/makefile/eclipse/codelite/cmake')
|
||||
AddOption('--cmsispack',
|
||||
dest = 'cmsispack',
|
||||
type = 'string',
|
||||
help = 'set pack: <cmsispack path>')
|
||||
AddOption('--strict',
|
||||
dest='strict-compiling',
|
||||
help='Compiling project with strict mode and ALL warning will be errors',
|
||||
action='store_true',
|
||||
default=False)
|
||||
AddOption('--verbose',
|
||||
dest = 'verbose',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'print verbose information during build')
|
||||
AddOption('--cc-prefix', '--exec-prefix',
|
||||
dest = 'exec-prefix',
|
||||
type = 'string',
|
||||
help = 'set RTT_CC_PREFIX temporarily')
|
||||
AddOption('--cc-path', '--exec-path',
|
||||
dest = 'exec-path',
|
||||
type = 'string',
|
||||
help = 'set RTT_EXEC_PATH temporarily')
|
||||
AddOption('--stackanalysis',
|
||||
dest = 'stackanalysis',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'thread stack static analysis')
|
||||
AddOption('--genconfig',
|
||||
dest = 'genconfig',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'Generate .config from rtconfig.h')
|
||||
AddOption('--useconfig',
|
||||
dest = 'useconfig',
|
||||
type = 'string',
|
||||
help = 'make rtconfig.h from config file.')
|
||||
AddOption('--global-macros',
|
||||
dest = 'global-macros',
|
||||
type = 'string',
|
||||
help = 'attach global macros to the project. '+\
'e.g. scons --global-macros=RT_USING_XX,RT_USING_YY'+\
' or scons --global-macros="RT_USING_XX, RT_USING_YY"')
|
||||
AddOption('--reset-project-config',
|
||||
dest = 'reset-project-config',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'reset the project configurations to default')
|
||||
AddOption('--pyconfig', '--guiconfig',
|
||||
dest = 'pyconfig',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'Python GUI menuconfig for RT-Thread BSP')
|
||||
AddOption('--pyconfig-silent', '--defconfig',
|
||||
dest = 'pyconfig-silent',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = "Don't show the pyconfig window")
|
||||
AddOption('--menuconfig',
|
||||
dest = 'menuconfig',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'make menuconfig for RT-Thread BSP')
|
||||
AddOption('--cdb',
|
||||
dest = 'cdb',
|
||||
action = 'store_true',
|
||||
default = False,
|
||||
help = 'make compile_commands.json')
|
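Each `dest` string above becomes the key later returned by SCons' `GetOption()`; the distribution flow in this commit, for instance, reads `'target'`, `'project-name'` and `'project-path'`. A minimal sketch of an SConstruct consuming these options, assuming `tools/` is importable; the print is illustrative:

    # Hypothetical SConstruct fragment; assumes rt-thread/tools is on sys.path.
    from SCons.Script import GetOption
    from options import AddOptions

    AddOptions()                                   # register all options defined above

    if GetOption('make-dist'):
        name = GetOption('project-name')           # defaults to "project"
        path = GetOption('project-path')           # None unless --project-path is given
        print('dist requested: name=%s path=%s' % (name, path))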
74
rt-thread/tools/package.py
Normal file
@@ -0,0 +1,74 @@
|
||||
#
|
||||
# File : package.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-04-10 Bernard First version
|
||||
#
|
||||
|
||||
# this script is used to build a group with package.json instead of SConscript
|
||||
import os
|
||||
from building import *
|
||||
|
||||
def ExtendPackageVar(package, var):
|
||||
v = []
|
||||
if var not in package:
|
||||
return v
|
||||
|
||||
for item in package[var]:
|
||||
v = v + [item]
|
||||
|
||||
return v
|
||||
|
||||
def BuildPackage(package):
|
||||
import json
|
||||
f = open(package)
|
||||
package_json = f.read()
|
||||
|
||||
# get package.json path
|
||||
cwd = os.path.dirname(package)
|
||||
|
||||
package = json.loads(package_json)
|
||||
|
||||
# check package name
|
||||
if 'name' not in package:
|
||||
return []
|
||||
|
||||
# get depends
|
||||
depend = ExtendPackageVar(package, 'depends')
|
||||
|
||||
src = []
|
||||
if 'source_files' in package:
|
||||
for src_file in package['source_files']:
|
||||
src_file = os.path.join(cwd, src_file)
|
||||
src += Glob(src_file)
|
||||
|
||||
CPPPATH = []
|
||||
if 'CPPPATH' in package:
|
||||
for path in package['CPPPATH']:
|
||||
if path.startswith('/') and os.path.isdir(path):
|
||||
CPPPATH = CPPPATH + [path]
|
||||
else:
|
||||
CPPPATH = CPPPATH + [os.path.join(cwd, path)]
|
||||
|
||||
CPPDEFINES = ExtendPackageVar(package, 'CPPDEFINES')
|
||||
|
||||
objs = DefineGroup(package['name'], src, depend = depend, CPPPATH = CPPPATH, CPPDEFINES = CPPDEFINES)
|
||||
|
||||
return objs
|
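BuildPackage() only relies on a handful of keys (`name`, `depends`, `source_files`, `CPPPATH`, `CPPDEFINES`). A sketch of a matching package.json, written from Python so the field names stay in one place; the package name, macros and paths are illustrative assumptions:

    # Hypothetical example of the JSON layout BuildPackage() consumes.
    import json

    example_package = {
        "name": "my_sensor_driver",        # required; BuildPackage() returns [] without it
        "depends": ["RT_USING_I2C"],       # passed to DefineGroup(depend=...)
        "source_files": ["src/*.c"],       # globbed relative to the package.json directory
        "CPPPATH": ["inc"],                # relative entries are joined with that directory
        "CPPDEFINES": ["MY_SENSOR_DEBUG"]
    }

    with open('package.json', 'w') as f:
        json.dump(example_package, f, indent=4)

    # An SConscript can then simply do:
    #     objs = BuildPackage(os.path.join(GetCurrentDir(), 'package.json'))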
52
rt-thread/tools/release/README.md
Normal file
@@ -0,0 +1,52 @@
# Automatic update and deployment before a release

Running `python buildbot.py update` from the release folder inside the ENV environment completes **part** of the preparation work **before** a release. Additional pre-release automation scripts are welcome.

What can currently be updated and deployed automatically:

1. Update all BSP projects, including the .config and rtconfig files, and refresh the Keil/IAR (and other) project files
2. Update the STM32 startup files:
   1. Replace `main` with the `entry` function in the GCC assembly startup files
   2. Reduce the startup-file heap to 0 (Keil, IAR)
   3. Extend the GCC stack size to 0x400, consistent with the Keil and IAR startup files

## Automatic code formatting with clang-format

run-clang-format.py formats the code with the clang-format tool according to the `.clang-format` and `.clang-format-ignore` files.

If you do **not** want a folder to be formatted, add a `.clang-format` file to that folder with the following content:

```yaml
---
Language: Cpp
DisableFormat: true
---
```

If you do **not** want a code fragment to be formatted, wrap it with `// clang-format off/on` in the code:

```c
int formatted_code;
// clang-format off
void unformatted_code ;
// clang-format on
void formatted_code_again;
```

The following commands format all code except for folders such as bsp, elmfat and lwip:

```shell
# install clang-format
pip install clang-format
# switch to the RT-Thread root directory
cd $RTT_ROOT
# run the formatter
# -r recurse into sub-directories, -i write results back to files, -e exclude directories, -j number of parallel jobs, . current directory
python tools/release/run-clang-format.py -r -i -e bsp/**/* -j 10 .
```

If the following error is reported during formatting, the file usually contains characters that cannot be decoded as UTF-8.

```shell
error: Command 'clang-format -i libcpu\aarch64\common\asm-fpu.h' returned non-zero exit status 1
```
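run-clang-format.py also honors a `.clang-format-ignore` file in the working directory: one glob pattern per line, with `#` comments and blank lines allowed (see `excludes_from_file()` later in this commit). A short sketch that generates one; the listed directories follow the bsp/elmfat/lwip exclusions mentioned above but the exact patterns are illustrative:

    # Hypothetical generator for a .clang-format-ignore file.
    ignore_patterns = [
        '# third-party and ported code keeps its upstream style',
        'bsp/*',
        'components/dfs/filesystems/elmfat/*',
        'components/net/lwip*/*',
    ]

    with open('.clang-format-ignore', 'w') as f:
        f.write('\n'.join(ignore_patterns) + '\n')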
88
rt-thread/tools/release/buildbot.py
Normal file
@@ -0,0 +1,88 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
def usage():
|
||||
print('%s all -- build all bsp' % os.path.basename(sys.argv[0]))
|
||||
print('%s clean -- clean all bsp' % os.path.basename(sys.argv[0]))
|
||||
print('%s update -- update all project files' % os.path.basename(sys.argv[0]))
|
||||
|
||||
BSP_ROOT = os.path.join("..", "..", "bsp")
|
||||
|
||||
if len(sys.argv) != 2:
|
||||
usage()
|
||||
sys.exit(0)
|
||||
|
||||
def update_project_file(project_dir):
|
||||
if os.path.isfile(os.path.join(project_dir, 'template.Uv2')):
|
||||
print('prepare MDK3 project file on ' + project_dir)
|
||||
command = ' --target=mdk -s'
|
||||
os.system('scons --directory=' + project_dir + command + ' > 1.txt')
|
||||
|
||||
if os.path.isfile(os.path.join(project_dir, 'template.uvproj')):
|
||||
print('prepare MDK4 project file on ' + project_dir)
|
||||
command = ' --target=mdk4 -s'
|
||||
os.system('scons --directory=' + project_dir + command + ' > 1.txt')
|
||||
|
||||
if os.path.isfile(os.path.join(project_dir, 'template.uvprojx')):
|
||||
print('prepare MDK5 project file on ' + project_dir)
|
||||
command = ' --target=mdk5 -s'
|
||||
os.system('scons --directory=' + project_dir + command + ' > 1.txt')
|
||||
|
||||
if os.path.isfile(os.path.join(project_dir, 'template.ewp')):
|
||||
print('prepare IAR project file on ' + project_dir)
|
||||
command = ' --target=iar -s'
|
||||
os.system('scons --directory=' + project_dir + command + ' > 1.txt')
|
||||
|
||||
|
||||
def update_all_project_files(root_path):
|
||||
# current path is dir
|
||||
if os.path.isdir(root_path):
|
||||
projects = os.listdir(root_path)
|
||||
# is a project path?
|
||||
if "SConstruct" in projects:
|
||||
try:
|
||||
# update rtconfig.h and .config
|
||||
if "Kconfig" in projects:
|
||||
if "win32" in sys.platform:
|
||||
retval = os.getcwd()
|
||||
os.chdir(root_path)
|
||||
os.system("menuconfig --silent")
|
||||
os.chdir(retval)
|
||||
else:
|
||||
os.system('scons --pyconfig-silent -C {0}'.format(root_path))
|
||||
update_project_file(root_path)
|
||||
except Exception as e:
|
||||
print("error message: {}".format(e))
|
||||
sys.exit(-1)
|
||||
else:
|
||||
for i in projects:
|
||||
new_root_path = os.path.join(root_path, i)
|
||||
update_all_project_files(new_root_path)
|
||||
|
||||
# get command options
|
||||
command = ''
|
||||
if sys.argv[1] == 'all':
|
||||
command = ' '
|
||||
elif sys.argv[1] == 'clean':
|
||||
command = ' -c'
|
||||
elif sys.argv[1] == 'update':
|
||||
print('begin to update all the bsp projects')
|
||||
|
||||
from stm32_update import stm32_update
|
||||
stm32_update(os.path.join(BSP_ROOT, 'stm32'))
|
||||
|
||||
update_all_project_files(BSP_ROOT)
|
||||
|
||||
print('finished!')
|
||||
sys.exit(0)
|
||||
else:
|
||||
usage()
|
||||
sys.exit(0)
|
||||
|
||||
projects = os.listdir(BSP_ROOT)
|
||||
for item in projects:
|
||||
project_dir = os.path.join(BSP_ROOT, item)
|
||||
if os.path.isfile(os.path.join(project_dir, 'SConstruct')):
|
||||
if os.system('scons --directory=' + project_dir + command) != 0:
|
||||
print('build failed!!')
|
||||
break
|
426
rt-thread/tools/release/run-clang-format.py
Normal file
@@ -0,0 +1,426 @@
|
||||
#!/usr/bin/env python
|
||||
"""A wrapper script around clang-format, suitable for linting multiple files
|
||||
and to use for continuous integration.
|
||||
|
||||
This is an alternative API for the clang-format command line.
|
||||
It runs over multiple files and directories in parallel.
|
||||
A diff output is produced and a sensible exit code is returned.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import difflib
|
||||
import fnmatch
|
||||
import io
|
||||
import errno
|
||||
import multiprocessing
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
import platform
|
||||
|
||||
from functools import partial
|
||||
|
||||
try:
|
||||
from subprocess import DEVNULL # py3k
|
||||
except ImportError:
|
||||
DEVNULL = open(os.devnull, "wb")
|
||||
|
||||
|
||||
DEFAULT_EXTENSIONS = "c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx"
|
||||
DEFAULT_CLANG_FORMAT_IGNORE = ".clang-format-ignore"
|
||||
|
||||
|
||||
class ExitStatus:
|
||||
SUCCESS = 0
|
||||
DIFF = 1
|
||||
TROUBLE = 2
|
||||
|
||||
|
||||
def excludes_from_file(ignore_file):
|
||||
excludes = []
|
||||
try:
|
||||
with io.open(ignore_file, "r", encoding="utf-8") as f:
|
||||
for line in f:
|
||||
if line.startswith("#"):
|
||||
# ignore comments
|
||||
continue
|
||||
pattern = line.rstrip()
|
||||
if not pattern:
|
||||
# allow empty lines
|
||||
continue
|
||||
excludes.append(pattern)
|
||||
except EnvironmentError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
return excludes
|
||||
|
||||
|
||||
def list_files(files, recursive=False, extensions=None, exclude=None):
|
||||
if extensions is None:
|
||||
extensions = []
|
||||
if exclude is None:
|
||||
exclude = []
|
||||
|
||||
out = []
|
||||
for file in files:
|
||||
if recursive and os.path.isdir(file):
|
||||
for dirpath, dnames, fnames in os.walk(file):
|
||||
fpaths = [
|
||||
os.path.relpath(os.path.join(dirpath, fname), os.getcwd())
|
||||
for fname in fnames
|
||||
]
|
||||
for pattern in exclude:
|
||||
# os.walk() supports trimming down the dnames list
|
||||
# by modifying it in-place,
|
||||
# to avoid unnecessary directory listings.
|
||||
dnames[:] = [
|
||||
x
|
||||
for x in dnames
|
||||
if not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)
|
||||
]
|
||||
fpaths = [x for x in fpaths if not fnmatch.fnmatch(x, pattern)]
|
||||
|
||||
for f in fpaths:
|
||||
ext = os.path.splitext(f)[1][1:]
|
||||
if ext in extensions:
|
||||
out.append(f)
|
||||
else:
|
||||
out.append(file)
|
||||
return out
|
||||
|
||||
|
||||
def make_diff(file, original, reformatted):
|
||||
return list(
|
||||
difflib.unified_diff(
|
||||
original,
|
||||
reformatted,
|
||||
fromfile="{}\t(original)".format(file),
|
||||
tofile="{}\t(reformatted)".format(file),
|
||||
n=3,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class DiffError(Exception):
|
||||
def __init__(self, message, errs=None):
|
||||
super(DiffError, self).__init__(message)
|
||||
self.errs = errs or []
|
||||
|
||||
|
||||
class UnexpectedError(Exception):
|
||||
def __init__(self, message, exc=None):
|
||||
super(UnexpectedError, self).__init__(message)
|
||||
self.formatted_traceback = traceback.format_exc()
|
||||
self.exc = exc
|
||||
|
||||
|
||||
def run_clang_format_diff_wrapper(args, file):
|
||||
try:
|
||||
ret = run_clang_format_diff(args, file)
|
||||
return ret
|
||||
except DiffError:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise UnexpectedError("{}: {}: {}".format(file, e.__class__.__name__, e), e)
|
||||
|
||||
|
||||
def run_clang_format_diff(args, file):
|
||||
    try:
        with io.open(file, "r", encoding="utf-8") as f:
            original = f.readlines()
    except IOError as exc:
        raise DiffError(str(exc))
|
||||
|
||||
if args.in_place:
|
||||
invocation = [args.clang_format_executable, "-i", file]
|
||||
else:
|
||||
invocation = [args.clang_format_executable, file]
|
||||
|
||||
if args.style:
|
||||
invocation.extend(["--style", args.style])
|
||||
|
||||
if args.dry_run:
|
||||
print(" ".join(invocation))
|
||||
return [], []
|
||||
|
||||
# Use of utf-8 to decode the process output.
|
||||
#
|
||||
# Hopefully, this is the correct thing to do.
|
||||
#
|
||||
# It's done due to the following assumptions (which may be incorrect):
|
||||
# - clang-format will return the bytes read from the files as-is,
|
||||
# without conversion, and it is already assumed that the files use utf-8.
|
||||
# - if the diagnostics were internationalized, they would use utf-8:
|
||||
# > Adding Translations to Clang
|
||||
# >
|
||||
# > Not possible yet!
|
||||
# > Diagnostic strings should be written in UTF-8,
|
||||
# > the client can translate to the relevant code page if needed.
|
||||
# > Each translation completely replaces the format string
|
||||
# > for the diagnostic.
|
||||
# > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation
|
||||
#
|
||||
# It's not pretty, due to Python 2 & 3 compatibility.
|
||||
encoding_py3 = {}
|
||||
if sys.version_info[0] >= 3:
|
||||
encoding_py3["encoding"] = "utf-8"
|
||||
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
invocation,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
**encoding_py3
|
||||
)
|
||||
except OSError as exc:
|
||||
raise DiffError(
|
||||
"Command '{}' failed to start: {}".format(
|
||||
subprocess.list2cmdline(invocation), exc
|
||||
)
|
||||
)
|
||||
proc_stdout = proc.stdout
|
||||
proc_stderr = proc.stderr
|
||||
if sys.version_info[0] < 3:
|
||||
# make the pipes compatible with Python 3,
|
||||
# reading lines should output unicode
|
||||
encoding = "utf-8"
|
||||
proc_stdout = codecs.getreader(encoding)(proc_stdout)
|
||||
proc_stderr = codecs.getreader(encoding)(proc_stderr)
|
||||
# hopefully the stderr pipe won't get full and block the process
|
||||
outs = list(proc_stdout.readlines())
|
||||
errs = list(proc_stderr.readlines())
|
||||
proc.wait()
|
||||
if proc.returncode:
|
||||
raise DiffError(
|
||||
"Command '{}' returned non-zero exit status {}".format(
|
||||
subprocess.list2cmdline(invocation), proc.returncode
|
||||
),
|
||||
errs,
|
||||
)
|
||||
if args.in_place:
|
||||
return [], errs
|
||||
return make_diff(file, original, outs), errs
|
||||
|
||||
|
||||
def bold_red(s):
|
||||
return "\x1b[1m\x1b[31m" + s + "\x1b[0m"
|
||||
|
||||
|
||||
def colorize(diff_lines):
|
||||
def bold(s):
|
||||
return "\x1b[1m" + s + "\x1b[0m"
|
||||
|
||||
def cyan(s):
|
||||
return "\x1b[36m" + s + "\x1b[0m"
|
||||
|
||||
def green(s):
|
||||
return "\x1b[32m" + s + "\x1b[0m"
|
||||
|
||||
def red(s):
|
||||
return "\x1b[31m" + s + "\x1b[0m"
|
||||
|
||||
for line in diff_lines:
|
||||
if line[:4] in ["--- ", "+++ "]:
|
||||
yield bold(line)
|
||||
elif line.startswith("@@ "):
|
||||
yield cyan(line)
|
||||
elif line.startswith("+"):
|
||||
yield green(line)
|
||||
elif line.startswith("-"):
|
||||
yield red(line)
|
||||
else:
|
||||
yield line
|
||||
|
||||
|
||||
def print_diff(diff_lines, use_color):
|
||||
if use_color:
|
||||
diff_lines = colorize(diff_lines)
|
||||
if sys.version_info[0] < 3:
|
||||
sys.stdout.writelines((l.encode("utf-8") for l in diff_lines))
|
||||
else:
|
||||
sys.stdout.writelines(diff_lines)
|
||||
|
||||
|
||||
def print_trouble(prog, message, use_colors):
|
||||
error_text = "error:"
|
||||
if use_colors:
|
||||
error_text = bold_red(error_text)
|
||||
print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
"--clang-format-executable",
|
||||
metavar="EXECUTABLE",
|
||||
help="path to the clang-format executable",
|
||||
default="clang-format",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--extensions",
|
||||
help="comma separated list of file extensions (default: {})".format(
|
||||
DEFAULT_EXTENSIONS
|
||||
),
|
||||
default=DEFAULT_EXTENSIONS,
|
||||
)
|
||||
parser.add_argument(
|
||||
"-r",
|
||||
"--recursive",
|
||||
action="store_true",
|
||||
help="run recursively over directories",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d", "--dry-run", action="store_true", help="just print the list of files"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
"--in-place",
|
||||
action="store_true",
|
||||
help="format file instead of printing differences",
|
||||
)
|
||||
parser.add_argument("files", metavar="file", nargs="+")
|
||||
parser.add_argument(
|
||||
"-q",
|
||||
"--quiet",
|
||||
action="store_true",
|
||||
help="disable output, useful for the exit code",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-j",
|
||||
metavar="N",
|
||||
type=int,
|
||||
default=0,
|
||||
help="run N clang-format jobs in parallel" " (default number of cpus + 1)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--color",
|
||||
default="auto",
|
||||
choices=["auto", "always", "never"],
|
||||
help="show colored diff (default: auto)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
"--exclude",
|
||||
metavar="PATTERN",
|
||||
action="append",
|
||||
default=[],
|
||||
help="exclude paths matching the given glob-like pattern(s)"
|
||||
" from recursive search",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--style",
|
||||
default="file",
|
||||
help="formatting style to apply (LLVM, Google, Chromium, Mozilla, WebKit)",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# use default signal handling, like diff return SIGINT value on ^C
|
||||
# https://bugs.python.org/issue14229#msg156446
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
try:
|
||||
signal.SIGPIPE
|
||||
except AttributeError:
|
||||
# compatibility, SIGPIPE does not exist on Windows
|
||||
pass
|
||||
else:
|
||||
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
|
||||
|
||||
colored_stdout = False
|
||||
colored_stderr = False
|
||||
if args.color == "always":
|
||||
colored_stdout = True
|
||||
colored_stderr = True
|
||||
elif args.color == "auto":
|
||||
colored_stdout = sys.stdout.isatty()
|
||||
colored_stderr = sys.stderr.isatty()
|
||||
|
||||
version_invocation = [args.clang_format_executable, str("--version")]
|
||||
try:
|
||||
subprocess.check_call(version_invocation, stdout=DEVNULL)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
return ExitStatus.TROUBLE
|
||||
except OSError as e:
|
||||
print_trouble(
|
||||
parser.prog,
|
||||
"Command '{}' failed to start: {}".format(
|
||||
subprocess.list2cmdline(version_invocation), e
|
||||
),
|
||||
use_colors=colored_stderr,
|
||||
)
|
||||
return ExitStatus.TROUBLE
|
||||
|
||||
retcode = ExitStatus.SUCCESS
|
||||
|
||||
if os.path.exists(DEFAULT_CLANG_FORMAT_IGNORE):
|
||||
excludes = excludes_from_file(DEFAULT_CLANG_FORMAT_IGNORE)
|
||||
else:
|
||||
excludes = []
|
||||
excludes.extend(args.exclude)
|
||||
|
||||
files = list_files(
|
||||
args.files,
|
||||
recursive=args.recursive,
|
||||
exclude=excludes,
|
||||
extensions=args.extensions.split(","),
|
||||
)
|
||||
|
||||
if not files:
|
||||
return
|
||||
|
||||
njobs = args.j
|
||||
if njobs == 0:
|
||||
njobs = multiprocessing.cpu_count() + 1
|
||||
njobs = min(len(files), njobs)
|
||||
|
||||
if njobs == 1:
|
||||
# execute directly instead of in a pool,
|
||||
# less overhead, simpler stacktraces
|
||||
it = (run_clang_format_diff_wrapper(args, file) for file in files)
|
||||
pool = None
|
||||
else:
|
||||
pool = multiprocessing.Pool(njobs)
|
||||
it = pool.imap_unordered(partial(run_clang_format_diff_wrapper, args), files)
|
||||
pool.close()
|
||||
while True:
|
||||
try:
|
||||
outs, errs = next(it)
|
||||
except StopIteration:
|
||||
break
|
||||
except DiffError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
retcode = ExitStatus.TROUBLE
|
||||
sys.stderr.writelines(e.errs)
|
||||
except UnexpectedError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
sys.stderr.write(e.formatted_traceback)
|
||||
retcode = ExitStatus.TROUBLE
|
||||
# stop at the first unexpected error,
|
||||
# something could be very wrong,
|
||||
# don't process all files unnecessarily
|
||||
if pool:
|
||||
pool.terminate()
|
||||
break
|
||||
else:
|
||||
sys.stderr.writelines(errs)
|
||||
if outs == []:
|
||||
continue
|
||||
if not args.quiet:
|
||||
print_diff(outs, use_color=colored_stdout)
|
||||
if retcode == ExitStatus.SUCCESS:
|
||||
retcode = ExitStatus.DIFF
|
||||
if pool:
|
||||
pool.join()
|
||||
return retcode
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
125
rt-thread/tools/release/stm32_update.py
Normal file
@@ -0,0 +1,125 @@
|
||||
# Copyright (c) 2006-2022, RT-Thread Development Team
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2021-10-11 Meco Man First version
|
||||
|
||||
# STM32 startup file updates:
# 1. replace 'main' with 'entry' (GCC)
# 2. reduce the heap size to 0x000 (Keil, IAR)
# 3. extend the GCC stack size to 0x400, the same as in the Keil and IAR startup files.
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
# replace 'bl main' to 'bl entry'
|
||||
def stm32update_main2entry(path):
|
||||
oldline = ''
|
||||
newline = ''
|
||||
|
||||
for root, dirs, files in os.walk(path):
|
||||
for file in files:
|
||||
if os.path.splitext(file)[1] == '.s': # find .s files (Keil MDK)
|
||||
file_path = os.path.join(root,file)
|
||||
flag_need_replace = False
|
||||
with open(file_path,'r+',) as f:
|
||||
while True:
|
||||
line = f.readline()
|
||||
if line == '':
|
||||
break
|
||||
elif ('bl' in line) and ('main' in line): # find 'bl main'
|
||||
oldline = line # bl main
|
||||
newline = line.replace('main', 'entry') # use 'entry' to replace 'main'
|
||||
flag_need_replace = True # mark that need to be replaced
|
||||
break
|
||||
|
||||
if (flag_need_replace == True): # use 'entry' to replace 'main'
|
||||
f.seek(0)
|
||||
content = f.read()
|
||||
f.seek(0)
|
||||
f.truncate()
|
||||
newcontent = content.replace(oldline, newline)
|
||||
f.write(newcontent)
|
||||
|
||||
#reduce the heap size as 0x000
|
||||
def stm32update_heap2zero(path):
|
||||
oldline = ''
|
||||
newline = ''
|
||||
for root, dirs, files in os.walk(path):
|
||||
for file in files:
|
||||
file_path = os.path.join(root,file)
|
||||
if os.path.splitext(file)[1] == '.s': # find .s files (Keil MDK)
|
||||
with open(file_path,'r+',) as f:
|
||||
flag_need_replace = False
|
||||
while True:
|
||||
line = f.readline()
|
||||
if line == '':
|
||||
break
|
||||
|
||||
re_result = re.match('\s*Heap_Size\s+EQU\s+0[xX][0-9a-fA-F]+', line)
|
||||
if re_result != None:
|
||||
oldline = line
|
||||
newline = re.sub('0[xX][0-9a-fA-F]+','0x00000000', oldline)
|
||||
flag_need_replace = True
|
||||
break
|
||||
|
||||
if flag_need_replace == True:
|
||||
f.seek(0)
|
||||
content = f.read()
|
||||
f.seek(0)
|
||||
f.truncate()
|
||||
newcontent = content.replace(oldline, newline)
|
||||
f.write(newcontent)
|
||||
|
||||
elif os.path.splitext(file)[1] == '.icf': # find .icf files (IAR)
|
||||
with open(file_path,'r+',) as f:
|
||||
flag_need_replace = False
|
||||
while True:
|
||||
line = f.readline()
|
||||
if line == '':
|
||||
break
|
||||
|
||||
re_result = re.match('\s*define\s+symbol\s+__ICFEDIT_size_heap__\s*=\s*0[xX][0-9a-fA-F]+', line)
|
||||
if re_result != None:
|
||||
oldline = line
|
||||
newline = re.sub('0[xX][0-9a-fA-F]+','0x000', oldline)
|
||||
flag_need_replace = True
|
||||
break
|
||||
|
||||
if flag_need_replace == True:
|
||||
f.seek(0)
|
||||
content = f.read()
|
||||
f.seek(0)
|
||||
f.truncate()
|
||||
newcontent = content.replace(oldline, newline)
|
||||
f.write(newcontent)
|
||||
|
||||
elif os.path.splitext(file)[1] == '.lds': # find .lds files (GCC)
|
||||
with open(file_path,'r+',) as f:
|
||||
flag_need_replace = False
|
||||
while True:
|
||||
line = f.readline()
|
||||
if line == '':
|
||||
break
|
||||
|
||||
re_result = re.match('\s*_system_stack_size\s*=\s*0[xX][0-9a-fA-F]+', line)
|
||||
if re_result != None:
|
||||
oldline = line
|
||||
newline = re.sub('0[xX][0-9a-fA-F]+','0x400', oldline)
|
||||
flag_need_replace = True
|
||||
break
|
||||
|
||||
if flag_need_replace == True:
|
||||
f.seek(0)
|
||||
content = f.read()
|
||||
f.seek(0)
|
||||
f.truncate()
|
||||
newcontent = content.replace(oldline, newline)
|
||||
f.write(newcontent)
|
||||
|
||||
|
||||
def stm32_update(path):
|
||||
stm32update_main2entry(path)
|
||||
stm32update_heap2zero(path)
|
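The three rewrites above are all single-line regex substitutions on the first matching line of each file. A tiny self-contained check of the Heap_Size rule; the sample line mimics a Keil startup file and is not taken from a real BSP:

    # Hypothetical demonstration of the rewrite done by stm32update_heap2zero().
    import re

    sample = 'Heap_Size       EQU     0x00000400'
    if re.match(r'\s*Heap_Size\s+EQU\s+0[xX][0-9a-fA-F]+', sample):
        print(re.sub(r'0[xX][0-9a-fA-F]+', '0x00000000', sample))
        # -> Heap_Size       EQU     0x00000000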
350
rt-thread/tools/rt_studio.py
Normal file
@@ -0,0 +1,350 @@
|
||||
import os
|
||||
import re
|
||||
from string import Template
|
||||
|
||||
import rtconfig
|
||||
import shutil
|
||||
import time
|
||||
|
||||
# version
|
||||
MODULE_VER_NUM = 1
|
||||
|
||||
cproject_temp = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<?fileVersion 4.0.0?><cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
|
||||
<storageModule moduleId="org.eclipse.cdt.core.settings">
|
||||
<cconfiguration id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094">
|
||||
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094" moduleId="org.eclipse.cdt.core.settings" name="Debug">
|
||||
<externalSettings/>
|
||||
<extensions>
|
||||
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
</extensions>
|
||||
</storageModule>
|
||||
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
|
||||
<configuration artifactName="rtthread" buildArtefactType="org.eclipse.cdt.build.core.buildArtefactType.exe" buildProperties="org.eclipse.cdt.build.core.buildArtefactType=org.eclipse.cdt.build.core.buildArtefactType.exe,org.eclipse.cdt.build.core.buildType=org.eclipse.cdt.build.core.buildType.debug" cleanCommand="${cross_rm} -rf" description="" id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094" name="Debug" parent="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug">
|
||||
<folderInfo id="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094." name="/" resourcePath="">
|
||||
<toolChain id="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.debug.1201710416" name="ARM Cross GCC" superClass="ilg.gnuarmeclipse.managedbuild.cross.toolchain.elf.debug">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash.251260409" name="Create flash image" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createflash" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createlisting.1365878149" name="Create extended listing" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.createlisting" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize.709136944" name="Print size" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.addtools.printsize" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.1986446770" name="Optimization Level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.level.none" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength.1312975261" name="Message length (-fmessage-length=0)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.messagelength" useByScannerDiscovery="true" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar.1538128212" name="'char' is signed (-fsigned-char)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.signedchar" useByScannerDiscovery="true" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections.2136804218" name="Function sections (-ffunction-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.functionsections" useByScannerDiscovery="true" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections.244767666" name="Data sections (-fdata-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.optimization.datasections" useByScannerDiscovery="true" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.1055848773" name="Debug level" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.level.default" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.501941135" name="Debug format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.debugging.format.dwarf2" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name.1696308067" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.name" useByScannerDiscovery="false" value="GNU Tools for ARM Embedded Processors" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.architecture.1558403188" name="Architecture" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.architecture" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.architecture.arm" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family.749415257" name="ARM family" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.family" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.mcpu.cortex-m4" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.instructionset.2114153533" name="Instruction set" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.instructionset" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.instructionset.thumb" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix.1600865811" name="Prefix" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.prefix" useByScannerDiscovery="false" value="arm-none-eabi-" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.c.1109963929" name="C compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.c" useByScannerDiscovery="false" value="gcc" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp.1040883831" name="C++ compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.cpp" useByScannerDiscovery="false" value="g++" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.ar.1678200391" name="Archiver" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.ar" useByScannerDiscovery="false" value="ar" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy.1171840296" name="Hex/Bin converter" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objcopy" useByScannerDiscovery="false" value="objcopy" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump.342604837" name="Listing generator" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.objdump" useByScannerDiscovery="false" value="objdump" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.size.898269225" name="Size command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.size" useByScannerDiscovery="false" value="size" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.make.2016398076" name="Build command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.make" useByScannerDiscovery="false" value="make" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm.1606171496" name="Remove command" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.command.rm" useByScannerDiscovery="false" value="rm" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.id.540792084" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.toolchain.id" useByScannerDiscovery="false" value="1287942917" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.architecture.430121817" name="Architecture" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.architecture" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.arch.none" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.966735324" name="Float ABI" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.abi.hard" valueType="enumerated"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn.1381561249" name="Enable all common warnings (-Wall)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.warnings.allwarn" useByScannerDiscovery="true" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.target.other.2041717463" name="Other target flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.target.other" useByScannerDiscovery="true" value="" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.1463655269" name="FPU Type" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit" useByScannerDiscovery="true" value="ilg.gnuarmeclipse.managedbuild.cross.option.arm.target.fpu.unit.fpv4spd16" valueType="enumerated"/>
|
||||
<targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.ELF" id="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform.1798638225" isAbstract="false" osList="all" superClass="ilg.gnuarmeclipse.managedbuild.cross.targetPlatform"/>
|
||||
<builder buildPath="${workspace_loc:/${ProjName}/Debug" cleanBuildTarget="clean2" id="ilg.gnuarmeclipse.managedbuild.cross.builder.1736709688" keepEnvironmentInBuildfile="false" managedBuildOn="true" name="Gnu Make Builder" parallelBuildOn="true" parallelizationNumber="optimal" superClass="ilg.gnuarmeclipse.managedbuild.cross.builder"/>
|
||||
<tool commandLinePattern="${COMMAND} ${cross_toolchain_flags} ${FLAGS} -c ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.1810966071" name="GNU ARM Cross Assembler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor.1072524326" name="Use preprocessor" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.usepreprocessor" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths.161242639" name="Include paths (-I)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.include.paths" useByScannerDiscovery="true"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs.1521934876" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.defs" useByScannerDiscovery="true"/>
|
||||
<option IS_BUILTIN_EMPTY="false" IS_VALUE_EMPTY="false" id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.flags.1325367962" name="Assembler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.flags" useByScannerDiscovery="false" valueType="stringList">
|
||||
<listOptionValue builtIn="false" value="-mimplicit-it=thumb"/>
|
||||
</option>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.other.647856572" name="Other assembler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.assembler.other" useByScannerDiscovery="false" value="a_misc_flag" valueType="string"/>
|
||||
<inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input.1843333483" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.assembler.input"/>
|
||||
</tool>
|
||||
<tool commandLinePattern="${COMMAND} ${cross_toolchain_flags} ${FLAGS} -c ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1570350559" name="GNU ARM Cross C Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths.634882052" name="Include paths (-I)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.paths" useByScannerDiscovery="true"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs.100549972" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.defs" useByScannerDiscovery="true"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.other.2133065240" name="Other compiler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.other" useByScannerDiscovery="true" value="c_misc_flag" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.files.714348818" name="Include files (-include)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.compiler.include.files" useByScannerDiscovery="true"/>
|
||||
<inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.992053063" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input"/>
|
||||
</tool>
|
||||
<tool commandLinePattern="${COMMAND} ${cross_toolchain_flags} ${FLAGS} ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.869072473" name="Cross ARM C Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections.1167322178" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.gcsections" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostart.351692886" name="Do not use standard start files (-nostartfiles)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostart" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostdlibs.1009243715" name="No startup or default libs (-nostdlib)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nostdlibs" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nodeflibs.2016026082" name="Do not use default libraries (-nodefaultlibs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.nodeflibs" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.usenewlibnano.923990336" name="Use newlib-nano (--specs=nano.specs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.usenewlibnano" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option defaultValue="true" id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.shared.548869459" name="Shared (-shared)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.shared" useByScannerDiscovery="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.scriptfile.1818777301" name="Script files (-T)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.scriptfile" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.libs.1135656995" name="Libraries (-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.libs" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.paths.36884122" name="Library search path (-L)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.paths" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.other.396049466" name="Other linker flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.other" useByScannerDiscovery="false" value="c_link_misc_flag" valueType="string"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.cref.1645737861" name="Cross reference (-Xlinker --cref)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.c.linker.cref" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.input.334732222" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.c.linker.input">
|
||||
<additionalInput kind="additionalinputdependency" paths="$(USER_OBJS)"/>
|
||||
<additionalInput kind="additionalinput" paths="$(LIBS)"/>
|
||||
</inputType>
|
||||
</tool>
|
||||
<tool commandLinePattern="${COMMAND} ${cross_toolchain_flags} ${FLAGS} ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.1601059928" name="GNU ARM Cross C++ Linker" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections.437759352" name="Remove unused sections (-Xlinker --gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.gcsections" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile.1101974459" name="Script files (-T)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.scriptfile" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.cref.2007675975" name="Cross reference (-Xlinker --cref)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.cref" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnano.2105838438" name="Use newlib-nano (--specs=nano.specs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnano" useByScannerDiscovery="false" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs.934137837" name="Libraries (-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.libs" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostart.2118356996" name="Do not use standard start files (-nostartfiles)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostart" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nodeflibs.1427884346" name="Do not use default libraries (-nodefaultlibs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nodeflibs" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostdlibs.1433863653" name="No startup or default libs (-nostdlib)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.nostdlibs" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printgcsections.1387745410" name="Print removed sections (-Xlinker --print-gc-sections)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printgcsections" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.strip.1230158061" name="Omit all symbol information (-s)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.strip" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printmap.1307581821" name="Print link map (-Xlinker --print-map)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.printmap" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.useprintffloat.960778920" name="Use float with nano printf (-u _printf_float)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.useprintffloat" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usescanffloat.637205035" name="Use float with nano scanf (-u _scanf_float)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usescanffloat" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnosys.1948314201" name="Do not use syscalls (--specs=nosys.specs)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.usenewlibnosys" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.verbose.273162112" name="Verbose (-v)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.verbose" useByScannerDiscovery="false" value="false" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths.1399535143" name="Library search path (-L)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.paths" useByScannerDiscovery="false"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other.882307902" name="Other linker flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.linker.other" useByScannerDiscovery="false" value="cpp_link_misc_flag" valueType="string"/>
|
||||
<inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input.262373798" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.linker.input">
|
||||
<additionalInput kind="additionalinputdependency" paths="$(USER_OBJS)"/>
|
||||
<additionalInput kind="additionalinput" paths="$(LIBS)"/>
|
||||
</inputType>
|
||||
</tool>
|
||||
<tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver.506412204" name="GNU ARM Cross Archiver" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.archiver"/>
|
||||
<tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash.1461589245" name="GNU ARM Cross Create Flash Image" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createflash">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.1937707052" name="Output file format (-O)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice" useByScannerDiscovery="false" value="ilg.gnuarmeclipse.managedbuild.cross.option.createflash.choice.binary" valueType="enumerated"/>
|
||||
</tool>
|
||||
<tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting.82359725" name="GNU ARM Cross Create Listing" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.createlisting">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source.601724476" name="Display source (--source|-S)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.source" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders.692505279" name="Display all headers (--all-headers|-x)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.allheaders" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle.97345172" name="Demangle names (--demangle|-C)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.demangle" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers.1342893377" name="Display line numbers (--line-numbers|-l)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.linenumbers" value="true" valueType="boolean"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide.1533725981" name="Wide lines (--wide|-w)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.createlisting.wide" value="true" valueType="boolean"/>
|
||||
</tool>
|
||||
<tool id="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize.1073550295" name="GNU ARM Cross Print Size" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.printsize">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format.946451386" name="Size format" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.printsize.format" useByScannerDiscovery="false"/>
|
||||
</tool>
|
||||
<tool commandLinePattern="${COMMAND} ${cross_toolchain_flags} ${FLAGS} -c ${OUTPUT_FLAG} ${OUTPUT_PREFIX}${OUTPUT} ${INPUTS}" id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.1302177015" name="GNU ARM Cross C++ Compiler" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler">
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs.704468062" name="Defined symbols (-D)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.defs" useByScannerDiscovery="true"/>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths.302877723" name="Include paths (-I)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.paths" useByScannerDiscovery="true"/>
|
||||
<option IS_BUILTIN_EMPTY="false" IS_VALUE_EMPTY="false" id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.files.343249373" name="Include files (-include)" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.include.files" useByScannerDiscovery="true" valueType="includeFiles">
|
||||
<listOptionValue builtIn="false" value=""${workspace_loc:/${ProjName}/rtconfig_preinc.h}""/>
|
||||
</option>
|
||||
<option id="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.other.465079095" name="Other compiler flags" superClass="ilg.gnuarmeclipse.managedbuild.cross.option.cpp.compiler.other" useByScannerDiscovery="true" value="cpp_misc_flag" valueType="string"/>
|
||||
<inputType id="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input.45918001" superClass="ilg.gnuarmeclipse.managedbuild.cross.tool.cpp.compiler.input"/>
|
||||
</tool>
|
||||
</toolChain>
|
||||
</folderInfo>
|
||||
<sourceEntries>
|
||||
<entry excluding="|" flags="VALUE_WORKSPACE_PATH|RESOLVED" kind="sourcePath" name=""/>
|
||||
</sourceEntries>
|
||||
</configuration>
|
||||
</storageModule>
|
||||
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
|
||||
</cconfiguration>
|
||||
</storageModule>
|
||||
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
|
||||
<project id="qemu-vexpress-a9.ilg.gnuarmeclipse.managedbuild.cross.target.elf.860020518" name="Executable" projectType="ilg.gnuarmeclipse.managedbuild.cross.target.elf"/>
|
||||
</storageModule>
|
||||
<storageModule moduleId="scannerConfiguration">
|
||||
<autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
|
||||
<scannerConfigBuildInfo instanceId="ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094;ilg.gnuarmeclipse.managedbuild.cross.config.elf.debug.553091094.;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.1570350559;ilg.gnuarmeclipse.managedbuild.cross.tool.c.compiler.input.992053063">
|
||||
<autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
|
||||
</scannerConfigBuildInfo>
|
||||
</storageModule>
|
||||
<storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
|
||||
<storageModule moduleId="refreshScope" versionNumber="2">
|
||||
<configuration configurationName="Debug">
|
||||
<resource resourceType="PROJECT" workspacePath="/f429_tmp"/>
|
||||
</configuration>
|
||||
</storageModule>
|
||||
<storageModule moduleId="org.eclipse.cdt.make.core.buildtargets"/>
|
||||
<storageModule moduleId="org.eclipse.cdt.internal.ui.text.commentOwnerProjectMappings">
|
||||
<doc-comment-owner id="org.eclipse.cdt.ui.doxygen">
|
||||
<path value=""/>
|
||||
</doc-comment-owner>
|
||||
</storageModule>
|
||||
</cproject>"""
project_temp = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>__project_name_flag__</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
|
||||
<triggers>clean,full,incremental,</triggers>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
|
||||
<triggers>full,incremental,</triggers>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.cdt.core.cnature</nature>
|
||||
<nature>org.rt-thread.studio.rttnature</nature>
|
||||
<nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
|
||||
<nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
|
||||
</natures>
|
||||
</projectDescription>"""
projcfg_ini_temp = """#RT-Thread Studio Project Configuration
|
||||
# $time
|
||||
cfg_version=v3.0
|
||||
|
||||
board_name=
|
||||
bsp_version=
|
||||
bsp_path=
|
||||
chip_name=
|
||||
project_base_rtt_bsp=true
|
||||
is_use_scons_build=true
|
||||
hardware_adapter=
|
||||
selected_rtt_version=latest
|
||||
board_base_nano_proj=false
|
||||
is_base_example_project=false
|
||||
example_name=
|
||||
project_type=rt-thread
|
||||
os_branch=master
|
||||
os_version=latest
|
||||
project_name=$project_name
|
||||
output_project_path=$output_project_path"""
eclipse_core_runtime_temp = """content-types/enabled=true
|
||||
content-types/org.eclipse.cdt.core.asmSource/file-extensions=s
|
||||
eclipse.preferences.version=1"""
makefile_targets_temp = """clean2:
|
||||
\t-$(RM) $(CC_DEPS)$(C++_DEPS)$(C_UPPER_DEPS)$(CXX_DEPS)$(SECONDARY_FLASH)$(SECONDARY_SIZE)$(ASM_DEPS)$(S_UPPER_DEPS)$(C_DEPS)$(CPP_DEPS)
|
||||
\t-$(RM) $(OBJS) *.elf
|
||||
\t-@echo ' '
|
||||
|
||||
*.elf: $(wildcard ../linkscripts/*/*.lds) $(wildcard ../linkscripts/*/*/*.lds)"""
def get_mcu_info(uvproj_file_path):
    if os.path.exists(uvproj_file_path):
        with open(uvproj_file_path, mode='r') as f:
            data = f.read()
            result = re.search("<Device>(.*)</Device>", data)
            if result:
                return result.group(1)
            else:
                return "unknown"
    else:
        return "unknown"


def gen_makefile_targets(output_file_path):
    try:
        w_str = makefile_targets_temp
        dir_name = os.path.dirname(output_file_path)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        with open(output_file_path, 'w') as f:
            f.write(w_str)
        return True
    except Exception as e:
        print(e)
        return False


def gen_org_eclipse_core_runtime_prefs(output_file_path):
    try:
        w_str = eclipse_core_runtime_temp
        dir_name = os.path.dirname(output_file_path)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        with open(output_file_path, 'w') as f:
            f.write(w_str)
        return True
    except Exception as e:
        print(e)
        return False


def gen_cproject_file(output_file_path):
    template_file_path = os.path.join(os.path.dirname(output_file_path), "template.cproject")
    if os.path.exists(template_file_path):
        try:
            shutil.copy(template_file_path, output_file_path)
        except Exception as e:
            print(e)
        return True
    else:
        CFLAGS = rtconfig.CFLAGS
        AFLAGS = rtconfig.AFLAGS
        LFLAGS = rtconfig.LFLAGS
        if 'CXXFLAGS' in dir(rtconfig):
            CXXFLAGS = rtconfig.CXXFLAGS
        else:
            CXXFLAGS = ""

        if "-T" in LFLAGS:
            items = str(LFLAGS).split()
            t_index = items.index("-T")
            items[t_index] = ""
            items[t_index + 1] = ""
            LFLAGS = " ".join(items)

        try:
            w_str = cproject_temp
            if "a_misc_flag" in w_str:
                w_str = w_str.replace("a_misc_flag", AFLAGS)
            if "c_misc_flag" in w_str:
                w_str = w_str.replace("c_misc_flag", CFLAGS)
            if "cpp_misc_flag" in w_str:
                w_str = w_str.replace("cpp_misc_flag", CXXFLAGS)
            if "c_link_misc_flag" in w_str:
                w_str = w_str.replace("c_link_misc_flag", LFLAGS)
            if "cpp_link_misc_flag" in w_str:
                w_str = w_str.replace("cpp_link_misc_flag", LFLAGS)

            dir_name = os.path.dirname(output_file_path)
            if not os.path.exists(dir_name):
                os.makedirs(dir_name)
            with open(output_file_path, 'w') as f:
                f.write(w_str)
            return True
        except Exception as e:
            return False


def gen_project_file(output_file_path):
    try:
        w_str = project_temp
        dir_name = os.path.dirname(output_file_path)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        with open(output_file_path, 'w') as f:
            f.write(w_str)
        return True
    except Exception as e:
        return False


def gen_projcfg_ini_file(chip_name, project_name, output_file_path):
    try:
        projcfg_file_tmp = Template(projcfg_ini_temp)
        w_str = projcfg_file_tmp.substitute(time=time.strftime("%a %b %d %H:%M:%S %Y", time.localtime()),
                                            project_name=project_name,
                                            output_project_path=os.path.abspath(""))
        dir_name = os.path.dirname(output_file_path)
        if not os.path.exists(dir_name):
            os.makedirs(dir_name)
        with open(output_file_path, 'w') as f:
            f.write(w_str)
        return True
    except Exception as e:
        return False
139
rt-thread/tools/rtthread.mk
Normal file
@@ -0,0 +1,139 @@
|
||||
$(if $(strip $(TARGET)),,$(error TARGET not defined))
|
||||
$(if $(strip $(SRC_FILES)),,$(error No source files))
|
||||
$(if $(strip $(BSP_ROOT)),,$(error BSP_ROOT not defined))
|
||||
|
||||
ifneq ($(MAKE_LIB),1)
|
||||
BUILD_DIR := $(BSP_ROOT)/build
|
||||
endif
|
||||
|
||||
$(if $(strip $(BUILD_DIR)),,$(error BUILD_DIR not defined))
|
||||
|
||||
RTT_BUILD_DIR := .
|
||||
BSP_BUILD_DIR := bsp
|
||||
|
||||
#################
|
||||
|
||||
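# add_c_file: map a .c source under BSP_ROOT or RTT_ROOT to an object file under BUILD_DIR, append it to OBJS, and emit its compile rule (gcc is invoked through CROSS_COMPILE with CFLAGS).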
define add_c_file
|
||||
$(eval C_SRC := $(1:$(BSP_ROOT)/%=%)) \
|
||||
$(eval C_SRC := $(C_SRC:$(RTT_ROOT)/%=%)) \
|
||||
$(eval COBJ := $(1:%.c=%.o)) \
|
||||
$(eval COBJ := $(COBJ:$(BSP_ROOT)/%=$(BSP_BUILD_DIR)/%)) \
|
||||
$(eval COBJ := $(COBJ:$(RTT_ROOT)/%=$(RTT_BUILD_DIR)/%)) \
|
||||
$(eval LOCALC := $(addprefix $(BUILD_DIR)/,$(COBJ))) \
|
||||
$(eval OBJS += $(LOCALC)) \
|
||||
$(if $(strip $(LOCALC)),$(eval $(LOCALC): $(C_SRC)
|
||||
@if [ ! -d $$(@D) ]; then mkdir -p $$(@D); fi
|
||||
@echo cc $$<
|
||||
@$(CROSS_COMPILE)gcc $$(CFLAGS) -c $$< -o $$@))
|
||||
endef
|
||||
|
||||
define add_cxx_file
|
||||
$(eval CXX_SRC := $(1:$(BSP_ROOT)/%=%)) \
|
||||
$(eval CXX_SRC := $(CXX_SRC:$(RTT_ROOT)/%=%)) \
|
||||
$(eval CXXOBJ := $(1:%.cpp=%.o)) \
|
||||
$(eval CXXOBJ := $(CXXOBJ:$(BSP_ROOT)/%=$(BSP_BUILD_DIR)/%)) \
|
||||
$(eval CXXOBJ := $(CXXOBJ:$(RTT_ROOT)/%=$(RTT_BUILD_DIR)/%)) \
|
||||
$(eval LOCALCXX := $(addprefix $(BUILD_DIR)/,$(CXXOBJ))) \
|
||||
$(eval OBJS += $(LOCALCXX)) \
|
||||
$(if $(strip $(LOCALCXX)),$(eval $(LOCALCXX): $(CXX_SRC)
|
||||
@if [ ! -d $$(@D) ]; then mkdir -p $$(@D); fi
|
||||
@echo cc $$<
|
||||
@$(CROSS_COMPILE)g++ $$(CXXFLAGS) -c $$< -o $$@))
|
||||
endef
|
||||
|
||||
define add_S_file
|
||||
$(eval S_SRC := $(1:$(BSP_ROOT)/%=%)) \
|
||||
$(eval S_SRC := $(S_SRC:$(RTT_ROOT)/%=%)) \
|
||||
$(eval SOBJ := $(1:%.S=%.o)) \
|
||||
$(eval SOBJ := $(SOBJ:$(BSP_ROOT)/%=$(BSP_BUILD_DIR)/%)) \
|
||||
$(eval SOBJ := $(SOBJ:$(RTT_ROOT)/%=$(RTT_BUILD_DIR)/%)) \
|
||||
$(eval LOCALS := $(addprefix $(BUILD_DIR)/,$(SOBJ))) \
|
||||
$(eval OBJS += $(LOCALS)) \
|
||||
$(if $(strip $(LOCALS)),$(eval $(LOCALS): $(S_SRC)
|
||||
@if [ ! -d $$(@D) ]; then mkdir -p $$(@D); fi
|
||||
@echo cc $$<
|
||||
@$(CROSS_COMPILE)gcc $$(AFLAGS) -c $$< -o $$@))
|
||||
endef
|
||||
|
||||
define add_s_file
|
||||
$(eval S_SRC := $(1:$(BSP_ROOT)/%=%)) \
|
||||
$(eval S_SRC := $(S_SRC:$(RTT_ROOT)/%=%)) \
|
||||
$(eval SOBJ := $(1:%.s=%.o)) \
|
||||
$(eval SOBJ := $(SOBJ:$(BSP_ROOT)/%=$(BSP_BUILD_DIR)/%)) \
|
||||
$(eval SOBJ := $(SOBJ:$(RTT_ROOT)/%=$(RTT_BUILD_DIR)/%)) \
|
||||
$(eval LOCALS := $(addprefix $(BUILD_DIR)/,$(SOBJ))) \
|
||||
$(eval OBJS += $(LOCALS)) \
|
||||
$(if $(strip $(LOCALS)),$(eval $(LOCALS): $(S_SRC)
|
||||
@if [ ! -d $$(@D) ]; then mkdir -p $$(@D); fi
|
||||
@echo cc $$<
|
||||
@$(CROSS_COMPILE)gcc $$(AFLAGS) -c $$< -o $$@))
|
||||
endef
|
||||
|
||||
add_flg = $(eval CFLAGS += $1) \
|
||||
$(eval AFLAGS += $1) \
|
||||
$(eval CXXFLAGS += $1)
|
||||
|
||||
add_inc = $(eval CFLAGS += -I$1) \
|
||||
$(eval AFLAGS += -I$1) \
|
||||
$(eval CXXFLAGS += -I$1)
|
||||
|
||||
add_def = $(eval CFLAGS += -D$1) \
|
||||
$(eval AFLAGS += -D$1) \
|
||||
$(eval CXXFLAGS += -D$1)
|
||||
|
||||
OBJS :=
|
||||
#VPATH := $(BSP_ROOT) $(RTT_ROOT)
|
||||
VPATH := $(RTT_ROOT)
|
||||
|
||||
CONFIG_FLG := $(strip $(EXTERN_FLAGS))
|
||||
$(if $(CONFIG_FLG),$(foreach f,$(CONFIG_FLG),$(call add_flg,$(f))))
|
||||
|
||||
CONFIG_DEF := $(strip $(PROJECT_DEFS))
|
||||
$(if $(CONFIG_DEF),$(foreach d,$(CONFIG_DEF),$(call add_def,$(d))))
|
||||
|
||||
CONFIG_INC := $(strip $(INCLUDE_PATH))
|
||||
$(if $(CONFIG_INC),$(foreach i,$(CONFIG_INC),$(call add_inc,$(i))))
|
||||
|
||||
SRCS := $(strip $(filter %.c,$(SRC_FILES)))
|
||||
$(if $(SRCS),$(foreach f,$(SRCS),$(call add_c_file,$(f))))
|
||||
|
||||
SRCS := $(strip $(filter %.cpp,$(SRC_FILES)))
|
||||
$(if $(SRCS),$(foreach f,$(SRCS),$(call add_cxx_file,$(f))))
|
||||
|
||||
SRCS := $(strip $(filter %.S,$(SRC_FILES)))
|
||||
$(if $(SRCS),$(foreach f,$(SRCS),$(call add_S_file,$(f))))
|
||||
|
||||
SRCS := $(strip $(filter %.s,$(SRC_FILES)))
|
||||
$(if $(SRCS),$(foreach f,$(SRCS),$(call add_s_file,$(f))))
|
||||
|
||||
CFLAGS += $(CPPPATHS)
|
||||
CXXFLAGS += $(CPPPATHS)
|
||||
AFLAGS += $(CPPPATHS)
|
||||
|
||||
CFLAGS += $(DEFINES)
|
||||
CXXFLAGS += $(DEFINES)
|
||||
AFLAGS += $(DEFINES)
|
||||
|
||||
all: $(TARGET)
|
||||
|
||||
ifeq ($(MAKE_LIB),1)
|
||||
$(TARGET): $(OBJS)
|
||||
@echo ------------------------------------------------
|
||||
@echo ar $(TARGET)
|
||||
@$(CROSS_COMPILE)ar -rv $@ $(OBJS)
|
||||
else
|
||||
$(TARGET): $(OBJS)
|
||||
@echo ------------------------------------------------
|
||||
@echo link $(TARGET)
|
||||
@$(CROSS_COMPILE)g++ -o $@ $(LFLAGS) $(OBJS) $(EXTERN_LIB)
|
||||
@echo ------------------------------------------------
|
||||
@$(CROSS_COMPILE)objcopy -O binary $@ rtthread.bin
|
||||
@$(CROSS_COMPILE)size $@
|
||||
endif
|
||||
|
||||
phony += clean
|
||||
clean:
|
||||
@echo clean
|
||||
@rm -rf $(TARGET) $(BUILD_DIR)
|
||||
|
||||
.PHONY: $(phony)
|
476
rt-thread/tools/sconsui.py
Normal file
@@ -0,0 +1,476 @@
|
||||
#! /usr/bin/env python
|
||||
#coding=utf-8
|
||||
|
||||
#
|
||||
# File : sconsui.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import sys
|
||||
|
||||
py2 = py30 = py31 = False
|
||||
version = sys.hexversion
|
||||
if version >= 0x020600F0 and version < 0x03000000 :
|
||||
py2 = True # Python 2.6 or 2.7
|
||||
from Tkinter import *
|
||||
import ttk
|
||||
elif version >= 0x03000000 and version < 0x03010000 :
|
||||
py30 = True
|
||||
from tkinter import *
|
||||
import ttk
|
||||
elif version >= 0x03010000:
|
||||
py31 = True
|
||||
from tkinter import *
|
||||
import tkinter.ttk as ttk
|
||||
else:
|
||||
print ("""
|
||||
You do not have a version of python supporting ttk widgets..
|
||||
You need a version >= 2.6 to execute PAGE modules.
|
||||
""")
|
||||
sys.exit()
|
||||
|
||||
import ScrolledText
|
||||
import tkFileDialog
|
||||
import tkMessageBox
|
||||
|
||||
import os
|
||||
import threading
|
||||
import platform
|
||||
|
||||
builder = None
|
||||
executor = None
|
||||
lock = None
|
||||
|
||||
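# Worker thread that runs the scons command (on Windows, via win32spawn) and streams its output into the Build page's text widget while the progress bar is running.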
class CmdExecutor(threading.Thread):
|
||||
def __init__(self, cmd, output):
|
||||
threading.Thread.__init__(self)
|
||||
self.cmd = cmd
|
||||
self.child = None
|
||||
|
||||
def run(self):
|
||||
global executor, builder, lock
|
||||
|
||||
if platform.system() == 'Windows':
|
||||
try:
|
||||
from win32spawn import Win32Spawn
|
||||
subprocess = Win32Spawn(self.cmd)
|
||||
subprocess.start_pipe()
|
||||
|
||||
builder.progressbar.start()
|
||||
while not subprocess.is_terminated or subprocess.qsize() > 0:
|
||||
try:
|
||||
line = subprocess.get(timeout=1)
|
||||
line = line.replace('\r', '')
|
||||
if line:
|
||||
lock.acquire()
|
||||
builder.output.see(END)
|
||||
builder.output.insert(END, line)
|
||||
lock.release()
|
||||
except:
|
||||
pass
|
||||
|
||||
builder.progressbar.stop()
|
||||
except:
|
||||
pass
|
||||
|
||||
executor = None
|
||||
if builder.is_makeing_project:
|
||||
builder.output.insert(END, 'Done')
|
||||
builder.is_makeing_project = False
|
||||
|
||||
def ExecCmd(cmd):
|
||||
global executor
|
||||
if executor:
|
||||
print 'last task has not exited yet'
|
||||
return
|
||||
|
||||
executor = CmdExecutor(cmd, builder)
|
||||
executor.start()
|
||||
|
||||
class DirSelectBox(ttk.Frame):
|
||||
def __init__(self, master=None, **kw):
|
||||
ttk.Frame.__init__(self, master, **kw)
|
||||
self.dir_var = StringVar()
|
||||
self.entry = ttk.Entry(self, textvariable = self.dir_var)
|
||||
self.entry.pack(fill=BOTH, expand=1,side=LEFT)
|
||||
self.entry.configure(width = 50)
|
||||
|
||||
self.browser_button = ttk.Button(self, text="Browser", command=self.browser)
|
||||
self.browser_button.pack(side=RIGHT)
|
||||
|
||||
def browser(self):
|
||||
dir = tkFileDialog.askdirectory(parent=self, title='Open directory', initialdir=self.dir_var.get())
|
||||
if dir != '':
|
||||
self.dir_var.set(dir)
|
||||
|
||||
def set_path(self, path):
|
||||
path = path.replace('\\', '/')
|
||||
self.dir_var.set(path)
|
||||
|
||||
def get_path(self):
|
||||
return self.dir_var.get()
|
||||
|
||||
COMPILER = [
|
||||
("GNU GCC", "GCC"),
|
||||
("Keil ARMCC", "ARMCC"),
|
||||
("IAR Compiler", "IAR"),
|
||||
]
|
||||
|
||||
IDE = [
|
||||
('Keil MDK4', 'mdk4'),
|
||||
('Keil MDK', 'mdk'),
|
||||
('IAR Compiler', 'iar')
|
||||
]
|
||||
|
||||
class SconsUI():
|
||||
def __init__(self, master=None):
|
||||
style = ttk.Style()
|
||||
theme = style.theme_use()
|
||||
default = style.lookup(theme, 'background')
|
||||
master.configure(background=default)
|
||||
|
||||
notebook = ttk.Notebook(master)
|
||||
notebook.pack(fill=BOTH, padx=5, pady=5)
|
||||
|
||||
# building page
|
||||
page_building = ttk.Frame(notebook)
|
||||
notebook.add(page_building, padding=3)
|
||||
notebook.tab(0, text='Build', underline="-1")
|
||||
self.setup_building_ui(page_building)
|
||||
self.building_page = page_building
|
||||
|
||||
# make project page
|
||||
page_project = ttk.Frame(notebook)
|
||||
notebook.add(page_project, padding = 3)
|
||||
notebook.tab(1, text = 'Project', underline = '-1')
|
||||
self.setup_project_ui(page_project)
|
||||
self.project_page = page_project
|
||||
|
||||
# setting page
|
||||
page_setting = ttk.Frame(notebook)
|
||||
notebook.add(page_setting, padding = 3)
|
||||
notebook.tab(2, text = 'Setting', underline = '-1')
|
||||
self.setup_setting_ui(page_setting)
|
||||
self.setting_page = page_setting
|
||||
|
||||
padding = ttk.Frame(master)
|
||||
padding.pack(fill=X)
|
||||
quit = ttk.Button(padding, text='Quit', command = self.quit)
|
||||
quit.pack(side=RIGHT)
|
||||
|
||||
# set notebook to self
|
||||
self.notebook = notebook
|
||||
|
||||
# read setting
|
||||
self.read_setting()
|
||||
self.is_makeing_project = False
|
||||
|
||||
def read_setting(self):
|
||||
import platform
|
||||
import os
|
||||
|
||||
home = ''
|
||||
if platform.system() == 'Windows':
|
||||
driver = os.environ['HOMEDRIVE']
|
||||
home = os.environ['HOMEPATH']
|
||||
home = os.path.join(driver, home)
|
||||
else:
|
||||
home = os.environ['HOME']
|
||||
|
||||
setting_path = os.path.join(home, '.rtt_scons')
|
||||
if os.path.exists(setting_path):
|
||||
setting = open(os.path.join(home, '.rtt_scons'))
|
||||
for line in setting:
|
||||
line = line.replace('\n', '')
|
||||
line = line.replace('\r', '')
|
||||
if line.find('=') != -1:
|
||||
items = line.split('=')
|
||||
if items[0] == 'RTTRoot':
|
||||
self.RTTRoot.set_path(items[1])
|
||||
elif items[0] == 'BSPRoot':
|
||||
self.BSPRoot.set_path(items[1])
|
||||
elif items[0] == 'compiler':
|
||||
compiler = items[1]
|
||||
else:
|
||||
self.CompilersPath[items[0]].set_path(items[1])
|
||||
setting.close()
|
||||
|
||||
# set RT-Thread Root Directory according environ
|
||||
if 'RTT_ROOT' in os.environ:
|
||||
self.RTTRoot.set_path(os.environ['RTT_ROOT'])
|
||||
|
||||
if self.RTTRoot.get_path() == '':
|
||||
rtt_root = ''
|
||||
# detect RT-Thread directory
|
||||
if os.path.exists(os.path.join('..', 'include', 'rtthread.h')):
|
||||
rtt_root = os.path.join('..')
|
||||
elif os.path.exists(os.path.join('..', '..', 'include', 'rtthread.h')):
|
||||
rtt_root = os.path.join('..', '..')
|
||||
if rtt_root:
|
||||
self.RTTRoot.set_path(os.path.abspath(rtt_root))
|
||||
|
||||
# detect compiler path
|
||||
if platform.system() == 'Windows':
|
||||
# Keil MDK
|
||||
if not self.CompilersPath['ARMCC'].get_path():
|
||||
if os.path.exists('C:\\Keil'):
|
||||
self.CompilersPath['ARMCC'].set_path('C:\\Keil')
|
||||
elif os.path.exists('D:\\Keil'):
|
||||
self.CompilersPath['ARMCC'].set_path('D:\\Keil')
|
||||
elif os.path.exists('E:\\Keil'):
|
||||
self.CompilersPath['ARMCC'].set_path('E:\\Keil')
|
||||
elif os.path.exists('F:\\Keil'):
|
||||
self.CompilersPath['ARMCC'].set_path('F:\\Keil')
|
||||
elif os.path.exists('G:\\Keil'):
|
||||
self.CompilersPath['ARMCC'].set_path('G:\\Keil')
|
||||
|
||||
# GNU GCC
|
||||
if not self.CompilersPath['GCC'].get_path():
|
||||
paths = os.environ['PATH']
|
||||
paths = paths.split(';')
|
||||
|
||||
for path in paths:
|
||||
if path.find('CodeSourcery') != -1:
|
||||
self.CompilersPath['GCC'].set_path(path)
|
||||
break
|
||||
elif path.find('GNU Tools ARM Embedded') != -1:
|
||||
self.CompilersPath['GCC'].set_path(path)
|
||||
break
|
||||
|
||||
def save_setting(self):
|
||||
import platform
|
||||
import os
|
||||
|
||||
home = ''
|
||||
if platform.system() == 'Windows':
|
||||
driver = os.environ['HOMEDRIVE']
|
||||
home = os.environ['HOMEPATH']
|
||||
home = os.path.join(driver, home)
|
||||
else:
|
||||
home = os.environ['HOME']
|
||||
|
||||
setting = open(os.path.join(home, '.rtt_scons'), 'w+')
|
||||
# current compiler
|
||||
# line = '%s=%s\n' % ('compiler', self.compilers.get())
|
||||
line = '%s=%s\n' % ('compiler', 'iar')
|
||||
setting.write(line)
|
||||
|
||||
# RTT Root Folder
|
||||
if self.RTTRoot.get_path():
|
||||
line = '%s=%s\n' % ('RTTRoot', self.RTTRoot.get_path())
|
||||
setting.write(line)
|
||||
|
||||
# BSP Root Folder
|
||||
if self.BSPRoot.get_path():
|
||||
line = '%s=%s\n' % ('BSPRoot', self.BSPRoot.get_path())
|
||||
setting.write(line)
|
||||
|
||||
for (compiler, path) in self.CompilersPath.iteritems():
|
||||
if path.get_path():
|
||||
line = '%s=%s\n' % (compiler, path.get_path())
|
||||
setting.write(line)
|
||||
|
||||
setting.close()
|
||||
tkMessageBox.showinfo("RT-Thread SCons UI",
|
||||
"Save setting successfully")
|
||||
|
||||
def setup_building_ui(self, frame):
|
||||
padding = ttk.Frame(frame)
|
||||
padding.pack(fill=X)
|
||||
|
||||
button = ttk.Button(padding, text='Clean', command=self.do_clean)
|
||||
button.pack(side=RIGHT)
|
||||
button = ttk.Button(padding, text='Build', command=self.do_build)
|
||||
button.pack(side=RIGHT)
|
||||
label = ttk.Label(padding, relief = 'flat', text = 'Click Build or Clean to build or clean the system -->')
|
||||
label.pack(side=RIGHT, ipady = 5)
|
||||
|
||||
self.progressbar = ttk.Progressbar(frame)
|
||||
self.progressbar.pack(fill=X)
|
||||
|
||||
separator = ttk.Separator(frame)
|
||||
separator.pack(fill=X)
|
||||
|
||||
self.output = ScrolledText.ScrolledText(frame)
|
||||
self.output.pack(fill=X)
|
||||
|
||||
def setup_project_ui(self, frame):
|
||||
label = ttk.Label(frame, relief = 'flat', text = 'Choose Integrated Development Environment:')
|
||||
label.pack(fill=X, pady = 5)
|
||||
|
||||
separator = ttk.Separator(frame)
|
||||
separator.pack(fill=X)
|
||||
|
||||
self.ide = StringVar()
|
||||
self.ide.set("mdk4") # initialize
|
||||
|
||||
for text,mode in IDE:
|
||||
radiobutton = ttk.Radiobutton(frame, text=text, variable = self.ide, value = mode)
|
||||
radiobutton.pack(fill=X, padx=10)
|
||||
|
||||
bottom = ttk.Frame(frame)
|
||||
bottom.pack(side=BOTTOM, fill=X)
|
||||
button = ttk.Button(bottom, text="Make Project", command = self.do_make_project)
|
||||
button.pack(side=RIGHT, padx = 10, pady = 10)
|
||||
|
||||
def setup_setting_ui(self, frame):
|
||||
row = 0
|
||||
label = ttk.Label (frame, relief = 'flat', text='RT-Thread Root Folder:')
|
||||
label.grid(row=row, column=0,ipadx=5, ipady=5, padx = 5)
|
||||
|
||||
self.RTTRoot = DirSelectBox(frame)
|
||||
self.RTTRoot.grid(row=row, column=1, sticky=E+W)
|
||||
row = row + 1
|
||||
|
||||
label = ttk.Label (frame, relief = 'flat', text='Board Support Folder:')
|
||||
label.grid(row=row, column=0,ipadx=5, ipady=5, padx = 5)
|
||||
|
||||
self.BSPRoot = DirSelectBox(frame)
|
||||
self.BSPRoot.grid(row=row, column=1, sticky=E+W)
|
||||
row = row + 1
|
||||
|
||||
label = ttk.Label (frame, relief='flat', text='Toolchain:')
|
||||
label.grid(row=row, column=0,ipadx=5, ipady=5, sticky=E+W)
|
||||
row = row + 1
|
||||
|
||||
separator = ttk.Separator(frame)
|
||||
separator.grid(row = row, column = 0, columnspan = 2, sticky = E+W)
|
||||
row = row + 1
|
||||
|
||||
self.compilers = StringVar()
|
||||
self.compilers.set("GCC") # initialize
|
||||
|
||||
self.CompilersPath = {}
|
||||
|
||||
for text,compiler in COMPILER:
|
||||
radiobutton = ttk.Radiobutton(frame, text=text, variable = self.compilers, value = compiler)
|
||||
radiobutton.grid(row=row, column = 0, sticky = W, ipadx = 5, ipady = 5, padx = 20)
|
||||
|
||||
self.CompilersPath[compiler] = DirSelectBox(frame)
|
||||
self.CompilersPath[compiler].grid(row=row, column=1, sticky=E+W)
|
||||
row = row + 1
|
||||
|
||||
button = ttk.Button(frame, text='Save Setting', command = self.save_setting)
|
||||
button.grid(row = row, column = 1, sticky = E)
|
||||
row = row + 1
|
||||
|
||||
def prepare_build(self):
|
||||
# get compiler
|
||||
compiler = self.compilers.get()
|
||||
if compiler == 'GCC':
|
||||
compiler = 'gcc'
|
||||
elif compiler == 'ARMCC':
|
||||
compiler = 'keil'
|
||||
elif compiler == 'IAR':
|
||||
compiler = 'iar'
|
||||
|
||||
# get RTT Root
|
||||
rtt_root = self.RTTRoot.get_path()
|
||||
# get Compiler path
|
||||
exec_path = self.CompilersPath[self.compilers.get()].get_path()
|
||||
|
||||
command = ''
|
||||
|
||||
os.environ['RTT_ROOT'] = rtt_root
|
||||
os.environ['RTT_CC'] = compiler
|
||||
os.environ['RTT_EXEC_PATH'] = exec_path
|
||||
|
||||
return command
|
||||
|
||||
def check_path(self):
|
||||
result = True
|
||||
|
||||
if self.BSPRoot.get_path() == '':
|
||||
result = False
|
||||
|
||||
if self.RTTRoot.get_path() == '':
|
||||
result = False
|
||||
|
||||
if not result:
|
||||
tkMessageBox.showinfo("RT-Thread SCons UI",
|
||||
"Folder is empty, please choose correct directory.")
|
||||
|
||||
return result
|
||||
|
||||
def do_build(self):
|
||||
self.prepare_build()
|
||||
command = 'scons'
|
||||
|
||||
if not self.check_path():
|
||||
return
|
||||
|
||||
bsp = self.BSPRoot.get_path()
|
||||
os.chdir(bsp)
|
||||
|
||||
self.output.delete(1.0, END)
|
||||
self.output.insert(END, 'building project...\n')
|
||||
ExecCmd(command)
|
||||
|
||||
def do_clean(self):
|
||||
self.prepare_build()
|
||||
command = 'scons -c'
|
||||
|
||||
if not self.check_path():
|
||||
return
|
||||
|
||||
bsp = self.BSPRoot.get_path()
|
||||
os.chdir(bsp)
|
||||
|
||||
self.output.delete(1.0, END)
|
||||
self.output.insert(END, 'clean project...\n')
|
||||
ExecCmd(command)
|
||||
|
||||
def do_make_project(self):
|
||||
ide = self.ide.get()
|
||||
self.prepare_build()
|
||||
command = 'scons --target=%s -s' % ide
|
||||
|
||||
if not self.check_path():
|
||||
return
|
||||
|
||||
# select build page
|
||||
self.notebook.select(self.building_page)
|
||||
|
||||
bsp = self.BSPRoot.get_path()
|
||||
os.chdir(bsp)
|
||||
|
||||
self.output.delete(1.0, END)
|
||||
self.output.insert(END, 'Generate project ...\n')
|
||||
self.is_makeing_project = True
|
||||
ExecCmd(command)
|
||||
|
||||
def quit(self):
|
||||
exit(0)
|
||||
|
||||
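# Entry point: create the Tk root window and the SconsUI notebook, optionally pre-fill the BSP folder with 'path', then enter the main loop.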
def StartSConsUI(path=None):
|
||||
global val, root, builder, lock
|
||||
root = Tk()
|
||||
root.title('RT-Thread SCons UI')
|
||||
#root.geometry('590x510+50+50')
|
||||
lock = threading.RLock()
|
||||
builder = SconsUI(root)
|
||||
if path:
|
||||
builder.BSPRoot.set_path(path)
|
||||
root.mainloop()
|
||||
|
||||
if __name__ == '__main__':
|
||||
StartSConsUI()
|
92
rt-thread/tools/ses.py
Normal file
@@ -0,0 +1,92 @@
|
||||
# SEGGER Embedded Studio Project Generator
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
from utils import ProjectInfo
|
||||
|
||||
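# Add a named 'folder' node to the SES project tree and register each source file in it with a project-relative path; empty groups are skipped.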
def SDKAddGroup(parent, name, files, project_path):
|
||||
# don't add an empty group
|
||||
if len(files) == 0:
|
||||
return
|
||||
|
||||
group = SubElement(parent, 'folder', attrib={'Name': name})
|
||||
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
|
||||
basename = os.path.basename(path)
|
||||
path = _make_path_relative(project_path, path)
|
||||
elm_attr_name = os.path.join(path, name)
|
||||
|
||||
file = SubElement(group, 'file', attrib={'file_name': elm_attr_name})
|
||||
|
||||
return group
|
||||
|
||||
def SESProject(env) :
|
||||
target = 'project.emProject'
|
||||
tree = etree.parse('template.emProject')
|
||||
# print(etree.dump(tree.getroot()))
|
||||
# etree.dump(tree.getroot())
|
||||
|
||||
project = ProjectInfo(env)
|
||||
# print(project)
|
||||
# return
|
||||
|
||||
project_path = os.path.abspath(env['BSP_ROOT'])
|
||||
script = env['project']
|
||||
|
||||
root = tree.getroot()
|
||||
out = open(target, 'w')
|
||||
out.write('<!DOCTYPE CrossStudio_Project_File>\n')
|
||||
|
||||
CPPPATH = []
|
||||
CPPDEFINES = []
|
||||
LINKFLAGS = ''
|
||||
CFLAGS = ''
|
||||
|
||||
project_node = tree.find('project')
|
||||
|
||||
for group in script:
|
||||
# print(group)
|
||||
|
||||
group_tree = SDKAddGroup(project_node, group['name'], group['src'], project_path)
|
||||
|
||||
# get each group's cc flags
|
||||
if 'CFLAGS' in group and group['CFLAGS']:
|
||||
if CFLAGS:
|
||||
CFLAGS += ' ' + group['CFLAGS']
|
||||
else:
|
||||
CFLAGS += group['CFLAGS']
|
||||
|
||||
# get each group's link flags
|
||||
if 'LINKFLAGS' in group and group['LINKFLAGS']:
|
||||
if LINKFLAGS:
|
||||
LINKFLAGS += ' ' + group['LINKFLAGS']
|
||||
else:
|
||||
LINKFLAGS += group['LINKFLAGS']
|
||||
|
||||
# write include path, definitions and link flags
|
||||
path = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in project['CPPPATH']])
|
||||
path = path.replace('\\', '/')
|
||||
defines = ';'.join(set(project['CPPDEFINES']))
|
||||
|
||||
node = tree.findall('project/configuration')
|
||||
for item in node:
|
||||
if item.get('c_preprocessor_definitions'):
|
||||
item.set('c_preprocessor_definitions', defines)
|
||||
|
||||
if item.get('c_user_include_directories'):
|
||||
item.set('c_user_include_directories', path)
|
||||
|
||||
xml_indent(root)
|
||||
out.write(etree.tostring(root, encoding='utf-8'))
|
||||
out.close()
|
||||
|
||||
return
|
41
rt-thread/tools/template.cbp
Normal file
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes" ?>
|
||||
<CodeBlocks_project_file>
|
||||
<FileVersion major="1" minor="6" />
|
||||
<Project>
|
||||
<Option title="project" />
|
||||
<Option pch_mode="2" />
|
||||
<Option compiler="gcc" />
|
||||
<Build>
|
||||
<Target title="Debug">
|
||||
<Option output="build/bin/Debug/project" prefix_auto="1" extension_auto="1" />
|
||||
<Option object_output="build/obj/Debug/" />
|
||||
<Option type="1" />
|
||||
<Option compiler="gcc" />
|
||||
<Compiler>
|
||||
<Add option="-g" />
|
||||
</Compiler>
|
||||
</Target>
|
||||
<Target title="Release">
|
||||
<Option output="build/bin/Release/project" prefix_auto="1" extension_auto="1" />
|
||||
<Option object_output="build/obj/Release/" />
|
||||
<Option type="1" />
|
||||
<Option compiler="gcc" />
|
||||
<Compiler>
|
||||
<Add option="-O2" />
|
||||
</Compiler>
|
||||
<Linker>
|
||||
<Add option="-s" />
|
||||
</Linker>
|
||||
</Target>
|
||||
</Build>
|
||||
<Compiler>
|
||||
<Add option="-Wall" />
|
||||
</Compiler>
|
||||
<Extensions>
|
||||
<code_completion />
|
||||
<envvars />
|
||||
<debugger />
|
||||
<lib_finder disable_auto="1" />
|
||||
</Extensions>
|
||||
</Project>
|
||||
</CodeBlocks_project_file>
|
101
rt-thread/tools/ua.py
Normal file
@@ -0,0 +1,101 @@
|
||||
#
|
||||
# File : ua.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
from utils import _make_path_relative
|
||||
|
||||
def PrefixPath(prefix, path):
|
||||
path = os.path.abspath(path)
|
||||
prefix = os.path.abspath(prefix)
|
||||
|
||||
if sys.platform == 'win32':
|
||||
prefix = prefix.lower()
|
||||
path = path.lower()
|
||||
|
||||
if path.startswith(prefix):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
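# PrepareUA generates rtua.py, whose GetCPPPATH(BSP_ROOT, RTT_ROOT) and GetCPPDEFINES() helpers let a user application built outside the BSP tree reuse the kernel's include paths and macro definitions.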
def PrepareUA(project, RTT_ROOT, BSP_ROOT):
|
||||
with open('rtua.py', 'w') as ua:
|
||||
# ua.write('import os\n')
|
||||
# ua.write('import sys\n')
|
||||
ua.write('\n')
|
||||
|
||||
print(RTT_ROOT)
|
||||
|
||||
CPPPATH = []
|
||||
CPPDEFINES = []
|
||||
|
||||
for group in project:
|
||||
# get each include path
|
||||
if 'CPPPATH' in group and group['CPPPATH']:
|
||||
CPPPATH += group['CPPPATH']
|
||||
|
||||
# get each group's definitions
|
||||
if 'CPPDEFINES' in group and group['CPPDEFINES']:
|
||||
CPPDEFINES += group['CPPDEFINES']
|
||||
|
||||
if len(CPPPATH):
|
||||
# use absolute path
|
||||
for i in range(len(CPPPATH)):
|
||||
CPPPATH[i] = os.path.abspath(CPPPATH[i])
|
||||
|
||||
# remove repeat path
|
||||
paths = [i for i in set(CPPPATH)]
|
||||
CPPPATH = []
|
||||
for path in paths:
|
||||
if PrefixPath(RTT_ROOT, path):
|
||||
CPPPATH += ['RTT_ROOT + "/%s",' % _make_path_relative(RTT_ROOT, path).replace('\\', '/')]
|
||||
|
||||
elif PrefixPath(BSP_ROOT, path):
|
||||
CPPPATH += ['BSP_ROOT + "/%s",' % _make_path_relative(BSP_ROOT, path).replace('\\', '/')]
|
||||
else:
|
||||
CPPPATH += ['"%s",' % path.replace('\\', '/')]
|
||||
|
||||
CPPPATH.sort()
|
||||
ua.write('def GetCPPPATH(BSP_ROOT, RTT_ROOT):\n')
|
||||
ua.write('\tCPPPATH=[\n')
|
||||
for path in CPPPATH:
|
||||
ua.write('\t\t%s\n' % path)
|
||||
ua.write('\t]\n\n')
|
||||
ua.write('\treturn CPPPATH\n\n')
|
||||
else:
|
||||
ua.write('def GetCPPPATH(BSP_ROOT, RTT_ROOT):\n')
|
||||
ua.write('\tCPPPATH=[]\n\n')
|
||||
ua.write('\treturn CPPPATH\n\n')
|
||||
|
||||
if len(CPPDEFINES):
|
||||
CPPDEFINES = [i for i in set(CPPDEFINES)]
|
||||
|
||||
ua.write('def GetCPPDEFINES():\n')
|
||||
ua.write('\tCPPDEFINES=%s\n' % str(CPPDEFINES))
|
||||
ua.write('\treturn CPPDEFINES\n\n')
|
||||
|
||||
else:
|
||||
ua.write('def GetCPPDEFINES():\n')
|
||||
ua.write('\tCPPDEFINES=""\n\n')
|
||||
ua.write('\treturn CPPDEFINES\n\n')
|
328
rt-thread/tools/utils.py
Normal file
@@ -0,0 +1,328 @@
|
||||
#
|
||||
# File : utils.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
# 2024-04-21 Bernard Add ImportModule to import local module
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
def splitall(loc):
|
||||
"""
|
||||
Return a list of the path components in loc. (Used by relpath_).
|
||||
|
||||
The first item in the list will be either ``os.curdir``, ``os.pardir``, empty,
|
||||
or the root directory of loc (for example, ``/`` or ``C:\\``).
|
||||
|
||||
The other items in the list will be strings.
|
||||
|
||||
Adapted from *path.py* by Jason Orendorff.
|
||||
"""
|
||||
parts = []
|
||||
while loc != os.curdir and loc != os.pardir:
|
||||
prev = loc
|
||||
loc, child = os.path.split(prev)
|
||||
if loc == prev:
|
||||
break
|
||||
parts.append(child)
|
||||
parts.append(loc)
|
||||
parts.reverse()
|
||||
return parts
|
||||
|
||||
def _make_path_relative(origin, dest):
|
||||
"""
|
||||
Return the relative path between origin and dest.
|
||||
|
||||
If it's not possible return dest.
|
||||
|
||||
|
||||
If they are identical return ``os.curdir``
|
||||
|
||||
Adapted from `path.py <http://www.jorendorff.com/articles/python/path/>`_ by Jason Orendorff.
|
||||
"""
|
||||
origin = os.path.abspath(origin).replace('\\', '/')
|
||||
dest = os.path.abspath(dest).replace('\\', '/')
|
||||
#
|
||||
orig_list = splitall(os.path.normcase(origin))
|
||||
# Don't normcase dest! We want to preserve the case.
|
||||
dest_list = splitall(dest)
|
||||
#
|
||||
if orig_list[0] != os.path.normcase(dest_list[0]):
|
||||
# Can't get here from there.
|
||||
return dest
|
||||
#
|
||||
# Find the location where the two paths start to differ.
|
||||
i = 0
|
||||
for start_seg, dest_seg in zip(orig_list, dest_list):
|
||||
if start_seg != os.path.normcase(dest_seg):
|
||||
break
|
||||
i += 1
|
||||
#
|
||||
# Now i is the point where the two paths diverge.
|
||||
# Need a certain number of "os.pardir"s to work up
|
||||
# from the origin to the point of divergence.
|
||||
segments = [os.pardir] * (len(orig_list) - i)
|
||||
# Need to add the diverging part of dest_list.
|
||||
segments += dest_list[i:]
|
||||
if len(segments) == 0:
|
||||
# If they happen to be identical, use os.curdir.
|
||||
return os.curdir
|
||||
else:
|
||||
# return os.path.join(*segments).replace('\\', '/')
|
||||
return os.path.join(*segments)
|
||||
|
||||
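# Recursively insert newlines and two-space indentation into an ElementTree element so the serialized XML is human readable.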
def xml_indent(elem, level=0):
|
||||
i = "\n" + level*" "
|
||||
if len(elem):
|
||||
if not elem.text or not elem.text.strip():
|
||||
elem.text = i + " "
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
for elem in elem:
|
||||
xml_indent(elem, level+1)
|
||||
if not elem.tail or not elem.tail.strip():
|
||||
elem.tail = i
|
||||
else:
|
||||
if level and (not elem.tail or not elem.tail.strip()):
|
||||
elem.tail = i
|
||||
|
||||
|
||||
source_ext = ["c", "h", "s", "S", "cpp", "xpm"]
|
||||
source_list = []
|
||||
|
||||
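# Recursively walk a SCons node tree and collect the absolute paths of files whose extension appears in source_ext.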
def walk_children(child):
|
||||
global source_list
|
||||
global source_ext
|
||||
|
||||
# print child
|
||||
full_path = child.rfile().abspath
|
||||
file_type_list = full_path.rsplit('.',1)
|
||||
#print file_type
|
||||
if (len(file_type_list) > 1):
|
||||
file_type = full_path.rsplit('.',1)[1]
|
||||
|
||||
if file_type in source_ext:
|
||||
if full_path not in source_list:
|
||||
source_list.append(full_path)
|
||||
|
||||
children = child.all_children()
|
||||
if children != []:
|
||||
for item in children:
|
||||
walk_children(item)
|
||||
|
||||
def PrefixPath(prefix, path):
|
||||
path = os.path.abspath(path)
|
||||
prefix = os.path.abspath(prefix)
|
||||
|
||||
if sys.platform == 'win32':
|
||||
prefix = prefix.lower()
|
||||
path = path.lower()
|
||||
|
||||
if path.startswith(prefix):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def ListMap(l):
|
||||
ret_list = []
|
||||
for item in l:
|
||||
if type(item) == type(()):
|
||||
ret = ListMap(item)
|
||||
ret_list += ret
|
||||
elif type(item) == type([]):
|
||||
ret = ListMap(item)
|
||||
ret_list += ret
|
||||
else:
|
||||
ret_list.append(item)
|
||||
|
||||
return ret_list
|
||||
|
||||
def TargetGetList(env, postfix):
|
||||
global source_ext
|
||||
global source_list
|
||||
|
||||
target = env['target']
|
||||
|
||||
source_ext = postfix
|
||||
for item in target:
|
||||
walk_children(item)
|
||||
|
||||
source_list.sort()
|
||||
|
||||
return source_list
|
||||
|
||||
def ProjectInfo(env):
|
||||
|
||||
project = env['project']
|
||||
RTT_ROOT = env['RTT_ROOT']
|
||||
BSP_ROOT = env['BSP_ROOT']
|
||||
|
||||
FILES = []
|
||||
DIRS = []
|
||||
HEADERS = []
|
||||
CPPPATH = []
|
||||
CPPDEFINES = []
|
||||
|
||||
for group in project:
|
||||
# collect the source files of each group
|
||||
if 'src' in group and group['src']:
|
||||
FILES += group['src']
|
||||
|
||||
# get each include path
|
||||
if 'CPPPATH' in group and group['CPPPATH']:
|
||||
CPPPATH += group['CPPPATH']
|
||||
|
||||
if 'CPPDEFINES' in env:
|
||||
CPPDEFINES = env['CPPDEFINES']
|
||||
CPPDEFINES = ListMap(CPPDEFINES)
|
||||
|
||||
# process FILES and DIRS
|
||||
if len(FILES):
|
||||
# use absolute path
|
||||
for i in range(len(FILES)):
|
||||
FILES[i] = os.path.abspath(str(FILES[i]))
|
||||
DIRS.append(os.path.dirname(FILES[i]))
|
||||
|
||||
FILES.sort()
|
||||
DIRS = list(set(DIRS))
|
||||
DIRS.sort()
|
||||
|
||||
# process HEADERS
|
||||
HEADERS = TargetGetList(env, ['h'])
|
||||
|
||||
# process CPPPATH
|
||||
if len(CPPPATH):
|
||||
# use absolute path
|
||||
for i in range(len(CPPPATH)):
|
||||
CPPPATH[i] = os.path.abspath(CPPPATH[i])
|
||||
|
||||
# remove repeat path
|
||||
paths = []
|
||||
for p in CPPPATH:
|
||||
if p not in paths:
|
||||
paths.append(p)
|
||||
|
||||
CPPPATH = []
|
||||
for path in paths:
|
||||
if PrefixPath(RTT_ROOT, path):
|
||||
CPPPATH += [os.path.abspath(path).replace('\\', '/')]
|
||||
|
||||
elif PrefixPath(BSP_ROOT, path):
|
||||
CPPPATH += [os.path.abspath(path).replace('\\', '/')]
|
||||
|
||||
else:
|
||||
CPPPATH += ['"%s",' % path.replace('\\', '/')]
|
||||
|
||||
# process CPPDEFINES
|
||||
if len(CPPDEFINES):
|
||||
CPPDEFINES = [i for i in set(CPPDEFINES)]
|
||||
|
||||
CPPDEFINES.sort()
|
||||
|
||||
proj = {}
|
||||
proj['FILES'] = FILES
|
||||
proj['DIRS'] = DIRS
|
||||
proj['HEADERS'] = HEADERS
|
||||
proj['CPPPATH'] = CPPPATH
|
||||
proj['CPPDEFINES'] = CPPDEFINES
|
||||
|
||||
return proj
|
||||
|
||||
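# Numeric, field-by-field comparison of two dotted version strings: returns 1 if ver1 > ver2, 0 if equal, -1 otherwise; when the common fields tie, the longer string wins.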
def VersionCmp(ver1, ver2):
|
||||
la=[]
|
||||
if ver1:
|
||||
la = re.split("[. ]", ver1)
|
||||
lb = re.split("[. ]", ver2)
|
||||
|
||||
f = 0
|
||||
if len(la) > len(lb):
|
||||
f = len(la)
|
||||
else:
|
||||
f = len(lb)
|
||||
for i in range(f):
|
||||
try:
|
||||
if int(la[i]) > int(lb[i]):
|
||||
return 1
|
||||
elif int(la[i]) == int(lb[i]):
|
||||
continue
|
||||
else:
|
||||
return -1
|
||||
except (IndexError, ValueError) as e:
|
||||
if len(la) > len(lb):
|
||||
return 1
|
||||
else:
|
||||
return -1
|
||||
return 0
|
||||
|
||||
def GCCC99Patch(cflags):
|
||||
import building
|
||||
gcc_version = building.GetDepend('GCC_VERSION_STR')
|
||||
if gcc_version:
|
||||
gcc_version = gcc_version.replace('"', '')
|
||||
if VersionCmp(gcc_version, "4.8.0") == 1:
|
||||
# remove -std=c99 after GCC 4.8.x
|
||||
cflags = cflags.replace('-std=c99', '')
|
||||
|
||||
return cflags
|
||||
|
||||
def ReloadModule(module):
|
||||
import sys
|
||||
if sys.version_info.major >= 3:
|
||||
import importlib
|
||||
importlib.reload(module)
|
||||
else:
|
||||
reload(module)
|
||||
|
||||
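# Import a module that lives next to this file (the tools directory), working on both Python 2 and Python 3.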
def ImportModule(module):
|
||||
import sys
|
||||
if sys.version_info.major >= 3:
|
||||
import importlib.util
|
||||
path = os.path.dirname(__file__)
|
||||
spec = importlib.util.spec_from_file_location(module, os.path.join(path, module+".py"))
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
return module
|
||||
else:
|
||||
return __import__(module, fromlist=[module])
|
||||
|
||||
def VerTuple(version_str):
|
||||
ver_parts = version_str.split('.')
|
||||
ver = tuple(int(part) for part in ver_parts)
|
||||
|
||||
return ver
|
||||
|
||||
def CmdExists(cmd):
|
||||
# Check if the path directly points to an existing file.
|
||||
if os.path.isfile(cmd):
|
||||
return True
|
||||
else:
|
||||
# On Windows systems, check for common script file extensions
|
||||
# if the file does not exist as specified.
|
||||
if sys.platform.startswith('win'):
|
||||
# Loop through possible extensions to cover cases where the extension is omitted in the input.
|
||||
for ext in ['.exe', '.bat', '.ps1']:
|
||||
# Append the extension to the command path and check if this file exists.
|
||||
if os.path.isfile(cmd + ext):
|
||||
return True
|
||||
|
||||
# If none of the checks confirm the file exists, return False.
|
||||
return False
|
185
rt-thread/tools/vs.py
Normal file
@@ -0,0 +1,185 @@
|
||||
#
|
||||
# File : vs.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import string
|
||||
import building
|
||||
import utils
|
||||
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
def VS_AddGroup(ProjectFiles, parent, name, files, libs, project_path):
|
||||
Filter = SubElement(parent, 'Filter')
|
||||
Filter.set('Name', name)  # set the group name on the filter
|
||||
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
try:
|
||||
path = path.decode(fs_encoding)
|
||||
except:
|
||||
path = path
|
||||
File = SubElement(Filter, 'File')
|
||||
File.set('RelativePath', path)
|
||||
|
||||
for lib in libs:
|
||||
name = os.path.basename(lib)
|
||||
path = os.path.dirname(lib)
|
||||
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
|
||||
File = SubElement(Filter, 'File')
|
||||
try:
|
||||
path = path.decode(fs_encoding)
|
||||
except:
|
||||
path = path
|
||||
File.set('RelativePath', path)
|
||||
|
||||
def VS_AddHeadFilesGroup(program, elem, project_path):
|
||||
utils.source_ext = []
|
||||
utils.source_ext = ["h"]
|
||||
for item in program:
|
||||
utils.walk_children(item)
|
||||
utils.source_list.sort()
|
||||
# print utils.source_list
|
||||
|
||||
for f in utils.source_list:
|
||||
path = _make_path_relative(project_path, f)
|
||||
File = SubElement(elem, 'File')
|
||||
try:
|
||||
path = path.decode(fs_encoding)
|
||||
except:
|
||||
path = path
|
||||
File.set('RelativePath', path)
|
||||
|
||||
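# Fill the template_vs2005.vcproj skeleton with source groups, header files, include paths, preprocessor definitions and library settings, then write it out as the target .vcproj file.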
def VSProject(target, script, program):
|
||||
project_path = os.path.dirname(os.path.abspath(target))
|
||||
|
||||
tree = etree.parse('template_vs2005.vcproj')
|
||||
root = tree.getroot()
|
||||
|
||||
out = open(target, 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\r\n')
|
||||
|
||||
ProjectFiles = []
|
||||
|
||||
# add "*.c" files group
|
||||
for elem in tree.iter(tag='Filter'):
|
||||
if elem.attrib['Name'] == 'Source Files':
|
||||
#print elem.tag, elem.attrib
|
||||
break
|
||||
|
||||
for group in script:
|
||||
libs = []
|
||||
if 'LIBS' in group and group['LIBS']:
|
||||
for item in group['LIBS']:
|
||||
lib_path = ''
|
||||
for path_item in group['LIBPATH']:
|
||||
full_path = os.path.join(path_item, item + '.lib')
|
||||
if os.path.isfile(full_path): # has this library
|
||||
lib_path = full_path
|
||||
|
||||
if lib_path != '':
|
||||
libs.append(lib_path)
|
||||
|
||||
group_xml = VS_AddGroup(ProjectFiles, elem, group['name'], group['src'], libs, project_path)
|
||||
|
||||
# add "*.h" files group
|
||||
for elem in tree.iter(tag='Filter'):
|
||||
if elem.attrib['Name'] == 'Header Files':
|
||||
break
|
||||
VS_AddHeadFilesGroup(program, elem, project_path)
|
||||
|
||||
# write head include path
|
||||
if 'CPPPATH' in building.Env:
|
||||
cpp_path = building.Env['CPPPATH']
|
||||
paths = set()
|
||||
for path in cpp_path:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc) #.replace('\\', '/')
|
||||
|
||||
paths = [i for i in paths]
|
||||
paths.sort()
|
||||
cpp_path = ';'.join(paths)
|
||||
|
||||
# write include path, definitions
|
||||
for elem in tree.iter(tag='Tool'):
|
||||
if elem.attrib['Name'] == 'VCCLCompilerTool':
|
||||
#print elem.tag, elem.attrib
|
||||
break
|
||||
elem.set('AdditionalIncludeDirectories', cpp_path)
|
||||
|
||||
# write preprocessor definitions
|
||||
if 'CPPDEFINES' in building.Env:
|
||||
CPPDEFINES = building.Env['CPPDEFINES']
|
||||
definitions = []
|
||||
if type(CPPDEFINES[0]) == type(()):
|
||||
for item in CPPDEFINES:
|
||||
definitions += [i for i in item]
|
||||
definitions = ';'.join(definitions)
|
||||
else:
|
||||
definitions = ';'.join(building.Env['CPPDEFINES'])
|
||||
elem.set('PreprocessorDefinitions', definitions)
|
||||
# write link flags
|
||||
|
||||
# write lib dependence
|
||||
if 'LIBS' in building.Env:
|
||||
for elem in tree.iter(tag='Tool'):
|
||||
if elem.attrib['Name'] == 'VCLinkerTool':
|
||||
break
|
||||
libs_with_extention = [i+'.lib' for i in building.Env['LIBS']]
|
||||
libs = ' '.join(libs_with_extention)
|
||||
elem.set('AdditionalDependencies', libs)
|
||||
|
||||
# write lib include path
|
||||
if 'LIBPATH' in building.Env:
|
||||
lib_path = building.Env['LIBPATH']
|
||||
paths = set()
|
||||
for path in lib_path:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc) #.replace('\\', '/')
|
||||
|
||||
paths = [i for i in paths]
|
||||
paths.sort()
|
||||
lib_paths = ';'.join(paths)
|
||||
elem.set('AdditionalLibraryDirectories', lib_paths)
|
||||
|
||||
xml_indent(root)
|
||||
text = etree.tostring(root, encoding='utf-8')
|
||||
try:
|
||||
text = text.decode(encoding="utf-8")
|
||||
except:
|
||||
text = text
|
||||
out.write(text)
|
||||
out.close()
|
281
rt-thread/tools/vs2012.py
Normal file
@@ -0,0 +1,281 @@
|
||||
#
|
||||
# File : vs2012.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import string
|
||||
import building
|
||||
import uuid
|
||||
|
||||
import xml.etree.ElementTree as etree
|
||||
from xml.etree.ElementTree import SubElement
|
||||
from utils import _make_path_relative
|
||||
from utils import xml_indent
|
||||
import utils
|
||||
|
||||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
#reference
|
||||
# http://woodpecker.org.cn/diveintopython3/xml.html
|
||||
# https://pycoders-weekly-chinese.readthedocs.org/en/latest/issue6/processing-xml-in-python-with-element-tree.html
|
||||
# http://www.cnblogs.com/ifantastic/archive/2013/04/12/3017110.html
|
||||
|
||||
filter_project = etree.Element('Project', attrib={'ToolsVersion':'4.0'})
|
||||
def get_uuid():
|
||||
id = uuid.uuid1() # UUID('3e5526c0-2841-11e3-a376-20cf3048bcb3')
|
||||
if sys.version > '3':
|
||||
idstr = id.urn[9:] #'urn:uuid:3e5526c0-2841-11e3-a376-20cf3048bcb3'[9:]
|
||||
else:
|
||||
# Python 2 code path
|
||||
idstr = id.get_urn()[9:] #'urn:uuid:3e5526c0-2841-11e3-a376-20cf3048bcb3'[9:]
|
||||
|
||||
return '{'+idstr+'}'
|
||||
|
||||
def VS2012_AddGroup(parent, group_name, files, project_path):
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
|
||||
ClCompile = SubElement(parent, 'ClCompile')
|
||||
|
||||
if sys.version > '3':
|
||||
ClCompile.set('Include', path)
|
||||
else:
|
||||
# Python 2: decode byte string paths to unicode
|
||||
ClCompile.set('Include', path.decode(fs_encoding))
|
||||
|
||||
Filter = SubElement(ClCompile, 'Filter')
|
||||
Filter.text='Source Files\\'+group_name
|
||||
|
||||
def VS2012_CreateFilter(script, project_path):
|
||||
c_ItemGroup = SubElement(filter_project, 'ItemGroup')
|
||||
filter_ItemGroup = SubElement(filter_project, 'ItemGroup')
|
||||
|
||||
Filter = SubElement(filter_ItemGroup, 'Filter')
|
||||
Filter.set('Include', 'Source Files')
|
||||
UniqueIdentifier = SubElement(Filter, 'UniqueIdentifier')
|
||||
UniqueIdentifier.text = get_uuid()
|
||||
Extensions = SubElement(Filter, 'Extensions')
|
||||
Extensions.text = 'cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx'
|
||||
|
||||
Filter = SubElement(filter_ItemGroup, 'Filter')
|
||||
Filter.set('Include', 'Header Files')
|
||||
UniqueIdentifier = SubElement(Filter, 'UniqueIdentifier')
|
||||
UniqueIdentifier.text = get_uuid()
|
||||
Extensions = SubElement(Filter, 'Extensions')
|
||||
Extensions.text = 'h;hpp;hxx;hm;inl;inc;xsd'
|
||||
for group in script:
|
||||
VS2012_AddGroup(c_ItemGroup, group['name'], group['src'], project_path)
|
||||
Filter = SubElement(filter_ItemGroup, 'Filter')
|
||||
Filter.set('Include', 'Source Files\\'+group['name'])
|
||||
UniqueIdentifier = SubElement(Filter, 'UniqueIdentifier')
|
||||
UniqueIdentifier.text = get_uuid()
|
||||
|
||||
# Add one MSBuild ItemGroup for the given file type.
|
||||
# parent: xml node
|
||||
# file_type: C or H
|
||||
# files: c/h list
|
||||
# project_path: directory of the generated project file
|
||||
def VS_add_ItemGroup(parent, file_type, files, project_path):
|
||||
from building import Rtt_Root
|
||||
RTT_ROOT = os.path.normpath(Rtt_Root)
|
||||
|
||||
file_dict = {'C':"ClCompile", 'H':'ClInclude'}
|
||||
item_tag = file_dict[file_type]
|
||||
|
||||
ItemGroup = SubElement(parent, 'ItemGroup')
|
||||
for f in files:
|
||||
fn = f.rfile()
|
||||
name = fn.name
|
||||
path = os.path.dirname(fn.abspath)
|
||||
|
||||
objpath = path.lower()
|
||||
if len(project_path) >= len(RTT_ROOT) :
|
||||
if objpath.startswith(project_path.lower()) :
|
||||
objpath = ''.join('bsp'+objpath[len(project_path):])
|
||||
else :
|
||||
objpath = ''.join('kernel'+objpath[len(RTT_ROOT):])
|
||||
else :
|
||||
if objpath.startswith(RTT_ROOT.lower()) :
|
||||
objpath = ''.join('kernel'+objpath[len(RTT_ROOT):])
|
||||
else :
|
||||
objpath = ''.join('bsp'+objpath[len(project_path):])
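# Hedged illustration (hypothetical paths, not from the original file): with
# RTT_ROOT = 'd:/rt-thread' and project_path = 'd:/rt-thread/bsp/stm32', a BSP source
# directory 'd:/rt-thread/bsp/stm32/drivers' maps to objpath 'bsp/drivers', while a
# kernel source directory 'd:/rt-thread/src' maps to 'kernel/src'.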
|
||||
path = _make_path_relative(project_path, path)
|
||||
path = os.path.join(path, name)
|
||||
|
||||
File = SubElement(ItemGroup, item_tag)
|
||||
|
||||
if sys.version > '3':
|
||||
File.set('Include', path)
|
||||
else:
|
||||
# Python 2: decode byte string paths to unicode
|
||||
File.set('Include', path.decode(fs_encoding))
|
||||
|
||||
if file_type == 'C' :
|
||||
ObjName = SubElement(File, 'ObjectFileName')
|
||||
ObjName.text = ''.join('$(IntDir)'+objpath+'\\')
|
||||
|
||||
def VS_add_HeadFiles(program, elem, project_path):
|
||||
utils.source_ext = []
|
||||
utils.source_ext = ["h"]
|
||||
for item in program:
|
||||
utils.walk_children(item)
|
||||
utils.source_list.sort()
|
||||
# print utils.source_list
|
||||
ItemGroup = SubElement(elem, 'ItemGroup')
|
||||
|
||||
filter_h_ItemGroup = SubElement(filter_project, 'ItemGroup')
|
||||
for f in utils.source_list:
|
||||
path = _make_path_relative(project_path, f)
|
||||
File = SubElement(ItemGroup, 'ClInclude')
|
||||
|
||||
if sys.version > '3':
|
||||
File.set('Include', path)
|
||||
else:
|
||||
# Python 2: decode byte string paths to unicode
|
||||
File.set('Include', path.decode(fs_encoding))
|
||||
|
||||
# add project.vcxproj.filter
|
||||
ClInclude = SubElement(filter_h_ItemGroup, 'ClInclude')
|
||||
|
||||
if sys.version > '3':
|
||||
ClInclude.set('Include', path)
|
||||
else:
|
||||
# Python 2: decode byte string paths to unicode
|
||||
ClInclude.set('Include', path.decode(fs_encoding))
|
||||
|
||||
Filter = SubElement(ClInclude, 'Filter')
|
||||
Filter.text='Header Files'
|
||||
|
||||
def VS2012Project(target, script, program):
|
||||
project_path = os.path.dirname(os.path.abspath(target))
|
||||
|
||||
tree = etree.parse('template_vs2012.vcxproj')
|
||||
root = tree.getroot()
|
||||
elem = root
|
||||
|
||||
out = open(target, 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\r\n')
|
||||
|
||||
ProjectFiles = []
|
||||
|
||||
# add "*.c or *.h" files
|
||||
|
||||
VS2012_CreateFilter(script, project_path)
|
||||
# add "*.c" files
|
||||
for group in script:
|
||||
VS_add_ItemGroup(elem, 'C', group['src'], project_path)
|
||||
|
||||
# add "*.h" files
|
||||
VS_add_HeadFiles(program, elem, project_path)
|
||||
|
||||
# write head include path
|
||||
if 'CPPPATH' in building.Env:
|
||||
cpp_path = building.Env['CPPPATH']
|
||||
paths = set()
|
||||
for path in cpp_path:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc) #.replace('\\', '/')
|
||||
|
||||
paths = [i for i in paths]
|
||||
paths.sort()
|
||||
cpp_path = ';'.join(paths) + ';%(AdditionalIncludeDirectories)'
|
||||
|
||||
# write include path
|
||||
for elem in tree.iter(tag='AdditionalIncludeDirectories'):
|
||||
elem.text = cpp_path
|
||||
break
|
||||
|
||||
# write preprocessor definitions
|
||||
if 'CPPDEFINES' in building.Env:
|
||||
for elem in tree.iter(tag='PreprocessorDefinitions'):
|
||||
CPPDEFINES = building.Env['CPPDEFINES']
|
||||
definitions = []
|
||||
if type(CPPDEFINES[0]) == type(()):
|
||||
for item in CPPDEFINES:
|
||||
definitions += [i for i in item]
|
||||
definitions = ';'.join(definitions)
|
||||
else:
|
||||
definitions = ';'.join(building.Env['CPPDEFINES'])
|
||||
|
||||
definitions = definitions + ';%(PreprocessorDefinitions)'
|
||||
elem.text = definitions
|
||||
break
|
||||
# write link flags
|
||||
|
||||
# write lib dependence (Link)
|
||||
if 'LIBS' in building.Env:
|
||||
for elem in tree.iter(tag='AdditionalDependencies'):
|
||||
libs_with_extention = [i+'.lib' for i in building.Env['LIBS']]
|
||||
libs = ';'.join(libs_with_extention) + ';%(AdditionalDependencies)'
|
||||
elem.text = libs
|
||||
break
|
||||
|
||||
# write lib include path
|
||||
if 'LIBPATH' in building.Env:
|
||||
lib_path = building.Env['LIBPATH']
|
||||
paths = set()
|
||||
for path in lib_path:
|
||||
inc = _make_path_relative(project_path, os.path.normpath(path))
|
||||
paths.add(inc)
|
||||
|
||||
paths = [i for i in paths]
|
||||
paths.sort()
|
||||
lib_paths = ';'.join(paths) + ';%(AdditionalLibraryDirectories)'
|
||||
for elem in tree.iter(tag='AdditionalLibraryDirectories'):
|
||||
elem.text = lib_paths
|
||||
break
|
||||
|
||||
xml_indent(root)
|
||||
|
||||
if sys.version > '3':
|
||||
vcxproj_string = etree.tostring(root, encoding='unicode')
|
||||
else:
|
||||
# Python 2 code path
|
||||
vcxproj_string = etree.tostring(root, encoding='utf-8')
|
||||
|
||||
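# Note on the write below: ElementTree serializes the root tag without the MSBuild
# namespace, so the plain '<Project DefaultTargets="Build" ToolsVersion="4.0">' prefix is
# sliced off (len(root_node)) and rewritten by hand with the xmlns attribute Visual Studio expects.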
root_node=r'<Project DefaultTargets="Build" ToolsVersion="4.0">'
|
||||
out.write(r'<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">')
|
||||
out.write(vcxproj_string[len(root_node):])
|
||||
out.close()
|
||||
|
||||
xml_indent(filter_project)
|
||||
|
||||
if sys.version > '3':
|
||||
filter_string = etree.tostring(filter_project, encoding='unicode')
|
||||
else:
|
||||
# Python 2 code path
|
||||
filter_string = etree.tostring(filter_project, encoding='utf-8')
|
||||
|
||||
out = open('project.vcxproj.filters', 'w')
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\r\n')
|
||||
root_node=r'<Project ToolsVersion="4.0">'
|
||||
out.write(r'<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">')
|
||||
out.write(filter_string[len(root_node):])
|
||||
out.close()
|
||||
|
121
rt-thread/tools/vsc.py
Normal file
@@ -0,0 +1,121 @@
|
||||
#
|
||||
# File : vsc.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2018, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2018-05-30 Bernard The first version
|
||||
# 2023-03-03 Supperthomas Add the vscode workspace config file
|
||||
|
||||
"""
|
||||
Utils for VSCode
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import utils
|
||||
import rtconfig
|
||||
from utils import _make_path_relative
|
||||
|
||||
def delete_repeatelist(data):
|
||||
temp_dict = set([str(item) for item in data])
|
||||
data = [eval(i) for i in temp_dict]
|
||||
return data
|
||||
|
||||
def GenerateCFiles(env):
|
||||
"""
|
||||
Generate c_cpp_properties files
|
||||
"""
|
||||
if not os.path.exists('.vscode'):
|
||||
os.mkdir('.vscode')
|
||||
|
||||
vsc_file = open('.vscode/c_cpp_properties.json', 'w')
|
||||
if vsc_file:
|
||||
info = utils.ProjectInfo(env)
|
||||
|
||||
cc = os.path.join(rtconfig.EXEC_PATH, rtconfig.CC)
|
||||
cc = os.path.abspath(cc).replace('\\', '/')
|
||||
|
||||
config_obj = {}
|
||||
config_obj['name'] = 'rt-thread'
|
||||
config_obj['defines'] = info['CPPDEFINES']
|
||||
|
||||
intelliSenseMode = 'gcc-arm'
|
||||
if cc.find('aarch64') != -1:
|
||||
intelliSenseMode = 'gcc-arm64'
|
||||
elif cc.find('arm') != -1:
|
||||
intelliSenseMode = 'gcc-arm'
|
||||
config_obj['intelliSenseMode'] = intelliSenseMode
|
||||
config_obj['compilerPath'] = cc
|
||||
config_obj['cStandard'] = "c99"
|
||||
config_obj['cppStandard'] = "c++11"
|
||||
config_obj['compileCommands'] ="build/compile_commands.json"
|
||||
|
||||
# format "a/b," to a/b. remove first quotation mark("),and remove end (",)
|
||||
includePath = []
|
||||
for i in info['CPPPATH']:
|
||||
if i[0] == '\"' and i[len(i) - 2:len(i)] == '\",':
|
||||
includePath.append(_make_path_relative(os.getcwd(), i[1:len(i) - 2]))
|
||||
else:
|
||||
includePath.append(_make_path_relative(os.getcwd(), i))
|
||||
config_obj['includePath'] = includePath
|
||||
|
||||
json_obj = {}
|
||||
json_obj['configurations'] = [config_obj]
|
||||
|
||||
vsc_file.write(json.dumps(json_obj, ensure_ascii=False, indent=4))
|
||||
vsc_file.close()
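# Hedged sketch of the resulting .vscode/c_cpp_properties.json (values depend on the
# target's rtconfig and environment; paths and defines below are placeholders):
# {
#     "configurations": [
#         {
#             "name": "rt-thread",
#             "defines": ["..."],
#             "intelliSenseMode": "gcc-arm",
#             "compilerPath": "<toolchain>/bin/arm-none-eabi-gcc",
#             "cStandard": "c99",
#             "cppStandard": "c++11",
#             "compileCommands": "build/compile_commands.json",
#             "includePath": ["..."]
#         }
#     ]
# }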
|
||||
|
||||
"""
|
||||
Generate vscode.code-workspace files
|
||||
"""
|
||||
vsc_space_file = open('vscode.code-workspace', 'w')
|
||||
if vsc_space_file:
|
||||
info = utils.ProjectInfo(env)
|
||||
path_list = []
|
||||
for i in info['CPPPATH']:
|
||||
if _make_path_relative(os.getcwd(), i)[0] == '.':
|
||||
if i[0] == '\"' and i[len(i) - 2:len(i)] == '\",':
|
||||
path_list.append({'path':_make_path_relative(os.getcwd(), i[1:len(i) - 2])})
|
||||
else:
|
||||
path_list.append({'path':_make_path_relative(os.getcwd(), i)})
|
||||
for i in info['DIRS']:
|
||||
if _make_path_relative(os.getcwd(), i)[0] == '.':
|
||||
if i[0] == '\"' and i[len(i) - 2:len(i)] == '\",':
|
||||
path_list.append({'path':_make_path_relative(os.getcwd(), i[1:len(i) - 2])})
|
||||
else:
|
||||
path_list.append({'path':_make_path_relative(os.getcwd(), i)})
|
||||
|
||||
json_obj = {}
|
||||
path_list = delete_repeatelist(path_list)
|
||||
path_list = sorted(path_list, key=lambda x: x["path"])
|
||||
target_path_list = []
|
||||
for path in path_list:
|
||||
if path['path'] != '.':
|
||||
path['name'] = 'rtthread/' + '/'.join([p for p in path['path'].split('\\') if p != '..'])
|
||||
json_obj['folders'] = path_list
|
||||
vsc_space_file.write(json.dumps(json_obj, ensure_ascii=False, indent=4))
|
||||
vsc_space_file.close()
|
||||
return
|
||||
|
||||
def GenerateVSCode(env):
|
||||
print('Update setting files for VSCode...')
|
||||
GenerateCFiles(env)
|
||||
print('Done!')
|
||||
|
||||
return
|
190
rt-thread/tools/vscpyocd.py
Normal file
@@ -0,0 +1,190 @@
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
|
||||
def create_need_files(targetId, packpath):
|
||||
"""
|
||||
Generate pyocd.yaml files
|
||||
"""
|
||||
yaml_file = open('pyocd.yaml', 'w')
|
||||
if yaml_file:
|
||||
yaml_file.write('\npack:\n')
|
||||
yaml_file.write(' - ' + packpath + '\n')
|
||||
yaml_file.close()
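# The generated pyocd.yaml only registers the CMSIS pack, e.g. (the pack path is a placeholder):
#
# pack:
#  - <env_root>/tools/cmsisPacks/<vendor>.<soc>_DFP.<version>.pack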
|
||||
|
||||
"""
|
||||
Generate .vscode/launch.json files
|
||||
"""
|
||||
vsc_launch_file = open('.vscode/launch.json', 'w')
|
||||
if vsc_launch_file:
|
||||
config_obj = {}
|
||||
config_obj['name'] = 'Cortex Debug'
|
||||
config_obj['cwd'] = '${workspaceFolder}'
|
||||
config_obj['executable'] = 'rt-thread.elf'
|
||||
config_obj['request'] = 'launch'
|
||||
config_obj['type'] = 'cortex-debug'
|
||||
config_obj['runToEntryPoint'] = 'Reset_Handler'
|
||||
config_obj['servertype'] = 'pyocd'
|
||||
if os.getenv('RTT_EXEC_PATH'):
|
||||
config_obj['armToolchainPath'] = os.getenv('RTT_EXEC_PATH').replace('\\', '/')
|
||||
else:
|
||||
print('env <RTT_EXEC_PATH> not set!')
|
||||
config_obj['toolchainPrefix'] = 'arm-none-eabi'
|
||||
config_obj['targetId'] = targetId
|
||||
|
||||
json_obj = {}
|
||||
json_obj['version'] = '0.2.0'
|
||||
json_obj['configurations'] = [config_obj]
|
||||
vsc_launch_file.write(json.dumps(json_obj, ensure_ascii=False, indent=4))
|
||||
vsc_launch_file.close()
|
||||
|
||||
"""
|
||||
Generate .vscode/tasks.json files
|
||||
"""
|
||||
vsc_tasks_file = open('.vscode/tasks.json', 'w')
|
||||
if vsc_tasks_file:
|
||||
task_build_obj = {}
|
||||
task_build_obj['type'] = 'shell'
|
||||
task_build_obj['label'] = 'Build target files'
|
||||
task_build_obj['command'] = 'scons'
|
||||
task_build_obj['args'] = ['-j12']
|
||||
task_build_obj['problemMatcher'] = ['$gcc']
|
||||
task_build_obj['group'] = 'build'
|
||||
|
||||
task_download_obj = {}
|
||||
task_download_obj['type'] = 'shell'
|
||||
task_download_obj['label'] = 'Download code to flash memory'
|
||||
task_download_obj['command'] = 'python'
|
||||
task_download_obj['args'] = ['-m', 'pyocd', 'flash', '--erase', 'chip', '--target', \
|
||||
targetId, 'rt-thread.elf']
|
||||
task_download_obj['problemMatcher'] = ['$gcc']
|
||||
task_download_obj['group'] = 'build'
|
||||
|
||||
task_build_download_obj = task_download_obj.copy()
|
||||
task_build_download_obj['label'] = 'Build and Download'
|
||||
task_build_download_obj['dependsOn'] = 'Build target files'
|
||||
|
||||
json_obj = {}
|
||||
json_obj['version'] = '2.0.0'
|
||||
json_obj['tasks'] = [task_build_obj, task_download_obj, task_build_download_obj]
|
||||
vsc_tasks_file.write(json.dumps(json_obj, ensure_ascii=False, indent=4))
|
||||
vsc_tasks_file.close()
|
||||
|
||||
def similar_char_num(str1, str2):
|
||||
lstr1 = len(str1)
|
||||
lstr2 = len(str2)
|
||||
record = [[0 for i in range(lstr2+1)] for j in range(lstr1+1)]
|
||||
similar_num = 0
|
||||
|
||||
for i in range(lstr1):
|
||||
for j in range(lstr2):
|
||||
if str1[i] == str2[j]:
|
||||
record[i+1][j+1] = record[i][j] + 1
|
||||
if record[i+1][j+1] > similar_num:
|
||||
similar_num = record[i+1][j+1]
|
||||
return similar_num
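# similar_char_num() returns the length of the longest common substring of the two
# strings, e.g. similar_char_num('stm32f407', 'stm32f407vgtx') == 9 (worked example,
# not part of the original file).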
|
||||
|
||||
def get_socName_from_rtconfig():
|
||||
socName = None
|
||||
rtconfig_file = open('rtconfig.h', 'r')
|
||||
if rtconfig_file:
|
||||
for line in rtconfig_file.readlines():
|
||||
if 'SOC' in line and 'FAMILY' not in line and 'SERIES' not in line:
|
||||
socName = line.strip().split('_')[-1]
|
||||
rtconfig_file.close()
|
||||
return socName
|
||||
|
||||
def get_pack_from_env():
|
||||
if os.environ.get('ENV_ROOT') == None:
|
||||
if sys.platform == 'win32':
|
||||
home_dir = os.environ['USERPROFILE']
|
||||
env_dir = os.path.join(home_dir, '.env')
|
||||
else:
|
||||
home_dir = os.environ['HOME']
|
||||
env_dir = os.path.join(home_dir, '.env')
|
||||
else:
|
||||
env_dir = os.environ.get('ENV_ROOT')
|
||||
|
||||
pack_dir = env_dir.replace('\\', '/') + '/tools/cmsisPacks/'
|
||||
|
||||
if not os.path.exists(pack_dir):
|
||||
print('<%s> does not exist, please create it.' % pack_dir)
|
||||
return
|
||||
|
||||
# get soc name from <rtconfig.h> file
|
||||
socName = get_socName_from_rtconfig()
|
||||
if socName == None:
|
||||
return
|
||||
|
||||
# Find the pack that best matches soc name
|
||||
max_similar_num = 0
|
||||
max_similar_pack = None
|
||||
for file in os.listdir(pack_dir):
|
||||
if str(file).endswith('.pack'):
|
||||
similar_num = similar_char_num(socName, file)
|
||||
if(similar_num > max_similar_num):
|
||||
max_similar_num = similar_num
|
||||
max_similar_pack = file
|
||||
print('SOC <%s> best matches pack <%s>' % (socName, max_similar_pack))
|
||||
if max_similar_pack == None:
|
||||
return
|
||||
|
||||
return pack_dir + max_similar_pack
|
||||
|
||||
def get_trgetId_from_pack(pack):
|
||||
# get soc name from <rtconfig.h> file
|
||||
socName = get_socName_from_rtconfig()
|
||||
if socName == None:
|
||||
return
|
||||
|
||||
# Query pyocd for the targets provided by the selected CMSIS pack
|
||||
result = os.popen('python -m pyocd json --target --pack ' + pack)
|
||||
pyocd_json = json.loads(result.read())
|
||||
if pyocd_json['status'] != 0:
|
||||
return
|
||||
|
||||
# Find the targetId that best matches soc name
|
||||
max_similar_num = 0
|
||||
max_similar_targetId = None
|
||||
for target in pyocd_json['targets']:
|
||||
if (target['source'] == 'pack'):
|
||||
similar_num = similar_char_num(socName.lower(), target['name'])
|
||||
if(similar_num > max_similar_num):
|
||||
max_similar_num = similar_num
|
||||
max_similar_targetId = target['name']
|
||||
print('SOC <%s> best matches targetId <%s>' % (socName, max_similar_targetId))
|
||||
if max_similar_targetId == None:
|
||||
return
|
||||
if max_similar_num < len(socName):
|
||||
print('<%s> does not match <%s>' % (socName, pack))
|
||||
return
|
||||
|
||||
return max_similar_targetId
|
||||
|
||||
def GenerateVSCodePyocdConfig(pack):
|
||||
if pack == 'env':
|
||||
pack = get_pack_from_env()
|
||||
if pack == None:
|
||||
return
|
||||
else:
|
||||
# Check the pack path exists
|
||||
if not os.path.exists(pack):
|
||||
return
|
||||
# Check it is a file
|
||||
if not os.path.isfile(pack):
|
||||
return
|
||||
# Check it is a .pack file
|
||||
if not str(pack).endswith('.pack'):
|
||||
return
|
||||
|
||||
pack = pack.replace('\\', '/')
|
||||
|
||||
targetId = get_trgetId_from_pack(pack)
|
||||
if targetId ==None:
|
||||
return
|
||||
|
||||
create_need_files(targetId, pack)
|
||||
print('Pyocd Config Done!')
|
||||
|
||||
return
|
||||
|
185
rt-thread/tools/win32spawn.py
Normal file
@@ -0,0 +1,185 @@
|
||||
#
|
||||
# File : win32spawn.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
import os
|
||||
import threading
|
||||
import sys
|
||||
|
||||
_PY2 = sys.version_info[0] < 3
|
||||
if _PY2:
|
||||
import Queue
|
||||
else:
|
||||
import queue as Queue
|
||||
|
||||
# Windows import
|
||||
import win32file
|
||||
import win32pipe
|
||||
import win32api
|
||||
import win32con
|
||||
import win32security
|
||||
import win32process
|
||||
import win32event
|
||||
|
||||
class Win32Spawn(object):
|
||||
def __init__(self, cmd, shell=False):
|
||||
self.queue = Queue.Queue()
|
||||
self.is_terminated = False
|
||||
self.wake_up_event = win32event.CreateEvent(None, 0, 0, None)
|
||||
|
||||
exec_dir = os.getcwd()
|
||||
comspec = os.environ.get("COMSPEC", "cmd.exe")
|
||||
cmd = comspec + ' /c ' + cmd
|
||||
|
||||
win32event.ResetEvent(self.wake_up_event)
|
||||
|
||||
currproc = win32api.GetCurrentProcess()
|
||||
|
||||
sa = win32security.SECURITY_ATTRIBUTES()
|
||||
sa.bInheritHandle = 1
|
||||
|
||||
child_stdout_rd, child_stdout_wr = win32pipe.CreatePipe(sa, 0)
|
||||
child_stdout_rd_dup = win32api.DuplicateHandle(currproc, child_stdout_rd, currproc, 0, 0, win32con.DUPLICATE_SAME_ACCESS)
|
||||
win32file.CloseHandle(child_stdout_rd)
|
||||
|
||||
child_stderr_rd, child_stderr_wr = win32pipe.CreatePipe(sa, 0)
|
||||
child_stderr_rd_dup = win32api.DuplicateHandle(currproc, child_stderr_rd, currproc, 0, 0, win32con.DUPLICATE_SAME_ACCESS)
|
||||
win32file.CloseHandle(child_stderr_rd)
|
||||
|
||||
child_stdin_rd, child_stdin_wr = win32pipe.CreatePipe(sa, 0)
|
||||
child_stdin_wr_dup = win32api.DuplicateHandle(currproc, child_stdin_wr, currproc, 0, 0, win32con.DUPLICATE_SAME_ACCESS)
|
||||
win32file.CloseHandle(child_stdin_wr)
|
||||
|
||||
startup_info = win32process.STARTUPINFO()
|
||||
startup_info.hStdInput = child_stdin_rd
|
||||
startup_info.hStdOutput = child_stdout_wr
|
||||
startup_info.hStdError = child_stderr_wr
|
||||
startup_info.dwFlags = win32process.STARTF_USESTDHANDLES
|
||||
|
||||
cr_flags = 0
|
||||
cr_flags = win32process.CREATE_NEW_PROCESS_GROUP
|
||||
|
||||
env = os.environ.copy()
|
||||
self.h_process, h_thread, dw_pid, dw_tid = win32process.CreateProcess(None, cmd, None, None, 1,
|
||||
cr_flags, env, os.path.abspath(exec_dir),
|
||||
startup_info)
|
||||
|
||||
win32api.CloseHandle(h_thread)
|
||||
|
||||
win32file.CloseHandle(child_stdin_rd)
|
||||
win32file.CloseHandle(child_stdout_wr)
|
||||
win32file.CloseHandle(child_stderr_wr)
|
||||
|
||||
self.__child_stdout = child_stdout_rd_dup
|
||||
self.__child_stderr = child_stderr_rd_dup
|
||||
self.__child_stdin = child_stdin_wr_dup
|
||||
|
||||
self.exit_code = -1
|
||||
|
||||
def close(self):
|
||||
win32file.CloseHandle(self.__child_stdout)
|
||||
win32file.CloseHandle(self.__child_stderr)
|
||||
win32file.CloseHandle(self.__child_stdin)
|
||||
win32api.CloseHandle(self.h_process)
|
||||
win32api.CloseHandle(self.wake_up_event)
|
||||
|
||||
def kill_subprocess():
|
||||
win32event.SetEvent(self.wake_up_event)
|
||||
|
||||
def sleep(secs):
|
||||
win32event.ResetEvent(self.wake_up_event)
|
||||
timeout = int(1000 * secs)
|
||||
val = win32event.WaitForSingleObject(self.wake_up_event, timeout)
|
||||
if val == win32event.WAIT_TIMEOUT:
|
||||
return True
|
||||
else:
|
||||
# The wake_up_event must have been signalled
|
||||
return False
|
||||
|
||||
def get(self, block=True, timeout=None):
|
||||
return self.queue.get(block=block, timeout=timeout)
|
||||
|
||||
def qsize(self):
|
||||
return self.queue.qsize()
|
||||
|
||||
def __wait_for_child(self):
|
||||
# kick off threads to read from stdout and stderr of the child process
|
||||
threading.Thread(target=self.__do_read, args=(self.__child_stdout, )).start()
|
||||
threading.Thread(target=self.__do_read, args=(self.__child_stderr, )).start()
|
||||
|
||||
while True:
|
||||
# block waiting for the process to finish or the interrupt to happen
|
||||
handles = (self.wake_up_event, self.h_process)
|
||||
val = win32event.WaitForMultipleObjects(handles, 0, win32event.INFINITE)
|
||||
|
||||
if val >= win32event.WAIT_OBJECT_0 and val < win32event.WAIT_OBJECT_0 + len(handles):
|
||||
handle = handles[val - win32event.WAIT_OBJECT_0]
|
||||
if handle == self.wake_up_event:
|
||||
win32api.TerminateProcess(self.h_process, 1)
|
||||
win32event.ResetEvent(self.wake_up_event)
|
||||
return False
|
||||
elif handle == self.h_process:
|
||||
# the process has ended naturally
|
||||
return True
|
||||
else:
|
||||
assert False, "Unknown handle fired"
|
||||
else:
|
||||
assert False, "Unexpected return from WaitForMultipleObjects"
|
||||
|
||||
# Wait for the job to finish. Since this method blocks, it may need to be called from another thread.
|
||||
# If the application wants to kill the process, it should call kill_subprocess().
|
||||
def wait(self):
|
||||
if not self.__wait_for_child():
|
||||
# it's been killed
|
||||
result = False
|
||||
else:
|
||||
# normal termination
|
||||
self.exit_code = win32process.GetExitCodeProcess(self.h_process)
|
||||
result = self.exit_code == 0
|
||||
self.close()
|
||||
self.is_terminated = True
|
||||
|
||||
return result
|
||||
|
||||
# This method gets called on a worker thread to read from either a stderr
|
||||
# or stdout thread from the child process.
|
||||
def __do_read(self, handle):
|
||||
bytesToRead = 1024
|
||||
while 1:
|
||||
try:
|
||||
finished = 0
|
||||
hr, data = win32file.ReadFile(handle, bytesToRead, None)
|
||||
if data:
|
||||
self.queue.put_nowait(data)
|
||||
except win32api.error:
|
||||
finished = 1
|
||||
|
||||
if finished:
|
||||
return
|
||||
|
||||
def start_pipe(self):
|
||||
def worker(pipe):
|
||||
return pipe.wait()
|
||||
|
||||
thrd = threading.Thread(target=worker, args=(self, ))
|
||||
thrd.start()
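# Hedged usage sketch (not part of the original file; assumes Win32Spawn is driven as a
# spawn helper on Windows):
#
#   spawn = Win32Spawn('scons --verbose')
#   spawn.start_pipe()                      # wait() runs on a worker thread
#   while not spawn.is_terminated or spawn.qsize():
#       try:
#           sys.stdout.write(spawn.get(timeout=0.5).decode(errors='replace'))
#       except Queue.Empty:
#           pass
#   print('exit code:', spawn.exit_code)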
|
95
rt-thread/tools/wizard.py
Normal file
@@ -0,0 +1,95 @@
|
||||
#! /usr/bin/env python
|
||||
#coding=utf-8
|
||||
|
||||
#
|
||||
# File : wizard.py
|
||||
# This file is part of RT-Thread RTOS
|
||||
# COPYRIGHT (C) 2006 - 2015, RT-Thread Development Team
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Change Logs:
|
||||
# Date Author Notes
|
||||
# 2015-01-20 Bernard Add copyright information
|
||||
#
|
||||
|
||||
"""
|
||||
wizard.py - a script to generate SConscript in RT-Thread RTOS.
|
||||
|
||||
`wizard --component name' to generate SConscript for name component.
|
||||
`wizard --bridge' to generate a SConscript that bridges the
|
||||
SConscript files of the sub-directories.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
SConscript_com = '''# RT-Thread building script for component
|
||||
|
||||
from building import *
|
||||
|
||||
cwd = GetCurrentDir()
|
||||
src = Glob('*.c') + Glob('*.cpp')
|
||||
CPPPATH = [cwd]
|
||||
|
||||
group = DefineGroup('COMPONENT_NAME', src, depend = [''], CPPPATH = CPPPATH)
|
||||
|
||||
Return('group')
|
||||
'''
|
||||
|
||||
SConscript_bridge = '''# RT-Thread building script for bridge
|
||||
|
||||
import os
|
||||
from building import *
|
||||
|
||||
cwd = GetCurrentDir()
|
||||
objs = []
|
||||
list = os.listdir(cwd)
|
||||
|
||||
for d in list:
|
||||
path = os.path.join(cwd, d)
|
||||
if os.path.isfile(os.path.join(path, 'SConscript')):
|
||||
objs = objs + SConscript(os.path.join(d, 'SConscript'))
|
||||
|
||||
Return('objs')
|
||||
'''
|
||||
|
||||
def usage():
|
||||
print('wizard --component name')
|
||||
print('wizard --bridge')
|
||||
|
||||
def gen_component(name):
|
||||
print('generate SConscript for ' + name)
|
||||
text = SConscript_com.replace('COMPONENT_NAME', name)
|
||||
f = open('SConscript', 'w')
|
||||
f.write(text)
|
||||
f.close()
|
||||
|
||||
def gen_bridge():
|
||||
print('generate SConscript for bridge')
|
||||
f = open('SConscript', 'w')
|
||||
f.write(SConscript_bridge)
|
||||
f.close()
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) == 1:
|
||||
usage()
|
||||
sys.exit(2)
|
||||
|
||||
if sys.argv[1] == '--component':
|
||||
gen_component(sys.argv[2])
|
||||
elif sys.argv[1] == '--bridge':
|
||||
gen_bridge()
|
||||
else:
|
||||
usage()
|
46
rt-thread/tools/xmake.lua
Normal file
@@ -0,0 +1,46 @@
|
||||
add_rules("mode.debug", "mode.release")
|
||||
|
||||
toolchain("${toolchain}")
|
||||
set_kind("standalone")
|
||||
set_sdkdir("${sdkdir}")
|
||||
toolchain_end()
|
||||
|
||||
target("${target}")
|
||||
set_kind("binary")
|
||||
set_toolchains("${toolchain}")
|
||||
|
||||
add_files(
|
||||
${src_path}
|
||||
)
|
||||
|
||||
add_includedirs(
|
||||
${inc_path}
|
||||
)
|
||||
|
||||
add_defines(
|
||||
${define}
|
||||
)
|
||||
|
||||
add_cflags(
|
||||
"${cflags}" ,{force = true}
|
||||
)
|
||||
add_cxxflags(
|
||||
"${cxxflags}" ,{force = true}
|
||||
)
|
||||
|
||||
add_asflags(
|
||||
"${asflags}" ,{force = true}
|
||||
)
|
||||
|
||||
add_ldflags(
|
||||
"${ldflags}" ,{force = true}
|
||||
)
|
||||
|
||||
set_targetdir("./")
|
||||
set_filename("rtthread.elf")
|
||||
|
||||
after_build(function(target)
|
||||
os.exec("${bindir}/${toolchain}-objcopy -O ihex rtthread.elf rtthread.hex")
|
||||
os.exec("${bindir}/${toolchain}-objcopy -O binary rtthread.elf rtthread.bin")
|
||||
os.exec("${bindir}/${toolchain}-size rtthread.elf")
|
||||
end)
|
93
rt-thread/tools/xmake.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""
|
||||
Utils for xmake
|
||||
Author: https://github.com/klivelinux
|
||||
"""
|
||||
|
||||
import os
|
||||
import utils
|
||||
from string import Template
|
||||
import rtconfig
|
||||
|
||||
from utils import _make_path_relative
|
||||
|
||||
|
||||
class XmakeProject:
|
||||
def __init__(self, env, project):
|
||||
self.env = env
|
||||
self.project = project
|
||||
self.sdkdir = ""
|
||||
self.bindir = ""
|
||||
self.toolchain = ""
|
||||
self.src_path = ""
|
||||
self.inc_path = ""
|
||||
self.cflags = ""
|
||||
self.cxxflags = ""
|
||||
self.ldflags = ""
|
||||
self.asflags = ""
|
||||
self.define = ""
|
||||
|
||||
def set_toolchain_path(self):
|
||||
self.bindir = os.path.abspath(rtconfig.EXEC_PATH).replace('\\', "/")
|
||||
self.sdkdir = self.bindir[:-4]  # drop the last 4 chars (typically '/bin') to get the toolchain root
|
||||
# drop the trailing '-' of rtconfig.PREFIX to get the bare toolchain name
|
||||
self.toolchain = rtconfig.PREFIX[:-1]
|
||||
|
||||
def set_target_config(self):
|
||||
info = utils.ProjectInfo(self.env)
|
||||
# 1. config src path
|
||||
for group in self.project:
|
||||
for f in group['src']:
|
||||
# use relative path
|
||||
path = _make_path_relative(os.getcwd(), os.path.normpath(f.rfile().abspath))
|
||||
self.src_path += "\t\"{0}\",\n".format(path.replace("\\", "/"))
|
||||
self.src_path = self.src_path[:-2]
|
||||
# 2. config include path
|
||||
for i in info['CPPPATH']:
|
||||
# use relative path
|
||||
path = _make_path_relative(os.getcwd(), i)
|
||||
self.inc_path += "\t\"{0}\",\n".format(path.replace("\\", "/"))
|
||||
self.inc_path = self.inc_path[:-2]
|
||||
# 3. config cflags
|
||||
self.cflags = rtconfig.CFLAGS.replace('\\', "/").replace('\"', "\\\"")
|
||||
# 4. config cxxflags
|
||||
if 'CXXFLAGS' in dir(rtconfig):
|
||||
self.cxxflags = rtconfig.CXXFLAGS.replace('\\', "/").replace('\"', "\\\"")
|
||||
else:
|
||||
self.cxxflags = self.cflags
|
||||
# 5. config asflags
|
||||
self.asflags = rtconfig.AFLAGS.replace('\\', "/").replace('\"', "\\\"")
|
||||
# 6. config ldflags
|
||||
self.ldflags = rtconfig.LFLAGS.replace('\\', "/").replace('\"', "\\\"")
|
||||
# 7. config define
|
||||
for i in info['CPPDEFINES']:
|
||||
self.define += "\t\"{0}\",\n".format(i)
|
||||
self.define = self.define[:-2]
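# Hedged illustration (file names are placeholders): each of src_path / inc_path / define
# becomes a block of tab-indented, quoted, comma-separated entries with the final ',\n'
# trimmed, ready to substitute into xmake.lua, e.g.
#   add_files(
#       "applications/main.c",
#       "libraries/HAL_Drivers/drv_gpio.c"
#   )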
|
||||
|
||||
def generate_xmake_file(self):
|
||||
if os.getenv('RTT_ROOT'):
|
||||
RTT_ROOT = os.getenv('RTT_ROOT')
|
||||
else:
|
||||
RTT_ROOT = os.path.normpath(os.getcwd() + '/../../..')
|
||||
|
||||
template_path = os.path.join(RTT_ROOT, "tools", "xmake.lua")
|
||||
with open(template_path, "r") as f:
|
||||
data = f.read()
|
||||
data = Template(data)
|
||||
data = data.safe_substitute(toolchain=self.toolchain, sdkdir=self.sdkdir, bindir=self.bindir, src_path=self.src_path, inc_path=self.inc_path,
|
||||
define=self.define, cflags=self.cflags, cxxflags=self.cxxflags, asflags=self.asflags,
|
||||
ldflags=self.ldflags, target="rt-thread")
|
||||
with open("xmake.lua", "w") as f:
|
||||
f.write(data)
|
||||
|
||||
|
||||
def XMakeProject(env,project):
|
||||
print('Update setting files for xmake.lua...')
|
||||
|
||||
xmake_project = XmakeProject(env, project)
|
||||
xmake_project.set_toolchain_path()
|
||||
xmake_project.set_target_config()
|
||||
xmake_project.generate_xmake_file()
|
||||
|
||||
print('Done!')
|
||||
|
||||
return
|