diff --git a/tools/.mbedignore b/tools/.mbedignore
new file mode 100644
index 0000000..f59ec20
--- /dev/null
+++ b/tools/.mbedignore
@@ -0,0 +1 @@
+*
\ No newline at end of file
diff --git a/tools/__init__.py b/tools/__init__.py
new file mode 100644
index 0000000..1fa8431
--- /dev/null
+++ b/tools/__init__.py
@@ -0,0 +1,16 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin b/tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin
new file mode 100644
index 0000000..e5c640d
--- /dev/null
+++ b/tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin
Binary files differ
diff --git a/tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin b/tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin
new file mode 100644
index 0000000..5a270bc
--- /dev/null
+++ b/tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin
Binary files differ
diff --git a/tools/build.py b/tools/build.py
new file mode 100644
index 0000000..8ae18c0
--- /dev/null
+++ b/tools/build.py
@@ -0,0 +1,269 @@
+#! /usr/bin/env python2
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+LIBRARIES BUILD
+"""
+import sys
+from time import time
+from os.path import join, abspath, dirname
+
+
+# Be sure that the tools directory is in the search path
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+
+from tools.toolchains import TOOLCHAINS
+from tools.targets import TARGET_NAMES, TARGET_MAP
+from tools.options import get_default_options_parser
+from tools.build_api import build_mbed_libs, build_lib
+from tools.build_api import mcu_toolchain_matrix
+from tools.build_api import static_analysis_scan, static_analysis_scan_lib, static_analysis_scan_library
+from tools.build_api import print_build_results
+from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
+
+if __name__ == '__main__':
+ start = time()
+
+ # Parse Options
+ parser = get_default_options_parser()
+
+ # Extra libraries
+ parser.add_option("-r", "--rtos",
+ action="store_true",
+ dest="rtos",
+ default=False,
+ help="Compile the rtos")
+
+ parser.add_option("--rpc",
+ action="store_true",
+ dest="rpc",
+ default=False,
+ help="Compile the rpc library")
+
+ parser.add_option("-e", "--eth",
+ action="store_true", dest="eth",
+ default=False,
+ help="Compile the ethernet library")
+
+ parser.add_option("-U", "--usb_host",
+ action="store_true",
+ dest="usb_host",
+ default=False,
+ help="Compile the USB Host library")
+
+ parser.add_option("-u", "--usb",
+ action="store_true",
+ dest="usb",
+ default=False,
+ help="Compile the USB Device library")
+
+ parser.add_option("-d", "--dsp",
+ action="store_true",
+ dest="dsp",
+ default=False,
+ help="Compile the DSP library")
+
+ parser.add_option("-F", "--fat",
+ action="store_true",
+ dest="fat",
+ default=False,
+ help="Compile FS and SD card file system library")
+
+ parser.add_option("-b", "--ublox",
+ action="store_true",
+ dest="ublox",
+ default=False,
+ help="Compile the u-blox library")
+
+ parser.add_option("", "--cpputest",
+ action="store_true",
+ dest="cpputest_lib",
+ default=False,
+ help="Compiles 'cpputest' unit test library (library should be on the same directory level as mbed repository)")
+
+ parser.add_option("-D", "",
+ action="append",
+ dest="macros",
+ help="Add a macro definition")
+
+ parser.add_option("-S", "--supported-toolchains",
+ action="store_true",
+ dest="supported_toolchains",
+ default=False,
+ help="Displays supported matrix of MCUs and toolchains")
+
+ parser.add_option("", "--cppcheck",
+ action="store_true",
+ dest="cppcheck_validation",
+ default=False,
+ help="Forces 'cppcheck' static code analysis")
+
+ parser.add_option('-f', '--filter',
+ dest='general_filter_regex',
+ default=None,
+ help='For some commands you can use filter to filter out results')
+
+ parser.add_option("-j", "--jobs", type="int", dest="jobs",
+ default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
+
+ parser.add_option("-v", "--verbose",
+ action="store_true",
+ dest="verbose",
+ default=False,
+ help="Verbose diagnostic output")
+
+ parser.add_option("--silent",
+ action="store_true",
+ dest="silent",
+ default=False,
+ help="Silent diagnostic output (no copy, compile notification)")
+
+ parser.add_option("-x", "--extra-verbose-notifications",
+ action="store_true",
+ dest="extra_verbose_notify",
+ default=False,
+ help="Makes compiler more verbose, CI friendly.")
+
+ (options, args) = parser.parse_args()
+
+ # Only prints matrix of supported toolchains
+ if options.supported_toolchains:
+ print mcu_toolchain_matrix(platform_filter=options.general_filter_regex)
+ exit(0)
+
+ # Get target list
+ if options.mcu:
+ mcu_list = (options.mcu).split(",")
+ for mcu in mcu_list:
+ if mcu not in TARGET_NAMES:
+ print "Given MCU '%s' not into the supported list:\n%s" % (mcu, TARGET_NAMES)
+ sys.exit(1)
+ targets = mcu_list
+ else:
+ targets = TARGET_NAMES
+
+ # Get toolchains list
+ if options.tool:
+ toolchain_list = (options.tool).split(",")
+ for tc in toolchain_list:
+ if tc not in TOOLCHAINS:
+ print "Given toolchain '%s' not into the supported list:\n%s" % (tc, TOOLCHAINS)
+ sys.exit(1)
+ toolchains = toolchain_list
+ else:
+ toolchains = TOOLCHAINS
+
+ # Get libraries list
+ libraries = []
+
+ # Additional Libraries
+ if options.rtos:
+ libraries.extend(["rtx", "rtos"])
+ if options.rpc:
+ libraries.extend(["rpc"])
+ if options.eth:
+ libraries.append("eth")
+ if options.usb:
+ libraries.append("usb")
+ if options.usb_host:
+ libraries.append("usb_host")
+ if options.dsp:
+ libraries.extend(["cmsis_dsp", "dsp"])
+ if options.fat:
+ libraries.extend(["fat"])
+ if options.ublox:
+ libraries.extend(["rtx", "rtos", "usb_host", "ublox"])
+ if options.cpputest_lib:
+ libraries.extend(["cpputest"])
+
+ # Build results
+ failures = []
+ successes = []
+ skipped = []
+
+ # CPPCHECK code validation
+ if options.cppcheck_validation:
+ for toolchain in toolchains:
+ for target in targets:
+ try:
+ mcu = TARGET_MAP[target]
+ # CMSIS and MBED libs analysis
+ static_analysis_scan(mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, verbose=options.verbose, jobs=options.jobs)
+ for lib_id in libraries:
+ # Static check for library
+ static_analysis_scan_lib(lib_id, mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
+ options=options.options,
+ extra_verbose=options.extra_verbose_notify, verbose=options.verbose, jobs=options.jobs, clean=options.clean,
+ macros=options.macros)
+ pass
+ except Exception, e:
+ if options.verbose:
+ import traceback
+ traceback.print_exc(file=sys.stdout)
+ sys.exit(1)
+ print e
+ else:
+ # Build
+ for toolchain in toolchains:
+ for target in targets:
+ tt_id = "%s::%s" % (toolchain, target)
+ try:
+ mcu = TARGET_MAP[target]
+ lib_build_res = build_mbed_libs(mcu, toolchain,
+ options=options.options,
+ extra_verbose=options.extra_verbose_notify,
+ verbose=options.verbose,
+ silent=options.silent,
+ jobs=options.jobs,
+ clean=options.clean,
+ macros=options.macros)
+ for lib_id in libraries:
+ build_lib(lib_id, mcu, toolchain,
+ options=options.options,
+ extra_verbose=options.extra_verbose_notify,
+ verbose=options.verbose,
+ silent=options.silent,
+ clean=options.clean,
+ macros=options.macros,
+ jobs=options.jobs)
+ if lib_build_res:
+ successes.append(tt_id)
+ else:
+ skipped.append(tt_id)
+ except Exception, e:
+ if options.verbose:
+ import traceback
+ traceback.print_exc(file=sys.stdout)
+ sys.exit(1)
+ failures.append(tt_id)
+ print e
+
+ # Write summary of the builds
+ print
+ print "Completed in: (%.2f)s" % (time() - start)
+ print
+
+ for report, report_name in [(successes, "Build successes:"),
+ (skipped, "Build skipped:"),
+ (failures, "Build failures:"),
+ ]:
+ if report:
+ print print_build_results(report, report_name),
+
+ if failures:
+ sys.exit(1)
diff --git a/tools/build_api.py b/tools/build_api.py
new file mode 100644
index 0000000..6a19eb0
--- /dev/null
+++ b/tools/build_api.py
@@ -0,0 +1,736 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import tempfile
+import colorama
+
+
+from types import ListType
+from shutil import rmtree
+from os.path import join, exists, basename
+from time import time
+
+from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException
+from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
+from tools.targets import TARGET_NAMES, TARGET_MAP
+from tools.libraries import Library
+from tools.toolchains import TOOLCHAIN_CLASSES
+from jinja2 import FileSystemLoader
+from jinja2.environment import Environment
+
+
+def prep_report(report, target_name, toolchain_name, id_name):
+ # Setup report keys
+ if not target_name in report:
+ report[target_name] = {}
+
+ if not toolchain_name in report[target_name]:
+ report[target_name][toolchain_name] = {}
+
+ if not id_name in report[target_name][toolchain_name]:
+ report[target_name][toolchain_name][id_name] = []
+
+def prep_properties(properties, target_name, toolchain_name, vendor_label):
+ # Setup test properties
+ if not target_name in properties:
+ properties[target_name] = {}
+
+ if not toolchain_name in properties[target_name]:
+ properties[target_name][toolchain_name] = {}
+
+ properties[target_name][toolchain_name]["target"] = target_name
+ properties[target_name][toolchain_name]["vendor"] = vendor_label
+ properties[target_name][toolchain_name]["toolchain"] = toolchain_name
+
+def create_result(target_name, toolchain_name, id_name, description):
+ cur_result = {}
+ cur_result["target_name"] = target_name
+ cur_result["toolchain_name"] = toolchain_name
+ cur_result["id"] = id_name
+ cur_result["description"] = description
+ cur_result["elapsed_time"] = 0
+ cur_result["output"] = ""
+
+ return cur_result
+
+def add_result_to_report(report, result):
+ target = result["target_name"]
+ toolchain = result["toolchain_name"]
+ id_name = result['id']
+ result_wrap = { 0: result }
+ report[target][toolchain][id_name].append(result_wrap)
+
+def build_project(src_path, build_path, target, toolchain_name,
+ libraries_paths=None, options=None, linker_script=None,
+ clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
+ jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None, extra_verbose=False):
+ """ This function builds project. Project can be for example one test / UT
+ """
+ # Toolchain instance
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
+ toolchain.VERBOSE = verbose
+ toolchain.jobs = jobs
+ toolchain.build_all = clean
+ src_paths = [src_path] if type(src_path) != ListType else src_path
+
+ # We need to remove all paths which are repeated to avoid
+ # multiple compilations and linking with the same objects
+ src_paths = [src_paths[0]] + list(set(src_paths[1:]))
+ PROJECT_BASENAME = basename(src_paths[0])
+
+ if name is None:
+ # We will use default project name based on project folder name
+ name = PROJECT_BASENAME
+ toolchain.info("Building project %s (%s, %s)" % (PROJECT_BASENAME.upper(), target.name, toolchain_name))
+ else:
+        # User used custom global project name to have the same name for the project
+ toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name))
+
+
+ if report != None:
+ start = time()
+ id_name = project_id.upper()
+ description = project_description
+ vendor_label = target.extra_labels[0]
+ cur_result = None
+ prep_report(report, target.name, toolchain_name, id_name)
+ cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+ if properties != None:
+ prep_properties(properties, target.name, toolchain_name, vendor_label)
+
+ try:
+ # Scan src_path and libraries_paths for resources
+ resources = toolchain.scan_resources(src_paths[0])
+ for path in src_paths[1:]:
+ resources.add(toolchain.scan_resources(path))
+ if libraries_paths is not None:
+ src_paths.extend(libraries_paths)
+ for path in libraries_paths:
+ resources.add(toolchain.scan_resources(path))
+
+ if linker_script is not None:
+ resources.linker_script = linker_script
+
+ # Build Directory
+ if clean:
+ if exists(build_path):
+ rmtree(build_path)
+ mkdir(build_path)
+
+ # We need to add if necessary additional include directories
+ if inc_dirs:
+ if type(inc_dirs) == ListType:
+ resources.inc_dirs.extend(inc_dirs)
+ else:
+ resources.inc_dirs.append(inc_dirs)
+ # Compile Sources
+ for path in src_paths:
+ src = toolchain.scan_resources(path)
+ objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
+ resources.objects.extend(objects)
+
+
+ # Link Program
+ res, needed_update = toolchain.link_program(resources, build_path, name)
+
+ if report != None and needed_update:
+ end = time()
+ cur_result["elapsed_time"] = end - start
+ cur_result["output"] = toolchain.get_output()
+ cur_result["result"] = "OK"
+
+ add_result_to_report(report, cur_result)
+
+ return res
+
+ except Exception, e:
+ if report != None:
+ end = time()
+
+ if isinstance(e, NotSupportedException):
+ cur_result["result"] = "NOT_SUPPORTED"
+ else:
+ cur_result["result"] = "FAIL"
+
+ cur_result["elapsed_time"] = end - start
+
+ toolchain_output = toolchain.get_output()
+ if toolchain_output:
+ cur_result["output"] += toolchain_output
+
+ cur_result["output"] += str(e)
+
+ add_result_to_report(report, cur_result)
+
+ # Let Exception propagate
+ raise e
+
+
+def build_library(src_paths, build_path, target, toolchain_name,
+ dependencies_paths=None, options=None, name=None, clean=False,
+ notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None,
+ jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+ """ src_path: the path of the source directory
+ build_path: the path of the build directory
+ target: ['LPC1768', 'LPC11U24', 'LPC2368']
+ toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
+ library_paths: List of paths to additional libraries
+ clean: Rebuild everything if True
+ notify: Notify function for logs
+ verbose: Write the actual tools command lines if True
+ inc_dirs: additional include directories which should be included in build
+ inc_dirs_ext: additional include directories which should be copied to library directory
+ """
+ if type(src_paths) != ListType:
+ src_paths = [src_paths]
+
+ # The first path will give the name to the library
+ name = basename(src_paths[0])
+
+ if report != None:
+ start = time()
+ id_name = name.upper()
+ description = name
+ vendor_label = target.extra_labels[0]
+ cur_result = None
+ prep_report(report, target.name, toolchain_name, id_name)
+ cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+ if properties != None:
+ prep_properties(properties, target.name, toolchain_name, vendor_label)
+
+ for src_path in src_paths:
+ if not exists(src_path):
+ error_msg = "The library source folder does not exist: %s", src_path
+
+ if report != None:
+ cur_result["output"] = error_msg
+ cur_result["result"] = "FAIL"
+ add_result_to_report(report, cur_result)
+
+ raise Exception(error_msg)
+
+ try:
+ # Toolchain instance
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
+ toolchain.VERBOSE = verbose
+ toolchain.jobs = jobs
+ toolchain.build_all = clean
+
+ toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+
+ # Scan Resources
+ resources = []
+ for src_path in src_paths:
+ resources.append(toolchain.scan_resources(src_path))
+
+ # Add extra include directories / files which are required by library
+    # These files usually are not in the same directory as source files so
+ # previous scan will not include them
+ if inc_dirs_ext is not None:
+ for inc_ext in inc_dirs_ext:
+ resources.append(toolchain.scan_resources(inc_ext))
+
+ # Dependencies Include Paths
+ dependencies_include_dir = []
+ if dependencies_paths is not None:
+ for path in dependencies_paths:
+ lib_resources = toolchain.scan_resources(path)
+ dependencies_include_dir.extend(lib_resources.inc_dirs)
+
+ if inc_dirs:
+ dependencies_include_dir.extend(inc_dirs)
+
+ # Create the desired build directory structure
+ bin_path = join(build_path, toolchain.obj_path)
+ mkdir(bin_path)
+ tmp_path = join(build_path, '.temp', toolchain.obj_path)
+ mkdir(tmp_path)
+
+ # Copy Headers
+ for resource in resources:
+ toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
+ dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+
+ # Compile Sources
+ objects = []
+ for resource in resources:
+ objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
+
+ needed_update = toolchain.build_library(objects, bin_path, name)
+
+ if report != None and needed_update:
+ end = time()
+ cur_result["elapsed_time"] = end - start
+ cur_result["output"] = toolchain.get_output()
+ cur_result["result"] = "OK"
+
+ add_result_to_report(report, cur_result)
+
+ except Exception, e:
+ if report != None:
+ end = time()
+ cur_result["result"] = "FAIL"
+ cur_result["elapsed_time"] = end - start
+
+ toolchain_output = toolchain.get_output()
+ if toolchain_output:
+ cur_result["output"] += toolchain_output
+
+ cur_result["output"] += str(e)
+
+ add_result_to_report(report, cur_result)
+
+ # Let Exception propagate
+ raise e
+
+def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+ """ Wrapper for build_library function.
+ Function builds library in proper directory using all dependencies and macros defined by user.
+ """
+ lib = Library(lib_id)
+ if lib.is_supported(target, toolchain):
+ # We need to combine macros from parameter list with macros from library definition
+ MACROS = lib.macros if lib.macros else []
+ if macros:
+ MACROS.extend(macros)
+
+ return build_library(lib.source_dir, lib.build_dir, target, toolchain, lib.dependencies, options,
+ verbose=verbose,
+ silent=silent,
+ clean=clean,
+ macros=MACROS,
+ notify=notify,
+ inc_dirs=lib.inc_dirs,
+ inc_dirs_ext=lib.inc_dirs_ext,
+ jobs=jobs,
+ report=report,
+ properties=properties,
+ extra_verbose=extra_verbose)
+ else:
+ print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
+ return False
+
+
+# We do have unique legacy conventions about how we build and package the mbed library
+def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
+    """ Function returns True if library was built and False if building was skipped """
+
+ if report != None:
+ start = time()
+ id_name = "MBED"
+ description = "mbed SDK"
+ vendor_label = target.extra_labels[0]
+ cur_result = None
+ prep_report(report, target.name, toolchain_name, id_name)
+ cur_result = create_result(target.name, toolchain_name, id_name, description)
+
+ if properties != None:
+ prep_properties(properties, target.name, toolchain_name, vendor_label)
+
+ # Check toolchain support
+ if toolchain_name not in target.supported_toolchains:
+ supported_toolchains_text = ", ".join(target.supported_toolchains)
+ print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
+ print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')
+
+ if report != None:
+ cur_result["result"] = "SKIP"
+ add_result_to_report(report, cur_result)
+
+ return False
+
+ try:
+ # Toolchain
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
+ toolchain.VERBOSE = verbose
+ toolchain.jobs = jobs
+ toolchain.build_all = clean
+
+ # Source and Build Paths
+ BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
+ BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
+ mkdir(BUILD_TOOLCHAIN)
+
+ TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+ mkdir(TMP_PATH)
+
+ # CMSIS
+ toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
+ cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
+ resources = toolchain.scan_resources(cmsis_src)
+
+ toolchain.copy_files(resources.headers, BUILD_TARGET)
+ toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
+ toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
+
+ objects = toolchain.compile_sources(resources, TMP_PATH)
+ toolchain.copy_files(objects, BUILD_TOOLCHAIN)
+
+ # mbed
+ toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+
+ # Common Headers
+ toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
+ toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+
+ # Target specific sources
+ HAL_SRC = join(MBED_TARGETS_PATH, "hal")
+ hal_implementation = toolchain.scan_resources(HAL_SRC)
+ toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
+ incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
+ objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+
+ # Common Sources
+ mbed_resources = toolchain.scan_resources(MBED_COMMON)
+ objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
+
+ # A number of compiled files need to be copied as objects as opposed to
+ # being part of the mbed library, for reasons that have to do with the way
+        # the linker searches for symbols in archives. These are:
+ # - retarget.o: to make sure that the C standard lib symbols get overridden
+ # - board.o: mbed_die is weak
+ # - mbed_overrides.o: this contains platform overrides of various weak SDK functions
+ separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []
+
+ for o in objects:
+ for name in separate_names:
+ if o.endswith(name):
+ separate_objects.append(o)
+
+ for o in separate_objects:
+ objects.remove(o)
+
+ needed_update = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
+
+ for o in separate_objects:
+ toolchain.copy_files(o, BUILD_TOOLCHAIN)
+
+ if report != None and needed_update:
+ end = time()
+ cur_result["elapsed_time"] = end - start
+ cur_result["output"] = toolchain.get_output()
+ cur_result["result"] = "OK"
+
+ add_result_to_report(report, cur_result)
+
+ return True
+
+ except Exception, e:
+ if report != None:
+ end = time()
+ cur_result["result"] = "FAIL"
+ cur_result["elapsed_time"] = end - start
+
+ toolchain_output = toolchain.get_output()
+ if toolchain_output:
+ cur_result["output"] += toolchain_output
+
+ cur_result["output"] += str(e)
+
+ add_result_to_report(report, cur_result)
+
+ # Let Exception propagate
+ raise e
+
+def get_unique_supported_toolchains():
+ """ Get list of all unique toolchains supported by targets """
+ unique_supported_toolchains = []
+ for target in TARGET_NAMES:
+ for toolchain in TARGET_MAP[target].supported_toolchains:
+ if toolchain not in unique_supported_toolchains:
+ unique_supported_toolchains.append(toolchain)
+ return unique_supported_toolchains
+
+
+def mcu_toolchain_matrix(verbose_html=False, platform_filter=None):
+ """ Shows target map using prettytable """
+ unique_supported_toolchains = get_unique_supported_toolchains()
+ from prettytable import PrettyTable # Only use it in this function so building works without extra modules
+
+ # All tests status table print
+ columns = ["Platform"] + unique_supported_toolchains
+ pt = PrettyTable(["Platform"] + unique_supported_toolchains)
+ # Align table
+ for col in columns:
+ pt.align[col] = "c"
+ pt.align["Platform"] = "l"
+
+ perm_counter = 0
+ target_counter = 0
+ for target in sorted(TARGET_NAMES):
+ if platform_filter is not None:
+            # Filter out platforms using regex
+ if re.search(platform_filter, target) is None:
+ continue
+ target_counter += 1
+
+ row = [target] # First column is platform name
+ default_toolchain = TARGET_MAP[target].default_toolchain
+ for unique_toolchain in unique_supported_toolchains:
+ text = "-"
+ if default_toolchain == unique_toolchain:
+ text = "Default"
+ perm_counter += 1
+ elif unique_toolchain in TARGET_MAP[target].supported_toolchains:
+ text = "Supported"
+ perm_counter += 1
+ row.append(text)
+ pt.add_row(row)
+
+ result = pt.get_html_string() if verbose_html else pt.get_string()
+ result += "\n"
+ result += "*Default - default on-line compiler\n"
+ result += "*Supported - supported off-line compiler\n"
+ result += "\n"
+ result += "Total platforms: %d\n"% (target_counter)
+ result += "Total permutations: %d"% (perm_counter)
+ return result
+
+
+def get_target_supported_toolchains(target):
+ """ Returns target supported toolchains list """
+ return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP else None
+
+
+def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
+ # Toolchain
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
+ toolchain.VERBOSE = verbose
+ toolchain.jobs = jobs
+ toolchain.build_all = clean
+
+ # Source and Build Paths
+ BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
+ BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
+ mkdir(BUILD_TOOLCHAIN)
+
+ TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
+ mkdir(TMP_PATH)
+
+ # CMSIS
+ toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
+ cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
+ resources = toolchain.scan_resources(cmsis_src)
+
+ # Copy files before analysis
+ toolchain.copy_files(resources.headers, BUILD_TARGET)
+ toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
+
+ # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+ includes = ["-I%s"% i for i in resources.inc_dirs]
+ includes.append("-I%s"% str(BUILD_TARGET))
+ c_sources = " ".join(resources.c_sources)
+ cpp_sources = " ".join(resources.cpp_sources)
+ macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
+
+ includes = map(str.strip, includes)
+ macros = map(str.strip, macros)
+
+ check_cmd = CPPCHECK_CMD
+ check_cmd += CPPCHECK_MSG_FORMAT
+ check_cmd += includes
+ check_cmd += macros
+
+ # We need to pass some params via file to avoid "command line too long in some OSs"
+ tmp_file = tempfile.NamedTemporaryFile(delete=False)
+ tmp_file.writelines(line + '\n' for line in c_sources.split())
+ tmp_file.writelines(line + '\n' for line in cpp_sources.split())
+ tmp_file.close()
+ check_cmd += ["--file-list=%s"% tmp_file.name]
+
+ _stdout, _stderr, _rc = run_cmd(check_cmd)
+ if verbose:
+ print _stdout
+ print _stderr
+
+ # =========================================================================
+
+ # MBED
+ toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))
+
+ # Common Headers
+ toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
+ toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
+
+ # Target specific sources
+ HAL_SRC = join(MBED_TARGETS_PATH, "hal")
+ hal_implementation = toolchain.scan_resources(HAL_SRC)
+
+ # Copy files before analysis
+ toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files, BUILD_TARGET, HAL_SRC)
+ incdirs = toolchain.scan_resources(BUILD_TARGET)
+
+ target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
+ target_includes.append("-I%s"% str(BUILD_TARGET))
+ target_includes.append("-I%s"% str(HAL_SRC))
+ target_c_sources = " ".join(incdirs.c_sources)
+ target_cpp_sources = " ".join(incdirs.cpp_sources)
+ target_macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
+
+ # Common Sources
+ mbed_resources = toolchain.scan_resources(MBED_COMMON)
+
+ # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+ mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
+ mbed_includes.append("-I%s"% str(BUILD_TARGET))
+ mbed_includes.append("-I%s"% str(MBED_COMMON))
+ mbed_includes.append("-I%s"% str(MBED_API))
+ mbed_includes.append("-I%s"% str(MBED_HAL))
+ mbed_c_sources = " ".join(mbed_resources.c_sources)
+ mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
+
+ target_includes = map(str.strip, target_includes)
+ mbed_includes = map(str.strip, mbed_includes)
+ target_macros = map(str.strip, target_macros)
+
+ check_cmd = CPPCHECK_CMD
+ check_cmd += CPPCHECK_MSG_FORMAT
+ check_cmd += target_includes
+ check_cmd += mbed_includes
+ check_cmd += target_macros
+
+    # We need to pass some params via file to avoid "command line too long in some OSs"
+ tmp_file = tempfile.NamedTemporaryFile(delete=False)
+ tmp_file.writelines(line + '\n' for line in target_c_sources.split())
+ tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
+ tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
+ tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
+ tmp_file.close()
+ check_cmd += ["--file-list=%s"% tmp_file.name]
+
+ _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
+ if verbose:
+ print _stdout
+ print _stderr
+
+
+def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
+ options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
+ lib = Library(lib_id)
+ if lib.is_supported(target, toolchain):
+ static_analysis_scan_library(lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
+ lib.dependencies, options,
+ verbose=verbose, clean=clean, macros=macros, notify=notify, jobs=jobs, extra_verbose=extra_verbose)
+ else:
+ print 'Library "%s" is not yet supported on target %s with toolchain %s'% (lib_id, target.name, toolchain)
+
+
+def static_analysis_scan_library(src_paths, build_path, target, toolchain_name, cppcheck_cmd, cppcheck_msg_format,
+ dependencies_paths=None, options=None, name=None, clean=False,
+ notify=None, verbose=False, macros=None, jobs=1, extra_verbose=False):
+    """ Function scans library (or just some set of sources/headers) for statically detectable defects """
+ if type(src_paths) != ListType:
+ src_paths = [src_paths]
+
+ for src_path in src_paths:
+ if not exists(src_path):
+ raise Exception("The library source folder does not exist: %s", src_path)
+
+ # Toolchain instance
+ toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
+ toolchain.VERBOSE = verbose
+ toolchain.jobs = jobs
+
+ # The first path will give the name to the library
+ name = basename(src_paths[0])
+ toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
+
+ # Scan Resources
+ resources = []
+ for src_path in src_paths:
+ resources.append(toolchain.scan_resources(src_path))
+
+ # Dependencies Include Paths
+ dependencies_include_dir = []
+ if dependencies_paths is not None:
+ for path in dependencies_paths:
+ lib_resources = toolchain.scan_resources(path)
+ dependencies_include_dir.extend(lib_resources.inc_dirs)
+
+ # Create the desired build directory structure
+ bin_path = join(build_path, toolchain.obj_path)
+ mkdir(bin_path)
+ tmp_path = join(build_path, '.temp', toolchain.obj_path)
+ mkdir(tmp_path)
+
+ # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
+ includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
+ c_sources = " "
+ cpp_sources = " "
+ macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]
+
+ # Copy Headers
+ for resource in resources:
+ toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
+ includes += ["-I%s" % i for i in resource.inc_dirs]
+ c_sources += " ".join(resource.c_sources) + " "
+ cpp_sources += " ".join(resource.cpp_sources) + " "
+
+ dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
+
+ includes = map(str.strip, includes)
+ macros = map(str.strip, macros)
+
+ check_cmd = cppcheck_cmd
+ check_cmd += cppcheck_msg_format
+ check_cmd += includes
+ check_cmd += macros
+
+ # We need to pass some parameters via file to avoid "command line too long in some OSs"
+ # Temporary file is created to store e.g. cppcheck list of files for command line
+ tmp_file = tempfile.NamedTemporaryFile(delete=False)
+ tmp_file.writelines(line + '\n' for line in c_sources.split())
+ tmp_file.writelines(line + '\n' for line in cpp_sources.split())
+ tmp_file.close()
+ check_cmd += ["--file-list=%s"% tmp_file.name]
+
+ # This will allow us to grab result from both stdio and stderr outputs (so we can show them)
+ # We assume static code analysis tool is outputting defects on STDERR
+ _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
+ if verbose:
+ print _stdout
+ print _stderr
+
+
+def print_build_results(result_list, build_name):
+ """ Generate result string for build results """
+ result = ""
+ if len(result_list) > 0:
+ result += build_name + "\n"
+ result += "\n".join([" * %s" % f for f in result_list])
+ result += "\n"
+ return result
+
+def write_build_report(build_report, template_filename, filename):
+ build_report_failing = []
+ build_report_passing = []
+
+ for report in build_report:
+ if len(report["failing"]) > 0:
+ build_report_failing.append(report)
+ else:
+ build_report_passing.append(report)
+
+ env = Environment(extensions=['jinja2.ext.with_'])
+ env.loader = FileSystemLoader('ci_templates')
+ template = env.get_template(template_filename)
+
+ with open(filename, 'w+') as f:
+ f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
diff --git a/tools/build_release.py b/tools/build_release.py
new file mode 100644
index 0000000..d9d3683
--- /dev/null
+++ b/tools/build_release.py
@@ -0,0 +1,295 @@
+#! /usr/bin/env python
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import sys
+from time import time
+from os.path import join, abspath, dirname, normpath
+from optparse import OptionParser
+import json
+
+# Be sure that the tools directory is in the search path
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+from tools.build_api import build_mbed_libs
+from tools.build_api import write_build_report
+from tools.targets import TARGET_MAP, TARGET_NAMES
+from tools.test_exporters import ReportExporter, ResultExporterType
+from tools.test_api import SingleTestRunner
+from tools.test_api import singletest_in_cli_mode
+from tools.paths import TEST_DIR
+from tools.tests import TEST_MAP
+
+OFFICIAL_MBED_LIBRARY_BUILD = (
+ ('LPC11U24', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
+ ('LPC1768', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('UBLOX_C027', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('ARCH_PRO', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('LPC2368', ('ARM', 'GCC_ARM')),
+ ('LPC2460', ('GCC_ARM',)),
+ ('LPC812', ('uARM','IAR')),
+ ('LPC824', ('uARM', 'GCC_ARM', 'IAR', 'GCC_CR')),
+ ('SSCI824', ('uARM','GCC_ARM')),
+ ('LPC1347', ('ARM','IAR')),
+ ('LPC4088', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('LPC4088_DM', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('LPC1114', ('uARM','GCC_ARM', 'GCC_CR', 'IAR')),
+ ('LPC11U35_401', ('ARM', 'uARM','GCC_ARM','GCC_CR', 'IAR')),
+ ('LPC11U35_501', ('ARM', 'uARM','GCC_ARM','GCC_CR', 'IAR')),
+ ('LPC1549', ('uARM','GCC_ARM','GCC_CR', 'IAR')),
+ ('XADOW_M0', ('ARM', 'uARM','GCC_ARM','GCC_CR')),
+ ('ARCH_GPRS', ('ARM', 'uARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('LPC4337', ('ARM',)),
+ ('LPC11U37H_401', ('ARM', 'uARM','GCC_ARM','GCC_CR')),
+ ('MICRONFCBOARD', ('ARM', 'uARM','GCC_ARM')),
+
+ ('KL05Z', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
+ ('KL25Z', ('ARM', 'GCC_ARM', 'IAR')),
+ ('KL27Z', ('ARM', 'GCC_ARM', 'IAR')),
+ ('KL43Z', ('ARM', 'GCC_ARM')),
+ ('KL46Z', ('ARM', 'GCC_ARM', 'IAR')),
+ ('K64F', ('ARM', 'GCC_ARM', 'IAR')),
+ ('K22F', ('ARM', 'GCC_ARM', 'IAR')),
+ ('K20D50M', ('ARM', 'GCC_ARM' , 'IAR')),
+ ('TEENSY3_1', ('ARM', 'GCC_ARM')),
+
+ ('B96B_F446VE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F030R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F031K6', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F042K6', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F070RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F072RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F091RC', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F103RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F302R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F303K8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F303RE', ('ARM', 'uARM', 'IAR')),
+ ('NUCLEO_F334R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F401RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F410RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F411RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F446RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('ELMO_F411RE', ('ARM', 'uARM', 'GCC_ARM')),
+ ('NUCLEO_L053R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_L152RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('MTS_MDOT_F405RG', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('MTS_MDOT_F411RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('MTS_DRAGONFLY_F411RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('DISCO_L053C8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('DISCO_F334C8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('DISCO_F429ZI', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('DISCO_F469NI', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('DISCO_F746NG', ('ARM', 'uARM', 'GCC_ARM','IAR')),
+ ('DISCO_L476VG', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_L476RG', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+ ('NUCLEO_F746ZG', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
+ ('NUCLEO_L031K6', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
+ ('NUCLEO_L073RZ', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
+
+ ('MOTE_L152RC', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
+
+ ('ARCH_MAX', ('ARM', 'GCC_ARM')),
+
+ ('NRF51822', ('ARM', 'GCC_ARM', 'IAR')),
+ ('NRF51_DK', ('ARM', 'GCC_ARM', 'IAR')),
+ ('NRF51_DONGLE', ('ARM', 'GCC_ARM', 'IAR')),
+ ('HRM1017', ('ARM', 'GCC_ARM', 'IAR')),
+ ('ARCH_BLE', ('ARM', 'GCC_ARM', 'IAR')),
+ ('SEEED_TINY_BLE', ('ARM', 'GCC_ARM', 'IAR')),
+ ('RBLAB_NRF51822', ('ARM', 'GCC_ARM')),
+ ('RBLAB_BLENANO', ('ARM', 'GCC_ARM')),
+ ('WALLBOT_BLE', ('ARM', 'GCC_ARM')),
+ ('DELTA_DFCM_NNN40', ('ARM', 'GCC_ARM')),
+ ('NRF51_MICROBIT', ('ARM','GCC_ARM')),
+ ('NRF51_MICROBIT_B', ('ARM',)),
+ ('TY51822R3', ('ARM', 'GCC_ARM')),
+
+ ('LPC11U68', ('ARM', 'uARM','GCC_ARM','GCC_CR', 'IAR')),
+ ('OC_MBUINO', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
+
+ ('ARM_MPS2_M0' , ('ARM',)),
+ ('ARM_MPS2_M0P' , ('ARM',)),
+ ('ARM_MPS2_M3' , ('ARM',)),
+ ('ARM_MPS2_M4' , ('ARM',)),
+ ('ARM_MPS2_M7' , ('ARM',)),
+ ('ARM_IOTSS_BEID' , ('ARM',)),
+
+ ('RZ_A1H' , ('ARM', 'GCC_ARM')),
+
+ ('EFM32ZG_STK3200', ('GCC_ARM', 'uARM')),
+ ('EFM32HG_STK3400', ('GCC_ARM', 'uARM')),
+ ('EFM32LG_STK3600', ('ARM', 'GCC_ARM', 'uARM')),
+ ('EFM32GG_STK3700', ('ARM', 'GCC_ARM', 'uARM')),
+ ('EFM32WG_STK3800', ('ARM', 'GCC_ARM', 'uARM')),
+ ('EFM32PG_STK3401', ('ARM', 'GCC_ARM', 'uARM')),
+
+ ('MAXWSNENV', ('ARM', 'GCC_ARM', 'IAR')),
+ ('MAX32600MBED', ('ARM', 'GCC_ARM', 'IAR')),
+
+ ('WIZWIKI_W7500', ('ARM', 'uARM')),
+ ('WIZWIKI_W7500P',('ARM', 'uARM')),
+ ('WIZWIKI_W7500ECO',('ARM', 'uARM')),
+
+ ('SAMR21G18A',('ARM', 'uARM', 'GCC_ARM')),
+ ('SAMD21J18A',('ARM', 'uARM', 'GCC_ARM')),
+ ('SAMD21G18A',('ARM', 'uARM', 'GCC_ARM')),
+
+)
+
+
+if __name__ == '__main__':
+ parser = OptionParser()
+ parser.add_option('-o', '--official', dest="official_only", default=False, action="store_true",
+ help="Build using only the official toolchain for each target")
+ parser.add_option("-j", "--jobs", type="int", dest="jobs",
+ default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
+ parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
+ default=False, help="Verbose diagnostic output")
+ parser.add_option("-t", "--toolchains", dest="toolchains", help="Use toolchains names separated by comma")
+
+ parser.add_option("-p", "--platforms", dest="platforms", default="", help="Build only for the platform names separated by comma")
+
+ parser.add_option("-L", "--list-config", action="store_true", dest="list_config",
+ default=False, help="List the platforms and toolchains in the release in JSON")
+
+ parser.add_option("", "--report-build", dest="report_build_file_name", help="Output the build results to an junit xml file")
+
+ parser.add_option("", "--build-tests", dest="build_tests", help="Build all tests in the given directories (relative to /libraries/tests)")
+
+
+ options, args = parser.parse_args()
+
+
+
+ if options.list_config:
+ print json.dumps(OFFICIAL_MBED_LIBRARY_BUILD, indent=4)
+ sys.exit()
+
+ start = time()
+ build_report = {}
+ build_properties = {}
+
+ platforms = None
+ if options.platforms != "":
+ platforms = set(options.platforms.split(","))
+
+ if options.build_tests:
+ # Get all paths
+ directories = options.build_tests.split(',')
+ for i in range(len(directories)):
+ directories[i] = normpath(join(TEST_DIR, directories[i]))
+
+ test_names = []
+
+ for test_id in TEST_MAP.keys():
+ # Prevents tests with multiple source dirs from being checked
+ if isinstance( TEST_MAP[test_id].source_dir, basestring):
+ test_path = normpath(TEST_MAP[test_id].source_dir)
+ for directory in directories:
+ if directory in test_path:
+ test_names.append(test_id)
+
+ mut_counter = 1
+ mut = {}
+ test_spec = {
+ "targets": {}
+ }
+
+ if options.toolchains:
+ print "Only building using the following toolchains: %s" % (options.toolchains)
+
+ for target_name, toolchain_list in OFFICIAL_MBED_LIBRARY_BUILD:
+ toolchains = None
+ if platforms is not None and not target_name in platforms:
+ print("Excluding %s from release" % target_name)
+ continue
+
+ if target_name not in TARGET_NAMES:
+ print "Target '%s' is not a valid target. Excluding from release" % target_name
+ continue
+
+ if options.official_only:
+ toolchains = (getattr(TARGET_MAP[target_name], 'default_toolchain', 'ARM'),)
+ else:
+ toolchains = toolchain_list
+
+ if options.toolchains:
+ toolchainSet = set(toolchains)
+ toolchains = toolchainSet.intersection(set((options.toolchains).split(',')))
+
+ mut[str(mut_counter)] = {
+ "mcu": target_name
+ }
+
+ mut_counter += 1
+
+ test_spec["targets"][target_name] = toolchains
+
+ single_test = SingleTestRunner(_muts=mut,
+ _opts_report_build_file_name=options.report_build_file_name,
+ _test_spec=test_spec,
+ _opts_test_by_names=",".join(test_names),
+ _opts_verbose=options.verbose,
+ _opts_only_build_tests=True,
+ _opts_suppress_summary=True,
+ _opts_jobs=options.jobs,
+ _opts_include_non_automated=True,
+ _opts_build_report=build_report,
+ _opts_build_properties=build_properties)
+ # Runs test suite in CLI mode
+ test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, new_build_report, new_build_properties = single_test.execute()
+ else:
+ for target_name, toolchain_list in OFFICIAL_MBED_LIBRARY_BUILD:
+ if platforms is not None and not target_name in platforms:
+ print("Excluding %s from release" % target_name)
+ continue
+
+ if target_name not in TARGET_NAMES:
+ print "Target '%s' is not a valid target. Excluding from release" % target_name
+ continue
+
+ if options.official_only:
+ toolchains = (getattr(TARGET_MAP[target_name], 'default_toolchain', 'ARM'),)
+ else:
+ toolchains = toolchain_list
+
+ if options.toolchains:
+ print "Only building using the following toolchains: %s" % (options.toolchains)
+ toolchainSet = set(toolchains)
+ toolchains = toolchainSet.intersection(set((options.toolchains).split(',')))
+
+ for toolchain in toolchains:
+ id = "%s::%s" % (target_name, toolchain)
+
+ try:
+ built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs, report=build_report, properties=build_properties)
+
+ except Exception, e:
+ print str(e)
+
+ # Write summary of the builds
+ if options.report_build_file_name:
+ file_report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
+ file_report_exporter.report_to_file(build_report, options.report_build_file_name, test_suite_properties=build_properties)
+
+ print "\n\nCompleted in: (%.2f)s" % (time() - start)
+
+ print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
+ status = print_report_exporter.report(build_report)
+
+ if not status:
+ sys.exit(1)
diff --git a/tools/build_travis.py b/tools/build_travis.py
new file mode 100644
index 0000000..7189dba
--- /dev/null
+++ b/tools/build_travis.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python2
+
+"""
+Travis-CI build script
+
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+import sys
+
+################################################################################
+# Configure builds here
+# "libs" can contain "dsp", "rtos", "eth", "usb_host", "usb", "ublox", "fat"
+
+build_list = (
+ { "target": "LPC1768", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "eth", "usb_host", "usb", "ublox", "fat"] },
+ { "target": "LPC2368", "toolchains": "GCC_ARM", "libs": ["fat"] },
+ { "target": "LPC2460", "toolchains": "GCC_ARM", "libs": ["rtos", "usb_host", "usb", "fat"] },
+ { "target": "LPC11U24", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "OC_MBUINO", "toolchains": "GCC_ARM", "libs": ["fat"] },
+
+ { "target": "LPC11U24_301", "toolchains": "GCC_ARM", "libs": ["fat"] },
+
+ { "target": "B96B_F446VE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_L053R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_L152RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F030R8", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_F031K6", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_F042K6", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_F070RB", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_F072RB", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F091RC", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F103RB", "toolchains": "GCC_ARM", "libs": ["rtos", "fat"] },
+ { "target": "NUCLEO_F302R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F303K8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F303RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F334R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F401RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F410RB", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NUCLEO_L476RG", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_L031K6", "toolchains": "GCC_ARM", "libs": ["dsp"] },
+ { "target": "NUCLEO_L073RZ", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NUCLEO_F446RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+
+ { "target": "MOTE_L152RC", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+
+ { "target": "ELMO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+
+ { "target": "MTS_MDOT_F405RG", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
+ { "target": "MTS_MDOT_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
+ { "target": "MTS_DRAGONFLY_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "ARCH_MAX", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+
+ { "target": "DISCO_F051R8", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "DISCO_F334C8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "DISCO_F401VC", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "DISCO_F407VG", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "DISCO_F429ZI", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "DISCO_F469NI", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "DISCO_F746NG", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+
+ { "target": "LPC1114", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "LPC11U35_401", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "UBLOX_C027", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "LPC11U35_501", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "LPC11U68", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "LPC11U37H_401", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+
+ { "target": "KL05Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "KL25Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
+ { "target": "KL27Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
+ { "target": "KL43Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
+ { "target": "KL46Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
+ { "target": "K20D50M", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "TEENSY3_1", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "K64F", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
+ { "target": "LPC4088", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
+ { "target": "ARCH_PRO", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "LPC1549", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NRF51822", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "DELTA_DFCM_NNN40", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "NRF51_DK", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+ { "target": "NRF51_MICROBIT", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
+
+ { "target": "EFM32ZG_STK3200", "toolchains": "GCC_ARM", "libs": ["dsp"] },
+ { "target": "EFM32HG_STK3400", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
+ { "target": "EFM32LG_STK3600", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
+ { "target": "EFM32GG_STK3700", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
+ { "target": "EFM32WG_STK3800", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
+ { "target": "EFM32PG_STK3401", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
+
+ { "target": "MAXWSNENV", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "MAX32600MBED", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+
+ { "target": "RZ_A1H", "toolchains": "GCC_ARM", "libs": ["fat"] },
+
+ { "target": "SAMR21G18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "SAMD21J18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "SAMD21G18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+ { "target": "SAML21J18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
+)
+
+################################################################################
+# Configure example test building (linking against external mbed SDK libraries like fat or rtos)
+
+linking_list = [
+ {"target": "LPC1768",
+ "toolchains": "GCC_ARM",
+ "tests": {"" : ["MBED_2", "MBED_10", "MBED_11", "MBED_15", "MBED_16", "MBED_17"],
+ "eth" : ["NET_1", "NET_2", "NET_3", "NET_4"],
+ "fat" : ["MBED_A12", "MBED_19", "PERF_1", "PERF_2", "PERF_3"],
+ "rtos" : ["RTOS_1", "RTOS_2", "RTOS_3"],
+ "usb" : ["USB_1", "USB_2" ,"USB_3"],
+ }
+ }
+ ]
+
+################################################################################
+
+# Driver
+
+def run_builds(dry_run):
+ for build in build_list:
+ toolchain_list = build["toolchains"]
+ if type(toolchain_list) != type([]): toolchain_list = [toolchain_list]
+ for toolchain in toolchain_list:
+ cmdline = "python workspace_tools/build.py -m %s -t %s -j 4 -c --silent "% (build["target"], toolchain)
+ libs = build.get("libs", [])
+ if libs:
+ cmdline = cmdline + " ".join(["--" + l for l in libs])
+ print "Executing: " + cmdline
+ if not dry_run:
+ if os.system(cmdline) != 0:
+ sys.exit(1)
+
+
+def run_test_linking(dry_run):
+ """ Function run make.py commands to build and link simple mbed SDK
+ tests against few libraries to make sure there are no simple linking errors.
+ """
+ for link in linking_list:
+ toolchain_list = link["toolchains"]
+ if type(toolchain_list) != type([]):
+ toolchain_list = [toolchain_list]
+ for toolchain in toolchain_list:
+ tests = link["tests"]
+ # Call make.py for each test group for particular library
+ for test_lib in tests:
+ test_names = tests[test_lib]
+ test_lib_switch = "--" + test_lib if test_lib else ""
+ cmdline = "python workspace_tools/make.py -m %s -t %s -c --silent %s -n %s " % (link["target"], toolchain, test_lib_switch, ",".join(test_names))
+ print "Executing: " + cmdline
+ if not dry_run:
+ if os.system(cmdline) != 0:
+ sys.exit(1)
+
+def run_test_testsuite(dry_run):
+ cmdline = "python workspace_tools/singletest.py --version"
+ print "Executing: " + cmdline
+ if not dry_run:
+ if os.system(cmdline) != 0:
+ sys.exit(1)
+
+if __name__ == "__main__":
+ run_builds("-s" in sys.argv)
+ run_test_linking("-s" in sys.argv)
+ run_test_testsuite("-s" in sys.argv)
diff --git a/tools/buildbot/master.cfg b/tools/buildbot/master.cfg
new file mode 100644
index 0000000..0a8a662
--- /dev/null
+++ b/tools/buildbot/master.cfg
@@ -0,0 +1,406 @@
+# -*- python -*-
+# ex: set syntax=python:
+
+# This is a sample buildmaster config file. It must be installed as
+# 'master.cfg' in your buildmaster's base directory.
+
+# This is the dictionary that the buildmaster pays attention to. We also use
+# a shorter alias to save typing.
+c = BuildmasterConfig = {}
+
+####### BUILDSLAVES
+
+# The 'slaves' list defines the set of recognized buildslaves. Each element is
+# a BuildSlave object, specifying a unique slave name and password. The same
+# slave name and password must be configured on the slave.
+from buildbot.buildslave import BuildSlave
+c['slaves'] = [BuildSlave("example-slave", "pass"),
+ BuildSlave("example-slave-2", "pass"),
+ BuildSlave("example-slave-KL25Z", "pass"),
+ BuildSlave("example-slave-LPC1768", "pass"),
+ BuildSlave("example-slave-LPC11U24", "pass"),
+ ]
+
+# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
+# This must match the value configured into the buildslaves (with their
+# --master option)
+c['slavePortnum'] = 9989
+
+####### OFFICIAL_MBED_LIBRARY_BUILD
+
+OFFICIAL_MBED_LIBRARY_BUILD = (
+ ('LPC1768', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ ('KL05Z', ('ARM', 'uARM', 'GCC_ARM')),
+ ('KL25Z', ('ARM', 'GCC_ARM')),
+ ('LPC11U24', ('ARM', 'uARM')),
+ ('KL46Z', ('ARM', 'GCC_ARM')),
+ ('LPC4088', ('ARM', 'GCC_ARM', 'GCC_CR')),
+ ('LPC1347', ('ARM',)),
+ ('LPC1549', ('uARM',)),
+ ('LPC2368', ('ARM',)),
+ ('LPC812', ('uARM',)),
+ ('LPC11U35_401', ('ARM', 'uARM')),
+ ('LPC1114', ('uARM',)),
+ ('NUCLEO_F103RB', ('ARM', 'uARM')),
+ ('NUCLEO_L152RE', ('ARM', 'uARM')),
+ ('NUCLEO_F401RE', ('ARM', 'uARM')),
+ ('NUCLEO_F030R8', ('ARM', 'uARM')),
+ ('UBLOX_C027', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
+ # ('NRF51822', ('ARM',)),
+)
+
+# Which hardware platforms are supported for target testing
+OFFICIAL_MBED_TESTBED_SUPPORTED_HARDWARE = (
+ # 'KL25Z',
+ # 'LPC1768',
+ # 'LPC11U24',
+)
+
+####### CHANGESOURCES
+
+# the 'change_source' setting tells the buildmaster how it should find out
+# about source code changes. Here we point to the buildbot clone of pyflakes.
+
+from buildbot.changes.gitpoller import GitPoller
+c['change_source'] = []
+"""
+c['change_source'].append(GitPoller(
+ 'git://github.com/buildbot/pyflakes.git',
+ workdir='gitpoller-workdir', branch='master',
+ pollinterval=300))
+"""
+####### SCHEDULERS
+
+# Configure the Schedulers, which decide how to react to incoming changes. In this
+# case, just kick off a 'runtests' build
+
+from buildbot.schedulers.basic import SingleBranchScheduler
+from buildbot.schedulers.forcesched import ForceScheduler
+from buildbot.changes import filter
+c['schedulers'] = []
+
+# Create builders to generate one target using all assigned toolchains
+release_builder_name = "BuildRelease"
+builder_names = [release_builder_name]
+for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD:
+ builder_name = "All_TC_%s" % target_name
+ builder_names.append(builder_name)
+c['schedulers'].append(ForceScheduler(name="force", builderNames=builder_names))
+
+####### BUILDERS
+
+# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
+# what steps, and which slaves can execute them. Note that any particular build will
+# only take place on one slave.
+
+from buildbot.process.factory import BuildFactory
+from buildbot.steps.source.git import Git
+from buildbot.steps.shell import ShellCommand
+from buildbot.process.buildstep import LogLineObserver
+import buildbot.status.results
+import re
+import pprint
+
+class TestCommand(ShellCommand):
+ failedTestsCount = 0 # FAIL
+ passedTestsCount = 0 # OK
+ errorsTestsCount = 0 # ERROR
+ undefsTestsCount = 0 # UNDEF
+ testsResults = []
+
+ def __init__(self, stage=None,module=None, moduleset=None, **kwargs):
+ ShellCommand.__init__(self, **kwargs)
+ self.failedTestsCount = 0
+ self.passedTestsCount = 0
+ self.errorsTestsCount = 0
+ self.tracebackPyCount = 0
+ self.testsResults = []
+ testFailuresObserver = UnitTestsObserver ()
+ self.addLogObserver('stdio', testFailuresObserver)
+
+ def createSummary(self, log):
+ if self.failedTestsCount >= 0 or self.passedTestsCount >= 0 or self.errorsTestsCount >= 0 or self.undefsTestsCount >= 0:
+ self.addHTMLLog ('tests summary', self.createTestsSummary())
+
+ def getText(self, cmd, results):
+ text = ShellCommand.getText(self, cmd, results)
+ text.append("OK: " + str(self.passedTestsCount))
+ text.append("FAIL: " + str(self.failedTestsCount))
+ text.append("ERROR: " + str(self.errorsTestsCount))
+ text.append("UNDEF: " + str(self.undefsTestsCount))
+ text.append("Traceback: " + str(self.tracebackPyCount))
+ return text
+
+ def evaluateCommand(self, cmd):
+ if self.failedTestsCount > 0:
+ return buildbot.status.results.WARNINGS
+ elif self.errorsTestsCount > 0 or self.undefsTestsCount > 0 or self.tracebackPyCount > 0:
+ return buildbot.status.results.FAILURE
+ return buildbot.status.results.SUCCESS
+
+ def find_unique_tc_result_value(self, index):
+ """ Get unique values from each row in data parameter """
+ result = []
+ for tc_result_list in self.testsResults:
+ if tc_result_list[index] not in result:
+ result.append(tc_result_list[index])
+ return result
+
+ def html_view_test_result(self, targets, tests, toolchain):
+ """ Generates simple result table """
+ COLOR_OK = "LimeGreen"
+ COLOR_FAIL = "LightCoral"
+ COLOR_UNDEF = "LightSlateGray"
+ COLOR_NEUTRAL = "Silver"
+
+ STATUS_COLORS = { "OK" : COLOR_OK,
+ "FAIL" : COLOR_FAIL,
+ "UNDEF" : COLOR_UNDEF}
+
+        result = "<table>"
+        result += "<tr><td>" + toolchain + "</td>"
+        for test in tests:
+            result += "<td>" + test + "</td>"
+        result += "</tr>"
+
+        for target in targets:
+            result += "<tr><td>" + target + "</td>"
+            for test in tests:
+                for tc_result_list in self.testsResults:
+                    if tc_result_list[1] == target and tc_result_list[2] == toolchain and tc_result_list[3] == test:
+                        status = tc_result_list[4]
+                        bgcolor = STATUS_COLORS[status]
+                        result += '<td bgcolor="' + bgcolor + '">' + status + "</td>"
+            result += "</tr>"
+        result += "</table>"
+        return result
+
+class BuildObserver(LogLineObserver):
+ regroupresult = []
+
+ def __init__(self):
+ LogLineObserver.__init__(self)
+ if len(self.regroupresult) == 0:
+ self.regroupresult.append(re.compile("^\[([Ww]arning)\] (.*)"))
+ self.regroupresult.append(re.compile("^\[([Ee]rror)\] (.*)"))
+
+ def outLineReceived(self, line):
+ matched = False
+ for r in self.regroupresult:
+ result = r.match(line)
+ if result:
+ self.step.testsResults.append(result.groups())
+ if result.group(1) == 'Warning':
+ self.step.warningsCount += 1
+ elif result.group(1) == 'Error':
+ self.step.errorsCount += 1
+ matched = True
+ #if not matched:
+ # [Future-Dev] Other check...
+
+
+####### BUILDERS - mbed project
+git_clone = Git(repourl='https://github.com/mbedmicro/mbed.git', mode='incremental')
+
+# create the build factory for mbed and add the steps to it
+from buildbot.config import BuilderConfig
+
+c['builders'] = []
+
+copy_private_settings = ShellCommand(name = "copy private_settings.py",
+ command = "cp ../private_settings.py workspace_tools/private_settings.py",
+ haltOnFailure = True,
+ description = "Copy private_settings.py")
+
+mbed_build_release = BuildFactory()
+mbed_build_release.addStep(git_clone)
+mbed_build_release.addStep(copy_private_settings)
+
+for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD:
+ builder_name = "All_TC_%s" % target_name
+ mbed_build = BuildFactory()
+ mbed_build.addStep(git_clone)
+ mbed_build.addStep(copy_private_settings)
+ # Adding all chains for target
+ for toolchain in toolchains:
+ build_py = BuildCommand(name = "Build %s using %s" % (target_name, toolchain),
+ command = "python workspace_tools/build.py -m %s -t %s" % (target_name, toolchain),
+ haltOnFailure = True,
+ warnOnWarnings = True,
+ description = "Building %s using %s" % (target_name, toolchain),
+ descriptionDone = "Built %s using %s" % (target_name, toolchain))
+
+ mbed_build.addStep(build_py)
+ mbed_build_release.addStep(build_py) # For build release we need all toolchains
+
+ if target_name in OFFICIAL_MBED_TESTBED_SUPPORTED_HARDWARE:
+ copy_example_test_spec_json = ShellCommand(name = "Copy example_test_spec.json",
+ command = "cp ../example_test_spec.json workspace_tools/data/example_test_spec.json",
+ haltOnFailure = True,
+ description = "Copy example_test_spec.json")
+
+ autotest_py = ShellCommand(name = "Running autotest.py for %s" % (target_name),
+ command = "python workspace_tools/autotest.py workspace_tools/data/example_test_spec.json",
+ haltOnFailure = True,
+ description = "Running autotest.py")
+
+ mbed_build.addStep(copy_example_test_spec_json)
+ mbed_build.addStep(autotest_py)
+
+ # Add builder with steps for each toolchain
+ c['builders'].append(BuilderConfig(name=builder_name,
+ slavenames=["example-slave-%s" % (target_name)],
+ factory=mbed_build))
+ else:
+ # Add builder with steps for each toolchain
+ c['builders'].append(BuilderConfig(name=builder_name,
+ slavenames=["example-slave"],
+ factory=mbed_build))
+
+# copy_example_test_spec_json = ShellCommand(name = "Copy example_test_spec.json",
+ # command = "cp ../example_test_spec.json workspace_tools/data/example_test_spec.json",
+ # haltOnFailure = True,
+ # description = "Copy example_test_spec.json")
+
+singletest_py = TestCommand(name = "Running Target Tests",
+ command = "python workspace_tools/singletest.py -i workspace_tools/test_spec.json -M workspace_tools/muts_all.json",
+ haltOnFailure = True,
+ warnOnWarnings = True,
+ description = "Running Target Tests",
+ descriptionDone = "Target Testing Finished")
+
+mbed_build_release.addStep(singletest_py)
+# Release build collects all building toolchains
+c['builders'].append(BuilderConfig(name=release_builder_name,
+ slavenames=["example-slave"],
+ factory=mbed_build_release))
+
+####### STATUS TARGETS
+
+# 'status' is a list of Status Targets. The results of each build will be
+# pushed to these targets. buildbot/status/*.py has a variety to choose from,
+# including web pages, email senders, and IRC bots.
+
+c['status'] = []
+
+from buildbot.status import html
+from buildbot.status.web import authz, auth
+
+authz_cfg=authz.Authz(
+ # change any of these to True to enable; see the manual for more
+ # options
+ auth=auth.BasicAuth([("pyflakes","pyflakes")]),
+ gracefulShutdown = False,
+ forceBuild = 'auth', # use this to test your slave once it is set up
+ forceAllBuilds = True,
+ pingBuilder = True,
+ stopBuild = True,
+ stopAllBuilds = True,
+ cancelPendingBuild = True,
+)
+c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg, order_console_by_time=True))
+
+####### PROJECT IDENTITY
+
+# the 'title' string will appear at the top of this buildbot
+# installation's html.WebStatus home page (linked to the
+# 'titleURL') and is embedded in the title of the waterfall HTML page.
+
+c['title'] = "Green Tea"
+c['titleURL'] = ""
+
+# the 'buildbotURL' string should point to the location where the buildbot's
+# internal web server (usually the html.WebStatus page) is visible. This
+# typically uses the port number set in the Waterfall 'status' entry, but
+# with an externally-visible host name which the buildbot cannot figure out
+# without some help.
+
+c['buildbotURL'] = "http://localhost:8010/"
+
+####### DB URL
+
+c['db'] = {
+ # This specifies what database buildbot uses to store its state. You can leave
+ # this at its default for all but the largest installations.
+ 'db_url' : "sqlite:///state.sqlite",
+ # 'db_url' : "mysql://buildbot:123456@localhost/buildbot_mbed?max_idle=300",
+}
diff --git a/tools/ci_templates/library_build/build_report.html b/tools/ci_templates/library_build/build_report.html
new file mode 100644
index 0000000..1b2b693
--- /dev/null
+++ b/tools/ci_templates/library_build/build_report.html
@@ -0,0 +1,31 @@
+
+ {% with build = report.failing %}
+ {% include 'tests_build/build_report_table.html' %}
+ {% endwith %}
+
+
Passing
+ {% with build = report.passing %}
+ {% include 'tests_build/build_report_table.html' %}
+ {% endwith %}
+
+
Skipped
+ {% with build = report.skipped %}
+ {% include 'tests_build/build_report_table.html' %}
+ {% endwith %}
+
+
diff --git a/tools/ci_templates/tests_build/build_report_table.html b/tools/ci_templates/tests_build/build_report_table.html
new file mode 100644
index 0000000..79d41c1
--- /dev/null
+++ b/tools/ci_templates/tests_build/build_report_table.html
@@ -0,0 +1,12 @@
+
+
+
Toolchain
+
Project
+
+ {% for run in build %}
+
+
{{run.toolchain}}
+
{{run.project}}
+
+ {% endfor %}
+
diff --git a/tools/ci_templates/tests_build/report.html b/tools/ci_templates/tests_build/report.html
new file mode 100644
index 0000000..3f26255
--- /dev/null
+++ b/tools/ci_templates/tests_build/report.html
@@ -0,0 +1,11 @@
+
{{failing_builds|length}} Failing Builds
+{% for report in failing_builds %}
+{% include 'tests_build/build_report.html' %}
+{% endfor %}
+
+
{{passing_builds|length}} Passing Builds
+{% for report in passing_builds %}
+{% include 'tests_build/build_report.html' %}
+{% endfor %}
+
+{% include 'scripts.js' %}
diff --git a/tools/compliance/__init__.py b/tools/compliance/__init__.py
new file mode 100644
index 0000000..3840c9e
--- /dev/null
+++ b/tools/compliance/__init__.py
@@ -0,0 +1,16 @@
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/tools/compliance/ioper_base.py b/tools/compliance/ioper_base.py
new file mode 100644
index 0000000..53a4ed2
--- /dev/null
+++ b/tools/compliance/ioper_base.py
@@ -0,0 +1,69 @@
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+
+"""
+
+import sys
+
+try:
+ from colorama import Fore
+except:
+ pass
+
+COLORAMA = 'colorama' in sys.modules
+
+
+class IOperTestCaseBase():
+ """ Interoperability test case base class
+ @return list of tuple (severity, Description)
+ Example: (result.append((IOperTestSeverity.INFO, ""))
+ """
+
+ def __init__(self, scope=None):
+ self.PASS = 'PASS'
+ self.INFO = 'INFO'
+ self.ERROR = 'ERROR'
+ self.WARN = 'WARN'
+
+ self.scope = scope # Default test scope (basic, pedantic, mbed-enabled etc...)
+
+ def test(self, param=None):
+ result = []
+ return result
+
+ def RED(self, text):
+ return self.color_text(text, color=Fore.RED, delim=Fore.RESET) if COLORAMA else text
+
+ def GREEN(self, text):
+ return self.color_text(text, color=Fore.GREEN, delim=Fore.RESET) if COLORAMA else text
+
+ def YELLOW(self, text):
+ return self.color_text(text, color=Fore.YELLOW, delim=Fore.RESET) if COLORAMA else text
+
+ def color_text(self, text, color='', delim=''):
+ return color + text + color + delim
+
+ def COLOR(self, severity, text):
+ colors = {
+ self.PASS : self.GREEN,
+ self.ERROR : self.RED,
+ self.WARN : self.YELLOW
+ }
+ if severity in colors:
+ return colors[severity](text)
+ return text
diff --git a/tools/compliance/ioper_runner.py b/tools/compliance/ioper_runner.py
new file mode 100644
index 0000000..6b5bf57
--- /dev/null
+++ b/tools/compliance/ioper_runner.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python2
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+
+"""
+
+import sys
+import mbed_lstools
+from prettytable import PrettyTable
+
+try:
+ from colorama import init
+except:
+ pass
+
+COLORAMA = 'colorama' in sys.modules
+
+from ioper_base import IOperTestCaseBase
+from ioper_test_fs import IOperTest_FileStructure_Basic
+from ioper_test_fs import IOperTest_FileStructure_MbedEnabled
+from ioper_test_target_id import IOperTest_TargetID_Basic
+from ioper_test_target_id import IOperTest_TargetID_MbedEnabled
+
+
+TEST_LIST = [IOperTest_TargetID_Basic('basic'),
+ IOperTest_TargetID_MbedEnabled('mbed-enabled'),
+ IOperTest_FileStructure_Basic('basic'),
+ IOperTest_FileStructure_MbedEnabled('mbed-enabled'),
+ IOperTestCaseBase('all'), # Dummy used to add 'all' option
+ ]
+
+
+class IOperTestRunner():
+ """ Calls all i/face interoperability tests
+ """
+
+ def __init__(self, scope=None):
+ """ Test scope:
+ 'pedantic' - all
+ 'mbed-enabled' - let's try to check if this device is mbed-enabled
+ 'basic' - just simple, passive tests (no device flashing)
+ """
+ self.requested_scope = scope # Test scope given by user
+ self.raw_test_results = {} # Raw test results, can be used by exporters: { Platform: [test results]}
+
+ # Test scope definitions
+ self.SCOPE_BASIC = 'basic' # Basic tests, sanity checks
+ self.SCOPE_MBED_ENABLED = 'mbed-enabled' # Let's try to check if this device is mbed-enabled
+ self.SCOPE_PEDANTIC = 'pedantic' # Extensive tests
+ self.SCOPE_ALL = 'all' # All tests, equal to highest scope level
+
+ # This structure will help us sort test scopes so we can include them
+ # e.g. pedantic also includes basic and mbed-enabled tests
+ self.scopes = {self.SCOPE_BASIC : 0,
+ self.SCOPE_MBED_ENABLED : 1,
+ self.SCOPE_PEDANTIC : 2,
+ self.SCOPE_ALL : 99,
+ }
+
+ if COLORAMA:
+ init() # colorama.init()
+
+ def run(self):
+ """ Run tests, calculate overall score and print test results
+ """
+ mbeds = mbed_lstools.create()
+ muts_list = mbeds.list_mbeds()
+ test_base = IOperTestCaseBase()
+
+ self.raw_test_results = {}
+ for i, mut in enumerate(muts_list):
+ result = []
+ self.raw_test_results[mut['platform_name']] = []
+
+ print "MBEDLS: Detected %s, port: %s, mounted: %s"% (mut['platform_name'],
+ mut['serial_port'],
+ mut['mount_point'])
+ print "Running interoperability test suite, scope '%s'" % (self.requested_scope)
+ for test_case in TEST_LIST:
+ if self.scopes[self.requested_scope] >= self.scopes[test_case.scope]:
+ res = test_case.test(param=mut)
+ result.extend(res)
+ self.raw_test_results[mut['platform_name']].extend(res)
+
+ columns = ['Platform', 'Test Case', 'Result', 'Scope', 'Description']
+ pt = PrettyTable(columns)
+ for col in columns:
+ pt.align[col] = 'l'
+
+ for tr in result:
+ severity, tr_name, tr_scope, text = tr
+ tr = (test_base.COLOR(severity, mut['platform_name']),
+ test_base.COLOR(severity, tr_name),
+ test_base.COLOR(severity, severity),
+ test_base.COLOR(severity, tr_scope),
+ test_base.COLOR(severity, text))
+ pt.add_row(list(tr))
+ print pt.get_string(border=True, sortby='Result')
+ if i + 1 < len(muts_list):
+ print
+ return self.raw_test_results
+
+def get_available_oper_test_scopes():
+ """ Get list of available test scopes
+ """
+ scopes = set()
+ for oper_test in TEST_LIST:
+ if oper_test.scope is not None:
+ scopes.add(oper_test.scope)
+ return list(scopes)
diff --git a/tools/compliance/ioper_test_fs.py b/tools/compliance/ioper_test_fs.py
new file mode 100644
index 0000000..945855d
--- /dev/null
+++ b/tools/compliance/ioper_test_fs.py
@@ -0,0 +1,69 @@
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+
+"""
+
+import os.path
+from ioper_base import IOperTestCaseBase
+
+
+class IOperTest_FileStructure(IOperTestCaseBase):
+
+ def __init__(self, scope=None):
+ IOperTestCaseBase.__init__(self, scope)
+
+ def if_file_exist(self, fname, fail_severity=None):
+ file_path = os.path.join(self.param['mount_point'], fname)
+ exist = os.path.isfile(file_path)
+ tr_name = "FILE_EXIST(%s)" % fname.upper()
+ if exist:
+ self.result.append((self.PASS, tr_name, self.scope, "File '%s' exists" % file_path))
+ else:
+ self.result.append((fail_severity if fail_severity else self.ERROR, tr_name, self.scope, "File '%s' not found" % file_path))
+
+ def test(self, param=None):
+ self.result = []
+ if param:
+ pass
+ return self.result
+
+
+class IOperTest_FileStructure_Basic(IOperTest_FileStructure):
+ def __init__(self, scope=None):
+ IOperTest_FileStructure.__init__(self, scope)
+
+ def test(self, param=None):
+ self.param = param
+ self.result = []
+ if param:
+ self.if_file_exist('mbed.htm', self.ERROR)
+ return self.result
+
+
+class IOperTest_FileStructure_MbedEnabled(IOperTest_FileStructure):
+ def __init__(self, scope=None):
+ IOperTest_FileStructure.__init__(self, scope)
+
+ def test(self, param=None):
+ self.param = param
+ self.result = []
+ if param:
+ self.if_file_exist('mbed.htm', self.ERROR)
+ self.if_file_exist('DETAILS.TXT', self.ERROR)
+ self.if_file_exist('FAIL.TXT', self.INFO)
+ return self.result
diff --git a/tools/compliance/ioper_test_target_id.py b/tools/compliance/ioper_test_target_id.py
new file mode 100644
index 0000000..55fa0d1
--- /dev/null
+++ b/tools/compliance/ioper_test_target_id.py
@@ -0,0 +1,111 @@
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+
+"""
+
+from ioper_base import IOperTestCaseBase
+
+
+class IOperTest_TargetID(IOperTestCaseBase):
+ """ tests related to target_id value
+ """
+
+ def __init__(self, scope=None):
+ IOperTestCaseBase.__init__(self, scope)
+ self.TARGET_ID_LEN = 24
+
+ def test_target_id_format(self, target_id, target_id_name):
+ # Expected length == 24, eg. "02400203D94B0E7724B7F3CF"
+ result = []
+ target_id_len = len(target_id) if target_id else 0
+ if target_id_len == self.TARGET_ID_LEN:
+ result.append((self.PASS, "TARGET_ID_LEN", self.scope, "%s '%s' is %d chars long " % (target_id_name, target_id, target_id_len)))
+ result.append((self.INFO, "FW_VER_STR", self.scope, "%s Version String is %s.%s.%s " % (target_id_name,
+ target_id[0:4],
+ target_id[4:8],
+ target_id[8:24],
+ )))
+ else:
+ result.append((self.ERROR, "TARGET_ID_LEN", self.scope, "%s '%s' is %d chars long. Expected %d chars" % (target_id_name, target_id, target_id_len, self.TARGET_ID_LEN)))
+ return result
+
+ def test_decode_target_id(self, target_id, target_id_name):
+ result = []
+ target_id_len = len(target_id) if target_id else 0
+ if target_id_len >= 4:
+ result.append((self.INFO, "FW_VEN_CODE", self.scope, "%s Vendor Code is '%s'" % (target_id_name, target_id[0:2])))
+ result.append((self.INFO, "FW_PLAT_CODE", self.scope, "%s Platform Code is '%s'" % (target_id_name, target_id[2:4])))
+ result.append((self.INFO, "FW_VER", self.scope, "%s Firmware Version is '%s'" % (target_id_name, target_id[4:8])))
+ result.append((self.INFO, "FW_HASH_SEC", self.scope, "%s Hash of secret is '%s'" % (target_id_name, target_id[8:24])))
+ return result
+
+ def test(self, param=None):
+ result = []
+ if param:
+ pass
+ return result
+
+
+class IOperTest_TargetID_Basic(IOperTest_TargetID):
+ """ Basic interoperability tests checking TargetID compliance
+ """
+
+ def __init__(self, scope=None):
+ IOperTest_TargetID.__init__(self, scope)
+
+ def test(self, param=None):
+ result = []
+
+ if param:
+ result.append((self.PASS, "TARGET_ID", self.scope, "TargetID '%s' found" % param['target_id']))
+
+ # Check if target name can be decoded with mbed-ls
+ if param['platform_name']:
+ result.append((self.PASS, "TARGET_ID_DECODE", self.scope, "TargetID '%s' decoded as '%s'" % (param['target_id'][0:4], param['platform_name'])))
+ else:
+ result.append((self.ERROR, "TARGET_ID_DECODE", self.scope, "TargetID '%s'... not decoded" % (param['target_id'] if param['target_id'] else '')))
+
+ # Test for USBID and mbed.htm consistency
+ if param['target_id_mbed_htm'] == param['target_id_usb_id']:
+ result.append((self.PASS, "TARGET_ID_MATCH", self.scope, "TargetID (USBID) and TargetID (mbed.htm) match"))
+ else:
+ text = "TargetID (USBID) and TargetID (mbed.htm) don't match: '%s' != '%s'" % (param['target_id_usb_id'], param['target_id_mbed_htm'])
+ result.append((self.WARN, "TARGET_ID_MATCH", self.scope, text))
+ else:
+ result.append((self.ERROR, "TARGET_ID", self.scope, "TargetID not found"))
+ return result
+
+class IOperTest_TargetID_MbedEnabled(IOperTest_TargetID):
+ """ Basic interoperability tests checking TargetID compliance
+ """
+
+ def __init__(self, scope=None):
+ IOperTest_TargetID.__init__(self, scope)
+
+ def test(self, param=None):
+ result = []
+
+ if param:
+ # Target ID tests:
+ result += self.test_target_id_format(param['target_id_usb_id'], "TargetId (USBID)")
+ result += self.test_target_id_format(param['target_id_mbed_htm'], "TargetId (mbed.htm)")
+
+ # Some extra info about TargetID itself
+ result += self.test_decode_target_id(param['target_id_usb_id'], "TargetId (USBID)")
+ result += self.test_decode_target_id(param['target_id_mbed_htm'], "TargetId (mbed.htm)")
+ return result
diff --git a/tools/data/__init__.py b/tools/data/__init__.py
new file mode 100644
index 0000000..1fa8431
--- /dev/null
+++ b/tools/data/__init__.py
@@ -0,0 +1,16 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/tools/data/rpc/RPCClasses.h b/tools/data/rpc/RPCClasses.h
new file mode 100644
index 0000000..ab90b53
--- /dev/null
+++ b/tools/data/rpc/RPCClasses.h
@@ -0,0 +1,34 @@
+/* mbed Microcontroller Library
+ * Copyright (c) 2006-2012 ARM Limited
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#ifndef MBED_CLASSES_H
+#define MBED_CLASSES_H
+
+#include "rpc.h"
+
+namespace mbed {
+
+{{classes}}
+
+}
+
+#endif
+
diff --git a/tools/data/rpc/class.cpp b/tools/data/rpc/class.cpp
new file mode 100644
index 0000000..f783198
--- /dev/null
+++ b/tools/data/rpc/class.cpp
@@ -0,0 +1,24 @@
+class Rpc{{name}} : public RPC {
+public:
+ Rpc{{name}}({{cons_proto}}) : RPC(name), o({{cons_call}}) {}
+
+ {{methods}}
+
+ virtual const struct rpc_method *get_rpc_methods() {
+ static const rpc_method rpc_methods[] = {
+ {{rpc_methods}},
+ RPC_METHOD_SUPER(RPC)
+ };
+ return rpc_methods;
+ }
+ static struct rpc_class *get_rpc_class() {
+ static const rpc_function funcs[] = {
+ {"new", rpc_function_caller >},
+ RPC_METHOD_END
+ };
+ static rpc_class c = {"{{name}}", funcs, NULL};
+ return &c;
+ }
+private:
+ {{name}} o;
+};
diff --git a/tools/data/support.py b/tools/data/support.py
new file mode 100644
index 0000000..7af3774
--- /dev/null
+++ b/tools/data/support.py
@@ -0,0 +1,27 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from tools.targets import TARGETS
+
+DEFAULT_SUPPORT = {}
+CORTEX_ARM_SUPPORT = {}
+
+for target in TARGETS:
+ DEFAULT_SUPPORT[target.name] = target.supported_toolchains
+
+ if target.core.startswith('Cortex'):
+ CORTEX_ARM_SUPPORT[target.name] = [t for t in target.supported_toolchains
+ if (t=='ARM' or t=='uARM')]
diff --git a/tools/dev/__init__.py b/tools/dev/__init__.py
new file mode 100644
index 0000000..1fa8431
--- /dev/null
+++ b/tools/dev/__init__.py
@@ -0,0 +1,16 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/tools/dev/dsp_fir.py b/tools/dev/dsp_fir.py
new file mode 100644
index 0000000..f62c2b4
--- /dev/null
+++ b/tools/dev/dsp_fir.py
@@ -0,0 +1,89 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from numpy import sin, arange, pi
+from scipy.signal import lfilter, firwin
+from pylab import figure, plot, grid, show
+
+#------------------------------------------------
+# Create a signal for demonstration.
+#------------------------------------------------
+# 320 samples of (1000Hz + 15000 Hz) at 48 kHz
+sample_rate = 48000.
+nsamples = 320
+
+F_1KHz = 1000.
+A_1KHz = 1.0
+
+F_15KHz = 15000.
+A_15KHz = 0.5
+
+t = arange(nsamples) / sample_rate
+signal = A_1KHz * sin(2*pi*F_1KHz*t) + A_15KHz*sin(2*pi*F_15KHz*t)
+
+#------------------------------------------------
+# Create a FIR filter and apply it to signal.
+#------------------------------------------------
+# The Nyquist rate of the signal.
+nyq_rate = sample_rate / 2.
+
+# The cutoff frequency of the filter: 6KHz
+cutoff_hz = 6000.0
+
+# Length of the filter (number of coefficients, i.e. the filter order + 1)
+numtaps = 29
+
+# Use firwin to create a lowpass FIR filter
+fir_coeff = firwin(numtaps, cutoff_hz/nyq_rate)
+
+# Use lfilter to filter the signal with the FIR filter
+filtered_signal = lfilter(fir_coeff, 1.0, signal)
+
+#------------------------------------------------
+# Plot the original and filtered signals.
+#------------------------------------------------
+
+# The first N-1 samples are "corrupted" by the initial conditions
+warmup = numtaps - 1
+
+# The phase delay of the filtered signal
+delay = (warmup / 2) / sample_rate
+
+figure(1)
+# Plot the original signal
+plot(t, signal)
+
+# Plot the filtered signal, shifted to compensate for the phase delay
+plot(t-delay, filtered_signal, 'r-')
+
+# Plot just the "good" part of the filtered signal. The first N-1
+# samples are "corrupted" by the initial conditions.
+plot(t[warmup:]-delay, filtered_signal[warmup:], 'g', linewidth=4)
+
+grid(True)
+
+show()
+
+#------------------------------------------------
+# Print values
+#------------------------------------------------
+def print_values(label, values):
+ var = "float32_t %s[%d]" % (label, len(values))
+ print "%-30s = {%s}" % (var, ', '.join(["%+.10f" % x for x in values]))
+
+print_values('signal', signal)
+print_values('fir_coeff', fir_coeff)
+print_values('filtered_signal', filtered_signal)
diff --git a/tools/dev/intel_hex_utils.py b/tools/dev/intel_hex_utils.py
new file mode 100644
index 0000000..c60e9c4
--- /dev/null
+++ b/tools/dev/intel_hex_utils.py
@@ -0,0 +1,31 @@
+from intelhex import IntelHex
+from cStringIO import StringIO
+
+
+def sections(h):
+ start, last_address = None, None
+ for a in h.addresses():
+ if last_address is None:
+ start, last_address = a, a
+ continue
+
+ if a > last_address + 1:
+ yield (start, last_address)
+ start = a
+
+ last_address = a
+
+ if start:
+ yield (start, last_address)
+
+
+def print_sections(h):
+ for s in sections(h):
+ print "[0x%08X - 0x%08X]" % s
+
+
+def decode(record):
+ h = IntelHex()
+ f = StringIO(record)
+ h.loadhex(f)
+ h.dump()
diff --git a/tools/dev/rpc_classes.py b/tools/dev/rpc_classes.py
new file mode 100644
index 0000000..46fd902
--- /dev/null
+++ b/tools/dev/rpc_classes.py
@@ -0,0 +1,190 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from os.path import join
+from jinja2 import Template
+
+from tools.paths import TOOLS_DATA, MBED_RPC
+
+RPC_TEMPLATES_PATH = join(TOOLS_DATA, "rpc")
+
+RPC_TEMPLATE = "RPCClasses.h"
+CLASS_TEMPLATE = "class.cpp"
+RPC_CLASSES_PATH = join(MBED_RPC, RPC_TEMPLATE)
+
+
+def get_template(name):
+ return Template(open(join(RPC_TEMPLATES_PATH, name)).read())
+
+
+def write_rpc_classes(classes):
+ template = get_template(RPC_TEMPLATE)
+ open(RPC_CLASSES_PATH, "w").write(template.render({"classes":classes}))
+
+
+RPC_CLASSES = (
+ {
+ "name": "DigitalOut",
+ "cons_args": ["PinName"],
+ "methods": [
+ (None , "write", ["int"]),
+ ("int", "read" , []),
+ ]
+ },
+ {
+ "name": "DigitalIn",
+ "cons_args": ["PinName"],
+ "methods": [
+ ("int", "read" , []),
+ ]
+ },
+ {
+ "name": "DigitalInOut",
+ "cons_args": ["PinName"],
+ "methods": [
+ ("int", "read" , []),
+ (None , "write" , ["int"]),
+ (None , "input" , []),
+ (None , "output", []),
+ ]
+ },
+ {
+ "name": "AnalogIn",
+ "required": "ANALOGIN",
+ "cons_args": ["PinName"],
+ "methods": [
+ ("float" , "read" , []),
+ ("unsigned short", "read_u16", []),
+ ]
+ },
+ {
+ "name": "AnalogOut",
+ "required": "ANALOGOUT",
+ "cons_args": ["PinName"],
+ "methods": [
+ ("float", "read" , []),
+ (None , "write" , ["float"]),
+ (None , "write_u16", ["unsigned short"]),
+ ]
+ },
+ {
+ "name": "PwmOut",
+ "required": "PWMOUT",
+ "cons_args": ["PinName"],
+ "methods": [
+ ("float", "read" , []),
+ (None , "write" , ["float"]),
+ (None , "period" , ["float"]),
+ (None , "period_ms" , ["int"]),
+ (None , "pulsewidth" , ["float"]),
+ (None , "pulsewidth_ms", ["int"]),
+ ]
+ },
+ {
+ "name": "SPI",
+ "required": "SPI",
+ "cons_args": ["PinName", "PinName", "PinName"],
+ "methods": [
+ (None , "format" , ["int", "int"]),
+ (None , "frequency", ["int"]),
+ ("int", "write" , ["int"]),
+ ]
+ },
+ {
+ "name": "Serial",
+ "required": "SERIAL",
+ "cons_args": ["PinName", "PinName"],
+ "methods": [
+ (None , "baud" , ["int"]),
+ ("int", "readable" , []),
+ ("int", "writeable", []),
+ ("int", "putc" , ["int"]),
+ ("int", "getc" , []),
+ ("int", "puts" , ["const char *"]),
+ ]
+ },
+ {
+ "name": "Timer",
+ "cons_args": [],
+ "methods": [
+ (None , "start" , []),
+ (None , "stop" , []),
+ (None , "reset" , []),
+ ("float", "read" , []),
+ ("int" , "read_ms", []),
+ ("int" , "read_us", []),
+ ]
+ }
+)
+
+
+def get_args_proto(args_types, extra=None):
+ args = ["%s a%d" % (s, n) for n, s in enumerate(args_types)]
+ if extra:
+ args.extend(extra)
+ return ', '.join(args)
+
+
+def get_args_call(args):
+ return ', '.join(["a%d" % (n) for n in range(len(args))])
+
+
+classes = []
+class_template = get_template(CLASS_TEMPLATE)
+
+for c in RPC_CLASSES:
+ c_args = c['cons_args']
+ data = {
+ 'name': c['name'],
+ 'cons_type': ', '.join(c_args + ['const char*']),
+ "cons_proto": get_args_proto(c_args, ["const char *name=NULL"]),
+ "cons_call": get_args_call(c_args)
+ }
+
+ c_name = "Rpc" + c['name']
+
+ methods = []
+ rpc_methods = []
+ for r, m, a in c['methods']:
+ ret_proto = r if r else "void"
+ args_proto = "void"
+
+ ret_defin = "return " if r else ""
+ args_defin = ""
+
+ if a:
+ args_proto = get_args_proto(a)
+ args_defin = get_args_call(a)
+
+ proto = "%s %s(%s)" % (ret_proto, m, args_proto)
+ defin = "{%so.%s(%s);}" % (ret_defin, m, args_defin)
+ methods.append("%s %s" % (proto, defin))
+
+ rpc_method_type = [r] if r else []
+ rpc_method_type.append(c_name)
+ rpc_method_type.extend(a)
+ rpc_methods.append('{"%s", rpc_method_caller<%s, &%s::%s>}' % (m, ', '.join(rpc_method_type), c_name, m))
+
+ data['methods'] = "\n ".join(methods)
+ data['rpc_methods'] = ",\n ".join(rpc_methods)
+
+ class_decl = class_template.render(data)
+ if 'required' in c:
+ class_decl = "#if DEVICE_%s\n%s\n#endif" % (c['required'], class_decl)
+
+ classes.append(class_decl)
+
+write_rpc_classes('\n\n'.join(classes))
diff --git a/tools/dev/syms.py b/tools/dev/syms.py
new file mode 100644
index 0000000..2fdbd2d
--- /dev/null
+++ b/tools/dev/syms.py
@@ -0,0 +1,75 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+Utility to find which libraries could define a given symbol
+"""
+from argparse import ArgumentParser
+from os.path import join, splitext
+from os import walk
+from subprocess import Popen, PIPE
+
+
+OBJ_EXT = ['.o', '.a', '.ar']
+
+
+def find_sym_in_lib(sym, obj_path):
+ contain_symbol = False
+
+ out = Popen(["nm", "-C", obj_path], stdout=PIPE, stderr=PIPE).communicate()[0]
+ for line in out.splitlines():
+ tokens = line.split()
+ n = len(tokens)
+ if n == 2:
+ sym_type = tokens[0]
+ sym_name = tokens[1]
+ elif n == 3:
+ sym_type = tokens[1]
+ sym_name = tokens[2]
+ else:
+ continue
+
+ if sym_type == "U":
+ # This object is using this symbol, not defining it
+ continue
+
+ if sym_name == sym:
+ contain_symbol = True
+
+ return contain_symbol
+
+
+def find_sym_in_path(sym, dir_path):
+ for root, _, files in walk(dir_path):
+ for file in files:
+
+ _, ext = splitext(file)
+ if ext not in OBJ_EXT: continue
+
+ path = join(root, file)
+ if find_sym_in_lib(sym, path):
+ print path
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser(description='Find Symbol')
+ parser.add_argument('-s', '--sym', required=True,
+ help='The symbol to be searched')
+ parser.add_argument('-p', '--path', required=True,
+ help='The path where to search')
+ args = parser.parse_args()
+
+ find_sym_in_path(args.sym, args.path)
diff --git a/tools/export/.hgignore b/tools/export/.hgignore
new file mode 100644
index 0000000..c309ef5
--- /dev/null
+++ b/tools/export/.hgignore
@@ -0,0 +1,22 @@
+syntax: regexp
+\.hgignore$
+\.git$
+\.svn$
+\.orig$
+\.msub$
+\.meta$
+\.ctags
+\.uvproj$
+\.uvopt$
+\.project$
+\.cproject$
+\.launch$
+\.project$
+\.cproject$
+\.launch$
+Makefile$
+\.ewp$
+\.eww$
+\.htm$
+Debug$
+.settings$
diff --git a/tools/export/README.md b/tools/export/README.md
new file mode 100644
index 0000000..1027775
--- /dev/null
+++ b/tools/export/README.md
@@ -0,0 +1,1148 @@
+Exporter IDE/Platform Support
+-----------------------------------
+
+
+
+
Platform
+
codesourcery
+
coide
+
ds5_5
+
emblocks
+
gcc_arm
+
iar
+
kds
+
lpcxpresso
+
uvision
+
+
+
APPNEARME_MICRONFCBOARD
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARCH_BLE
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
ARCH_GPRS
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARCH_MAX
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
ARCH_PRO
+
✓
+
✓
+
✓
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
ARM_MPS2
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARM_MPS2_M0
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARM_MPS2_M0P
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARM_MPS2_M1
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARM_MPS2_M3
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARM_MPS2_M4
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
ARM_MPS2_M7
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
DELTA_DFCM_NNN40
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
DELTA_DFCM_NNN40_OTA
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
DISCO_F051R8
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
DISCO_F100RB
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
DISCO_F303VC
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
DISCO_F334C8
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
DISCO_F401VC
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
DISCO_F407VG
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
DISCO_F429ZI
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
DISCO_L053C8
+
-
+
✓
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
HRM1017
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
K20D50M
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
K22F
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
✓
+
-
+
✓
+
+
+
K64F
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
✓
+
-
+
✓
+
+
+
KL05Z
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
KL25Z
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
KL43Z
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
KL46Z
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
LPC1114
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
LPC11C24
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
✓
+
+
+
LPC11U24
+
-
+
-
+
✓
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
LPC11U24_301
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
LPC11U34_421
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
LPC11U35_401
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
✓
+
-
+
+
+
LPC11U35_501
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
✓
+
-
+
+
+
LPC11U35_Y5_MBUG
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
LPC11U37H_401
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
✓
+
✓
+
+
+
LPC11U37_501
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
LPC11U68
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
✓
+
✓
+
+
+
LPC1347
+
-
+
-
+
-
+
✓
+
-
+
✓
+
-
+
-
+
✓
+
+
+
LPC1549
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
LPC1768
+
✓
+
✓
+
✓
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
LPC2368
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
LPC4088
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
LPC4088_DM
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
LPC4330_M0
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
+
+
LPC4330_M4
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
✓
+
✓
+
+
+
LPC4337
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
✓
+
+
+
LPC810
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
-
+
+
+
LPC812
+
-
+
-
+
✓
+
-
+
-
+
✓
+
-
+
-
+
✓
+
+
+
LPC824
+
-
+
-
+
-
+
✓
+
-
+
✓
+
-
+
✓
+
✓
+
+
+
LPCCAPPUCCINO
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
✓
+
-
+
+
+
MTS_DRAGONFLY_F411RE
+
-
+
-
+
-
+
✓
+
-
+
✓
+
-
+
-
+
-
+
+
+
MTS_GAMBIT
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
MTS_MDOT_F405RG
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
MTS_MDOT_F411RE
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
-
+
+
+
NRF51822
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NRF51822_BOOT
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
NRF51822_OTA
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
NRF51822_Y5_MBUG
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
NRF51_DK
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NRF51_DK_BOOT
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
NRF51_DK_OTA
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
NRF51_DONGLE
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F030R8
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F070RB
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F072RB
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F091RC
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F103RB
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F302R8
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F303RE
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F334R8
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F401RE
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_F411RE
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_L053R8
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_L073RZ
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
NUCLEO_L152RE
+
-
+
✓
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
OC_MBUINO
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
RBLAB_BLENANO
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
RBLAB_NRF51822
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
RZ_A1H
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
SEEED_TINY_BLE
+
-
+
-
+
-
+
✓
+
✓
+
✓
+
-
+
-
+
✓
+
+
+
SEEED_TINY_BLE_BOOT
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
SEEED_TINY_BLE_OTA
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
SSCI824
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
STM32F3XX
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
STM32F407
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
-
+
+
+
TEENSY3_1
+
-
+
-
+
-
+
✓
+
✓
+
-
+
-
+
-
+
✓
+
+
+
UBLOX_C027
+
✓
+
✓
+
✓
+
✓
+
✓
+
✓
+
-
+
✓
+
✓
+
+
+
UBLOX_C029
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
WALLBOT_BLE
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+
XADOW_M0
+
-
+
-
+
-
+
✓
+
-
+
-
+
-
+
-
+
-
+
+
+Total IDEs: 9
+ Total platforms: 94
+ Total permutations: 288
diff --git a/tools/export/__init__.py b/tools/export/__init__.py
new file mode 100644
index 0000000..c1db817
--- /dev/null
+++ b/tools/export/__init__.py
@@ -0,0 +1,219 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os, tempfile
+from os.path import join, exists, basename
+from shutil import copytree, rmtree, copy
+import yaml
+
+from tools.utils import mkdir
+from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio
+from tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException
+from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP
+
+from project_generator_definitions.definitions import ProGenDef
+
+EXPORTERS = {
+ 'uvision': uvision4.Uvision4,
+ 'uvision5': uvision5.Uvision5,
+ 'lpcxpresso': codered.CodeRed,
+ 'gcc_arm': gccarm.GccArm,
+ 'ds5_5': ds5_5.DS5_5,
+ 'iar': iar.IAREmbeddedWorkbench,
+ 'emblocks' : emblocks.IntermediateFile,
+ 'coide' : coide.CoIDE,
+ 'kds' : kds.KDS,
+ 'simplicityv3' : simplicityv3.SimplicityV3,
+ 'atmelstudio' : atmelstudio.AtmelStudio,
+ 'sw4stm32' : sw4stm32.Sw4STM32,
+ 'e2studio' : e2studio.E2Studio,
+}
+
+ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN = """
+Sorry, the target %s is not currently supported on the %s toolchain.
+Please refer to Exporting to offline toolchains for more information.
+"""
+
+ERROR_MESSAGE_NOT_EXPORT_LIBS = """
+To export this project please import the export version of the mbed library.
+"""
+
def online_build_url_resolver(url):
    """Resolve an online library build URL to a local path and name.

    Placeholder implementation: every URL resolves to empty fields.
    TODO: retrieve the real path and name for the given build URL.
    """
    resolved = {'path': '', 'name': ''}
    return resolved
+
+
def export(project_path, project_name, ide, target, destination='/tmp/',
           tempdir=None, clean=True, extra_symbols=None, build_url_resolver=online_build_url_resolver):
    """Export a project to an offline IDE/toolchain and zip the result.

    Args:
        project_path: path of the project sources to export.
        project_name: name used for the generated project files.
        ide: key into EXPORTERS, or None/"zip" for a plain ZIP export.
        target: target platform name (upper-cased internally).
        destination: directory receiving the final zip archive.
        tempdir: working directory; a fresh temp dir is created when None.
        clean: remove the working directory after zipping.
        extra_symbols: extra preprocessor symbols passed to the exporter.
        build_url_resolver: callable mapping an online build URL to
            {'path': ..., 'name': ...}.

    Returns:
        (zip_path, report) where zip_path is None on failure and report is
        a dict with 'success' and 'errormsg' keys.
    """
    # Convention: we are using capitals for toolchain and target names
    if target is not None:
        target = target.upper()

    if tempdir is None:
        tempdir = tempfile.mkdtemp()

    use_progen = False
    supported = True
    report = {'success': False, 'errormsg': ''}

    if ide is None or ide == "zip":
        # Simple ZIP exporter
        try:
            ide = "zip"
            exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
            exporter.scan_and_copy_resources(project_path, tempdir)
            exporter.generate()
            report['success'] = True
        except OldLibrariesException:
            # fix: use version-portable exception syntax instead of the
            # Python-2-only "except X, e" form (the bound name was unused).
            report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
    else:
        if ide not in EXPORTERS:
            report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
        else:
            Exporter = EXPORTERS[ide]
            target = EXPORT_MAP.get(target, target)
            # Exporters driven by project_generator advertise PROGEN_ACTIVE;
            # the attribute is simply absent on the others.
            try:
                if Exporter.PROGEN_ACTIVE:
                    use_progen = True
            except AttributeError:
                pass
            if use_progen:
                if not ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']):
                    supported = False
            else:
                if target not in Exporter.TARGETS:
                    supported = False

            if supported:
                # target checked, export
                try:
                    exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
                    exporter.scan_and_copy_resources(project_path, tempdir)
                    exporter.generate()
                    report['success'] = True
                except OldLibrariesException:
                    report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
            else:
                report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)

    zip_path = None
    if report['success']:
        # exporter.yaml records which generator (if any) produced the project
        exporter_yaml = {
            'project_generator': {
                'active': False,
            }
        }
        if use_progen:
            try:
                import pkg_resources
                version = pkg_resources.get_distribution('project_generator').version
                exporter_yaml['project_generator']['version'] = version
                exporter_yaml['project_generator']['active'] = True
                exporter_yaml['project_generator_definitions'] = {}
                version = pkg_resources.get_distribution('project_generator_definitions').version
                exporter_yaml['project_generator_definitions']['version'] = version
            except ImportError:
                pass
        with open(os.path.join(tempdir, 'exporter.yaml'), 'w') as outfile:
            yaml.dump(exporter_yaml, outfile, default_flow_style=False)
        # add readme file to every offline export.
        # fix: the original did open(...).write('' % (ide)), which leaks the
        # file handle AND raises TypeError ('' has no conversion for ide).
        # NOTE(review): the redirect content below is reconstructed -- the
        # literal appears to have been stripped from this copy; confirm
        # against upstream before relying on the exact URL.
        with open(os.path.join(tempdir, 'GettingStarted.htm'), 'w') as readme:
            readme.write('<meta http-equiv="refresh" content="0; url=http://developer.mbed.org/docs/export/%s"/>' % ide)
        # copy .hgignore file to exported directory as well.
        copy(os.path.join(exporter.TEMPLATE_DIR, '.hgignore'), tempdir)
        zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)

    return zip_path, report
+
+
+###############################################################################
+# Generate project folders following the online conventions
+###############################################################################
def copy_tree(src, dst, clean=True):
    """Copy the directory tree *src* to *dst*.

    If *dst* already exists it is removed first when *clean* is true;
    when *clean* is false an existing destination is left untouched and
    nothing is copied.
    """
    if exists(dst):
        if not clean:
            return
        rmtree(dst)
    copytree(src, dst)
+
+
def setup_user_prj(user_dir, prj_path, lib_paths=None):
    """
    Create *user_dir* with the directory layout used by the mbed online IDE:
    project sources under ``src`` and each library under ``lib/<name>``.
    """
    mkdir(user_dir)

    # Sources mirror the online IDE's "src" folder.
    copy_tree(prj_path, join(user_dir, "src"))

    # Each dependency lands in lib/<basename-of-its-path>.
    lib_dir = join(user_dir, "lib")
    mkdir(lib_dir)

    for lib_path in (lib_paths or []):
        copy_tree(lib_path, join(lib_dir, basename(lib_path)))
+
def mcu_ide_matrix(verbose_html=False, platform_filter=None):
    """Build a table mapping every target platform to the IDEs that can
    export it.

    Args:
        verbose_html: render as an HTML table (check marks) instead of
            plain text ("x").
        platform_filter: accepted for API compatibility; currently unused.

    Returns:
        The rendered table plus totals, as a single string.
    """
    # Only import prettytable here so building works without extra modules.
    from prettytable import PrettyTable, ALL

    # fix: sorted(EXPORTERS) replaces the Python-2-only
    # EXPORTERS.iterkeys() collect-and-sort loop; works on 2 and 3.
    supported_ides = sorted(EXPORTERS)

    columns = ["Platform"] + supported_ides
    pt = PrettyTable(columns)
    # Center every column, left-align the platform names.
    for col in columns:
        pt.align[col] = "c"
    pt.align["Platform"] = "l"

    perm_counter = 0
    target_counter = 0
    for target in sorted(TARGET_NAMES):
        target_counter += 1
        row = [target]  # First column is the platform name
        for ide in supported_ides:
            if target in EXPORTERS[ide].TARGETS:
                text = "✓" if verbose_html else "x"
                perm_counter += 1
            else:
                text = "-"
            row.append(text)
        pt.add_row(row)

    pt.border = True
    pt.vrules = ALL
    pt.hrules = ALL
    # pt.get_html_string(format=True) would create a full standalone page;
    # the short format below is suitable for embedding in readme.md.
    result = pt.get_html_string() if verbose_html else pt.get_string()
    result += "\n"
    result += "Total IDEs: %d\n" % len(supported_ides)
    if verbose_html:
        result += " "
    result += "Total platforms: %d\n" % target_counter
    if verbose_html:
        result += " "
    result += "Total permutations: %d" % perm_counter
    if verbose_html:
        # Un-escape entities that get_html_string() escaped.
        # fix: the original read result.replace("&", "&") -- a no-op,
        # almost certainly garbled from replace("&amp;", "&"); confirm
        # against upstream if HTML output looks wrong.
        result = result.replace("&amp;", "&")
    return result
diff --git a/tools/export/atmelstudio.py b/tools/export/atmelstudio.py
new file mode 100644
index 0000000..7b69d20
--- /dev/null
+++ b/tools/export/atmelstudio.py
@@ -0,0 +1,76 @@
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import uuid
+from exporters import Exporter
+from os.path import splitext, basename, dirname
+
+
class AtmelStudio(Exporter):
    """Exporter producing Atmel Studio 6.2 solution (.atsln) and C++
    project (.cppproj) files for SAM targets built with GCC_ARM."""
    NAME = 'AtmelStudio'
    TOOLCHAIN = 'GCC_ARM'

    TARGETS = [
        'SAMD21J18A',
        'SAMR21G18A',
        'SAMD21G18A',
        'SAML21J18A',
        'SAMG55J19',
    ]

    DOT_IN_RELATIVE_PATH = True

    def generate(self):
        """Render the .atsln and .cppproj files into the project dir."""
        # Collect source files and their directories.
        # NOTE(review): source[2:] presumably strips a leading "./" from
        # the scanned relative paths -- confirm against the resource scanner.
        source_files = []
        dirs = []
        for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
            r = getattr(self.resources, r_type)
            if r:
                for source in r:
                    source_files.append(source[2:])
                    dirs.append(dirname(source[2:]))

        # Unique, order-preserving list of non-empty source folders.
        source_folders = []
        for e in dirs:
            if e and e not in source_folders:
                source_folders.append(e)

        # Library names with the extension and the "lib" prefix removed.
        libraries = []
        for lib in self.resources.libraries:
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        # Fresh GUIDs for the solution/project pair, brace-wrapped and
        # upper-cased as Visual-Studio-style files expect.
        solution_uuid = '{' + str(uuid.uuid4()) + '}'
        project_uuid = '{' + str(uuid.uuid4()) + '}'

        ctx = {
            'target': self.target,
            'name': self.program_name,
            'source_files': source_files,
            'source_folders': source_folders,
            'object_files': self.resources.objects,
            'include_paths': self.resources.inc_dirs,
            'library_paths': self.resources.lib_dirs,
            'linker_script': self.resources.linker_script,
            'libraries': libraries,
            'symbols': self.get_symbols(),
            'solution_uuid': solution_uuid.upper(),
            'project_uuid': project_uuid.upper()
        }
        # fix: removed dead assignment "target = self.target.lower()" --
        # the local was never used.
        self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.program_name)
        self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.program_name)
diff --git a/tools/export/atmelstudio6_2.atsln.tmpl b/tools/export/atmelstudio6_2.atsln.tmpl
new file mode 100644
index 0000000..3c8ea50
--- /dev/null
+++ b/tools/export/atmelstudio6_2.atsln.tmpl
@@ -0,0 +1,20 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Atmel Studio Solution File, Format Version 11.00
+Project("{{solution_uuid}}") = "{{name}}", "{{name}}.cppproj", "{{project_uuid}}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|ARM = Debug|ARM
+ Release|ARM = Release|ARM
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {{project_uuid}}.Debug|ARM.ActiveCfg = Debug|ARM
+ {{project_uuid}}.Debug|ARM.Build.0 = Debug|ARM
+ {{project_uuid}}.Release|ARM.ActiveCfg = Release|ARM
+ {{project_uuid}}.Release|ARM.Build.0 = Release|ARM
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/tools/export/atmelstudio6_2.cppproj.tmpl b/tools/export/atmelstudio6_2.cppproj.tmpl
new file mode 100644
index 0000000..98696b3
--- /dev/null
+++ b/tools/export/atmelstudio6_2.cppproj.tmpl
@@ -0,0 +1,176 @@
+
+
+
+ 2.0
+ 6.2
+ com.Atmel.ARMGCC.CPP
+ {{project_uuid}}
+ AT{{target}}
+ none
+ Executable
+ CPP
+ $(MSBuildProjectName)
+ .elf
+ $(MSBuildProjectDirectory)\$(Configuration)
+ AtmelStudio6_2
+ AtmelStudio6_2
+ AtmelStudio6_2
+ Native
+ true
+ false
+ true
+ true
+
+
+ true
+
+ 2
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ True
+ True
+ True
+ True
+ True
+
+
+ NDEBUG
+ {% for s in symbols %}{{s}}
+ {% endfor %}
+
+
+
+
+ {% for i in include_paths %}../{{i}}
+ {% endfor %}
+
+
+ Optimize for size (-Os)
+ True
+ True
+ -std=gnu99 -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
+
+
+ NDEBUG
+ {% for s in symbols %}{{s}}
+ {% endfor %}
+
+
+
+
+ {% for i in include_paths %}../{{i}}
+ {% endfor %}
+
+
+ Optimize for size (-Os)
+ True
+ True
+ -std=gnu++98 -fno-rtti -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
+
+
+ libm
+
+
+
+
+
+
+ True
+ {% for p in library_paths %}-L../{{p}} {% endfor %} {% for f in object_files %}../{{f}} {% endfor %} {% for lib in libraries %}-l{{lib}} {% endfor %} -T../{{linker_script}} -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,--cref -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
+
+
+ {% for i in include_paths %}../{{i}}
+ {% endfor %}
+
+
+
+
+
+
+
+
+ True
+ True
+ True
+ True
+ True
+
+
+ DEBUG
+ {% for s in symbols %}{{s}}
+ {% endfor %}
+
+
+
+
+ {% for i in include_paths %}../{{i}}
+ {% endfor %}
+
+
+ Optimize (-O1)
+ True
+ Maximum (-g3)
+ True
+ -std=gnu99 -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
+
+
+ DEBUG
+ {% for s in symbols %}{{s}}
+ {% endfor %}
+
+
+
+
+ {% for i in include_paths %}../{{i}}
+ {% endfor %}
+
+
+ Optimize (-O1)
+ True
+ Maximum (-g3)
+ True
+ -std=gnu++98 -fno-rtti -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
+
+
+ libm
+
+
+
+
+
+
+ True
+ {% for p in library_paths %}-L../{{p}} {% endfor %} {% for f in object_files %}../{{f}} {% endfor %} {% for lib in libraries %}-l{{lib}} {% endfor %} -T../{{linker_script}} -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,--cref -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
+ Default (-g)
+
+
+ {% for i in include_paths %}../{{i}}
+ {% endfor %}
+
+
+ Default (-Wa,-g)
+
+
+
+
+ {% for f in source_folders %}
+ {% endfor %}
+ {% for s in source_files %}
+ compile
+
+ {% endfor %}
+
+
+
\ No newline at end of file
diff --git a/tools/export/codered.py b/tools/export/codered.py
new file mode 100644
index 0000000..c502096
--- /dev/null
+++ b/tools/export/codered.py
@@ -0,0 +1,57 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename
+
+
class CodeRed(Exporter):
    """Exporter for the Code Red / LPCXpresso IDE (GCC_CR toolchain)."""
    NAME = 'CodeRed'
    TOOLCHAIN = 'GCC_CR'

    TARGETS = [
        'LPC1768',
        'LPC4088',
        'LPC4088_DM',
        'LPC4330_M4',
        'LPC1114',
        'LPC11U35_401',
        'LPC11U35_501',
        'UBLOX_C027',
        'ARCH_PRO',
        'LPC1549',
        'LPC11U68',
        'LPCCAPPUCCINO',
        'LPC824',
        'LPC11U37H_401',
    ]

    def generate(self):
        """Write the Eclipse-style .project and .cproject files from the
        per-target Code Red templates."""
        # Library names with the extension and the "lib" prefix removed.
        libraries = [splitext(basename(lib))[0][3:]
                     for lib in self.resources.libraries]

        ctx = {
            'name': self.program_name,
            'include_paths': self.resources.inc_dirs,
            'linker_script': self.resources.linker_script,
            'object_files': self.resources.objects,
            'libraries': libraries,
            'symbols': self.get_symbols()
        }
        board = self.target.lower()
        self.gen_file('codered_%s_project.tmpl' % board, ctx, '.project')
        self.gen_file('codered_%s_cproject.tmpl' % board, ctx, '.cproject')
diff --git a/tools/export/codered_arch_pro_cproject.tmpl b/tools/export/codered_arch_pro_cproject.tmpl
new file mode 100644
index 0000000..b39438a
--- /dev/null
+++ b/tools/export/codered_arch_pro_cproject.tmpl
@@ -0,0 +1,79 @@
+{% extends "codered_cproject_cortexm3_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc176x.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC1768" property_count="5" version="1"/>
+<infoList vendor="NXP">
+<info chip="LPC1768" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC1768" package="lpc17_lqfp100.xml">
+<chip>
+<name>LPC1768</name>
+<family>LPC17xx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
+<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
+<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
+<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
+<peripheralInstance derived_from="LPC17_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
+<peripheralInstance derived_from="LPC17_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
+<peripheralInstance derived_from="LPC17_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
+<peripheralInstance derived_from="LPC17_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
+<peripheralInstance derived_from="LPC17_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
+<peripheralInstance derived_from="LPC17xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
+<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
+<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
+<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
+<peripheralInstance derived_from="LPC17_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
+<peripheralInstance derived_from="LPC17_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
+<peripheralInstance derived_from="LPC17_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
+<peripheralInstance derived_from="LPC17_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
+<peripheralInstance derived_from="LPC17_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
+<peripheralInstance derived_from="LPC17_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
+<peripheralInstance derived_from="LPC17_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
+<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
+<peripheralInstance derived_from="LPC17_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
+<peripheralInstance derived_from="LPC17_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
+<peripheralInstance derived_from="LPC17_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC17_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC17_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
+<peripheralInstance derived_from="LPC1x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
+<peripheralInstance derived_from="LPC17_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
+<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
+<peripheralInstance derived_from="LPC17_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
+<peripheralInstance derived_from="LPC17_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
+<peripheralInstance derived_from="LPC17_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
+<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
+<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
+<peripheralInstance derived_from="LPC17_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m3">Cortex-M3</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_arch_pro_project.tmpl b/tools/export/codered_arch_pro_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_arch_pro_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_cproject_common.tmpl b/tools/export/codered_cproject_common.tmpl
new file mode 100644
index 0000000..b71f131
--- /dev/null
+++ b/tools/export/codered_cproject_common.tmpl
@@ -0,0 +1,1850 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% block cpu_config %}{% endblock %}
+
+
+
diff --git a/tools/export/codered_cproject_cortexm0_common.tmpl b/tools/export/codered_cproject_cortexm0_common.tmpl
new file mode 100644
index 0000000..895485f
--- /dev/null
+++ b/tools/export/codered_cproject_cortexm0_common.tmpl
@@ -0,0 +1,3 @@
+{% extends "codered_cproject_common.tmpl" %}
+
+{% block core %}cm0{% endblock %}
diff --git a/tools/export/codered_cproject_cortexm3_common.tmpl b/tools/export/codered_cproject_cortexm3_common.tmpl
new file mode 100644
index 0000000..894afaa
--- /dev/null
+++ b/tools/export/codered_cproject_cortexm3_common.tmpl
@@ -0,0 +1,3 @@
+{% extends "codered_cproject_common.tmpl" %}
+
+{% block core %}cm3{% endblock %}
diff --git a/tools/export/codered_lpc1114_cproject.tmpl b/tools/export/codered_lpc1114_cproject.tmpl
new file mode 100644
index 0000000..ae49cd5
--- /dev/null
+++ b/tools/export/codered_lpc1114_cproject.tmpl
@@ -0,0 +1,48 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC11_12_13_32K_4K.cfx" property_3="NXP" property_4="LPC1114FN/102" property_count="5" version="60100"/>
+<infoList vendor="NXP">
+<info chip="LPC1114FN/102" flash_driver="LPC11_12_13_32K_4K.cfx" match_id="0x0A40902B,0x1A40902B" name="LPC1114FN/102" stub="crt_emu_lpc11_13_nxp">
+<chip>
+<name>LPC1114FN/102</name>
+<family>LPC11xx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash32" location="0x0" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="RamLoc4" location="0x10000000" size="0x1000"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
+<peripheralInstance derived_from="UART" determined="infoFile" id="UART" location="0x40008000"/>
+<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
+<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
+<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
+<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
+<peripheralInstance derived_from="SPI0" determined="infoFile" id="SPI0" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="GPIO0" determined="infoFile" id="GPIO0" location="0x50000000"/>
+<peripheralInstance derived_from="GPIO1" determined="infoFile" id="GPIO1" location="0x50010000"/>
+<peripheralInstance derived_from="GPIO2" determined="infoFile" id="GPIO2" location="0x50020000"/>
+<peripheralInstance derived_from="GPIO3" determined="infoFile" id="GPIO3" location="0x50030000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC11xx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc1114_project.tmpl b/tools/export/codered_lpc1114_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc1114_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc11u35_401_cproject.tmpl b/tools/export/codered_lpc11u35_401_cproject.tmpl
new file mode 100644
index 0000000..e874ee6
--- /dev/null
+++ b/tools/export/codered_lpc11u35_401_cproject.tmpl
@@ -0,0 +1,51 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U35/401" property_count="5" version="70002"/>
+<infoList vendor="NXP">
+<info chip="LPC11U35/401" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U35/401" stub="crt_emu_lpc11_13_nxp">
+<chip>
+<name>LPC11U35/401</name>
+<family>LPC11Uxx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
+<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
+<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
+<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
+<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
+<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
+<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
+<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
+<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
+<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
+<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc11u35_401_project.tmpl b/tools/export/codered_lpc11u35_401_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc11u35_401_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc11u35_501_cproject.tmpl b/tools/export/codered_lpc11u35_501_cproject.tmpl
new file mode 100644
index 0000000..622844e
--- /dev/null
+++ b/tools/export/codered_lpc11u35_501_cproject.tmpl
@@ -0,0 +1,51 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U35/501" property_count="5" version="70002"/>
+<infoList vendor="NXP">
+<info chip="LPC11U35/501" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U35/501" stub="crt_emu_lpc11_13_nxp">
+<chip>
+<name>LPC11U35/501</name>
+<family>LPC11Uxx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
+<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
+<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
+<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
+<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
+<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
+<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
+<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
+<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
+<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
+<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc11u35_501_project.tmpl b/tools/export/codered_lpc11u35_501_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc11u35_501_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc11u37h_401_cproject.tmpl b/tools/export/codered_lpc11u37h_401_cproject.tmpl
new file mode 100644
index 0000000..4b9fd6e
--- /dev/null
+++ b/tools/export/codered_lpc11u37h_401_cproject.tmpl
@@ -0,0 +1,51 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U37H/401" property_count="5" version="70002"/>
+<infoList vendor="NXP">
+<info chip="LPC11U37H/401" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U37H/401" stub="crt_emu_lpc11_13_nxp">
+<chip>
+<name>LPC11U37H/401</name>
+<family>LPC11Uxx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
+<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
+<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
+<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
+<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
+<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
+<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
+<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
+<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
+<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
+<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc11u37h_401_project.tmpl b/tools/export/codered_lpc11u37h_401_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc11u37h_401_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc11u68_cproject.tmpl b/tools/export/codered_lpc11u68_cproject.tmpl
new file mode 100644
index 0000000..0af4174
--- /dev/null
+++ b/tools/export/codered_lpc11u68_cproject.tmpl
@@ -0,0 +1,60 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}startup_LPC11U68.cpp{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC11U6x_256K.cfx" property_3="NXP" property_4="LPC11U68" property_count="5" version="70200"/>
+<infoList vendor="NXP">
+<info chip="LPC11U68" flash_driver="LPC11U6x_256K.cfx" match_id="0x0" name="LPC11U68" stub="crt_emu_cm3_gen">
+<chip>
+<name>LPC11U68</name>
+<family>LPC11U6x</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash256" location="0x0" size="0x40000"/>
+<memoryInstance derived_from="RAM" id="Ram0_32" location="0x10000000" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="Ram1_2" location="0x20000000" size="0x800"/>
+<memoryInstance derived_from="RAM" id="Ram2USB_2" location="0x20004000" size="0x800"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="I2C0" determined="infoFile" id="I2C0" location="0x40000000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
+<peripheralInstance derived_from="USART0" determined="infoFile" id="USART0" location="0x40008000"/>
+<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
+<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
+<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
+<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="I2C1" determined="infoFile" id="I2C1" location="0x40020000"/>
+<peripheralInstance derived_from="RTC" determined="infoFile" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="DMATRIGMUX" determined="infoFile" id="DMATRIGMUX" location="0x40028000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
+<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="USART4" determined="infoFile" id="USART4" location="0x4004c000"/>
+<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
+<peripheralInstance derived_from="GINT0" determined="infoFile" id="GINT0" location="0x4005c000"/>
+<peripheralInstance derived_from="GINT1" determined="infoFile" id="GINT1" location="0x40060000"/>
+<peripheralInstance derived_from="USART1" determined="infoFile" id="USART1" location="0x4006c000"/>
+<peripheralInstance derived_from="USART2" determined="infoFile" id="USART2" location="0x40070000"/>
+<peripheralInstance derived_from="USART3" determined="infoFile" id="USART3" location="0x40074000"/>
+<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
+<peripheralInstance derived_from="CRC" determined="infoFile" id="CRC" location="0x50000000"/>
+<peripheralInstance derived_from="DMA" determined="infoFile" id="DMA" location="0x50004000"/>
+<peripheralInstance derived_from="SCT0" determined="infoFile" id="SCT0" location="0x5000c000"/>
+<peripheralInstance derived_from="SCT1" determined="infoFile" id="SCT1" location="0x5000e000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0xa0000000"/>
+<peripheralInstance derived_from="PINT" determined="infoFile" id="PINT" location="0xa0004000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc11u68_project.tmpl b/tools/export/codered_lpc11u68_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc11u68_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc1549_cproject.tmpl b/tools/export/codered_lpc1549_cproject.tmpl
new file mode 100644
index 0000000..73529ef
--- /dev/null
+++ b/tools/export/codered_lpc1549_cproject.tmpl
@@ -0,0 +1,69 @@
+{% extends "codered_cproject_cortexm3_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc15xx.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC15xx_256K.cfx" property_3="NXP" property_4="LPC1549" property_count="5" version="70200"/>
+<infoList vendor="NXP">
+<info chip="LPC1549" connectscript="LPC15RunBootRomConnect.scp" flash_driver="LPC15xx_256K.cfx" match_id="0x0" name="LPC1549" resetscript="LPC15RunBootRomReset.scp" stub="crt_emu_cm3_gen">
+<chip>
+<name>LPC1549</name>
+<family>LPC15xx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash256" location="0x0" size="0x40000"/>
+<memoryInstance derived_from="RAM" id="Ram0_16" location="0x2000000" size="0x4000"/>
+<memoryInstance derived_from="RAM" id="Ram1_16" location="0x2004000" size="0x4000"/>
+<memoryInstance derived_from="RAM" id="Ram2_4" location="0x2008000" size="0x1000"/>
+<peripheralInstance derived_from="LPC15_MPU" determined="infoFile" id="MPU" location="0xe000ed90"/>
+<peripheralInstance derived_from="LPC15_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="LPC15_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="LPC15_ITM" determined="infoFile" id="ITM" location="0xe0000000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x1c000000"/>
+<peripheralInstance derived_from="DMA" determined="infoFile" id="DMA" location="0x1c004000"/>
+<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x1c00c000"/>
+<peripheralInstance derived_from="CRC" determined="infoFile" id="CRC" location="0x1c010000"/>
+<peripheralInstance derived_from="SCT0" determined="infoFile" id="SCT0" location="0x1c018000"/>
+<peripheralInstance derived_from="SCT1" determined="infoFile" id="SCT1" location="0x1c01c000"/>
+<peripheralInstance derived_from="SCT2" determined="infoFile" id="SCT2" location="0x1c020000"/>
+<peripheralInstance derived_from="SCT3" determined="infoFile" id="SCT3" location="0x1c024000"/>
+<peripheralInstance derived_from="ADC0" determined="infoFile" id="ADC0" location="0x40000000"/>
+<peripheralInstance derived_from="DAC" determined="infoFile" id="DAC" location="0x40004000"/>
+<peripheralInstance derived_from="ACMP" determined="infoFile" id="ACMP" location="0x40008000"/>
+<peripheralInstance derived_from="INMUX" determined="infoFile" id="INMUX" location="0x40014000"/>
+<peripheralInstance derived_from="RTC" determined="infoFile" id="RTC" location="0x40028000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x4002c000"/>
+<peripheralInstance derived_from="SWM" determined="infoFile" id="SWM" location="0x40038000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x4003c000"/>
+<peripheralInstance derived_from="USART0" determined="infoFile" id="USART0" location="0x40040000"/>
+<peripheralInstance derived_from="USART1" determined="infoFile" id="USART1" location="0x40044000"/>
+<peripheralInstance derived_from="SPI0" determined="infoFile" id="SPI0" location="0x40048000"/>
+<peripheralInstance derived_from="SPI1" determined="infoFile" id="SPI1" location="0x4004c000"/>
+<peripheralInstance derived_from="I2C0" determined="infoFile" id="I2C0" location="0x40050000"/>
+<peripheralInstance derived_from="QEI" determined="infoFile" id="QEI" location="0x40058000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40074000"/>
+<peripheralInstance derived_from="ADC1" determined="infoFile" id="ADC1" location="0x40080000"/>
+<peripheralInstance derived_from="MRT" determined="infoFile" id="MRT" location="0x400a0000"/>
+<peripheralInstance derived_from="PINT" determined="infoFile" id="PINT" location="0x400a4000"/>
+<peripheralInstance derived_from="GINT0" determined="infoFile" id="GINT0" location="0x400a8000"/>
+<peripheralInstance derived_from="GINT1" determined="infoFile" id="GINT1" location="0x400ac000"/>
+<peripheralInstance derived_from="RIT" determined="infoFile" id="RIT" location="0x400b4000"/>
+<peripheralInstance derived_from="SCTIPU" determined="infoFile" id="SCTIPU" location="0x400b8000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x400bc000"/>
+<peripheralInstance derived_from="USART2" determined="infoFile" id="USART2" location="0x400c0000"/>
+<peripheralInstance derived_from="C-CAN0" determined="infoFile" id="C-CAN0" location="0x400f0000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x400f8000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m3">Cortex-M3</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc1549_project.tmpl b/tools/export/codered_lpc1549_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc1549_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc1768_cproject.tmpl b/tools/export/codered_lpc1768_cproject.tmpl
new file mode 100644
index 0000000..b39438a
--- /dev/null
+++ b/tools/export/codered_lpc1768_cproject.tmpl
@@ -0,0 +1,79 @@
+{% extends "codered_cproject_cortexm3_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc176x.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC1768" property_count="5" version="1"/>
+<infoList vendor="NXP">
+<info chip="LPC1768" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC1768" package="lpc17_lqfp100.xml">
+<chip>
+<name>LPC1768</name>
+<family>LPC17xx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
+<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
+<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
+<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
+<peripheralInstance derived_from="LPC17_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
+<peripheralInstance derived_from="LPC17_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
+<peripheralInstance derived_from="LPC17_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
+<peripheralInstance derived_from="LPC17_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
+<peripheralInstance derived_from="LPC17_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
+<peripheralInstance derived_from="LPC17xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
+<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
+<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
+<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
+<peripheralInstance derived_from="LPC17_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
+<peripheralInstance derived_from="LPC17_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
+<peripheralInstance derived_from="LPC17_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
+<peripheralInstance derived_from="LPC17_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
+<peripheralInstance derived_from="LPC17_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
+<peripheralInstance derived_from="LPC17_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
+<peripheralInstance derived_from="LPC17_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
+<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
+<peripheralInstance derived_from="LPC17_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
+<peripheralInstance derived_from="LPC17_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
+<peripheralInstance derived_from="LPC17_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC17_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC17_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
+<peripheralInstance derived_from="LPC1x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
+<peripheralInstance derived_from="LPC17_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
+<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
+<peripheralInstance derived_from="LPC17_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
+<peripheralInstance derived_from="LPC17_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
+<peripheralInstance derived_from="LPC17_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
+<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
+<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
+<peripheralInstance derived_from="LPC17_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m3">Cortex-M3</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc1768_project.tmpl b/tools/export/codered_lpc1768_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc1768_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc4088_cproject.tmpl b/tools/export/codered_lpc4088_cproject.tmpl
new file mode 100644
index 0000000..35ffa7c
--- /dev/null
+++ b/tools/export/codered_lpc4088_cproject.tmpl
@@ -0,0 +1,1922 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC177x_8x_407x_8x_512.cfx" property_3="NXP" property_4="LPC4088" property_count="5" version="1"/>
+<infoList vendor="NXP"><info chip="LPC4088" flash_driver="LPC177x_8x_407x_8x_512.cfx" match_id="0x481D3F47" name="LPC4088" stub="crt_emu_cm3_nxp"><chip><name>LPC4088</name>
+<family>LPC407x_8x</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash512" location="0x0" size="0x80000"/>
+<memoryInstance derived_from="RAM" id="RamLoc64" location="0x10000000" size="0x10000"/>
+<memoryInstance derived_from="RAM" id="RamPeriph32" location="0x20000000" size="0x8000"/>
+<prog_flash blocksz="0x1000" location="0x0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
+<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
+<peripheralInstance derived_from="V7M_MPU" id="MPU" location="0xe000ed90"/>
+<peripheralInstance derived_from="V7M_NVIC" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V7M_DCR" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="V7M_ITM" id="ITM" location="0xe0000000"/>
+<peripheralInstance derived_from="FLASHCTRL" id="FLASHCTRL" location="0x200000"/>
+<peripheralInstance derived_from="GPDMA" id="GPDMA" location="0x20080000"/>
+<peripheralInstance derived_from="ETHERNET" id="ETHERNET" location="0x20084000"/>
+<peripheralInstance derived_from="LCD" id="LCD" location="0x20088000"/>
+<peripheralInstance derived_from="USB" id="USB" location="0x2008c000"/>
+<peripheralInstance derived_from="CRC" id="CRC" location="0x20090000"/>
+<peripheralInstance derived_from="GPIO" id="GPIO" location="0x20098000"/>
+<peripheralInstance derived_from="EMC" id="EMC" location="0x2009c000"/>
+<peripheralInstance derived_from="WWDT" id="WWDT" location="0x40000000"/>
+<peripheralInstance derived_from="TIMER0" id="TIMER0" location="0x40004000"/>
+<peripheralInstance derived_from="TIMER1" id="TIMER1" location="0x40008000"/>
+<peripheralInstance derived_from="UART0" id="UART0" location="0x4000c000"/>
+<peripheralInstance derived_from="UART1" id="UART1" location="0x40010000"/>
+<peripheralInstance derived_from="PWM0" id="PWM0" location="0x40014000"/>
+<peripheralInstance derived_from="PWM1" id="PWM1" location="0x40018000"/>
+<peripheralInstance derived_from="I2C0" id="I2C0" location="0x4001c000"/>
+<peripheralInstance derived_from="COMPARATOR" id="COMPARATOR" location="0x40020000"/>
+<peripheralInstance derived_from="RTC" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="GPIOINT" id="GPIOINT" location="0x40028080"/>
+<peripheralInstance derived_from="IOCON" id="IOCON" location="0x4002c000"/>
+<peripheralInstance derived_from="SSP1" id="SSP1" location="0x40030000"/>
+<peripheralInstance derived_from="ADC" id="ADC" location="0x40034000"/>
+<peripheralInstance derived_from="CANAFRAM" id="CANAFRAM" location="0x40038000"/>
+<peripheralInstance derived_from="CANAF" id="CANAF" location="0x4003c000"/>
+<peripheralInstance derived_from="CCAN" id="CCAN" location="0x40040000"/>
+<peripheralInstance derived_from="CAN1" id="CAN1" location="0x40044000"/>
+<peripheralInstance derived_from="CAN2" id="CAN2" location="0x40048000"/>
+<peripheralInstance derived_from="I2C1" id="I2C1" location="0x4005c000"/>
+<peripheralInstance derived_from="SSP0" id="SSP0" location="0x40088000"/>
+<peripheralInstance derived_from="DAC" id="DAC" location="0x4008c000"/>
+<peripheralInstance derived_from="TIMER2" id="TIMER2" location="0x40090000"/>
+<peripheralInstance derived_from="TIMER3" id="TIMER3" location="0x40094000"/>
+<peripheralInstance derived_from="UART2" id="UART2" location="0x40098000"/>
+<peripheralInstance derived_from="UART3" id="UART3" location="0x4009c000"/>
+<peripheralInstance derived_from="I2C2" id="I2C2" location="0x400a0000"/>
+<peripheralInstance derived_from="UART4" id="UART4" location="0x400a4000"/>
+<peripheralInstance derived_from="I2S" id="I2S" location="0x400a8000"/>
+<peripheralInstance derived_from="SSP2" id="SSP2" location="0x400ac000"/>
+<peripheralInstance derived_from="MCPWM" id="MCPWM" location="0x400b8000"/>
+<peripheralInstance derived_from="QEI" id="QEI" location="0x400bc000"/>
+<peripheralInstance derived_from="SDMMC" id="SDMMC" location="0x400c0000"/>
+<peripheralInstance derived_from="SYSCON" id="SYSCON" location="0x400fc000"/>
+</chip>
+<processor><name gcc_name="cortex-m4">Cortex-M4</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpc407x_8x_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>
+
+
+
diff --git a/tools/export/codered_lpc4088_dm_cproject.tmpl b/tools/export/codered_lpc4088_dm_cproject.tmpl
new file mode 100644
index 0000000..35ffa7c
--- /dev/null
+++ b/tools/export/codered_lpc4088_dm_cproject.tmpl
@@ -0,0 +1,1922 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC177x_8x_407x_8x_512.cfx" property_3="NXP" property_4="LPC4088" property_count="5" version="1"/>
+<infoList vendor="NXP"><info chip="LPC4088" flash_driver="LPC177x_8x_407x_8x_512.cfx" match_id="0x481D3F47" name="LPC4088" stub="crt_emu_cm3_nxp"><chip><name>LPC4088</name>
+<family>LPC407x_8x</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash512" location="0x0" size="0x80000"/>
+<memoryInstance derived_from="RAM" id="RamLoc64" location="0x10000000" size="0x10000"/>
+<memoryInstance derived_from="RAM" id="RamPeriph32" location="0x20000000" size="0x8000"/>
+<prog_flash blocksz="0x1000" location="0x0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
+<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
+<peripheralInstance derived_from="V7M_MPU" id="MPU" location="0xe000ed90"/>
+<peripheralInstance derived_from="V7M_NVIC" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V7M_DCR" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="V7M_ITM" id="ITM" location="0xe0000000"/>
+<peripheralInstance derived_from="FLASHCTRL" id="FLASHCTRL" location="0x200000"/>
+<peripheralInstance derived_from="GPDMA" id="GPDMA" location="0x20080000"/>
+<peripheralInstance derived_from="ETHERNET" id="ETHERNET" location="0x20084000"/>
+<peripheralInstance derived_from="LCD" id="LCD" location="0x20088000"/>
+<peripheralInstance derived_from="USB" id="USB" location="0x2008c000"/>
+<peripheralInstance derived_from="CRC" id="CRC" location="0x20090000"/>
+<peripheralInstance derived_from="GPIO" id="GPIO" location="0x20098000"/>
+<peripheralInstance derived_from="EMC" id="EMC" location="0x2009c000"/>
+<peripheralInstance derived_from="WWDT" id="WWDT" location="0x40000000"/>
+<peripheralInstance derived_from="TIMER0" id="TIMER0" location="0x40004000"/>
+<peripheralInstance derived_from="TIMER1" id="TIMER1" location="0x40008000"/>
+<peripheralInstance derived_from="UART0" id="UART0" location="0x4000c000"/>
+<peripheralInstance derived_from="UART1" id="UART1" location="0x40010000"/>
+<peripheralInstance derived_from="PWM0" id="PWM0" location="0x40014000"/>
+<peripheralInstance derived_from="PWM1" id="PWM1" location="0x40018000"/>
+<peripheralInstance derived_from="I2C0" id="I2C0" location="0x4001c000"/>
+<peripheralInstance derived_from="COMPARATOR" id="COMPARATOR" location="0x40020000"/>
+<peripheralInstance derived_from="RTC" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="GPIOINT" id="GPIOINT" location="0x40028080"/>
+<peripheralInstance derived_from="IOCON" id="IOCON" location="0x4002c000"/>
+<peripheralInstance derived_from="SSP1" id="SSP1" location="0x40030000"/>
+<peripheralInstance derived_from="ADC" id="ADC" location="0x40034000"/>
+<peripheralInstance derived_from="CANAFRAM" id="CANAFRAM" location="0x40038000"/>
+<peripheralInstance derived_from="CANAF" id="CANAF" location="0x4003c000"/>
+<peripheralInstance derived_from="CCAN" id="CCAN" location="0x40040000"/>
+<peripheralInstance derived_from="CAN1" id="CAN1" location="0x40044000"/>
+<peripheralInstance derived_from="CAN2" id="CAN2" location="0x40048000"/>
+<peripheralInstance derived_from="I2C1" id="I2C1" location="0x4005c000"/>
+<peripheralInstance derived_from="SSP0" id="SSP0" location="0x40088000"/>
+<peripheralInstance derived_from="DAC" id="DAC" location="0x4008c000"/>
+<peripheralInstance derived_from="TIMER2" id="TIMER2" location="0x40090000"/>
+<peripheralInstance derived_from="TIMER3" id="TIMER3" location="0x40094000"/>
+<peripheralInstance derived_from="UART2" id="UART2" location="0x40098000"/>
+<peripheralInstance derived_from="UART3" id="UART3" location="0x4009c000"/>
+<peripheralInstance derived_from="I2C2" id="I2C2" location="0x400a0000"/>
+<peripheralInstance derived_from="UART4" id="UART4" location="0x400a4000"/>
+<peripheralInstance derived_from="I2S" id="I2S" location="0x400a8000"/>
+<peripheralInstance derived_from="SSP2" id="SSP2" location="0x400ac000"/>
+<peripheralInstance derived_from="MCPWM" id="MCPWM" location="0x400b8000"/>
+<peripheralInstance derived_from="QEI" id="QEI" location="0x400bc000"/>
+<peripheralInstance derived_from="SDMMC" id="SDMMC" location="0x400c0000"/>
+<peripheralInstance derived_from="SYSCON" id="SYSCON" location="0x400fc000"/>
+</chip>
+<processor><name gcc_name="cortex-m4">Cortex-M4</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpc407x_8x_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>
+
+
+
diff --git a/tools/export/codered_lpc4088_dm_project.tmpl b/tools/export/codered_lpc4088_dm_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc4088_dm_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc4088_project.tmpl b/tools/export/codered_lpc4088_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc4088_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc4330_m4_cproject.tmpl b/tools/export/codered_lpc4330_m4_cproject.tmpl
new file mode 100644
index 0000000..bd1f896
--- /dev/null
+++ b/tools/export/codered_lpc4330_m4_cproject.tmpl
@@ -0,0 +1,1924 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC4330" property_count="5" version="1"/>
+<infoList vendor="NXP">
+<info chip="LPC4330" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC4330" package="LPC43_lqfp100.xml">
+<chip>
+<name>LPC4330</name>
+<family>LPC43xx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
+<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
+<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
+<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
+<peripheralInstance derived_from="LPC43_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
+<peripheralInstance derived_from="LPC43_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
+<peripheralInstance derived_from="LPC43_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
+<peripheralInstance derived_from="LPC43_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
+<peripheralInstance derived_from="LPC43_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
+<peripheralInstance derived_from="LPC43xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
+<peripheralInstance derived_from="LPC43xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
+<peripheralInstance derived_from="LPC43xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
+<peripheralInstance derived_from="LPC43xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
+<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
+<peripheralInstance derived_from="LPC43_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
+<peripheralInstance derived_from="LPC43_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
+<peripheralInstance derived_from="LPC43_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
+<peripheralInstance derived_from="LPC43_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
+<peripheralInstance derived_from="LPC43_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
+<peripheralInstance derived_from="LPC43_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
+<peripheralInstance derived_from="LPC43_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
+<peripheralInstance derived_from="LPC43_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
+<peripheralInstance derived_from="LPC43_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
+<peripheralInstance derived_from="LPC43_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
+<peripheralInstance derived_from="LPC43_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
+<peripheralInstance derived_from="LPC43_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
+<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
+<peripheralInstance derived_from="LPC43_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
+<peripheralInstance derived_from="LPC43_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
+<peripheralInstance derived_from="LPC43_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC43_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC43_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
+<peripheralInstance derived_from="LPC4x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
+<peripheralInstance derived_from="LPC43_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
+<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
+<peripheralInstance derived_from="LPC43_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
+<peripheralInstance derived_from="LPC43_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
+<peripheralInstance derived_from="LPC43_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
+<peripheralInstance derived_from="LPC43_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
+<peripheralInstance derived_from="LPC43_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
+<peripheralInstance derived_from="LPC43_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m4">Cortex-M4</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>
+
+
diff --git a/tools/export/codered_lpc4330_m4_project.tmpl b/tools/export/codered_lpc4330_m4_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc4330_m4_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpc824_cproject.tmpl b/tools/export/codered_lpc824_cproject.tmpl
new file mode 100644
index 0000000..ee22cb6
--- /dev/null
+++ b/tools/export/codered_lpc824_cproject.tmpl
@@ -0,0 +1,53 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}startup_LPC824_CR.cpp{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC800_32.cfx" property_3="NXP" property_4="LPC824" property_count="5" version="70200"/>
+<infoList vendor="NXP"><info chip="LPC824" flash_driver="LPC800_32.cfx" match_id="0x0" name="LPC824" stub="crt_emu_cm3_gen"><chip><name>LPC824</name>
+<family>LPC82x</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash32" location="0x0" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40000000"/>
+<peripheralInstance derived_from="MRT" determined="infoFile" id="MRT" location="0x40004000"/>
+<peripheralInstance derived_from="WKT" determined="infoFile" id="WKT" location="0x40008000"/>
+<peripheralInstance derived_from="SWM" determined="infoFile" id="SWM" location="0x4000c000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40020000"/>
+<peripheralInstance derived_from="CMP" determined="infoFile" id="CMP" location="0x40024000"/>
+<peripheralInstance derived_from="DMATRIGMUX" determined="infoFile" id="DMATRIGMUX" location="0x40028000"/>
+<peripheralInstance derived_from="INPUTMUX" determined="infoFile" id="INPUTMUX" location="0x4002c000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="I2C0" determined="infoFile" id="I2C0" location="0x40050000"/>
+<peripheralInstance derived_from="I2C1" determined="infoFile" id="I2C1" location="0x40054000"/>
+<peripheralInstance derived_from="SPI0" determined="infoFile" id="SPI0" location="0x40058000"/>
+<peripheralInstance derived_from="SPI1" determined="infoFile" id="SPI1" location="0x4005c000"/>
+<peripheralInstance derived_from="USART0" determined="infoFile" id="USART0" location="0x40064000"/>
+<peripheralInstance derived_from="USART1" determined="infoFile" id="USART1" location="0x40068000"/>
+<peripheralInstance derived_from="USART2" determined="infoFile" id="USART2" location="0x4006c000"/>
+<peripheralInstance derived_from="I2C2" determined="infoFile" id="I2C2" location="0x40070000"/>
+<peripheralInstance derived_from="I2C3" determined="infoFile" id="I2C3" location="0x40074000"/>
+<peripheralInstance derived_from="CRC" determined="infoFile" id="CRC" location="0x50000000"/>
+<peripheralInstance derived_from="SCT" determined="infoFile" id="SCT" location="0x50004000"/>
+<peripheralInstance derived_from="DMA" determined="infoFile" id="DMA" location="0x50008000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0xa0000000"/>
+<peripheralInstance derived_from="PIN-INT" determined="infoFile" id="PIN-INT" location="0xa0004000"/>
+</chip>
+<processor><name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC82x_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpc824_project.tmpl b/tools/export/codered_lpc824_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpc824_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_lpccappuccino_cproject.tmpl b/tools/export/codered_lpccappuccino_cproject.tmpl
new file mode 100644
index 0000000..69256f8
--- /dev/null
+++ b/tools/export/codered_lpccappuccino_cproject.tmpl
@@ -0,0 +1,51 @@
+{% extends "codered_cproject_cortexm0_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U37/501" property_count="5" version="70002"/>
+<infoList vendor="NXP">
+<info chip="LPC11U37/501" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U37/501" stub="crt_emu_lpc11_13_nxp">
+<chip>
+<name>LPC11U37/501</name>
+<family>LPC11Uxx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
+<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
+<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
+<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
+<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
+<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
+<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
+<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
+<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
+<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
+<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
+<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
+<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
+<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
+<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
+<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
+<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
+<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
+<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
+<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
+<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
+<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
+<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m0">Cortex-M0</name>
+<family>Cortex-M</family>
+</processor>
+<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_lpccappuccino_project.tmpl b/tools/export/codered_lpccappuccino_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_lpccappuccino_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/codered_project_common.tmpl b/tools/export/codered_project_common.tmpl
new file mode 100644
index 0000000..42ef438
--- /dev/null
+++ b/tools/export/codered_project_common.tmpl
@@ -0,0 +1,84 @@
+
+
+ {{name}}
+ This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-Code-Red
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/{{name}}/Debug}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
+
diff --git a/tools/export/codered_ublox_c027_cproject.tmpl b/tools/export/codered_ublox_c027_cproject.tmpl
new file mode 100644
index 0000000..b39438a
--- /dev/null
+++ b/tools/export/codered_ublox_c027_cproject.tmpl
@@ -0,0 +1,79 @@
+{% extends "codered_cproject_cortexm3_common.tmpl" %}
+
+{% block startup_file %}cr_startup_lpc176x.c{% endblock %}
+
+{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
+<TargetConfig>
+<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC1768" property_count="5" version="1"/>
+<infoList vendor="NXP">
+<info chip="LPC1768" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC1768" package="lpc17_lqfp100.xml">
+<chip>
+<name>LPC1768</name>
+<family>LPC17xx</family>
+<vendor>NXP (formerly Philips)</vendor>
+<reset board="None" core="Real" sys="Real"/>
+<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
+<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
+<memory id="RAM" type="RAM"/>
+<memory id="Periph" is_volatile="true" type="Peripheral"/>
+<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
+<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
+<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
+<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
+<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
+<peripheralInstance derived_from="LPC17_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
+<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
+<peripheralInstance derived_from="LPC17_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
+<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
+<peripheralInstance derived_from="LPC17_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
+<peripheralInstance derived_from="LPC17_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
+<peripheralInstance derived_from="LPC17_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
+<peripheralInstance derived_from="LPC17xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
+<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
+<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
+<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
+<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
+<peripheralInstance derived_from="LPC17_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
+<peripheralInstance derived_from="LPC17_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
+<peripheralInstance derived_from="LPC17_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
+<peripheralInstance derived_from="LPC17_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
+<peripheralInstance derived_from="LPC17_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
+<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
+<peripheralInstance derived_from="LPC17_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
+<peripheralInstance derived_from="LPC17_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
+<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
+<peripheralInstance derived_from="LPC17_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
+<peripheralInstance derived_from="LPC17_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
+<peripheralInstance derived_from="LPC17_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC17_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
+<peripheralInstance derived_from="LPC17_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
+<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
+<peripheralInstance derived_from="LPC1x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
+<peripheralInstance derived_from="LPC17_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
+<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
+<peripheralInstance derived_from="LPC17_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
+<peripheralInstance derived_from="LPC17_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
+<peripheralInstance derived_from="LPC17_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
+<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
+<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
+<peripheralInstance derived_from="LPC17_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
+</chip>
+<processor>
+<name gcc_name="cortex-m3">Cortex-M3</name>
+<family>Cortex-M</family>
+</processor>
+<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
+</info>
+</infoList>
+</TargetConfig>{% endblock %}
diff --git a/tools/export/codered_ublox_c027_project.tmpl b/tools/export/codered_ublox_c027_project.tmpl
new file mode 100644
index 0000000..d77c507
--- /dev/null
+++ b/tools/export/codered_ublox_c027_project.tmpl
@@ -0,0 +1 @@
+{% extends "codered_project_common.tmpl" %}
diff --git a/tools/export/coide.py b/tools/export/coide.py
new file mode 100644
index 0000000..50ecc38
--- /dev/null
+++ b/tools/export/coide.py
@@ -0,0 +1,110 @@
+"""
+mbed SDK
+Copyright (c) 2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename
+
+
+class CoIDE(Exporter):
+ NAME = 'CoIDE'
+ TOOLCHAIN = 'GCC_ARM'
+
+ TARGETS = [
+ 'KL25Z',
+ 'KL05Z',
+ 'LPC1768',
+ 'ARCH_PRO',
+ 'ARCH_MAX',
+ 'UBLOX_C027',
+ 'NUCLEO_L053R8',
+ 'NUCLEO_L152RE',
+ 'NUCLEO_F030R8',
+ 'NUCLEO_F042K6',
+ 'NUCLEO_F070RB',
+ 'NUCLEO_F072RB',
+ 'NUCLEO_F091RC',
+ 'NUCLEO_F103RB',
+ 'NUCLEO_F302R8',
+ 'NUCLEO_F303K8',
+ 'NUCLEO_F303RE',
+ 'NUCLEO_F334R8',
+ 'NUCLEO_F401RE',
+ 'NUCLEO_F410RB',
+ 'NUCLEO_F411RE',
+ 'NUCLEO_F446RE',
+ 'DISCO_L053C8',
+ 'DISCO_F051R8',
+ 'DISCO_F100RB',
+ 'DISCO_F303VC',
+ 'DISCO_F334C8',
+ 'DISCO_F401VC',
+ 'DISCO_F407VG',
+ 'DISCO_F429ZI',
+ 'DISCO_F469NI',
+ 'MTS_MDOT_F405RG',
+ 'MTS_MDOT_F411RE',
+ 'MOTE_L152RC',
+ 'NZ32_SC151',
+ ]
+
+ # seems like CoIDE currently supports only one type
+ FILE_TYPES = {
+ 'c_sources':'1',
+ 'cpp_sources':'1',
+ 's_sources':'1'
+ }
+ FILE_TYPES2 = {
+ 'headers':'1'
+ }
+
+ def generate(self):
+ self.resources.win_to_unix()
+ source_files = []
+ for r_type, n in CoIDE.FILE_TYPES.iteritems():
+ for file in getattr(self.resources, r_type):
+ source_files.append({
+ 'name': basename(file), 'type': n, 'path': file
+ })
+ header_files = []
+ for r_type, n in CoIDE.FILE_TYPES2.iteritems():
+ for file in getattr(self.resources, r_type):
+ header_files.append({
+ 'name': basename(file), 'type': n, 'path': file
+ })
+
+ libraries = []
+ for lib in self.resources.libraries:
+ l, _ = splitext(basename(lib))
+ libraries.append(l[3:])
+
+ if self.resources.linker_script is None:
+ self.resources.linker_script = ''
+
+ ctx = {
+ 'name': self.program_name,
+ 'source_files': source_files,
+ 'header_files': header_files,
+ 'include_paths': self.resources.inc_dirs,
+ 'scatter_file': self.resources.linker_script,
+ 'library_paths': self.resources.lib_dirs,
+ 'object_files': self.resources.objects,
+ 'libraries': libraries,
+ 'symbols': self.get_symbols()
+ }
+ target = self.target.lower()
+
+ # Project file
+ self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.program_name)
diff --git a/tools/export/coide_arch_max.coproj.tmpl b/tools/export/coide_arch_max.coproj.tmpl
new file mode 100644
index 0000000..a9824a6
--- /dev/null
+++ b/tools/export/coide_arch_max.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_arch_pro.coproj.tmpl b/tools/export/coide_arch_pro.coproj.tmpl
new file mode 100644
index 0000000..687d38e
--- /dev/null
+++ b/tools/export/coide_arch_pro.coproj.tmpl
@@ -0,0 +1,88 @@
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f051r8.coproj.tmpl b/tools/export/coide_disco_f051r8.coproj.tmpl
new file mode 100644
index 0000000..ee4100b
--- /dev/null
+++ b/tools/export/coide_disco_f051r8.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f100rb.coproj.tmpl b/tools/export/coide_disco_f100rb.coproj.tmpl
new file mode 100644
index 0000000..f289ff5
--- /dev/null
+++ b/tools/export/coide_disco_f100rb.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f303vc.coproj.tmpl b/tools/export/coide_disco_f303vc.coproj.tmpl
new file mode 100644
index 0000000..fbe5820
--- /dev/null
+++ b/tools/export/coide_disco_f303vc.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f334c8.coproj.tmpl b/tools/export/coide_disco_f334c8.coproj.tmpl
new file mode 100644
index 0000000..d7dd7ec
--- /dev/null
+++ b/tools/export/coide_disco_f334c8.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f401vc.coproj.tmpl b/tools/export/coide_disco_f401vc.coproj.tmpl
new file mode 100644
index 0000000..8056335
--- /dev/null
+++ b/tools/export/coide_disco_f401vc.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f407vg.coproj.tmpl b/tools/export/coide_disco_f407vg.coproj.tmpl
new file mode 100644
index 0000000..fbde49a
--- /dev/null
+++ b/tools/export/coide_disco_f407vg.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_f429zi.coproj.tmpl b/tools/export/coide_disco_f429zi.coproj.tmpl
new file mode 100644
index 0000000..cff2c62
--- /dev/null
+++ b/tools/export/coide_disco_f429zi.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_disco_l053c8.coproj.tmpl b/tools/export/coide_disco_l053c8.coproj.tmpl
new file mode 100644
index 0000000..8857942
--- /dev/null
+++ b/tools/export/coide_disco_l053c8.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_kl05z.coproj.tmpl b/tools/export/coide_kl05z.coproj.tmpl
new file mode 100644
index 0000000..3127e47
--- /dev/null
+++ b/tools/export/coide_kl05z.coproj.tmpl
@@ -0,0 +1,88 @@
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_kl25z.coproj.tmpl b/tools/export/coide_kl25z.coproj.tmpl
new file mode 100644
index 0000000..7a9b745
--- /dev/null
+++ b/tools/export/coide_kl25z.coproj.tmpl
@@ -0,0 +1,88 @@
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_lpc1768.coproj.tmpl b/tools/export/coide_lpc1768.coproj.tmpl
new file mode 100644
index 0000000..687d38e
--- /dev/null
+++ b/tools/export/coide_lpc1768.coproj.tmpl
@@ -0,0 +1,88 @@
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_mote_l152rc.coproj.tmpl b/tools/export/coide_mote_l152rc.coproj.tmpl
new file mode 100644
index 0000000..91ec35e
--- /dev/null
+++ b/tools/export/coide_mote_l152rc.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_mts_mdot_f405rg.coproj.tmpl b/tools/export/coide_mts_mdot_f405rg.coproj.tmpl
new file mode 100644
index 0000000..a73727f
--- /dev/null
+++ b/tools/export/coide_mts_mdot_f405rg.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_mts_mdot_f411re.coproj.tmpl b/tools/export/coide_mts_mdot_f411re.coproj.tmpl
new file mode 100644
index 0000000..adcd169
--- /dev/null
+++ b/tools/export/coide_mts_mdot_f411re.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f030r8.coproj.tmpl b/tools/export/coide_nucleo_f030r8.coproj.tmpl
new file mode 100644
index 0000000..f6e121f
--- /dev/null
+++ b/tools/export/coide_nucleo_f030r8.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f042k6.coproj.tmpl b/tools/export/coide_nucleo_f042k6.coproj.tmpl
new file mode 100644
index 0000000..8d150d0
--- /dev/null
+++ b/tools/export/coide_nucleo_f042k6.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f070rb.coproj.tmpl b/tools/export/coide_nucleo_f070rb.coproj.tmpl
new file mode 100644
index 0000000..9756fd2
--- /dev/null
+++ b/tools/export/coide_nucleo_f070rb.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f072rb.coproj.tmpl b/tools/export/coide_nucleo_f072rb.coproj.tmpl
new file mode 100644
index 0000000..3687c43
--- /dev/null
+++ b/tools/export/coide_nucleo_f072rb.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f091rc.coproj.tmpl b/tools/export/coide_nucleo_f091rc.coproj.tmpl
new file mode 100644
index 0000000..e77d738
--- /dev/null
+++ b/tools/export/coide_nucleo_f091rc.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f103rb.coproj.tmpl b/tools/export/coide_nucleo_f103rb.coproj.tmpl
new file mode 100644
index 0000000..ce2c8fd
--- /dev/null
+++ b/tools/export/coide_nucleo_f103rb.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f302r8.coproj.tmpl b/tools/export/coide_nucleo_f302r8.coproj.tmpl
new file mode 100644
index 0000000..56d6cc7
--- /dev/null
+++ b/tools/export/coide_nucleo_f302r8.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f303re.coproj.tmpl b/tools/export/coide_nucleo_f303re.coproj.tmpl
new file mode 100644
index 0000000..0995181
--- /dev/null
+++ b/tools/export/coide_nucleo_f303re.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f334r8.coproj.tmpl b/tools/export/coide_nucleo_f334r8.coproj.tmpl
new file mode 100644
index 0000000..d7dd7ec
--- /dev/null
+++ b/tools/export/coide_nucleo_f334r8.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f401re.coproj.tmpl b/tools/export/coide_nucleo_f401re.coproj.tmpl
new file mode 100644
index 0000000..ffc886f
--- /dev/null
+++ b/tools/export/coide_nucleo_f401re.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f410rb.coproj.tmpl b/tools/export/coide_nucleo_f410rb.coproj.tmpl
new file mode 100644
index 0000000..92f3917
--- /dev/null
+++ b/tools/export/coide_nucleo_f410rb.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f411re.coproj.tmpl b/tools/export/coide_nucleo_f411re.coproj.tmpl
new file mode 100644
index 0000000..fb222fb
--- /dev/null
+++ b/tools/export/coide_nucleo_f411re.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_f446re.coproj.tmpl b/tools/export/coide_nucleo_f446re.coproj.tmpl
new file mode 100644
index 0000000..91bbd30
--- /dev/null
+++ b/tools/export/coide_nucleo_f446re.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_l053r8.coproj.tmpl b/tools/export/coide_nucleo_l053r8.coproj.tmpl
new file mode 100644
index 0000000..fd18d26
--- /dev/null
+++ b/tools/export/coide_nucleo_l053r8.coproj.tmpl
@@ -0,0 +1,168 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nucleo_l152re.coproj.tmpl b/tools/export/coide_nucleo_l152re.coproj.tmpl
new file mode 100644
index 0000000..cb98166
--- /dev/null
+++ b/tools/export/coide_nucleo_l152re.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_nz32_sc151.coproj.tmpl b/tools/export/coide_nz32_sc151.coproj.tmpl
new file mode 100644
index 0000000..7341c95
--- /dev/null
+++ b/tools/export/coide_nz32_sc151.coproj.tmpl
@@ -0,0 +1,90 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/coide_ublox_c027.coproj.tmpl b/tools/export/coide_ublox_c027.coproj.tmpl
new file mode 100644
index 0000000..687d38e
--- /dev/null
+++ b/tools/export/coide_ublox_c027.coproj.tmpl
@@ -0,0 +1,88 @@
+
+
+
+
+
+
+
+
+
+
+ {% for path in include_paths %} {% endfor %}
+
+
+ {% for s in symbols %} {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for lib in libraries %}
+
+ {% endfor %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% for file in source_files %}
+
+ {% endfor %}
+ {% for file in header_files %}
+
+ {% endfor %}
+
+
diff --git a/tools/export/ds5_5.py b/tools/export/ds5_5.py
new file mode 100644
index 0000000..b6b9d3e
--- /dev/null
+++ b/tools/export/ds5_5.py
@@ -0,0 +1,67 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import basename
+
+
class DS5_5(Exporter):
    """Exporter that produces ARM DS-5 (Eclipse-based) project files."""

    NAME = 'DS5'

    # Boards this exporter can generate projects for.
    TARGETS = [
        'LPC1768',
        'LPC11U24',
        'LPC812',
        'UBLOX_C027',
        'ARCH_PRO',
        'RZ_A1H',
    ]

    # Targets that link against the MicroLib C library.
    USING_MICROLIB = [
        'LPC812',
    ]

    # Resource attribute name -> DS-5 numeric file-type code.
    FILE_TYPES = {
        'c_sources': '1',
        'cpp_sources': '8',
        's_sources': '2'
    }

    def get_toolchain(self):
        """Pick the MicroLib toolchain where required, plain ARM otherwise."""
        if self.target in self.USING_MICROLIB:
            return 'uARM'
        return 'ARM'

    def generate(self):
        """Render the .project, .cproject and launch files for the target."""
        source_files = [
            {'name': basename(path), 'type': type_code, 'path': path}
            for r_type, type_code in DS5_5.FILE_TYPES.iteritems()
            for path in getattr(self.resources, r_type)
        ]

        ctx = {
            'name': self.program_name,
            'include_paths': self.resources.inc_dirs,
            'scatter_file': self.resources.linker_script,
            'object_files': self.resources.objects + self.resources.libraries,
            'source_files': source_files,
            'symbols': self.get_symbols()
        }

        target = self.target.lower()
        # One template per generated artefact; template names are keyed on the target.
        self.gen_file('ds5_5_%s.project.tmpl' % target, ctx, '.project')
        self.gen_file('ds5_5_%s.cproject.tmpl' % target, ctx, '.cproject')
        self.gen_file('ds5_5_%s.launch.tmpl' % target, ctx, 'ds5_%s.launch' % target)
diff --git a/tools/export/ds5_5_arch_pro.cproject.tmpl b/tools/export/ds5_5_arch_pro.cproject.tmpl
new file mode 100644
index 0000000..5a5fff4
--- /dev/null
+++ b/tools/export/ds5_5_arch_pro.cproject.tmpl
@@ -0,0 +1,115 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_arch_pro.launch.tmpl b/tools/export/ds5_5_arch_pro.launch.tmpl
new file mode 100644
index 0000000..74bc964
--- /dev/null
+++ b/tools/export/ds5_5_arch_pro.launch.tmpl
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_arch_pro.project.tmpl b/tools/export/ds5_5_arch_pro.project.tmpl
new file mode 100644
index 0000000..4f892f3
--- /dev/null
+++ b/tools/export/ds5_5_arch_pro.project.tmpl
@@ -0,0 +1,83 @@
+
+
+ {{name}}_ds5_lpc1768
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/ds5_lpc1768/Build}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/ds5_5_lpc11u24.cproject.tmpl b/tools/export/ds5_5_lpc11u24.cproject.tmpl
new file mode 100644
index 0000000..32adc51
--- /dev/null
+++ b/tools/export/ds5_5_lpc11u24.cproject.tmpl
@@ -0,0 +1,103 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_lpc11u24.launch.tmpl b/tools/export/ds5_5_lpc11u24.launch.tmpl
new file mode 100644
index 0000000..868cba3
--- /dev/null
+++ b/tools/export/ds5_5_lpc11u24.launch.tmpl
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_lpc11u24.project.tmpl b/tools/export/ds5_5_lpc11u24.project.tmpl
new file mode 100644
index 0000000..8ad8f0a
--- /dev/null
+++ b/tools/export/ds5_5_lpc11u24.project.tmpl
@@ -0,0 +1,83 @@
+
+
+ {{name}}_ds5_lpc11u24
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/ds5_lpc11u24/Build}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/ds5_5_lpc1768.cproject.tmpl b/tools/export/ds5_5_lpc1768.cproject.tmpl
new file mode 100644
index 0000000..5a5fff4
--- /dev/null
+++ b/tools/export/ds5_5_lpc1768.cproject.tmpl
@@ -0,0 +1,115 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_lpc1768.launch.tmpl b/tools/export/ds5_5_lpc1768.launch.tmpl
new file mode 100644
index 0000000..74bc964
--- /dev/null
+++ b/tools/export/ds5_5_lpc1768.launch.tmpl
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_lpc1768.project.tmpl b/tools/export/ds5_5_lpc1768.project.tmpl
new file mode 100644
index 0000000..4f892f3
--- /dev/null
+++ b/tools/export/ds5_5_lpc1768.project.tmpl
@@ -0,0 +1,83 @@
+
+
+ {{name}}_ds5_lpc1768
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/ds5_lpc1768/Build}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/ds5_5_lpc812.cproject.tmpl b/tools/export/ds5_5_lpc812.cproject.tmpl
new file mode 100644
index 0000000..96f0dc9
--- /dev/null
+++ b/tools/export/ds5_5_lpc812.cproject.tmpl
@@ -0,0 +1,103 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_lpc812.launch.tmpl b/tools/export/ds5_5_lpc812.launch.tmpl
new file mode 100644
index 0000000..57ab0ba
--- /dev/null
+++ b/tools/export/ds5_5_lpc812.launch.tmpl
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_lpc812.project.tmpl b/tools/export/ds5_5_lpc812.project.tmpl
new file mode 100644
index 0000000..2e9c358
--- /dev/null
+++ b/tools/export/ds5_5_lpc812.project.tmpl
@@ -0,0 +1,83 @@
+
+
+ {{name}}_ds5_lpc812
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/ds5_lpc812/Build}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/ds5_5_rz_a1h.cproject.tmpl b/tools/export/ds5_5_rz_a1h.cproject.tmpl
new file mode 100644
index 0000000..44e66ca
--- /dev/null
+++ b/tools/export/ds5_5_rz_a1h.cproject.tmpl
@@ -0,0 +1,115 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_rz_a1h.launch.tmpl b/tools/export/ds5_5_rz_a1h.launch.tmpl
new file mode 100644
index 0000000..a4bee75
--- /dev/null
+++ b/tools/export/ds5_5_rz_a1h.launch.tmpl
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_rz_a1h.project.tmpl b/tools/export/ds5_5_rz_a1h.project.tmpl
new file mode 100644
index 0000000..eee5209
--- /dev/null
+++ b/tools/export/ds5_5_rz_a1h.project.tmpl
@@ -0,0 +1,83 @@
+
+
+ {{name}}_ds5_rz_a1h
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/ds5_rz_a1h/Build}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/ds5_5_ublox_c027.cproject.tmpl b/tools/export/ds5_5_ublox_c027.cproject.tmpl
new file mode 100644
index 0000000..5a5fff4
--- /dev/null
+++ b/tools/export/ds5_5_ublox_c027.cproject.tmpl
@@ -0,0 +1,115 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_ublox_c027.launch.tmpl b/tools/export/ds5_5_ublox_c027.launch.tmpl
new file mode 100644
index 0000000..74bc964
--- /dev/null
+++ b/tools/export/ds5_5_ublox_c027.launch.tmpl
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/ds5_5_ublox_c027.project.tmpl b/tools/export/ds5_5_ublox_c027.project.tmpl
new file mode 100644
index 0000000..4f892f3
--- /dev/null
+++ b/tools/export/ds5_5_ublox_c027.project.tmpl
@@ -0,0 +1,83 @@
+
+
+ {{name}}_ds5_lpc1768
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.autoBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ make
+
+
+ org.eclipse.cdt.make.core.buildLocation
+ ${workspace_loc:/ds5_lpc1768/Build}
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.fullBuildTarget
+ all
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/e2studio.py b/tools/export/e2studio.py
new file mode 100644
index 0000000..66cd9de
--- /dev/null
+++ b/tools/export/e2studio.py
@@ -0,0 +1,47 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename
+
+
class E2Studio(Exporter):
    """Exporter for Renesas e2 studio (Eclipse-based, GCC ARM toolchain)."""

    NAME = 'e2 studio'
    TOOLCHAIN = 'GCC_ARM'

    TARGETS = [
        'RZ_A1H',
    ]

    def generate(self):
        """Render the project, cproject, gdbinit and OpenOCD launch files."""
        # 'libfoo.a' -> 'foo': drop the extension, then the 'lib' prefix.
        libraries = [splitext(basename(lib_path))[0][3:]
                     for lib_path in self.resources.libraries]

        ctx = {
            'name': self.program_name,
            'include_paths': self.resources.inc_dirs,
            'linker_script': self.resources.linker_script,

            'object_files': self.resources.objects,
            'libraries': libraries,
            'symbols': self.get_symbols()
        }

        target = self.target.lower()
        self.gen_file('e2studio_%s_project.tmpl' % target, ctx, '.project')
        self.gen_file('e2studio_%s_cproject.tmpl' % target, ctx, '.cproject')
        self.gen_file('e2studio_%s_gdbinit.tmpl' % target, ctx, '.gdbinit')
        self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.program_name)
diff --git a/tools/export/e2studio_launch.tmpl b/tools/export/e2studio_launch.tmpl
new file mode 100644
index 0000000..2524c4d
--- /dev/null
+++ b/tools/export/e2studio_launch.tmpl
@@ -0,0 +1,59 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/e2studio_rz_a1h_cproject.tmpl b/tools/export/e2studio_rz_a1h_cproject.tmpl
new file mode 100644
index 0000000..85dcd99
--- /dev/null
+++ b/tools/export/e2studio_rz_a1h_cproject.tmpl
@@ -0,0 +1,318 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/e2studio_rz_a1h_gdbinit.tmpl b/tools/export/e2studio_rz_a1h_gdbinit.tmpl
new file mode 100644
index 0000000..a59f78c
--- /dev/null
+++ b/tools/export/e2studio_rz_a1h_gdbinit.tmpl
@@ -0,0 +1,29 @@
+define hook-step
+mon cortex_a maskisr on
+end
+
+define hook-stepi
+mon cortex_a maskisr on
+end
+
+define hook-next
+mon cortex_a maskisr on
+end
+
+define hook-nexti
+mon cortex_a maskisr on
+end
+
+define hook-finish
+mon cortex_a maskisr on
+end
+
+define hook-stop
+mon cortex_a maskisr off
+end
+
+define hook-kill
+mon reset init
+end
+
+set mem inaccessible-by-default off
\ No newline at end of file
diff --git a/tools/export/e2studio_rz_a1h_project.tmpl b/tools/export/e2studio_rz_a1h_project.tmpl
new file mode 100644
index 0000000..0bab8dd
--- /dev/null
+++ b/tools/export/e2studio_rz_a1h_project.tmpl
@@ -0,0 +1,27 @@
+
+
+ {{name}}
+ This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-e2studio
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/emblocks.eix.tmpl b/tools/export/emblocks.eix.tmpl
new file mode 100644
index 0000000..2bf5b0b
--- /dev/null
+++ b/tools/export/emblocks.eix.tmpl
@@ -0,0 +1,57 @@
+
+
+
+
+
+
+
+
+ {% for s in include_paths %}
+ {% endfor %}
+
+
+
+
+
+
+ {% for s in libraries %}
+ {% endfor %}
+
+
+ {% for s in library_paths %}
+ {% endfor %}
+
+
+
+ {% for f in source_files %}
+ {% endfor %}
+
+
+
diff --git a/tools/export/emblocks.py b/tools/export/emblocks.py
new file mode 100644
index 0000000..d964b3d
--- /dev/null
+++ b/tools/export/emblocks.py
@@ -0,0 +1,78 @@
+"""
+mbed SDK
+Copyright (c) 2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename
+from tools.targets import TARGETS
+
# filter all the GCC_ARM targets out of the target list
gccTargets = [t.name for t in TARGETS if 'GCC_ARM' in t.supported_toolchains]
+
class IntermediateFile(Exporter):
    """Exporter producing an EmBlocks intermediate (.eix) project file."""

    NAME = 'EmBlocks'
    TOOLCHAIN = 'GCC_ARM'

    # we support all GCC targets (is handled on IDE side)
    TARGETS = gccTargets

    # Resource attribute name -> EmBlocks file-type tag.
    FILE_TYPES = {
        'headers': 'h',
        'c_sources': 'c',
        's_sources': 'a',
        'cpp_sources': 'cpp'
    }

    def generate(self):
        """Render the single .eix intermediate file for this program."""
        self.resources.win_to_unix()

        source_files = [
            {'name': path, 'type': tag}
            for r_type, tag in IntermediateFile.FILE_TYPES.iteritems()
            for path in getattr(self.resources, r_type)
        ]

        # 'libfoo.a' -> 'foo': drop the extension, then the 'lib' prefix.
        libraries = [splitext(basename(lib_path))[0][3:]
                     for lib_path in self.resources.libraries]

        # The template expects a string here, never None.
        if self.resources.linker_script is None:
            self.resources.linker_script = ''

        ctx = {
            'name': self.program_name,
            'target': self.target,
            'toolchain': self.toolchain.name,
            'source_files': source_files,
            'include_paths': self.resources.inc_dirs,
            'script_file': self.resources.linker_script,
            'library_paths': self.resources.lib_dirs,
            'libraries': libraries,
            'symbols': self.get_symbols(),
            'object_files': self.resources.objects,
            'sys_libs': self.toolchain.sys_libs,
            'cc_org': self.toolchain.cc[1:],
            'ld_org': self.toolchain.ld[1:],
            'cppc_org': self.toolchain.cppc[1:]
        }

        # EmBlocks intermediate file template
        self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.program_name)
diff --git a/tools/export/exporters.py b/tools/export/exporters.py
new file mode 100644
index 0000000..42e1f7f
--- /dev/null
+++ b/tools/export/exporters.py
@@ -0,0 +1,173 @@
+"""Just a template for subclassing"""
+import uuid, shutil, os, logging, fnmatch
+from os import walk, remove
+from os.path import join, dirname, isdir, split
+from copy import copy
+from jinja2 import Template, FileSystemLoader
+from jinja2.environment import Environment
+from contextlib import closing
+from zipfile import ZipFile, ZIP_DEFLATED
+
+from tools.utils import mkdir
+from tools.toolchains import TOOLCHAIN_CLASSES
+from tools.targets import TARGET_MAP
+
+from project_generator.generate import Generator
+from project_generator.project import Project
+from project_generator.settings import ProjectSettings
+
class OldLibrariesException(Exception):
    """Raised during export to flag out-of-date / missing library builds."""
+
class Exporter(object):
    """Base class for IDE project exporters.

    A subclass supplies NAME / TOOLCHAIN / TARGETS and a generate() method;
    this base provides resource scanning/copying and Jinja template rendering.
    """

    # Templates are looked up next to this module.
    TEMPLATE_DIR = dirname(__file__)
    # Whether relative paths keep a leading '.' (subclasses may override).
    DOT_IN_RELATIVE_PATH = False

    def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None):
        """Set up the exporter for one program/target pair.

        target             -- mbed target name (key into TARGET_MAP)
        inputDir           -- directory where generated files are written
        program_name       -- name of the exported program/project
        build_url_resolver -- callable mapping a library build URL to a dict
                              with at least 'path' and 'name' keys
        extra_symbols      -- optional extra symbols added by get_symbols()
        """
        self.inputDir = inputDir
        self.target = target
        self.program_name = program_name
        # Instantiate the toolchain declared by the subclass for this target.
        self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target])
        self.build_url_resolver = build_url_resolver
        # Jinja templates are resolved relative to this module's directory.
        jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
        self.jinja_environment = Environment(loader=jinja_loader)
        self.extra_symbols = extra_symbols

    def get_toolchain(self):
        """Return the toolchain name (subclasses set TOOLCHAIN or override)."""
        return self.TOOLCHAIN

    def __scan_and_copy(self, src_path, trg_path):
        """Scan src_path for build resources, copy each non-empty resource
        group into trg_path, and return the scanned resource set."""
        resources = self.toolchain.scan_resources(src_path)

        for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
                       'objects', 'libraries', 'linker_script',
                       'lib_builds', 'lib_refs', 'repo_files', 'hex_files', 'bin_files']:
            r = getattr(resources, r_type)
            if r:
                self.toolchain.copy_files(r, trg_path, rel_path=src_path)
        return resources

    def progen_get_project_data(self):
        """ Get ProGen project data """
        # provide default data, some tools don't require any additional
        # tool specific settings
        sources = []
        for r_type in ['c_sources', 'cpp_sources', 's_sources']:
            for file in getattr(self.resources, r_type):
                sources.append(file)

        project_data = {
            'common': {
                'sources': {
                    'Source Files': sources + self.resources.hex_files +
                    self.resources.objects + self.resources.libraries,
                },
                'includes': {
                    'Include Files': self.resources.headers,
                },
                'target': [TARGET_MAP[self.target].progen['target']],
                'macros': self.get_symbols(),
                'export_dir': [self.inputDir],
                'linker_file': [self.resources.linker_script],
            }
        }
        return project_data

    def progen_gen_file(self, tool_name, project_data):
        """ Generate project using ProGen Project API """
        settings = ProjectSettings()
        project = Project(self.program_name, [project_data], settings)
        # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen
        # thinks it is not dict but a file, and adds them to workspace.
        project.project['common']['include_paths'] = self.resources.inc_dirs
        project.generate(tool_name, copied=True)

    def __scan_all(self, path):
        """Return the paths of every file under `path`, pruning the literal
        '.'/'..' directory entries from the walk."""
        resources = []

        for root, dirs, files in walk(path):
            for d in copy(dirs):
                if d == '.' or d == '..':
                    dirs.remove(d)

            for file in files:
                file_path = join(root, file)
                resources.append(file_path)

        return resources

    def scan_and_copy_resources(self, prj_path, trg_path):
        """Copy the project's 'lib' and 'src' trees (plus referenced library
        builds) into trg_path, then rescan trg_path into self.resources."""
        # Copy only the file for the required target and toolchain
        lib_builds = []
        for src in ['lib', 'src']:
            resources = self.__scan_and_copy(join(prj_path, src), trg_path)
            lib_builds.extend(resources.lib_builds)

            # The repository files
            for repo_dir in resources.repo_dirs:
                repo_files = self.__scan_all(repo_dir)
                self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))

        # The libraries builds
        for bld in lib_builds:
            # Each .bld file holds the URL of a library build to pull in.
            build_url = open(bld).read().strip()
            lib_data = self.build_url_resolver(build_url)
            lib_path = lib_data['path'].rstrip('\\/')
            self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))

            # Create .hg dir in mbed build dir so it's ignored when versioning
            hgdir = join(trg_path, lib_data['name'], '.hg')
            mkdir(hgdir)
            fhandle = file(join(hgdir, 'keep.me'), 'a')
            fhandle.close()

        # Final scan of the actual exported resources
        self.resources = self.toolchain.scan_resources(trg_path)
        self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
        # Check the existence of a binary build of the mbed library for the desired target
        # This prevents exporting the mbed libraries from source
        # if not self.toolchain.mbed_libs:
        #    raise OldLibrariesException()

    def gen_file(self, template_file, data, target_file):
        """Render `template_file` with `data` and write it into inputDir
        as `target_file`."""
        # NOTE(review): template_path is unused; the Jinja loader resolves
        # template_file against TEMPLATE_DIR itself.
        template_path = join(Exporter.TEMPLATE_DIR, template_file)
        template = self.jinja_environment.get_template(template_file)
        target_text = template.render(data)

        target_path = join(self.inputDir, target_file)
        logging.debug("Generating: %s" % target_path)
        open(target_path, "w").write(target_text)

    def get_symbols(self, add_extra_symbols=True):
        """ This function returns symbols which must be exported.
            Please add / overwrite symbols in each exporter separately
        """
        symbols = self.toolchain.get_symbols()
        # We have extra symbols from e.g. libraries, we want to have them also added to export
        if add_extra_symbols:
            if self.extra_symbols is not None:
                symbols.extend(self.extra_symbols)
        return symbols
+
def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
    """Zip the contents of `tempdirectory` into a uniquely named archive.

    Every archived entry is placed under a top-level `program_name/` folder.
    The archive is written into `destination` under a random UUID-based file
    name, and its full path is returned. When `clean` is True the source
    directory is deleted afterwards. Empty directories are not archived.
    """
    uid = str(uuid.uuid4())
    zipfilename = '%s.zip' % uid

    logging.debug("Zipping up %s to %s" % (tempdirectory, join(destination, zipfilename)))

    # make zip
    def zipdir(basedir, archivename):
        assert isdir(basedir)
        fakeroot = program_name + '/'
        with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
            for root, _, files in os.walk(basedir):
                # NOTE: ignore empty directories
                for fn in files:
                    absfn = join(root, fn)
                    # Fix: fakeroot already ends with '/'; previously another
                    # '/' was inserted here, yielding 'name//path' entries.
                    zfn = fakeroot + absfn[len(basedir) + len(os.sep):]
                    z.write(absfn, zfn)

    zipdir(tempdirectory, join(destination, zipfilename))

    if clean:
        shutil.rmtree(tempdirectory)

    return join(destination, zipfilename)
diff --git a/tools/export/gcc_arm_arch_ble.tmpl b/tools/export/gcc_arm_arch_ble.tmpl
new file mode 100644
index 0000000..2f4c03f
--- /dev/null
+++ b/tools/export/gcc_arm_arch_ble.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_ARCH_BLE/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_arch_max.tmpl b/tools/export/gcc_arm_arch_max.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_arch_max.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_arch_pro.tmpl b/tools/export/gcc_arm_arch_pro.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_arch_pro.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_b96b_f446ve.tmpl b/tools/export/gcc_arm_b96b_f446ve.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_b96b_f446ve.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_common.tmpl b/tools/export/gcc_arm_common.tmpl
new file mode 100644
index 0000000..d641b0a
--- /dev/null
+++ b/tools/export/gcc_arm_common.tmpl
@@ -0,0 +1,105 @@
+# This file was automagically generated by mbed.org. For more information,
+# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
+
+GCC_BIN =
+PROJECT = {{name}}
+OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
+SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
+INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
+LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
+LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
+LINKER_SCRIPT = {{linker_script}}
+{%- block additional_variables -%}{% endblock %}
+
+###############################################################################
+AS = $(GCC_BIN)arm-none-eabi-as
+CC = $(GCC_BIN)arm-none-eabi-gcc
+CPP = $(GCC_BIN)arm-none-eabi-g++
+LD = $(GCC_BIN)arm-none-eabi-gcc
+OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
+OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
+SIZE = $(GCC_BIN)arm-none-eabi-size
+{%- block additional_executables -%}{% endblock %}
+
+{%- block flags -%}
+
+{% block hardfp %}
+{% if "-mfloat-abi=softfp" in cpu_flags %}
+ifeq ($(HARDFP),1)
+ FLOAT_ABI = hard
+else
+ FLOAT_ABI = softfp
+endif
+{% endif %}
+{%- endblock %}
+
+CPU = {% block cpu %}{% for cf in cpu_flags %}{{cf|replace("-mfloat-abi=softfp","-mfloat-abi=$(FLOAT_ABI)")}} {% endfor %}{% endblock %}
+CC_FLAGS = {% block cc_flags %}$(CPU) -c -g -fno-common -fmessage-length=0 -Wall -Wextra -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP{% endblock %}
+CC_SYMBOLS = {% block cc_symbols %}{% for s in symbols %}-D{{s}} {% endfor %}{% endblock %}
+
+LD_FLAGS = {%- block ld_flags -%}
+{%- if "-mcpu=cortex-m0" in cpu_flags or "-mcpu=cortex-m0plus" in cpu_flags -%}
+{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -Wl,--wrap,main -Wl,-Map=$(PROJECT).map,--cref
+#LD_FLAGS += -u _printf_float -u _scanf_float
+{%- else -%}
+{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,-Map=$(PROJECT).map,--cref
+{%- endif -%}
+{% endblock %}
+LD_SYS_LIBS = {% block ld_sys_libs %}-lstdc++ -lsupc++ -lm -lc -lgcc -lnosys{% endblock %}
+{% endblock %}
+
+ifeq ($(DEBUG), 1)
+ CC_FLAGS += -DDEBUG -O0
+else
+ CC_FLAGS += -DNDEBUG -Os
+endif
+
+.PHONY: all clean lst size
+
+{% block target_all -%}
+all: $(PROJECT).bin $(PROJECT).hex size
+{% endblock %}
+
+{% block target_clean -%}
+clean:
+ rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
+{% endblock %}
+
+.asm.o:
+ $(CC) $(CPU) -c -x assembler-with-cpp -o $@ $<
+.s.o:
+ $(CC) $(CPU) -c -x assembler-with-cpp -o $@ $<
+.S.o:
+ $(CC) $(CPU) -c -x assembler-with-cpp -o $@ $<
+
+.c.o:
+ $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
+
+.cpp.o:
+ $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
+
+
+{% block target_project_elf %}
+$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
+ $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ -Wl,--start-group $(LIBRARIES) $(LD_SYS_LIBS) -Wl,--end-group
+{% endblock %}
+
+$(PROJECT).bin: $(PROJECT).elf
+ $(OBJCOPY) -O binary $< $@
+
+$(PROJECT).hex: $(PROJECT).elf
+ @$(OBJCOPY) -O ihex $< $@
+
+$(PROJECT).lst: $(PROJECT).elf
+ @$(OBJDUMP) -Sdh $< > $@
+
+lst: $(PROJECT).lst
+
+size: $(PROJECT).elf
+ $(SIZE) $(PROJECT).elf
+
+DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
+-include $(DEPS)
+
+{% block additional_targets %}{% endblock %}
+
diff --git a/tools/export/gcc_arm_delta_dfcm_nnn40.tmpl b/tools/export/gcc_arm_delta_dfcm_nnn40.tmpl
new file mode 100644
index 0000000..c35964e
--- /dev/null
+++ b/tools/export/gcc_arm_delta_dfcm_nnn40.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_0_0/s110_nrf51822_7.0.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_disco_f051r8.tmpl b/tools/export/gcc_arm_disco_f051r8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f051r8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f100rb.tmpl b/tools/export/gcc_arm_disco_f100rb.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f100rb.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f303vc.tmpl b/tools/export/gcc_arm_disco_f303vc.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f303vc.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f334c8.tmpl b/tools/export/gcc_arm_disco_f334c8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f334c8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f401vc.tmpl b/tools/export/gcc_arm_disco_f401vc.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f401vc.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f407vg.tmpl b/tools/export/gcc_arm_disco_f407vg.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f407vg.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f429zi.tmpl b/tools/export/gcc_arm_disco_f429zi.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f429zi.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f469ni.tmpl b/tools/export/gcc_arm_disco_f469ni.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f469ni.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_f746ng.tmpl b/tools/export/gcc_arm_disco_f746ng.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_f746ng.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_l053c8.tmpl b/tools/export/gcc_arm_disco_l053c8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_l053c8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_disco_l476vg.tmpl b/tools/export/gcc_arm_disco_l476vg.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_disco_l476vg.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_efm32_common.tmpl b/tools/export/gcc_arm_efm32_common.tmpl
new file mode 100644
index 0000000..c73a8c3
--- /dev/null
+++ b/tools/export/gcc_arm_efm32_common.tmpl
@@ -0,0 +1,115 @@
+# This file was automagically generated by mbed.org. For more information,
+# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
+
+GCC_BIN =
+PROJECT = {{name}}
+OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
+SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
+INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
+LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
+LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
+LINKER_SCRIPT = {{linker_script}}
+
+OUT_DIR = bin
+OBJ_FOLDER = $(strip $(OUT_DIR))/
+
+{%- block additional_variables -%}{% endblock %}
+
+###############################################################################
+AS = $(GCC_BIN)arm-none-eabi-as
+CC = $(GCC_BIN)arm-none-eabi-gcc
+CPP = $(GCC_BIN)arm-none-eabi-g++
+LD = $(GCC_BIN)arm-none-eabi-gcc
+OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
+OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
+SIZE = $(GCC_BIN)arm-none-eabi-size
+{%- block additional_executables -%}{% endblock %}
+
+{%- block flags -%}
+
+{% block hardfp %}
+{% if "-mfloat-abi=softfp" in cpu_flags %}
+ifeq ($(HARDFP),1)
+ FLOAT_ABI = hard
+else
+ FLOAT_ABI = softfp
+endif
+{% endif %}
+{%- endblock %}
+
+CPU = {% block cpu %}{% for cf in cpu_flags %}{{cf|replace("-mfloat-abi=softfp","-mfloat-abi=$(FLOAT_ABI)")}} {% endfor %}{% endblock %}
+CC_FLAGS = {% block cc_flags %}$(CPU) -c -g -fno-common -fmessage-length=0 -Wall -Wextra -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP{% endblock %}
+CC_SYMBOLS = {% block cc_symbols %}{% for s in symbols %}-D{{s}} {% endfor %}{% endblock %}
+
+ifeq ($(DEBUG), 1)
+ CC_FLAGS += -DDEBUG -O0
+else
+ CC_FLAGS += -DNDEBUG -Os
+endif
+
+LD_FLAGS = {%- block ld_flags -%}
+{%- if "-mcpu=cortex-m0" in cpu_flags or "-mcpu=cortex-m0plus" in cpu_flags -%}
+{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -Wl,--wrap,main -Wl,-Map=$(OBJ_FOLDER)$(PROJECT).map,--cref
+#LD_FLAGS += -u _printf_float -u _scanf_float
+{%- else -%}
+{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,-Map=$(OBJ_FOLDER)$(PROJECT).map,--cref
+{%- endif -%}
+{% endblock %}
+LD_SYS_LIBS = {% block ld_sys_libs %}-lstdc++ -lsupc++ -lm -lc -lgcc -lnosys{% endblock %}
+{% endblock %}
+
+.PHONY: all clean lst size
+
+{% block target_all -%}
+all: create_outputdir $(OBJ_FOLDER)$(PROJECT).bin $(OBJ_FOLDER)$(PROJECT).hex size
+{% endblock %}
+
+{% block target_create_outputdir -%}
+create_outputdir:
+ $(shell mkdir $(OBJ_FOLDER) 2>/dev/null)
+{% endblock %}
+
+{% block target_clean -%}
+clean:
+ rm -f $(OBJ_FOLDER)$(PROJECT).bin $(OBJ_FOLDER)$(PROJECT).axf $(OBJ_FOLDER)$(PROJECT).hex $(OBJ_FOLDER)$(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
+{% endblock %}
+
+.s.o:
+ $(AS) $(CPU) -o $@ $<
+
+.c.o:
+ $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
+
+.cpp.o:
+ $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
+
+
+{% block target_project_axf %}
+$(OBJ_FOLDER)$(PROJECT).axf: $(OBJECTS) $(SYS_OBJECTS)
+ $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
+
+$(OBJ_FOLDER)$(PROJECT).bin: $(OBJ_FOLDER)$(PROJECT).axf
+ @$(OBJCOPY) -O binary $< $@
+
+$(OBJ_FOLDER)$(PROJECT).hex: $(OBJ_FOLDER)$(PROJECT).axf
+ @$(OBJCOPY) -O ihex $< $@
+
+$(OBJ_FOLDER)$(PROJECT).lst: $(OBJ_FOLDER)$(PROJECT).axf
+ @$(OBJDUMP) -Sdh $< > $@
+
+lst: $(OBJ_FOLDER)$(PROJECT).lst
+
+size: $(OBJ_FOLDER)$(PROJECT).axf
+ $(SIZE) $(OBJ_FOLDER)$(PROJECT).axf
+
+DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
+-include $(DEPS)
+
+{% block additional_targets %}{% endblock %}
+
diff --git a/tools/export/gcc_arm_efm32gg_stk3700.tmpl b/tools/export/gcc_arm_efm32gg_stk3700.tmpl
new file mode 100644
index 0000000..7187abc
--- /dev/null
+++ b/tools/export/gcc_arm_efm32gg_stk3700.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/tools/export/gcc_arm_efm32hg_stk3400.tmpl b/tools/export/gcc_arm_efm32hg_stk3400.tmpl
new file mode 100644
index 0000000..7187abc
--- /dev/null
+++ b/tools/export/gcc_arm_efm32hg_stk3400.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/tools/export/gcc_arm_efm32lg_stk3600.tmpl b/tools/export/gcc_arm_efm32lg_stk3600.tmpl
new file mode 100644
index 0000000..7187abc
--- /dev/null
+++ b/tools/export/gcc_arm_efm32lg_stk3600.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/tools/export/gcc_arm_efm32pg_stk3401.tmpl b/tools/export/gcc_arm_efm32pg_stk3401.tmpl
new file mode 100644
index 0000000..7187abc
--- /dev/null
+++ b/tools/export/gcc_arm_efm32pg_stk3401.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/tools/export/gcc_arm_efm32wg_stk3800.tmpl b/tools/export/gcc_arm_efm32wg_stk3800.tmpl
new file mode 100644
index 0000000..7187abc
--- /dev/null
+++ b/tools/export/gcc_arm_efm32wg_stk3800.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/tools/export/gcc_arm_efm32zg_stk3200.tmpl b/tools/export/gcc_arm_efm32zg_stk3200.tmpl
new file mode 100644
index 0000000..7187abc
--- /dev/null
+++ b/tools/export/gcc_arm_efm32zg_stk3200.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/tools/export/gcc_arm_hrm1017.tmpl b/tools/export/gcc_arm_hrm1017.tmpl
new file mode 100644
index 0000000..0c6a037
--- /dev/null
+++ b/tools/export/gcc_arm_hrm1017.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_HRM1017/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_k20d50m.tmpl b/tools/export/gcc_arm_k20d50m.tmpl
new file mode 100644
index 0000000..47ed5cf
--- /dev/null
+++ b/tools/export/gcc_arm_k20d50m.tmpl
@@ -0,0 +1,4 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block hardfp %}{% endblock %}
+{% block cpu %}-mcpu=cortex-m4 -mthumb{% endblock %}
diff --git a/tools/export/gcc_arm_k22f.tmpl b/tools/export/gcc_arm_k22f.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_k22f.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_k64f.tmpl b/tools/export/gcc_arm_k64f.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_k64f.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_kl05z.tmpl b/tools/export/gcc_arm_kl05z.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_kl05z.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_kl25z.tmpl b/tools/export/gcc_arm_kl25z.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_kl25z.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_kl43z.tmpl b/tools/export/gcc_arm_kl43z.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_kl43z.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_kl46z.tmpl b/tools/export/gcc_arm_kl46z.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_kl46z.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_lpc1114.tmpl b/tools/export/gcc_arm_lpc1114.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_lpc1114.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_lpc11u24.tmpl b/tools/export/gcc_arm_lpc11u24.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_lpc11u24.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_lpc11u35_401.tmpl b/tools/export/gcc_arm_lpc11u35_401.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_lpc11u35_401.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_lpc11u35_501.tmpl b/tools/export/gcc_arm_lpc11u35_501.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_lpc11u35_501.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_lpc11u37h_401.tmpl b/tools/export/gcc_arm_lpc11u37h_401.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc11u37h_401.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpc1549.tmpl b/tools/export/gcc_arm_lpc1549.tmpl
new file mode 100644
index 0000000..047a815
--- /dev/null
+++ b/tools/export/gcc_arm_lpc1549.tmpl
@@ -0,0 +1,11 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
+
diff --git a/tools/export/gcc_arm_lpc1768.tmpl b/tools/export/gcc_arm_lpc1768.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc1768.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpc2368.tmpl b/tools/export/gcc_arm_lpc2368.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_lpc2368.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_lpc2460.tmpl b/tools/export/gcc_arm_lpc2460.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_lpc2460.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_lpc4088.tmpl b/tools/export/gcc_arm_lpc4088.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc4088.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpc4088_dm.tmpl b/tools/export/gcc_arm_lpc4088_dm.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_lpc4088_dm.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_lpc4330_m4.tmpl b/tools/export/gcc_arm_lpc4330_m4.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc4330_m4.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpc810.tmpl b/tools/export/gcc_arm_lpc810.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc810.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpc812.tmpl b/tools/export/gcc_arm_lpc812.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc812.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpc824.tmpl b/tools/export/gcc_arm_lpc824.tmpl
new file mode 100644
index 0000000..58de54b
--- /dev/null
+++ b/tools/export/gcc_arm_lpc824.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
\ No newline at end of file
diff --git a/tools/export/gcc_arm_lpccappuccino.tmpl b/tools/export/gcc_arm_lpccappuccino.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_lpccappuccino.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_max32600mbed.tmpl b/tools/export/gcc_arm_max32600mbed.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_max32600mbed.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_maxwsnenv.tmpl b/tools/export/gcc_arm_maxwsnenv.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_maxwsnenv.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_mote_l152rc.tmpl b/tools/export/gcc_arm_mote_l152rc.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_mote_l152rc.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_mts_gambit.tmpl b/tools/export/gcc_arm_mts_gambit.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_mts_gambit.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_mts_mdot_f405rg.tmpl b/tools/export/gcc_arm_mts_mdot_f405rg.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_mts_mdot_f405rg.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_mts_mdot_f411re.tmpl b/tools/export/gcc_arm_mts_mdot_f411re.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_mts_mdot_f411re.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nrf51822.tmpl b/tools/export/gcc_arm_nrf51822.tmpl
new file mode 100644
index 0000000..d7120ca
--- /dev/null
+++ b/tools/export/gcc_arm_nrf51822.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s130_nrf51822_1_0_0/s130_nrf51_1.0.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_nrf51_dk.tmpl b/tools/export/gcc_arm_nrf51_dk.tmpl
new file mode 100644
index 0000000..2afebf2
--- /dev/null
+++ b/tools/export/gcc_arm_nrf51_dk.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_NRF51_DK/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_nrf51_dongle.tmpl b/tools/export/gcc_arm_nrf51_dongle.tmpl
new file mode 100644
index 0000000..c35964e
--- /dev/null
+++ b/tools/export/gcc_arm_nrf51_dongle.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_0_0/s110_nrf51822_7.0.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_nrf51_microbit.tmpl b/tools/export/gcc_arm_nrf51_microbit.tmpl
new file mode 100644
index 0000000..8071c9b
--- /dev/null
+++ b/tools/export/gcc_arm_nrf51_microbit.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_NRF51_MICROBIT/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_8_0_0/s110_nrf51822_8.0.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_nucleo_f030r8.tmpl b/tools/export/gcc_arm_nucleo_f030r8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f030r8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f031k6.tmpl b/tools/export/gcc_arm_nucleo_f031k6.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f031k6.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f042k6.tmpl b/tools/export/gcc_arm_nucleo_f042k6.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f042k6.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f070rb.tmpl b/tools/export/gcc_arm_nucleo_f070rb.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f070rb.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f072rb.tmpl b/tools/export/gcc_arm_nucleo_f072rb.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f072rb.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f091rc.tmpl b/tools/export/gcc_arm_nucleo_f091rc.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f091rc.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f103rb.tmpl b/tools/export/gcc_arm_nucleo_f103rb.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f103rb.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f302r8.tmpl b/tools/export/gcc_arm_nucleo_f302r8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f302r8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f303k8.tmpl b/tools/export/gcc_arm_nucleo_f303k8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f303k8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f303re.tmpl b/tools/export/gcc_arm_nucleo_f303re.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f303re.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f334r8.tmpl b/tools/export/gcc_arm_nucleo_f334r8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f334r8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f401re.tmpl b/tools/export/gcc_arm_nucleo_f401re.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f401re.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f410rb.tmpl b/tools/export/gcc_arm_nucleo_f410rb.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f410rb.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f411re.tmpl b/tools/export/gcc_arm_nucleo_f411re.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f411re.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f446re.tmpl b/tools/export/gcc_arm_nucleo_f446re.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f446re.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_f746zg.tmpl b/tools/export/gcc_arm_nucleo_f746zg.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_f746zg.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_l031k6.tmpl b/tools/export/gcc_arm_nucleo_l031k6.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_l031k6.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_l053r8.tmpl b/tools/export/gcc_arm_nucleo_l053r8.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_l053r8.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_l073rz.tmpl b/tools/export/gcc_arm_nucleo_l073rz.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_l073rz.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_l152re.tmpl b/tools/export/gcc_arm_nucleo_l152re.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_l152re.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nucleo_l476rg.tmpl b/tools/export/gcc_arm_nucleo_l476rg.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nucleo_l476rg.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_nz32_sc151.tmpl b/tools/export/gcc_arm_nz32_sc151.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_nz32_sc151.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_rblab_blenano.tmpl b/tools/export/gcc_arm_rblab_blenano.tmpl
new file mode 100644
index 0000000..c65eb03
--- /dev/null
+++ b/tools/export/gcc_arm_rblab_blenano.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_RBLAB_BLENANO/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s130_nrf51822_1_0_0/s130_nrf51_1.0.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_rblab_nrf51822.tmpl b/tools/export/gcc_arm_rblab_nrf51822.tmpl
new file mode 100644
index 0000000..6a615fd
--- /dev/null
+++ b/tools/export/gcc_arm_rblab_nrf51822.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_RBLAB_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_rz_a1h.tmpl b/tools/export/gcc_arm_rz_a1h.tmpl
new file mode 100644
index 0000000..055d0e5
--- /dev/null
+++ b/tools/export/gcc_arm_rz_a1h.tmpl
@@ -0,0 +1,16 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block cc_flags -%}
+$(CPU) -c -g -fno-common -fmessage-length=0 -Wall -Wextra -Wno-unused-parameter -Wno-missing-field-initializers
+CC_FLAGS += -fno-exceptions -fno-builtin -ffunction-sections -fdata-sections -fno-delete-null-pointer-checks -fomit-frame-pointer
+CC_FLAGS += -MMD -MP
+{% endblock %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_samd21g18a.tmpl b/tools/export/gcc_arm_samd21g18a.tmpl
new file mode 100644
index 0000000..b71f60c
--- /dev/null
+++ b/tools/export/gcc_arm_samd21g18a.tmpl
@@ -0,0 +1,72 @@
+# This file was automagically generated by mbed.org. For more information,
+# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
+
+GCC_BIN =
+PROJECT = {{name}}
+OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
+SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
+INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
+LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
+LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
+LINKER_SCRIPT = {{linker_script}}
+
+###############################################################################
+AS = $(GCC_BIN)arm-none-eabi-as
+CC = $(GCC_BIN)arm-none-eabi-gcc
+CPP = $(GCC_BIN)arm-none-eabi-g++
+LD = $(GCC_BIN)arm-none-eabi-g++
+OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
+OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
+SIZE = $(GCC_BIN)arm-none-eabi-size
+
+CPU = -mcpu=cortex-m0plus -mthumb
+CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
+CC_FLAGS += -MMD -MP
+CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
+
+LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
+LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
+LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
+
+ifeq ($(DEBUG), 1)
+ CC_FLAGS += -DDEBUG -O0
+else
+ CC_FLAGS += -DNDEBUG -Os
+endif
+
+all: $(PROJECT).bin $(PROJECT).hex
+
+clean:
+ rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
+
+.s.o:
+ $(AS) $(CPU) -o $@ $<
+
+.c.o:
+ $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
+
+.cpp.o:
+ $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
+
+
+$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
+ $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
+ $(SIZE) $@
+
+$(PROJECT).bin: $(PROJECT).elf
+ @$(OBJCOPY) -O binary $< $@
+
+$(PROJECT).hex: $(PROJECT).elf
+ @$(OBJCOPY) -O ihex $< $@
+
+$(PROJECT).lst: $(PROJECT).elf
+ @$(OBJDUMP) -Sdh $< > $@
+
+lst: $(PROJECT).lst
+
+size:
+ $(SIZE) $(PROJECT).elf
+
+DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
+-include $(DEPS)
+
diff --git a/tools/export/gcc_arm_samd21j18a.tmpl b/tools/export/gcc_arm_samd21j18a.tmpl
new file mode 100644
index 0000000..b71f60c
--- /dev/null
+++ b/tools/export/gcc_arm_samd21j18a.tmpl
@@ -0,0 +1,72 @@
+# This file was automagically generated by mbed.org. For more information,
+# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
+
+GCC_BIN =
+PROJECT = {{name}}
+OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
+SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
+INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
+LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
+LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
+LINKER_SCRIPT = {{linker_script}}
+
+###############################################################################
+AS = $(GCC_BIN)arm-none-eabi-as
+CC = $(GCC_BIN)arm-none-eabi-gcc
+CPP = $(GCC_BIN)arm-none-eabi-g++
+LD = $(GCC_BIN)arm-none-eabi-g++
+OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
+OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
+SIZE = $(GCC_BIN)arm-none-eabi-size
+
+CPU = -mcpu=cortex-m0plus -mthumb
+CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
+CC_FLAGS += -MMD -MP
+CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
+
+LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
+LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
+LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
+
+ifeq ($(DEBUG), 1)
+ CC_FLAGS += -DDEBUG -O0
+else
+ CC_FLAGS += -DNDEBUG -Os
+endif
+
+all: $(PROJECT).bin $(PROJECT).hex
+
+clean:
+ rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
+
+.s.o:
+ $(AS) $(CPU) -o $@ $<
+
+.c.o:
+ $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
+
+.cpp.o:
+ $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
+
+
+$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
+ $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
+ $(SIZE) $@
+
+$(PROJECT).bin: $(PROJECT).elf
+ @$(OBJCOPY) -O binary $< $@
+
+$(PROJECT).hex: $(PROJECT).elf
+ @$(OBJCOPY) -O ihex $< $@
+
+$(PROJECT).lst: $(PROJECT).elf
+ @$(OBJDUMP) -Sdh $< > $@
+
+lst: $(PROJECT).lst
+
+size:
+ $(SIZE) $(PROJECT).elf
+
+DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
+-include $(DEPS)
+
diff --git a/tools/export/gcc_arm_samg55j19.tmpl b/tools/export/gcc_arm_samg55j19.tmpl
new file mode 100644
index 0000000..47ed5cf
--- /dev/null
+++ b/tools/export/gcc_arm_samg55j19.tmpl
@@ -0,0 +1,4 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block hardfp %}{% endblock %}
+{% block cpu %}-mcpu=cortex-m4 -mthumb{% endblock %}
diff --git a/tools/export/gcc_arm_saml21j18a.tmpl b/tools/export/gcc_arm_saml21j18a.tmpl
new file mode 100644
index 0000000..b71f60c
--- /dev/null
+++ b/tools/export/gcc_arm_saml21j18a.tmpl
@@ -0,0 +1,72 @@
+# This file was automagically generated by mbed.org. For more information,
+# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
+
+GCC_BIN =
+PROJECT = {{name}}
+OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
+SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
+INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
+LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
+LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
+LINKER_SCRIPT = {{linker_script}}
+
+###############################################################################
+AS = $(GCC_BIN)arm-none-eabi-as
+CC = $(GCC_BIN)arm-none-eabi-gcc
+CPP = $(GCC_BIN)arm-none-eabi-g++
+LD = $(GCC_BIN)arm-none-eabi-g++
+OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
+OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
+SIZE = $(GCC_BIN)arm-none-eabi-size
+
+CPU = -mcpu=cortex-m0plus -mthumb
+CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
+CC_FLAGS += -MMD -MP
+CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
+
+LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
+LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
+LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
+
+ifeq ($(DEBUG), 1)
+ CC_FLAGS += -DDEBUG -O0
+else
+ CC_FLAGS += -DNDEBUG -Os
+endif
+
+all: $(PROJECT).bin $(PROJECT).hex
+
+clean:
+ rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
+
+.s.o:
+ $(AS) $(CPU) -o $@ $<
+
+.c.o:
+ $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
+
+.cpp.o:
+ $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
+
+
+$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
+ $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
+ $(SIZE) $@
+
+$(PROJECT).bin: $(PROJECT).elf
+ @$(OBJCOPY) -O binary $< $@
+
+$(PROJECT).hex: $(PROJECT).elf
+ @$(OBJCOPY) -O ihex $< $@
+
+$(PROJECT).lst: $(PROJECT).elf
+ @$(OBJDUMP) -Sdh $< > $@
+
+lst: $(PROJECT).lst
+
+size:
+ $(SIZE) $(PROJECT).elf
+
+DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
+-include $(DEPS)
+
diff --git a/tools/export/gcc_arm_samr21g18a.tmpl b/tools/export/gcc_arm_samr21g18a.tmpl
new file mode 100644
index 0000000..b71f60c
--- /dev/null
+++ b/tools/export/gcc_arm_samr21g18a.tmpl
@@ -0,0 +1,72 @@
+# This file was automagically generated by mbed.org. For more information,
+# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
+
+GCC_BIN =
+PROJECT = {{name}}
+OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
+SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
+INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
+LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
+LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
+LINKER_SCRIPT = {{linker_script}}
+
+###############################################################################
+AS = $(GCC_BIN)arm-none-eabi-as
+CC = $(GCC_BIN)arm-none-eabi-gcc
+CPP = $(GCC_BIN)arm-none-eabi-g++
+LD = $(GCC_BIN)arm-none-eabi-g++
+OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
+OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
+SIZE = $(GCC_BIN)arm-none-eabi-size
+
+CPU = -mcpu=cortex-m0plus -mthumb
+CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
+CC_FLAGS += -MMD -MP
+CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
+
+LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
+LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
+LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
+
+ifeq ($(DEBUG), 1)
+ CC_FLAGS += -DDEBUG -O0
+else
+ CC_FLAGS += -DNDEBUG -Os
+endif
+
+all: $(PROJECT).bin $(PROJECT).hex
+
+clean:
+ rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
+
+.s.o:
+ $(AS) $(CPU) -o $@ $<
+
+.c.o:
+ $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
+
+.cpp.o:
+ $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
+
+
+$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
+ $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
+ $(SIZE) $@
+
+$(PROJECT).bin: $(PROJECT).elf
+ @$(OBJCOPY) -O binary $< $@
+
+$(PROJECT).hex: $(PROJECT).elf
+ @$(OBJCOPY) -O ihex $< $@
+
+$(PROJECT).lst: $(PROJECT).elf
+ @$(OBJDUMP) -Sdh $< > $@
+
+lst: $(PROJECT).lst
+
+size:
+ $(SIZE) $(PROJECT).elf
+
+DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
+-include $(DEPS)
+
diff --git a/tools/export/gcc_arm_seeed_tiny_ble.tmpl b/tools/export/gcc_arm_seeed_tiny_ble.tmpl
new file mode 100644
index 0000000..2f4c03f
--- /dev/null
+++ b/tools/export/gcc_arm_seeed_tiny_ble.tmpl
@@ -0,0 +1,14 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block additional_variables %}
+SOFTDEVICE = mbed/TARGET_ARCH_BLE/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
+{% endblock %}
+
+{% block additional_executables %}
+SREC_CAT = srec_cat
+{% endblock %}
+
+{% block additional_targets %}
+merge:
+ $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
+{% endblock %}
diff --git a/tools/export/gcc_arm_ssci824.tmpl b/tools/export/gcc_arm_ssci824.tmpl
new file mode 100644
index 0000000..dbbc6da
--- /dev/null
+++ b/tools/export/gcc_arm_ssci824.tmpl
@@ -0,0 +1,10 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block target_project_elf %}
+{{ super() }}
+ @echo ""
+ @echo "*****"
+ @echo "***** You must modify vector checksum value in *.bin and *.hex files."
+ @echo "*****"
+ @echo ""
+{% endblock %}
diff --git a/tools/export/gcc_arm_stm32f407.tmpl b/tools/export/gcc_arm_stm32f407.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_stm32f407.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gcc_arm_teensy3_1.tmpl b/tools/export/gcc_arm_teensy3_1.tmpl
new file mode 100644
index 0000000..47ed5cf
--- /dev/null
+++ b/tools/export/gcc_arm_teensy3_1.tmpl
@@ -0,0 +1,4 @@
+{% extends "gcc_arm_common.tmpl" %}
+
+{% block hardfp %}{% endblock %}
+{% block cpu %}-mcpu=cortex-m4 -mthumb{% endblock %}
diff --git a/tools/export/gcc_arm_ublox_c027.tmpl b/tools/export/gcc_arm_ublox_c027.tmpl
new file mode 100644
index 0000000..6e616cc
--- /dev/null
+++ b/tools/export/gcc_arm_ublox_c027.tmpl
@@ -0,0 +1 @@
+{% extends "gcc_arm_common.tmpl" %}
diff --git a/tools/export/gccarm.py b/tools/export/gccarm.py
new file mode 100644
index 0000000..18e67f5
--- /dev/null
+++ b/tools/export/gccarm.py
@@ -0,0 +1,150 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename
+
+
+class GccArm(Exporter):
+ NAME = 'GccArm'
+ TOOLCHAIN = 'GCC_ARM'
+
+ TARGETS = [
+ 'LPC1768',
+ 'LPC1549',
+ 'KL05Z',
+ 'KL25Z',
+ 'KL43Z',
+ 'KL46Z',
+ 'K64F',
+ 'K22F',
+ 'K20D50M',
+ 'LPC4088',
+ 'LPC4088_DM',
+ 'LPC4330_M4',
+ 'LPC11U24',
+ 'LPC1114',
+ 'LPC11U35_401',
+ 'LPC11U35_501',
+ 'LPC11U37H_401',
+ 'LPC810',
+ 'LPC812',
+ 'LPC824',
+ 'SSCI824',
+ 'STM32F407',
+ 'DISCO_F100RB',
+ 'DISCO_F051R8',
+ 'DISCO_F407VG',
+ 'DISCO_F429ZI',
+ 'DISCO_F469NI',
+ 'DISCO_F303VC',
+ 'DISCO_F746NG',
+ 'DISCO_L476VG',
+ 'UBLOX_C027',
+ 'ARCH_PRO',
+ 'NRF51822',
+ 'HRM1017',
+ 'RBLAB_NRF51822',
+ 'RBLAB_BLENANO',
+ 'LPC2368',
+ 'LPC2460',
+ 'LPCCAPPUCCINO',
+ 'ARCH_BLE',
+ 'MTS_GAMBIT',
+ 'ARCH_MAX',
+ 'NUCLEO_F401RE',
+ 'NUCLEO_F410RB',
+ 'NUCLEO_F411RE',
+ 'NUCLEO_F446RE',
+ 'B96B_F446VE',
+ 'ARCH_MAX',
+ 'NUCLEO_F030R8',
+ 'NUCLEO_F031K6',
+ 'NUCLEO_F042K6',
+ 'NUCLEO_F070RB',
+ 'NUCLEO_F072RB',
+ 'NUCLEO_F091RC',
+ 'NUCLEO_F103RB',
+ 'NUCLEO_F302R8',
+ 'NUCLEO_F303K8',
+ 'NUCLEO_F303RE',
+ 'NUCLEO_F334R8',
+ 'NUCLEO_F746ZG',
+ 'DISCO_L053C8',
+ 'NUCLEO_L031K6',
+ 'NUCLEO_L053R8',
+ 'NUCLEO_L073RZ',
+ 'NUCLEO_L476RG',
+ 'DISCO_F334C8',
+ 'MAX32600MBED',
+ 'MAXWSNENV',
+ 'MTS_MDOT_F405RG',
+ 'MTS_MDOT_F411RE',
+ 'NUCLEO_L152RE',
+ 'NRF51_DK',
+ 'NRF51_DONGLE',
+ 'NRF51_MICROBIT',
+ 'SEEED_TINY_BLE',
+ 'DISCO_F401VC',
+ 'DELTA_DFCM_NNN40',
+ 'RZ_A1H',
+ 'MOTE_L152RC',
+ 'EFM32WG_STK3800',
+ 'EFM32LG_STK3600',
+ 'EFM32GG_STK3700',
+ 'EFM32ZG_STK3200',
+ 'EFM32HG_STK3400',
+ 'EFM32PG_STK3401',
+ 'NZ32_SC151',
+ 'SAMR21G18A',
+ 'TEENSY3_1',
+ 'SAMD21J18A',
+ 'SAMD21G18A',
+ 'SAML21J18A',
+ 'SAMG55J19',
+ ]
+
+ DOT_IN_RELATIVE_PATH = True
+
+ def generate(self):
+ # "make" wants Unix paths
+ self.resources.win_to_unix()
+
+ to_be_compiled = []
+ for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
+ r = getattr(self.resources, r_type)
+ if r:
+ for source in r:
+ base, ext = splitext(source)
+ to_be_compiled.append(base + '.o')
+
+ libraries = []
+ for lib in self.resources.libraries:
+ l, _ = splitext(basename(lib))
+ libraries.append(l[3:])
+
+ ctx = {
+ 'name': self.program_name,
+ 'to_be_compiled': to_be_compiled,
+ 'object_files': self.resources.objects,
+ 'include_paths': self.resources.inc_dirs,
+ 'library_paths': self.resources.lib_dirs,
+ 'linker_script': self.resources.linker_script,
+ 'libraries': libraries,
+ 'symbols': self.get_symbols(),
+ 'cpu_flags': self.toolchain.cpu
+ }
+ self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile')
diff --git a/tools/export/iar.py b/tools/export/iar.py
new file mode 100644
index 0000000..e043688
--- /dev/null
+++ b/tools/export/iar.py
@@ -0,0 +1,151 @@
+"""
+mbed SDK
+Copyright (c) 2011-2015 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import re
+import os
+from project_generator_definitions.definitions import ProGenDef
+
+from tools.export.exporters import Exporter
+from tools.targets import TARGET_MAP, TARGET_NAMES
+
+# If you wish to add a new target, add it to project_generator_definitions, and then
+# define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
+class IAREmbeddedWorkbench(Exporter):
+ """
+ Exporter class for IAR Systems. This class uses project generator.
+ """
+ # These 2 are currently for exporters backward compatibility
+ NAME = 'IAR'
+ TOOLCHAIN = 'IAR'
+ # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
+ PROGEN_ACTIVE = True
+
+ # backward compatibility with our scripts
+ TARGETS = []
+ for target in TARGET_NAMES:
+ try:
+ if (ProGenDef('iar').is_supported(str(TARGET_MAP[target])) or
+ ProGenDef('iar').is_supported(TARGET_MAP[target].progen['target'])):
+ TARGETS.append(target)
+ except AttributeError:
+ # target is not supported yet
+ continue
+
+ def generate(self):
+ """ Generates the project files """
+ project_data = self.progen_get_project_data()
+ tool_specific = {}
+ # Expand tool specific settings by IAR specific settings which are required
+ try:
+ if TARGET_MAP[self.target].progen['iar']['template']:
+ tool_specific['iar'] = TARGET_MAP[self.target].progen['iar']
+ except KeyError:
+ # use default template
+ # by the mbed projects
+ tool_specific['iar'] = {
+ # We currently don't use misc, template sets those for us
+ # 'misc': {
+ # 'cxx_flags': ['--no_rtti', '--no_exceptions'],
+ # 'c_flags': ['--diag_suppress=Pa050,Pa084,Pa093,Pa082'],
+ # 'ld_flags': ['--skip_dynamic_initialization'],
+ # },
+ 'template': [os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')],
+ }
+
+ project_data['tool_specific'] = {}
+ project_data['tool_specific'].update(tool_specific)
+ self.progen_gen_file('iar_arm', project_data)
+
+# Currently not used, we should reuse folder_name to create virtual folders
+class IarFolder():
+ """
+ This is a recursive folder object.
+ To present the folder structure in the IDE as it is presented on the disk.
+ This can be used for uvision as well if you replace the __str__ method.
+ Example:
+ files: ./main.cpp, ./apis/I2C.h, ./mbed/common/I2C.cpp
+ in the project this would look like:
+ main.cpp
+ common/I2C.cpp
+ input:
+ folder_level : folder path to current folder
+ folder_name : name of current folder
+ source_files : list of source_files (all must be in same directory)
+ """
+ def __init__(self, folder_level, folder_name, source_files):
+ self.folder_level = folder_level
+ self.folder_name = folder_name
+ self.source_files = source_files
+ self.sub_folders = {}
+
+ def __str__(self):
+ """
+ converts the folder structure to IAR project format.
+ """
+ group_start = ""
+ group_end = ""
+ if self.folder_name != "":
+ group_start = "\n%s\n" %(self.folder_name)
+ group_end = "\n"
+
+ str_content = group_start
+ #Add files in current folder
+ if self.source_files:
+ for src in self.source_files:
+ str_content += "\n$PROJ_DIR$/%s\n\n" % src
+ #Add sub folders
+ if self.sub_folders:
+ for folder_name in self.sub_folders.iterkeys():
+ str_content += self.sub_folders[folder_name].__str__()
+
+ str_content += group_end
+ return str_content
+
+ def insert_file(self, source_input):
+ """
+ Inserts a source file into the folder tree
+ """
+ if self.source_files:
+ #All source_files in an IarFolder must be in same directory.
+ dir_sources = IarFolder.get_directory(self.source_files[0])
+ #Check if sources are already at their deepest level.
+ if not self.folder_level == dir_sources:
+ _reg_exp = r"^" + re.escape(self.folder_level) + r"[/\\]?([^/\\]+)"
+ folder_name = re.match(_reg_exp, dir_sources).group(1)
+ self.sub_folders[folder_name] = IarFolder(os.path.join(self.folder_level, folder_name), folder_name, self.source_files)
+ self.source_files = []
+
+ dir_input = IarFolder.get_directory(source_input)
+ if dir_input == self.folder_level:
+ self.source_files.append(source_input)
+ else:
+ _reg_exp = r"^" + re.escape(self.folder_level) + r"[/\\]?([^/\\]+)"
+ folder_name = re.match(_reg_exp, dir_input).group(1)
+ if self.sub_folders.has_key(folder_name):
+ self.sub_folders[folder_name].insert_file(source_input)
+ else:
+ if self.folder_level == "":
+ #Top level exception
+ self.sub_folders[folder_name] = IarFolder(folder_name, folder_name, [source_input])
+ else:
+ self.sub_folders[folder_name] = IarFolder(os.path.join(self.folder_level, folder_name), folder_name, [source_input])
+
+ @staticmethod
+ def get_directory(file_path):
+ """
+ Returns the directory of the file
+ """
+ return os.path.dirname(file_path)
diff --git a/tools/export/iar_nucleo_f746zg.ewp.tmpl b/tools/export/iar_nucleo_f746zg.ewp.tmpl
new file mode 100644
index 0000000..228fbf8
--- /dev/null
+++ b/tools/export/iar_nucleo_f746zg.ewp.tmpl
@@ -0,0 +1,1917 @@
+
+
+
+ 2
+
+ Debug
+
+ ARM
+
+ 1
+
+ General
+ 3
+
+ 24
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ICCARM
+ 2
+
+ 31
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ AARM
+ 2
+
+ 9
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ OBJCOPY
+ 0
+
+ 1
+ 1
+ 1
+
+
+
+
+
+
+
+
+ CUSTOM
+ 3
+
+
+
+ 0
+
+
+
+ BICOMP
+ 0
+
+
+
+ BUILDACTION
+ 1
+
+
+
+
+
+
+ ILINK
+ 0
+
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ IARCHIVE
+ 0
+
+ 0
+ 1
+ 1
+
+
+
+
+
+
+ BILINK
+ 0
+
+
+
+
+ Release
+
+ ARM
+
+ 0
+
+ General
+ 3
+
+ 24
+ 1
+ 0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ICCARM
+ 2
+
+ 31
+ 1
+ 0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ AARM
+ 2
+
+ 9
+ 1
+ 0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ OBJCOPY
+ 0
+
+ 1
+ 1
+ 0
+
+
+
+
+
+
+
+
+ CUSTOM
+ 3
+
+
+
+ 0
+
+
+
+ BICOMP
+ 0
+
+
+
+ BUILDACTION
+ 1
+
+
+
+
+
+
+ ILINK
+ 0
+
+ 16
+ 1
+ 0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ IARCHIVE
+ 0
+
+ 0
+ 1
+ 0
+
+
+
+
+
+
+ BILINK
+ 0
+
+
+
+
+
+
diff --git a/tools/export/iar_rz_a1h.ewp.tmpl b/tools/export/iar_rz_a1h.ewp.tmpl
new file mode 100644
index 0000000..4d5338d
--- /dev/null
+++ b/tools/export/iar_rz_a1h.ewp.tmpl
@@ -0,0 +1,925 @@
+
+
+
+ 2
+
+ Debug
+
+ ARM
+
+ 1
+
+ General
+ 3
+
+ 21
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ICCARM
+ 2
+
+ 28
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ AARM
+ 2
+
+ 8
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ OBJCOPY
+ 0
+
+ 1
+ 1
+ 1
+
+
+
+
+
+
+
+
+ CUSTOM
+ 3
+
+
+
+
+
+
+ BICOMP
+ 0
+
+
+
+ BUILDACTION
+ 1
+
+
+
+
+
+
+ ILINK
+ 0
+
+ 14
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ IARCHIVE
+ 0
+
+ 0
+ 1
+ 1
+
+
+
+
+
+
+ BILINK
+ 0
+
+
+
+
+$PROJ_DIR$/main.cpp
+
+
+env
+
+$PROJ_DIR$/env\test_env.cpp
+
+
+
+
+
diff --git a/tools/export/iar_template.ewp.tmpl b/tools/export/iar_template.ewp.tmpl
new file mode 100644
index 0000000..332c581
--- /dev/null
+++ b/tools/export/iar_template.ewp.tmpl
@@ -0,0 +1,995 @@
+
+
+
+ 2
+
+ Debug
+
+ ARM
+
+ 1
+
+ General
+ 3
+
+ 22
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ICCARM
+ 2
+
+ 30
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ AARM
+ 2
+
+ 9
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ OBJCOPY
+ 0
+
+ 1
+ 1
+ 1
+
+
+
+
+
+
+
+
+ CUSTOM
+ 3
+
+
+
+
+
+
+ BICOMP
+ 0
+
+
+
+ BUILDACTION
+ 1
+
+
+
+
+
+
+ ILINK
+ 0
+
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ IARCHIVE
+ 0
+
+ 0
+ 1
+ 1
+
+
+
+
+
+
+ BILINK
+ 0
+
+
+
+
+$PROJ_DIR$/main.cpp
+
+
+env
+
+$PROJ_DIR$/env\test_env.cpp
+
+
+
+
+
diff --git a/tools/export/kds.py b/tools/export/kds.py
new file mode 100644
index 0000000..98f4161
--- /dev/null
+++ b/tools/export/kds.py
@@ -0,0 +1,46 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename
+
+
+class KDS(Exporter):
+ NAME = 'Kinetis Design Studio'
+ TOOLCHAIN = 'GCC_ARM'
+
+ TARGETS = [
+ 'K64F',
+ 'K22F',
+ ]
+
+ def generate(self):
+ libraries = []
+ for lib in self.resources.libraries:
+ l, _ = splitext(basename(lib))
+ libraries.append(l[3:])
+
+ ctx = {
+ 'name': self.program_name,
+ 'include_paths': self.resources.inc_dirs,
+ 'linker_script': self.resources.linker_script,
+ 'object_files': self.resources.objects,
+ 'libraries': libraries,
+ 'symbols': self.get_symbols()
+ }
+ self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project')
+ self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
+ self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.program_name)
diff --git a/tools/export/kds_k22f_cproject.tmpl b/tools/export/kds_k22f_cproject.tmpl
new file mode 100644
index 0000000..18aa0fb
--- /dev/null
+++ b/tools/export/kds_k22f_cproject.tmpl
@@ -0,0 +1,306 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/kds_k22f_project.tmpl b/tools/export/kds_k22f_project.tmpl
new file mode 100644
index 0000000..1ab5ab3
--- /dev/null
+++ b/tools/export/kds_k22f_project.tmpl
@@ -0,0 +1,27 @@
+
+
+ {{name}}
+ This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-KDS
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/kds_k64f_cproject.tmpl b/tools/export/kds_k64f_cproject.tmpl
new file mode 100644
index 0000000..18aa0fb
--- /dev/null
+++ b/tools/export/kds_k64f_cproject.tmpl
@@ -0,0 +1,306 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/kds_k64f_project.tmpl b/tools/export/kds_k64f_project.tmpl
new file mode 100644
index 0000000..1ab5ab3
--- /dev/null
+++ b/tools/export/kds_k64f_project.tmpl
@@ -0,0 +1,27 @@
+
+
+ {{name}}
+ This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-KDS
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/tools/export/kds_launch.tmpl b/tools/export/kds_launch.tmpl
new file mode 100644
index 0000000..1fe8bbd
--- /dev/null
+++ b/tools/export/kds_launch.tmpl
@@ -0,0 +1,59 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/simplicityv3.py b/tools/export/simplicityv3.py
new file mode 100644
index 0000000..6c01dcf
--- /dev/null
+++ b/tools/export/simplicityv3.py
@@ -0,0 +1,191 @@
+"""
+mbed SDK
+Copyright (c) 2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import split,splitext, basename
+
+class Folder:
+ def __init__(self, name):
+ self.name = name
+ self.children = []
+
+ def contains(self, folderName):
+ for child in self.children:
+ if child.name == folderName:
+ return True
+ return False
+
+ def __str__(self):
+ retval = self.name + " "
+ if len(self.children) > 0:
+ retval += "[ "
+ for child in self.children:
+ retval += child.__str__()
+ retval += " ]"
+
+ return retval
+
+ def findChild(self, folderName):
+ for child in self.children:
+ if child.name == folderName:
+ return child
+ return None
+
+ def addChild(self, folderName):
+ if folderName == '':
+ return None
+
+ if not self.contains(folderName):
+ self.children.append(Folder(folderName))
+
+ return self.findChild(folderName)
+
+class SimplicityV3(Exporter):
+ NAME = 'SimplicityV3'
+ TOOLCHAIN = 'GCC_ARM'
+
+ TARGETS = [
+ 'EFM32GG_STK3700',
+ 'EFM32ZG_STK3200',
+ 'EFM32LG_STK3600',
+ 'EFM32WG_STK3800',
+ 'EFM32HG_STK3400',
+ 'EFM32PG_STK3401'
+ ]
+
+ PARTS = {
+ 'EFM32GG_STK3700': 'com.silabs.mcu.si32.efm32.efm32gg.efm32gg990f1024',
+ 'EFM32ZG_STK3200': 'com.silabs.mcu.si32.efm32.efm32zg.efm32zg222f32',
+ 'EFM32LG_STK3600': 'com.silabs.mcu.si32.efm32.efm32lg.efm32lg990f256',
+ 'EFM32WG_STK3800': 'com.silabs.mcu.si32.efm32.efm32wg.efm32wg990f256',
+ 'EFM32HG_STK3400': 'com.silabs.mcu.si32.efm32.efm32hg.efm32hg322f64',
+ 'EFM32PG_STK3401': 'com.silabs.mcu.si32.efm32.efm32pg1b.efm32pg1b200f256gm48'
+ }
+
+ KITS = {
+ 'EFM32GG_STK3700': 'com.silabs.kit.si32.efm32.efm32gg.stk3700',
+ 'EFM32ZG_STK3200': 'com.silabs.kit.si32.efm32.efm32zg.stk3200',
+ 'EFM32LG_STK3600': 'com.silabs.kit.si32.efm32.efm32lg.stk3600',
+ 'EFM32WG_STK3800': 'com.silabs.kit.si32.efm32.efm32wg.stk3800',
+ 'EFM32HG_STK3400': 'com.silabs.kit.si32.efm32.efm32hg.slstk3400a',
+ 'EFM32PG_STK3401': 'com.silabs.kit.si32.efm32.efm32pg.slstk3401a'
+ }
+
+ FILE_TYPES = {
+ 'c_sources':'1',
+ 'cpp_sources':'1',
+ 's_sources':'1'
+ }
+
+ EXCLUDED_LIBS = [
+ 'm',
+ 'c',
+ 'gcc',
+ 'nosys',
+ 'supc++',
+ 'stdc++'
+ ]
+
+ DOT_IN_RELATIVE_PATH = False
+
+ orderedPaths = Folder("Root")
+
+ def check_and_add_path(self, path):
+ levels = path.split('/')
+ base = self.orderedPaths
+ for level in levels:
+ if base.contains(level):
+ base = base.findChild(level)
+ else:
+ base.addChild(level)
+ base = base.findChild(level)
+
+
+ def generate(self):
+ # "make" wants Unix paths
+ self.resources.win_to_unix()
+
+ main_files = []
+
+ EXCLUDED_LIBS = [
+ 'm',
+ 'c',
+ 'gcc',
+ 'nosys',
+ 'supc++',
+ 'stdc++'
+ ]
+
+ for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
+ r = getattr(self.resources, r_type)
+ if r:
+ for source in r:
+ self.check_and_add_path(split(source)[0])
+
+ if not ('/' in source):
+ main_files.append(source)
+
+ libraries = []
+ for lib in self.resources.libraries:
+ l, _ = splitext(basename(lib))
+ if l[3:] not in EXCLUDED_LIBS:
+ libraries.append(l[3:])
+
+ defines = []
+ for define in self.get_symbols():
+ if '=' in define:
+ keyval = define.split('=')
+ defines.append( (keyval[0], keyval[1]) )
+ else:
+ defines.append( (define, '') )
+
+ self.check_and_add_path(split(self.resources.linker_script)[0])
+
+ ctx = {
+ 'name': self.program_name,
+ 'main_files': main_files,
+ 'recursiveFolders': self.orderedPaths,
+ 'object_files': self.resources.objects,
+ 'include_paths': self.resources.inc_dirs,
+ 'library_paths': self.resources.lib_dirs,
+ 'linker_script': self.resources.linker_script,
+ 'libraries': libraries,
+ 'symbols': self.get_symbols(),
+ 'defines': defines,
+ 'part': self.PARTS[self.target],
+ 'kit': self.KITS[self.target],
+ 'loopcount': 0
+ }
+
+ ## Strip main folder from include paths because ssproj is not capable of handling it
+ if '.' in ctx['include_paths']:
+ ctx['include_paths'].remove('.')
+
+ '''
+ Suppress print statements
+ print('\n')
+ print(self.target)
+ print('\n')
+ print(ctx)
+ print('\n')
+ print(self.orderedPaths)
+ for path in self.orderedPaths.children:
+ print(path.name + "\n")
+ for bpath in path.children:
+ print("\t" + bpath.name + "\n")
+ '''
+
+ self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.program_name)
diff --git a/tools/export/simplicityv3_slsproj.tmpl b/tools/export/simplicityv3_slsproj.tmpl
new file mode 100644
index 0000000..4324eb1
--- /dev/null
+++ b/tools/export/simplicityv3_slsproj.tmpl
@@ -0,0 +1,140 @@
+
+
+{# Hierarchically include all folders into the project #}
+ {%- for child in recursiveFolders.children recursive %}
+
+ {%- if child.children -%}
+ {{ loop(child.children) }}
+ {%- endif %}
+
+ {%- endfor %}
+
+{# Include all source files not belonging to a subfolder separately #}
+ {%- for file in main_files -%}
+
+ {%- endfor %}
+
+
+
+
+
+
+{# Add all include paths to the managed build compiler, paths relative to project #}
+ {%- for path in include_paths %}
+
+ {%- endfor %}
+{# Add all mbed-defined #Defines for the preprocessor #}
+ {%- for define, value in defines %}
+
+ {%- endfor %}
+{# Include all standard libraries that mbed requires #}
+
+
+
+
+
+
+
+{# Include exported libraries #}
+ {%- for library in libraries %}
+
+ {%- endfor %}
+{# Add library search paths #}
+ {%- for path in library_paths %}
+
+ {%- endfor %}
+{# Add in separate object files if needed #}
+ {%- if object_files %}
+
+ {%- endif %}
+{# Manually override linker ordering #}
+ {%- if libraries %}
+
+ {%- endif %}
+{# Define mbed-specific linker file #}
+
+
+{# Make sure to wrap main in order to get clock initialization done right #}
+
+
+{# For debug build, don't apply optimizations #}
+
+
+
+
+
+
+
+
+{# Add all include paths to the managed build compiler, paths relative to project #}
+ {%- for path in include_paths %}
+
+ {%- endfor %}
+{# Add all mbed-defined #Defines for the preprocessor #}
+ {%- for define, value in defines %}
+
+ {%- endfor %}
+{# Include all standard libraries that mbed requires #}
+
+
+
+
+
+
+{# Include exported libraries #}
+ {%- for library in libraries %}
+
+ {%- endfor %}
+{# Add library search paths #}
+ {%- for path in library_paths %}
+
+ {%- endfor %}
+{# Add in separate object files if needed #}
+ {%- if object_files %}
+
+ {%- endif %}
+{# Manually override linker ordering #}
+ {%- if libraries %}
+
+ {%- endif %}
+{# Define mbed-specific linker file #}
+
+
+{# Make sure to wrap main in order to get clock initialization done right #}
+
+
+{# Use optimize for size on release build #}
+
+
+
+
+
+
+
diff --git a/tools/export/sw4stm32.py b/tools/export/sw4stm32.py
new file mode 100644
index 0000000..0d9e8e8
--- /dev/null
+++ b/tools/export/sw4stm32.py
@@ -0,0 +1,97 @@
+"""
+mbed SDK
+Copyright (c) 2011-2016 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import splitext, basename, join
+from random import randint
+from tools.utils import mkdir
+
+
+class Sw4STM32(Exporter):
+ NAME = 'Sw4STM32'
+ TOOLCHAIN = 'GCC_ARM'
+
+ BOARDS = {
+ # 'DISCO_F051R8': {'name': 'STM32F0DISCOVERY', 'mcuId': 'STM32F051R8Tx'},
+ # 'DISCO_F303VC': {'name': 'STM32F3DISCOVERY', 'mcuId': 'STM32F303VCTx'},
+ 'DISCO_F334C8': {'name': 'STM32F3348DISCOVERY', 'mcuId': 'STM32F334C8Tx'},
+ # 'DISCO_F401VC': {'name': 'STM32F401C-DISCO', 'mcuId': 'STM32F401VCTx'},
+ 'DISCO_F407VG': {'name': 'STM32F4DISCOVERY', 'mcuId': 'STM32F407VGTx'},
+ 'DISCO_F429ZI': {'name': 'STM32F429I-DISCO', 'mcuId': 'STM32F429ZITx'},
+ 'DISCO_F746NG': {'name': 'STM32F746G-DISCO', 'mcuId': 'STM32F746NGHx'},
+ 'DISCO_L053C8': {'name': 'STM32L0538DISCOVERY', 'mcuId': 'STM32L053C8Tx'},
+ 'DISCO_L476VG': {'name': 'STM32L476G-DISCO', 'mcuId': 'STM32L476VGTx'},
+ 'DISCO_F469NI': {'name': 'DISCO-F469NI', 'mcuId': 'STM32F469NIHx'},
+ 'NUCLEO_F030R8': {'name': 'NUCLEO-F030R8', 'mcuId': 'STM32F030R8Tx'},
+ 'NUCLEO_F070RB': {'name': 'NUCLEO-F070RB', 'mcuId': 'STM32F070RBTx'},
+ 'NUCLEO_F072RB': {'name': 'NUCLEO-F072RB', 'mcuId': 'STM32F072RBTx'},
+ 'NUCLEO_F091RC': {'name': 'NUCLEO-F091RC', 'mcuId': 'STM32F091RCTx'},
+ 'NUCLEO_F103RB': {'name': 'NUCLEO-F103RB', 'mcuId': 'STM32F103RBTx'},
+ 'NUCLEO_F302R8': {'name': 'NUCLEO-F302R8', 'mcuId': 'STM32F302R8Tx'},
+ 'NUCLEO_F303RE': {'name': 'NUCLEO-F303RE', 'mcuId': 'STM32F303RETx'},
+ 'NUCLEO_F334R8': {'name': 'NUCLEO-F334R8', 'mcuId': 'STM32F334R8Tx'},
+ 'NUCLEO_F401RE': {'name': 'NUCLEO-F401RE', 'mcuId': 'STM32F401RETx'},
+ 'NUCLEO_F411RE': {'name': 'NUCLEO-F411RE', 'mcuId': 'STM32F411RETx'},
+ 'NUCLEO_F446RE': {'name': 'NUCLEO-F446RE', 'mcuId': 'STM32F446RETx'},
+ 'NUCLEO_L031K6': {'name': 'NUCLEO-L031K6', 'mcuId': 'STM32L031K6Tx'},
+ 'NUCLEO_L053R8': {'name': 'NUCLEO-L053R8', 'mcuId': 'STM32L053R8Tx'},
+ 'NUCLEO_L073RZ': {'name': 'NUCLEO-L073RZ', 'mcuId': 'STM32L073RZTx'},
+ 'NUCLEO_L152RE': {'name': 'NUCLEO-L152RE', 'mcuId': 'STM32L152RETx'},
+ 'NUCLEO_L476RG': {'name': 'NUCLEO-L476RG', 'mcuId': 'STM32L476RGTx'},
+ 'NUCLEO_F031K6': {'name': 'NUCLEO-F031K6', 'mcuId': 'STM32F031K6Tx'},
+ 'NUCLEO_F042K6': {'name': 'NUCLEO-F042K6', 'mcuId': 'STM32F042K6Tx'},
+ 'NUCLEO_F303K8': {'name': 'NUCLEO-F303K8', 'mcuId': 'STM32F303K8Tx'},
+ 'NUCLEO_F410RB': {'name': 'NUCLEO-F410RB', 'mcuId': 'STM32F410RBTx'},
+ }
+
+ TARGETS = BOARDS.keys()
+
+ def __gen_dir(self, dirname):
+ settings = join(self.inputDir, dirname)
+ mkdir(settings)
+
+ def __generate_uid(self):
+ return "%0.9u" % randint(0, 999999999)
+
+ def generate(self):
+ libraries = []
+ for lib in self.resources.libraries:
+ l, _ = splitext(basename(lib))
+ libraries.append(l[3:])
+
+ ctx = {
+ 'name': self.program_name,
+ 'include_paths': self.resources.inc_dirs,
+ 'linker_script': self.resources.linker_script,
+ 'library_paths': self.resources.lib_dirs,
+ 'object_files': self.resources.objects,
+ 'libraries': libraries,
+ 'symbols': self.get_symbols(),
+ 'board_name': self.BOARDS[self.target.upper()]['name'],
+ 'mcu_name': self.BOARDS[self.target.upper()]['mcuId'],
+ 'debug_config_uid': self.__generate_uid(),
+ 'debug_tool_compiler_uid': self.__generate_uid(),
+ 'debug_tool_compiler_input_uid': self.__generate_uid(),
+ 'release_config_uid': self.__generate_uid(),
+ 'release_tool_compiler_uid': self.__generate_uid(),
+ 'release_tool_compiler_input_uid': self.__generate_uid(),
+ 'uid': self.__generate_uid()
+ }
+
+ self.__gen_dir('.settings')
+ self.gen_file('sw4stm32_language_settings_commom.tmpl', ctx, '.settings/language.settings.xml')
+ self.gen_file('sw4stm32_project_common.tmpl', ctx, '.project')
+ self.gen_file('sw4stm32_cproject_common.tmpl', ctx, '.cproject')
diff --git a/tools/export/sw4stm32_cproject_common.tmpl b/tools/export/sw4stm32_cproject_common.tmpl
new file mode 100644
index 0000000..0128f69
--- /dev/null
+++ b/tools/export/sw4stm32_cproject_common.tmpl
@@ -0,0 +1,212 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/sw4stm32_language_settings_commom.tmpl b/tools/export/sw4stm32_language_settings_commom.tmpl
new file mode 100644
index 0000000..d138720
--- /dev/null
+++ b/tools/export/sw4stm32_language_settings_commom.tmpl
@@ -0,0 +1,25 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/sw4stm32_project_common.tmpl b/tools/export/sw4stm32_project_common.tmpl
new file mode 100644
index 0000000..2e0378c
--- /dev/null
+++ b/tools/export/sw4stm32_project_common.tmpl
@@ -0,0 +1,28 @@
+
+
+ {{name}}
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+ fr.ac6.mcu.ide.core.MCUProjectNature
+
+
diff --git a/tools/export/uvision.uvproj.tmpl b/tools/export/uvision.uvproj.tmpl
new file mode 100644
index 0000000..ab8827b
--- /dev/null
+++ b/tools/export/uvision.uvproj.tmpl
@@ -0,0 +1,403 @@
+
+
+
+ 1.1
+
+ ###This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-Uvision
+
+
+
+ mbed FRDM-KL25Z
+ 0x4
+ ARM-ADS
+
+
+ MKL25Z128xxx4
+ Freescale Semiconductor
+ IRAM(0x1FFFF000-0x1FFFFFFF) IRAM2(0x20000000-0x20002FFF) IROM(0x0-0x1FFFF) CLOCK(8000000) CPUTYPE("Cortex-M0+") ELITTLE
+
+ "STARTUP\Freescale\Kinetis\startup_MKL25Z4.s" ("Freescale MKL25Zxxxxxx4 Startup Code")
+ ULP2CM3(-O2510 -S0 -C0 -FO15 -FD20000000 -FC800 -FN1 -FF0MK_P128_48MHZ -FS00 -FL020000)
+ 6533
+ MKL25Z4.H
+
+
+
+
+
+
+
+
+
+ SFD\Freescale\Kinetis\MKL25Z4.sfr
+ 0
+
+
+
+ Freescale\Kinetis\
+ Freescale\Kinetis\
+
+ 0
+ 0
+ 0
+ 0
+ 1
+
+ .\build\
+ MBED_11
+ 1
+ 0
+ 0
+ 1
+ 1
+ .\build\
+ 1
+ 0
+ 0
+
+ 0
+ 0
+
+
+ 0
+ 0
+ 0
+ 0
+
+
+ 0
+ 0
+
+
+ 0
+ 0
+
+
+ 1
+ 0
+ fromelf --bin --output=@L.bin !L
+
+ 0
+ 0
+
+ 0
+
+
+
+ 0
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+ 0
+ 0
+ 0
+ 3
+
+
+
+
+ SARMCM3.DLL
+
+ DARMCM1.DLL
+ -pCM0+
+ SARMCM3.DLL
+
+ TARMCM1.DLL
+ -pCM0+
+
+
+
+ 1
+ 0
+ 0
+ 0
+ 16
+
+
+ 0
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+
+
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+ 1
+
+ 0
+ 14
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BIN\CMSIS_AGDI.dll
+
+
+
+
+ 1
+ 0
+ 0
+ 1
+ 1
+ 4105
+
+ BIN\CMSIS_AGDI.dll
+ "" ()
+
+
+
+
+ 0
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+ 1
+ 1
+ 0
+ 1
+ 1
+ 0
+ 0
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+ 0
+ "Cortex-M0+"
+
+ 0
+ 0
+ 0
+ 1
+ 1
+ 0
+ 0
+ 0
+ 1
+ 0
+ 8
+ 0
+ 0
+ 0
+ 3
+ 3
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x1ffff000
+ 0x1000
+
+
+ 1
+ 0x0
+ 0x20000
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x20000
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x20000000
+ 0x3000
+
+
+ 0
+ 0x0
+ 0x0
+
+
+
+
+
+ 1
+ 1
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 1
+
+ --gnu --no_rtti
+
+
+
+
+
+
+ 1
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
+
+
+
+
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+ 0x00000000
+ 0x10000000
+ mbed\TARGET_KL25Z\TOOLCHAIN_ARM_STD\MKL25Z4.sct
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/uvision4.py b/tools/export/uvision4.py
new file mode 100644
index 0000000..fc0e34e
--- /dev/null
+++ b/tools/export/uvision4.py
@@ -0,0 +1,89 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from os.path import basename, join, dirname
+from project_generator_definitions.definitions import ProGenDef
+
+from tools.export.exporters import Exporter
+from tools.targets import TARGET_MAP, TARGET_NAMES
+
+# If you wish to add a new target, add it to project_generator_definitions, and then
+# define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
+# There are 2 default mbed templates (predefined settings) uvision.uvproj and uvproj_microlib.uvproj.tmpl
+class Uvision4(Exporter):
+ """
+ Exporter class for uvision. This class uses project generator.
+ """
+    # These 2 are currently for exporters backward compatibility
+ NAME = 'uVision4'
+ TOOLCHAIN = 'ARM'
+ # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
+ PROGEN_ACTIVE = True
+
+ # backward compatibility with our scripts
+ TARGETS = []
+ for target in TARGET_NAMES:
+ try:
+ if (ProGenDef('uvision').is_supported(str(TARGET_MAP[target])) or
+ ProGenDef('uvision').is_supported(TARGET_MAP[target].progen['target'])):
+ TARGETS.append(target)
+ except AttributeError:
+ # target is not supported yet
+ continue
+
+ def get_toolchain(self):
+ return TARGET_MAP[self.target].default_toolchain
+
+ def generate(self):
+ """ Generates the project files """
+ project_data = self.progen_get_project_data()
+ tool_specific = {}
+ # Expand tool specific settings by uvision specific settings which are required
+ try:
+ if TARGET_MAP[self.target].progen['uvision']['template']:
+ tool_specific['uvision'] = TARGET_MAP[self.target].progen['uvision']
+ except KeyError:
+        # use the default template
+        # provided by the mbed projects
+ tool_specific['uvision'] = {
+ 'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')],
+ }
+
+ project_data['tool_specific'] = {}
+ project_data['tool_specific'].update(tool_specific)
+
+ # get flags from toolchain and apply
+ project_data['tool_specific']['uvision']['misc'] = {}
+ project_data['tool_specific']['uvision']['misc']['asm_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['asm']))
+ project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['c']))
+ # not compatible with c99 flag set in the template
+ project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99")
+ project_data['tool_specific']['uvision']['misc']['cxx_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['ld']))
+ project_data['tool_specific']['uvision']['misc']['ld_flags'] = self.toolchain.flags['ld']
+
+ i = 0
+ for macro in project_data['common']['macros']:
+ # armasm does not like floating numbers in macros, timestamp to int
+ if macro.startswith('MBED_BUILD_TIMESTAMP'):
+ timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
+ project_data['common']['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
+ # armasm does not even accept MACRO=string
+ if macro.startswith('MBED_USERNAME'):
+ project_data['common']['macros'].pop(i)
+ i += 1
+ project_data['common']['macros'].append('__ASSERT_MSG')
+ self.progen_gen_file('uvision', project_data)
+
diff --git a/tools/export/uvision5.py b/tools/export/uvision5.py
new file mode 100644
index 0000000..7662ffd
--- /dev/null
+++ b/tools/export/uvision5.py
@@ -0,0 +1,89 @@
+"""
+mbed SDK
+Copyright (c) 2016 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from os.path import basename, join, dirname
+from project_generator_definitions.definitions import ProGenDef
+
+from tools.export.exporters import Exporter
+from tools.targets import TARGET_MAP, TARGET_NAMES
+
+# If you wish to add a new target, add it to project_generator_definitions, and then
+# define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
+# There are 2 default mbed templates (predefined settings) uvision.uvproj and uvproj_microlib.uvproj.tmpl
+class Uvision5(Exporter):
+ """
+ Exporter class for uvision5. This class uses project generator.
+ """
+    # These 2 are currently for exporters backward compatibility
+ NAME = 'uVision5'
+ TOOLCHAIN = 'ARM'
+ # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
+ PROGEN_ACTIVE = True
+
+ # backward compatibility with our scripts
+ TARGETS = []
+ for target in TARGET_NAMES:
+ try:
+ if (ProGenDef('uvision5').is_supported(str(TARGET_MAP[target])) or
+ ProGenDef('uvision5').is_supported(TARGET_MAP[target].progen['target'])):
+ TARGETS.append(target)
+ except AttributeError:
+ # target is not supported yet
+ continue
+
+ def get_toolchain(self):
+ return TARGET_MAP[self.target].default_toolchain
+
+ def generate(self):
+ """ Generates the project files """
+ project_data = self.progen_get_project_data()
+ tool_specific = {}
+ # Expand tool specific settings by uvision specific settings which are required
+ try:
+ if TARGET_MAP[self.target].progen['uvision5']['template']:
+ tool_specific['uvision5'] = TARGET_MAP[self.target].progen['uvision5']
+ except KeyError:
+        # use the default template
+        # provided by the mbed projects
+ tool_specific['uvision5'] = {
+ 'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')],
+ }
+
+ project_data['tool_specific'] = {}
+ project_data['tool_specific'].update(tool_specific)
+
+ # get flags from toolchain and apply
+ project_data['tool_specific']['uvision5']['misc'] = {}
+ project_data['tool_specific']['uvision5']['misc']['asm_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['asm']))
+ project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['c']))
+ # not compatible with c99 flag set in the template
+ project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99")
+ project_data['tool_specific']['uvision5']['misc']['cxx_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['ld']))
+ project_data['tool_specific']['uvision5']['misc']['ld_flags'] = self.toolchain.flags['ld']
+
+ i = 0
+ for macro in project_data['common']['macros']:
+ # armasm does not like floating numbers in macros, timestamp to int
+ if macro.startswith('MBED_BUILD_TIMESTAMP'):
+ timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
+ project_data['common']['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
+ # armasm does not even accept MACRO=string
+ if macro.startswith('MBED_USERNAME'):
+ project_data['common']['macros'].pop(i)
+ i += 1
+ project_data['common']['macros'].append('__ASSERT_MSG')
+ self.progen_gen_file('uvision5', project_data)
+
diff --git a/tools/export/uvision_microlib.uvproj.tmpl b/tools/export/uvision_microlib.uvproj.tmpl
new file mode 100644
index 0000000..fd0d552
--- /dev/null
+++ b/tools/export/uvision_microlib.uvproj.tmpl
@@ -0,0 +1,413 @@
+
+
+
+ 1.1
+
+ ###This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-Uvision
+
+
+
+ mbed FRDM-KL05Z
+ 0x4
+ ARM-ADS
+
+
+ MKL05Z32xxx4
+ Freescale Semiconductor
+ IRAM(0x1FFFFC00-0x1FFFFFFF) IRAM2(0x20000000-0x20000BFF) IROM(0x0-0x07FFF) CLOCK(8000000) CPUTYPE("Cortex-M0+") ELITTLE
+
+ "STARTUP\Freescale\Kinetis\startup_MKL05Z4.s" ("Freescale MKL05Zxxxxxx4 Startup Code")
+ ULP2CM3(-O2510 -S0 -C0 -FO15 -FD20000000 -FC800 -FN1 -FF0MK_P32_48MHZ -FS00 -FL08000)
+ 6544
+ MKL05Z4.H
+
+
+
+
+
+
+
+
+
+ SFD\Freescale\Kinetis\MKL05Z4.sfr
+ 0
+
+
+
+ Freescale\Kinetis\
+ Freescale\Kinetis\
+
+ 0
+ 0
+ 0
+ 0
+ 1
+
+ .\build\
+ MBED_11
+ 1
+ 0
+ 0
+ 1
+ 1
+ .\build\
+ 1
+ 0
+ 0
+
+ 0
+ 0
+
+
+ 0
+ 0
+ 0
+ 0
+
+
+ 0
+ 0
+
+
+ 0
+ 0
+
+
+ 1
+ 0
+ fromelf --bin --output=@L.bin !L
+
+ 0
+ 0
+
+ 0
+
+
+
+ 0
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+ 0
+ 0
+ 0
+ 3
+
+
+
+
+ SARMCM3.DLL
+
+ DARMCM1.DLL
+ -pCM0+
+ SARMCM3.DLL
+
+ TARMCM1.DLL
+ -pCM0+
+
+
+
+ 1
+ 0
+ 0
+ 0
+ 16
+
+
+ 0
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+
+
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+ 1
+
+ 0
+ 14
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BIN\CMSIS_AGDI.dll
+
+
+
+
+ 1
+ 0
+ 0
+ 1
+ 1
+ 4105
+
+ BIN\CMSIS_AGDI.dll
+ "" ()
+
+
+
+
+ 0
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+ 1
+ 1
+ 0
+ 1
+ 1
+ 0
+ 0
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 1
+ 0
+ 0
+ "Cortex-M0+"
+
+ 0
+ 0
+ 0
+ 1
+ 1
+ 0
+ 0
+ 0
+ 1
+ 0
+ 8
+ 1
+ 0
+ 0
+ 3
+ 3
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x1ffffc00
+ 0x400
+
+
+ 1
+ 0x0
+ 0x8000
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 1
+ 0x0
+ 0x8000
+
+
+ 1
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x0
+ 0x0
+
+
+ 0
+ 0x1ffffc00
+ 0x400
+
+
+ 0
+ 0x0
+ 0x0
+
+
+
+
+
+ 1
+ 1
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 1
+
+ --gnu --no_rtti
+
+
+ .; env; mbed;
+
+
+
+ 1
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
+
+
+
+
+ 0
+ 0
+ 0
+ 0
+ 1
+ 0
+ 0x00000000
+ 0x10000000
+ None
+
+
+
+
+
+
+
+
+
+
+
+
+
+ src
+
+
+
+ main.cpp
+ 8
+ main.cpp
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/export/zip.py b/tools/export/zip.py
new file mode 100644
index 0000000..b9828a6
--- /dev/null
+++ b/tools/export/zip.py
@@ -0,0 +1,41 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from exporters import Exporter
+from os.path import basename
+
+
+class ZIP(Exporter):
+ NAME = 'ZIP'
+
+ TARGETS = [
+ ]
+
+ USING_MICROLIB = [
+ ]
+
+ FILE_TYPES = {
+ 'c_sources':'1',
+ 'cpp_sources':'8',
+ 's_sources':'2'
+ }
+
+ def get_toolchain(self):
+ return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
+
+ def generate(self):
+ return True
+
\ No newline at end of file
diff --git a/tools/export_test.py b/tools/export_test.py
new file mode 100644
index 0000000..fdb990c
--- /dev/null
+++ b/tools/export_test.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import sys
+from os.path import join, abspath, dirname, exists
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+from shutil import move
+
+from tools.paths import *
+from tools.utils import mkdir, cmd
+from tools.export import export, setup_user_prj
+
+
+USR_PRJ_NAME = "usr_prj"
+USER_PRJ = join(EXPORT_WORKSPACE, USR_PRJ_NAME)
+USER_SRC = join(USER_PRJ, "src")
+
+
+def setup_test_user_prj():
+ if exists(USER_PRJ):
+ print 'Test user project already generated...'
+ return
+
+ setup_user_prj(USER_PRJ, join(TEST_DIR, "rtos", "mbed", "basic"), [join(LIB_DIR, "rtos"), join(LIB_DIR, "tests", "mbed", "env")])
+
+ # FAKE BUILD URL
+ open(join(USER_SRC, "mbed.bld"), 'w').write("http://mbed.org/users/mbed_official/code/mbed/builds/976df7c37ad5\n")
+
+
+def fake_build_url_resolver(url):
+ # FAKE BUILD URL: Ignore the URL, always return the path to the mbed library
+ return {'path':MBED_LIBRARIES, 'name':'mbed'}
+
+
+def test_export(toolchain, target, expected_error=None):
+ if toolchain is None and target is None:
+ base_dir = join(EXPORT_TMP, "zip")
+ else:
+ base_dir = join(EXPORT_TMP, toolchain, target)
+ temp_dir = join(base_dir, "temp")
+ mkdir(temp_dir)
+
+ zip_path, report = export(USER_PRJ, USR_PRJ_NAME, toolchain, target, base_dir, temp_dir, False, None, fake_build_url_resolver)
+
+ if report['success']:
+ move(zip_path, join(EXPORT_DIR, "export_%s_%s.zip" % (toolchain, target)))
+ print "[OK]"
+ else:
+ if expected_error is None:
+ print '[ERRROR] %s' % report['errormsg']
+ else:
+ if (zip_path is None) and (expected_error in report['errormsg']):
+ print '[OK]'
+ else:
+ print '[ERROR]'
+ print ' zip:', zip_path
+ print ' msg:', report['errormsg']
+
+
+if __name__ == '__main__':
+ setup_test_user_prj()
+
+ for toolchain, target in [
+ ('zip', 'LPC1768'),
+
+ ('emblocks', 'LPC1768'),
+ ('emblocks', 'LPC1549'),
+ ('emblocks', 'LPC1114'),
+ ('emblocks', 'LPC11U35_401'),
+ ('emblocks', 'LPC11U35_501'),
+ ('emblocks', 'LPCCAPPUCCINO'),
+ ('emblocks', 'LPC2368'),
+ ('emblocks', 'STM32F407'),
+ ('emblocks', 'DISCO_F100RB'),
+ ('emblocks', 'DISCO_F051R8'),
+ ('emblocks', 'DISCO_F407VG'),
+ ('emblocks', 'DISCO_F303VC'),
+ ('emblocks', 'NRF51822'),
+ ('emblocks', 'NUCLEO_F401RE'),
+ ('emblocks', 'NUCLEO_F410RB'),
+ ('emblocks', 'NUCLEO_F411RE'),
+ ('emblocks', 'MTS_MDOT_F405RG'),
+ ('emblocks', 'MTS_MDOT_F411RE'),
+
+ ('coide', 'KL05Z'),
+ ('coide', 'KL25Z'),
+ ('coide', 'LPC1768'),
+ ('coide', 'ARCH_PRO'),
+ ('coide', 'DISCO_F407VG'),
+ ('coide', 'NUCLEO_F401RE'),
+ ('coide', 'NUCLEO_F410RB'),
+ ('coide', 'NUCLEO_F411RE'),
+ ('coide', 'DISCO_F429ZI'),
+ #('coide', 'DISCO_F469NI'), removed because template not available
+ ('coide', 'NUCLEO_F334R8'),
+ ('coide', 'MTS_MDOT_F405RG'),
+ ('coide', 'MTS_MDOT_F411RE'),
+
+ ('uvision', 'LPC1768'),
+ ('uvision', 'LPC11U24'),
+ ('uvision', 'LPC11U35_401'),
+ ('uvision', 'LPC11U35_501'),
+ ('uvision', 'KL25Z'),
+ ('uvision', 'LPC1347'),
+ ('uvision', 'LPC1114'),
+ ('uvision', 'LPC4088'),
+ ('uvision', 'LPC4088_DM'),
+ ('uvision', 'LPC4337'),
+ ('uvision', 'LPC824'),
+ ('uvision', 'SSCI824'),
+ ('uvision', 'HRM1017'),
+
+ ('uvision', 'B96B_F446VE'),
+ ('uvision', 'NUCLEO_F030R8'),
+ ('uvision', 'NUCLEO_F031K6'),
+ ('uvision', 'NUCLEO_F042K6'),
+ ('uvision', 'NUCLEO_F070RB'),
+ ('uvision', 'NUCLEO_F072RB'),
+ ('uvision', 'NUCLEO_F091RC'),
+ ('uvision', 'NUCLEO_F103RB'),
+ ('uvision', 'NUCLEO_F302R8'),
+ ('uvision', 'NUCLEO_F303K8'),
+ ('uvision', 'NUCLEO_F303RE'),
+ ('uvision', 'NUCLEO_F334R8'),
+ ('uvision', 'NUCLEO_F401RE'),
+ ('uvision', 'NUCLEO_F410RB'),
+ ('uvision', 'NUCLEO_F411RE'),
+ ('uvision', 'NUCLEO_F446RE'),
+ ('uvision', 'NUCLEO_L031K6'),
+ ('uvision', 'NUCLEO_L053R8'),
+ ('uvision', 'NUCLEO_L073RZ'),
+ ('uvision', 'NUCLEO_L152RE'),
+ ('uvision', 'NUCLEO_L476RG'),
+ ('uvision', 'MTS_MDOT_F405RG'),
+ ('uvision', 'MAXWSNENV'),
+ ('uvision', 'MAX32600MBED'),
+ ('uvision', 'DISCO_F051R8'),
+ ('uvision', 'DISCO_F103RB'),
+ ('uvision', 'DISCO_F303VC'),
+ ('uvision', 'DISCO_L053C8'),
+ ('uvision', 'DISCO_F334C8'),
+ ('uvision', 'DISCO_F407VG'),
+ ('uvision', 'DISCO_F429ZI'),
+ ('uvision', 'DISCO_F746NG'),
+ ('uvision', 'DISCO_F469NI'),
+ ('uvision', 'DISCO_L476VG'),
+ ('uvision', 'MOTE_L152RC'),
+
+ ('lpcxpresso', 'LPC1768'),
+ ('lpcxpresso', 'LPC4088'),
+ ('lpcxpresso', 'LPC4088_DM'),
+ ('lpcxpresso', 'LPC1114'),
+ ('lpcxpresso', 'LPC11U35_401'),
+ ('lpcxpresso', 'LPC11U35_501'),
+ ('lpcxpresso', 'LPCCAPPUCCINO'),
+ ('lpcxpresso', 'LPC1549'),
+ ('lpcxpresso', 'LPC11U68'),
+
+ # Linux path: /home/emimon01/bin/gcc-arm/bin/
+ # Windows path: C:/arm-none-eabi-gcc-4_7/bin/
+ ('gcc_arm', 'LPC1768'),
+ ('gcc_arm', 'LPC4088_DM'),
+ ('gcc_arm', 'LPC1549'),
+ ('gcc_arm', 'LPC1114'),
+ ('gcc_arm', 'LPC11U35_401'),
+ ('gcc_arm', 'LPC11U35_501'),
+ ('gcc_arm', 'LPCCAPPUCCINO'),
+ ('gcc_arm', 'LPC2368'),
+ ('gcc_arm', 'LPC2460'),
+ ('gcc_arm', 'LPC824'),
+ ('gcc_arm', 'SSCI824'),
+
+ ('gcc_arm', 'B96B_F446VE'),
+ ('gcc_arm', 'STM32F407'),
+ ('gcc_arm', 'DISCO_F100RB'),
+ ('gcc_arm', 'DISCO_F051R8'),
+ ('gcc_arm', 'DISCO_F407VG'),
+ ('gcc_arm', 'DISCO_F303VC'),
+ ('gcc_arm', 'DISCO_L053C8'),
+ ('gcc_arm', 'DISCO_F334C8'),
+ ('gcc_arm', 'DISCO_L053C8'),
+ ('gcc_arm', 'DISCO_F429ZI'),
+ ('gcc_arm', 'DISCO_F746NG'),
+ ('gcc_arm', 'NUCLEO_F031K6'),
+ ('gcc_arm', 'NUCLEO_F042K6'),
+ ('gcc_arm', 'NRF51822'),
+ ('gcc_arm', 'RBLAB_BLENANO'),
+ ('gcc_arm', 'HRM1017'),
+ ('gcc_arm', 'NUCLEO_F401RE'),
+ ('gcc_arm', 'NUCLEO_F410RB'),
+ ('gcc_arm', 'NUCLEO_F411RE'),
+ ('gcc_arm', 'NUCLEO_F446RE'),
+ ('gcc_arm', 'ELMO_F411RE'),
+ ('gcc_arm', 'DISCO_F469NI'),
+ ('gcc_arm', 'NUCLEO_F334R8'),
+ ('gcc_arm', 'NUCLEO_L031K6'),
+ ('gcc_arm', 'MAX32600MBED'),
+ ('gcc_arm', 'MTS_MDOT_F405RG'),
+ ('gcc_arm', 'MTS_MDOT_F411RE'),
+ ('gcc_arm', 'RZ_A1H'),
+ ('gcc_arm', 'MAXWSNENV'),
+ ('gcc_arm', 'MAX32600MBED'),
+ ('gcc_arm', 'ARCH_BLE'),
+ ('gcc_arm', 'ARCH_MAX'),
+ ('gcc_arm', 'ARCH_PRO'),
+ ('gcc_arm', 'DELTA_DFCM_NNN40'),
+ ('gcc_arm', 'K20D50M'),
+ ('gcc_arm', 'K22F'),
+ ('gcc_arm', 'K64F'),
+ ('gcc_arm', 'KL05Z'),
+ ('gcc_arm', 'KL25Z'),
+ ('gcc_arm', 'KL43Z'),
+ ('gcc_arm', 'KL46Z'),
+ ('gcc_arm', 'EFM32GG_STK3700'),
+ ('gcc_arm', 'EFM32LG_STK3600'),
+ ('gcc_arm', 'EFM32WG_STK3800'),
+ ('gcc_arm', 'EFM32ZG_STK3200'),
+ ('gcc_arm', 'EFM32HG_STK3400'),
+ ('gcc_arm', 'EFM32PG_STK3401'),
+
+ ('ds5_5', 'LPC1768'),
+ ('ds5_5', 'LPC11U24'),
+ ('ds5_5', 'RZ_A1H'),
+
+ ('iar', 'LPC1768'),
+ ('iar', 'LPC4088_DM'),
+ ('iar', 'LPC1347'),
+
+ ('iar', 'B96B_F446VE'),
+ ('iar', 'NUCLEO_F030R8'),
+ ('iar', 'NUCLEO_F031K6'),
+ ('iar', 'NUCLEO_F042K6'),
+ ('iar', 'NUCLEO_F070RB'),
+ ('iar', 'NUCLEO_F072RB'),
+ ('iar', 'NUCLEO_F091RC'),
+ ('iar', 'NUCLEO_F302R8'),
+ ('iar', 'NUCLEO_F303K8'),
+ ('iar', 'NUCLEO_F303RE'),
+ ('iar', 'NUCLEO_F334R8'),
+ ('iar', 'NUCLEO_F401RE'),
+ ('iar', 'NUCLEO_F410RB'),
+ ('iar', 'NUCLEO_F411RE'),
+ ('iar', 'NUCLEO_F446RE'),
+ ('iar', 'NUCLEO_L031K6'),
+ ('iar', 'NUCLEO_L053R8'),
+ ('iar', 'NUCLEO_L073RZ'),
+ ('iar', 'NUCLEO_L152RE'),
+ ('iar', 'NUCLEO_L476RG'),
+ ('iar', 'DISCO_L053C8'),
+ ('iar', 'DISCO_F334C8'),
+ ('iar', 'DISCO_F429ZI'),
+ ('iar', 'DISCO_F469NI'),
+ ('iar', 'DISCO_F746NG'),
+ ('iar', 'DISCO_L476VG'),
+ ('iar', 'STM32F407'),
+ ('iar', 'MTS_MDOT_F405RG'),
+ ('iar', 'MTS_MDOT_F411RE'),
+ ('iar', 'MAXWSNENV'),
+ ('iar', 'MAX32600MBED'),
+ ('iar', 'MOTE_L152RC'),
+ ('iar', 'RZ_A1H'),
+
+ # ('sw4stm32', 'DISCO_F051R8'),
+ # ('sw4stm32', 'DISCO_F100RB'),
+ ('sw4stm32', 'DISCO_F303VC'),
+ ('sw4stm32', 'DISCO_F334C8'),
+ # ('sw4stm32', 'DISCO_F401VC'),
+ ('sw4stm32', 'DISCO_F407VG'),
+ ('sw4stm32', 'DISCO_F429ZI'),
+ ('sw4stm32', 'DISCO_F469NI'),
+ ('sw4stm32', 'DISCO_F746NG'),
+ ('sw4stm32', 'DISCO_L053C8'),
+ ('sw4stm32', 'DISCO_L476VG'),
+ ('sw4stm32', 'NUCLEO_F030R8'),
+ ('sw4stm32', 'NUCLEO_F031K6'),
+ ('sw4stm32', 'NUCLEO_F042K6'),
+ ('sw4stm32', 'NUCLEO_F070RB'),
+ ('sw4stm32', 'NUCLEO_F072RB'),
+ ('sw4stm32', 'NUCLEO_F091RC'),
+ ('sw4stm32', 'NUCLEO_F103RB'),
+ ('sw4stm32', 'NUCLEO_F302R8'),
+ ('sw4stm32', 'NUCLEO_F303K8'),
+ ('sw4stm32', 'NUCLEO_F303RE'),
+ ('sw4stm32', 'NUCLEO_F334R8'),
+ ('sw4stm32', 'NUCLEO_F401RE'),
+ ('sw4stm32', 'NUCLEO_F410RB'),
+ ('sw4stm32', 'NUCLEO_F411RE'),
+ ('sw4stm32', 'NUCLEO_F446RE'),
+ ('sw4stm32', 'NUCLEO_L053R8'),
+ ('sw4stm32', 'NUCLEO_L073RZ'),
+ ('sw4stm32', 'NUCLEO_L031K6'),
+ ('sw4stm32', 'NUCLEO_L152RE'),
+ ('sw4stm32', 'NUCLEO_L476RG'),
+ ('sw4stm32', 'NUCLEO_F031K6'),
+ ('sw4stm32', 'NUCLEO_F042K6'),
+ ('sw4stm32', 'NUCLEO_F303K8'),
+ ('sw4stm32', 'NUCLEO_F410RB'),
+
+ ('e2studio', 'RZ_A1H'),
+ # Removed following item to avoid script error
+ #(None, None),
+ ]:
+ print '\n=== Exporting to "%s::%s" ===' % (toolchain, target)
+ test_export(toolchain, target)
+
+ print "\n=== Test error messages ==="
+ test_export('lpcxpresso', 'LPC11U24', expected_error='lpcxpresso')
diff --git a/tools/hooks.py b/tools/hooks.py
new file mode 100644
index 0000000..d8018dd
--- /dev/null
+++ b/tools/hooks.py
@@ -0,0 +1,125 @@
+# Configurable hooks in the build system. Can be used by various platforms
+# to customize the build process.
+
+################################################################################
+# Hooks for the various parts of the build process
+
+# Internal mapping of hooks per tool:
+# hook type name -> {"pre"/"replace"/"post" -> function} (see _hook_add)
+_hooks = {}
+
+# Internal mapping of running hooks: tool name -> bool re-entrancy flag,
+# used by hook_tool to avoid double-hooking super-class calls
+_running_hooks = {}
+
+# Available hook types
+_hook_types = ["binary", "compile", "link", "assemble"]
+
+# Available hook steps
+_hook_steps = ["pre", "replace", "post"]
+
+# Hook the given function. Use this function as a decorator
+def hook_tool(function):
+ tool = function.__name__
+ tool_flag = "_" + tool + "_done"
+ def wrapper(t_self, *args, **kwargs):
+ # if a hook for this tool is already running, it's most likely
+ # coming from a derived class, so don't hook the super class version
+ if _running_hooks.get(tool, False):
+ return function(t_self, *args, **kwargs)
+ _running_hooks[tool] = True
+ # If this tool isn't hooked, return original function
+ if not _hooks.has_key(tool):
+ res = function(t_self, *args, **kwargs)
+ _running_hooks[tool] = False
+ return res
+ tooldesc = _hooks[tool]
+ setattr(t_self, tool_flag, False)
+ # If there is a replace hook, execute the replacement instead
+ if tooldesc.has_key("replace"):
+ res = tooldesc["replace"](t_self, *args, **kwargs)
+ # If the replacement has set the "done" flag, exit now
+ # Otherwise continue as usual
+ if getattr(t_self, tool_flag, False):
+ _running_hooks[tool] = False
+ return res
+ # Execute pre-function before main function if specified
+ if tooldesc.has_key("pre"):
+ tooldesc["pre"](t_self, *args, **kwargs)
+ # Execute the main function now
+ res = function(t_self, *args, **kwargs)
+ # Execute post-function after main function if specified
+ if tooldesc.has_key("post"):
+ post_res = tooldesc["post"](t_self, *args, **kwargs)
+ _running_hooks[tool] = False
+ return post_res or res
+ else:
+ _running_hooks[tool] = False
+ return res
+ return wrapper
+
+class Hook:
+ def __init__(self, target, toolchain):
+ _hooks.clear()
+ self._cmdline_hooks = {}
+ self.toolchain = toolchain
+ target.init_hooks(self, toolchain.__class__.__name__)
+
+ # Hook various functions directly
+ def _hook_add(self, hook_type, hook_step, function):
+ if not hook_type in _hook_types or not hook_step in _hook_steps:
+ return False
+ if not hook_type in _hooks:
+ _hooks[hook_type] = {}
+ _hooks[hook_type][hook_step] = function
+ return True
+
+ def hook_add_compiler(self, hook_step, function):
+ return self._hook_add("compile", hook_step, function)
+
+ def hook_add_linker(self, hook_step, function):
+ return self._hook_add("link", hook_step, function)
+
+ def hook_add_assembler(self, hook_step, function):
+ return self._hook_add("assemble", hook_step, function)
+
+ def hook_add_binary(self, hook_step, function):
+ return self._hook_add("binary", hook_step, function)
+
+ # Hook command lines
+ def _hook_cmdline(self, hook_type, function):
+ if not hook_type in _hook_types:
+ return False
+ self._cmdline_hooks[hook_type] = function
+ return True
+
+ def hook_cmdline_compiler(self, function):
+ return self._hook_cmdline("compile", function)
+
+ def hook_cmdline_linker(self, function):
+ return self._hook_cmdline("link", function)
+
+ def hook_cmdline_assembler(self, function):
+ return self._hook_cmdline("assemble", function)
+
+ def hook_cmdline_binary(self, function):
+ return self._hook_cmdline("binary", function)
+
+ # Return the command line after applying the hook
+ def _get_cmdline(self, hook_type, cmdline):
+ if self._cmdline_hooks.has_key(hook_type):
+ cmdline = self._cmdline_hooks[hook_type](self.toolchain.__class__.__name__, cmdline)
+ return cmdline
+
+ def get_cmdline_compiler(self, cmdline):
+ return self._get_cmdline("compile", cmdline)
+
+ def get_cmdline_linker(self, cmdline):
+ return self._get_cmdline("link", cmdline)
+
+ def get_cmdline_assembler(self, cmdline):
+ return self._get_cmdline("assemble", cmdline)
+
+ def get_cmdline_binary(self, cmdline):
+ return self._get_cmdline("binary", cmdline)
+
+################################################################################
+
diff --git a/tools/host_tests/__init__.py b/tools/host_tests/__init__.py
new file mode 100644
index 0000000..b365450
--- /dev/null
+++ b/tools/host_tests/__init__.py
@@ -0,0 +1,65 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from host_registry import HostRegistry
+
+# Host test supervisors
+from echo import EchoTest
+from rtc_auto import RTCTest
+from stdio_auto import StdioTest
+from hello_auto import HelloTest
+from detect_auto import DetectPlatformTest
+from default_auto import DefaultAuto
+from dev_null_auto import DevNullTest
+from wait_us_auto import WaitusTest
+from tcpecho_server_auto import TCPEchoServerTest
+from udpecho_server_auto import UDPEchoServerTest
+from tcpecho_client_auto import TCPEchoClientTest
+from udpecho_client_auto import UDPEchoClientTest
+from wfi_auto import WFITest
+from serial_nc_rx_auto import SerialNCRXTest
+from serial_nc_tx_auto import SerialNCTXTest
+
+# Populate registry with supervising objects
+# (key = host test name used in test specs, value = handler instance)
+HOSTREGISTRY = HostRegistry()
+HOSTREGISTRY.register_host_test("echo", EchoTest())
+HOSTREGISTRY.register_host_test("default", DefaultAuto())
+HOSTREGISTRY.register_host_test("rtc_auto", RTCTest())
+HOSTREGISTRY.register_host_test("hello_auto", HelloTest())
+HOSTREGISTRY.register_host_test("stdio_auto", StdioTest())
+HOSTREGISTRY.register_host_test("detect_auto", DetectPlatformTest())
+HOSTREGISTRY.register_host_test("default_auto", DefaultAuto())
+HOSTREGISTRY.register_host_test("wait_us_auto", WaitusTest())
+HOSTREGISTRY.register_host_test("dev_null_auto", DevNullTest())
+HOSTREGISTRY.register_host_test("tcpecho_server_auto", TCPEchoServerTest())
+HOSTREGISTRY.register_host_test("udpecho_server_auto", UDPEchoServerTest())
+HOSTREGISTRY.register_host_test("tcpecho_client_auto", TCPEchoClientTest())
+HOSTREGISTRY.register_host_test("udpecho_client_auto", UDPEchoClientTest())
+HOSTREGISTRY.register_host_test("wfi_auto", WFITest())
+HOSTREGISTRY.register_host_test("serial_nc_rx_auto", SerialNCRXTest())
+HOSTREGISTRY.register_host_test("serial_nc_tx_auto", SerialNCTXTest())
+
+###############################################################################
+# Functional interface for test supervisor registry
+###############################################################################
+
+
+def get_host_test(ht_name):
+    """Return the host test object registered under ht_name, or None."""
+    return HOSTREGISTRY.get_host_test(ht_name)
+
+def is_host_test(ht_name):
+    """Return True when ht_name names a registered host test."""
+    return HOSTREGISTRY.is_host_test(ht_name)
diff --git a/tools/host_tests/default_auto.py b/tools/host_tests/default_auto.py
new file mode 100644
index 0000000..0883d79
--- /dev/null
+++ b/tools/host_tests/default_auto.py
@@ -0,0 +1,36 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from sys import stdout
+
+class DefaultAuto():
+ """ Simple, basic host test's test runner waiting for serial port
+ output from MUT, no supervision over test running in MUT is executed.
+ """
+ def test(self, selftest):
+ result = selftest.RESULT_SUCCESS
+ try:
+ while True:
+ c = selftest.mbed.serial_read(512)
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+ stdout.write(c)
+ stdout.flush()
+ except KeyboardInterrupt, _:
+ selftest.notify("\r\n[CTRL+C] exit")
+ result = selftest.RESULT_ERROR
+ return result
diff --git a/tools/host_tests/detect_auto.py b/tools/host_tests/detect_auto.py
new file mode 100644
index 0000000..2999946
--- /dev/null
+++ b/tools/host_tests/detect_auto.py
@@ -0,0 +1,55 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+
+class DetectPlatformTest():
+ PATTERN_MICRO_NAME = "Target '(\w+)'"
+ re_detect_micro_name = re.compile(PATTERN_MICRO_NAME)
+
+ def test(self, selftest):
+ result = True
+
+ c = selftest.mbed.serial_readline() # {{start}} preamble
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+
+ selftest.notify(c.strip())
+ selftest.notify("HOST: Detecting target name...")
+
+ c = selftest.mbed.serial_readline()
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+ selftest.notify(c.strip())
+
+ # Check for target name
+ m = self.re_detect_micro_name.search(c)
+ if m and len(m.groups()):
+ micro_name = m.groups()[0]
+ micro_cmp = selftest.mbed.options.micro == micro_name
+ result = result and micro_cmp
+ selftest.notify("HOST: MUT Target name '%s', expected '%s'... [%s]"% (micro_name,
+ selftest.mbed.options.micro,
+ "OK" if micro_cmp else "FAIL"))
+
+ for i in range(0, 2):
+ c = selftest.mbed.serial_readline()
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+ selftest.notify(c.strip())
+
+ return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/dev_null_auto.py b/tools/host_tests/dev_null_auto.py
new file mode 100644
index 0000000..4538f6d
--- /dev/null
+++ b/tools/host_tests/dev_null_auto.py
@@ -0,0 +1,50 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+class DevNullTest():
+
+ def check_readline(self, selftest, text):
+ """ Reads line from serial port and checks if text was part of read string
+ """
+ result = False
+ c = selftest.mbed.serial_readline()
+ if c and text in c:
+ result = True
+ return result
+
+ def test(self, selftest):
+ result = True
+ # Test should print some text and later stop printing
+ # 'MBED: re-routing stdout to /null'
+ res = self.check_readline(selftest, "re-routing stdout to /null")
+ if not res:
+ # We haven't read preamble line
+ result = False
+ else:
+ # Check if there are printed characters
+ str = ''
+ for i in range(3):
+ c = selftest.mbed.serial_read(32)
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+ else:
+ str += c
+ if len(str) > 0:
+ result = False
+ break
+ selftest.notify("Received %d bytes: %s"% (len(str), str))
+ return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/echo.py b/tools/host_tests/echo.py
new file mode 100644
index 0000000..75e534f
--- /dev/null
+++ b/tools/host_tests/echo.py
@@ -0,0 +1,59 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import uuid
+from sys import stdout
+
+class EchoTest():
+    """Serial echo host test: writes random UUID lines to the MUT and
+    expects each line echoed back unchanged."""
+
+    # Test parameters
+    TEST_SERIAL_BAUDRATE = 115200
+    TEST_LOOP_COUNT = 50
+
+    def test(self, selftest):
+        """ This host test will use mbed serial port with
+            baudrate 115200 to perform echo test on that port.
+        """
+        # Custom initialization for echo test
+        selftest.mbed.init_serial_params(serial_baud=self.TEST_SERIAL_BAUDRATE)
+        selftest.mbed.init_serial()
+
+        # Test function, return True or False to get standard test notification on stdout
+        selftest.mbed.flush()
+        selftest.notify("HOST: Starting the ECHO test")
+        result = True
+
+        """ This ensures that there are no parasites left in the serial buffer.
+        """
+        for i in range(0, 2):
+            selftest.mbed.serial_write("\n")
+            c = selftest.mbed.serial_readline()
+
+        for i in range(0, self.TEST_LOOP_COUNT):
+            # Send a unique line and expect it echoed back verbatim
+            TEST_STRING = str(uuid.uuid4()) + "\n"
+            selftest.mbed.serial_write(TEST_STRING)
+            c = selftest.mbed.serial_readline()
+            if c is None:
+                # Serial read failure aborts the test immediately
+                return selftest.RESULT_IO_SERIAL
+            if c.strip() != TEST_STRING.strip():
+                selftest.notify('HOST: "%s" != "%s"'% (c, TEST_STRING))
+                result = False
+            else:
+                # Progress indicator: one dot per successful echo
+                sys.stdout.write('.')
+                stdout.flush()
+        return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/echo_flow_control.py b/tools/host_tests/echo_flow_control.py
new file mode 100644
index 0000000..7ea11e9
--- /dev/null
+++ b/tools/host_tests/echo_flow_control.py
@@ -0,0 +1,48 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from host_test import Test
+
+
+class EchoTest(Test):
+    """Echo test over the extra serial port with RTS/CTS hardware flow
+    control enabled."""
+    def __init__(self):
+        Test.__init__(self)
+        self.mbed.init_serial()
+        # Enable hardware flow control on the auxiliary serial port
+        self.mbed.extra_serial.rtscts = True
+        self.mbed.reset()
+
+    def test(self):
+        """Write a fixed line 99 times and check each non-empty reply
+        matches it verbatim.  Returns True when every reply matched."""
+        self.mbed.flush()
+        self.notify("Starting the ECHO test")
+        TEST="longer serial test"
+        check = True
+        for i in range(1, 100):
+            self.mbed.extra_serial.write(TEST + "\n")
+            l = self.mbed.extra_serial.readline().strip()
+            if not l: continue
+
+            if l != TEST:
+                check = False
+                self.notify('"%s" != "%s"' % (l, TEST))
+            else:
+                # Progress marker every 10 iterations
+                if (i % 10) == 0:
+                    self.notify('.')
+
+        return check
+
+
+if __name__ == '__main__':
+    EchoTest().run()
diff --git a/tools/host_tests/example/BroadcastReceive.py b/tools/host_tests/example/BroadcastReceive.py
new file mode 100644
index 0000000..2e846ca
--- /dev/null
+++ b/tools/host_tests/example/BroadcastReceive.py
@@ -0,0 +1,25 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+
+BROADCAST_PORT = 58083
+
+# Bind a UDP socket on all interfaces and print every broadcast
+# datagram (payload, sender address) received on the port.
+s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+s.bind(('0.0.0.0', BROADCAST_PORT))
+
+while True:
+    print s.recvfrom(256)
diff --git a/tools/host_tests/example/BroadcastSend.py b/tools/host_tests/example/BroadcastSend.py
new file mode 100644
index 0000000..0a5f8c3
--- /dev/null
+++ b/tools/host_tests/example/BroadcastSend.py
@@ -0,0 +1,30 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+from time import sleep, time
+
+BROADCAST_PORT = 58083
+
+# UDP socket with SO_BROADCAST enabled so sendto() may target the
+# broadcast address; send one timestamped datagram per second.
+s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+s.bind(('', 0))
+s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
+
+while True:
+    print "Broadcasting..."
+    data = 'Hello World: ' + repr(time()) + '\n'
+    s.sendto(data, ('', BROADCAST_PORT))
+    sleep(1)
diff --git a/tools/host_tests/example/MulticastReceive.py b/tools/host_tests/example/MulticastReceive.py
new file mode 100644
index 0000000..9001f40
--- /dev/null
+++ b/tools/host_tests/example/MulticastReceive.py
@@ -0,0 +1,31 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+import struct
+
+MCAST_GRP = '224.1.1.1'
+MCAST_PORT = 5007
+
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
+sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+sock.bind(('', MCAST_PORT))
+# Join the multicast group on all interfaces (INADDR_ANY)
+mreq = struct.pack("4sl", socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
+
+sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
+
+# Print every datagram received from the group
+while True:
+    print sock.recv(10240)
diff --git a/tools/host_tests/example/MulticastSend.py b/tools/host_tests/example/MulticastSend.py
new file mode 100644
index 0000000..8efd453
--- /dev/null
+++ b/tools/host_tests/example/MulticastSend.py
@@ -0,0 +1,30 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+from time import sleep, time
+
+MCAST_GRP = '224.1.1.1'
+MCAST_PORT = 5007
+
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
+# TTL of 2 keeps the datagrams within the local network segments
+sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
+
+# Send one timestamped datagram to the group per second
+while True:
+    print "Multicast to group: %s\n" % MCAST_GRP
+    data = 'Hello World: ' + repr(time()) + '\n'
+    sock.sendto(data, (MCAST_GRP, MCAST_PORT))
+    sleep(1)
diff --git a/tools/host_tests/example/TCPEchoClient.py b/tools/host_tests/example/TCPEchoClient.py
new file mode 100644
index 0000000..dfa9bfd
--- /dev/null
+++ b/tools/host_tests/example/TCPEchoClient.py
@@ -0,0 +1,28 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+
+ECHO_SERVER_ADDRESS = "10.2.202.45"
+ECHO_PORT = 7
+
+# Connect to the echo server, send one message and print the reply
+s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+s.connect((ECHO_SERVER_ADDRESS, ECHO_PORT))
+
+s.sendall('Hello, world')
+data = s.recv(1024)
+s.close()
+print 'Received', repr(data)
diff --git a/tools/host_tests/example/TCPEchoServer.py b/tools/host_tests/example/TCPEchoServer.py
new file mode 100644
index 0000000..1324edb
--- /dev/null
+++ b/tools/host_tests/example/TCPEchoServer.py
@@ -0,0 +1,30 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+
+s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+s.bind(('', 7))
+s.listen(1)
+
+while True:
+ conn, addr = s.accept()
+ print 'Connected by', addr
+ while True:
+ data = conn.recv(1024)
+ if not data: break
+ conn.sendall(data)
+ conn.close()
diff --git a/tools/host_tests/example/UDPEchoClient.py b/tools/host_tests/example/UDPEchoClient.py
new file mode 100644
index 0000000..6a6cf8c
--- /dev/null
+++ b/tools/host_tests/example/UDPEchoClient.py
@@ -0,0 +1,28 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+
+ECHO_SERVER_ADDRESS = '10.2.202.45'
+ECHO_PORT = 7
+
+# Send one datagram to the echo server and print the echoed reply
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+
+sock.sendto("Hello World\n", (ECHO_SERVER_ADDRESS, ECHO_PORT))
+response = sock.recv(256)
+sock.close()
+
+print response
diff --git a/tools/host_tests/example/UDPEchoServer.py b/tools/host_tests/example/UDPEchoServer.py
new file mode 100644
index 0000000..3850348
--- /dev/null
+++ b/tools/host_tests/example/UDPEchoServer.py
@@ -0,0 +1,27 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+
+ECHO_PORT = 7
+
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+sock.bind(('', ECHO_PORT))
+
+# Echo every received datagram back to its sender
+while True:
+    data, address = sock.recvfrom(256)
+    print "datagram from", address
+    sock.sendto(data, address)
diff --git a/tools/host_tests/example/__init__.py b/tools/host_tests/example/__init__.py
new file mode 100644
index 0000000..10e7e1d
--- /dev/null
+++ b/tools/host_tests/example/__init__.py
@@ -0,0 +1,16 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
\ No newline at end of file
diff --git a/tools/host_tests/hello_auto.py b/tools/host_tests/hello_auto.py
new file mode 100644
index 0000000..69b39bf
--- /dev/null
+++ b/tools/host_tests/hello_auto.py
@@ -0,0 +1,34 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+class HelloTest():
+    # Substring the MUT is expected to print
+    HELLO_WORLD = "Hello World"
+
+    def test(self, selftest):
+        """Expect the MUT to print a line containing "Hello World".
+
+        Returns RESULT_IO_SERIAL on read failure, RESULT_SUCCESS when the
+        expected substring is found, RESULT_FAILURE otherwise.
+        """
+        c = selftest.mbed.serial_readline()
+        if c is None:
+            return selftest.RESULT_IO_SERIAL
+        selftest.notify("Read %d bytes:"% len(c))
+        selftest.notify(c.strip())
+
+        result = True
+        # Because we can have targetID here let's try to decode
+        # (the line may carry a prefix, so a substring match is used)
+        if len(c) < len(self.HELLO_WORLD):
+            result = False
+        else:
+            result = self.HELLO_WORLD in c
+        return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/host_registry.py b/tools/host_tests/host_registry.py
new file mode 100644
index 0000000..d523848
--- /dev/null
+++ b/tools/host_tests/host_registry.py
@@ -0,0 +1,36 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+class HostRegistry:
+ """ Class stores registry with host tests and objects representing them
+ """
+ HOST_TESTS = {} # host_test_name -> host_test_ojbect
+
+ def register_host_test(self, ht_name, ht_object):
+ if ht_name not in self.HOST_TESTS:
+ self.HOST_TESTS[ht_name] = ht_object
+
+ def unregister_host_test(self):
+ if ht_name in HOST_TESTS:
+ self.HOST_TESTS[ht_name] = None
+
+ def get_host_test(self, ht_name):
+ return self.HOST_TESTS[ht_name] if ht_name in self.HOST_TESTS else None
+
+ def is_host_test(self, ht_name):
+ return ht_name in self.HOST_TESTS
+
\ No newline at end of file
diff --git a/tools/host_tests/host_test.py b/tools/host_tests/host_test.py
new file mode 100644
index 0000000..fe611cb
--- /dev/null
+++ b/tools/host_tests/host_test.py
@@ -0,0 +1,426 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+# Check if 'serial' module is installed
+try:
+ from serial import Serial
+except ImportError, e:
+ print "Error: Can't import 'serial' module: %s"% e
+ exit(-1)
+
+import os
+import re
+import types
+from sys import stdout
+from time import sleep, time
+from optparse import OptionParser
+
+import host_tests_plugins
+
+# This is a little tricky. We need to add upper directory to path so
+# we can find packages we want from the same level as other files do
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
+from tools.test_api import get_autodetected_MUTS_list
+from tools.test_api import get_module_avail
+
+
+class Mbed:
+    """ Base class for a host driven test
+
+    Owns the command line options, the serial connection to the device under
+    test and the copy / reset primitives delegated to host test plugins.
+    """
+    def __init__(self):
+        # Command line interface for a single host test invocation.
+        parser = OptionParser()
+
+        parser.add_option("-m", "--micro",
+                          dest="micro",
+                          help="The target microcontroller",
+                          metavar="MICRO")
+
+        parser.add_option("-p", "--port",
+                          dest="port",
+                          help="The serial port of the target mbed",
+                          metavar="PORT")
+
+        parser.add_option("-d", "--disk",
+                          dest="disk",
+                          help="The target disk path",
+                          metavar="DISK_PATH")
+
+        parser.add_option("-f", "--image-path",
+                          dest="image_path",
+                          help="Path with target's image",
+                          metavar="IMAGE_PATH")
+
+        parser.add_option("-c", "--copy",
+                          dest="copy_method",
+                          help="Copy method selector",
+                          metavar="COPY_METHOD")
+
+        # NOTE(review): the help text contains a typo ("bianry"); it is a
+        # runtime string, so it is only flagged here, not changed.
+        parser.add_option("-C", "--program_cycle_s",
+                          dest="program_cycle_s",
+                          help="Program cycle sleep. Define how many seconds you want wait after copying bianry onto target",
+                          type="float",
+                          metavar="COPY_METHOD")
+
+        parser.add_option("-t", "--timeout",
+                          dest="timeout",
+                          help="Timeout",
+                          metavar="TIMEOUT")
+
+        parser.add_option("-r", "--reset",
+                          dest="forced_reset_type",
+                          help="Forces different type of reset")
+
+        parser.add_option("-R", "--reset-timeout",
+                          dest="forced_reset_timeout",
+                          metavar="NUMBER",
+                          type="int",
+                          help="When forcing a reset using option -r you can set up after reset timeout in seconds")
+
+        parser.add_option('', '--auto',
+                          dest='auto_detect',
+                          metavar=False,
+                          action="store_true",
+                          help='Use mbed-ls module to detect all connected mbed devices')
+
+        (self.options, _) = parser.parse_args()
+
+        self.DEFAULT_RESET_TOUT = 0  # Seconds waited after reset when -R is not given
+        self.DEFAULT_TOUT = 10       # Default test timeout when -t is not given
+
+        if self.options.port is None:
+            raise Exception("The serial port of the target mbed have to be provided as command line arguments")
+
+        # Options related to copy / reset mbed device
+        self.port = self.options.port
+        self.disk = self.options.disk
+        # NOTE(review): raises AttributeError when -f/--image-path is omitted
+        # (None has no strip) — confirm the option is effectively mandatory.
+        self.image_path = self.options.image_path.strip('"')
+        self.copy_method = self.options.copy_method
+        self.program_cycle_s = float(self.options.program_cycle_s)
+
+        self.serial = None      # Opened lazily by init_serial()
+        self.serial_baud = 9600
+        self.serial_timeout = 1
+
+        # NOTE(review): -t is not declared type="int", so a user supplied
+        # timeout stays a string here — verify consumers convert it.
+        self.timeout = self.DEFAULT_TOUT if self.options.timeout is None else self.options.timeout
+        print 'MBED: Instrumentation: "%s" and disk: "%s"' % (self.port, self.disk)
+
+    def init_serial_params(self, serial_baud=9600, serial_timeout=1):
+        """ Initialize port parameters.
+        This parameters will be used by self.init_serial() function to open serial port
+        """
+        self.serial_baud = serial_baud
+        self.serial_timeout = serial_timeout
+
+    def init_serial(self, serial_baud=None, serial_timeout=None):
+        """ Initialize serial port.
+        Function will return error is port can't be opened or initialized.
+        Returns True when the port is open and flushed, False otherwise.
+        """
+        # Overload serial port configuration from default to parameters' values if they are specified
+        serial_baud = serial_baud if serial_baud is not None else self.serial_baud
+        serial_timeout = serial_timeout if serial_timeout is not None else self.serial_timeout
+
+        if get_module_avail('mbed_lstools') and self.options.auto_detect:
+            # Ensure serial port is up-to-date (try to find it 60 times)
+            found = False
+
+            for i in range(0, 60):
+                print('Looking for %s with MBEDLS' % self.options.micro)
+                muts_list = get_autodetected_MUTS_list(platform_name_filter=[self.options.micro])
+
+                if 1 in muts_list:
+                    mut = muts_list[1]
+                    self.port = mut['port']
+                    found = True
+                    break
+                else:
+                    sleep(3)
+
+            if not found:
+                return False
+
+        # Clear serial port: close any previously opened handle first
+        if self.serial:
+            self.serial.close()
+            self.serial = None
+
+        # We will pool for serial to be re-mounted if it was unmounted after device reset
+        result = self.pool_for_serial_init(serial_baud, serial_timeout) # Blocking
+
+        # Port can be opened
+        if result:
+            self.flush()
+        return result
+
+    def pool_for_serial_init(self, serial_baud, serial_timeout, pooling_loops=40, init_delay=0.5, loop_delay=0.25):
+        """ Functions pools for serial port readiness
+        ("pool" is a historical typo for "poll", kept for API compatibility.)
+        Retries opening self.port up to pooling_loops times; returns True as
+        soon as the port opens, False when every attempt failed.
+        """
+        result = True
+        last_error = None
+        # This loop is used to check for serial port availability due to
+        # some delays and remounting when devices are being flashed with new software.
+        for i in range(pooling_loops):
+            sleep(loop_delay if i else init_delay)
+            try:
+                self.serial = Serial(self.port, baudrate=serial_baud, timeout=serial_timeout)
+            except Exception as e:
+                result = False
+                last_error = "MBED: %s"% str(e)
+                stdout.write('.')
+                stdout.flush()
+            else:
+                print "...port ready!"
+                result = True
+                break
+        if not result and last_error:
+            print last_error
+        return result
+
+    def set_serial_timeout(self, timeout):
+        """ Wraps self.mbed.serial object timeout property.
+        Returns True when applied, None when no serial port is open.
+        """
+        result = None
+        if self.serial:
+            self.serial.timeout = timeout
+            result = True
+        return result
+
+    def serial_read(self, count=1):
+        """ Wraps self.mbed.serial object read method.
+        Returns the bytes read, or None on error / closed port.
+        """
+        result = None
+        if self.serial:
+            try:
+                result = self.serial.read(count)
+            except:
+                result = None
+        return result
+
+    def serial_readline(self, timeout=5):
+        """ Wraps self.mbed.serial object read method to read one line from serial port.
+        Collects characters until a newline or until 'timeout' seconds
+        elapse; returns the collected text, or None on a serial error.
+        NOTE(review): when self.serial is None this busy-spins for the full
+        timeout and returns '' — confirm callers open the port first.
+        """
+        result = ''
+        start = time()
+        while (time() - start) < timeout:
+            if self.serial:
+                try:
+                    c = self.serial.read(1)
+                    result += c
+                except Exception as e:
+                    print "MBED: %s"% str(e)
+                    result = None
+                    break
+                if c == '\n':
+                    break
+        return result
+
+    def serial_write(self, write_buffer):
+        """ Wraps self.mbed.serial object write method.
+        Returns the write result, or None on error / closed port.
+        """
+        result = None
+        if self.serial:
+            try:
+                result = self.serial.write(write_buffer)
+            except:
+                result = None
+        return result
+
+    def reset_timeout(self, timeout):
+        """ Timeout executed just after reset command is issued.
+        Sleeps in one second steps for roughly 'timeout' seconds.
+        """
+        for n in range(0, timeout):
+            sleep(1)
+
+    def reset(self):
+        """ Calls proper reset plugin to do the job.
+        Please refer to host_test_plugins functionality
+        """
+        # Flush serials to get only input after reset
+        self.flush()
+        if self.options.forced_reset_type:
+            result = host_tests_plugins.call_plugin('ResetMethod', self.options.forced_reset_type, disk=self.disk)
+        else:
+            result = host_tests_plugins.call_plugin('ResetMethod', 'default', serial=self.serial)
+        # Give time to wait for the image loading
+        reset_tout_s = self.options.forced_reset_timeout if self.options.forced_reset_timeout is not None else self.DEFAULT_RESET_TOUT
+        self.reset_timeout(reset_tout_s)
+        return result
+
+    def copy_image(self, image_path=None, disk=None, copy_method=None):
+        """ Closure for copy_image_raw() method.
+        Method which is actually copying image to mbed.
+        Arguments default to the values parsed from the command line.
+        """
+        # Set closure environment
+        image_path = image_path if image_path is not None else self.image_path
+        disk = disk if disk is not None else self.disk
+        copy_method = copy_method if copy_method is not None else self.copy_method
+        # Call proper copy method
+        result = self.copy_image_raw(image_path, disk, copy_method)
+        return result
+
+    def copy_image_raw(self, image_path=None, disk=None, copy_method=None):
+        """ Copy file depending on method you want to use. Handles exception
+        and return code from shell copy commands.
+        """
+        # image_path - Where is binary with target's firmware
+        if copy_method is not None:
+            # We override 'default' method with 'shell' method
+            if copy_method == 'default':
+                copy_method = 'shell'
+        else:
+            copy_method = 'shell'
+
+        result = host_tests_plugins.call_plugin('CopyMethod', copy_method, image_path=image_path, destination_disk=disk, program_cycle_s=self.program_cycle_s, target_mcu=self.options.micro)
+        return result;
+
+    def flush(self):
+        """ Flush serial ports.
+        Returns True when a port was flushed, False when none is open.
+        """
+        result = False
+        if self.serial:
+            self.serial.flushInput()
+            self.serial.flushOutput()
+            result = True
+        return result
+
+
+class HostTestResults:
+    """ Test results set by host tests
+
+    Each RESULT_* attribute holds the status token forwarded back to the
+    test suite inside the '{{...}}' markers.
+    """
+    def __init__(self):
+        # Keep the token table in one place and expose every entry as an
+        # instance attribute, matching the original flat assignments.
+        tokens = {
+            'RESULT_SUCCESS': 'success',
+            'RESULT_FAILURE': 'failure',
+            'RESULT_ERROR': 'error',
+            'RESULT_IO_SERIAL': 'ioerr_serial',
+            'RESULT_NO_IMAGE': 'no_image',
+            'RESULT_IOERR_COPY': "ioerr_copy",
+            'RESULT_PASSIVE': "passive",
+            'RESULT_NOT_DETECTED': "not_detected",
+            'RESULT_MBED_ASSERT': "mbed_assert",
+        }
+        for attr_name, token in tokens.items():
+            setattr(self, attr_name, token)
+
+
+import tools.host_tests as host_tests
+
+
+class Test(HostTestResults):
+    """ Base class for host test's test runner
+
+    NOTE(review): __init__ does not call HostTestResults.__init__, so the
+    RESULT_* attributes exist only when a subclass (DefaultTestSelector)
+    initializes both bases — confirm Test is never instantiated directly.
+    """
+    # Select default host_test supervision (replaced after autodetection)
+    test_supervisor = host_tests.get_host_test("default")
+
+    def __init__(self):
+        # Parses command line arguments and prepares the device handle.
+        self.mbed = Mbed()
+
+    def detect_test_config(self, verbose=False):
+        """ Detects test case configuration.
+        Reads serial lines until the '{start}' token; every '{key;value}}'
+        property seen before it is collected into the returned dict.
+        """
+        result = {}
+        while True:
+            line = self.mbed.serial_readline()
+            # NOTE(review): serial_readline() may return None on a serial
+            # error, making the 'in' test below raise TypeError — confirm.
+            if "{start}" in line:
+                self.notify("HOST: Start test...")
+                break
+            else:
+                # Detect if this is property from TEST_ENV print
+                m = re.search('{([\w_]+);([\w\d\+ ]+)}}', line[:-1])
+                if m and len(m.groups()) == 2:
+                    # This is most likely auto-detection property
+                    result[m.group(1)] = m.group(2)
+                    if verbose:
+                        self.notify("HOST: Property '%s' = '%s'"% (m.group(1), m.group(2)))
+                else:
+                    # We can check if this is TArget Id in mbed specific format
+                    m2 = re.search('^([\$]+)([a-fA-F0-9]+)', line[:-1])
+                    if m2 and len(m2.groups()) == 2:
+                        if verbose:
+                            target_id = m2.group(1) + m2.group(2)
+                            self.notify("HOST: TargetID '%s'"% target_id)
+                            self.notify(line[len(target_id):-1])
+                    else:
+                        self.notify("HOST: Unknown property: %s"% line.strip())
+        return result
+
+    def run(self):
+        """ Test runner for host test. This function will start executing
+        test and forward test result via serial port to test suite
+        """
+        # Copy image to device
+        self.notify("HOST: Copy image onto target...")
+        result = self.mbed.copy_image()
+        if not result:
+            # NOTE(review): the failure is reported but execution continues
+            # here and below — confirm whether an early return was intended.
+            self.print_result(self.RESULT_IOERR_COPY)
+
+        # Initialize and open target's serial port (console)
+        self.notify("HOST: Initialize serial port...")
+        result = self.mbed.init_serial()
+        if not result:
+            self.print_result(self.RESULT_IO_SERIAL)
+
+        # Reset device
+        self.notify("HOST: Reset target...")
+        result = self.mbed.reset()
+        if not result:
+            self.print_result(self.RESULT_IO_SERIAL)
+
+        # Run test
+        try:
+            CONFIG = self.detect_test_config(verbose=True) # print CONFIG
+
+            # Swap in the host test requested by the DUT, when registered
+            if "host_test_name" in CONFIG:
+                if host_tests.is_host_test(CONFIG["host_test_name"]):
+                    self.test_supervisor = host_tests.get_host_test(CONFIG["host_test_name"])
+            result = self.test_supervisor.test(self) #result = self.test()
+
+            if result is not None:
+                self.print_result(result)
+            else:
+                self.notify("HOST: Passive mode...")
+        except Exception, e:
+            print str(e)
+            self.print_result(self.RESULT_ERROR)
+
+    def setup(self):
+        """ Setup and check if configuration for test is
+        correct. E.g. if serial port can be opened.
+        """
+        result = True
+        if not self.mbed.serial:
+            result = False
+            self.print_result(self.RESULT_IO_SERIAL)
+        return result
+
+    def notify(self, message):
+        """ On screen notification function
+        """
+        print message
+        stdout.flush()
+
+    def print_result(self, result):
+        """ Test result unified printing function.
+        Emits the '{{result}}' / '{{end}}' tokens parsed by the test suite.
+        """
+        self.notify("\r\n{{%s}}\r\n{{end}}" % result)
+
+
+class DefaultTestSelector(Test):
+    """ Test class with serial port initialization
+
+    Initializes both bases explicitly: HostTestResults provides the
+    RESULT_* codes, Test constructs the Mbed instance (which parses the
+    command line arguments).
+    """
+    def __init__(self):
+        HostTestResults.__init__(self)
+        Test.__init__(self)
+
+if __name__ == '__main__':
+    # Entry point: flash the target, reset it and supervise one test run.
+    DefaultTestSelector().run()
diff --git a/tools/host_tests/host_tests_plugins/__init__.py b/tools/host_tests/host_tests_plugins/__init__.py
new file mode 100644
index 0000000..c05241a
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/__init__.py
@@ -0,0 +1,80 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import host_test_registry
+
+# This plugins provide 'flashing' methods to host test scripts
+import module_copy_mbed
+import module_copy_shell
+import module_copy_silabs
+
+try:
+ import module_copy_smart
+except:
+ pass
+
+#import module_copy_firefox
+import module_copy_mps2
+
+# Plugins used to reset certain platform
+import module_reset_mbed
+import module_reset_silabs
+import module_reset_mps2
+
+
+# Plugin registry instance
+HOST_TEST_PLUGIN_REGISTRY = host_test_registry.HostTestRegistry()
+
+# Static plugin registration
+# Some plugins are commented out if they are not stable or not commonly used
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_mbed.load_plugin())
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_shell.load_plugin())
+
+# NOTE(review): the bare 'except' silently skips registration of the smart
+# copy plugin on ANY error, not just a missing module — confirm intended.
+try:
+    HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_smart.load_plugin())
+except:
+    pass
+
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_mbed.load_plugin())
+#HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_firefox.load_plugin())
+
+# Extra platforms support
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_mps2.load_plugin())
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_mps2.load_plugin())
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_silabs.load_plugin())
+HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_silabs.load_plugin())
+
+# TODO: extend plugin loading to files with name module_*.py loaded ad-hoc
+
+###############################################################################
+# Functional interface for host test plugin registry
+###############################################################################
+def call_plugin(type, capability, *args, **kwargs):
+    """ Interface to call plugin registry functional way.
+    Thin module-level wrapper over HOST_TEST_PLUGIN_REGISTRY.call_plugin();
+    'type' mirrors the registry API (and shadows the builtin by design).
+    """
+    return HOST_TEST_PLUGIN_REGISTRY.call_plugin(type, capability, *args, **kwargs)
+
+def get_plugin_caps(type):
+    """ Returns list of all capabilities for plugin family with the same type.
+    If there are no capabilities empty list is returned.
+    Thin module-level wrapper over the registry instance.
+    """
+    return HOST_TEST_PLUGIN_REGISTRY.get_plugin_caps(type)
+
+def print_plugin_info():
+    """ Prints plugins' information in user friendly way.
+    Delegates to HostTestRegistry.__str__ (a prettytable rendering).
+    """
+    print HOST_TEST_PLUGIN_REGISTRY
diff --git a/tools/host_tests/host_tests_plugins/host_test_plugins.py b/tools/host_tests/host_tests_plugins/host_test_plugins.py
new file mode 100644
index 0000000..ee60950
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/host_test_plugins.py
@@ -0,0 +1,119 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from os import access, F_OK
+from sys import stdout
+from time import sleep
+from subprocess import call
+
+
+class HostTestPluginBase:
+    """ Base class for all plug-ins used with host tests.
+
+    Sub-classes describe themselves with the interface attributes below,
+    define a 'required_parameters' list (validated by check_parameters())
+    and implement setup() / execute().
+    """
+    ###########################################################################
+    # Interface:
+    ###########################################################################
+
+    ###########################################################################
+    # Interface attributes defining plugin name, type etc.
+    ###########################################################################
+    name = "HostTestPluginBase" # Plugin name, can be plugin class name
+    type = "BasePlugin" # Plugin type: ResetMethod, CopyMethod etc.
+    capabilities = [] # Capabilities names: what plugin can achieve
+                      # (e.g. reset using some external command line tool)
+    stable = False # Determine if plugin is stable and can be used
+
+    ###########################################################################
+    # Interface methods
+    ###########################################################################
+    def setup(self, *args, **kwargs):
+        """ Configure plugin, this function should be called before plugin execute() method is used.
+        Returns True when the plugin is ready to use.
+        """
+        return False
+
+    def execute(self, capabilitity, *args, **kwargs):
+        """ Executes capability by name.
+        Each capability e.g. may directly just call some command line
+        program or execute building pythonic function.
+        (The parameter name 'capabilitity' is a historical typo kept for
+        interface compatibility with existing sub-classes.)
+        """
+        return False
+
+    ###########################################################################
+    # Interface helper methods - overload only if you need to have custom behaviour
+    ###########################################################################
+    def print_plugin_error(self, text):
+        """ Function prints error in console and exits always with False
+        """
+        print "Plugin error: %s::%s: %s"% (self.name, self.type, text)
+        return False
+
+    def print_plugin_info(self, text, NL=True):
+        """ Function prints notification in console and exits always with True
+        @param NL - when False the trailing newline is suppressed
+        """
+        if NL:
+            print "Plugin info: %s::%s: %s"% (self.name, self.type, text)
+        else:
+            print "Plugin info: %s::%s: %s"% (self.name, self.type, text),
+        return True
+
+    def print_plugin_char(self, char):
+        """ Function prints char on stdout (flushed, no newline)
+        """
+        stdout.write(char)
+        stdout.flush()
+        return True
+
+    def check_mount_point_ready(self, destination_disk, init_delay=0.2, loop_delay=0.25):
+        """ Checks if destination_disk is ready and can be accessed by e.g. copy commands
+        @init_delay - Initial delay time before first access check
+        @loop_delay - pooling delay for access check
+        Blocks (potentially forever) until the mount point is accessible.
+        """
+        if not access(destination_disk, F_OK):
+            self.print_plugin_info("Waiting for mount point '%s' to be ready..."% destination_disk, NL=False)
+            sleep(init_delay)
+            while not access(destination_disk, F_OK):
+                sleep(loop_delay)
+                self.print_plugin_char('.')
+
+    def check_parameters(self, capabilitity, *args, **kwargs):
+        """ This function should be ran each time we call execute()
+        to check if none of the required parameters is missing.
+        Returns True when every name in self.required_parameters is in kwargs.
+        """
+        missing_parameters = [parameter for parameter in self.required_parameters
+                              if parameter not in kwargs]
+        if len(missing_parameters) > 0:
+            # Bug fix: the message previously joined the characters of a
+            # single parameter name ('parameter') instead of the list of
+            # missing parameter names.
+            self.print_plugin_error("execute parameter(s) '%s' missing!"% (', '.join(missing_parameters)))
+            return False
+        return True
+
+    def run_command(self, cmd, shell=True):
+        """ Runs command from command line.
+        Returns True when the command exits with 0, False on a non-zero
+        exit code or when spawning the command raised an exception.
+        """
+        result = True
+        ret = 0
+        try:
+            ret = call(cmd, shell=shell)
+            if ret:
+                self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd))
+                return False
+        except Exception as e:
+            result = False
+            self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd))
+            self.print_plugin_error(str(e))
+        return result
diff --git a/tools/host_tests/host_tests_plugins/host_test_registry.py b/tools/host_tests/host_tests_plugins/host_test_registry.py
new file mode 100644
index 0000000..5237b9a
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/host_test_registry.py
@@ -0,0 +1,89 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+class HostTestRegistry:
+    """ Simple registry which stores host test plugins for later lookup
+    and dispatch by (type, capability).
+    """
+    # Maps plugin name -> plugin object; class attribute shared by instances.
+    PLUGINS = {}
+
+    def print_error(self, text):
+        """ Prints a plugin loading failure message. """
+        print "Plugin load failed. Reason: %s"% text
+
+    def register_plugin(self, plugin):
+        """ Calls plugin.setup() and, on success, stores the plugin under
+        its name. Returns True when registered, False otherwise.
+
+        Note: Different groups of plugins may demand different extra parameter. Plugins
+        should be at least for one type of plugin configured with the same parameters
+        because we do not know which of them will actually use particular parameter.
+        """
+        # TODO:
+        # - check for unique caps for specified type
+        if plugin.name in self.PLUGINS:
+            self.print_error("%s already loaded"% plugin.name)
+            return False
+        if not plugin.setup():
+            self.print_error("%s setup failed"% plugin.name)
+            return False
+        self.PLUGINS[plugin.name] = plugin
+        return True
+
+    def call_plugin(self, type, capability, *args, **kwargs):
+        """ Dispatches to the first registered plugin of the given type
+        exposing the requested capability; False when none matches.
+        """
+        for registered in self.PLUGINS.values():
+            if registered.type == type and capability in registered.capabilities:
+                return registered.execute(capability, *args, **kwargs)
+        return False
+
+    def get_plugin_caps(self, type):
+        """ Returns list of all capabilities for plugin family with the same type.
+        If there are no capabilities empty list is returned.
+        """
+        capability_names = []
+        for registered in self.PLUGINS.values():
+            if registered.type == type:
+                capability_names.extend(registered.capabilities)
+        return sorted(capability_names)
+
+    def load_plugin(self, name):
+        """ Imports and returns the module named 'module_<name>'. """
+        return __import__("module_%s"% name)
+
+    def __str__(self):
+        """ Renders the registered plugins as a left-aligned pretty table. """
+        from prettytable import PrettyTable
+        column_names = ['name', 'type', 'capabilities', 'stable']
+        pt = PrettyTable(column_names)
+        for column in column_names:
+            pt.align[column] = 'l'
+        for plugin_name in sorted(self.PLUGINS.keys()):
+            registered = self.PLUGINS[plugin_name]
+            pt.add_row([registered.name,
+                        registered.type,
+                        ', '.join(registered.capabilities),
+                        registered.stable])
+        return pt.get_string()
diff --git a/tools/host_tests/host_tests_plugins/module_copy_firefox.py b/tools/host_tests/host_tests_plugins/module_copy_firefox.py
new file mode 100644
index 0000000..360835e
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_copy_firefox.py
@@ -0,0 +1,76 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from os.path import join, basename
+from host_test_plugins import HostTestPluginBase
+
+
+class HostTestPluginCopyMethod_Firefox(HostTestPluginBase):
+    """ Copy method which downloads the target image through a
+    selenium-driven Firefox session configured to auto-save binaries.
+    """
+
+    def file_store_firefox(self, file_path, dest_disk):
+        """ Fetches file_path with Firefox set up to auto-save
+        application/octet-stream downloads into dest_disk.
+        Returns True on success, False on any failure.
+        """
+        try:
+            from selenium import webdriver
+            profile = webdriver.FirefoxProfile()
+            profile.set_preference('browser.download.folderList', 2) # custom location
+            profile.set_preference('browser.download.manager.showWhenStarting', False)
+            profile.set_preference('browser.download.dir', dest_disk)
+            profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'application/octet-stream')
+            # Launch browser with profile and get file
+            browser = webdriver.Firefox(profile)
+            browser.get(file_path)
+            browser.close()
+        except:
+            return False
+        return True
+
+    # Plugin interface
+    name = 'HostTestPluginCopyMethod_Firefox'
+    type = 'CopyMethod'
+    capabilities = ['firefox']
+    required_parameters = ['image_path', 'destination_disk']
+
+    def setup(self, *args, **kwargs):
+        """ Verifies selenium is importable before the plugin is used.
+        """
+        try:
+            from selenium import webdriver
+        except ImportError, e:
+            self.print_plugin_error("Error: firefox copy method requires selenium library. %s"% e)
+            return False
+        return True
+
+    def execute(self, capabilitity, *args, **kwargs):
+        """ Executes capability by name.
+        Each capability may directly just call some command line
+        program or execute building pythonic function
+        """
+        result = False
+        if self.check_parameters(capabilitity, *args, **kwargs) is True:
+            image_path = kwargs['image_path']
+            destination_disk = kwargs['destination_disk']
+            # Prepare correct command line parameter values
+            image_base_name = basename(image_path)
+            destination_path = join(destination_disk, image_base_name)
+            if capabilitity == 'firefox':
+                # Bug fix: the download result was previously discarded and
+                # execute() always returned False even after a successful copy.
+                result = self.file_store_firefox(image_path, destination_path)
+                # NOTE(review): destination_path includes the image file name
+                # but is used as Firefox's download *directory* above —
+                # confirm whether destination_disk was intended instead.
+        return result
+
+
+def load_plugin():
+    """ Returns plugin available in this module.
+    Factory used by the static registration in host_tests_plugins/__init__.py.
+    """
+    return HostTestPluginCopyMethod_Firefox()
diff --git a/tools/host_tests/host_tests_plugins/module_copy_mbed.py b/tools/host_tests/host_tests_plugins/module_copy_mbed.py
new file mode 100644
index 0000000..913ff3c
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_copy_mbed.py
@@ -0,0 +1,78 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from shutil import copy
+from host_test_plugins import HostTestPluginBase
+from time import sleep
+
+
+class HostTestPluginCopyMethod_Mbed(HostTestPluginBase):
+    """ Default copy plugin: flashes "mbed enabled" devices by copying the
+    binary onto their mass-storage disk with shutil.
+    """
+
+    # Plugin interface
+    name = 'HostTestPluginCopyMethod_Mbed'
+    type = 'CopyMethod'
+    stable = True
+    capabilities = ['shutil', 'default']
+    required_parameters = ['image_path', 'destination_disk', 'program_cycle_s']
+
+    def generic_mbed_copy(self, image_path, destination_disk):
+        """ Copies image_path onto destination_disk using shutil.copy().
+        Returns True on success, False when the copy raised.
+        """
+        # Make sure the destination ends with a path separator so shutil
+        # treats it as a directory.
+        if not (destination_disk.endswith('/') or destination_disk.endswith('\\')):
+            destination_disk += '/'
+        try:
+            copy(image_path, destination_disk)
+        except Exception as e:
+            self.print_plugin_error("shutil.copy('%s', '%s')"% (image_path, destination_disk))
+            self.print_plugin_error("Error: %s"% str(e))
+            return False
+        return True
+
+    def setup(self, *args, **kwargs):
+        """ No configuration is required for this plugin. """
+        return True
+
+    def execute(self, capability, *args, **kwargs):
+        """ Performs the copy for capability 'shutil'; 'default' is a dummy
+        capability and yields False.
+        """
+        if self.check_parameters(capability, *args, **kwargs) is not True:
+            return False
+        result = False
+        if capability == 'shutil':
+            destination = kwargs['destination_disk']
+            # Wait for mount point to be ready (blocking)
+            self.check_mount_point_ready(destination)
+            result = self.generic_mbed_copy(kwargs['image_path'], destination)
+            # Allow mbed to cycle
+            sleep(kwargs['program_cycle_s'])
+        return result
+
+
+def load_plugin():
+    """ Returns plugin available in this module.
+    Factory used by the static registration in host_tests_plugins/__init__.py.
+    """
+    return HostTestPluginCopyMethod_Mbed()
diff --git a/tools/host_tests/host_tests_plugins/module_copy_mps2.py b/tools/host_tests/host_tests_plugins/module_copy_mps2.py
new file mode 100644
index 0000000..bcfe1d7
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_copy_mps2.py
@@ -0,0 +1,150 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import os, shutil
+from os.path import join
+from host_test_plugins import HostTestPluginBase
+from time import sleep
+
+
+class HostTestPluginCopyMethod_MPS2(HostTestPluginBase):
+    """ Copy method for ARM MPS2 boards: copies the binary into the board's
+    SOFTWARE/ directory and triggers a reboot via a touched reboot.txt.
+    """
+
+    # MPS2 specific flashing / binary setup funcitons
+    def mps2_set_board_image_file(self, disk, images_cfg_path, image0file_path, image_name='images.txt'):
+        """ This function will alter image cfg file.
+        Main goal of this function is to change number of images to 1, comment all
+        existing image entries and append at the end of file new entry with test path.
+        @return True when all steps succeed.
+        """
+        MBED_SDK_TEST_STAMP = 'test suite entry'
+        image_path = join(disk, images_cfg_path, image_name)
+        new_file_lines = [] # New configuration file lines (entries)
+
+        # Check each line of the image configuration file
+        try:
+            with open(image_path, 'r') as file:
+                for line in file:
+                    if re.search('^TOTALIMAGES', line):
+                        # Check number of total images, should be 1
+                        new_file_lines.append(re.sub('^TOTALIMAGES:[\t ]*[\d]+', 'TOTALIMAGES: 1', line))
+                    elif re.search('; - %s[\n\r]*$'% MBED_SDK_TEST_STAMP, line):
+                        # Look for test suite entries and remove them
+                        pass # Omit all test suite entries
+                    elif re.search('^IMAGE[\d]+FILE', line):
+                        # Check all image entries and mark the ';'
+                        new_file_lines.append(';' + line) # Comment non test suite lines
+                    else:
+                        # Append line to new file
+                        new_file_lines.append(line)
+        except IOError as e:
+            return False
+
+        # Add new image entry with proper commented stamp
+        new_file_lines.append('IMAGE0FILE: %s ; - %s\r\n'% (image0file_path, MBED_SDK_TEST_STAMP))
+
+        # Write all lines to file
+        try:
+            with open(image_path, 'w') as file:
+                for line in new_file_lines:
+                    file.write(line),
+        except IOError:
+            return False
+
+        return True
+
+    def mps2_select_core(self, disk, mobo_config_name=""):
+        """ Function selects actual core
+        """
+        # TODO: implement core selection
+        pass
+
+    def mps2_switch_usb_auto_mounting_after_restart(self, disk, usb_config_name=""):
+        """ Function alters configuration to allow USB MSD to be mounted after restarts
+        """
+        # TODO: implement USB MSD restart detection
+        pass
+
+    def copy_file(self, file, disk):
+        """ Copies 'file' onto the board as SOFTWARE/mbed.<ext>, removing any
+        previous binary with the same extension first. Returns True on
+        success, None (falsy) when no file was given.
+        """
+        if not file:
+            return
+
+        _, ext = os.path.splitext(file)
+        ext = ext.lower()
+        dfile = disk + "/SOFTWARE/mbed" + ext
+
+        if os.path.isfile(dfile):
+            print('Remove old binary %s' % dfile)
+            os.remove(dfile)
+
+        shutil.copy(file, dfile)
+        return True
+
+    def touch_file(self, file):
+        """ Touch file and set timestamp to items.
+        Creates <file>.tmp and renames it over 'file'; always returns True.
+        """
+        tfile = file+'.tmp'
+        fhandle = open(tfile, 'a')
+        try:
+            fhandle.close()
+        finally:
+            os.rename(tfile, file)
+        return True
+
+    # Plugin interface
+    name = 'HostTestPluginCopyMethod_MPS2'
+    type = 'CopyMethod'
+    capabilities = ['mps2-copy']
+    required_parameters = ['image_path', 'destination_disk']
+
+    def setup(self, *args, **kwargs):
+        """ Configure plugin, this function should be called before plugin execute() method is used.
+        """
+        return True
+
+    def execute(self, capabilitity, *args, **kwargs):
+        """ Executes capability by name.
+        Each capability may directly just call some command line
+        program or execute building pythonic function
+        """
+        result = False
+        if self.check_parameters(capabilitity, *args, **kwargs) is True:
+            file = kwargs['image_path']
+            disk = kwargs['destination_disk']
+
+            """ Add a delay in case there a test just finished
+                Prevents interface firmware hiccups
+            """
+            sleep(20)
+            if capabilitity == 'mps2-copy' and self.copy_file(file, disk):
+                sleep(3)
+                # NOTE(review): 'disk' is assumed to already end with a path
+                # separator (copy_file above inserts its own '/') — confirm
+                # the callers' disk format, else reboot.txt lands beside the
+                # mount point.
+                if self.touch_file(disk + 'reboot.txt'):
+                    """ Add a delay after the board was rebooted.
+                        The actual reboot time is 20 seconds, but using 15 seconds
+                        allows us to open the COM port and save a board reset.
+                        This also prevents interface firmware hiccups.
+                    """
+                    # NOTE(review): the text above says 15 s but the code
+                    # sleeps 7 s — confirm which value is current.
+                    sleep(7)
+                    result = True
+
+        return result
+
+
+def load_plugin():
+    """ Returns plugin available in this module.
+    Factory used by the static registration in host_tests_plugins/__init__.py.
+    """
+    return HostTestPluginCopyMethod_MPS2()
diff --git a/tools/host_tests/host_tests_plugins/module_copy_shell.py b/tools/host_tests/host_tests_plugins/module_copy_shell.py
new file mode 100644
index 0000000..18ca062
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_copy_shell.py
@@ -0,0 +1,74 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+from os.path import join, basename
+from host_test_plugins import HostTestPluginBase
+from time import sleep
+
+
+class HostTestPluginCopyMethod_Shell(HostTestPluginBase):
+
+ # Plugin interface
+ name = 'HostTestPluginCopyMethod_Shell'
+ type = 'CopyMethod'
+ stable = True
+ capabilities = ['shell', 'cp', 'copy', 'xcopy']
+ required_parameters = ['image_path', 'destination_disk', 'program_cycle_s']
+
+ def setup(self, *args, **kwargs):
+ """ Configure plugin, this function should be called before plugin execute() method is used.
+ """
+ return True
+
+ def execute(self, capability, *args, **kwargs):
+ """ Executes capability by name.
+ Each capability may directly just call some command line
+ program or execute building pythonic function
+ """
+ result = False
+ if self.check_parameters(capability, *args, **kwargs) is True:
+ image_path = kwargs['image_path']
+ destination_disk = kwargs['destination_disk']
+ program_cycle_s = kwargs['program_cycle_s']
+ # Wait for mount point to be ready
+ self.check_mount_point_ready(destination_disk) # Blocking
+ # Prepare correct command line parameter values
+ image_base_name = basename(image_path)
+ destination_path = join(destination_disk, image_base_name)
+ if capability == 'shell':
+ if os.name == 'nt': capability = 'copy'
+ elif os.name == 'posix': capability = 'cp'
+ if capability == 'cp' or capability == 'copy' or capability == 'xcopy':
+ copy_method = capability
+ cmd = [copy_method, image_path, destination_path]
+ if os.name == 'posix':
+ result = self.run_command(cmd, shell=False)
+ result = self.run_command(["sync"])
+ else:
+ result = self.run_command(cmd)
+
+ # Allow mbed to cycle
+ sleep(program_cycle_s)
+
+ return result
+
+
+def load_plugin():
+ """ Returns plugin available in this module
+ """
+ return HostTestPluginCopyMethod_Shell()
diff --git a/tools/host_tests/host_tests_plugins/module_copy_silabs.py b/tools/host_tests/host_tests_plugins/module_copy_silabs.py
new file mode 100644
index 0000000..494bcf4
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_copy_silabs.py
@@ -0,0 +1,67 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from host_test_plugins import HostTestPluginBase
+from time import sleep
+
+
+class HostTestPluginCopyMethod_Silabs(HostTestPluginBase):
+
+ # Plugin interface
+ name = 'HostTestPluginCopyMethod_Silabs'
+ type = 'CopyMethod'
+ capabilities = ['eACommander', 'eACommander-usb']
+ required_parameters = ['image_path', 'destination_disk', 'program_cycle_s']
+
+ def setup(self, *args, **kwargs):
+ """ Configure plugin, this function should be called before plugin execute() method is used.
+ """
+ self.EACOMMANDER_CMD = 'eACommander.exe'
+ return True
+
+ def execute(self, capabilitity, *args, **kwargs):
+ """ Executes capability by name.
+ Each capability may directly just call some command line
+ program or execute building pythonic function
+ """
+ result = False
+ if self.check_parameters(capabilitity, *args, **kwargs) is True:
+ image_path = kwargs['image_path']
+ destination_disk = kwargs['destination_disk']
+ program_cycle_s = kwargs['program_cycle_s']
+ if capabilitity == 'eACommander':
+ cmd = [self.EACOMMANDER_CMD,
+ '--serialno', destination_disk,
+ '--flash', image_path,
+ '--resettype', '2', '--reset']
+ result = self.run_command(cmd)
+ elif capabilitity == 'eACommander-usb':
+ cmd = [self.EACOMMANDER_CMD,
+ '--usb', destination_disk,
+ '--flash', image_path]
+ result = self.run_command(cmd)
+
+ # Allow mbed to cycle
+ sleep(program_cycle_s)
+
+ return result
+
+
+def load_plugin():
+ """ Returns plugin available in this module
+ """
+ return HostTestPluginCopyMethod_Silabs()
diff --git a/tools/host_tests/host_tests_plugins/module_copy_smart.py b/tools/host_tests/host_tests_plugins/module_copy_smart.py
new file mode 100644
index 0000000..1af9eaf
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_copy_smart.py
@@ -0,0 +1,118 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+import sys
+from os.path import join, basename, exists, abspath, dirname
+from time import sleep
+from host_test_plugins import HostTestPluginBase
+
+sys.path.append(abspath(join(dirname(__file__), "../../../")))
+from tools.test_api import get_autodetected_MUTS_list
+
+class HostTestPluginCopyMethod_Smart(HostTestPluginBase):
+
+ # Plugin interface
+ name = 'HostTestPluginCopyMethod_Smart'
+ type = 'CopyMethod'
+ stable = True
+ capabilities = ['smart']
+ required_parameters = ['image_path', 'destination_disk', 'target_mcu']
+
+ def setup(self, *args, **kwargs):
+ """ Configure plugin, this function should be called before plugin execute() method is used.
+ """
+ return True
+
+ def execute(self, capability, *args, **kwargs):
+ """ Executes capability by name.
+ Each capability may directly just call some command line
+ program or execute building pythonic function
+ """
+ result = False
+ if self.check_parameters(capability, *args, **kwargs) is True:
+ image_path = kwargs['image_path']
+ destination_disk = kwargs['destination_disk']
+ target_mcu = kwargs['target_mcu']
+ # Wait for mount point to be ready
+ self.check_mount_point_ready(destination_disk) # Blocking
+ # Prepare correct command line parameter values
+ image_base_name = basename(image_path)
+ destination_path = join(destination_disk, image_base_name)
+ if capability == 'smart':
+ if os.name == 'posix':
+ cmd = ['cp', image_path, destination_path]
+ result = self.run_command(cmd, shell=False)
+
+ cmd = ['sync']
+ result = self.run_command(cmd, shell=False)
+ elif os.name == 'nt':
+ cmd = ['copy', image_path, destination_path]
+ result = self.run_command(cmd, shell=True)
+
+ # Give the OS and filesystem time to settle down
+ sleep(3)
+
+ platform_name_filter = [target_mcu]
+ muts_list = {}
+
+ remount_complete = False
+
+ for i in range(0, 60):
+ print('Looking for %s with MBEDLS' % target_mcu)
+ muts_list = get_autodetected_MUTS_list(platform_name_filter=platform_name_filter)
+
+ if 1 in muts_list:
+ mut = muts_list[1]
+ destination_disk = mut['disk']
+ destination_path = join(destination_disk, image_base_name)
+
+ if mut['mcu'] == 'LPC1768' or mut['mcu'] == 'LPC11U24':
+ if exists(destination_disk) and exists(destination_path):
+ remount_complete = True
+ break;
+ else:
+ if exists(destination_disk) and not exists(destination_path):
+ remount_complete = True
+ break;
+
+ sleep(1)
+
+ if remount_complete:
+ print('Remount complete')
+ else:
+ print('Remount FAILED')
+
+ if exists(destination_disk):
+ print('Disk exists')
+ else:
+ print('Disk does not exist')
+
+ if exists(destination_path):
+ print('Image exists')
+ else:
+ print('Image does not exist')
+
+ result = None
+
+
+ return result
+
+def load_plugin():
+ """ Returns plugin available in this module
+ """
+ return HostTestPluginCopyMethod_Smart()
diff --git a/tools/host_tests/host_tests_plugins/module_reset_mbed.py b/tools/host_tests/host_tests_plugins/module_reset_mbed.py
new file mode 100644
index 0000000..0390d84
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_reset_mbed.py
@@ -0,0 +1,72 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from host_test_plugins import HostTestPluginBase
+
+
+class HostTestPluginResetMethod_Mbed(HostTestPluginBase):
+
+ def safe_sendBreak(self, serial):
+ """ Wraps serial.sendBreak() to avoid serial::serialposix.py exception on Linux
+ Traceback (most recent call last):
+ File "make.py", line 189, in
+ serial.sendBreak()
+ File "/usr/lib/python2.7/dist-packages/serial/serialposix.py", line 511, in sendBreak
+ termios.tcsendbreak(self.fd, int(duration/0.25))
+ error: (32, 'Broken pipe')
+ """
+ result = True
+ try:
+ serial.sendBreak()
+ except:
+ # In linux a termios.error is raised in sendBreak and in setBreak.
+ # The following setBreak() is needed to release the reset signal on the target mcu.
+ try:
+ serial.setBreak(False)
+ except:
+ result = False
+ return result
+
+ # Plugin interface
+ name = 'HostTestPluginResetMethod_Mbed'
+ type = 'ResetMethod'
+ stable = True
+ capabilities = ['default']
+ required_parameters = ['serial']
+
+ def setup(self, *args, **kwargs):
+ """ Configure plugin, this function should be called before plugin execute() method is used.
+ """
+ return True
+
+ def execute(self, capabilitity, *args, **kwargs):
+ """ Executes capability by name.
+ Each capability may directly just call some command line
+ program or execute building pythonic function
+ """
+ result = False
+ if self.check_parameters(capabilitity, *args, **kwargs) is True:
+ if capabilitity == 'default':
+ serial = kwargs['serial']
+ result = self.safe_sendBreak(serial)
+ return result
+
+
+def load_plugin():
+ """ Returns plugin available in this module
+ """
+ return HostTestPluginResetMethod_Mbed()
diff --git a/tools/host_tests/host_tests_plugins/module_reset_mps2.py b/tools/host_tests/host_tests_plugins/module_reset_mps2.py
new file mode 100644
index 0000000..40ff267
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_reset_mps2.py
@@ -0,0 +1,78 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+from host_test_plugins import HostTestPluginBase
+from time import sleep
+
+# Note: This plugin is not fully functional, needs improvements
+
+class HostTestPluginResetMethod_MPS2(HostTestPluginBase):
+ """ Plugin used to reset ARM_MPS2 platform
+ Supports:
+ reboot.txt - startup from standby state, reboots when in run mode.
+ shutdown.txt - shutdown from run mode.
+ reset.txt - reset FPGA during run mode.
+ """
+ def touch_file(self, file):
+ """ Touch file and set timestamp to items
+ """
+ tfile = file+'.tmp'
+ fhandle = open(tfile, 'a')
+ try:
+ fhandle.close()
+ finally:
+ os.rename(tfile, file)
+ return True
+
+ # Plugin interface
+ name = 'HostTestPluginResetMethod_MPS2'
+ type = 'ResetMethod'
+ capabilities = ['mps2-reboot', 'mps2-reset']
+ required_parameters = ['disk']
+
+ def setup(self, *args, **kwargs):
+ """ Prepare / configure plugin to work.
+ This method can receive plugin specific parameters by kwargs and
+ ignore other parameters which may affect other plugins.
+ """
+ return True
+
+ def execute(self, capabilitity, *args, **kwargs):
+ """ Executes capability by name.
+ Each capability may directly just call some command line
+ program or execute building pythonic function
+ """
+ return True  # FIXME: short-circuits the reset logic below (plugin not fully functional)
+ result = False
+ if self.check_parameters(capabilitity, *args, **kwargs) is True:
+ disk = kwargs['disk']
+
+ if capabilitity == 'mps2-reboot' and self.touch_file(disk + 'reboot.txt'):
+ sleep(20)
+ result = True
+
+ elif capabilitity == 'mps2-reset' and self.touch_file(disk + 'reset.txt'):
+ sleep(20)
+ result = True
+
+ return result
+
+def load_plugin():
+ """ Returns plugin available in this module
+ """
+ return HostTestPluginResetMethod_MPS2()
diff --git a/tools/host_tests/host_tests_plugins/module_reset_silabs.py b/tools/host_tests/host_tests_plugins/module_reset_silabs.py
new file mode 100644
index 0000000..2c05cb2
--- /dev/null
+++ b/tools/host_tests/host_tests_plugins/module_reset_silabs.py
@@ -0,0 +1,66 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from host_test_plugins import HostTestPluginBase
+
+
+class HostTestPluginResetMethod_SiLabs(HostTestPluginBase):
+
+ # Plugin interface
+ name = 'HostTestPluginResetMethod_SiLabs'
+ type = 'ResetMethod'
+ stable = True
+ capabilities = ['eACommander', 'eACommander-usb']
+ required_parameters = ['disk']
+
+ def setup(self, *args, **kwargs):
+ """ Configure plugin, this function should be called before plugin execute() method is used.
+ """
+ # Note you need to have eACommander.exe on your system path!
+ self.EACOMMANDER_CMD = 'eACommander.exe'
+ return True
+
+ def execute(self, capabilitity, *args, **kwargs):
+ """ Executes capability by name.
+ Each capability may directly just call some command line
+ program or execute building pythonic function
+ """
+ result = False
+ if self.check_parameters(capabilitity, *args, **kwargs) is True:
+ disk = kwargs['disk'].rstrip('/\\')
+
+ if capabilitity == 'eACommander':
+ # For this copy method 'disk' will be 'serialno' for eACommander command line parameters
+ # Note: Commands are executed in the order they are specified on the command line
+ cmd = [self.EACOMMANDER_CMD,
+ '--serialno', disk,
+ '--resettype', '2', '--reset',]
+ result = self.run_command(cmd)
+ elif capabilitity == 'eACommander-usb':
+ # For this copy method 'disk' will be 'usb address' for eACommander command line parameters
+ # Note: Commands are executed in the order they are specified on the command line
+ cmd = [self.EACOMMANDER_CMD,
+ '--usb', disk,
+ '--resettype', '2', '--reset',]
+ result = self.run_command(cmd)
+ return result
+
+
+def load_plugin():
+ """ Returns plugin available in this module
+ """
+ return HostTestPluginResetMethod_SiLabs()
diff --git a/tools/host_tests/mbedrpc.py b/tools/host_tests/mbedrpc.py
new file mode 100644
index 0000000..ffbdef4
--- /dev/null
+++ b/tools/host_tests/mbedrpc.py
@@ -0,0 +1,225 @@
+# mbedRPC.py - mbed RPC interface for Python
+#
+##Copyright (c) 2010 ARM Ltd
+##
+##Permission is hereby granted, free of charge, to any person obtaining a copy
+##of this software and associated documentation files (the "Software"), to deal
+##in the Software without restriction, including without limitation the rights
+##to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+##copies of the Software, and to permit persons to whom the Software is
+##furnished to do so, subject to the following conditions:
+##
+##The above copyright notice and this permission notice shall be included in
+##all copies or substantial portions of the Software.
+##
+##THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+##IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+##FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+##AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+##LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+##OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+##THE SOFTWARE.
+#
+# Example:
+# >from mbedRPC import*
+# >mbed = SerialRPC("COM5",9600)
+# >myled = DigitalOut(mbed,"myled") <--- Where the text in quotations matches your RPC pin definition's second parameter, in this case it could be RpcDigitalOut myled(LED1,"myled");
+# >myled.write(1)
+# >
+
+import re, serial, urllib2, time
+
+# mbed super class
+class mbed:
+ def __init__(self):
+ print("This will work as a demo but no transport mechanism has been selected")
+
+ def rpc(self, name, method, args):
+ print("Superclass method not overridden")
+
+
+# Transport mechanisms, derived from mbed
+class SerialRPC(mbed):
+ def __init__(self, port, baud):
+ self.ser = serial.Serial(port)
+ self.ser.setBaudrate(baud)
+
+ def rpc(self, name, method, args):
+ # creates the command to be sent serially - /name/method arg1 arg2 arg3 ... argN
+ str = "/" + name + "/" + method + " " + " ".join(args) + "\n"
+ # prints the command being executed
+ print str
+ # writes the command to serial
+ self.ser.write(str)
+ # strips trailing characters from the line just written
+ ret_val = self.ser.readline().strip()
+ return ret_val
+
+
+class HTTPRPC(mbed):
+ def __init__(self, ip):
+ self.host = "http://" + ip
+
+ def rpc(self, name, method, args):
+ response = urllib2.urlopen(self.host + "/rpc/" + name + "/" + method + "%20" + "%20".join(args))
+ return response.read().strip()
+
+
+# generic mbed interface super class
+class mbed_interface():
+ # initialize an mbed interface with a transport mechanism and pin name
+ def __init__(self, this_mbed, mpin):
+ self.mbed = this_mbed
+ if isinstance(mpin, str):
+ self.name = mpin
+
+ def __del__(self):
+ r = self.mbed.rpc(self.name, "delete", [])
+
+ def new(self, class_name, name, pin1, pin2 = "", pin3 = ""):
+ args = [arg for arg in [pin1,pin2,pin3,name] if arg != ""]
+ r = self.mbed.rpc(class_name, "new", args)
+
+ # generic read
+ def read(self):
+ r = self.mbed.rpc(self.name, "read", [])
+ return int(r)
+
+
+# for classes that need write functionality - inherits from the generic reading interface
+class mbed_interface_write(mbed_interface):
+ def __init__(self, this_mbed, mpin):
+ mbed_interface.__init__(self, this_mbed, mpin)
+
+ # generic write
+ def write(self, value):
+ r = self.mbed.rpc(self.name, "write", [str(value)])
+
+
+# mbed interfaces
+class DigitalOut(mbed_interface_write):
+ def __init__(self, this_mbed, mpin):
+ mbed_interface_write.__init__(self, this_mbed, mpin)
+
+
+class AnalogIn(mbed_interface):
+ def __init__(self, this_mbed, mpin):
+ mbed_interface.__init__(self, this_mbed, mpin)
+
+ def read_u16(self):
+ r = self.mbed.rpc(self.name, "read_u16", [])
+ return int(r)
+
+
+class AnalogOut(mbed_interface_write):
+ def __init__(self, this_mbed, mpin):
+ mbed_interface_write.__init__(self, this_mbed, mpin)
+
+ def write_u16(self, value):
+ self.mbed.rpc(self.name, "write_u16", [str(value)])
+
+ def read(self):
+ r = self.mbed.rpc(self.name, "read", [])
+ return float(r)
+
+
+class DigitalIn(mbed_interface):
+ def __init__(self, this_mbed, mpin):
+ mbed_interface.__init__(self, this_mbed, mpin)
+
+
+class PwmOut(mbed_interface_write):
+ def __init__(self, this_mbed, mpin):
+ mbed_interface_write.__init__(self, this_mbed, mpin)
+
+ def read(self):
+ r = self.mbed.rpc(self.name, "read", [])
+ return r
+
+ def period(self, value):
+ self.mbed.rpc(self.name, "period", [str(value)])
+
+ def period_ms(self, value):
+ self.mbed.rpc(self.name, "period_ms", [str(value)])
+
+ def period_us(self, value):
+ self.mbed.rpc(self.name, "period_us", [str(value)])
+
+ def pulsewidth(self, value):
+ self.mbed.rpc(self.name, "pulsewidth", [str(value)])
+
+ def pulsewidth_ms(self, value):
+ self.mbed.rpc(self.name, "pulsewidth_ms", [str(value)])
+
+ def pulsewidth_us(self, value):
+ self.mbed.rpc(self.name, "pulsewidth_us", [str(value)])
+
+
+class RPCFunction(mbed_interface):
+ def __init__(self, this_mbed, name):
+ mbed_interface.__init__(self, this_mbed, name)
+
+ def run(self, input):
+ r = self.mbed.rpc(self.name, "run", [input])
+ return r
+
+
+class RPCVariable(mbed_interface_write):
+ def __init__(self, this_mbed, name):
+ mbed_interface_write.__init__(self, this_mbed, name)
+
+ def read(self):
+ r = self.mbed.rpc(self.name, "read", [])
+ return r
+
+class Timer(mbed_interface):
+ def __init__(self, this_mbed, name):
+ mbed_interface.__init__(self, this_mbed, name)
+
+ def start(self):
+ r = self.mbed.rpc(self.name, "start", [])
+
+ def stop(self):
+ r = self.mbed.rpc(self.name, "stop", [])
+
+ def reset(self):
+ r = self.mbed.rpc(self.name, "reset", [])
+
+ def read(self):
+ r = self.mbed.rpc(self.name, "read", [])
+ return float(re.search('\d+\.*\d*', r).group(0))
+
+ def read_ms(self):
+ r = self.mbed.rpc(self.name, "read_ms", [])
+ return float(re.search('\d+\.*\d*', r).group(0))
+
+ def read_us(self):
+ r = self.mbed.rpc(self.name, "read_us", [])
+ return float(re.search('\d+\.*\d*', r).group(0))
+
+# Serial
+class Serial():
+ def __init__(self, this_mbed, tx, rx=""):
+ self.mbed = this_mbed
+ if isinstance(tx, str):
+ self.name = tx
+
+ def __del__(self):
+ r = self.mbed.rpc(self.name, "delete", [])
+
+ def baud(self, value):
+ r = self.mbed.rpc(self.name, "baud", [str(value)])
+
+ def putc(self, value):
+ r = self.mbed.rpc(self.name, "putc", [str(value)])
+
+ def puts(self, value):
+ r = self.mbed.rpc(self.name, "puts", ["\"" + str(value) + "\""])
+
+ def getc(self):
+ r = self.mbed.rpc(self.name, "getc", [])
+ return int(r)
+
+
+def wait(s):
+ time.sleep(s)
diff --git a/tools/host_tests/midi.py b/tools/host_tests/midi.py
new file mode 100644
index 0000000..67f34ea
--- /dev/null
+++ b/tools/host_tests/midi.py
@@ -0,0 +1,72 @@
+from __future__ import print_function
+import sys
+import re
+import time
+import mido
+from mido import Message
+
+
+def test_midi_in(port):
+ expected_messages_count=0
+ while expected_messages_count < 7:
+ for message in port.iter_pending():
+ if message.type in ('note_on', 'note_off', 'program_change', 'sysex'):
+ yield message
+ expected_messages_count+=1
+ time.sleep(0.1)
+
+def test_midi_loopback(input_port):
+ expected_messages_count=0
+ while expected_messages_count < 1:
+ for message in input_port.iter_pending():
+ print('Test MIDI OUT loopback received {}'.format(message.hex()))
+ expected_messages_count+=1
+
+def test_midi_out_loopback(output_port,input_port):
+ print("Test MIDI OUT loopback")
+ output_port.send(Message('program_change', program=1))
+ test_midi_loopback(input_port)
+
+ output_port.send(Message('note_on', note=21))
+ test_midi_loopback(input_port)
+
+ output_port.send(Message('note_off', note=21))
+ test_midi_loopback(input_port)
+
+ output_port.send(Message('sysex', data=[0x7E,0x7F,0x09,0x01]))
+ test_midi_loopback(input_port)
+
+ output_port.send(Message('sysex', data=[0x7F,0x7F,0x04,0x01,0x7F,0x7F]))
+ test_midi_loopback(input_port)
+
+ output_port.send(Message('sysex', data=[0x41,0x10,0x42,0x12,0x40,0x00,0x7F,0x00,0x41]))
+ test_midi_loopback(input_port)
+
+ output_port.send(Message('sysex', data=[0x41,0x10,0x42,0x12,0x40,0x00,0x04,0x7F,0x3D]))
+ test_midi_loopback(input_port)
+
+portname=""
+
+while portname=="":
+ print("Wait for MIDI IN plug ...")
+ for name in mido.get_input_names():
+ matchObj = re.match( r'Mbed', name)
+
+ if matchObj:
+ portname=name
+ time.sleep( 1 )
+
+try:
+ input_port = mido.open_input(portname)
+ output_port = mido.open_output(portname)
+
+ print('Using {}'.format(input_port))
+
+ print("Test MIDI IN")
+
+ for message in test_midi_in(input_port):
+ print('Test MIDI IN received {}'.format(message.hex()))
+
+ test_midi_out_loopback(output_port,input_port)
+except KeyboardInterrupt:
+ pass
\ No newline at end of file
diff --git a/tools/host_tests/net_test.py b/tools/host_tests/net_test.py
new file mode 100644
index 0000000..01b4541
--- /dev/null
+++ b/tools/host_tests/net_test.py
@@ -0,0 +1,27 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from host_test import Test, Simple
+from sys import stdout
+
+class NETTest(Simple):
+ def __init__(self):
+ Test.__init__(self)
+ self.mbed.init_serial(115200)
+ self.mbed.reset()
+
+if __name__ == '__main__':
+ NETTest().run()
diff --git a/tools/host_tests/rpc.py b/tools/host_tests/rpc.py
new file mode 100644
index 0000000..84b85d2
--- /dev/null
+++ b/tools/host_tests/rpc.py
@@ -0,0 +1,56 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from host_test import Test
+from mbedrpc import SerialRPC, DigitalOut, DigitalIn, pin
+
+
+class RpcTest(Test):
+ def test(self):
+ self.notify("RPC Test")
+ s = SerialRPC(self.mbed.port, debug=True)
+
+ self.notify("Init remote objects")
+
+ p_out = pin("p10")
+ p_in = pin("p11")
+
+ if hasattr(self.mbed.options, 'micro'):
+ if self.mbed.options.micro == 'M0+':
+ print "Freedom Board: PTA12 <-> PTC4"
+ p_out = pin("PTA12")
+ p_in = pin("PTC4")
+
+ self.output = DigitalOut(s, p_out);
+ self.input = DigitalIn(s, p_in);
+
+ self.check = True
+ self.write_read_test(1)
+ self.write_read_test(0)
+ return self.check
+
+ def write_read_test(self, v):
+ self.notify("Check %d" % v)
+ self.output.write(v)
+ if self.input.read() != v:
+ self.notify("ERROR")
+ self.check = False
+ else:
+ self.notify("OK")
+
+
+if __name__ == '__main__':
+ RpcTest().run()
diff --git a/tools/host_tests/rtc_auto.py b/tools/host_tests/rtc_auto.py
new file mode 100644
index 0000000..d267936
--- /dev/null
+++ b/tools/host_tests/rtc_auto.py
@@ -0,0 +1,50 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+from time import time, strftime, gmtime
+
+class RTCTest():
+ PATTERN_RTC_VALUE = "\[(\d+)\] \[(\d+-\d+-\d+ \d+:\d+:\d+ [AaPpMm]{2})\]"
+ re_detect_rtc_value = re.compile(PATTERN_RTC_VALUE)
+
+ def test(self, selftest):
+ test_result = True
+ start = time()
+ sec_prev = 0
+ for i in range(0, 5):
+ # Timeout changed from default: we need to wait longer for some boards to start-up
+ c = selftest.mbed.serial_readline(timeout=10)
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+ selftest.notify(c.strip())
+ delta = time() - start
+ m = self.re_detect_rtc_value.search(c)
+ if m and len(m.groups()):
+ sec = int(m.groups()[0])
+ time_str = m.groups()[1]
+ correct_time_str = strftime("%Y-%m-%d %H:%M:%S %p", gmtime(float(sec)))
+ single_result = time_str == correct_time_str and sec > 0 and sec > sec_prev
+ test_result = test_result and single_result
+ result_msg = "OK" if single_result else "FAIL"
+ selftest.notify("HOST: [%s] [%s] received time %+d sec after %.2f sec... %s"% (sec, time_str, sec - sec_prev, delta, result_msg))
+ sec_prev = sec
+ else:
+ test_result = False
+ break
+ start = time()
+ return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/serial_nc_rx_auto.py b/tools/host_tests/serial_nc_rx_auto.py
new file mode 100644
index 0000000..59975db
--- /dev/null
+++ b/tools/host_tests/serial_nc_rx_auto.py
@@ -0,0 +1,87 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import uuid
+import time
+import string
+from sys import stdout
+
+class SerialNCRXTest():
+
+ def test(self, selftest):
+ selftest.mbed.flush();
+ # Wait 0.5 seconds to ensure mbed is listening
+ time.sleep(0.5)
+
+ #handshake with target to sync test start
+ selftest.mbed.serial_write("S");
+
+ strip_chars = string.whitespace + "\0"
+
+ out_str = selftest.mbed.serial_readline()
+
+ if not out_str:
+ selftest.notify("HOST: No output detected")
+ return selftest.RESULT_IO_SERIAL
+
+ out_str_stripped = out_str.strip(strip_chars)
+
+ if out_str_stripped != "RX OK - Start NC test":
+ selftest.notify("HOST: Unexpected output. Expected 'RX OK - Start NC test' but received '%s'" % out_str_stripped)
+ return selftest.RESULT_FAILURE
+
+ # Wait 0.5 seconds to ensure mbed is listening
+ time.sleep(0.5)
+
+ selftest.mbed.serial_write("E");
+
+ strip_chars = string.whitespace + "\0"
+
+ out_str = selftest.mbed.serial_readline()
+
+ if not out_str:
+ selftest.notify("HOST: No output detected")
+ return selftest.RESULT_IO_SERIAL
+
+ out_str_stripped = out_str.strip(strip_chars)
+
+ if out_str_stripped != "RX OK - Expected":
+ selftest.notify("HOST: Unexpected output. Expected 'RX OK - Expected' but received '%s'" % out_str_stripped)
+ return selftest.RESULT_FAILURE
+
+ # Wait 0.5 seconds to ensure mbed is listening
+ time.sleep(0.5)
+
+ # Send character, mbed shouldn't receive
+ selftest.mbed.serial_write("U");
+
+ out_str = selftest.mbed.serial_readline()
+
+ # If no characters received, pass the test
+ if not out_str:
+ selftest.notify("HOST: No further output detected")
+ return selftest.RESULT_SUCCESS
+ else:
+ out_str_stripped = out_str.strip(strip_chars)
+
+ if out_str_stripped == "RX OK - Unexpected":
+ selftest.notify("HOST: Unexpected output returned indicating RX still functioning")
+ else:
+ selftest.notify("HOST: Extraneous output '%s' detected indicating unknown error" % out_str_stripped)
+
+ return selftest.RESULT_FAILURE
diff --git a/tools/host_tests/serial_nc_tx_auto.py b/tools/host_tests/serial_nc_tx_auto.py
new file mode 100644
index 0000000..707c476
--- /dev/null
+++ b/tools/host_tests/serial_nc_tx_auto.py
@@ -0,0 +1,62 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import uuid
+import time
+import string
+from sys import stdout
+
+class SerialNCTXTest():
+
+ def test(self, selftest):
+ selftest.mbed.flush();
+ # Wait 0.5 seconds to ensure mbed is listening
+ time.sleep(0.5)
+
+ selftest.mbed.serial_write("S");
+
+ strip_chars = string.whitespace + "\0"
+
+ out_str = selftest.mbed.serial_readline()
+ selftest.notify("HOST: " + out_str)
+
+ if not out_str:
+ selftest.notify("HOST: No output detected")
+ return selftest.RESULT_IO_SERIAL
+
+ out_str_stripped = out_str.strip(strip_chars)
+
+ if out_str_stripped != "TX OK - Expected":
+ selftest.notify("HOST: Unexpected output. Expected 'TX OK - Expected' but received '%s'" % out_str_stripped)
+ return selftest.RESULT_FAILURE
+
+ out_str = selftest.mbed.serial_readline()
+
+ # If no characters received, pass the test
+ if not out_str:
+ selftest.notify("HOST: No further output detected")
+ return selftest.RESULT_SUCCESS
+ else:
+ out_str_stripped = out_str.strip(strip_chars)
+
+ if out_str_stripped == "TX OK - Unexpected":
+ selftest.notify("HOST: Unexpected output returned indicating TX still functioning")
+ else:
+ selftest.notify("HOST: Extraneous output '%s' detected indicating unknown error" % out_str_stripped)
+
+ return selftest.RESULT_FAILURE
diff --git a/tools/host_tests/stdio_auto.py b/tools/host_tests/stdio_auto.py
new file mode 100644
index 0000000..1fe1890
--- /dev/null
+++ b/tools/host_tests/stdio_auto.py
@@ -0,0 +1,56 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import random
+from time import time
+
+class StdioTest():
+ PATTERN_INT_VALUE = "Your value was: (-?\d+)"
+ re_detect_int_value = re.compile(PATTERN_INT_VALUE)
+
+ def test(self, selftest):
+ test_result = True
+
+ c = selftest.mbed.serial_readline() # {{start}} preamble
+ if c is None:
+ return selftest.RESULT_IO_SERIAL
+ selftest.notify(c)
+
+ for i in range(0, 10):
+ random_integer = random.randint(-99999, 99999)
+ selftest.notify("HOST: Generated number: " + str(random_integer))
+ start = time()
+ selftest.mbed.serial_write(str(random_integer) + "\n")
+
+ serial_stdio_msg = selftest.mbed.serial_readline()
+ if serial_stdio_msg is None:
+ return selftest.RESULT_IO_SERIAL
+ delay_time = time() - start
+ selftest.notify(serial_stdio_msg.strip())
+
+ # Searching for reply with scanned values
+ m = self.re_detect_int_value.search(serial_stdio_msg)
+ if m and len(m.groups()):
+ int_value = m.groups()[0]
+ int_value_cmp = random_integer == int(int_value)
+ test_result = test_result and int_value_cmp
+ selftest.notify("HOST: Number %s read after %.3f sec ... [%s]"% (int_value, delay_time, "OK" if int_value_cmp else "FAIL"))
+ else:
+ test_result = False
+ break
+ return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/tcpecho_client.py b/tools/host_tests/tcpecho_client.py
new file mode 100644
index 0000000..303f002
--- /dev/null
+++ b/tools/host_tests/tcpecho_client.py
@@ -0,0 +1,57 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import socket
+import string, random
+from time import time
+
+from private_settings import SERVER_ADDRESS
+
+ECHO_PORT = 7
+
+LEN_PACKET = 127
+N_PACKETS = 5000
+TOT_BITS = float(LEN_PACKET * N_PACKETS * 8) * 2
+MEGA = float(1024 * 1024)
+UPDATE_STEP = (N_PACKETS/10)
+
+class TCP_EchoClient:
+ def __init__(self, host):
+ self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.s.connect((host, ECHO_PORT))
+ self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
+
+ def __packet(self):
+ # Comment out the checks when measuring the throughput
+ # self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
+ self.s.send(self.packet)
+ data = self.s.recv(LEN_PACKET)
+ # assert self.packet == data, "packet error:\n%s\n%s\n" % (self.packet, data)
+
+ def test(self):
+ start = time()
+ for i in range(N_PACKETS):
+ if (i % UPDATE_STEP) == 0: print '%.2f%%' % ((float(i)/float(N_PACKETS)) * 100.)
+ self.__packet()
+ t = time() - start
+ print 'Throughput: (%.2f)Mbits/s' % ((TOT_BITS / t)/MEGA)
+
+ def __del__(self):
+ self.s.close()
+
+while True:
+ e = TCP_EchoClient(SERVER_ADDRESS)
+ e.test()
diff --git a/tools/host_tests/tcpecho_client_auto.py b/tools/host_tests/tcpecho_client_auto.py
new file mode 100644
index 0000000..fe915a1
--- /dev/null
+++ b/tools/host_tests/tcpecho_client_auto.py
@@ -0,0 +1,87 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import socket
+from sys import stdout
+from SocketServer import BaseRequestHandler, TCPServer
+
+class TCPEchoClient_Handler(BaseRequestHandler):
+    # Echoes every received chunk back to the target. Prints a progress
+    # dot every 10th chunk, and dumps the chunk verbatim once the test
+    # suite's '{{end}}' sentinel appears in the stream.
+    def handle(self):
+        """ One handle per connection
+        """
+        print "HOST: Connection received...",
+        count = 1;
+        while True:
+            # Blocking read; empty result means the peer closed the socket
+            data = self.request.recv(1024)
+            if not data: break
+            # Echo the chunk back unmodified
+            self.request.sendall(data)
+            if '{{end}}' in str(data):
+                print
+                print str(data)
+            else:
+                if not count % 10:
+                    sys.stdout.write('.')
+                count += 1
+            stdout.flush()
+
+class TCPEchoClientTest():
+ def send_server_ip_port(self, selftest, ip_address, port_no):
+ """ Set up network host. Reset target and and send server IP via serial to Mbed
+ """
+ c = selftest.mbed.serial_readline() # 'TCPCllient waiting for server IP and port...'
+ if c is None:
+ self.print_result(selftest.RESULT_IO_SERIAL)
+ return
+
+ selftest.notify(c.strip())
+ selftest.notify("HOST: Sending server IP Address to target...")
+
+ connection_str = ip_address + ":" + str(port_no) + "\n"
+ selftest.mbed.serial_write(connection_str)
+ selftest.notify(connection_str)
+
+ # Two more strings about connection should be sent by MBED
+ for i in range(0, 2):
+ c = selftest.mbed.serial_readline()
+ if c is None:
+ selftest.print_result(self.RESULT_IO_SERIAL)
+ return
+ selftest.notify(c.strip())
+
+ def test(self, selftest):
+ # We need to discover SERVEP_IP and set up SERVER_PORT
+ # Note: Port 7 is Echo Protocol:
+ #
+ # Port number rationale:
+ #
+ # The Echo Protocol is a service in the Internet Protocol Suite defined
+ # in RFC 862. It was originally proposed for testing and measurement
+ # of round-trip times[citation needed] in IP networks.
+ #
+ # A host may connect to a server that supports the Echo Protocol using
+ # the Transmission Control Protocol (TCP) or the User Datagram Protocol
+ # (UDP) on the well-known port number 7. The server sends back an
+ # identical copy of the data it received.
+ SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
+ SERVER_PORT = 7
+
+ # Returning none will suppress host test from printing success code
+ server = TCPServer((SERVER_IP, SERVER_PORT), TCPEchoClient_Handler)
+ print "HOST: Listening for TCP connections: " + SERVER_IP + ":" + str(SERVER_PORT)
+ self.send_server_ip_port(selftest, SERVER_IP, SERVER_PORT)
+ server.serve_forever()
diff --git a/tools/host_tests/tcpecho_server.py b/tools/host_tests/tcpecho_server.py
new file mode 100644
index 0000000..4a68bd9
--- /dev/null
+++ b/tools/host_tests/tcpecho_server.py
@@ -0,0 +1,50 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from SocketServer import BaseRequestHandler, TCPServer
+from time import time
+
+from private_settings import LOCALHOST
+
+MAX_INDEX = 126
+MEGA = float(1024 * 1024)
+
+class TCP_EchoHandler(BaseRequestHandler):
+    # Echo server with throughput measurement. The client is expected
+    # to send a repeating byte sequence 0..MAX_INDEX; any byte out of
+    # sequence is reported as a data error.
+    def handle(self):
+        print "\nconnection received"
+        start = time()
+        bytes = 0
+        index = 0
+        while True:
+            # Blocking read; empty result means the peer closed the socket
+            data = self.request.recv(1024)
+            if not data: break
+
+            bytes += len(data)
+            # Verify the monotonically increasing byte pattern
+            for n in map(ord, data):
+                if n != index:
+                    print "data error %d != %d" % (n , index)
+                index += 1
+                if index > MAX_INDEX:
+                    index = 0
+
+            self.request.sendall(data)
+        t = time() - start
+        # x2: every byte travels both directions (receive + echo)
+        b = float(bytes * 8) * 2
+        print "Throughput: (%.2f)Mbits/s" % ((b/t)/MEGA)
+
+# Manual test entry point: serve echo connections forever on port 7
+server = TCPServer((LOCALHOST, 7), TCP_EchoHandler)
+print "listening for connections"
+server.serve_forever()
diff --git a/tools/host_tests/tcpecho_server_auto.py b/tools/host_tests/tcpecho_server_auto.py
new file mode 100644
index 0000000..8bc0e30
--- /dev/null
+++ b/tools/host_tests/tcpecho_server_auto.py
@@ -0,0 +1,84 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import sys
+import uuid
+import socket
+from sys import stdout
+
+class TCPEchoServerTest():
+    """Host side of the TCP echo server test: parse the server address
+    from the target's serial banner, connect, and verify ECHO_LOOPs
+    UUID strings are echoed back verbatim.
+    """
+    ECHO_SERVER_ADDRESS = ""  # parsed from the serial banner
+    ECHO_PORT = 0             # parsed from the serial banner
+    ECHO_LOOPs = 100
+    s = None # Socket
+
+    PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
+    re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
+
+    def test(self, selftest):
+        result = False
+        c = selftest.mbed.serial_readline()
+        if c is None:
+            return selftest.RESULT_IO_SERIAL
+        selftest.notify(c)
+
+        # Banner must match PATTERN_SERVER_IP or the test is a failure
+        m = self.re_detect_server_ip.search(c)
+        if m and len(m.groups()):
+            self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
+            self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
+            selftest.notify("HOST: TCP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
+
+            # We assume this test fails so can't send 'error' message to server
+            try:
+                self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                self.s.connect((self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
+            except Exception, e:
+                self.s = None
+                selftest.notify("HOST: Socket error: %s"% e)
+                return selftest.RESULT_ERROR
+
+            print 'HOST: Sending %d echo strings...'% self.ECHO_LOOPs,
+            for i in range(0, self.ECHO_LOOPs):
+                TEST_STRING = str(uuid.uuid4())
+                try:
+                    self.s.sendall(TEST_STRING)
+                    # NOTE(review): assumes the 36-byte UUID echo arrives
+                    # in a single recv -- holds on loopback; confirm for
+                    # lossy links
+                    data = self.s.recv(128)
+                except Exception, e:
+                    self.s = None
+                    selftest.notify("HOST: Socket error: %s"% e)
+                    return selftest.RESULT_ERROR
+
+                received_str = repr(data)[1:-1]
+                if TEST_STRING == received_str: # We need to cut not needed single quotes from the string
+                    sys.stdout.write('.')
+                    stdout.flush()
+                    result = True
+                else:
+                    print "Expected: "
+                    print "'%s'"% TEST_STRING
+                    print "received: "
+                    print "'%s'"% received_str
+                    result = False
+                    break
+
+        if self.s is not None:
+            self.s.close()
+        else:
+            # Either the banner never matched or the connect failed
+            selftest.notify("HOST: TCP Server not found")
+            result = False
+        return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/tcpecho_server_loop.py b/tools/host_tests/tcpecho_server_loop.py
new file mode 100644
index 0000000..be95f13
--- /dev/null
+++ b/tools/host_tests/tcpecho_server_loop.py
@@ -0,0 +1,40 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+# Be sure that the tools directory is in the search path
+import sys
+from os.path import join, abspath, dirname
+ROOT = abspath(join(dirname(__file__), "..", ".."))
+sys.path.insert(0, ROOT)
+
+from tools.private_settings import LOCALHOST
+from SocketServer import BaseRequestHandler, TCPServer
+
+
+class TCP_EchoHandler(BaseRequestHandler):
+    # Plain echo handler: mirror every chunk until the peer closes
+    def handle(self):
+        print "\nHandle connection from:", self.client_address
+        while True:
+            data = self.request.recv(1024)
+            if not data: break
+            self.request.sendall(data)
+        self.request.close()
+        print "socket closed"
+
+if __name__ == '__main__':
+    # Manual test entry point: echo server on well-known echo port 7
+    server = TCPServer((LOCALHOST, 7), TCP_EchoHandler)
+    print "listening for connections on:", (LOCALHOST, 7)
+    server.serve_forever()
diff --git a/tools/host_tests/udp_link_layer_auto.py b/tools/host_tests/udp_link_layer_auto.py
new file mode 100644
index 0000000..cb0578f
--- /dev/null
+++ b/tools/host_tests/udp_link_layer_auto.py
@@ -0,0 +1,145 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+"""
+How to use:
+make.py -m LPC1768 -t ARM -d E:\ -n NET_14
+udp_link_layer_auto.py -p COM20 -d E:\ -t 10
+"""
+
+import re
+import uuid
+import socket
+import thread
+from sys import stdout
+from time import time, sleep
+from host_test import DefaultTest
+from SocketServer import BaseRequestHandler, UDPServer
+
+
+# Received datagrams (with time)
+dict_udp_recv_datagrams = dict()
+
+# Sent datagrams (with time)
+dict_udp_sent_datagrams = dict()
+
+
+class UDPEchoClient_Handler(BaseRequestHandler):
+    # Receiver side of the link-layer test: records each datagram's
+    # payload and arrival time in the shared dict for later analysis.
+    def handle(self):
+        """ One handle per connection
+        """
+        # For a UDPServer the request is a (data, socket) pair
+        _data, _socket = self.request
+        # Process received datagram
+        data_str = repr(_data)[1:-1]
+        dict_udp_recv_datagrams[data_str] = time()
+
+
+def udp_packet_recv(threadName, server_ip, server_port):
+    """ This function will receive packet stream from mbed device
+    """
+    # Runs on a background thread (see thread.start_new_thread caller);
+    # never returns -- serves echoed datagrams into UDPEchoClient_Handler
+    server = UDPServer((server_ip, server_port), UDPEchoClient_Handler)
+    print "[UDP_COUNTER] Listening for connections... %s:%d"% (server_ip, server_port)
+    server.serve_forever()
+
+
+class UDPEchoServerTest(DefaultTest):
+ ECHO_SERVER_ADDRESS = "" # UDP IP of datagram bursts
+ ECHO_PORT = 0 # UDP port for datagram bursts
+ CONTROL_PORT = 23 # TCP port used to get stats from mbed device, e.g. counters
+ s = None # Socket
+
+ TEST_PACKET_COUNT = 1000 # how many packets should be send
+ TEST_STRESS_FACTOR = 0.001 # stress factor: 10 ms
+ PACKET_SATURATION_RATIO = 29.9 # Acceptable packet transmission in %
+
+ PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
+ re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
+
+ def get_control_data(self, command="stat\n"):
+ BUFFER_SIZE = 256
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.connect((self.ECHO_SERVER_ADDRESS, self.CONTROL_PORT))
+ except Exception, e:
+ data = None
+ s.send(command)
+ data = s.recv(BUFFER_SIZE)
+ s.close()
+ return data
+
+ def test(self):
+ serial_ip_msg = self.mbed.serial_readline()
+ if serial_ip_msg is None:
+ return self.RESULT_IO_SERIAL
+ stdout.write(serial_ip_msg)
+ stdout.flush()
+ # Searching for IP address and port prompted by server
+ m = self.re_detect_server_ip.search(serial_ip_msg)
+ if m and len(m.groups()):
+ self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
+ self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
+ self.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
+
+ # Open client socket to burst datagrams to UDP server in mbed
+ try:
+ self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ except Exception, e:
+ self.s = None
+ self.notify("HOST: Error: %s"% e)
+ return self.RESULT_ERROR
+
+ # UDP replied receiver works in background to get echoed datagrams
+ SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
+ SERVER_PORT = self.ECHO_PORT + 1
+ thread.start_new_thread(udp_packet_recv, ("Thread-udp-recv", SERVER_IP, SERVER_PORT))
+ sleep(0.5)
+
+ # Burst part
+ for no in range(self.TEST_PACKET_COUNT):
+ TEST_STRING = str(uuid.uuid4())
+ payload = str(no) + "__" + TEST_STRING
+ self.s.sendto(payload, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
+ dict_udp_sent_datagrams[payload] = time()
+ sleep(self.TEST_STRESS_FACTOR)
+
+ if self.s is not None:
+ self.s.close()
+
+ # Wait 5 seconds for packets to come
+ result = True
+ self.notify("HOST: Test Summary:")
+ for d in range(5):
+ sleep(1.0)
+ summary_datagram_success = (float(len(dict_udp_recv_datagrams)) / float(self.TEST_PACKET_COUNT)) * 100.0
+ self.notify("HOST: Datagrams received after +%d sec: %.3f%% (%d / %d), stress=%.3f ms"% (d,
+ summary_datagram_success,
+ len(dict_udp_recv_datagrams),
+ self.TEST_PACKET_COUNT,
+ self.TEST_STRESS_FACTOR))
+ result = result and (summary_datagram_success >= self.PACKET_SATURATION_RATIO)
+ stdout.flush()
+
+ # Getting control data from test
+ self.notify("...")
+ self.notify("HOST: Mbed Summary:")
+ mbed_stats = self.get_control_data()
+ self.notify(mbed_stats)
+ return self.RESULT_SUCCESS if result else self.RESULT_FAILURE
+
+
+if __name__ == '__main__':
+ UDPEchoServerTest().run()
diff --git a/tools/host_tests/udpecho_client.py b/tools/host_tests/udpecho_client.py
new file mode 100644
index 0000000..1ff833f
--- /dev/null
+++ b/tools/host_tests/udpecho_client.py
@@ -0,0 +1,55 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from socket import socket, AF_INET, SOCK_DGRAM
+import string, random
+from time import time
+
+from private_settings import CLIENT_ADDRESS
+
+ECHO_PORT = 7
+
+LEN_PACKET = 127
+N_PACKETS = 5000
+TOT_BITS = float(LEN_PACKET * N_PACKETS * 8) * 2
+MEGA = float(1024 * 1024)
+UPDATE_STEP = (N_PACKETS/10)
+
+class UDP_EchoClient:
+ s = socket(AF_INET, SOCK_DGRAM)
+
+ def __init__(self, host):
+ self.host = host
+ self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
+
+ def __packet(self):
+ # Comment out the checks when measuring the throughput
+ # packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
+ UDP_EchoClient.s.sendto(packet, (self.host, ECHO_PORT))
+ data = UDP_EchoClient.s.recv(LEN_PACKET)
+ # assert packet == data, "packet error:\n%s\n%s\n" % (packet, data)
+
+ def test(self):
+ start = time()
+ for i in range(N_PACKETS):
+ if (i % UPDATE_STEP) == 0: print '%.2f%%' % ((float(i)/float(N_PACKETS)) * 100.)
+ self.__packet()
+ t = time() - start
+ print 'Throughput: (%.2f)Mbits/s' % ((TOT_BITS / t)/MEGA)
+
+while True:
+ e = UDP_EchoClient(CLIENT_ADDRESS)
+ e.test()
diff --git a/tools/host_tests/udpecho_client_auto.py b/tools/host_tests/udpecho_client_auto.py
new file mode 100644
index 0000000..7896127
--- /dev/null
+++ b/tools/host_tests/udpecho_client_auto.py
@@ -0,0 +1,77 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import socket
+from sys import stdout
+from SocketServer import BaseRequestHandler, UDPServer
+
+class UDPEchoClient_Handler(BaseRequestHandler):
+    # Echoes each datagram back to its sender; prints a progress dot
+    # per datagram and dumps the payload once the test suite's
+    # '{{end}}' sentinel appears.
+    def handle(self):
+        """ One handle per connection
+        """
+        # For a UDPServer the request is a (data, socket) pair
+        data, socket = self.request
+        socket.sendto(data, self.client_address)
+        if '{{end}}' in data:
+            print
+            print data
+        else:
+            sys.stdout.write('.')
+        stdout.flush()
+
+class UDPEchoClientTest():
+
+ def send_server_ip_port(self, selftest, ip_address, port_no):
+ c = selftest.mbed.serial_readline() # 'UDPCllient waiting for server IP and port...'
+ if c is None:
+ selftest.print_result(selftest.RESULT_IO_SERIAL)
+ return
+ selftest.notify(c.strip())
+
+ selftest.notify("HOST: Sending server IP Address to target...")
+ connection_str = ip_address + ":" + str(port_no) + "\n"
+ selftest.mbed.serial_write(connection_str)
+
+ c = selftest.mbed.serial_readline() # 'UDPCllient waiting for server IP and port...'
+ if c is None:
+ self.print_result(selftest.RESULT_IO_SERIAL)
+ return
+ selftest.notify(c.strip())
+ return selftest.RESULT_PASSIVE
+
+ def test(self, selftest):
+ # We need to discover SERVEP_IP and set up SERVER_PORT
+ # Note: Port 7 is Echo Protocol:
+ #
+ # Port number rationale:
+ #
+ # The Echo Protocol is a service in the Internet Protocol Suite defined
+ # in RFC 862. It was originally proposed for testing and measurement
+ # of round-trip times[citation needed] in IP networks.
+ #
+ # A host may connect to a server that supports the Echo Protocol using
+ # the Transmission Control Protocol (TCP) or the User Datagram Protocol
+ # (UDP) on the well-known port number 7. The server sends back an
+ # identical copy of the data it received.
+ SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
+ SERVER_PORT = 7
+
+ # Returning none will suppress host test from printing success code
+ server = UDPServer((SERVER_IP, SERVER_PORT), UDPEchoClient_Handler)
+ print "HOST: Listening for UDP connections..."
+ self.send_server_ip_port(selftest, SERVER_IP, SERVER_PORT)
+ server.serve_forever()
diff --git a/tools/host_tests/udpecho_server.py b/tools/host_tests/udpecho_server.py
new file mode 100644
index 0000000..f607433
--- /dev/null
+++ b/tools/host_tests/udpecho_server.py
@@ -0,0 +1,29 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from SocketServer import BaseRequestHandler, UDPServer
+from private_settings import SERVER_ADDRESS
+
+class UDP_EchoHandler(BaseRequestHandler):
+    # Logs each datagram and echoes it back to the sender
+    def handle(self):
+        # For a UDPServer the request is a (data, socket) pair
+        data, socket = self.request
+        print "client:", self.client_address
+        print "data:", data
+        socket.sendto(data, self.client_address)
+
+# Manual test entry point: UDP echo server on port 7195
+server = UDPServer((SERVER_ADDRESS, 7195), UDP_EchoHandler)
+print "listening for connections"
+server.serve_forever()
diff --git a/tools/host_tests/udpecho_server_auto.py b/tools/host_tests/udpecho_server_auto.py
new file mode 100644
index 0000000..a7ee026
--- /dev/null
+++ b/tools/host_tests/udpecho_server_auto.py
@@ -0,0 +1,68 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import sys
+import uuid
+from sys import stdout
+from socket import socket, AF_INET, SOCK_DGRAM
+
+class UDPEchoServerTest():
+    """Host side of the UDP echo server test: parse the server address
+    from the target's serial banner, then verify 100 UUID datagrams
+    are echoed back verbatim.
+    """
+    ECHO_SERVER_ADDRESS = ""  # parsed from the serial banner
+    ECHO_PORT = 0             # parsed from the serial banner
+    s = None # Socket
+
+    PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
+    re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
+
+    def test(self, selftest):
+        result = True
+        serial_ip_msg = selftest.mbed.serial_readline()
+        if serial_ip_msg is None:
+            return selftest.RESULT_IO_SERIAL
+        selftest.notify(serial_ip_msg)
+        # Searching for IP address and port prompted by server
+        m = self.re_detect_server_ip.search(serial_ip_msg)
+        if m and len(m.groups()):
+            self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
+            self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
+            selftest.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
+
+            # We assume this test fails so can't send 'error' message to server
+            try:
+                self.s = socket(AF_INET, SOCK_DGRAM)
+            except Exception, e:
+                self.s = None
+                selftest.notify("HOST: Socket error: %s"% e)
+                return selftest.RESULT_ERROR
+
+            # Round-trip 100 unique UUID payloads; any mismatch fails
+            for i in range(0, 100):
+                TEST_STRING = str(uuid.uuid4())
+                self.s.sendto(TEST_STRING, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
+                data = self.s.recv(len(TEST_STRING))
+                received_str = repr(data)[1:-1]
+                if TEST_STRING != received_str:
+                    result = False
+                    break
+                sys.stdout.write('.')
+                stdout.flush()
+        # NOTE(review): this 'else' pairs with the banner-match 'if m'
+        # above (no UDP server address parsed) -- confirm indentation
+        # when applying the patch
+        else:
+            result = False
+
+        if self.s is not None:
+            self.s.close()
+        return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/wait_us_auto.py b/tools/host_tests/wait_us_auto.py
new file mode 100644
index 0000000..2ab66a3
--- /dev/null
+++ b/tools/host_tests/wait_us_auto.py
@@ -0,0 +1,69 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from time import time
+
+class WaitusTest():
+    """ This test is reading single characters from stdio
+        and measures time between their occurrences.
+    """
+    TICK_LOOP_COUNTER = 13            # max ticks to sample
+    TICK_LOOP_SUCCESSFUL_COUNTS = 10  # consecutive in-tolerance ticks needed to pass
+    DEVIATION = 0.10 # +/-10%
+
+    def test(self, selftest):
+        test_result = True
+        # First character to start test (to know after reset when test starts)
+        if selftest.mbed.set_serial_timeout(None) is None:
+            return selftest.RESULT_IO_SERIAL
+        c = selftest.mbed.serial_read(1)
+        if c is None:
+            return selftest.RESULT_IO_SERIAL
+        if c == '$': # target will printout TargetID e.g.: $$$$1040e649d5c09a09a3f6bc568adef61375c6
+            #Read additional 39 bytes of TargetID
+            if selftest.mbed.serial_read(39) is None:
+                return selftest.RESULT_IO_SERIAL
+            c = selftest.mbed.serial_read(1) # Re-read first 'tick'
+            if c is None:
+                return selftest.RESULT_IO_SERIAL
+        start_serial_pool = time()
+        start = time()
+
+        success_counter = 0
+
+        # Each tick is expected ~1 second apart; measure the gap between
+        # consecutive single-character reads
+        for i in range(0, self.TICK_LOOP_COUNTER):
+            c = selftest.mbed.serial_read(1)
+            if c is None:
+                return selftest.RESULT_IO_SERIAL
+            delta = time() - start
+            deviation = abs(delta - 1)
+            # Round values
+            delta = round(delta, 2)
+            deviation = round(deviation, 2)
+            # Check if time measurements are in given range
+            deviation_ok = True if delta > 0 and deviation <= self.DEVIATION else False
+            # Counter resets on any out-of-tolerance tick, so passing
+            # requires TICK_LOOP_SUCCESSFUL_COUNTS consecutive good ticks
+            success_counter = success_counter+1 if deviation_ok else 0
+            msg = "OK" if deviation_ok else "FAIL"
+            selftest.notify("%s in %.2f sec (%.2f) [%s]"% (c, delta, deviation, msg))
+            start = time()
+            if success_counter >= self.TICK_LOOP_SUCCESSFUL_COUNTS:
+                break
+        measurement_time = time() - start_serial_pool
+        selftest.notify("Consecutive OK timer reads: %d"% success_counter)
+        selftest.notify("Completed in %.2f sec" % (measurement_time))
+        test_result = True if success_counter >= self.TICK_LOOP_SUCCESSFUL_COUNTS else False
+        return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
diff --git a/tools/host_tests/wfi_auto.py b/tools/host_tests/wfi_auto.py
new file mode 100644
index 0000000..a62c432
--- /dev/null
+++ b/tools/host_tests/wfi_auto.py
@@ -0,0 +1,45 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import uuid
+import time
+from sys import stdout
+
+class WFITest():
+
+ def test(self, selftest):
+ c = selftest.mbed.serial_readline()
+
+ if c == None:
+ selftest.notify("HOST: No output detected")
+ return selftest.RESULT_IO_SERIAL
+
+ if c.strip() != "0":
+ selftest.notify("HOST: Unexpected output. Expected '0' but received '%s'" % c.strip())
+ return selftest.RESULT_FAILURE
+
+ # Wait 10 seconds to allow serial prints (indicating failure)
+ selftest.mbed.set_serial_timeout(10)
+
+ # If no characters received, pass the test
+ if not selftest.mbed.serial_readline():
+ selftest.notify("HOST: No further output detected")
+ return selftest.RESULT_SUCCESS
+ else:
+ selftest.notify("HOST: Extra output detected")
+ return selftest.RESULT_FAILURE
diff --git a/tools/libraries.py b/tools/libraries.py
new file mode 100644
index 0000000..a6d2c0b
--- /dev/null
+++ b/tools/libraries.py
@@ -0,0 +1,129 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from tools.paths import *
+from tools.data.support import *
+from tools.tests import TEST_MBED_LIB
+
+
+LIBRARIES = [
+ # RTOS libraries
+ {
+ "id": "rtx",
+ "source_dir": MBED_RTX,
+ "build_dir": RTOS_LIBRARIES,
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "rtos",
+ "source_dir": RTOS_ABSTRACTION,
+ "build_dir": RTOS_LIBRARIES,
+ "dependencies": [MBED_LIBRARIES, MBED_RTX],
+ },
+
+ # RPC
+ {
+ "id": "rpc",
+ "source_dir": MBED_RPC,
+ "build_dir": RPC_LIBRARY,
+ "dependencies": [MBED_LIBRARIES],
+ },
+
+ # USB Device libraries
+ {
+ "id": "usb",
+ "source_dir": USB,
+ "build_dir": USB_LIBRARIES,
+ "dependencies": [MBED_LIBRARIES],
+ },
+
+ # USB Host libraries
+ {
+ "id": "usb_host",
+ "source_dir": USB_HOST,
+ "build_dir": USB_HOST_LIBRARIES,
+ "dependencies": [MBED_LIBRARIES, FAT_FS, MBED_RTX, RTOS_ABSTRACTION],
+ },
+
+ # DSP libraries
+ {
+ "id": "cmsis_dsp",
+ "source_dir": DSP_CMSIS,
+ "build_dir": DSP_LIBRARIES,
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "dsp",
+ "source_dir": DSP_ABSTRACTION,
+ "build_dir": DSP_LIBRARIES,
+ "dependencies": [MBED_LIBRARIES, DSP_CMSIS],
+ },
+
+ # File system libraries
+ {
+ "id": "fat",
+ "source_dir": [FAT_FS, SD_FS],
+ "build_dir": FS_LIBRARY,
+ "dependencies": [MBED_LIBRARIES]
+ },
+
+ # Network libraries
+ {
+ "id": "eth",
+ "source_dir": [ETH_SOURCES, LWIP_SOURCES],
+ "build_dir": ETH_LIBRARY,
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES]
+ },
+
+ {
+ "id": "ublox",
+ "source_dir": [UBLOX_SOURCES, CELLULAR_SOURCES, CELLULAR_USB_SOURCES, LWIP_SOURCES],
+ "build_dir": UBLOX_LIBRARY,
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES],
+ },
+
+ # Unit Testing library
+ {
+ "id": "cpputest",
+ "source_dir": [CPPUTEST_SRC, CPPUTEST_PLATFORM_SRC, CPPUTEST_TESTRUNNER_SCR],
+ "build_dir": CPPUTEST_LIBRARY,
+ "dependencies": [MBED_LIBRARIES],
+ 'inc_dirs': [CPPUTEST_INC, CPPUTEST_PLATFORM_INC, CPPUTEST_TESTRUNNER_INC, TEST_MBED_LIB],
+ 'inc_dirs_ext': [CPPUTEST_INC_EXT],
+ 'macros': ["CPPUTEST_USE_MEM_LEAK_DETECTION=0", "CPPUTEST_USE_STD_CPP_LIB=0", "CPPUTEST=1"],
+ },
+]
+
+
+LIBRARY_MAP = dict([(library['id'], library) for library in LIBRARIES])
+
+
+class Library:
+ DEFAULTS = {
+ "supported": DEFAULT_SUPPORT,
+ 'dependencies': None,
+ 'inc_dirs': None, # Include dirs required by library build
+ 'inc_dirs_ext': None, # Include dirs required by others to use with this library
+ 'macros': None, # Additional macros you want to define when building library
+ }
+ def __init__(self, lib_id):
+ self.__dict__.update(Library.DEFAULTS)
+ self.__dict__.update(LIBRARY_MAP[lib_id])
+
+ def is_supported(self, target, toolchain):
+ if not hasattr(self, 'supported'):
+ return True
+ return (target.name in self.supported) and (toolchain in self.supported[target.name])
diff --git a/tools/make.py b/tools/make.py
new file mode 100644
index 0000000..adfd82b
--- /dev/null
+++ b/tools/make.py
@@ -0,0 +1,293 @@
+#! /usr/bin/env python2
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+TEST BUILD & RUN
+"""
+import sys
+from time import sleep
+from shutil import copy
+from os.path import join, abspath, dirname
+
+# Be sure that the tools directory is in the search path
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+from tools.utils import args_error
+from tools.paths import BUILD_DIR
+from tools.paths import RTOS_LIBRARIES
+from tools.paths import RPC_LIBRARY
+from tools.paths import ETH_LIBRARY
+from tools.paths import USB_HOST_LIBRARIES, USB_LIBRARIES
+from tools.paths import DSP_LIBRARIES
+from tools.paths import FS_LIBRARY
+from tools.paths import UBLOX_LIBRARY
+from tools.tests import TESTS, Test, TEST_MAP
+from tools.tests import TEST_MBED_LIB
+from tools.targets import TARGET_MAP
+from tools.options import get_default_options_parser
+from tools.build_api import build_project
+try:
+    import tools.private_settings as ps
+except ImportError:
+    # Only a missing module is expected here; a bare 'except' would also
+    # silently swallow syntax errors inside private_settings.py.
+    ps = object()
+
+
+if __name__ == '__main__':
+ # Parse Options
+ parser = get_default_options_parser()
+ parser.add_option("-p",
+ type="int",
+ dest="program",
+ help="The index of the desired test program: [0-%d]" % (len(TESTS)-1))
+
+ parser.add_option("-n",
+ dest="program_name",
+ help="The name of the desired test program")
+
+ parser.add_option("-j", "--jobs",
+ type="int",
+ dest="jobs",
+ default=1,
+ help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
+
+ parser.add_option("-v", "--verbose",
+ action="store_true",
+ dest="verbose",
+ default=False,
+ help="Verbose diagnostic output")
+
+ parser.add_option("--silent",
+ action="store_true",
+ dest="silent",
+ default=False,
+ help="Silent diagnostic output (no copy, compile notification)")
+
+ parser.add_option("-D", "",
+ action="append",
+ dest="macros",
+ help="Add a macro definition")
+
+ # Local run
+ parser.add_option("--automated", action="store_true", dest="automated",
+ default=False, help="Automated test")
+ parser.add_option("--host", dest="host_test",
+ default=None, help="Host test")
+ parser.add_option("--extra", dest="extra",
+ default=None, help="Extra files")
+ parser.add_option("--peripherals", dest="peripherals",
+ default=None, help="Required peripherals")
+ parser.add_option("--dep", dest="dependencies",
+ default=None, help="Dependencies")
+ parser.add_option("--source", dest="source_dir",
+ default=None, help="The source (input) directory")
+ parser.add_option("--duration", type="int", dest="duration",
+ default=None, help="Duration of the test")
+ parser.add_option("--build", dest="build_dir",
+ default=None, help="The build (output) directory")
+ parser.add_option("-d", "--disk", dest="disk",
+ default=None, help="The mbed disk")
+ parser.add_option("-s", "--serial", dest="serial",
+ default=None, help="The mbed serial port")
+ parser.add_option("-b", "--baud", type="int", dest="baud",
+ default=None, help="The mbed serial baud rate")
+ parser.add_option("-L", "--list-tests", action="store_true", dest="list_tests",
+ default=False, help="List available tests in order and exit")
+
+ # Ideally, all the tests with a single "main" thread can be run with, or
+ # without the rtos, eth, usb_host, usb, dsp, fat, ublox
+ parser.add_option("--rtos",
+ action="store_true", dest="rtos",
+ default=False, help="Link with RTOS library")
+
+ parser.add_option("--rpc",
+ action="store_true", dest="rpc",
+ default=False, help="Link with RPC library")
+
+ parser.add_option("--eth",
+ action="store_true", dest="eth",
+ default=False,
+ help="Link with Ethernet library")
+
+ parser.add_option("--usb_host",
+ action="store_true",
+ dest="usb_host",
+ default=False,
+ help="Link with USB Host library")
+
+ parser.add_option("--usb",
+ action="store_true",
+ dest="usb",
+ default=False,
+ help="Link with USB Device library")
+
+ parser.add_option("--dsp",
+ action="store_true",
+ dest="dsp",
+ default=False,
+ help="Link with DSP library")
+
+    parser.add_option("--fat",
+                      action="store_true",
+                      dest="fat",
+                      default=False,
+                      help="Link with FS and SD card file system library")
+
+ parser.add_option("--ublox",
+ action="store_true",
+ dest="ublox",
+ default=False,
+ help="Link with U-Blox library")
+
+ parser.add_option("--testlib",
+ action="store_true",
+ dest="testlib",
+ default=False,
+ help="Link with mbed test library")
+
+ # Specify a different linker script
+ parser.add_option("-l", "--linker", dest="linker_script",
+ default=None, help="use the specified linker script")
+
+ (options, args) = parser.parse_args()
+
+ # Print available tests in order and exit
+ if options.list_tests is True:
+ print '\n'.join(map(str, sorted(TEST_MAP.values())))
+ sys.exit()
+
+ # force program to "0" if a source dir is specified
+ if options.source_dir is not None:
+ p = 0
+ n = None
+ else:
+ # Program Number or name
+ p, n = options.program, options.program_name
+
+ if n is not None and p is not None:
+ args_error(parser, "[ERROR] specify either '-n' or '-p', not both")
+ if n:
+ # We will transform 'n' to list of 'p' (integers which are test numbers)
+ nlist = n.split(',')
+ for test_id in nlist:
+ if test_id not in TEST_MAP.keys():
+ args_error(parser, "[ERROR] Program with name '%s' not found"% test_id)
+
+ p = [TEST_MAP[n].n for n in nlist]
+ elif p is None or (p < 0) or (p > (len(TESTS)-1)):
+ message = "[ERROR] You have to specify one of the following tests:\n"
+ message += '\n'.join(map(str, sorted(TEST_MAP.values())))
+ args_error(parser, message)
+
+ # If 'p' was set via -n to list of numbers make this a single element integer list
+ if type(p) != type([]):
+ p = [p]
+
+ # Target
+ if options.mcu is None :
+ args_error(parser, "[ERROR] You should specify an MCU")
+ mcu = options.mcu
+
+ # Toolchain
+ if options.tool is None:
+ args_error(parser, "[ERROR] You should specify a TOOLCHAIN")
+ toolchain = options.tool
+
+ # Test
+ for test_no in p:
+ test = Test(test_no)
+ if options.automated is not None: test.automated = options.automated
+ if options.dependencies is not None: test.dependencies = options.dependencies
+ if options.host_test is not None: test.host_test = options.host_test;
+ if options.peripherals is not None: test.peripherals = options.peripherals;
+ if options.duration is not None: test.duration = options.duration;
+ if options.extra is not None: test.extra_files = options.extra
+
+ if not test.is_supported(mcu, toolchain):
+ print 'The selected test is not supported on target %s with toolchain %s' % (mcu, toolchain)
+ sys.exit()
+
+ # Linking with extra libraries
+ if options.rtos: test.dependencies.append(RTOS_LIBRARIES)
+ if options.rpc: test.dependencies.append(RPC_LIBRARY)
+ if options.eth: test.dependencies.append(ETH_LIBRARY)
+ if options.usb_host: test.dependencies.append(USB_HOST_LIBRARIES)
+ if options.usb: test.dependencies.append(USB_LIBRARIES)
+ if options.dsp: test.dependencies.append(DSP_LIBRARIES)
+ if options.fat: test.dependencies.append(FS_LIBRARY)
+ if options.ublox: test.dependencies.append(UBLOX_LIBRARY)
+ if options.testlib: test.dependencies.append(TEST_MBED_LIB)
+
+ build_dir = join(BUILD_DIR, "test", mcu, toolchain, test.id)
+ if options.source_dir is not None:
+ test.source_dir = options.source_dir
+ build_dir = options.source_dir
+
+ if options.build_dir is not None:
+ build_dir = options.build_dir
+
+ target = TARGET_MAP[mcu]
+ try:
+ bin_file = build_project(test.source_dir, build_dir, target, toolchain, test.dependencies, options.options,
+ linker_script=options.linker_script,
+ clean=options.clean,
+ verbose=options.verbose,
+ silent=options.silent,
+ macros=options.macros,
+ jobs=options.jobs)
+ print 'Image: %s'% bin_file
+
+ if options.disk:
+ # Simple copy to the mbed disk
+ copy(bin_file, options.disk)
+
+ if options.serial:
+ # Import pyserial: https://pypi.python.org/pypi/pyserial
+ from serial import Serial
+
+ sleep(target.program_cycle_s())
+
+ serial = Serial(options.serial, timeout = 1)
+ if options.baud:
+ serial.setBaudrate(options.baud)
+
+ serial.flushInput()
+ serial.flushOutput()
+
+ try:
+ serial.sendBreak()
+ except:
+ # In linux a termios.error is raised in sendBreak and in setBreak.
+ # The following setBreak() is needed to release the reset signal on the target mcu.
+ try:
+ serial.setBreak(False)
+ except:
+ pass
+
+ while True:
+ c = serial.read(512)
+ sys.stdout.write(c)
+ sys.stdout.flush()
+
+ except KeyboardInterrupt, e:
+ print "\n[CTRL+c] exit"
+ except Exception,e:
+ if options.verbose:
+ import traceback
+ traceback.print_exc(file=sys.stdout)
+ else:
+ print "[ERROR] %s" % str(e)
diff --git a/tools/options.py b/tools/options.py
new file mode 100644
index 0000000..cf146c4
--- /dev/null
+++ b/tools/options.py
@@ -0,0 +1,44 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from optparse import OptionParser
+from tools.toolchains import TOOLCHAINS
+from tools.targets import TARGET_NAMES
+
+
+def get_default_options_parser():
+ parser = OptionParser()
+
+ targetnames = TARGET_NAMES
+ targetnames.sort()
+ toolchainlist = list(TOOLCHAINS)
+ toolchainlist.sort()
+
+ parser.add_option("-m", "--mcu",
+ help="build for the given MCU (%s)" % ', '.join(targetnames),
+ metavar="MCU")
+
+ parser.add_option("-t", "--tool",
+ help="build using the given TOOLCHAIN (%s)" % ', '.join(toolchainlist),
+ metavar="TOOLCHAIN")
+
+ parser.add_option("-c", "--clean", action="store_true", default=False,
+ help="clean the build directory")
+
+ parser.add_option("-o", "--options", action="append",
+ help='Add a build option ("save-asm": save the asm generated by the compiler, "debug-info": generate debugging information, "analyze": run Goanna static code analyzer")')
+
+ return parser
diff --git a/tools/patch.py b/tools/patch.py
new file mode 100644
index 0000000..6fda63f
--- /dev/null
+++ b/tools/patch.py
@@ -0,0 +1,50 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+http://www.nxp.com/documents/user_manual/UM10360.pdf
+
+32.3.1.1 Criterion for Valid User Code
+The reserved Cortex-M3 exception vector location 7 (offset 0x1C in the vector table)
+should contain the 2's complement of the check-sum of table entries 0 through 6. This
+causes the checksum of the first 8 table entries to be 0. The boot loader code checksums
+the first 8 locations in sector 0 of the flash. If the result is 0, then execution control is
+transferred to the user code.
+"""
+from struct import unpack, pack
+
+
+def patch(bin_path):
+ with open(bin_path, 'r+b') as bin:
+ # Read entries 0 through 6 (Little Endian 32bits words)
+ vector = [unpack(' (len(TESTS)-1)):
+ message = "[ERROR] You have to specify one of the following tests:\n"
+ message += '\n'.join(map(str, sorted(TEST_MAP.values())))
+ args_error(parser, message)
+
+ # Project
+ if p is None or (p < 0) or (p > (len(TESTS)-1)):
+ message = "[ERROR] You have to specify one of the following tests:\n"
+ message += '\n'.join(map(str, sorted(TEST_MAP.values())))
+ args_error(parser, message)
+ test = Test(p)
+
+ # Some libraries have extra macros (called by exporter symbols) to we need to pass
+ # them to maintain compilation macros integrity between compiled library and
+ # header files we might use with it
+ lib_symbols = []
+ for lib in LIBRARIES:
+ if lib['build_dir'] in test.dependencies:
+ lib_macros = lib.get('macros', None)
+ if lib_macros is not None:
+ lib_symbols.extend(lib_macros)
+
+ if not options.build:
+ # Substitute the library builds with the sources
+ # TODO: Substitute also the other library build paths
+ if MBED_LIBRARIES in test.dependencies:
+ test.dependencies.remove(MBED_LIBRARIES)
+ test.dependencies.append(MBED_BASE)
+
+ # Build the project with the same directory structure of the mbed online IDE
+ project_dir = join(EXPORT_WORKSPACE, test.id)
+ setup_user_prj(project_dir, test.source_dir, test.dependencies)
+
+ # Export to selected toolchain
+ tmp_path, report = export(project_dir, test.id, ide, mcu, EXPORT_WORKSPACE, EXPORT_TMP, extra_symbols=lib_symbols)
+ if report['success']:
+ zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (test.id, ide, mcu))
+ move(tmp_path, zip_path)
+ successes.append("%s::%s\t%s"% (mcu, ide, zip_path))
+ else:
+ failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg']))
+
+ # Prints export results
+ print
+ if len(successes) > 0:
+ print "Successful exports:"
+ for success in successes:
+ print " * %s"% success
+ if len(failures) > 0:
+ print "Failed exports:"
+ for failure in failures:
+ print " * %s"% failure
diff --git a/tools/remove-device-h.py b/tools/remove-device-h.py
new file mode 100644
index 0000000..bbed161
--- /dev/null
+++ b/tools/remove-device-h.py
@@ -0,0 +1,216 @@
+import json
+import os
+import stat
+import re
+from collections import OrderedDict
+from subprocess import Popen
+
+git_processes = []
+
+class MyJSONEncoder(json.JSONEncoder):
+ def __init__(self, *args, **kwargs):
+ super(MyJSONEncoder, self).__init__(*args, **kwargs)
+ self.current_indent = 0
+ self.current_indent_str = ""
+
+
+ def encode(self, o):
+ #Special Processing for lists
+ if isinstance(o, (list, tuple)):
+ primitives_only = True
+ for item in o:
+ if isinstance(item, (list, tuple, dict)):
+ primitives_only = False
+ break
+ output = []
+ if primitives_only:
+ for item in o:
+ output.append(json.dumps(item))
+ return "[" + ", ".join(output) + "]"
+ else:
+ self.current_indent += self.indent
+ self.current_indent_str = " " * self.current_indent
+ for item in o:
+ output.append(self.current_indent_str + self.encode(item))
+ self.current_indent -= self.indent
+ self.current_indent_str = " " * self.current_indent
+ return "[\n" + ",\n".join(output) + "\n" + self.current_indent_str + "]"
+ elif isinstance(o, dict):
+ primitives_only = True
+ for item in o.values():
+ if isinstance(item, (list, tuple, dict)):
+ primitives_only = False
+ break
+ output = []
+ if primitives_only and len(o) < 3:
+ for key, value in o.iteritems():
+ output.append(json.dumps(key) + ": " + self.encode(value))
+ return "{" + ", ".join(output) + "}"
+ else:
+ self.current_indent += self.indent
+ self.current_indent_str = " " * self.current_indent
+ for key, value in o.iteritems():
+ output.append(self.current_indent_str + json.dumps(key) + ": " + self.encode(value))
+ self.current_indent -= self.indent
+ self.current_indent_str = " " * self.current_indent
+ return "{\n" + ",\n".join(output) + "\n" + self.current_indent_str + "}"
+ else:
+ return json.dumps(o)
+
+def load(path):
+ with open(path, 'r') as f :
+ return json.load(f, object_pairs_hook=OrderedDict)
+
+def dump(path, obj):
+ with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f :
+ os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
+ f.write(MyJSONEncoder(indent=4).encode(obj))
+ f.write(u'\n')
+ f.truncate()
+
+def find(stem, path) :
+    for root, directories, files in os.walk(path, followlinks=True) :
+        # Prune hidden directories in place so os.walk skips them; the
+        # original bare comprehension was a no-op (result discarded).
+        directories[:] = [dir for dir in directories if dir[0] != '.']
+        if (stem_match(stem,os.path.basename(os.path.normpath(root))) and
+            "device.h" in files) :
+            return os.path.join(root, "device.h")
+
+def find_all_devices(path, verbose=False) :
+    for root, directories, files in os.walk(path, followlinks=True) :
+        # Prune hidden directories in place so os.walk skips them; the
+        # original bare comprehension was a no-op (result discarded).
+        directories[:] = [dir for dir in directories if dir[0] != '.']
+        if "device.h" in files :
+            if verbose : print("[VERBOSE] found a device.h file in {}".format(root))
+            yield os.path.join(root, "device.h")
+
+mbed_matcher = re.compile('mbed', re.IGNORECASE)
+def stem_match(stem, thing) :
+ return (stem in thing or
+ re.sub(mbed_matcher, '', stem) in thing)
+
+attr_matcher = re.compile('^#define\W+DEVICE_(\w+)\W+1.*$')
+def parse_attributes(path) :
+ with open(path) as input :
+ for line in input :
+ m = re.match(attr_matcher, line)
+ if m: yield m.group(1)
+
+remove_matcher = re.compile('^#define\W+DEVICE_(\w+)\W+[10].*$')
+def remove_attributes(path) :
+ with open(path) as input :
+ remainder = filter(lambda l: not re.match(remove_matcher, l), input)
+ with open(path,"wb") as output :
+ output.truncate(0)
+ output.write("// The 'provides' section in 'target.json' is now used"+
+ " to create the device's hardware preprocessor switches.\n")
+ output.write("// Check the 'provides' section of the target description"+
+ " in 'targets.json' for more details.\n")
+ output.writelines(remainder)
+
+def user_select(things, message) :
+    print(message)
+    for thing, number in zip(things, range(len(things))):
+        print("{} : {}".format(number, thing))
+    selection = None
+    while selection is None :
+        print("please select an integer [0..{}] or specify all".format(len(things) - 1))
+        try :
+            i = raw_input()
+            if i == "all" :
+                selection = "all"
+            else :
+                selection = int(i)
+                # valid indices are 0..len-1, so len(things) itself is out of
+                # range; the original '>' admitted it and IndexError'd below
+                if (selection >= len(things) or
+                    selection < 0) :
+                    print("selection {} out of range".format(selection))
+                    selection = None
+        except (ValueError, SyntaxError) :
+            print("selection not understood")
+    if selection == "all" :
+        return things
+    else :
+        return [things[selection]]
+
+target_matcher = re.compile("TARGET_")
+def strip_target(str) :
+ return re.sub(target_matcher, "", str)
+
+def add_to_targets(targets, device_file, verbose=False, remove=False) :
+ if verbose : print("[VERBOSE] trying target {}".format(device_file))
+ device = strip_target(os.path.basename(os.path.normpath(os.path.dirname(device_file))))
+ if not device :
+ print("[WARNING] device {} did not have an associated device.h".format(device))
+ else :
+ possible_matches = set([key for key in targets.keys() if stem_match(device, key)])
+ for key, value in targets.iteritems() :
+ for alt in value['extra_labels'] if 'extra_labels' in value else [] :
+ if stem_match(device, alt) : possible_matches.add(key)
+ for alt in value['extra_labels_add'] if 'extra_labels_add' in value else [] :
+ if stem_match(device, alt) : possible_matches.add(key)
+ possible_matches = list(possible_matches)
+ for match in possible_matches :
+ if device == match : possible_matches = [match]
+ if not possible_matches :
+ print("[WARNING] device {} did not have an associated entry in targets.json".format(device))
+ return None
+ elif len(possible_matches) > 1 :
+ message = ("possible matches for file {}".format(device_file))
+ target = user_select(possible_matches, message)
+ else :
+ target = possible_matches
+ attrs = list(parse_attributes(device_file))
+ if attrs :
+ for t in target :
+ targets[t]["device_has"] = sorted(list(set(targets[t].setdefault("device_has",[]) + attrs)))
+ if verbose : print("[VERBOSE] target {} now device_has {}".format(t, attrs))
+ if remove is True:
+ remove_attributes(device_file)
+
+if __name__ == '__main__' :
+ import argparse
+ parser = argparse.ArgumentParser(description='A helpful little script for converting' +
+ ' device.h files to parts of the targets.json file')
+ parser.add_argument('-a', '--all', action='store_true',
+ help='find and convert all available device.h files in the'+
+ ' directory tree starting at the current directory')
+ parser.add_argument('-f', '--file', nargs='+', help='specify an individual file to '+
+ 'convert from device.h format to a piece of targets.json')
+ parser.add_argument('-t', '--target', nargs='+', help='specify an individual target'+
+ ' to convert from device.h format to a piece of targets.json')
+ parser.add_argument('-v', '--verbose', action='store_true',
+ help="print out every target that is updated in the targets.json")
+ parser.add_argument('-r', '--rm', action='store_true',
+ help="remove the used attributes from a device.h file")
+ args = parser.parse_args()
+ if not args.target and not args.file and not args.all :
+ print("[WARNING] no action specified; auto-formatting targets.json")
+
+ targets_file_name = os.path.join(os.curdir, "hal", "targets.json")
+ try :
+ targets = load(targets_file_name)
+ except OSError :
+ print("[ERROR] did not find targets.json where I expected it {}".format(targets_file_name))
+ exit(1)
+ except ValueError :
+ print("[ERROR] invalid json found in {}".format(targets_file_name))
+ exit(2)
+
+ if args.target :
+ for target in args.target :
+ device_file = find(target, os.curdir)
+ if device_file :
+ add_to_targets(targets, device_file, verbose=args.verbose, remove=args.rm)
+ else :
+ print("[WARNING] could not locate a device file for target {}".format(target))
+
+ if args.file :
+ for file in args.file :
+ add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)
+
+ if args.all :
+ for file in find_all_devices(os.curdir, verbose=args.verbose) :
+ add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)
+
+ dump(targets_file_name, targets)
+
+ for process in git_processes :
+ process.wait()
diff --git a/tools/settings.py b/tools/settings.py
new file mode 100644
index 0000000..cd416a0
--- /dev/null
+++ b/tools/settings.py
@@ -0,0 +1,104 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from os.path import join, abspath, dirname
+import logging
+
+ROOT = abspath(join(dirname(__file__), ".."))
+
+# These default settings have two purposes:
+# 1) Give a template for writing local "private_settings.py"
+# 2) Give default initialization fields for the "toolchains.py" constructors
+
+##############################################################################
+# Build System Settings
+##############################################################################
+BUILD_DIR = abspath(join(ROOT, ".build"))
+
+# ARM
+armcc = "standalone" # "keil", or "standalone", or "ds-5"
+
+if armcc == "keil":
+    ARM_PATH = "C:/Keil_v5/ARM/ARMCC"
+    ARM_BIN = join(ARM_PATH, "bin")
+    # fixed typo: "incldue" pointed at a nonexistent directory for Keil installs
+    ARM_INC = join(ARM_PATH, "include")
+    ARM_LIB = join(ARM_PATH, "lib")
+
+elif armcc == "standalone":
+    ARM_PATH = "C:/Program Files (x86)/ARM_Compiler_5.06u1"
+    ARM_BIN = join(ARM_PATH, "bin")
+    ARM_INC = join(ARM_PATH, "include")
+    ARM_LIB = join(ARM_PATH, "lib")
+
+elif armcc == "ds-5":
+    ARM_PATH = "C:/Program Files (x86)/DS-5"
+    ARM_BIN = join(ARM_PATH, "bin")
+    ARM_INC = join(ARM_PATH, "include")
+    ARM_LIB = join(ARM_PATH, "lib")
+
+ARM_CPPLIB = join(ARM_LIB, "cpplib")
+MY_ARM_CLIB = join(ARM_PATH, "lib", "microlib")
+
+# GCC ARM
+GCC_ARM_PATH = ""
+
+# GCC CodeRed
+GCC_CR_PATH = "C:/code_red/RedSuite_4.2.0_349/redsuite/Tools/bin"
+
+# IAR
+IAR_PATH = "C:/Program Files (x86)/IAR Systems/Embedded Workbench 7.3/arm"
+
+# Goanna static analyser. Please overload it in private_settings.py
+GOANNA_PATH = "c:/Program Files (x86)/RedLizards/Goanna Central 3.2.3/bin"
+
+# cppcheck path (command) and output message format
+CPPCHECK_CMD = ["cppcheck", "--enable=all"]
+CPPCHECK_MSG_FORMAT = ["--template=[{severity}] {file}@{line}: {id}:{message}"]
+
+BUILD_OPTIONS = []
+
+# mbed.org username
+MBED_ORG_USER = ""
+
+##############################################################################
+# Test System Settings
+##############################################################################
+SERVER_PORT = 59432
+SERVER_ADDRESS = "10.2.200.94"
+LOCALHOST = "10.2.200.94"
+
+MUTs = {
+ "1" : {"mcu": "LPC1768",
+ "port":"COM41", "disk":'E:\\',
+ "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
+ },
+ "2": {"mcu": "LPC11U24",
+ "port":"COM42", "disk":'F:\\',
+ "peripherals": ["TMP102", "digital_loop", "port_loop", "SD"]
+ },
+ "3" : {"mcu": "KL25Z",
+ "port":"COM43", "disk":'G:\\',
+ "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
+ },
+}
+
+##############################################################################
+# Private Settings
+##############################################################################
+try:
+    # Allow to overwrite the default settings without the need to edit the
+    # settings file stored in the repository
+    from tools.private_settings import *
+except ImportError:
+    print '[WARNING] Using default settings. Define your settings in the file "tools/private_settings.py" or in "./mbed_settings.py"'
diff --git a/tools/singletest.py b/tools/singletest.py
new file mode 100644
index 0000000..058b96d
--- /dev/null
+++ b/tools/singletest.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python2
+
+"""
+mbed SDK
+Copyright (c) 2011-2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+"""
+
+"""
+File format example: test_spec.json:
+{
+ "targets": {
+ "KL46Z": ["ARM", "GCC_ARM"],
+ "LPC1768": ["ARM", "GCC_ARM", "GCC_CR", "IAR"],
+ "LPC11U24": ["uARM"],
+ "NRF51822": ["ARM"]
+ }
+}
+
+File format example: muts_all.json:
+{
+ "1" : {"mcu": "LPC1768",
+ "port":"COM4",
+ "disk":"J:\\",
+ "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
+ },
+
+ "2" : {"mcu": "KL25Z",
+ "port":"COM7",
+ "disk":"G:\\",
+ "peripherals": ["digital_loop", "port_loop", "analog_loop"]
+ }
+}
+"""
+
+
+# Be sure that the tools directory is in the search path
+import sys
+from os.path import join, abspath, dirname
+
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+
+# Check: Extra modules which are required by core test suite
+from tools.utils import check_required_modules
+check_required_modules(['prettytable', 'serial'])
+
+# Imports related to mbed build api
+from tools.build_api import mcu_toolchain_matrix
+
+# Imports from TEST API
+from tools.test_api import SingleTestRunner
+from tools.test_api import singletest_in_cli_mode
+from tools.test_api import detect_database_verbose
+from tools.test_api import get_json_data_from_file
+from tools.test_api import get_avail_tests_summary_table
+from tools.test_api import get_default_test_options_parser
+from tools.test_api import print_muts_configuration_from_json
+from tools.test_api import print_test_configuration_from_json
+from tools.test_api import get_autodetected_MUTS_list
+from tools.test_api import get_autodetected_TEST_SPEC
+from tools.test_api import get_module_avail
+from tools.test_exporters import ReportExporter, ResultExporterType
+
+
+# Importing extra modules which can be not installed but if available they can extend test suite functionality
+try:
+    import mbed_lstools
+    from tools.compliance.ioper_runner import IOperTestRunner
+    from tools.compliance.ioper_runner import get_available_oper_test_scopes
+except ImportError:
+    # Optional, best-effort dependencies: missing modules simply disable the
+    # extra functionality. Narrowed from a bare "except:" so that SystemExit
+    # and KeyboardInterrupt are no longer swallowed here.
+    pass
+
+def get_version():
+    """Return this test script's version as a (major, minor) tuple."""
+    major, minor = 1, 5
+    return (major, minor)
+
+
+if __name__ == '__main__':
+    # Entry point: parse CLI options, handle the informational/early-exit
+    # flags, build the MUTs/test-spec configuration (auto-detected or from
+    # JSON files) and finally run the single-test suite.
+    # Command line options
+    parser = get_default_test_options_parser()
+
+    parser.description = """This script allows you to run mbed defined test cases for particular MCU(s) and corresponding toolchain(s)."""
+    parser.epilog = """Example: singletest.py -i test_spec.json -M muts_all.json"""
+
+    (opts, args) = parser.parse_args()
+
+    # Print script version
+    if opts.version:
+        print parser.description
+        print parser.epilog
+        print "Version %d.%d"% get_version()
+        exit(0)
+
+    if opts.db_url and opts.verbose_test_configuration_only:
+        detect_database_verbose(opts.db_url)
+        exit(0)
+
+    # Print summary / information about automation test status
+    if opts.test_automation_report:
+        print get_avail_tests_summary_table(platform_filter=opts.general_filter_regex)
+        exit(0)
+
+    # Print the full per-test-case report table
+    if opts.test_case_report:
+        test_case_report_cols = ['id',
+                                 'automated',
+                                 'description',
+                                 'peripherals',
+                                 'host_test',
+                                 'duration',
+                                 'source_dir']
+        print get_avail_tests_summary_table(cols=test_case_report_cols,
+                                            result_summary=False,
+                                            join_delim='\n',
+                                            platform_filter=opts.general_filter_regex)
+        exit(0)
+
+    # Only prints matrix of supported toolchains
+    if opts.supported_toolchains:
+        print mcu_toolchain_matrix(platform_filter=opts.general_filter_regex)
+        exit(0)
+
+    test_spec = None
+    MUTs = None
+
+    if hasattr(opts, 'auto_detect') and opts.auto_detect:
+        # If auto_detect attribute is present, we assume other auto-detection
+        # parameters like 'toolchains_filter' are also set.
+        print "MBEDLS: Detecting connected mbed-enabled devices... "
+
+        MUTs = get_autodetected_MUTS_list()
+
+        for mut in MUTs.values():
+            print "MBEDLS: Detected %s, port: %s, mounted: %s"% (mut['mcu_unique'] if 'mcu_unique' in mut else mut['mcu'],
+                                                                 mut['port'],
+                                                                 mut['disk'])
+
+        # Set up parameters for test specification filter function (we need to set toolchains per target here)
+        use_default_toolchain = 'default' in opts.toolchains_filter.split(',') if opts.toolchains_filter is not None else True
+        use_supported_toolchains = 'all' in opts.toolchains_filter.split(',') if opts.toolchains_filter is not None else False
+        toolchain_filter = opts.toolchains_filter
+        platform_name_filter = opts.general_filter_regex.split(',') if opts.general_filter_regex is not None else opts.general_filter_regex
+        # Test specification with information about each target and associated toolchain
+        test_spec = get_autodetected_TEST_SPEC(MUTs.values(),
+                                               use_default_toolchain=use_default_toolchain,
+                                               use_supported_toolchains=use_supported_toolchains,
+                                               toolchain_filter=toolchain_filter,
+                                               platform_name_filter=platform_name_filter)
+    else:
+        # Open file with test specification
+        # test_spec_filename tells script which targets and their toolchain(s)
+        # should be covered by the test scenario
+        opts.auto_detect = False
+        test_spec = get_json_data_from_file(opts.test_spec_filename) if opts.test_spec_filename else None
+        if test_spec is None:
+            if not opts.test_spec_filename:
+                parser.print_help()
+            exit(-1)
+
+        # Get extra MUTs if applicable
+        MUTs = get_json_data_from_file(opts.muts_spec_filename) if opts.muts_spec_filename else None
+
+        if MUTs is None:
+            if not opts.muts_spec_filename:
+                parser.print_help()
+            exit(-1)
+
+    if opts.verbose_test_configuration_only:
+        print "MUTs configuration in %s:" % ('auto-detected' if opts.auto_detect else opts.muts_spec_filename)
+        if MUTs:
+            print print_muts_configuration_from_json(MUTs, platform_filter=opts.general_filter_regex)
+        print
+        print "Test specification in %s:" % ('auto-detected' if opts.auto_detect else opts.test_spec_filename)
+        if test_spec:
+            print print_test_configuration_from_json(test_spec)
+        exit(0)
+
+    # Interoperability checks are only available when mbed_lstools imported above
+    if get_module_avail('mbed_lstools'):
+        if opts.operability_checks:
+            # Check if test scope is valid and run tests
+            test_scope = get_available_oper_test_scopes()
+            if opts.operability_checks in test_scope:
+                tests = IOperTestRunner(scope=opts.operability_checks)
+                test_results = tests.run()
+
+                # Export results in form of JUnit XML report to separate file
+                if opts.report_junit_file_name:
+                    report_exporter = ReportExporter(ResultExporterType.JUNIT_OPER)
+                    report_exporter.report_to_file(test_results, opts.report_junit_file_name)
+            else:
+                print "Unknown interoperability test scope name: '%s'" % (opts.operability_checks)
+                print "Available test scopes: %s" % (','.join(["'%s'" % n for n in test_scope]))
+
+            exit(0)
+
+    # Verbose test specification and MUTs configuration
+    if MUTs and opts.verbose:
+        print print_muts_configuration_from_json(MUTs)
+    if test_spec and opts.verbose:
+        print print_test_configuration_from_json(test_spec)
+
+    if opts.only_build_tests:
+        # We are skipping testing phase, and suppress summary
+        opts.suppress_summary = True
+
+    # Hand every relevant CLI option over to the test runner
+    single_test = SingleTestRunner(_global_loops_count=opts.test_global_loops_value,
+                                   _test_loops_list=opts.test_loops_list,
+                                   _muts=MUTs,
+                                   _clean=opts.clean,
+                                   _opts_db_url=opts.db_url,
+                                   _opts_log_file_name=opts.log_file_name,
+                                   _opts_report_html_file_name=opts.report_html_file_name,
+                                   _opts_report_junit_file_name=opts.report_junit_file_name,
+                                   _opts_report_build_file_name=opts.report_build_file_name,
+                                   _test_spec=test_spec,
+                                   _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
+                                   _opts_goanna_for_tests=opts.goanna_for_tests,
+                                   _opts_shuffle_test_order=opts.shuffle_test_order,
+                                   _opts_shuffle_test_seed=opts.shuffle_test_seed,
+                                   _opts_test_by_names=opts.test_by_names,
+                                   _opts_peripheral_by_names=opts.peripheral_by_names,
+                                   _opts_test_only_peripheral=opts.test_only_peripheral,
+                                   _opts_test_only_common=opts.test_only_common,
+                                   _opts_verbose_skipped_tests=opts.verbose_skipped_tests,
+                                   _opts_verbose_test_result_only=opts.verbose_test_result_only,
+                                   _opts_verbose=opts.verbose,
+                                   _opts_firmware_global_name=opts.firmware_global_name,
+                                   _opts_only_build_tests=opts.only_build_tests,
+                                   _opts_parallel_test_exec=opts.parallel_test_exec,
+                                   _opts_suppress_summary=opts.suppress_summary,
+                                   _opts_test_x_toolchain_summary=opts.test_x_toolchain_summary,
+                                   _opts_copy_method=opts.copy_method,
+                                   _opts_mut_reset_type=opts.mut_reset_type,
+                                   _opts_jobs=opts.jobs,
+                                   _opts_waterfall_test=opts.waterfall_test,
+                                   _opts_consolidate_waterfall_test=opts.consolidate_waterfall_test,
+                                   _opts_extend_test_timeout=opts.extend_test_timeout,
+                                   _opts_auto_detect=opts.auto_detect)
+
+    # Runs test suite in CLI mode
+    if (singletest_in_cli_mode(single_test)):
+        exit(0)
+    else:
+        exit(-1)
diff --git a/tools/size.py b/tools/size.py
new file mode 100644
index 0000000..0e19ae3
--- /dev/null
+++ b/tools/size.py
@@ -0,0 +1,121 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import sys
+from os.path import join, abspath, dirname, exists, splitext
+from subprocess import Popen, PIPE
+import csv
+from collections import defaultdict
+
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+from tools.paths import BUILD_DIR, TOOLS_DATA
+from tools.settings import GCC_ARM_PATH
+from tools.tests import TEST_MAP
+from tools.build_api import build_mbed_libs, build_project
+
+# Full path to the GNU size utility inside the configured GCC ARM toolchain.
+SIZE = join(GCC_ARM_PATH, 'arm-none-eabi-size')
+
+def get_size(path):
+    """Run arm-none-eabi-size on *path*; return its four size columns as ints."""
+    stdout = Popen([SIZE, path], stdout=PIPE).communicate()[0]
+    # Second output line holds the numbers; keep the first four columns.
+    fields = stdout.splitlines()[1].split()[:4]
+    return [int(field) for field in fields]
+
+def get_percentage(before, after):
+    """Return the relative change from *before* to *after*, in percent."""
+    if before == 0:
+        # Degenerate baseline: no growth counts as 0, any growth as 100%.
+        if after == 0:
+            return 0
+        return 100.0
+    delta = after - before
+    return 100.0 * float(delta) / float(before)
+
+def human_size(val):
+    """Format a byte count; values above 1024 are rendered as whole 'Kb'."""
+    if val <= 1024:
+        return "%d" % val
+    return "%.0fKb" % (val / 1024.0)
+
+def print_diff(name, before, after):
+    # Print a one-line before/after size comparison with the percentage change.
+    print "%s: (%s -> %s) %.2f%%" % (name, human_size(before) , human_size(after) , get_percentage(before , after))
+
+# (test_id in TEST_MAP, human-readable benchmark name) pairs built by benchmarks().
+BENCHMARKS = [
+    ("BENCHMARK_1", "CENV"),
+    ("BENCHMARK_2", "PRINTF"),
+    ("BENCHMARK_3", "FP"),
+    ("BENCHMARK_4", "MBED"),
+    ("BENCHMARK_5", "ALL"),
+]
+# CSV file where benchmark section sizes are recorded and later re-read by compare().
+BENCHMARK_DATA_PATH = join(TOOLS_DATA, 'benchmarks.csv')
+
+
+def benchmarks():
+    """Build every benchmark for each toolchain/target pair and record the
+    resulting section sizes (code/data/bss/flash) in BENCHMARK_DATA_PATH.
+    """
+    # Open the CSV through a context manager so the data is flushed and the
+    # handle closed even when a build raises (the original leaked the handle).
+    with open(BENCHMARK_DATA_PATH, 'wb') as csv_file:
+        csv_data = csv.writer(csv_file)
+        csv_data.writerow(['Toolchain', "Target", "Benchmark", "code", "data", "bss", "flash"])
+
+        # Build
+        for toolchain in ['ARM', 'uARM', 'GCC_CR', 'GCC_ARM']:
+            for mcu in ["LPC1768", "LPC11U24"]:
+                # Build Libraries
+                build_mbed_libs(mcu, toolchain)
+
+                # Build benchmarks
+                build_dir = join(BUILD_DIR, "benchmarks", mcu, toolchain)
+                for test_id, title in BENCHMARKS:
+                    # Build Benchmark
+                    try:
+                        test = TEST_MAP[test_id]
+                        path = build_project(test.source_dir, join(build_dir, test_id),
+                                             mcu, toolchain, test.dependencies)
+                        # Measure the ELF next to the produced image.
+                        base = splitext(path)[0]
+                        # Check Size
+                        code, data, bss, flash = get_size(base + '.elf')
+                        csv_data.writerow([toolchain, mcu, title, code, data, bss, flash])
+                    except Exception, e:
+                        # Best-effort: report the failure and continue with the next benchmark.
+                        print "Unable to build %s for toolchain %s targeting %s" % (test_id, toolchain, mcu)
+                        print e
+
+
+def compare(t1, t2, target):
+    """Compare recorded benchmark sizes between toolchains *t1* and *t2*
+    for the given *target*, printing a per-benchmark diff.
+    """
+    if not exists(BENCHMARK_DATA_PATH):
+        benchmarks()
+    else:
+        print "Loading: %s" % BENCHMARK_DATA_PATH
+
+    benchmarks_data = defaultdict(dict)
+    # Read the CSV inside a context manager (the original never closed the
+    # file) and avoid rebinding the iterated reader's name inside its own
+    # loop, which the original did by reusing 'data' three times.
+    with open(BENCHMARK_DATA_PATH, 'rb') as csv_file:
+        for (toolchain, mcu, name, code, data, bss, flash) in csv.reader(csv_file):
+            # The header row never matches a real target name, so it is skipped.
+            if target == mcu:
+                for t in [t1, t2]:
+                    if toolchain == t:
+                        benchmarks_data[name][t] = map(int, (code, data, bss, flash))
+
+    print "%s vs %s for %s" % (t1, t2, target)
+    for name, size_data in benchmarks_data.iteritems():
+        try:
+            # Check Size
+            code_a, data_a, bss_a, flash_a = size_data[t1]
+            code_u, data_u, bss_u, flash_u = size_data[t2]
+
+            print "\n=== %s ===" % name
+            print_diff("code", code_a , code_u)
+            print_diff("data", data_a , data_u)
+            print_diff("bss", bss_a , bss_u)
+            print_diff("flash", flash_a , flash_u)
+        except Exception, e:
+            # Missing data for one of the toolchains.
+            print "No data for benchmark %s" % (name)
+            print e
+
+
+if __name__ == '__main__':
+    # compare() takes two toolchains and a target; the original call passed
+    # only two arguments, which raised a TypeError at startup.
+    compare("GCC_CR", "GCC_ARM", "LPC1768")
diff --git a/tools/synch.py b/tools/synch.py
new file mode 100644
index 0000000..4efeb2c
--- /dev/null
+++ b/tools/synch.py
@@ -0,0 +1,372 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+One repository to update them all
+On mbed.org the mbed SDK is split up in multiple repositories, this script takes
+care of updating them all.
+"""
+import sys
+from copy import copy
+from os import walk, remove, makedirs
+from os.path import join, abspath, dirname, relpath, exists, isfile
+from shutil import copyfile
+from optparse import OptionParser
+import re
+import string
+
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+from tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR
+from tools.paths import *
+from tools.utils import run_cmd
+
+MBED_URL = "mbed.org"
+MBED_USER = "mbed_official"
+
+changed = []
+push_remote = True
+quiet = False
+commit_msg = ''
+
+# Code that does have a mirror in the mbed SDK
+# Tuple data: (repo_name, list_of_code_dirs, [team])
+# team is optional - if not specified, the code is published under mbed_official
+OFFICIAL_CODE = (
+ ("mbed-dev" , MBED_BASE),
+ ("mbed-rtos", RTOS),
+ ("mbed-dsp" , DSP),
+ ("mbed-rpc" , MBED_RPC),
+
+ ("lwip" , LWIP_SOURCES+"/lwip"),
+ ("lwip-sys", LWIP_SOURCES+"/lwip-sys"),
+ ("Socket" , LWIP_SOURCES+"/Socket"),
+
+ ("lwip-eth" , ETH_SOURCES+"/lwip-eth"),
+ ("EthernetInterface", ETH_SOURCES+"/EthernetInterface"),
+
+ ("USBDevice", USB),
+ ("USBHost" , USB_HOST),
+
+ ("CellularModem", CELLULAR_SOURCES),
+ ("CellularUSBModem", CELLULAR_USB_SOURCES),
+ ("UbloxUSBModem", UBLOX_SOURCES),
+ ("UbloxModemHTTPClientTest", [TEST_DIR+"/net/cellular/http/common", TEST_DIR+"/net/cellular/http/ubloxusb"]),
+ ("UbloxModemSMSTest", [TEST_DIR+"/net/cellular/sms/common", TEST_DIR+"/net/cellular/sms/ubloxusb"]),
+ ("FATFileSystem", FAT_FS, "mbed-official"),
+)
+
+
+# Code that does have dependencies to libraries should point to
+# the latest revision. By default, they point to a specific revision.
+CODE_WITH_DEPENDENCIES = (
+ # Libraries
+ "EthernetInterface",
+
+ # RTOS Examples
+ "rtos_basic",
+ "rtos_isr",
+ "rtos_mail",
+ "rtos_mutex",
+ "rtos_queue",
+ "rtos_semaphore",
+ "rtos_signals",
+ "rtos_timer",
+
+ # Net Examples
+ "TCPEchoClient",
+ "TCPEchoServer",
+ "TCPSocket_HelloWorld",
+ "UDPSocket_HelloWorld",
+ "UDPEchoClient",
+ "UDPEchoServer",
+ "BroadcastReceive",
+ "BroadcastSend",
+
+ # mbed sources
+ "mbed-src-program",
+)
+
+# A list of regular expressions that will be checked against each directory
+# name and skipped if they match.
+IGNORE_DIRS = (
+)
+
+# Regular expressions (note: matched with re.search, not raw strings) for
+# files that must never be mirrored: license files, markdown documents and
+# mbed library/build reference files.
+IGNORE_FILES = (
+    'COPYING',
+    '\.md',
+    "\.lib",
+    "\.bld"
+)
+
+def ignore_path(name, reg_exps):
+    """Return True when *name* matches any of the given regular expressions."""
+    return any(re.search(pattern, name) for pattern in reg_exps)
+
+class MbedRepository:
+    """Local Mercurial checkout of an mbed.org repository, with helpers to
+    clone/update it and to publish (commit and optionally push) changes.
+    """
+    @staticmethod
+    def run_and_print(command, cwd):
+        # Run an external command in *cwd* and echo its captured stdout.
+        stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
+        print(stdout)
+
+    def __init__(self, name, team = None):
+        self.name = name
+        self.path = join(MBED_ORG_PATH, name)
+        # Team repositories live under /teams/<team>/, personal repositories
+        # under /users/<MBED_USER>/.
+        if team is None:
+            self.url = "http://" + MBED_URL + "/users/" + MBED_USER + "/code/%s/"
+        else:
+            self.url = "http://" + MBED_URL + "/teams/" + team + "/code/%s/"
+        if not exists(self.path):
+            # Checkout code
+            if not exists(MBED_ORG_PATH):
+                makedirs(MBED_ORG_PATH)
+
+            self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)
+
+        else:
+            # Update
+            self.run_and_print(['hg', 'pull'], cwd=self.path)
+            self.run_and_print(['hg', 'update'], cwd=self.path)
+
+    def publish(self):
+        # The maintainer has to evaluate the changes first and explicitly accept them
+        self.run_and_print(['hg', 'addremove'], cwd=self.path)
+        stdout, _, _ = run_cmd(['hg', 'status'], wd=self.path)
+        if stdout == '':
+            print "No changes"
+            return False
+        print stdout
+        # In quiet mode commit unconditionally; otherwise ask the maintainer.
+        if quiet:
+            commit = 'Y'
+        else:
+            commit = raw_input(push_remote and "Do you want to commit and push? Y/N: " or "Do you want to commit? Y/N: ")
+        if commit == 'Y':
+            args = ['hg', 'commit', '-u', MBED_ORG_USER]
+            if commit_msg:
+                args = args + ['-m', commit_msg]
+            self.run_and_print(args, cwd=self.path)
+            if push_remote:
+                self.run_and_print(['hg', 'push'], cwd=self.path)
+        # NOTE(review): True is returned even when the maintainer declines the
+        # commit, so callers still record the repository as "changed" -- confirm
+        # that this is intended.
+        return True
+
+# Check if a file is a text file or a binary file
+# Taken from http://code.activestate.com/recipes/173220/
+text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
+_null_trans = string.maketrans("", "")
+def is_text_file(filename):
+    # Heuristic text/binary detection: sample the first KB of the file and
+    # measure the proportion of non-printable characters.
+    block_size = 1024
+    def istext(s):
+        if "\0" in s:
+            return 0
+
+        if not s: # Empty files are considered text
+            return 1
+
+        # Get the non-text characters (maps a character to itself then
+        # use the 'remove' option to get rid of the text characters.)
+        t = s.translate(_null_trans, text_characters)
+
+        # If more than 30% non-text characters, then
+        # this is considered a binary file
+        if float(len(t))/len(s) > 0.30:
+            return 0
+        return 1
+    with open(filename) as f:
+        res = istext(f.read(block_size))
+    return res
+
+# Return the line ending type for the given file ('cr' or 'crlf')
+def get_line_endings(f):
+    """Inspect the first KB of file *f*: 'crlf' when most sampled lines end
+    in \\r\\n, otherwise 'cr' (which here effectively means plain \\n).
+    """
+    examine_size = 1024
+    try:
+        # Context manager guarantees the handle is closed even when
+        # readlines() raises (the original could leak it on error).
+        with open(f, "rb") as tf:
+            lines = tf.readlines(examine_size)
+        ncrlf = sum(1 for l in lines if l.endswith("\r\n"))
+        return 'crlf' if ncrlf > len(lines) >> 1 else 'cr'
+    except Exception:
+        # Unreadable files default to 'cr', matching the original behaviour
+        # (narrowed from a bare except so SystemExit/KeyboardInterrupt escape).
+        return 'cr'
+
+# Copy file to destination, but preserve destination line endings if possible
+# This prevents very annoying issues with huge diffs that appear because of
+# differences in line endings
+def copy_with_line_endings(sdk_file, repo_file):
+    """Copy *sdk_file* over *repo_file*, converting line endings so the
+    destination keeps the convention it already uses.
+    """
+    if not isfile(repo_file):
+        copyfile(sdk_file, repo_file)
+        return
+    is_text = is_text_file(repo_file)
+    if is_text:
+        sdk_le = get_line_endings(sdk_file)
+        repo_le = get_line_endings(repo_file)
+    # Binary files, and text files that already agree, are copied verbatim.
+    if not is_text or sdk_le == repo_le:
+        copyfile(sdk_file, repo_file)
+    else:
+        print "Converting line endings in '%s' to '%s'" % (abspath(repo_file), repo_le)
+        # Use context managers so both handles are closed even on error
+        # (the original opened/closed them manually and could leak on raise).
+        with open(sdk_file, "rb") as f:
+            data = f.read()
+        data = data.replace("\r\n", "\n") if repo_le == 'cr' else data.replace('\n','\r\n')
+        with open(repo_file, "wb") as f:
+            f.write(data)
+
+def visit_files(path, visit):
+    """Walk *path* and call *visit* on every file surviving the ignore filters."""
+    for root, dirs, files in walk(path):
+        # Prune hidden and explicitly ignored directories in place so that
+        # os.walk never descends into them (iterate over a copy while mutating).
+        for d in copy(dirs):
+            full = join(root, d)
+            if d.startswith('.'):
+                dirs.remove(d)
+            if ignore_path(full, IGNORE_DIRS):
+                print "Skipping '%s'" % full
+                dirs.remove(d)
+
+        # 'fname' instead of 'file' to avoid shadowing the builtin.
+        for fname in files:
+            if not ignore_path(fname, IGNORE_FILES):
+                visit(join(root, fname))
+
+
+def update_repo(repo_name, sdk_paths, team_name):
+    """Mirror the given SDK directories into the named mbed.org repository,
+    delete files that no longer exist in the SDK, then publish the result.
+    """
+    repo = MbedRepository(repo_name, team_name)
+    # copy files from mbed SDK to mbed_official repository
+    def visit_mbed_sdk(sdk_file):
+        # Note: 'sdk_path' is the loop variable of the for-loop below; the
+        # closure sees the tree currently being walked.
+        repo_file = join(repo.path, relpath(sdk_file, sdk_path))
+
+        repo_dir = dirname(repo_file)
+        if not exists(repo_dir):
+            makedirs(repo_dir)
+
+        copy_with_line_endings(sdk_file, repo_file)
+    for sdk_path in sdk_paths:
+        visit_files(sdk_path, visit_mbed_sdk)
+
+    # remove repository files that do not exist in the mbed SDK
+    def visit_repo(repo_file):
+        for sdk_path in sdk_paths:
+            sdk_file = join(sdk_path, relpath(repo_file, repo.path))
+            if exists(sdk_file):
+                break
+        else:
+            # for/else: no SDK tree contains this file any more -> delete it.
+            remove(repo_file)
+            print "remove: %s" % repo_file
+    visit_files(repo.path, visit_repo)
+
+    if repo.publish():
+        changed.append(repo_name)
+
+
+def update_code(repositories):
+    """Synchronise every (repo_name, sdk_dir[, team]) entry in *repositories*."""
+    for r in repositories:
+        repo_name, sdk_dir = r[0], r[1]
+        # The optional third element names the team owning the repository.
+        team_name = r[2] if len(r) == 3 else None
+        print '\n=== Updating "%s" ===' % repo_name
+        # Normalise to a list of SDK paths; idiomatic isinstance() instead of
+        # the original type() identity comparison.
+        sdk_dirs = sdk_dir if isinstance(sdk_dir, list) else [sdk_dir]
+        update_repo(repo_name, sdk_dirs, team_name)
+
+def update_single_repo(repo):
+    """Synchronise only the official repository named *repo*, if known."""
+    matches = [entry for entry in OFFICIAL_CODE if entry[0] == repo]
+    if matches:
+        update_code(matches)
+    else:
+        print "Repository '%s' not found" % repo
+
+def update_dependencies(repositories):
+    # Rewrite each dependency-reference file in the given repositories so it
+    # points at the latest revision of the library it references.
+    for repo_name in repositories:
+        print '\n=== Updating "%s" ===' % repo_name
+        repo = MbedRepository(repo_name)
+
+        # point to the latest libraries
+        def visit_repo(repo_file):
+            with open(repo_file, "r") as f:
+                url = f.read()
+            with open(repo_file, "w") as f:
+                # Drop everything after the last '/' (the pinned revision) so
+                # the reference follows the library tip.
+                f.write(url[:(url.rindex('/')+1)])
+        # NOTE(review): visit_files() is defined above with two parameters but
+        # is called here with four, and MBED_REPO_EXT is not defined in this
+        # module (presumably expected from a star-import of tools.paths) --
+        # as written this call would raise at runtime; confirm the intended
+        # visit_files() signature and the MBED_REPO_EXT source.
+        visit_files(repo.path, visit_repo, None, MBED_REPO_EXT)
+
+        if repo.publish():
+            changed.append(repo_name)
+
+
+def update_mbed():
+    # Publish the prebuilt mbed library (from the build output directory)
+    # to the "mbed" repository under the default (mbed_official) account.
+    update_repo("mbed", [join(BUILD_DIR, "mbed")], None)
+
+def do_sync(options):
+    """Run the requested synchronisation steps and return the list of
+    repositories that actually changed.
+    """
+    global push_remote, quiet, commit_msg, changed
+
+    push_remote = not options.nopush
+    quiet = options.quiet
+    commit_msg = options.msg
+    # Reset the module-level change list. The original assigned to a
+    # misspelled name ('chnaged'), so 'changed' was never actually cleared
+    # despite being declared global above.
+    changed = []
+
+    if options.code:
+        update_code(OFFICIAL_CODE)
+
+    if options.dependencies:
+        update_dependencies(CODE_WITH_DEPENDENCIES)
+
+    if options.mbed:
+        update_mbed()
+
+    if options.repo:
+        update_single_repo(options.repo)
+
+    if changed:
+        print "Repositories with changes:", changed
+
+    return changed
+
+if __name__ == '__main__':
+    # Command-line entry point: build the option parser and run do_sync().
+    parser = OptionParser()
+
+    parser.add_option("-c", "--code",
+                      action="store_true", default=False,
+                      help="Update the mbed_official code")
+
+    parser.add_option("-d", "--dependencies",
+                      action="store_true", default=False,
+                      help="Update the mbed_official code dependencies")
+
+    parser.add_option("-m", "--mbed",
+                      action="store_true", default=False,
+                      help="Release a build of the mbed library")
+
+    parser.add_option("-n", "--nopush",
+                      action="store_true", default=False,
+                      help="Commit the changes locally only, don't push them")
+
+    parser.add_option("", "--commit_message",
+                      action="store", type="string", default='', dest='msg',
+                      help="Commit message to use for all the commits")
+
+    parser.add_option("-r", "--repository",
+                      action="store", type="string", default='', dest='repo',
+                      help="Synchronize only the given repository")
+
+    # Help-text typo fixed: "commiting" -> "committing".
+    parser.add_option("-q", "--quiet",
+                      action="store_true", default=False,
+                      help="Don't ask for confirmation before committing or pushing")
+
+    (options, args) = parser.parse_args()
+
+    do_sync(options)
+
diff --git a/tools/targets.py b/tools/targets.py
new file mode 100644
index 0000000..806d8b7
--- /dev/null
+++ b/tools/targets.py
@@ -0,0 +1,385 @@
+"""
+mbed SDK
+Copyright (c) 2011-2016 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+CORE_LABELS = {
+ "ARM7TDMI-S": ["ARM7", "LIKE_CORTEX_ARM7"],
+ "Cortex-M0" : ["M0", "CORTEX_M", "LIKE_CORTEX_M0"],
+ "Cortex-M0+": ["M0P", "CORTEX_M", "LIKE_CORTEX_M0"],
+ "Cortex-M1" : ["M1", "CORTEX_M", "LIKE_CORTEX_M1"],
+ "Cortex-M3" : ["M3", "CORTEX_M", "LIKE_CORTEX_M3"],
+ "Cortex-M4" : ["M4", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M4"],
+ "Cortex-M4F" : ["M4", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M4"],
+ "Cortex-M7" : ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7"],
+ "Cortex-M7F" : ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7"],
+ "Cortex-A9" : ["A9", "CORTEX_A", "LIKE_CORTEX_A9"]
+}
+
+import os
+import binascii
+import struct
+import shutil
+from tools.patch import patch
+from paths import TOOLS_BOOTLOADERS
+import json
+import inspect
+import sys
+
+
+########################################################################################################################
+# Generic Target class that reads and interprets the data in targets.json
+
+# A simple class that represents all the exceptions associated with hooking
+class HookError(Exception):
+    # Raised by Target.init_hooks() when a post-binary hook specification is
+    # malformed or references a missing class/function in this module.
+    pass
+
+# A simple decorator used for automatically caching data returned by a function
+caches = {}
+def cached(func):
+    """Memoise *func*: compute once, then always return the first result.
+
+    The cache key is the function object itself, so any arguments are
+    ignored after the first call -- intended for zero-argument loaders.
+    """
+    def wrapper(*args, **kwargs):
+        if func not in caches:
+            caches[func] = func(*args, **kwargs)
+        return caches[func]
+    return wrapper
+
+class Target:
+    """One mbed build target, backed by the data in hal/targets.json.
+
+    Attributes are resolved lazily through the JSON inheritance hierarchy
+    (see __getattr__ / __getattr_helper) and cached per instance.
+    """
+    # Cumulative attributes can have values appended to them, so they
+    # need to be computed differently than regular attributes
+    __cumulative_attributes = ['extra_labels', 'macros', 'device_has']
+
+    # Utility function: traverse a dictionary and change all the strings in the dictionary to
+    # ASCII from Unicode. Needed because the original mbed target definitions were written in
+    # Python and used only ASCII strings, but the Python JSON decoder always returns Unicode
+    # Based on http://stackoverflow.com/a/13105359
+    @staticmethod
+    def to_ascii(input):
+        if isinstance(input, dict):
+            return dict([(Target.to_ascii(key), Target.to_ascii(value)) for key, value in input.iteritems()])
+        elif isinstance(input, list):
+            return [Target.to_ascii(element) for element in input]
+        elif isinstance(input, unicode):
+            return input.encode('ascii')
+        else:
+            return input
+
+    # Load the description of JSON target data
+    @staticmethod
+    @cached
+    def get_json_target_data():
+        with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../hal/targets.json"), "rt") as f:
+            return Target.to_ascii(json.load(f))
+
+    # Get the members of this module using Python's "inspect" module
+    @staticmethod
+    @cached
+    def get_module_data():
+        return dict([(m[0], m[1]) for m in inspect.getmembers(sys.modules[__name__])])
+
+    # Return the order in which target descriptions are searched for attributes
+    # This mimics the Python 2.2 method resolution order, which is what the old targets.py module used
+    # For more details, check http://makina-corpus.com/blog/metier/2014/python-tutorial-understanding-python-mro-class-search-path
+    # The resolution order contains (name, level) tuples, where "name" is the name of the class and "level"
+    # is the level in the inheritance hierarchy (the target itself is at level 0, its first parent at level 1,
+    # its parent's parent at level 2 and so on)
+    def __get_resolution_order(self, target_name, order, level = 0):
+        if not target_name in [l[0] for l in order]: # the resolution order can't contain duplicate target names
+            order.append((target_name, level))
+        parents = self.get_json_target_data()[target_name].get("inherits", [])
+        for p in parents:
+            order = self.__get_resolution_order(p, order, level + 1)
+        return order
+
+    # Modify the exporter specification ("progen") by changing all "template" keys to full paths
+    @staticmethod
+    def __add_paths_to_progen(data):
+        out = {}
+        for key, value in data.items():
+            if isinstance(value, dict):
+                out[key] = Target.__add_paths_to_progen(value)
+            elif key == "template":
+                out[key] = [os.path.join(os.path.dirname(__file__), 'export', v) for v in value]
+            else:
+                out[key] = value
+        return out
+
+    # Compute the value of a given target attribute
+    def __getattr_helper(self, attrname):
+        tdata = self.get_json_target_data()
+        if attrname in self.__cumulative_attributes:
+            # For a cumulative attribute, figure out when it was defined the last time (in attribute
+            # resolution order) then follow the "_add" and "_remove" data fields
+            for idx, t in enumerate(self.resolution_order):
+                if attrname in tdata[t[0]]: # the attribute was defined at this level in the resolution order
+                    def_idx = idx
+                    break
+            else:
+                raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name))
+            # Get the starting value of the attribute
+            v = (tdata[self.resolution_order[def_idx][0]][attrname] or [])[:]
+            # Traverse the resolution list in high inheritance to low inheritance level, left to right order
+            # to figure out all the other classes that change the definition by adding or removing elements
+            for idx in xrange(self.resolution_order[def_idx][1] - 1, -1, -1):
+                same_level_targets = [t[0] for t in self.resolution_order if t[1] == idx]
+                for t in same_level_targets:
+                    data = tdata[t]
+                    # Do we have anything to add ?
+                    if data.has_key(attrname + "_add"):
+                        v.extend(data[attrname + "_add"])
+                    # Do we have anything to remove ?
+                    if data.has_key(attrname + "_remove"):
+                        # Macros can be defined either without a value (MACRO) or with a value (MACRO=10).
+                        # When removing, we specify only the name of the macro, without the value. So we need
+                        # to create a mapping between the macro name and its value. This will work for
+                        # extra_labels and other type of arrays as well, since they fall into the "macros
+                        # without a value" category (simple definitions without a value).
+                        name_def_map = {}
+                        for crtv in v:
+                            if crtv.find('=') != -1:
+                                temp = crtv.split('=')
+                                if len(temp) != 2:
+                                    raise ValueError("Invalid macro definition '%s'" % crtv)
+                                name_def_map[temp[0]] = crtv
+                            else:
+                                name_def_map[crtv] = crtv
+                        for e in data[attrname + "_remove"]:
+                            if not e in name_def_map:
+                                raise ValueError("Unable to remove '%s' in '%s.%s' since it doesn't exist" % (e, self.name, attrname))
+                            v.remove(name_def_map[e])
+            return v
+        # Look for the attribute in the class and its parents, as defined by the resolution order
+        v = None
+        for t in self.resolution_order:
+            data = tdata[t[0]]
+            if data.has_key(attrname):
+                v = data[attrname]
+                break
+        else: # Attribute not found
+            raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name))
+        # 'progen' needs the full path to the template (the path in JSON is relative to workspace_tools/export)
+        return v if attrname != "progen" else self.__add_paths_to_progen(v)
+
+    # Return the value of an attribute
+    # This function only looks for the attribute's value in the cache, the real work of computing the
+    # attribute's value is done in the function above (__getattr_helper)
+    def __getattr__(self, attrname):
+        if not self.attr_cache.has_key(attrname):
+            self.attr_cache[attrname] = self.__getattr_helper(attrname)
+        return self.attr_cache[attrname]
+
+    def __init__(self, name):
+        self.name = name
+
+        # Compute resolution order once (it will be used later in __getattr__)
+        self.resolution_order = self.__get_resolution_order(self.name, [])
+
+        # Attribute cache: once an attribute's value is computed, don't compute it again
+        self.attr_cache = {}
+
+    def program_cycle_s(self):
+        # Presumably the delay (in seconds) after programming before the board
+        # re-enumerates -- confirm with callers. JSON data may override it;
+        # otherwise fall back to a default based on the disk type.
+        try:
+            return self.__getattr__("program_cycle_s")
+        except AttributeError:
+            return 4 if self.is_disk_virtual else 1.5
+
+    def get_labels(self):
+        # Build labels: target name, core-derived labels and extra labels.
+        return [self.name] + CORE_LABELS[self.core] + self.extra_labels
+
+    # For now, this function only allows "post binary" hooks (hooks that are executed after
+    # the binary image is extracted from the executable file)
+    def init_hooks(self, hook, toolchain_name):
+        # If there's no hook, simply return
+        try:
+            hook_data = self.post_binary_hook
+        except AttributeError:
+            return
+        # A hook was found. The hook's name is in the format "classname.functionname"
+        temp = hook_data["function"].split(".")
+        if len(temp) != 2:
+            raise HookError("Invalid format for hook '%s' in target '%s' (must be 'class_name.function_name')" % (hook_data["function"], self.name))
+        class_name, function_name = temp[0], temp[1]
+        # "class_name" must refer to a class in this file, so check if the class exists
+        mdata = self.get_module_data()
+        if not mdata.has_key(class_name) or not inspect.isclass(mdata[class_name]):
+            raise HookError("Class '%s' required by '%s' in target '%s' not found in targets.py" % (class_name, hook_data["function"], self.name))
+        # "function_name" must refer to a static function inside class "class_name"
+        cls = mdata[class_name]
+        if (not hasattr(cls, function_name)) or (not inspect.isfunction(getattr(cls, function_name))):
+            raise HookError("Static function '%s' required by '%s' in target '%s' not found in class '%s'" % (function_name, hook_data["function"], self.name, class_name))
+        # Check if the hook specification also has target restrictions
+        toolchain_restrictions = hook_data.get("toolchains", [])
+        if toolchain_restrictions and (toolchain_name not in toolchain_restrictions):
+            return
+        # Finally, hook the requested function
+        hook.hook_add_binary("post", getattr(cls, function_name))
+
+########################################################################################################################
+# Target specific code goes in this section
+# This code can be invoked from the target description using the "post_binary_hook" key
+
+class LPCTargetCode:
+ @staticmethod
+ def lpc_patch(t_self, resources, elf, binf):
+ t_self.debug("LPC Patch: %s" % os.path.split(binf)[1])
+ patch(binf)
+
+class LPC4088Code:
+ @staticmethod
+ def binary_hook(t_self, resources, elf, binf):
+ if not os.path.isdir(binf):
+ # Regular binary file, nothing to do
+ LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
+ return
+ outbin = open(binf + ".temp", "wb")
+ partf = open(os.path.join(binf, "ER_IROM1"), "rb")
+        # Pad the first part (internal flash) with 0xFF to 512k
+ data = partf.read()
+ outbin.write(data)
+ outbin.write('\xFF' * (512*1024 - len(data)))
+ partf.close()
+ # Read and append the second part (external flash) in chunks of fixed size
+ chunksize = 128 * 1024
+ partf = open(os.path.join(binf, "ER_IROM2"), "rb")
+ while True:
+ data = partf.read(chunksize)
+ outbin.write(data)
+ if len(data) < chunksize:
+ break
+ partf.close()
+ outbin.close()
+ # Remove the directory with the binary parts and rename the temporary
+ # file to 'binf'
+ shutil.rmtree(binf, True)
+ os.rename(binf + '.temp', binf)
+ t_self.debug("Generated custom binary file (internal flash + SPIFI)")
+ LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
+
+class TEENSY3_1Code:
+ @staticmethod
+ def binary_hook(t_self, resources, elf, binf):
+ from intelhex import IntelHex
+ binh = IntelHex()
+ binh.loadbin(binf, offset = 0)
+
+ with open(binf.replace(".bin", ".hex"), "w") as f:
+ binh.tofile(f, format='hex')
+
+class MTSCode:
+ @staticmethod
+ def _combine_bins_helper(target_name, t_self, resources, elf, binf):
+ loader = os.path.join(TOOLS_BOOTLOADERS, target_name, "bootloader.bin")
+ target = binf + ".tmp"
+ if not os.path.exists(loader):
+ print "Can't find bootloader binary: " + loader
+ return
+ outbin = open(target, 'w+b')
+ part = open(loader, 'rb')
+ data = part.read()
+ outbin.write(data)
+ outbin.write('\xFF' * (64*1024 - len(data)))
+ part.close()
+ part = open(binf, 'rb')
+ data = part.read()
+ outbin.write(data)
+ part.close()
+ outbin.seek(0, 0)
+ data = outbin.read()
+ outbin.seek(0, 1)
+        crc = struct.pack('<I', binascii.crc32(data) & 0xFFFFFFFF)
+def get_target_detect_codes():
+    """ Returns dict mapping detect_code -> platform_name
+ """
+ result = {}
+ for target in TARGETS:
+ for detect_code in target.detect_code:
+ result[detect_code] = target.name
+ return result
diff --git a/tools/test_api.py b/tools/test_api.py
new file mode 100644
index 0000000..4df448e
--- /dev/null
+++ b/tools/test_api.py
@@ -0,0 +1,1951 @@
+"""
+mbed SDK
+Copyright (c) 2011-2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+"""
+
+import os
+import re
+import sys
+import json
+import uuid
+import pprint
+import random
+import optparse
+import datetime
+import threading
+import ctypes
+from types import ListType
+from colorama import Fore, Back, Style
+from prettytable import PrettyTable
+
+from time import sleep, time
+from Queue import Queue, Empty
+from os.path import join, exists, basename
+from threading import Thread, Lock
+from subprocess import Popen, PIPE
+
+# Imports related to mbed build api
+from tools.tests import TESTS
+from tools.tests import TEST_MAP
+from tools.paths import BUILD_DIR
+from tools.paths import HOST_TESTS
+from tools.utils import ToolException
+from tools.utils import NotSupportedException
+from tools.utils import construct_enum
+from tools.targets import TARGET_MAP
+from tools.test_db import BaseDBAccess
+from tools.build_api import build_project, build_mbed_libs, build_lib
+from tools.build_api import get_target_supported_toolchains
+from tools.build_api import write_build_report
+from tools.build_api import prep_report
+from tools.build_api import prep_properties
+from tools.build_api import create_result
+from tools.build_api import add_result_to_report
+from tools.libraries import LIBRARIES, LIBRARY_MAP
+from tools.toolchains import TOOLCHAIN_BIN_PATH
+from tools.test_exporters import ReportExporter, ResultExporterType
+
+import tools.host_tests.host_tests_plugins as host_tests_plugins
+
+try:
+ import mbed_lstools
+ from tools.compliance.ioper_runner import get_available_oper_test_scopes
+except:
+ pass
+
+
+class ProcessObserver(Thread):
+ def __init__(self, proc):
+ Thread.__init__(self)
+ self.proc = proc
+ self.queue = Queue()
+ self.daemon = True
+ self.active = True
+ self.start()
+
+ def run(self):
+ while self.active:
+ c = self.proc.stdout.read(1)
+ self.queue.put(c)
+
+ def stop(self):
+ self.active = False
+ try:
+ self.proc.terminate()
+ except Exception, _:
+ pass
+
+
+class SingleTestExecutor(threading.Thread):
+ """ Example: Single test class in separate thread usage
+ """
+ def __init__(self, single_test):
+ self.single_test = single_test
+ threading.Thread.__init__(self)
+
+ def run(self):
+ start = time()
+ # Execute tests depending on options and filter applied
+ test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext = self.single_test.execute()
+ elapsed_time = time() - start
+
+ # Human readable summary
+ if not self.single_test.opts_suppress_summary:
+ # prints well-formed summary with results (SQL table like)
+ print self.single_test.generate_test_summary(test_summary, shuffle_seed)
+ if self.single_test.opts_test_x_toolchain_summary:
+ # prints well-formed summary with results (SQL table like)
+ # table shows text x toolchain test result matrix
+ print self.single_test.generate_test_summary_by_target(test_summary, shuffle_seed)
+ print "Completed in %.2f sec"% (elapsed_time)
+
+
+class SingleTestRunner(object):
+ """ Object wrapper for single test run which may involve multiple MUTs
+ """
+ RE_DETECT_TESTCASE_RESULT = None
+
+ # Return codes for test script
+ TEST_RESULT_OK = "OK"
+ TEST_RESULT_FAIL = "FAIL"
+ TEST_RESULT_ERROR = "ERROR"
+ TEST_RESULT_UNDEF = "UNDEF"
+ TEST_RESULT_IOERR_COPY = "IOERR_COPY"
+ TEST_RESULT_IOERR_DISK = "IOERR_DISK"
+ TEST_RESULT_IOERR_SERIAL = "IOERR_SERIAL"
+ TEST_RESULT_TIMEOUT = "TIMEOUT"
+ TEST_RESULT_NO_IMAGE = "NO_IMAGE"
+ TEST_RESULT_MBED_ASSERT = "MBED_ASSERT"
+ TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"
+ TEST_RESULT_NOT_SUPPORTED = "NOT_SUPPORTED"
+
+ GLOBAL_LOOPS_COUNT = 1 # How many times each test should be repeated
+ TEST_LOOPS_LIST = [] # We redefine no.of loops per test_id
+ TEST_LOOPS_DICT = {} # TEST_LOOPS_LIST in dict format: { test_id : test_loop_count}
+
+ muts = {} # MUTs descriptor (from external file)
+ test_spec = {} # Test specification (from external file)
+
+ # mbed test suite -> SingleTestRunner
+ TEST_RESULT_MAPPING = {"success" : TEST_RESULT_OK,
+ "failure" : TEST_RESULT_FAIL,
+ "error" : TEST_RESULT_ERROR,
+ "ioerr_copy" : TEST_RESULT_IOERR_COPY,
+ "ioerr_disk" : TEST_RESULT_IOERR_DISK,
+ "ioerr_serial" : TEST_RESULT_IOERR_SERIAL,
+ "timeout" : TEST_RESULT_TIMEOUT,
+ "no_image" : TEST_RESULT_NO_IMAGE,
+ "end" : TEST_RESULT_UNDEF,
+ "mbed_assert" : TEST_RESULT_MBED_ASSERT,
+ "build_failed" : TEST_RESULT_BUILD_FAILED,
+ "not_supproted" : TEST_RESULT_NOT_SUPPORTED
+ }
+
+ def __init__(self,
+ _global_loops_count=1,
+ _test_loops_list=None,
+ _muts={},
+ _clean=False,
+ _opts_db_url=None,
+ _opts_log_file_name=None,
+ _opts_report_html_file_name=None,
+ _opts_report_junit_file_name=None,
+ _opts_report_build_file_name=None,
+ _opts_build_report={},
+ _opts_build_properties={},
+ _test_spec={},
+ _opts_goanna_for_mbed_sdk=None,
+ _opts_goanna_for_tests=None,
+ _opts_shuffle_test_order=False,
+ _opts_shuffle_test_seed=None,
+ _opts_test_by_names=None,
+ _opts_peripheral_by_names=None,
+ _opts_test_only_peripheral=False,
+ _opts_test_only_common=False,
+ _opts_verbose_skipped_tests=False,
+ _opts_verbose_test_result_only=False,
+ _opts_verbose=False,
+ _opts_firmware_global_name=None,
+ _opts_only_build_tests=False,
+ _opts_parallel_test_exec=False,
+ _opts_suppress_summary=False,
+ _opts_test_x_toolchain_summary=False,
+ _opts_copy_method=None,
+ _opts_mut_reset_type=None,
+ _opts_jobs=None,
+ _opts_waterfall_test=None,
+ _opts_consolidate_waterfall_test=None,
+ _opts_extend_test_timeout=None,
+ _opts_auto_detect=None,
+ _opts_include_non_automated=False):
+ """ Let's try hard to init this object
+ """
+ from colorama import init
+ init()
+
+ PATTERN = "\\{(" + "|".join(self.TEST_RESULT_MAPPING.keys()) + ")\\}"
+ self.RE_DETECT_TESTCASE_RESULT = re.compile(PATTERN)
+ # Settings related to test loops counters
+ try:
+ _global_loops_count = int(_global_loops_count)
+ except:
+ _global_loops_count = 1
+ if _global_loops_count < 1:
+ _global_loops_count = 1
+ self.GLOBAL_LOOPS_COUNT = _global_loops_count
+ self.TEST_LOOPS_LIST = _test_loops_list if _test_loops_list else []
+ self.TEST_LOOPS_DICT = self.test_loop_list_to_dict(_test_loops_list)
+
+ self.shuffle_random_seed = 0.0
+ self.SHUFFLE_SEED_ROUND = 10
+
+ # MUT list and test specification storage
+ self.muts = _muts
+ self.test_spec = _test_spec
+
+ # Settings passed e.g. from command line
+ self.opts_db_url = _opts_db_url
+ self.opts_log_file_name = _opts_log_file_name
+ self.opts_report_html_file_name = _opts_report_html_file_name
+ self.opts_report_junit_file_name = _opts_report_junit_file_name
+ self.opts_report_build_file_name = _opts_report_build_file_name
+ self.opts_goanna_for_mbed_sdk = _opts_goanna_for_mbed_sdk
+ self.opts_goanna_for_tests = _opts_goanna_for_tests
+ self.opts_shuffle_test_order = _opts_shuffle_test_order
+ self.opts_shuffle_test_seed = _opts_shuffle_test_seed
+ self.opts_test_by_names = _opts_test_by_names
+ self.opts_peripheral_by_names = _opts_peripheral_by_names
+ self.opts_test_only_peripheral = _opts_test_only_peripheral
+ self.opts_test_only_common = _opts_test_only_common
+ self.opts_verbose_skipped_tests = _opts_verbose_skipped_tests
+ self.opts_verbose_test_result_only = _opts_verbose_test_result_only
+ self.opts_verbose = _opts_verbose
+ self.opts_firmware_global_name = _opts_firmware_global_name
+ self.opts_only_build_tests = _opts_only_build_tests
+ self.opts_parallel_test_exec = _opts_parallel_test_exec
+ self.opts_suppress_summary = _opts_suppress_summary
+ self.opts_test_x_toolchain_summary = _opts_test_x_toolchain_summary
+ self.opts_copy_method = _opts_copy_method
+ self.opts_mut_reset_type = _opts_mut_reset_type
+ self.opts_jobs = _opts_jobs if _opts_jobs is not None else 1
+ self.opts_waterfall_test = _opts_waterfall_test
+ self.opts_consolidate_waterfall_test = _opts_consolidate_waterfall_test
+ self.opts_extend_test_timeout = _opts_extend_test_timeout
+ self.opts_clean = _clean
+ self.opts_auto_detect = _opts_auto_detect
+ self.opts_include_non_automated = _opts_include_non_automated
+
+ self.build_report = _opts_build_report
+ self.build_properties = _opts_build_properties
+
+ # File / screen logger initialization
+ self.logger = CLITestLogger(file_name=self.opts_log_file_name) # Default test logger
+
+ # Database related initializations
+ self.db_logger = factory_db_logger(self.opts_db_url)
+ self.db_logger_build_id = None # Build ID (database index of build_id table)
+ # Let's connect to database to set up credentials and confirm database is ready
+ if self.db_logger:
+ self.db_logger.connect_url(self.opts_db_url) # Save db access info inside db_logger object
+ if self.db_logger.is_connected():
+ # Get hostname and uname so we can use it as build description
+ # when creating new build_id in external database
+ (_hostname, _uname) = self.db_logger.get_hostname()
+ _host_location = os.path.dirname(os.path.abspath(__file__))
+ build_id_type = None if self.opts_only_build_tests is None else self.db_logger.BUILD_ID_TYPE_BUILD_ONLY
+ self.db_logger_build_id = self.db_logger.get_next_build_id(_hostname, desc=_uname, location=_host_location, type=build_id_type)
+ self.db_logger.disconnect()
+
+ def dump_options(self):
+        """ Function returns data structure with common settings passed to SingleTestRunner
+ It can be used for example to fill _extra fields in database storing test suite single run data
+ Example:
+ data = self.dump_options()
+ or
+ data_str = json.dumps(self.dump_options())
+ """
+ result = {"db_url" : str(self.opts_db_url),
+ "log_file_name" : str(self.opts_log_file_name),
+ "shuffle_test_order" : str(self.opts_shuffle_test_order),
+ "shuffle_test_seed" : str(self.opts_shuffle_test_seed),
+ "test_by_names" : str(self.opts_test_by_names),
+ "peripheral_by_names" : str(self.opts_peripheral_by_names),
+ "test_only_peripheral" : str(self.opts_test_only_peripheral),
+ "test_only_common" : str(self.opts_test_only_common),
+ "verbose" : str(self.opts_verbose),
+ "firmware_global_name" : str(self.opts_firmware_global_name),
+ "only_build_tests" : str(self.opts_only_build_tests),
+ "copy_method" : str(self.opts_copy_method),
+ "mut_reset_type" : str(self.opts_mut_reset_type),
+ "jobs" : str(self.opts_jobs),
+ "extend_test_timeout" : str(self.opts_extend_test_timeout),
+ "_dummy" : ''
+ }
+ return result
+
+ def shuffle_random_func(self):
+ return self.shuffle_random_seed
+
+ def is_shuffle_seed_float(self):
+ """ return true if function parameter can be converted to float
+ """
+ result = True
+ try:
+ float(self.shuffle_random_seed)
+ except ValueError:
+ result = False
+ return result
+
+ # This will store target / toolchain specific properties
+ test_suite_properties_ext = {} # target : toolchain
+ # Here we store test results
+ test_summary = []
+ # Here we store test results in extended data structure
+ test_summary_ext = {}
+ execute_thread_slice_lock = Lock()
+
+ def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report, build_properties):
+ for toolchain in toolchains:
+ tt_id = "%s::%s" % (toolchain, target)
+
+ T = TARGET_MAP[target]
+
+ # print target, toolchain
+ # Test suite properties returned to external tools like CI
+ test_suite_properties = {
+ 'jobs': self.opts_jobs,
+ 'clean': clean,
+ 'target': target,
+ 'vendor': T.extra_labels[0],
+ 'test_ids': ', '.join(test_ids),
+ 'toolchain': toolchain,
+ 'shuffle_random_seed': self.shuffle_random_seed
+ }
+
+
+ # print '=== %s::%s ===' % (target, toolchain)
+ # Let's build our test
+ if target not in TARGET_MAP:
+ print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Target platform not found'% (target))
+ continue
+
+ build_mbed_libs_options = ["analyze"] if self.opts_goanna_for_mbed_sdk else None
+ clean_mbed_libs_options = True if self.opts_goanna_for_mbed_sdk or clean or self.opts_clean else None
+
+
+ try:
+ build_mbed_libs_result = build_mbed_libs(T,
+ toolchain,
+ options=build_mbed_libs_options,
+ clean=clean_mbed_libs_options,
+ verbose=self.opts_verbose,
+ jobs=self.opts_jobs,
+ report=build_report,
+ properties=build_properties)
+
+ if not build_mbed_libs_result:
+ print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Toolchain %s is not yet supported for this target'% (T.name, toolchain))
+ continue
+
+ except ToolException:
+ print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
+ continue
+
+ build_dir = join(BUILD_DIR, "test", target, toolchain)
+
+ test_suite_properties['build_mbed_libs_result'] = build_mbed_libs_result
+ test_suite_properties['build_dir'] = build_dir
+ test_suite_properties['skipped'] = []
+
+ # Enumerate through all tests and shuffle test order if requested
+ test_map_keys = sorted(TEST_MAP.keys())
+
+ if self.opts_shuffle_test_order:
+ random.shuffle(test_map_keys, self.shuffle_random_func)
+                # Update database with shuffle seed if applicable
+ if self.db_logger:
+ self.db_logger.reconnect();
+ if self.db_logger.is_connected():
+ self.db_logger.update_build_id_info(self.db_logger_build_id, _shuffle_seed=self.shuffle_random_func())
+ self.db_logger.disconnect();
+
+ if self.db_logger:
+ self.db_logger.reconnect();
+ if self.db_logger.is_connected():
+ # Update MUTs and Test Specification in database
+ self.db_logger.update_build_id_info(self.db_logger_build_id, _muts=self.muts, _test_spec=self.test_spec)
+ # Update Extra information in database (some options passed to test suite)
+ self.db_logger.update_build_id_info(self.db_logger_build_id, _extra=json.dumps(self.dump_options()))
+ self.db_logger.disconnect();
+
+ valid_test_map_keys = self.get_valid_tests(test_map_keys, target, toolchain, test_ids, self.opts_include_non_automated)
+ skipped_test_map_keys = self.get_skipped_tests(test_map_keys, valid_test_map_keys)
+
+ for skipped_test_id in skipped_test_map_keys:
+ test_suite_properties['skipped'].append(skipped_test_id)
+
+
+ # First pass through all tests and determine which libraries need to be built
+ libraries = []
+ for test_id in valid_test_map_keys:
+ test = TEST_MAP[test_id]
+
+ # Detect which lib should be added to test
+ # Some libs have to compiled like RTOS or ETH
+ for lib in LIBRARIES:
+ if lib['build_dir'] in test.dependencies and lib['id'] not in libraries:
+ libraries.append(lib['id'])
+
+
+ build_project_options = ["analyze"] if self.opts_goanna_for_tests else None
+ clean_project_options = True if self.opts_goanna_for_tests or clean or self.opts_clean else None
+
+ # Build all required libraries
+ for lib_id in libraries:
+ try:
+ build_lib(lib_id,
+ T,
+ toolchain,
+ options=build_project_options,
+ verbose=self.opts_verbose,
+ clean=clean_mbed_libs_options,
+ jobs=self.opts_jobs,
+ report=build_report,
+ properties=build_properties)
+
+ except ToolException:
+ print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building library %s'% (lib_id))
+ continue
+
+
+ for test_id in valid_test_map_keys:
+ test = TEST_MAP[test_id]
+
+ test_suite_properties['test.libs.%s.%s.%s'% (target, toolchain, test_id)] = ', '.join(libraries)
+
+ # TODO: move this 2 below loops to separate function
+ INC_DIRS = []
+ for lib_id in libraries:
+ if 'inc_dirs_ext' in LIBRARY_MAP[lib_id] and LIBRARY_MAP[lib_id]['inc_dirs_ext']:
+ INC_DIRS.extend(LIBRARY_MAP[lib_id]['inc_dirs_ext'])
+
+ MACROS = []
+ for lib_id in libraries:
+ if 'macros' in LIBRARY_MAP[lib_id] and LIBRARY_MAP[lib_id]['macros']:
+ MACROS.extend(LIBRARY_MAP[lib_id]['macros'])
+ MACROS.append('TEST_SUITE_TARGET_NAME="%s"'% target)
+ MACROS.append('TEST_SUITE_TEST_ID="%s"'% test_id)
+ test_uuid = uuid.uuid4()
+ MACROS.append('TEST_SUITE_UUID="%s"'% str(test_uuid))
+
+ # Prepare extended test results data structure (it can be used to generate detailed test report)
+ if target not in self.test_summary_ext:
+ self.test_summary_ext[target] = {} # test_summary_ext : toolchain
+ if toolchain not in self.test_summary_ext[target]:
+ self.test_summary_ext[target][toolchain] = {} # test_summary_ext : toolchain : target
+
+ tt_test_id = "%s::%s::%s" % (toolchain, target, test_id) # For logging only
+
+ project_name = self.opts_firmware_global_name if self.opts_firmware_global_name else None
+ try:
+ path = build_project(test.source_dir,
+ join(build_dir, test_id),
+ T,
+ toolchain,
+ test.dependencies,
+ options=build_project_options,
+ clean=clean_project_options,
+ verbose=self.opts_verbose,
+ name=project_name,
+ macros=MACROS,
+ inc_dirs=INC_DIRS,
+ jobs=self.opts_jobs,
+ report=build_report,
+ properties=build_properties,
+ project_id=test_id,
+ project_description=test.get_description())
+
+ except Exception, e:
+ project_name_str = project_name if project_name is not None else test_id
+
+
+ test_result = self.TEST_RESULT_FAIL
+
+ if isinstance(e, ToolException):
+ print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building project %s'% (project_name_str))
+ test_result = self.TEST_RESULT_BUILD_FAILED
+ elif isinstance(e, NotSupportedException):
+ print self.logger.log_line(self.logger.LogType.INFO, 'The project %s is not supported'% (project_name_str))
+ test_result = self.TEST_RESULT_NOT_SUPPORTED
+
+
+ # Append test results to global test summary
+ self.test_summary.append(
+ (test_result, target, toolchain, test_id, test.get_description(), 0, 0, '-')
+ )
+
+ # Add detailed test result to test summary structure
+ if test_id not in self.test_summary_ext[target][toolchain]:
+ self.test_summary_ext[target][toolchain][test_id] = []
+
+ self.test_summary_ext[target][toolchain][test_id].append({ 0: {
+ 'result' : test_result,
+ 'output' : '',
+ 'target_name' : target,
+ 'target_name_unique': target,
+ 'toolchain_name' : toolchain,
+ 'id' : test_id,
+ 'description' : test.get_description(),
+ 'elapsed_time' : 0,
+ 'duration' : 0,
+ 'copy_method' : None
+ }})
+ continue
+
+ if self.opts_only_build_tests:
+ # With this option we are skipping testing phase
+ continue
+
+ # Test duration can be increased by global value
+ test_duration = test.duration
+ if self.opts_extend_test_timeout is not None:
+ test_duration += self.opts_extend_test_timeout
+
+ # For an automated test the duration act as a timeout after
+ # which the test gets interrupted
+ test_spec = self.shape_test_request(target, path, test_id, test_duration)
+ test_loops = self.get_test_loop_count(test_id)
+
+ test_suite_properties['test.duration.%s.%s.%s'% (target, toolchain, test_id)] = test_duration
+ test_suite_properties['test.loops.%s.%s.%s'% (target, toolchain, test_id)] = test_loops
+ test_suite_properties['test.path.%s.%s.%s'% (target, toolchain, test_id)] = path
+
+ # read MUTs, test specification and perform tests
+ handle_results = self.handle(test_spec, target, toolchain, test_loops=test_loops)
+
+ if handle_results is None:
+ continue
+
+ for handle_result in handle_results:
+ if handle_result:
+ single_test_result, detailed_test_results = handle_result
+ else:
+ continue
+
+ # Append test results to global test summary
+ if single_test_result is not None:
+ self.test_summary.append(single_test_result)
+
+ # Add detailed test result to test summary structure
+ if target not in self.test_summary_ext[target][toolchain]:
+ if test_id not in self.test_summary_ext[target][toolchain]:
+ self.test_summary_ext[target][toolchain][test_id] = []
+
+ append_test_result = detailed_test_results
+
+ # If waterfall and consolidate-waterfall options are enabled,
+ # only include the last test result in the report.
+ if self.opts_waterfall_test and self.opts_consolidate_waterfall_test:
+ append_test_result = {0: detailed_test_results[len(detailed_test_results) - 1]}
+
+ self.test_summary_ext[target][toolchain][test_id].append(append_test_result)
+
+ test_suite_properties['skipped'] = ', '.join(test_suite_properties['skipped'])
+ self.test_suite_properties_ext[target][toolchain] = test_suite_properties
+
+ q.put(target + '_'.join(toolchains))
+ return
+
+ def execute(self):
+ clean = self.test_spec.get('clean', False)
+ test_ids = self.test_spec.get('test_ids', [])
+ q = Queue()
+
+ # Generate seed for shuffle if seed is not provided in
+ self.shuffle_random_seed = round(random.random(), self.SHUFFLE_SEED_ROUND)
+ if self.opts_shuffle_test_seed is not None and self.is_shuffle_seed_float():
+ self.shuffle_random_seed = round(float(self.opts_shuffle_test_seed), self.SHUFFLE_SEED_ROUND)
+
+
+ if self.opts_parallel_test_exec:
+ ###################################################################
+ # Experimental, parallel test execution per singletest instance.
+ ###################################################################
+ execute_threads = [] # Threads used to build mbed SDL, libs, test cases and execute tests
+ # Note: We are building here in parallel for each target separately!
+ # So we are not building the same thing multiple times and compilers
+ # in separate threads do not collide.
+ # Inside execute_thread_slice() function function handle() will be called to
+ # get information about available MUTs (per target).
+ for target, toolchains in self.test_spec['targets'].iteritems():
+ self.test_suite_properties_ext[target] = {}
+ t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids, self.build_report, self.build_properties))
+ t.daemon = True
+ t.start()
+ execute_threads.append(t)
+
+ for t in execute_threads:
+ q.get() # t.join() would block some threads because we should not wait in any order for thread end
+ else:
+ # Serialized (not parallel) test execution
+ for target, toolchains in self.test_spec['targets'].iteritems():
+ if target not in self.test_suite_properties_ext:
+ self.test_suite_properties_ext[target] = {}
+
+ self.execute_thread_slice(q, target, toolchains, clean, test_ids, self.build_report, self.build_properties)
+ q.get()
+
+ if self.db_logger:
+ self.db_logger.reconnect();
+ if self.db_logger.is_connected():
+ self.db_logger.update_build_id_info(self.db_logger_build_id, _status_fk=self.db_logger.BUILD_ID_STATUS_COMPLETED)
+ self.db_logger.disconnect();
+
+ return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, self.build_report, self.build_properties
+
+ def get_valid_tests(self, test_map_keys, target, toolchain, test_ids, include_non_automated):
+ valid_test_map_keys = []
+
+ for test_id in test_map_keys:
+ test = TEST_MAP[test_id]
+ if self.opts_test_by_names and test_id not in self.opts_test_by_names.split(','):
+ continue
+
+ if test_ids and test_id not in test_ids:
+ continue
+
+ if self.opts_test_only_peripheral and not test.peripherals:
+ if self.opts_verbose_skipped_tests:
+ print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
+ continue
+
+ if self.opts_peripheral_by_names and test.peripherals and not len([i for i in test.peripherals if i in self.opts_peripheral_by_names.split(',')]):
+ # We will skip tests not forced with -p option
+ if self.opts_verbose_skipped_tests:
+ print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
+ continue
+
+ if self.opts_test_only_common and test.peripherals:
+ if self.opts_verbose_skipped_tests:
+ print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral test skipped for target %s'% (target))
+ continue
+
+ if not include_non_automated and not test.automated:
+ if self.opts_verbose_skipped_tests:
+ print self.logger.log_line(self.logger.LogType.INFO, 'Non automated test skipped for target %s'% (target))
+ continue
+
+ if test.is_supported(target, toolchain):
+ if test.peripherals is None and self.opts_only_build_tests:
+ # When users are using 'build only flag' and test do not have
+ # specified peripherals we can allow test building by default
+ pass
+ elif self.opts_peripheral_by_names and test_id not in self.opts_peripheral_by_names.split(','):
+ # If we force peripheral with option -p we expect test
+ # to pass even if peripheral is not in MUTs file.
+ pass
+ elif not self.is_peripherals_available(target, test.peripherals):
+ if self.opts_verbose_skipped_tests:
+ if test.peripherals:
+ print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral %s test skipped for target %s'% (",".join(test.peripherals), target))
+ else:
+ print self.logger.log_line(self.logger.LogType.INFO, 'Test %s skipped for target %s'% (test_id, target))
+ continue
+
+ # The test has made it through all the filters, so add it to the valid tests list
+ valid_test_map_keys.append(test_id)
+
+ return valid_test_map_keys
+
+ def get_skipped_tests(self, all_test_map_keys, valid_test_map_keys):
+ # NOTE: This will not preserve order
+ return list(set(all_test_map_keys) - set(valid_test_map_keys))
+
+ def generate_test_summary_by_target(self, test_summary, shuffle_seed=None):
+ """ Prints well-formed summary with results (SQL table like)
+ table shows text x toolchain test result matrix
+ """
+ RESULT_INDEX = 0
+ TARGET_INDEX = 1
+ TOOLCHAIN_INDEX = 2
+ TEST_INDEX = 3
+ DESC_INDEX = 4
+
+ unique_targets = get_unique_value_from_summary(test_summary, TARGET_INDEX)
+ unique_tests = get_unique_value_from_summary(test_summary, TEST_INDEX)
+ unique_test_desc = get_unique_value_from_summary_ext(test_summary, TEST_INDEX, DESC_INDEX)
+ unique_toolchains = get_unique_value_from_summary(test_summary, TOOLCHAIN_INDEX)
+
+ result = "Test summary:\n"
+ for target in unique_targets:
+ result_dict = {} # test : { toolchain : result }
+ unique_target_toolchains = []
+ for test in test_summary:
+ if test[TARGET_INDEX] == target:
+ if test[TOOLCHAIN_INDEX] not in unique_target_toolchains:
+ unique_target_toolchains.append(test[TOOLCHAIN_INDEX])
+ if test[TEST_INDEX] not in result_dict:
+ result_dict[test[TEST_INDEX]] = {}
+ result_dict[test[TEST_INDEX]][test[TOOLCHAIN_INDEX]] = test[RESULT_INDEX]
+
+ pt_cols = ["Target", "Test ID", "Test Description"] + unique_target_toolchains
+ pt = PrettyTable(pt_cols)
+ for col in pt_cols:
+ pt.align[col] = "l"
+ pt.padding_width = 1 # One space between column edges and contents (default)
+
+ for test in unique_tests:
+ if test in result_dict:
+ test_results = result_dict[test]
+ if test in unique_test_desc:
+ row = [target, test, unique_test_desc[test]]
+ for toolchain in unique_toolchains:
+ if toolchain in test_results:
+ row.append(test_results[toolchain])
+ pt.add_row(row)
+ result += pt.get_string()
+ shuffle_seed_text = "Shuffle Seed: %.*f"% (self.SHUFFLE_SEED_ROUND,
+ shuffle_seed if shuffle_seed else self.shuffle_random_seed)
+ result += "\n%s"% (shuffle_seed_text if self.opts_shuffle_test_order else '')
+ return result
+
+ def generate_test_summary(self, test_summary, shuffle_seed=None):
+ """ Prints well-formed summary with results (SQL table like)
+ table shows target x test results matrix across
+ """
+        success_code = 0   # Success code that can be later returned to
+ result = "Test summary:\n"
+ # Pretty table package is used to print results
+ pt = PrettyTable(["Result", "Target", "Toolchain", "Test ID", "Test Description",
+ "Elapsed Time (sec)", "Timeout (sec)", "Loops"])
+ pt.align["Result"] = "l" # Left align
+ pt.align["Target"] = "l" # Left align
+ pt.align["Toolchain"] = "l" # Left align
+ pt.align["Test ID"] = "l" # Left align
+ pt.align["Test Description"] = "l" # Left align
+ pt.padding_width = 1 # One space between column edges and contents (default)
+
+ result_dict = {self.TEST_RESULT_OK : 0,
+ self.TEST_RESULT_FAIL : 0,
+ self.TEST_RESULT_ERROR : 0,
+ self.TEST_RESULT_UNDEF : 0,
+ self.TEST_RESULT_IOERR_COPY : 0,
+ self.TEST_RESULT_IOERR_DISK : 0,
+ self.TEST_RESULT_IOERR_SERIAL : 0,
+ self.TEST_RESULT_NO_IMAGE : 0,
+ self.TEST_RESULT_TIMEOUT : 0,
+ self.TEST_RESULT_MBED_ASSERT : 0,
+ self.TEST_RESULT_BUILD_FAILED : 0,
+ self.TEST_RESULT_NOT_SUPPORTED : 0
+ }
+
+ for test in test_summary:
+ if test[0] in result_dict:
+ result_dict[test[0]] += 1
+ pt.add_row(test)
+ result += pt.get_string()
+ result += "\n"
+
+ # Print result count
+ result += "Result: " + ' / '.join(['%s %s' % (value, key) for (key, value) in {k: v for k, v in result_dict.items() if v != 0}.iteritems()])
+ shuffle_seed_text = "Shuffle Seed: %.*f\n"% (self.SHUFFLE_SEED_ROUND,
+ shuffle_seed if shuffle_seed else self.shuffle_random_seed)
+ result += "\n%s"% (shuffle_seed_text if self.opts_shuffle_test_order else '')
+ return result
+
+ def test_loop_list_to_dict(self, test_loops_str):
+ """ Transforms test_id=X,test_id=X,test_id=X into dictionary {test_id : test_id_loops_count}
+ """
+ result = {}
+ if test_loops_str:
+ test_loops = test_loops_str.split(',')
+ for test_loop in test_loops:
+ test_loop_count = test_loop.split('=')
+ if len(test_loop_count) == 2:
+ _test_id, _test_loops = test_loop_count
+ try:
+ _test_loops = int(_test_loops)
+ except:
+ continue
+ result[_test_id] = _test_loops
+ return result
+
+ def get_test_loop_count(self, test_id):
+ """ This function returns no. of loops per test (deduced by test_id).
+ If test is not in list of redefined loop counts it will use default value.
+ """
+ result = self.GLOBAL_LOOPS_COUNT
+ if test_id in self.TEST_LOOPS_DICT:
+ result = self.TEST_LOOPS_DICT[test_id]
+ return result
+
+ def delete_file(self, file_path):
+ """ Remove file from the system
+ """
+ result = True
+ resutl_msg = ""
+ try:
+ os.remove(file_path)
+ except Exception, e:
+ resutl_msg = e
+ result = False
+ return result, resutl_msg
+
+ def handle_mut(self, mut, data, target_name, toolchain_name, test_loops=1):
+ """ Test is being invoked for given MUT.
+ """
+ # Get test information, image and test timeout
+ test_id = data['test_id']
+ test = TEST_MAP[test_id]
+ test_description = TEST_MAP[test_id].get_description()
+ image = data["image"]
+ duration = data.get("duration", 10)
+
+ if mut is None:
+ print "Error: No Mbed available: MUT[%s]" % data['mcu']
+ return None
+
+ mcu = mut['mcu']
+ copy_method = mut.get('copy_method') # Available board configuration selection e.g. core selection etc.
+
+ if self.db_logger:
+ self.db_logger.reconnect()
+
+ selected_copy_method = self.opts_copy_method if copy_method is None else copy_method
+
+ # Tests can be looped so test results must be stored for the same test
+ test_all_result = []
+ # Test results for one test ran few times
+ detailed_test_results = {} # { Loop_number: { results ... } }
+
+ for test_index in range(test_loops):
+
+ # If mbedls is available and we are auto detecting MUT info,
+ # update MUT info (mount point may have changed)
+ if get_module_avail('mbed_lstools') and self.opts_auto_detect:
+ platform_name_filter = [mcu]
+ muts_list = {}
+ found = False
+
+ for i in range(0, 60):
+ print('Looking for %s with MBEDLS' % mcu)
+ muts_list = get_autodetected_MUTS_list(platform_name_filter=platform_name_filter)
+
+ if 1 not in muts_list:
+ sleep(3)
+ else:
+ found = True
+ break
+
+ if not found:
+ print "Error: mbed not found with MBEDLS: %s" % data['mcu']
+ return None
+ else:
+ mut = muts_list[1]
+
+ disk = mut.get('disk')
+ port = mut.get('port')
+
+ if disk is None or port is None:
+ return None
+
+ target_by_mcu = TARGET_MAP[mut['mcu']]
+ target_name_unique = mut['mcu_unique'] if 'mcu_unique' in mut else mut['mcu']
+ # Some extra stuff can be declared in MUTs structure
+ reset_type = mut.get('reset_type') # reboot.txt, reset.txt, shutdown.txt
+ reset_tout = mut.get('reset_tout') # COPY_IMAGE -> RESET_PROC -> SLEEP(RESET_TOUT)
+
+ # When the build and test system were separate, this was relative to a
+ # base network folder base path: join(NETWORK_BASE_PATH, )
+ image_path = image
+
+ # Host test execution
+ start_host_exec_time = time()
+
+ single_test_result = self.TEST_RESULT_UNDEF # single test run result
+ _copy_method = selected_copy_method
+
+ if not exists(image_path):
+ single_test_result = self.TEST_RESULT_NO_IMAGE
+ elapsed_time = 0
+ single_test_output = self.logger.log_line(self.logger.LogType.ERROR, 'Image file does not exist: %s'% image_path)
+ print single_test_output
+ else:
+ # Host test execution
+ start_host_exec_time = time()
+
+ host_test_verbose = self.opts_verbose_test_result_only or self.opts_verbose
+ host_test_reset = self.opts_mut_reset_type if reset_type is None else reset_type
+ host_test_result = self.run_host_test(test.host_test,
+ image_path, disk, port, duration,
+ micro=target_name,
+ verbose=host_test_verbose,
+ reset=host_test_reset,
+ reset_tout=reset_tout,
+ copy_method=selected_copy_method,
+ program_cycle_s=target_by_mcu.program_cycle_s())
+ single_test_result, single_test_output, single_testduration, single_timeout = host_test_result
+
+ # Store test result
+ test_all_result.append(single_test_result)
+ total_elapsed_time = time() - start_host_exec_time # Test time with copy (flashing) / reset
+ elapsed_time = single_testduration # TIme of single test case execution after reset
+
+ detailed_test_results[test_index] = {
+ 'result' : single_test_result,
+ 'output' : single_test_output,
+ 'target_name' : target_name,
+ 'target_name_unique' : target_name_unique,
+ 'toolchain_name' : toolchain_name,
+ 'id' : test_id,
+ 'description' : test_description,
+ 'elapsed_time' : round(elapsed_time, 2),
+ 'duration' : single_timeout,
+ 'copy_method' : _copy_method,
+ }
+
+ print self.print_test_result(single_test_result, target_name_unique, toolchain_name,
+ test_id, test_description, elapsed_time, single_timeout)
+
+ # Update database entries for ongoing test
+ if self.db_logger and self.db_logger.is_connected():
+ test_type = 'SingleTest'
+ self.db_logger.insert_test_entry(self.db_logger_build_id,
+ target_name,
+ toolchain_name,
+ test_type,
+ test_id,
+ single_test_result,
+ single_test_output,
+ elapsed_time,
+ single_timeout,
+ test_index)
+
+ # If we perform waterfall test we test until we get OK and we stop testing
+ if self.opts_waterfall_test and single_test_result == self.TEST_RESULT_OK:
+ break
+
+ if self.db_logger:
+ self.db_logger.disconnect()
+
+ return (self.shape_global_test_loop_result(test_all_result, self.opts_waterfall_test and self.opts_consolidate_waterfall_test),
+ target_name_unique,
+ toolchain_name,
+ test_id,
+ test_description,
+ round(elapsed_time, 2),
+ single_timeout,
+ self.shape_test_loop_ok_result_count(test_all_result)), detailed_test_results
+
+ def handle(self, test_spec, target_name, toolchain_name, test_loops=1):
+ """ Function determines MUT's mbed disk/port and copies binary to
+ target.
+ """
+ handle_results = []
+ data = json.loads(test_spec)
+
+ # Find a suitable MUT:
+ mut = None
+ for id, m in self.muts.iteritems():
+ if m['mcu'] == data['mcu']:
+ mut = m
+ handle_result = self.handle_mut(mut, data, target_name, toolchain_name, test_loops=test_loops)
+ handle_results.append(handle_result)
+
+ return handle_results
+
+ def print_test_result(self, test_result, target_name, toolchain_name,
+ test_id, test_description, elapsed_time, duration):
+ """ Use specific convention to print test result and related data
+ """
+ tokens = []
+ tokens.append("TargetTest")
+ tokens.append(target_name)
+ tokens.append(toolchain_name)
+ tokens.append(test_id)
+ tokens.append(test_description)
+ separator = "::"
+ time_info = " in %.2f of %d sec" % (round(elapsed_time, 2), duration)
+ result = separator.join(tokens) + " [" + test_result +"]" + time_info
+ return Fore.MAGENTA + result + Fore.RESET
+
+ def shape_test_loop_ok_result_count(self, test_all_result):
+ """ Reformats list of results to simple string
+ """
+ test_loop_count = len(test_all_result)
+ test_loop_ok_result = test_all_result.count(self.TEST_RESULT_OK)
+ return "%d/%d"% (test_loop_ok_result, test_loop_count)
+
+ def shape_global_test_loop_result(self, test_all_result, waterfall_and_consolidate):
+ """ Reformats list of results to simple string
+ """
+ result = self.TEST_RESULT_FAIL
+
+ if all(test_all_result[0] == res for res in test_all_result):
+ result = test_all_result[0]
+ elif waterfall_and_consolidate and any(res == self.TEST_RESULT_OK for res in test_all_result):
+ result = self.TEST_RESULT_OK
+
+ return result
+
+ def run_host_test(self, name, image_path, disk, port, duration,
+ micro=None, reset=None, reset_tout=None,
+ verbose=False, copy_method=None, program_cycle_s=None):
+ """ Function creates new process with host test configured with particular test case.
+ Function is also polling for serial port activity from process to catch all data
+ printed by test runner and host test during test execution
+ """
+
+ def get_char_from_queue(obs):
+ """ Get character from queue safe way
+ """
+ try:
+ c = obs.queue.get(block=True, timeout=0.5)
+ except Empty, _:
+ c = None
+ return c
+
+ def filter_queue_char(c):
+ """ Filters out non ASCII characters from serial port
+ """
+ if ord(c) not in range(128):
+ c = ' '
+ return c
+
+ def get_test_result(output):
+ """ Parse test 'output' data
+ """
+ result = self.TEST_RESULT_TIMEOUT
+ for line in "".join(output).splitlines():
+ search_result = self.RE_DETECT_TESTCASE_RESULT.search(line)
+ if search_result and len(search_result.groups()):
+ result = self.TEST_RESULT_MAPPING[search_result.groups(0)[0]]
+ break
+ return result
+
+ def get_auto_property_value(property_name, line):
+ """ Scans auto detection line from MUT and returns scanned parameter 'property_name'
+ Returns string
+ """
+ result = None
+ if re.search("HOST: Property '%s'"% property_name, line) is not None:
+ property = re.search("HOST: Property '%s' = '([\w\d _]+)'"% property_name, line)
+ if property is not None and len(property.groups()) == 1:
+ result = property.groups()[0]
+ return result
+
+ # print "{%s} port:%s disk:%s" % (name, port, disk),
+ cmd = ["python",
+ '%s.py'% name,
+ '-d', disk,
+ '-f', '"%s"'% image_path,
+ '-p', port,
+ '-t', str(duration),
+ '-C', str(program_cycle_s)]
+
+ if get_module_avail('mbed_lstools') and self.opts_auto_detect:
+ cmd += ['--auto']
+
+ # Add extra parameters to host_test
+ if copy_method is not None:
+ cmd += ["-c", copy_method]
+ if micro is not None:
+ cmd += ["-m", micro]
+ if reset is not None:
+ cmd += ["-r", reset]
+ if reset_tout is not None:
+ cmd += ["-R", str(reset_tout)]
+
+ if verbose:
+ print Fore.MAGENTA + "Executing '" + " ".join(cmd) + "'" + Fore.RESET
+ print "Test::Output::Start"
+
+ proc = Popen(cmd, stdout=PIPE, cwd=HOST_TESTS)
+ obs = ProcessObserver(proc)
+ update_once_flag = {} # Stores flags checking if some auto-parameter was already set
+ line = ''
+ output = []
+ start_time = time()
+ while (time() - start_time) < (2 * duration):
+ c = get_char_from_queue(obs)
+ if c:
+ if verbose:
+ sys.stdout.write(c)
+ c = filter_queue_char(c)
+ output.append(c)
+ # Give the mbed under test a way to communicate the end of the test
+ if c in ['\n', '\r']:
+
+ # Checking for auto-detection information from the test about MUT reset moment
+ if 'reset_target' not in update_once_flag and "HOST: Reset target..." in line:
+ # We will update this marker only once to prevent multiple time resets
+ update_once_flag['reset_target'] = True
+ start_time = time()
+
+ # Checking for auto-detection information from the test about timeout
+ auto_timeout_val = get_auto_property_value('timeout', line)
+ if 'timeout' not in update_once_flag and auto_timeout_val is not None:
+ # We will update this marker only once to prevent multiple time resets
+ update_once_flag['timeout'] = True
+ duration = int(auto_timeout_val)
+
+ # Detect mbed assert:
+ if 'mbed assertation failed: ' in line:
+ output.append('{{mbed_assert}}')
+ break
+
+ # Check for test end
+ if '{end}' in line:
+ break
+ line = ''
+ else:
+ line += c
+ end_time = time()
+ testcase_duration = end_time - start_time # Test case duration from reset to {end}
+
+ c = get_char_from_queue(obs)
+
+ if c:
+ if verbose:
+ sys.stdout.write(c)
+ c = filter_queue_char(c)
+ output.append(c)
+
+ if verbose:
+ print "Test::Output::Finish"
+ # Stop test process
+ obs.stop()
+
+ result = get_test_result(output)
+ return (result, "".join(output), testcase_duration, duration)
+
+ def is_peripherals_available(self, target_mcu_name, peripherals=None):
+ """ Checks if specified target should run specific peripheral test case defined in MUTs file
+ """
+ if peripherals is not None:
+ peripherals = set(peripherals)
+ for id, mut in self.muts.iteritems():
+ # Target MCU name check
+ if mut["mcu"] != target_mcu_name:
+ continue
+ # Peripherals check
+ if peripherals is not None:
+ if 'peripherals' not in mut:
+ continue
+ if not peripherals.issubset(set(mut['peripherals'])):
+ continue
+ return True
+ return False
+
+ def shape_test_request(self, mcu, image_path, test_id, duration=10):
+ """ Function prepares JSON structure describing test specification
+ """
+ test_spec = {
+ "mcu": mcu,
+ "image": image_path,
+ "duration": duration,
+ "test_id": test_id,
+ }
+ return json.dumps(test_spec)
+
+
+def get_unique_value_from_summary(test_summary, index):
+ """ Gets list of unique target names
+ """
+ result = []
+ for test in test_summary:
+ target_name = test[index]
+ if target_name not in result:
+ result.append(target_name)
+ return sorted(result)
+
+
+def get_unique_value_from_summary_ext(test_summary, index_key, index_val):
+ """ Gets list of unique target names and return dictionary
+ """
+ result = {}
+ for test in test_summary:
+ key = test[index_key]
+ val = test[index_val]
+ if key not in result:
+ result[key] = val
+ return result
+
+
+def show_json_file_format_error(json_spec_filename, line, column):
+ """ Prints JSON broken content
+ """
+ with open(json_spec_filename) as data_file:
+ line_no = 1
+ for json_line in data_file:
+ if line_no + 5 >= line: # Print last few lines before error
+ print 'Line %d:\t'%line_no + json_line, # Prints line
+ if line_no == line:
+ print ' ' * len('Line %d:'%line_no) + '\t', '-' * (column-1) + '^'
+ break
+ line_no += 1
+
+
+def json_format_error_defect_pos(json_error_msg):
+ """ Gets first error line and column in JSON file format.
+ Parsed from exception thrown by json.loads() string
+ """
+ result = None
+ line, column = 0, 0
+ # Line value search
+ line_search = re.search('line [0-9]+', json_error_msg)
+ if line_search is not None:
+ ls = line_search.group().split(' ')
+ if len(ls) == 2:
+ line = int(ls[1])
+ # Column position search
+ column_search = re.search('column [0-9]+', json_error_msg)
+ if column_search is not None:
+ cs = column_search.group().split(' ')
+ if len(cs) == 2:
+ column = int(cs[1])
+ result = [line, column]
+ return result
+
+
+def get_json_data_from_file(json_spec_filename, verbose=False):
+ """ Loads from file JSON formatted string to data structure
+ """
+ result = None
+ try:
+ with open(json_spec_filename) as data_file:
+ try:
+ result = json.load(data_file)
+ except ValueError as json_error_msg:
+ result = None
+ print 'JSON file %s parsing failed. Reason: %s' % (json_spec_filename, json_error_msg)
+ # We can print where error occurred inside JSON file if we can parse exception msg
+ json_format_defect_pos = json_format_error_defect_pos(str(json_error_msg))
+ if json_format_defect_pos is not None:
+ line = json_format_defect_pos[0]
+ column = json_format_defect_pos[1]
+ print
+ show_json_file_format_error(json_spec_filename, line, column)
+
+ except IOError as fileopen_error_msg:
+ print 'JSON file %s not opened. Reason: %s'% (json_spec_filename, fileopen_error_msg)
+ print
+ if verbose and result:
+ pp = pprint.PrettyPrinter(indent=4)
+ pp.pprint(result)
+ return result
+
+
+def print_muts_configuration_from_json(json_data, join_delim=", ", platform_filter=None):
+ """ Prints MUTs configuration passed to test script for verboseness
+ """
+ muts_info_cols = []
+ # We need to check all unique properties for each defined MUT
+ for k in json_data:
+ mut_info = json_data[k]
+ for mut_property in mut_info:
+ if mut_property not in muts_info_cols:
+ muts_info_cols.append(mut_property)
+
+ # Prepare pretty table object to display all MUTs
+ pt_cols = ["index"] + muts_info_cols
+ pt = PrettyTable(pt_cols)
+ for col in pt_cols:
+ pt.align[col] = "l"
+
+ # Add rows to pretty print object
+ for k in json_data:
+ row = [k]
+ mut_info = json_data[k]
+
+ add_row = True
+ if platform_filter and 'mcu' in mut_info:
+ add_row = re.search(platform_filter, mut_info['mcu']) is not None
+ if add_row:
+ for col in muts_info_cols:
+ cell_val = mut_info[col] if col in mut_info else None
+ if type(cell_val) == ListType:
+ cell_val = join_delim.join(cell_val)
+ row.append(cell_val)
+ pt.add_row(row)
+ return pt.get_string()
+
+
+def print_test_configuration_from_json(json_data, join_delim=", "):
+ """ Prints test specification configuration passed to test script for verboseness
+ """
+ toolchains_info_cols = []
+ # We need to check all toolchains for each device
+ for k in json_data:
+ # k should be 'targets'
+ targets = json_data[k]
+ for target in targets:
+ toolchains = targets[target]
+ for toolchain in toolchains:
+ if toolchain not in toolchains_info_cols:
+ toolchains_info_cols.append(toolchain)
+
+ # Prepare pretty table object to display test specification
+ pt_cols = ["mcu"] + sorted(toolchains_info_cols)
+ pt = PrettyTable(pt_cols)
+ for col in pt_cols:
+ pt.align[col] = "l"
+
+ # { target : [conflicted toolchains] }
+ toolchain_conflicts = {}
+ toolchain_path_conflicts = []
+ for k in json_data:
+ # k should be 'targets'
+ targets = json_data[k]
+ for target in targets:
+ target_supported_toolchains = get_target_supported_toolchains(target)
+ if not target_supported_toolchains:
+ target_supported_toolchains = []
+ target_name = target if target in TARGET_MAP else "%s*"% target
+ row = [target_name]
+ toolchains = targets[target]
+
+ for toolchain in sorted(toolchains_info_cols):
+ # Check for conflicts: target vs toolchain
+ conflict = False
+ conflict_path = False
+ if toolchain in toolchains:
+ if toolchain not in target_supported_toolchains:
+ conflict = True
+ if target not in toolchain_conflicts:
+ toolchain_conflicts[target] = []
+ toolchain_conflicts[target].append(toolchain)
+ # Add marker inside table about target usage / conflict
+ cell_val = 'Yes' if toolchain in toolchains else '-'
+ if conflict:
+ cell_val += '*'
+ # Check for conflicts: toolchain vs toolchain path
+ if toolchain in TOOLCHAIN_BIN_PATH:
+ toolchain_path = TOOLCHAIN_BIN_PATH[toolchain]
+ if not os.path.isdir(toolchain_path):
+ conflict_path = True
+ if toolchain not in toolchain_path_conflicts:
+ toolchain_path_conflicts.append(toolchain)
+ if conflict_path:
+ cell_val += '#'
+ row.append(cell_val)
+ pt.add_row(row)
+
+ # generate result string
+ result = pt.get_string() # Test specification table
+ if toolchain_conflicts or toolchain_path_conflicts:
+ result += "\n"
+ result += "Toolchain conflicts:\n"
+ for target in toolchain_conflicts:
+ if target not in TARGET_MAP:
+ result += "\t* Target %s unknown\n"% (target)
+ conflict_target_list = join_delim.join(toolchain_conflicts[target])
+ sufix = 's' if len(toolchain_conflicts[target]) > 1 else ''
+ result += "\t* Target %s does not support %s toolchain%s\n"% (target, conflict_target_list, sufix)
+
+ for toolchain in toolchain_path_conflicts:
+ # Let's check toolchain configuration
+ if toolchain in TOOLCHAIN_BIN_PATH:
+ toolchain_path = TOOLCHAIN_BIN_PATH[toolchain]
+ if not os.path.isdir(toolchain_path):
+ result += "\t# Toolchain %s path not found: %s\n"% (toolchain, toolchain_path)
+ return result
+
+
+def get_avail_tests_summary_table(cols=None, result_summary=True, join_delim=',',platform_filter=None):
+ """ Generates table summary with all test cases and additional test cases
+ information using pretty print functionality. Allows test suite user to
+ see test cases
+ """
+ # get all unique test ID prefixes
+ unique_test_id = []
+ for test in TESTS:
+ split = test['id'].split('_')[:-1]
+ test_id_prefix = '_'.join(split)
+ if test_id_prefix not in unique_test_id:
+ unique_test_id.append(test_id_prefix)
+ unique_test_id.sort()
+ counter_dict_test_id_types = dict((t, 0) for t in unique_test_id)
+ counter_dict_test_id_types_all = dict((t, 0) for t in unique_test_id)
+
+ test_properties = ['id',
+ 'automated',
+ 'description',
+ 'peripherals',
+ 'host_test',
+ 'duration'] if cols is None else cols
+
+ # All tests status table print
+ pt = PrettyTable(test_properties)
+ for col in test_properties:
+ pt.align[col] = "l"
+ pt.align['duration'] = "r"
+
+ counter_all = 0
+ counter_automated = 0
+ pt.padding_width = 1 # One space between column edges and contents (default)
+
+ for test_id in sorted(TEST_MAP.keys()):
+ if platform_filter is not None:
+ # Filter out platforms using regex
+ if re.search(platform_filter, test_id) is None:
+ continue
+ row = []
+ test = TEST_MAP[test_id]
+ split = test_id.split('_')[:-1]
+ test_id_prefix = '_'.join(split)
+
+ for col in test_properties:
+ col_value = test[col]
+ if type(test[col]) == ListType:
+ col_value = join_delim.join(test[col])
+ elif test[col] == None:
+ col_value = "-"
+
+ row.append(col_value)
+ if test['automated'] == True:
+ counter_dict_test_id_types[test_id_prefix] += 1
+ counter_automated += 1
+ pt.add_row(row)
+ # Update counters
+ counter_all += 1
+ counter_dict_test_id_types_all[test_id_prefix] += 1
+ result = pt.get_string()
+ result += "\n\n"
+
+ if result_summary and not platform_filter:
+ # Automation result summary
+ test_id_cols = ['automated', 'all', 'percent [%]', 'progress']
+ pt = PrettyTable(test_id_cols)
+ pt.align['automated'] = "r"
+ pt.align['all'] = "r"
+ pt.align['percent [%]'] = "r"
+
+ percent_progress = round(100.0 * counter_automated / float(counter_all), 1)
+ str_progress = progress_bar(percent_progress, 75)
+ pt.add_row([counter_automated, counter_all, percent_progress, str_progress])
+ result += "Automation coverage:\n"
+ result += pt.get_string()
+ result += "\n\n"
+
+ # Test automation coverage table print
+ test_id_cols = ['id', 'automated', 'all', 'percent [%]', 'progress']
+ pt = PrettyTable(test_id_cols)
+ pt.align['id'] = "l"
+ pt.align['automated'] = "r"
+ pt.align['all'] = "r"
+ pt.align['percent [%]'] = "r"
+ for unique_id in unique_test_id:
+ # print "\t\t%s: %d / %d" % (unique_id, counter_dict_test_id_types[unique_id], counter_dict_test_id_types_all[unique_id])
+ percent_progress = round(100.0 * counter_dict_test_id_types[unique_id] / float(counter_dict_test_id_types_all[unique_id]), 1)
+ str_progress = progress_bar(percent_progress, 75)
+ row = [unique_id,
+ counter_dict_test_id_types[unique_id],
+ counter_dict_test_id_types_all[unique_id],
+ percent_progress,
+ "[" + str_progress + "]"]
+ pt.add_row(row)
+ result += "Test automation coverage:\n"
+ result += pt.get_string()
+ result += "\n\n"
+ return result
+
+
+def progress_bar(percent_progress, saturation=0):
+ """ This function creates progress bar with optional simple saturation mark
+ """
+ step = int(percent_progress / 2) # Scale percentage to bar width (range: 0 - 50)
+ str_progress = '#' * step + '.' * int(50 - step)
+ c = '!' if str_progress[38] == '.' else '|'
+ if saturation > 0:
+ saturation = saturation / 2
+ str_progress = str_progress[:saturation] + c + str_progress[saturation:]
+ return str_progress
+
+
+def singletest_in_cli_mode(single_test):
+ """ Runs SingleTestRunner object in CLI (Command line interface) mode
+
+ @return returns success code (0 == success) for building and running tests
+ """
+ start = time()
+ # Execute tests depending on options and filter applied
+ test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, build_report, build_properties = single_test.execute()
+ elapsed_time = time() - start
+
+ # Human readable summary
+ if not single_test.opts_suppress_summary:
+ # prints well-formed summary with results (SQL table like)
+ print single_test.generate_test_summary(test_summary, shuffle_seed)
+ if single_test.opts_test_x_toolchain_summary:
+ # prints well-formed summary with results (SQL table like)
+ # table shows test x toolchain test result matrix
+ print single_test.generate_test_summary_by_target(test_summary, shuffle_seed)
+
+ print "Completed in %.2f sec"% (elapsed_time)
+ print
+ # Write summary of the builds
+
+ print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
+ status = print_report_exporter.report(build_report)
+
+ # Store extra reports in files
+ if single_test.opts_report_html_file_name:
+ # Export results in form of HTML report to separate file
+ report_exporter = ReportExporter(ResultExporterType.HTML)
+ report_exporter.report_to_file(test_summary_ext, single_test.opts_report_html_file_name, test_suite_properties=test_suite_properties_ext)
+ if single_test.opts_report_junit_file_name:
+ # Export results in form of JUnit XML report to separate file
+ report_exporter = ReportExporter(ResultExporterType.JUNIT)
+ report_exporter.report_to_file(test_summary_ext, single_test.opts_report_junit_file_name, test_suite_properties=test_suite_properties_ext)
+ if single_test.opts_report_build_file_name:
+ # Export build results as JUnit XML report to separate file
+ report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
+ report_exporter.report_to_file(build_report, single_test.opts_report_build_file_name, test_suite_properties=build_properties)
+
+ # Returns True if no build failures of the test projects or their dependencies
+ return status
+
+class TestLogger():
+ """ Super-class for logging and printing ongoing events for test suite pass
+ """
+ def __init__(self, store_log=True):
+ """ We can control if logger actually stores log in memory
+ or just handled all log entries immediately
+ """
+ self.log = []
+ self.log_to_file = False
+ self.log_file_name = None
+ self.store_log = store_log
+
+ self.LogType = construct_enum(INFO='Info',
+ WARN='Warning',
+ NOTIF='Notification',
+ ERROR='Error',
+ EXCEPT='Exception')
+
+ self.LogToFileAttr = construct_enum(CREATE=1, # Create or overwrite existing log file
+ APPEND=2) # Append to existing log file
+
+ def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'):
+ """ Log one line of text
+ """
+ log_timestamp = time()
+ log_entry = {'log_type' : LogType,
+ 'log_timestamp' : log_timestamp,
+ 'log_line' : log_line,
+ '_future' : None
+ }
+ # Store log in memory
+ if self.store_log:
+ self.log.append(log_entry)
+ return log_entry
+
+
+class CLITestLogger(TestLogger):
+ """ Logger used with CLI (Command line interface) test suite. Logs on screen and to file if needed
+ """
+ def __init__(self, store_log=True, file_name=None):
+ TestLogger.__init__(self)
+ self.log_file_name = file_name
+ #self.TIMESTAMP_FORMAT = '%y-%m-%d %H:%M:%S' # Full date and time
+ self.TIMESTAMP_FORMAT = '%H:%M:%S' # Time only
+
+ def log_print(self, log_entry, timestamp=True):
+ """ Prints on screen formatted log entry
+ """
+ ts = log_entry['log_timestamp']
+ timestamp_str = datetime.datetime.fromtimestamp(ts).strftime("[%s] "% self.TIMESTAMP_FORMAT) if timestamp else ''
+ log_line_str = "%(log_type)s: %(log_line)s"% (log_entry)
+ return timestamp_str + log_line_str
+
+ def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'):
+ """ Logs line, if log file output was specified log line will be appended
+ at the end of log file
+ """
+ log_entry = TestLogger.log_line(self, LogType, log_line)
+ log_line_str = self.log_print(log_entry, timestamp)
+ if self.log_file_name is not None:
+ try:
+ with open(self.log_file_name, 'a') as f:
+ f.write(log_line_str + line_delim)
+ except IOError:
+ pass
+ return log_line_str
+
+
+def factory_db_logger(db_url):
+ """ Factory database driver depending on database type supplied in database connection string db_url
+ """
+ if db_url is not None:
+ from tools.test_mysql import MySQLDBAccess
+ connection_info = BaseDBAccess().parse_db_connection_string(db_url)
+ if connection_info is not None:
+ (db_type, username, password, host, db_name) = BaseDBAccess().parse_db_connection_string(db_url)
+ if db_type == 'mysql':
+ return MySQLDBAccess()
+ return None
+
+
+def detect_database_verbose(db_url):
+ """ uses verbose mode (prints) database detection sequence to check if database connection string is valid
+ """
+ result = BaseDBAccess().parse_db_connection_string(db_url)
+ if result is not None:
+ # Parsing passed
+ (db_type, username, password, host, db_name) = result
+ #print "DB type '%s', user name '%s', password '%s', host '%s', db name '%s'"% result
+ # Let's try to connect
+ db_ = factory_db_logger(db_url)
+ if db_ is not None:
+ print "Connecting to database '%s'..."% db_url,
+ db_.connect(host, username, password, db_name)
+ if db_.is_connected():
+ print "ok"
+ print "Detecting database..."
+ print db_.detect_database(verbose=True)
+ print "Disconnecting...",
+ db_.disconnect()
+ print "done"
+ else:
+ print "Database type '%s' unknown"% db_type
+ else:
+ print "Parse error: '%s' - DB Url error"% (db_url)
+
+
+def get_module_avail(module_name):
+ """ This function returns True if module_name is an already imported module
+ """
+ return module_name in sys.modules.keys()
+
+
+def get_autodetected_MUTS_list(platform_name_filter=None):
+ oldError = None
+ if os.name == 'nt':
+ # Disable Windows error box temporarily
+ oldError = ctypes.windll.kernel32.SetErrorMode(1) #note that SEM_FAILCRITICALERRORS = 1
+
+ mbeds = mbed_lstools.create()
+ detect_muts_list = mbeds.list_mbeds()
+
+ if os.name == 'nt':
+ ctypes.windll.kernel32.SetErrorMode(oldError)
+
+ return get_autodetected_MUTS(detect_muts_list, platform_name_filter=platform_name_filter)
+
+def get_autodetected_MUTS(mbeds_list, platform_name_filter=None):
+ """ Function detects all mbed-enabled devices connected to the host and generates artificial MUTS file.
+ If function fails to auto-detect devices it will return empty dictionary.
+
+ if get_module_avail('mbed_lstools'):
+ mbeds = mbed_lstools.create()
+ mbeds_list = mbeds.list_mbeds()
+
+ @param mbeds_list list of mbeds captured from mbed_lstools
+ @param platform_name You can filter 'platform_name' with list of filtered targets from 'platform_name_filter'
+ """
+ result = {} # Should be in muts_all.json format
+ # Align mbeds_list from mbed_lstools to MUT file format (JSON dictionary with muts)
+ # mbeds_list = [{'platform_name': 'NUCLEO_F302R8', 'mount_point': 'E:', 'target_id': '07050200623B61125D5EF72A', 'serial_port': u'COM34'}]
+ index = 1
+ for mut in mbeds_list:
+ # Filter the MUTS if a filter is specified
+
+ if platform_name_filter and not mut['platform_name'] in platform_name_filter:
+ continue
+
+ # For mcu_unique - we are assigning 'platform_name_unique' value from mbedls output (if its existing)
+ # if not we are creating our own unique value (last few chars from platform's target_id).
+ m = {'mcu': mut['platform_name'],
+ 'mcu_unique' : mut['platform_name_unique'] if 'platform_name_unique' in mut else "%s[%s]" % (mut['platform_name'], mut['target_id'][-4:]),
+ 'port': mut['serial_port'],
+ 'disk': mut['mount_point'],
+ 'peripherals': [] # No peripheral detection
+ }
+ if index not in result:
+ result[index] = {}
+ result[index] = m
+ index += 1
+ return result
+
+
+def get_autodetected_TEST_SPEC(mbeds_list,
+ use_default_toolchain=True,
+ use_supported_toolchains=False,
+ toolchain_filter=None,
+ platform_name_filter=None):
+ """ Function detects all mbed-enabled devices connected to the host and generates artificial test_spec file.
+ If function fails to auto-detect devices it will return empty 'targets' test_spec description.
+
+ use_default_toolchain - if True add default toolchain to test_spec
+ use_supported_toolchains - if True add all supported toolchains to test_spec
+ toolchain_filter - if [...list of toolchains...] add from all toolchains only those in filter to test_spec
+ """
+ result = {'targets': {} }
+
+ for mut in mbeds_list:
+ mcu = mut['mcu']
+ if platform_name_filter is None or (platform_name_filter and mut['mcu'] in platform_name_filter):
+ if mcu in TARGET_MAP:
+ default_toolchain = TARGET_MAP[mcu].default_toolchain
+ supported_toolchains = TARGET_MAP[mcu].supported_toolchains
+
+ # Decide which toolchains should be added to test specification toolchain pool for each target
+ toolchains = []
+ if use_default_toolchain:
+ toolchains.append(default_toolchain)
+ if use_supported_toolchains:
+ toolchains += supported_toolchains
+ if toolchain_filter is not None:
+ all_toolchains = supported_toolchains + [default_toolchain]
+ for toolchain in toolchain_filter.split(','):
+ if toolchain in all_toolchains:
+ toolchains.append(toolchain)
+
+ result['targets'][mcu] = list(set(toolchains))
+ return result
+
+
def get_default_test_options_parser():
    """ Builds the optparse.OptionParser with the common test script options
        shared by the CLI, web services etc.

        @return configured optparse.OptionParser instance

        Fixes applied: several user-facing help strings contained typos
        ("perpherials", "rebuilded", "wellformatted", "configration", "buid",
        "enumerated it this switch") and awkward grammar; corrected below.
    """
    parser = optparse.OptionParser()

    # --- Input specification files ---
    parser.add_option('-i', '--tests',
                      dest='test_spec_filename',
                      metavar="FILE",
                      help='Points to file with test specification')

    parser.add_option('-M', '--MUTS',
                      dest='muts_spec_filename',
                      metavar="FILE",
                      help='Points to file with MUTs specification (overwrites settings.py and private_settings.py)')

    parser.add_option("-j", "--jobs",
                      dest='jobs',
                      metavar="NUMBER",
                      type="int",
                      help="Define number of compilation jobs. Default value is 1")

    if get_module_avail('mbed_lstools'):
        # Additional features available when mbed_lstools is installed on host and imported
        # mbed_lstools allow users to detect connected to host mbed-enabled devices
        parser.add_option('', '--auto',
                          dest='auto_detect',
                          metavar=False,
                          action="store_true",
                          help='Use mbed-ls module to detect all connected mbed devices')

        parser.add_option('', '--tc',
                          dest='toolchains_filter',
                          help="Toolchain filter for --auto option. Use toolchains names separated by comma, 'default' or 'all' to select toolchains")

        test_scopes = ','.join(["'%s'" % n for n in get_available_oper_test_scopes()])
        parser.add_option('', '--oper',
                          dest='operability_checks',
                          help='Perform interoperability tests between host and connected mbed devices. Available test scopes are: %s' % test_scopes)

    parser.add_option('', '--clean',
                      dest='clean',
                      metavar=False,
                      action="store_true",
                      help='Clean the build directory')

    # --- Test selection / filtering ---
    parser.add_option('-P', '--only-peripherals',
                      dest='test_only_peripheral',
                      default=False,
                      action="store_true",
                      help='Test only peripheral declared for MUT and skip common tests')

    parser.add_option('-C', '--only-commons',
                      dest='test_only_common',
                      default=False,
                      action="store_true",
                      help='Test only board internals. Skip peripherals tests and perform common tests')

    parser.add_option('-n', '--test-by-names',
                      dest='test_by_names',
                      help='Runs only tests enumerated in this switch. Use comma to separate test case names')

    parser.add_option('-p', '--peripheral-by-names',
                      dest='peripheral_by_names',
                      help='Forces discovery of particular peripherals. Use comma to separate peripheral names')

    # --- Flash / reset plugin selection ---
    copy_methods = host_tests_plugins.get_plugin_caps('CopyMethod')
    copy_methods_str = "Plugin support: " + ', '.join(copy_methods)

    parser.add_option('-c', '--copy-method',
                      dest='copy_method',
                      help="Select binary copy (flash) method. Default is Python's shutil.copy() method. %s"% copy_methods_str)

    reset_methods = host_tests_plugins.get_plugin_caps('ResetMethod')
    reset_methods_str = "Plugin support: " + ', '.join(reset_methods)

    parser.add_option('-r', '--reset-type',
                      dest='mut_reset_type',
                      default=None,
                      help='Extra reset method used to reset MUT by host test script. %s'% reset_methods_str)

    parser.add_option('-g', '--goanna-for-tests',
                      dest='goanna_for_tests',
                      metavar=False,
                      action="store_true",
                      help='Run Goanna static analysis tool for tests. (Project will be rebuilt)')

    parser.add_option('-G', '--goanna-for-sdk',
                      dest='goanna_for_mbed_sdk',
                      metavar=False,
                      action="store_true",
                      help='Run Goanna static analysis tool for mbed SDK (Project will be rebuilt)')

    # --- Reporting / summary ---
    parser.add_option('-s', '--suppress-summary',
                      dest='suppress_summary',
                      default=False,
                      action="store_true",
                      help='Suppresses display of well-formatted table with test results')

    parser.add_option('-t', '--test-summary',
                      dest='test_x_toolchain_summary',
                      default=False,
                      action="store_true",
                      help='Displays well-formatted table with test x toolchain test result per target')

    parser.add_option('-A', '--test-automation-report',
                      dest='test_automation_report',
                      default=False,
                      action="store_true",
                      help='Prints information about all tests and exits')

    parser.add_option('-R', '--test-case-report',
                      dest='test_case_report',
                      default=False,
                      action="store_true",
                      help='Prints information about all test cases and exits')

    parser.add_option("-S", "--supported-toolchains",
                      action="store_true",
                      dest="supported_toolchains",
                      default=False,
                      help="Displays supported matrix of MCUs and toolchains")

    parser.add_option("-O", "--only-build",
                      action="store_true",
                      dest="only_build_tests",
                      default=False,
                      help="Only build tests, skips actual test procedures (flashing etc.)")

    parser.add_option('', '--parallel',
                      dest='parallel_test_exec',
                      default=False,
                      action="store_true",
                      help='Experimental: execute test runners for MUTs connected to your host in parallel (speeds up test result collection)')

    parser.add_option('', '--config',
                      dest='verbose_test_configuration_only',
                      default=False,
                      action="store_true",
                      help='Displays full test specification and MUTs configuration and exits')

    # --- Test loop / execution-order control ---
    parser.add_option('', '--loops',
                      dest='test_loops_list',
                      help='Set no. of loops per test. Format: TEST_1=1,TEST_2=2,TEST_3=3')

    parser.add_option('', '--global-loops',
                      dest='test_global_loops_value',
                      help='Set global number of test loops per test. Default value is 1')

    parser.add_option('', '--consolidate-waterfall',
                      dest='consolidate_waterfall_test',
                      default=False,
                      action="store_true",
                      help='Used with --waterfall option. Adds only one test to report reflecting outcome of waterfall test.')

    parser.add_option('-W', '--waterfall',
                      dest='waterfall_test',
                      default=False,
                      action="store_true",
                      help='Used with --loops or --global-loops options. Tests until OK result occurs and assumes test passed')

    parser.add_option('-N', '--firmware-name',
                      dest='firmware_global_name',
                      help='Set global name for all produced projects. Note, proper file extension will be added by build scripts')

    parser.add_option('-u', '--shuffle',
                      dest='shuffle_test_order',
                      default=False,
                      action="store_true",
                      help='Shuffles test execution order')

    parser.add_option('', '--shuffle-seed',
                      dest='shuffle_test_seed',
                      default=None,
                      help='Shuffle seed (If you want to reproduce your shuffle order please use seed provided in test summary)')

    parser.add_option('-f', '--filter',
                      dest='general_filter_regex',
                      default=None,
                      help='For some commands you can use filter to filter out results')

    parser.add_option('', '--inc-timeout',
                      dest='extend_test_timeout',
                      metavar="NUMBER",
                      type="int",
                      help='You can increase global timeout for each test by specifying additional test timeout in seconds')

    # --- Persistence / logging / reports ---
    parser.add_option('', '--db',
                      dest='db_url',
                      help='This specifies what database test suite uses to store its state. To pass DB connection info use database connection string. Example: \'mysql://username:password@127.0.0.1/db_name\'')

    parser.add_option('-l', '--log',
                      dest='log_file_name',
                      help='Log events to external file (note not all console entries may be visible in log file)')

    parser.add_option('', '--report-html',
                      dest='report_html_file_name',
                      help='You can log test suite results in form of HTML report')

    parser.add_option('', '--report-junit',
                      dest='report_junit_file_name',
                      help='You can log test suite results in form of JUnit compliant XML report')

    parser.add_option("", "--report-build",
                      dest="report_build_file_name",
                      help="Output the build results to a junit xml file")

    # --- Verbosity ---
    parser.add_option('', '--verbose-skipped',
                      dest='verbose_skipped_tests',
                      default=False,
                      action="store_true",
                      help='Prints some extra information about skipped tests')

    parser.add_option('-V', '--verbose-test-result',
                      dest='verbose_test_result_only',
                      default=False,
                      action="store_true",
                      help='Prints test serial output')

    parser.add_option('-v', '--verbose',
                      dest='verbose',
                      default=False,
                      action="store_true",
                      help='Verbose mode (prints some extra information)')

    parser.add_option('', '--version',
                      dest='version',
                      default=False,
                      action="store_true",
                      help='Prints script version and exits')
    return parser
diff --git a/tools/test_db.py b/tools/test_db.py
new file mode 100644
index 0000000..2ec301a
--- /dev/null
+++ b/tools/test_db.py
@@ -0,0 +1,165 @@
+"""
+mbed SDK
+Copyright (c) 2011-2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+"""
+
+import re
+import json
+
+
class BaseDBAccess():
    """ Abstract interface used to connect with a test database and store
        test results.

        Concrete drivers (e.g. the MySQL back-end) derive from this class and
        implement the connection / query methods below; the base class only
        carries the shared DB schema constants and connection-string parsing.
    """
    def __init__(self):
        # Active DB driver object and its type name (None while disconnected)
        self.db_object = None
        self.db_type = None
        # Connection credentials
        self.host = None
        self.user = None
        self.passwd = None
        self.db = None

        # Test Suite DB scheme (table names)
        self.TABLE_BUILD_ID = 'mtest_build_id'
        self.TABLE_BUILD_ID_STATUS = 'mtest_build_id_status'
        self.TABLE_BUILD_ID_TYPE = 'mtest_build_id_type'
        self.TABLE_TARGET = 'mtest_target'
        self.TABLE_TEST_ENTRY = 'mtest_test_entry'
        self.TABLE_TEST_ID = 'mtest_test_id'
        self.TABLE_TEST_RESULT = 'mtest_test_result'
        self.TABLE_TEST_TYPE = 'mtest_test_type'
        self.TABLE_TOOLCHAIN = 'mtest_toolchain'

        # Build ID status primary keys
        self.BUILD_ID_STATUS_STARTED = 1      # Started
        self.BUILD_ID_STATUS_IN_PROGRESS = 2  # In Progress
        self.BUILD_ID_STATUS_COMPLETED = 3    # Completed
        self.BUILD_ID_STATUS_FAILED = 4       # Failed
        # Build ID type primary keys
        self.BUILD_ID_TYPE_TEST = 1           # Test
        self.BUILD_ID_TYPE_BUILD_ONLY = 2     # Build Only

    def get_hostname(self):
        """ Returns the (hostname, uname) pair describing this machine.
            Handy as (build_id_name, build_id_desc) when creating a build_id.
        """
        import socket
        import platform
        hostname = socket.gethostbyaddr(socket.gethostname())[0]
        uname = json.dumps(platform.uname())
        return (hostname, uname)

    def get_db_type(self):
        """ Returns database type tag, e.g. 'mysql', 'sqlLite' etc.
            (None for the unconnected base class).
        """
        return self.db_type

    def detect_database(self, verbose=False):
        """ Probes the database and returns its VERSION data structure, or a
            string when verbose=True. The base implementation detects nothing
            and returns None.
        """
        return None

    def parse_db_connection_string(self, str):
        """ Splits an SQL DB connection string into its components.

            Expected form: '<db_type>://<user>:<password>@<host>/<db_name>',
            e.g. 'mysql://username:password@127.0.0.1/db_name'.

            @return tuple (db_type, username, password, host, db_name), or
                    None when the argument is not a string or does not match.
        """
        if type(str) != type(''):
            return None
        PATTERN = '^([\w]+)://([\w]+):([\w]*)@(.*)/([\w]+)'
        match = re.match(PATTERN, str)
        if match is None:
            return None
        # Groups are (db_type, username, password, host, db_name)
        return match.groups()

    def is_connected(self):
        """ Returns True if we are connected to the database. """
        pass

    def connect(self, host, user, passwd, db):
        """ Connects to the DB and returns the DB object. """
        pass

    def connect_url(self, db_url):
        """ Connects to the database using db_url (database url parsing) and
            stores host, username, password, db_name.
        """
        pass

    def reconnect(self):
        """ Reconnects to the DB using the stored host name, database name and
            credentials (user name and password), returning the DB object.
        """
        pass

    def disconnect(self):
        """ Closes the DB connection. """
        pass

    def escape_string(self, str):
        """ Escapes a string so it can be put in an SQL query between quotes. """
        pass

    def select_all(self, query):
        """ Executes a SELECT query and returns all results. """
        pass

    def insert(self, query, commit=True):
        """ Executes an INSERT query; commit controls transaction commit. """
        pass

    def get_next_build_id(self, name, desc='', location='', type=None, status=None):
        """ Inserts a new build_id (DB unique build-like ID number used to tie
            together all test results of one run).
        """
        pass

    def get_table_entry_pk(self, table, column, value, update_db=True):
        """ Checks for entries in tables with two columns (<table>_pk, <column>).
            If update_db is True, adds the entry when the value is missing.
        """
        pass

    def update_table_entry(self, table, column, value):
        """ Adds a table entry if the value in the specified column doesn't
            exist. Implementations lock the table for an atomic read + update.
        """
        pass

    def update_build_id_info(self, build_id, **kw):
        """ Updates additional data inside the build_id table, e.g.:
            db.update_build_id_info(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789)
        """
        pass

    def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_time, test_timeout, test_loop, test_extra=''):
        """ Inserts a test result entry into the database. All checks regarding
            existing toolchain names in the DB are performed; missing lookup
            data is added on the fly.

            NOTE(review): the MySQL subclass overrides this with an extra
            'test_output' parameter before 'test_time' — the two signatures
            disagree; confirm which one callers use.
        """
        pass
diff --git a/tools/test_exporters.py b/tools/test_exporters.py
new file mode 100644
index 0000000..16c5e47
--- /dev/null
+++ b/tools/test_exporters.py
@@ -0,0 +1,342 @@
+"""
+mbed SDK
+Copyright (c) 2011-2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+"""
+
+from tools.utils import construct_enum
+
+
# Enumeration of the supported result exporter back-ends; construct_enum()
# builds a simple enum-like object from the given NAME='value' pairs.
ResultExporterType = construct_enum(HTML='Html_Exporter',
                                    JUNIT='JUnit_Exporter',
                                    JUNIT_OPER='JUnit_Exporter_Interoperability',
                                    BUILD='Build_Exporter',
                                    PRINT='Print_Exporter')
+
+
+class ReportExporter():
+ """ Class exports extended test result Python data structure to
+ different formats like HTML, JUnit XML.
+
+ Parameter 'test_result_ext' format:
+
+ u'uARM': { u'LPC1768': { 'MBED_2': { 0: { 'copy_method': 'shutils.copy()',
+ 'duration': 20,
+ 'elapsed_time': 1.7929999828338623,
+ 'output': 'Host test instrumentation on ...\r\n',
+ 'result': 'OK',
+ 'target_name': u'LPC1768',
+ 'description': 'stdio',
+ 'id': u'MBED_2',
+ 'toolchain_name': u'uARM'}},
+ """
+ CSS_STYLE = """
+ """
+
+ JAVASCRIPT = """
+
+ """
+
    def __init__(self, result_exporter_type, package="test"):
        """ @param result_exporter_type one of the ResultExporterType values
            @param package prefix used for JUnit classnames (default "test")
        """
        self.result_exporter_type = result_exporter_type
        self.package = package
+
+ def report(self, test_summary_ext, test_suite_properties=None):
+ """ Invokes report depending on exporter_type set in constructor
+ """
+ if self.result_exporter_type == ResultExporterType.HTML:
+ # HTML exporter
+ return self.exporter_html(test_summary_ext, test_suite_properties)
+ elif self.result_exporter_type == ResultExporterType.JUNIT:
+ # JUNIT exporter for results from test suite
+ return self.exporter_junit(test_summary_ext, test_suite_properties)
+ elif self.result_exporter_type == ResultExporterType.JUNIT_OPER:
+ # JUNIT exporter for interoperability test
+ return self.exporter_junit_ioper(test_summary_ext, test_suite_properties)
+ elif self.result_exporter_type == ResultExporterType.PRINT:
+ # JUNIT exporter for interoperability test
+ return self.exporter_print(test_summary_ext)
+ return None
+
    def report_to_file(self, test_summary_ext, file_name, test_suite_properties=None):
        """ Generates the report (see report()) and stores it under file_name.
            A None report (unknown exporter type) results in no file write.
        """
        report = self.report(test_summary_ext, test_suite_properties=test_suite_properties)
        self.write_to_file(report, file_name)
+
+ def write_to_file(self, report, file_name):
+ if report is not None:
+ with open(file_name, 'w') as f:
+ f.write(report)
+
+ def get_tooltip_name(self, toolchain, target, test_id, loop_no):
+ """ Generate simple unique tool-tip name which can be used.
+ For example as HTML
+ """% (result_div_style,
+ tooltip_name,
+ tooltip_name,
+ test['result'],
+ tooltip_name,
+ test['target_name_unique'],
+ test['description'],
+ test['elapsed_time'],
+ test['output'].replace('\n', ' '))
+ return result
+
+ def get_result_tree(self, test_results):
+ """ If test was run in a loop (we got few results from the same test)
+ we will show it in a column to see all results.
+ This function produces HTML table with corresponding results.
+ """
+ result = ''
+ for i, test_result in enumerate(test_results):
+ result += '
'
+ test_ids = sorted(test_result.keys())
+ for test_no in test_ids:
+ test = test_result[test_no]
+ result += """
+
%s
+
"""% self.get_result_div_sections(test, "%d_%d" % (test_no, i))
+ result += '
'
+ return result
+
+ def get_all_unique_test_ids(self, test_result_ext):
+ """ Gets all unique test ids from all ran tests.
+ We need this to create complete list of all test ran.
+ """
+ result = []
+ targets = test_result_ext.keys()
+ for target in targets:
+ toolchains = test_result_ext[target].keys()
+ for toolchain in toolchains:
+ tests = test_result_ext[target][toolchain].keys()
+ result.extend(tests)
+ return sorted(list(set(result)))
+
+ #
+ # Exporters functions
+ #
+
+ def exporter_html(self, test_result_ext, test_suite_properties=None):
+ """ Export test results in proprietary HTML format.
+ """
+ result = """
+
+ mbed SDK test suite test result report
+ %s
+ %s
+
+
+ """% (self.CSS_STYLE, self.JAVASCRIPT)
+
+ unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
+ targets = sorted(test_result_ext.keys())
+ result += '
'
+ for target in targets:
+ toolchains = sorted(test_result_ext[target].keys())
+ for toolchain in toolchains:
+ result += '
'
+ result += '
'
+
+ tests = sorted(test_result_ext[target][toolchain].keys())
+ for test in unique_test_ids:
+ result += """
%s
"""% test
+ result += """
+
+
%s
+
%s
+ """% (toolchain, target)
+
+ for test in unique_test_ids:
+ test_result = self.get_result_tree(test_result_ext[target][toolchain][test]) if test in tests else ''
+ result += '
%s
'% (test_result)
+
+ result += '
'
+ result += '
'
+ result += ''
+ return result
+
    def exporter_junit_ioper(self, test_result_ext, test_suite_properties=None):
        """ Exports interoperability test results as a JUnit XML string.
            @param test_result_ext dictionary {platform: [(result, name, scope, description), ...]}
            @param test_suite_properties unused here; kept for exporter interface symmetry
        """
        from junit_xml import TestSuite, TestCase
        test_suites = []
        test_cases = []

        for platform in sorted(test_result_ext.keys()):
            # {platform : ['Platform', 'Result', 'Scope', 'Description'])
            test_cases = []
            for tr_result in test_result_ext[platform]:
                result, name, scope, description = tr_result

                classname = 'test.ioper.%s.%s.%s' % (platform, name, scope)
                elapsed_sec = 0
                _stdout = description
                _stderr = ''
                # Test case
                tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
                # Test case extra failure / error info
                if result == 'FAIL':
                    tc.add_failure_info(description, _stdout)
                elif result == 'ERROR':
                    tc.add_error_info(description, _stdout)
                elif result == 'SKIP' or result == 'NOT_SUPPORTED':
                    tc.add_skipped_info(description, _stdout)

                test_cases.append(tc)
            # One JUnit test suite per detected platform
            ts = TestSuite("test.suite.ioper.%s" % (platform), test_cases)
            test_suites.append(ts)
        return TestSuite.to_xml_string(test_suites)
+
+ def exporter_junit(self, test_result_ext, test_suite_properties=None):
+ """ Export test results in JUnit XML compliant format
+ """
+ from junit_xml import TestSuite, TestCase
+ test_suites = []
+ test_cases = []
+
+ targets = sorted(test_result_ext.keys())
+ for target in targets:
+ toolchains = sorted(test_result_ext[target].keys())
+ for toolchain in toolchains:
+ test_cases = []
+ tests = sorted(test_result_ext[target][toolchain].keys())
+ for test in tests:
+ test_results = test_result_ext[target][toolchain][test]
+ for test_res in test_results:
+ test_ids = sorted(test_res.keys())
+ for test_no in test_ids:
+ test_result = test_res[test_no]
+ name = test_result['description']
+ classname = '%s.%s.%s.%s'% (self.package, target, toolchain, test_result['id'])
+ elapsed_sec = test_result['elapsed_time']
+ _stdout = test_result['output']
+
+ if 'target_name_unique' in test_result:
+ _stderr = test_result['target_name_unique']
+ else:
+ _stderr = test_result['target_name']
+
+ # Test case
+ tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
+
+ # Test case extra failure / error info
+ message = test_result['result']
+ if test_result['result'] == 'FAIL':
+ tc.add_failure_info(message, _stdout)
+ elif test_result['result'] == 'SKIP' or test_result["result"] == 'NOT_SUPPORTED':
+ tc.add_skipped_info(message, _stdout)
+ elif test_result['result'] != 'OK':
+ tc.add_error_info(message, _stdout)
+
+ test_cases.append(tc)
+
+ ts = TestSuite("test.suite.%s.%s"% (target, toolchain), test_cases, properties=test_suite_properties[target][toolchain])
+ test_suites.append(ts)
+ return TestSuite.to_xml_string(test_suites)
+
    def exporter_print_helper(self, array):
        # Prints one line per test run: " * <target>::<toolchain>::<test id>"
        for item in array:
            print " * %s::%s::%s" % (item["target_name"], item["toolchain_name"], item["id"])
+
    def exporter_print(self, test_result_ext):
        """ Export test results in print format.
            Prints success / skip / failure summaries to stdout.
            @return True when there were no failures, False otherwise
            @raise Exception on a result string outside FAIL/SKIP/NOT_SUPPORTED/OK
        """
        failures = []
        skips = []
        successes = []

        # NOTE(review): unique_test_ids is computed but never used here
        unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
        targets = sorted(test_result_ext.keys())

        for target in targets:
            toolchains = sorted(test_result_ext[target].keys())
            for toolchain in toolchains:
                tests = sorted(test_result_ext[target][toolchain].keys())
                for test in tests:
                    test_runs = test_result_ext[target][toolchain][test]
                    for test_runner in test_runs:
                        #test_run = test_result_ext[target][toolchain][test][test_run_number][0]
                        # Only the first entry (run number 0) of each runner is bucketed
                        test_run = test_runner[0]

                        if test_run["result"] == "FAIL":
                            failures.append(test_run)
                        elif test_run["result"] == "SKIP" or test_run["result"] == "NOT_SUPPORTED":
                            skips.append(test_run)
                        elif test_run["result"] == "OK":
                            successes.append(test_run)
                        else:
                            raise Exception("Unhandled result type: %s" % (test_run["result"]))

        if successes:
            print "\n\nBuild successes:"
            self.exporter_print_helper(successes)

        if skips:
            print "\n\nBuild skips:"
            self.exporter_print_helper(skips)

        if failures:
            print "\n\nBuild failures:"
            self.exporter_print_helper(failures)
            return False
        else:
            return True
diff --git a/tools/test_mysql.py b/tools/test_mysql.py
new file mode 100644
index 0000000..4f00ab6
--- /dev/null
+++ b/tools/test_mysql.py
@@ -0,0 +1,271 @@
+"""
+mbed SDK
+Copyright (c) 2011-2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+"""
+
+import re
+import MySQLdb as mdb
+
+# Imports from TEST API
+from tools.test_db import BaseDBAccess
+
+
class MySQLDBAccess(BaseDBAccess):
    """ Wrapper for MySQL DB access for common test suite interface
        (implements the abstract methods declared in BaseDBAccess using
        the MySQLdb driver).
    """
    def __init__(self):
        BaseDBAccess.__init__(self)
        # Database type tag used to validate connection strings
        self.DB_TYPE = 'mysql'

    def detect_database(self, verbose=False):
        """ detect database and return VERSION data structure or string (verbose=True)
        """
        query = 'SHOW VARIABLES LIKE "%version%"'
        rows = self.select_all(query)
        if verbose:
            # Render "\t<name>: <value>" lines joined with newlines
            result = []
            for row in rows:
                result.append("\t%s: %s"% (row['Variable_name'], row['Value']))
            result = "\n".join(result)
        else:
            result = rows
        return result

    def parse_db_connection_string(self, str):
        """ Parsing SQL DB connection string. String should contain:
            - DB Name, user name, password, URL (DB host), name
            Function should return tuple with parsed (db_type, username, password, host, db_name) or None if error
            E.g. connection string: 'mysql://username:password@127.0.0.1/db_name'
        """
        result = BaseDBAccess().parse_db_connection_string(str)
        if result is not None:
            (db_type, username, password, host, db_name) = result
            # Reject connection strings addressed to other database types
            if db_type != 'mysql':
                result = None
        return result

    def is_connected(self):
        """ Returns True if we are connected to database
        """
        return self.db_object is not None

    def connect(self, host, user, passwd, db):
        """ Connects to DB and returns DB object
        """
        try:
            self.db_object = mdb.connect(host=host, user=user, passwd=passwd, db=db)
            # Let's remember connection credentials
            self.db_type = self.DB_TYPE
            self.host = host
            self.user = user
            self.passwd = passwd
            self.db = db
        except mdb.Error, e:
            print "Error %d: %s"% (e.args[0], e.args[1])
            # Reset all connection state so is_connected() reports failure
            self.db_object = None
            self.db_type = None
            self.host = None
            self.user = None
            self.passwd = None
            self.db = None

    def connect_url(self, db_url):
        """ Connects to database using db_url (database url parsing),
            store host, username, password, db_name
        """
        result = self.parse_db_connection_string(db_url)
        if result is not None:
            (db_type, username, password, host, db_name) = result
            if db_type == self.DB_TYPE:
                self.connect(host, username, password, db_name)

    def reconnect(self):
        """ Reconnects to DB and returns DB object using stored host name,
            database name and credentials (user name and password)
        """
        self.connect(self.host, self.user, self.passwd, self.db)

    def disconnect(self):
        """ Close DB connection
        """
        if self.db_object:
            self.db_object.close()
        self.db_object = None
        self.db_type = None

    def escape_string(self, str):
        """ Escapes string so it can be put in SQL query between quotes
        """
        con = self.db_object
        result = con.escape_string(str)
        return result if result else ''

    def select_all(self, query):
        """ Execute SELECT query and get all results
        """
        con = self.db_object
        # DictCursor returns rows as {column_name: value} dictionaries
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute(query)
        rows = cur.fetchall()
        return rows

    def insert(self, query, commit=True):
        """ Execute INSERT query, define if you want to commit
        """
        con = self.db_object
        cur = con.cursor()
        cur.execute(query)
        if commit:
            con.commit()
        # lastrowid is the auto-increment PK of the inserted record
        return cur.lastrowid

    def get_next_build_id(self, name, desc='', location='', type=None, status=None):
        """ Insert new build_id (DB unique build like ID number to send all test results)
        """
        if status is None:
            status = self.BUILD_ID_STATUS_STARTED

        if type is None:
            type = self.BUILD_ID_TYPE_TEST

        # NOTE(review): SQL assembled via string interpolation; values pass
        # through escape_string() but parameterized queries would be safer.
        query = """INSERT INTO `%s` (%s_name, %s_desc, %s_location, %s_type_fk, %s_status_fk)
                        VALUES ('%s', '%s', '%s', %d, %d)"""% (self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.TABLE_BUILD_ID,
                                                               self.escape_string(name),
                                                               self.escape_string(desc),
                                                               self.escape_string(location),
                                                               type,
                                                               status)
        index = self.insert(query) # Provide inserted record PK
        return index

    def get_table_entry_pk(self, table, column, value, update_db=True):
        """ Checks for entries in tables with two columns (<table>_pk, <column>)
            If update_db is True updates table entry if value in specified column doesn't exist
        """
        # TODO: table buffering
        result = None
        table_pk = '%s_pk'% table
        query = """SELECT `%s`
                     FROM `%s`
                    WHERE `%s`='%s'"""% (table_pk,
                                         table,
                                         column,
                                         self.escape_string(value))
        rows = self.select_all(query)
        if len(rows) == 1:
            result = rows[0][table_pk]
        elif len(rows) == 0 and update_db:
            # Update DB with new value
            result = self.update_table_entry(table, column, value)
        return result

    def update_table_entry(self, table, column, value):
        """ Updates table entry if value in specified column doesn't exist
            Locks table to perform atomic read + update
        """
        result = None
        con = self.db_object
        cur = con.cursor()
        cur.execute("LOCK TABLES `%s` WRITE"% table)
        table_pk = '%s_pk'% table
        query = """SELECT `%s`
                     FROM `%s`
                    WHERE `%s`='%s'"""% (table_pk,
                                         table,
                                         column,
                                         self.escape_string(value))
        cur.execute(query)
        rows = cur.fetchall()
        if len(rows) == 0:
            # Value is genuinely new - insert it and report its new PK
            query = """INSERT INTO `%s` (%s)
                            VALUES ('%s')"""% (table,
                                               column,
                                               self.escape_string(value))
            cur.execute(query)
            result = cur.lastrowid
        con.commit()
        cur.execute("UNLOCK TABLES")
        return result

    def update_build_id_info(self, build_id, **kw):
        """ Update additional data inside build_id table
            Examples:
            db.update_build_id_info(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789):
        """
        if len(kw):
            con = self.db_object
            cur = con.cursor()
            # Prepare UPDATE query
            # ["`mtest_build_id_pk`=[value-1]", "`mtest_build_id_name`=[value-2]", "`mtest_build_id_desc`=[value-3]"]
            set_list = []
            for col_sufix in kw:
                assign_str = "`%s%s`='%s'"% (self.TABLE_BUILD_ID, col_sufix, self.escape_string(str(kw[col_sufix])))
                set_list.append(assign_str)
            set_str = ', '.join(set_list)
            query = """UPDATE `%s`
                          SET %s
                        WHERE `mtest_build_id_pk`=%d"""% (self.TABLE_BUILD_ID,
                                                          set_str,
                                                          build_id)
            cur.execute(query)
            con.commit()

    def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_output, test_time, test_timeout, test_loop, test_extra=''):
        """ Inserts test result entry to database. All checks regarding existing
            toolchain names in DB are performed.
            If some data is missing DB will be updated
        """
        # Get all table FK and if entry is new try to insert new value
        target_fk = self.get_table_entry_pk(self.TABLE_TARGET, self.TABLE_TARGET + '_name', target)
        toolchain_fk = self.get_table_entry_pk(self.TABLE_TOOLCHAIN, self.TABLE_TOOLCHAIN + '_name', toolchain)
        test_type_fk = self.get_table_entry_pk(self.TABLE_TEST_TYPE, self.TABLE_TEST_TYPE + '_name', test_type)
        test_id_fk = self.get_table_entry_pk(self.TABLE_TEST_ID, self.TABLE_TEST_ID + '_name', test_id)
        test_result_fk = self.get_table_entry_pk(self.TABLE_TEST_RESULT, self.TABLE_TEST_RESULT + '_name', test_result)

        con = self.db_object
        cur = con.cursor()

        query = """ INSERT INTO `%s` (`mtest_build_id_fk`,
                                      `mtest_target_fk`,
                                      `mtest_toolchain_fk`,
                                      `mtest_test_type_fk`,
                                      `mtest_test_id_fk`,
                                      `mtest_test_result_fk`,
                                      `mtest_test_output`,
                                      `mtest_test_time`,
                                      `mtest_test_timeout`,
                                      `mtest_test_loop_no`,
                                      `mtest_test_result_extra`)
                         VALUES (%d, %d, %d, %d, %d, %d, '%s', %.2f, %.2f, %d, '%s')"""% (self.TABLE_TEST_ENTRY,
                                                                                          build_id,
                                                                                          target_fk,
                                                                                          toolchain_fk,
                                                                                          test_type_fk,
                                                                                          test_id_fk,
                                                                                          test_result_fk,
                                                                                          self.escape_string(test_output),
                                                                                          test_time,
                                                                                          test_timeout,
                                                                                          test_loop,
                                                                                          self.escape_string(test_extra))
        cur.execute(query)
        con.commit()
diff --git a/tools/test_webapi.py b/tools/test_webapi.py
new file mode 100644
index 0000000..ffed0e4
--- /dev/null
+++ b/tools/test_webapi.py
@@ -0,0 +1,242 @@
+"""
+mbed SDK
+Copyright (c) 2011-2014 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Author: Przemyslaw Wirkus
+"""
+
+import sys, thread  # thread.allocate_lock() is used by SingleTestRunnerWebService
+import json
+import optparse
+from flask import Flask
+from os.path import join, abspath, dirname
+
+# Be sure that the tools directory is in the search path
+ROOT = abspath(join(dirname(__file__), ".."))
+sys.path.insert(0, ROOT)
+
+# Imports related to mbed build api
+from tools.utils import construct_enum
+from tools.build_api import mcu_toolchain_matrix
+
+# Imports from TEST API
+from test_api import SingleTestRunner
+from test_api import SingleTestExecutor
+from test_api import get_json_data_from_file
+from test_api import print_muts_configuration_from_json
+from test_api import print_test_configuration_from_json
+from test_api import get_avail_tests_summary_table
+from test_api import get_default_test_options_parser
+
+
+class SingleTestRunnerWebService(SingleTestRunner):
+ def __init__(self):
+ super(SingleTestRunnerWebService, self).__init__()
+
+ # With this lock we should control access to certain resources inside this class
+ self.resource_lock = thread.allocate_lock()
+
+ self.RestRequest = construct_enum(REST_MUTS='muts',
+ REST_TEST_SPEC='test_spec',
+ REST_TEST_RESULTS='test_results')
+
+ def get_rest_result_template(self, result, command, success_code):
+ """ Returns common part of every web service request
+ """
+ result = {"result" : result,
+ "command" : command,
+                  "success_code": success_code} # 0 - OK, non-zero - error number
+ return result
+
+ # REST API handlers for Flask framework
+ def rest_api_status(self):
+ """ Returns current test execution status. E.g. running / finished etc.
+ """
+ with self.resource_lock:
+ pass
+
+ def rest_api_config(self):
+ """ Returns configuration passed to SingleTest executor
+ """
+ with self.resource_lock:
+ pass
+
+ def rest_api_log(self):
+ """ Returns current test log """
+ with self.resource_lock:
+ pass
+
+ def rest_api_request_handler(self, request_type):
+ """ Returns various data structures. Both static and mutable during test
+ """
+ result = {}
+ success_code = 0
+ with self.resource_lock:
+ if request_type == self.RestRequest.REST_MUTS:
+ result = self.muts # Returns MUTs
+ elif request_type == self.RestRequest.REST_TEST_SPEC:
+ result = self.test_spec # Returns Test Specification
+ elif request_type == self.RestRequest.REST_TEST_RESULTS:
+ pass # Returns test results
+ else:
+ success_code = -1
+ return json.dumps(self.get_rest_result_template(result, 'request/' + request_type, success_code), indent=4)
+
+
+def singletest_in_webservice_mode():
+ # TODO Implement this web service functionality
+ pass
+
+
+def get_default_test_webservice_options_parser():
+ """ Get test script web service options used by CLI, webservices etc.
+ """
+ parser = get_default_test_options_parser()
+
+ # Things related to web services offered by test suite scripts
+ parser.add_option('', '--rest-api',
+ dest='rest_api_enabled',
+ default=False,
+ action="store_true",
+ help='Enables REST API.')
+
+ parser.add_option('', '--rest-api-port',
+ dest='rest_api_port_no',
+ help='Sets port for REST API interface')
+
+ return parser
+
+'''
+if __name__ == '__main__':
+ # Command line options
+ parser = get_default_test_options_parser()
+
+ parser.description = """This script allows you to run mbed defined test cases for particular MCU(s) and corresponding toolchain(s)."""
+ parser.epilog = """Example: singletest.py -i test_spec.json -M muts_all.json"""
+
+ (opts, args) = parser.parse_args()
+
+ # Print summary / information about automation test status
+ if opts.test_automation_report:
+ print get_avail_tests_summary_table()
+ exit(0)
+
+ # Print summary / information about automation test status
+ if opts.test_case_report:
+ test_case_report_cols = ['id', 'automated', 'description', 'peripherals', 'host_test', 'duration', 'source_dir']
+ print get_avail_tests_summary_table(cols=test_case_report_cols, result_summary=False, join_delim='\n')
+ exit(0)
+
+ # Only prints matrix of supported toolchains
+ if opts.supported_toolchains:
+ print mcu_toolchain_matrix(platform_filter=opts.general_filter_regex)
+ exit(0)
+
+ # Open file with test specification
+ # test_spec_filename tells script which targets and their toolchain(s)
+ # should be covered by the test scenario
+ test_spec = get_json_data_from_file(opts.test_spec_filename) if opts.test_spec_filename else None
+ if test_spec is None:
+ if not opts.test_spec_filename:
+ parser.print_help()
+ exit(-1)
+
+ # Get extra MUTs if applicable
+ MUTs = get_json_data_from_file(opts.muts_spec_filename) if opts.muts_spec_filename else None
+
+ if MUTs is None:
+ if not opts.muts_spec_filename:
+ parser.print_help()
+ exit(-1)
+
+ # Only prints read MUTs configuration
+ if MUTs and opts.verbose_test_configuration_only:
+ print "MUTs configuration in %s:"% opts.muts_spec_filename
+ print print_muts_configuration_from_json(MUTs)
+ print
+ print "Test specification in %s:"% opts.test_spec_filename
+ print print_test_configuration_from_json(test_spec)
+ exit(0)
+
+ # Verbose test specification and MUTs configuration
+ if MUTs and opts.verbose:
+ print print_muts_configuration_from_json(MUTs)
+ if test_spec and opts.verbose:
+ print print_test_configuration_from_json(test_spec)
+
+ if opts.only_build_tests:
+ # We are skipping testing phase, and suppress summary
+ opts.suppress_summary = True
+
+ single_test = SingleTestRunner(_global_loops_count=opts.test_global_loops_value,
+ _test_loops_list=opts.test_loops_list,
+ _muts=MUTs,
+ _test_spec=test_spec,
+ _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
+ _opts_goanna_for_tests=opts.goanna_for_tests,
+ _opts_shuffle_test_order=opts.shuffle_test_order,
+ _opts_shuffle_test_seed=opts.shuffle_test_seed,
+ _opts_test_by_names=opts.test_by_names,
+ _opts_test_only_peripheral=opts.test_only_peripheral,
+ _opts_test_only_common=opts.test_only_common,
+ _opts_verbose_skipped_tests=opts.verbose_skipped_tests,
+ _opts_verbose_test_result_only=opts.verbose_test_result_only,
+ _opts_verbose=opts.verbose,
+ _opts_firmware_global_name=opts.firmware_global_name,
+ _opts_only_build_tests=opts.only_build_tests,
+ _opts_suppress_summary=opts.suppress_summary,
+ _opts_test_x_toolchain_summary=opts.test_x_toolchain_summary,
+ _opts_copy_method=opts.copy_method
+ )
+
+ try:
+ st_exec_thread = SingleTestExecutor(single_test)
+ except KeyboardInterrupt, e:
+ print "\n[CTRL+c] exit"
+ st_exec_thread.start()
+
+ if opts.rest_api_enabled:
+ # Enable REST API
+
+ app = Flask(__name__)
+
+ @app.route('/')
+ def hello_world():
+ return 'Hello World!'
+
+ @app.route('/status')
+ def rest_api_status():
+ return single_test.rest_api_status() # TODO
+
+ @app.route('/config')
+ def rest_api_config():
+ return single_test.rest_api_config() # TODO
+
+ @app.route('/log')
+ def rest_api_log():
+ return single_test.rest_api_log() # TODO
+
+    @app.route('/request/<request_type>') # 'muts', 'test_spec', 'test_results'
+ def rest_api_request_handler(request_type):
+ result = single_test.rest_api_request_handler(request_type) # TODO
+ return result
+
+ rest_api_port = int(opts.rest_api_port_no) if opts.rest_api_port_no else 5555
+ app.debug = False
+ app.run(port=rest_api_port) # Blocking Flask REST API web service
+ else:
+ st_exec_thread.join()
+
+'''
diff --git a/tools/tests.py b/tools/tests.py
new file mode 100644
index 0000000..7701aae
--- /dev/null
+++ b/tools/tests.py
@@ -0,0 +1,1208 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from tools.paths import *
+from tools.data.support import *
+
+TEST_CMSIS_LIB = join(TEST_DIR, "cmsis", "lib")
+TEST_MBED_LIB = join(TEST_DIR, "mbed", "env")
+
+PERIPHERALS = join(TEST_DIR, "peripherals")
+BENCHMARKS_DIR = join(TEST_DIR, "benchmarks")
+
+SD = join(TEST_DIR, "sd")
+TMP102 = join(PERIPHERALS, 'TMP102')
+AT30TSE75X = join(PERIPHERALS, 'AT30TSE75X')
+
+"""
+Wiring:
+ * Ground:
+ * LPC1*: p1
+ * KL25Z: GND
+
+ * Vout
+ * LPC1*: p40
+ * KL25Z: P3V3
+
+ * TMP102 (I2C):
+ * LPC1*: (SDA=p28 , SCL=p27)
+ * KL25Z: (SDA=PTC9, SCL=PTC8)
+ * MAXWSNENV: (SDA=TP6, SCL=TP5)
+
+ * digital_loop (Digital(In|Out|InOut), InterruptIn):
+ * Arduino headers: (D0 <-> D7)
+ * LPC1549: (D2 <-> D7)
+ * LPC1*: (p5 <-> p25 )
+ * KL25Z: (PTA5<-> PTC6)
+ * NUCLEO_F103RB: (PC_6 <-> PB_8)
+ * MAXWSNENV: (TP3 <-> TP4)
+ * MAX32600MBED: (P1_0 <-> P4_7)
+ * VK_RZ_A1H: (P3_2 <-> P5_6)
+
+ * port_loop (Port(In|Out|InOut)):
+ * Arduino headers: (D0 <-> D7), (D1 <-> D6)
+ * LPC1*: (p5 <-> p25), (p6 <-> p26)
+ * KL25Z: (PTA5 <-> PTC6), (PTA4 <-> PTC5)
+ * NUCLEO_F103RB: (PC_6 <-> PB_8), (PC_5 <-> PB_9)
+ * MAXWSNENV: (TP1 <-> TP3), (TP2 <-> TP4)
+ * MAX32600MBED: (P1_0 <-> P4_7), (P1_1 <-> P4_6)
+ * VK_RZ_A1H: (P3_2 <-> P5_6), (P3_7 <-> P5_1)
+
+ * analog_loop (AnalogIn, AnalogOut):
+ * Arduino headers: (A0 <-> A5)
+ * LPC1549: (A0 <-> D12)
+ * LPC1*: (p17 <-> p18 )
+ * KL25Z: (PTE30 <-> PTC2)
+
+ * analog_pot (AnalogIn):
+ * Arduino headers: (A0, A1)
+ * VK_RZ_A1H: (AN0, AN1)
+
+ * SD (SPI):
+ * LPC1*: (mosi=p11 , miso=p12 , sclk=p13 , cs=p14 )
+ * KL25Z: (mosi=PTD2, miso=PTD3, sclk=PTD1, cs=PTD0)
+
+ * MMA7660 (I2C):
+ * LPC1*: (SDA=p28 , SCL=p27)
+
+ * i2c_loop:
+ * LPC1768: (p28 <-> p9), (p27 <-> p10)
+
+ * i2c_eeprom:
+ * LPC1*: (SDA=p28 , SCL=p27)
+ * KL25Z: (SDA=PTE0, SCL=PTE1)
+ * VK_RZ_A1H:(SDA=P1_1, SCL=P1_0)
+
+ * can_transceiver:
+ * LPC1768: (RX=p9, TX=p10)
+ * LPC1549: (RX=D9, TX=D8)
+ * LPC4088: (RX=p9, TX=p10)
+ * VK_RZ_A1H:(RX=P5_9, TX=P5_10)
+ * NUCLEO_F091RC: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F072RB: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F042K6: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F334R8: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F303RE: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F303K8: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F302R8: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F446RE: (RX=PA_11, TX=PA_12)
+ * DISCO_F469NI: (RX=PB_8, TX=PB_9)
+    * DISCO_F429ZI: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F103RB: (RX=PA_11, TX=PA_12)
+ * NUCLEO_F746ZG: (RX=PA_11, TX=PA_12)
+ * DISCO_F746NG: (RX=PB_8, TX=PB_9)
+ * DISCO_L476VG: (RX=PA_11, TX=PA_12)
+ * NUCLEO_L476RG: (RX=PA_11, TX=PA_12)
+
+"""
+TESTS = [
+ # Automated MBED tests
+ {
+ "id": "MBED_A1", "description": "Basic",
+ "source_dir": join(TEST_DIR, "mbed", "basic"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ },
+ {
+ "id": "MBED_A2", "description": "Semihost file system",
+ "source_dir": join(TEST_DIR, "mbed", "file"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "mcu": ["LPC1768", "LPC2368", "LPC11U24"]
+ },
+ {
+ "id": "MBED_A3", "description": "C++ STL",
+ "source_dir": join(TEST_DIR, "mbed", "stl"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": False,
+ },
+ {
+ "id": "MBED_A4", "description": "I2C TMP102",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_TMP102"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, TMP102],
+ "automated": True,
+ "peripherals": ["TMP102"]
+ },
+ {
+ "id": "MBED_AT30TSE75X", "description": "I2C Temperature Sensor / EEPROM",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_at30tse75x"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, AT30TSE75X],
+ "automated": False,
+ "peripherals": ["AT30TSE75X"]
+ },
+ {
+ "id": "MBED_A5", "description": "DigitalIn DigitalOut",
+ "source_dir": join(TEST_DIR, "mbed", "digitalin_digitalout"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "peripherals": ["digital_loop"]
+ },
+ {
+ "id": "MBED_A6", "description": "DigitalInOut",
+ "source_dir": join(TEST_DIR, "mbed", "digitalinout"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "peripherals": ["digital_loop"]
+ },
+ {
+ "id": "MBED_A7", "description": "InterruptIn",
+ "source_dir": join(TEST_DIR, "mbed", "interruptin"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ "peripherals": ["digital_loop"]
+ },
+ {
+ "id": "MBED_A8", "description": "Analog",
+ "source_dir": join(TEST_DIR, "mbed", "analog"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "peripherals": ["analog_loop"],
+ "mcu": ["LPC1768", "LPC2368", "LPC2460", "KL25Z", "K64F", "K22F", "LPC4088", "LPC1549",
+ "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303K8", "NUCLEO_F303RE",
+ "NUCLEO_F334R8", "NUCLEO_L053R8", "NUCLEO_L073RZ", "NUCLEO_L152RE",
+ "NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "DISCO_F407VG", "DISCO_F746NG", "NUCLEO_F746ZG",
+ "ARCH_MAX", "MAX32600MBED", "MOTE_L152RC", "B96B_F446VE"]
+ },
+ {
+ "id": "MBED_A9", "description": "Serial Echo at 115200",
+ "source_dir": join(TEST_DIR, "mbed", "echo"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test": "echo"
+ },
+ {
+ "id": "MBED_A10", "description": "PortOut PortIn",
+ "source_dir": join(TEST_DIR, "mbed", "portout_portin"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "peripherals": ["port_loop"],
+ "supported": DEFAULT_SUPPORT,
+ "automated": True,
+ },
+ {
+ "id": "MBED_A11", "description": "PortInOut",
+ "source_dir": join(TEST_DIR, "mbed", "portinout"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "peripherals": ["port_loop"],
+ "supported": DEFAULT_SUPPORT,
+ "automated": True,
+ },
+ {
+ "id": "MBED_A12", "description": "SD File System",
+ "source_dir": join(TEST_DIR, "mbed", "sd"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
+ "automated": True,
+ "duration": 15,
+ "peripherals": ["SD"]
+ },
+ {
+ "id": "MBED_A13", "description": "I2C MMA7660 accelerometer",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_MMA7660"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA7660')],
+ "automated": True,
+ "peripherals": ["MMA7660"]
+ },
+ {
+ "id": "MBED_A14", "description": "I2C Master",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_master"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
+ },
+ {
+ "id": "MBED_A15", "description": "I2C Slave",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_slave"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
+ },
+ {
+ "id": "MBED_A16", "description": "SPI Master",
+ "source_dir": join(TEST_DIR, "mbed", "spi_master"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
+ },
+ {
+ "id": "MBED_A17", "description": "SPI Slave",
+ "source_dir": join(TEST_DIR, "mbed", "spi_slave"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
+ },
+ {
+ "id": "MBED_A18", "description": "Interrupt vector relocation",
+ "source_dir": join(TEST_DIR, "mbed", "vtor_reloc"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
+ "mcu": ["LPC1768"],
+ "automated": True,
+ },
+ {
+ "id": "MBED_A19", "description": "I2C EEPROM read/write test",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_eeprom"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "peripherals": ["24LC256"],
+ "automated": True,
+ "duration": 15,
+ },
+ {
+ "id": "MBED_A20", "description": "I2C master/slave test",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_master_slave"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
+ "mcu": ["LPC1768", "RZ_A1H"],
+ "peripherals": ["i2c_loop"]
+ },
+ {
+ "id": "MBED_A21", "description": "Call function before main (mbed_main)",
+ "source_dir": join(TEST_DIR, "mbed", "call_before_main"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ },
+ {
+ "id": "MBED_A22", "description": "SPIFI for LPC4088 (test 1)",
+ "source_dir": join(TEST_DIR, "mbed", "spifi1"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 30,
+ "mcu": ["LPC4088","LPC4088_DM"]
+ },
+ {
+ "id": "MBED_A23", "description": "SPIFI for LPC4088 (test 2)",
+ "source_dir": join(TEST_DIR, "mbed", "spifi2"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 30,
+ "mcu": ["LPC4088","LPC4088_DM"]
+ },
+ {
+ "id": "MBED_A24", "description": "Serial echo with RTS/CTS flow control",
+ "source_dir": join(TEST_DIR, "mbed", "echo_flow_control"),
+ "dependencies": [MBED_LIBRARIES],
+        "automated": True,
+ "host_test": "echo_flow_control",
+ "mcu": ["LPC1768"],
+ "peripherals": ["extra_serial"]
+ },
+ {
+ "id": "MBED_A25", "description": "I2C EEPROM line read/write test",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_eeprom_line"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "peripherals": ["24LC256"],
+ "automated": True,
+ "duration": 10,
+ },
+ {
+ "id": "MBED_A26", "description": "AnalogIn potentiometer test",
+ "source_dir": join(TEST_DIR, "mbed", "analog_pot"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "peripherals": ["analog_pot"],
+ "automated": True,
+ "duration": 10,
+ },
+ {
+ "id": "MBED_A27", "description": "CAN loopback test",
+ "source_dir": join(TEST_DIR, "mbed", "can_loopback"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 20,
+ "peripherals": ["can_transceiver"],
+ "mcu": ["LPC1549", "LPC1768","B96B_F446VE", "VK_RZ_A1H",
+ "NUCLEO_F091RC", "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8",
+ "NUCLEO_F303RE", "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE",
+ "DISCO_F469NI", "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG",
+ "DISCO_F746NG", "DISCO_L476VG", "NUCLEO_L476RG"]
+ },
+ {
+ "id": "MBED_BLINKY", "description": "Blinky",
+ "source_dir": join(TEST_DIR, "mbed", "blinky"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": False,
+ },
+ {
+ "id": "MBED_BUS", "description": "Blinky BUS",
+ "source_dir": join(TEST_DIR, "mbed", "bus"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": False,
+ "duration": 15,
+ },
+
+ {
+ "id": "MBED_BUSOUT", "description": "BusOut",
+ "source_dir": join(TEST_DIR, "mbed", "bus_out"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 15,
+ },
+
+ # Size benchmarks
+ {
+ "id": "BENCHMARK_1", "description": "Size (c environment)",
+ "source_dir": join(BENCHMARKS_DIR, "cenv"),
+ "dependencies": [MBED_LIBRARIES]
+ },
+ {
+ "id": "BENCHMARK_2", "description": "Size (float math)",
+ "source_dir": join(BENCHMARKS_DIR, "float_math"),
+ "dependencies": [MBED_LIBRARIES]
+ },
+ {
+ "id": "BENCHMARK_3", "description": "Size (printf)",
+ "source_dir": join(BENCHMARKS_DIR, "printf"),
+ "dependencies": [MBED_LIBRARIES]
+ },
+ {
+ "id": "BENCHMARK_4", "description": "Size (mbed libs)",
+ "source_dir": join(BENCHMARKS_DIR, "mbed"),
+ "dependencies": [MBED_LIBRARIES]
+ },
+ {
+ "id": "BENCHMARK_5", "description": "Size (all)",
+ "source_dir": join(BENCHMARKS_DIR, "all"),
+ "dependencies": [MBED_LIBRARIES]
+ },
+
+ # performance related tests
+ {
+ "id": "PERF_1", "description": "SD Stdio R/W Speed",
+ "source_dir": join(TEST_DIR, "mbed", "sd_perf_stdio"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
+ "automated": True,
+ "duration": 15,
+ "peripherals": ["SD"]
+ },
+ {
+ "id": "PERF_2", "description": "SD FileHandle R/W Speed",
+ "source_dir": join(TEST_DIR, "mbed", "sd_perf_fhandle"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
+ "automated": True,
+ "duration": 15,
+ "peripherals": ["SD"]
+ },
+ {
+ "id": "PERF_3", "description": "SD FatFS R/W Speed",
+ "source_dir": join(TEST_DIR, "mbed", "sd_perf_fatfs"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
+ "automated": True,
+ "duration": 15,
+ "peripherals": ["SD"]
+ },
+
+
+ # Not automated MBED tests
+ {
+ "id": "MBED_1", "description": "I2C SRF08",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_SRF08"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'SRF08')],
+ "peripherals": ["SRF08"]
+ },
+ {
+ "id": "MBED_2", "description": "stdio",
+ "source_dir": join(TEST_DIR, "mbed", "stdio"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 20,
+ "automated": True,
+ #"host_test": "stdio_auto"
+ },
+ {
+ "id": "MBED_3", "description": "PortOut",
+ "source_dir": join(TEST_DIR, "mbed", "portout"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_4", "description": "Sleep",
+ "source_dir": join(TEST_DIR, "mbed", "sleep"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 30,
+ "mcu": ["LPC1768", "LPC11U24", "LPC4088","LPC4088_DM","NRF51822", "LPC11U68"]
+ },
+ {
+ "id": "MBED_5", "description": "PWM",
+ "source_dir": join(TEST_DIR, "mbed", "pwm"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB]
+ },
+ {
+ "id": "MBED_6", "description": "SW Reset",
+ "source_dir": join(TEST_DIR, "mbed", "reset"),
+ "dependencies": [MBED_LIBRARIES],
+ "duration": 15
+ },
+ {
+ "id": "MBED_7", "description": "stdio benchmark",
+ "source_dir": join(TEST_DIR, "mbed", "stdio_benchmark"),
+ "dependencies": [MBED_LIBRARIES],
+ "duration": 40
+ },
+ {
+ "id": "MBED_8", "description": "SPI",
+ "source_dir": join(TEST_DIR, "mbed", "spi"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_9", "description": "Sleep Timeout",
+ "source_dir": join(TEST_DIR, "mbed", "sleep_timeout"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_10", "description": "Hello World",
+ "source_dir": join(TEST_DIR, "mbed", "hello"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test": "hello_auto",
+ },
+ {
+ "id": "MBED_11", "description": "Ticker Int",
+ "source_dir": join(TEST_DIR, "mbed", "ticker"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test": "wait_us_auto",
+ "duration": 20,
+ },
+ {
+ "id": "MBED_12", "description": "C++",
+ "source_dir": join(TEST_DIR, "mbed", "cpp"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True
+ },
+ {
+ "id": "MBED_13", "description": "Heap & Stack",
+ "source_dir": join(TEST_DIR, "mbed", "heap_and_stack"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ },
+ {
+ "id": "MBED_14", "description": "Serial Interrupt",
+ "source_dir": join(TEST_DIR, "mbed", "serial_interrupt"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ },
+ {
+ "id": "MBED_15", "description": "RPC",
+ "source_dir": join(TEST_DIR, "mbed", "rpc"),
+ "dependencies": [MBED_LIBRARIES, join(LIB_DIR, "rpc"), TEST_MBED_LIB],
+ "automated": False,
+ "mcu": ["LPC1768"]
+ },
+ {
+ "id": "MBED_16", "description": "RTC",
+ "source_dir": join(TEST_DIR, "mbed", "rtc"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "exclude_mcu": ["NRF51822", "NRF51822_BOOT", "NRF51822_OTA", "NRF51822_Y5_MBUG",
+ "NRF51_DK", "NRF51_DK_BOOT", "NRF51_DK_OTA",
+ "NRF51_MICROBIT", "NRF51_MICROBIT_B", "NRF51_MICROBIT_BOOT",
+ "NRF51_MICROBIT_B_BOOT", "NRF51_MICROBIT_B_OTA", "NRF51_MICROBIT_OTA",
+                        "HRM1017", "HRM1017_BOOT", "HRM1017_OTA",
+ "TY51822R3", "TY51822R3_BOOT", "TY51822R3_OTA",
+                        "NRF51_DONGLE", "NRF51_DONGLE_BOOT", "NRF51_DONGLE_OTA",
+ "ARCH_BLE", "ARCH_BLE_BOOT", "ARCH_BLE_OTA",
+ "ARCH_LINK", "ARCH_LINK_BOOT", "ARCH_LINK_OTA",
+ "RBLAB_BLENANO", "RBLAB_BLENANO_BOOT", "RBLAB_BLENANO_OTA",
+ "RBLAB_NRF51822", "RBLAB_NRF51822_BOOT", "RBLAB_NRF51822_OTA",
+ "SEEED_TINY_BLE", "SEEED_TINY_BLE_BOOT", "SEEED_TINY_BLE_OTA",
+ "WALLBOT_BLE", "WALLBOT_BLE_BOOT", "WALLBOT_BLE_OTA",
+ "DELTA_DFCM_NNN40", "DELTA_DFCM_NNN40_BOOT", "DELTA_DFCM_NNN40_OTA",
+ "LPC1114"],
+ #"host_test": "rtc_auto",
+ "duration": 15
+ },
+ {
+ "id": "MBED_17", "description": "Serial Interrupt 2",
+ "source_dir": join(TEST_DIR, "mbed", "serial_interrupt_2"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_18", "description": "Local FS Directory",
+ "source_dir": join(TEST_DIR, "mbed", "dir"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_19", "description": "SD FS Directory",
+ "source_dir": join(TEST_DIR, "mbed", "dir_sd"),
+ "dependencies": [MBED_LIBRARIES, FS_LIBRARY],
+ "peripherals": ["SD"]
+ },
+ {
+ "id": "MBED_20", "description": "InterruptIn 2",
+ "source_dir": join(TEST_DIR, "mbed", "interruptin_2"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_21", "description": "freopen Stream",
+ "source_dir": join(TEST_DIR, "mbed", "freopen"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_22", "description": "Semihost",
+ "source_dir": join(TEST_DIR, "mbed", "semihost"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "mcu": ["LPC1768", "LPC2368", "LPC11U24"]
+ },
+ {
+ "id": "MBED_23", "description": "Ticker Int us",
+ "source_dir": join(TEST_DIR, "mbed", "ticker_2"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ #"host_test": "wait_us_auto"
+ },
+ {
+ "id": "MBED_24", "description": "Timeout Int us",
+ "source_dir": join(TEST_DIR, "mbed", "timeout"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ #"host_test": "wait_us_auto"
+ },
+ {
+ "id": "MBED_25", "description": "Time us",
+ "source_dir": join(TEST_DIR, "mbed", "time_us"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ #"host_test": "wait_us_auto"
+ },
+ {
+ "id": "MBED_26", "description": "Integer constant division",
+ "source_dir": join(TEST_DIR, "mbed", "div"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ },
+ {
+ "id": "MBED_27", "description": "SPI ADXL345",
+ "source_dir": join(TEST_DIR, "mbed", "spi_ADXL345"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'ADXL345')],
+ "peripherals": ["ADXL345"]
+ },
+ {
+ "id": "MBED_28", "description": "Interrupt chaining (InterruptManager)",
+ "source_dir": join(TEST_DIR, "mbed", "interrupt_chaining"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ },
+ {
+ "id": "MBED_29", "description": "CAN network test",
+ "source_dir": join(TEST_DIR, "mbed", "can"),
+ "dependencies": [MBED_LIBRARIES],
+ "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
+ "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F303RE",
+ "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE", "DISCO_F469NI",
+ "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG", "DISCO_F746NG",
+ "NUCLEO_L476RG"]
+ },
+ {
+ "id": "MBED_30", "description": "CAN network test using interrupts",
+ "source_dir": join(TEST_DIR, "mbed", "can_interrupt"),
+ "dependencies": [MBED_LIBRARIES],
+ "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
+ "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F303RE",
+ "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE", "DISCO_F469NI",
+ "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG", "DISCO_F746NG",
+ "NUCLEO_L476RG"]
+ },
+ {
+ "id": "MBED_31", "description": "PWM LED test",
+ "source_dir": join(TEST_DIR, "mbed", "pwm_led"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_32", "description": "Pin toggling",
+ "source_dir": join(TEST_DIR, "mbed", "pin_toggling"),
+ "dependencies": [MBED_LIBRARIES],
+ },
+ {
+ "id": "MBED_33", "description": "C string operations",
+ "source_dir": join(TEST_DIR, "mbed", "cstring"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 10,
+ "automated": False,
+ },
+ {
+ "id": "MBED_34", "description": "Ticker Two callbacks",
+ "source_dir": join(TEST_DIR, "mbed", "ticker_3"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ #"host_test": "wait_us_auto"
+ },
+ {
+ "id": "MBED_35", "description": "SPI C12832 display",
+ "source_dir": join(TEST_DIR, "mbed", "spi_C12832"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'C12832')],
+ "peripherals": ["C12832"],
+ "automated": True,
+ "duration": 10,
+ },
+ {
+ "id": "MBED_36", "description": "WFI correct behavior",
+ "source_dir": join(TEST_DIR, "mbed", "wfi"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": False
+ },
+ {
+ "id": "MBED_37", "description": "Serial NC RX",
+ "source_dir": join(TEST_DIR, "mbed", "serial_nc_rx"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True
+ },
+ {
+ "id": "MBED_38", "description": "Serial NC TX",
+ "source_dir": join(TEST_DIR, "mbed", "serial_nc_tx"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True
+ },
+
+ # CMSIS RTOS tests
+ {
+ "id": "CMSIS_RTOS_1", "description": "Basic",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "basic"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ },
+ {
+ "id": "CMSIS_RTOS_2", "description": "Mutex",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "mutex"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ "duration": 20
+ },
+ {
+ "id": "CMSIS_RTOS_3", "description": "Semaphore",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "semaphore"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ "duration": 20
+ },
+ {
+ "id": "CMSIS_RTOS_4", "description": "Signals",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "signals"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ },
+ {
+ "id": "CMSIS_RTOS_5", "description": "Queue",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "queue"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ "duration": 20
+ },
+ {
+ "id": "CMSIS_RTOS_6", "description": "Mail",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "mail"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ "duration": 20
+ },
+ {
+ "id": "CMSIS_RTOS_7", "description": "Timer",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "timer"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ },
+ {
+ "id": "CMSIS_RTOS_8", "description": "ISR",
+ "source_dir": join(TEST_DIR, "rtos", "cmsis", "isr"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
+ },
+
+ # mbed RTOS tests
+ {
+ "id": "RTOS_1", "description": "Basic thread",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "basic"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ #"host_test": "wait_us_auto",
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_2", "description": "Mutex resource lock",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "mutex"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "duration": 20,
+ "automated": True,
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
+ "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_3", "description": "Semaphore resource lock",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "semaphore"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "duration": 20,
+ "automated": True,
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
+ "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_4", "description": "Signals messaging",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "signals"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
+ "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_5", "description": "Queue messaging",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "queue"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
+ "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_6", "description": "Mail messaging",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "mail"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
+ "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_7", "description": "Timer",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "timer"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ #"host_test": "wait_us_auto",
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
+ "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_8", "description": "ISR (Queue)",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "isr"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
+ "KL25Z", "KL05Z", "K64F", "KL46Z",
+ "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
+ "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
+ "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
+ "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
+ "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
+ "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
+ "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
+ },
+ {
+ "id": "RTOS_9", "description": "SD File write-read",
+ "source_dir": join(TEST_DIR, "rtos", "mbed", "file"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
+ "automated": True,
+ "peripherals": ["SD"],
+ "mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z",
+ "KL05Z", "K64F", "KL46Z", "RZ_A1H",
+ "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE", "NUCLEO_F410RB", "DISCO_F469NI"],
+ },
+
+ # Networking Tests
+ {
+ "id": "NET_1", "description": "TCP client hello world",
+ "source_dir": join(TEST_DIR, "net", "helloworld", "tcpclient"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_2", "description": "NIST Internet Time Service",
+ "source_dir": join(TEST_DIR, "net", "helloworld", "udpclient"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "duration": 15,
+ "automated": True,
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_3", "description": "TCP echo server",
+ "source_dir": join(TEST_DIR, "net", "echo", "tcp_server"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test" : "tcpecho_server_auto",
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_4", "description": "TCP echo client",
+ "source_dir": join(TEST_DIR, "net", "echo", "tcp_client"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test": "tcpecho_client_auto",
+ "peripherals": ["ethernet"]
+ },
+ {
+ "id": "NET_5", "description": "UDP echo server",
+ "source_dir": join(TEST_DIR, "net", "echo", "udp_server"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test" : "udpecho_server_auto",
+ "peripherals": ["ethernet"]
+ },
+ {
+ "id": "NET_6", "description": "UDP echo client",
+ "source_dir": join(TEST_DIR, "net", "echo", "udp_client"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test" : "udpecho_client_auto",
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_7", "description": "HTTP client hello world",
+ "source_dir": join(TEST_DIR, "net", "protocols", "HTTPClient_HelloWorld"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 15,
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_8", "description": "NTP client",
+ "source_dir": join(TEST_DIR, "net", "protocols", "NTPClient_HelloWorld"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_9", "description": "Multicast Send",
+ "source_dir": join(TEST_DIR, "net", "helloworld", "multicast_send"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_10", "description": "Multicast Receive",
+ "source_dir": join(TEST_DIR, "net", "helloworld", "multicast_receive"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_11", "description": "Broadcast Send",
+ "source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_send"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_12", "description": "Broadcast Receive",
+ "source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_receive"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_13", "description": "TCP client echo loop",
+ "source_dir": join(TEST_DIR, "net", "echo", "tcp_client_loop"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
+ "automated": True,
+ "duration": 15,
+ #"host_test": "tcpecho_client_auto",
+ "peripherals": ["ethernet"],
+ },
+ {
+ "id": "NET_14", "description": "UDP PHY/Data link layer",
+ "source_dir": join(TEST_DIR, "net", "echo", "udp_link_layer"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
+ "automated": False,
+ "duration": 20,
+ "host_test": "udp_link_layer_auto",
+ "peripherals": ["ethernet"],
+ },
+
+ # u-blox tests
+ {
+ "id": "UB_1", "description": "u-blox USB modem: HTTP client",
+ "source_dir": [join(TEST_DIR, "net", "cellular", "http", "ubloxusb"), join(TEST_DIR, "net", "cellular", "http", "common")],
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
+ "supported": CORTEX_ARM_SUPPORT,
+ },
+ {
+ "id": "UB_2", "description": "u-blox USB modem: SMS test",
+ "source_dir": [join(TEST_DIR, "net", "cellular", "sms", "ubloxusb"), join(TEST_DIR, "net", "cellular", "sms", "common")],
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
+ "supported": CORTEX_ARM_SUPPORT,
+ },
+
+ # USB Tests
+ {
+ "id": "USB_1", "description": "Mouse",
+ "source_dir": join(TEST_DIR, "usb", "device", "basic"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ },
+ {
+ "id": "USB_2", "description": "Keyboard",
+ "source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ },
+ {
+ "id": "USB_3", "description": "Mouse_Keyboard",
+ "source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ },
+ {
+ "id": "USB_4", "description": "Serial Port",
+ "source_dir": join(TEST_DIR, "usb", "device", "serial"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ "supported": CORTEX_ARM_SUPPORT,
+ },
+ {
+ "id": "USB_5", "description": "Generic HID",
+ "source_dir": join(TEST_DIR, "usb", "device", "raw_hid"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ },
+ {
+ "id": "USB_6", "description": "MIDI",
+ "source_dir": join(TEST_DIR, "usb", "device", "midi"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ },
+ {
+ "id": "USB_7", "description": "AUDIO",
+ "source_dir": join(TEST_DIR, "usb", "device", "audio"),
+ "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
+ },
+
+ # CMSIS DSP
+ {
+ "id": "CMSIS_DSP_1", "description": "FIR",
+ "source_dir": join(TEST_DIR, "dsp", "cmsis", "fir_f32"),
+ "dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
+ },
+
+ # mbed DSP
+ {
+ "id": "DSP_1", "description": "FIR",
+ "source_dir": join(TEST_DIR, "dsp", "mbed", "fir_f32"),
+ "dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
+ },
+
+ # KL25Z
+ {
+ "id": "KL25Z_1", "description": "LPTMR",
+ "source_dir": join(TEST_DIR, "KL25Z", "lptmr"),
+ "dependencies": [MBED_LIBRARIES],
+ "supported": CORTEX_ARM_SUPPORT,
+ "mcu": ["KL25Z"],
+ },
+ {
+ "id": "KL25Z_2", "description": "PIT",
+ "source_dir": join(TEST_DIR, "KL25Z", "pit"),
+ "dependencies": [MBED_LIBRARIES],
+ "supported": CORTEX_ARM_SUPPORT,
+ "mcu": ["KL25Z"],
+ },
+ {
+ "id": "KL25Z_3", "description": "TSI Touch Sensor",
+ "source_dir": join(TEST_DIR, "mbed", "tsi"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'TSI')],
+ "mcu": ["KL25Z"],
+ },
+ {
+ "id": "KL25Z_4", "description": "RTC",
+ "source_dir": join(TEST_DIR, "KL25Z", "rtc"),
+ "dependencies": [MBED_LIBRARIES],
+ "mcu": ["KL25Z"],
+ },
+ {
+ "id": "KL25Z_5", "description": "MMA8451Q accelerometer",
+ "source_dir": join(TEST_DIR, "mbed", "i2c_MMA8451Q"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA8451Q')],
+ "mcu": ["KL25Z", "KL05Z", "KL46Z", "K20D50M"],
+ "automated": True,
+ "duration": 15,
+ },
+
+ # Examples
+ {
+ "id": "EXAMPLE_1", "description": "/dev/null",
+ "source_dir": join(TEST_DIR, "mbed", "dev_null"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test" : "dev_null_auto",
+ },
+ {
+ "id": "EXAMPLE_2", "description": "FS + RTOS",
+ "source_dir": join(TEST_DIR, "mbed", "fs"),
+ "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
+ },
+
+ # CPPUTEST Library provides Unit testing Framework
+ #
+ # To write TESTs and TEST_GROUPs please add CPPUTEST_LIBRARY to 'dependencies'
+ #
+ # This will also include:
+ # 1. test runner - main function with call to CommandLineTestRunner::RunAllTests(ac, av)
+ # 2. Serial console object to print test result on serial port console
+ #
+
+ # Unit testing with cpputest library
+ {
+ "id": "UT_1", "description": "Basic",
+ "source_dir": join(TEST_DIR, "utest", "basic"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+ {
+ "id": "UT_2", "description": "Semihost file system",
+ "source_dir": join(TEST_DIR, "utest", "semihost_fs"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ "mcu": ["LPC1768", "LPC2368", "LPC11U24"]
+ },
+ {
+ "id": "UT_3", "description": "General tests",
+ "source_dir": join(TEST_DIR, "utest", "general"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+ {
+ "id": "UT_BUSIO", "description": "BusIn BusOut",
+ "source_dir": join(TEST_DIR, "utest", "bus"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+ {
+ "id": "UT_I2C_EEPROM_ASYNCH", "description": "I2C Asynch eeprom",
+ "source_dir": join(TEST_DIR, "utest", "i2c_eeprom_asynch"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+ {
+ "id": "UT_SERIAL_ASYNCH", "description": "Asynch serial test (req 2 serial peripherals)",
+ "source_dir": join(TEST_DIR, "utest", "serial_asynch"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+ {
+ "id": "UT_SPI_ASYNCH", "description": "Asynch spi test",
+ "source_dir": join(TEST_DIR, "utest", "spi_asynch"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+ {
+ "id": "UT_LP_TICKER", "description": "Low power ticker test",
+ "source_dir": join(TEST_DIR, "utest", "lp_ticker"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
+ "automated": False,
+ },
+
+ # Tests used for target information purposes
+ {
+ "id": "DTCT_1", "description": "Simple detect test",
+ "source_dir": join(TEST_DIR, "mbed", "detect"),
+ "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
+ "automated": True,
+ #"host_test" : "detect_auto",
+ },
+
+]
+
+# Group tests with the same goals into categories
+GROUPS = {
+ "core": ["MBED_A1", "MBED_A2", "MBED_A3", "MBED_A18"],
+ "digital_io": ["MBED_A5", "MBED_A6", "MBED_A7", "MBED_A10", "MBED_A11"],
+ "analog_io": ["MBED_A8"],
+ "i2c": ["MBED_A19", "MBED_A20"],
+ "spi": ["MBED_A12"],
+}
+GROUPS["rtos"] = [test["id"] for test in TESTS if test["id"].startswith("RTOS_")]
+GROUPS["net"] = [test["id"] for test in TESTS if test["id"].startswith("NET_")]
+GROUPS["automated"] = [test["id"] for test in TESTS if test.get("automated", False)]
+# Look for 'TEST_GROUPS' in private_settings.py and update the GROUPS dictionary
+# with the information in test_groups if found
+try:
+ from tools.private_settings import TEST_GROUPS
+except:
+ TEST_GROUPS = {}
+GROUPS.update(TEST_GROUPS)
+
+class Test:
+ DEFAULTS = {
+ #'mcu': None,
+ 'description': None,
+ 'dependencies': None,
+ 'duration': 10,
+ 'host_test': 'host_test',
+ 'automated': False,
+ 'peripherals': None,
+ #'supported': None,
+ 'source_dir': None,
+ 'extra_files': None
+ }
+ def __init__(self, n):
+ self.n = n
+ self.__dict__.update(Test.DEFAULTS)
+ self.__dict__.update(TESTS[n])
+
+ def is_supported(self, target, toolchain):
+ if hasattr(self, 'mcu') and not target in self.mcu:
+ return False
+ if hasattr(self, 'exclude_mcu') and target in self.exclude_mcu:
+ return False
+ if not hasattr(self, 'supported'):
+ return True
+ return (target in self.supported) and (toolchain in self.supported[target])
+
+ def get_description(self):
+ if self.description:
+ return self.description
+ else:
+ return self.id
+
+ def __cmp__(self, other):
+ return cmp(self.n, other.n)
+
+ def __str__(self):
+ return "[%3d] %s: %s" % (self.n, self.id, self.get_description())
+
+ def __getitem__(self, key):
+ if key == "id": return self.id
+ elif key == "mcu": return self.mcu
+ elif key == "exclude_mcu": return self.exclude_mcu
+ elif key == "dependencies": return self.dependencies
+ elif key == "description": return self.description
+ elif key == "duration": return self.duration
+ elif key == "host_test": return self.host_test
+ elif key == "automated": return self.automated
+ elif key == "peripherals": return self.peripherals
+ elif key == "supported": return self.supported
+ elif key == "source_dir": return self.source_dir
+ elif key == "extra_files": return self.extra_files
+ else:
+ return None
+
+TEST_MAP = dict([(test['id'], Test(i)) for i, test in enumerate(TESTS)])
diff --git a/tools/toolchains/__init__.py b/tools/toolchains/__init__.py
new file mode 100644
index 0000000..75ac253
--- /dev/null
+++ b/tools/toolchains/__init__.py
@@ -0,0 +1,776 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import sys
+from os import stat, walk
+from copy import copy
+from time import time, sleep
+from types import ListType
+from shutil import copyfile
+from os.path import join, splitext, exists, relpath, dirname, basename, split
+from inspect import getmro
+
+from multiprocessing import Pool, cpu_count
+from tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
+from tools.settings import BUILD_OPTIONS, MBED_ORG_USER
+import tools.hooks as hooks
+
+
+#Disables multiprocessing if set to higher number than the host machine CPUs
+CPU_COUNT_MIN = 1
+
+def compile_worker(job):
+ results = []
+ for command in job['commands']:
+ _, _stderr, _rc = run_cmd(command, job['work_dir'])
+ results.append({
+ 'code': _rc,
+ 'output': _stderr,
+ 'command': command
+ })
+
+ return {
+ 'source': job['source'],
+ 'object': job['object'],
+ 'commands': job['commands'],
+ 'results': results
+ }
+
+class Resources:
+ def __init__(self, base_path=None):
+ self.base_path = base_path
+
+ self.inc_dirs = []
+ self.headers = []
+
+ self.s_sources = []
+ self.c_sources = []
+ self.cpp_sources = []
+
+ self.lib_dirs = set([])
+ self.objects = []
+ self.libraries = []
+
+ # mbed special files
+ self.lib_builds = []
+ self.lib_refs = []
+
+ self.repo_dirs = []
+ self.repo_files = []
+
+ self.linker_script = None
+
+ # Other files
+ self.hex_files = []
+ self.bin_files = []
+
+ def add(self, resources):
+ self.inc_dirs += resources.inc_dirs
+ self.headers += resources.headers
+
+ self.s_sources += resources.s_sources
+ self.c_sources += resources.c_sources
+ self.cpp_sources += resources.cpp_sources
+
+ self.lib_dirs |= resources.lib_dirs
+ self.objects += resources.objects
+ self.libraries += resources.libraries
+
+ self.lib_builds += resources.lib_builds
+ self.lib_refs += resources.lib_refs
+
+ self.repo_dirs += resources.repo_dirs
+ self.repo_files += resources.repo_files
+
+ if resources.linker_script is not None:
+ self.linker_script = resources.linker_script
+
+ self.hex_files += resources.hex_files
+ self.bin_files += resources.bin_files
+
+ def relative_to(self, base, dot=False):
+ for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
+ 'cpp_sources', 'lib_dirs', 'objects', 'libraries',
+ 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
+ v = [rel_path(f, base, dot) for f in getattr(self, field)]
+ setattr(self, field, v)
+ if self.linker_script is not None:
+ self.linker_script = rel_path(self.linker_script, base, dot)
+
+ def win_to_unix(self):
+ for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
+ 'cpp_sources', 'lib_dirs', 'objects', 'libraries',
+ 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
+ v = [f.replace('\\', '/') for f in getattr(self, field)]
+ setattr(self, field, v)
+ if self.linker_script is not None:
+ self.linker_script = self.linker_script.replace('\\', '/')
+
+ def __str__(self):
+ s = []
+
+ for (label, resources) in (
+ ('Include Directories', self.inc_dirs),
+ ('Headers', self.headers),
+
+ ('Assembly sources', self.s_sources),
+ ('C sources', self.c_sources),
+ ('C++ sources', self.cpp_sources),
+
+ ('Library directories', self.lib_dirs),
+ ('Objects', self.objects),
+ ('Libraries', self.libraries),
+
+ ('Hex files', self.hex_files),
+ ('Bin files', self.bin_files),
+ ):
+ if resources:
+ s.append('%s:\n ' % label + '\n '.join(resources))
+
+ if self.linker_script:
+ s.append('Linker Script: ' + self.linker_script)
+
+ return '\n'.join(s)
+
+
+# Support legacy build conventions: the original mbed build system did not have
+# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
+# had the knowledge of a list of these directories to be ignored.
+LEGACY_IGNORE_DIRS = set([
+ 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
+ 'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM'
+])
+LEGACY_TOOLCHAIN_NAMES = {
+ 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
+ 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
+ 'IAR': 'IAR',
+}
+
+
+class mbedToolchain:
+ VERBOSE = True
+
+ CORTEX_SYMBOLS = {
+ "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0"],
+ "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS"],
+ "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
+ "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3"],
+ "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4"],
+ "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1"],
+ "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7"],
+ "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1"],
+ "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
+ }
+
+ GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
+    GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?P<severity>warning) \[(?P<file>[^:]+):(?P<line>\d+)\] \- (?P<message>.*)"')
+
+ def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
+ self.target = target
+ self.name = self.__class__.__name__
+ self.hook = hooks.Hook(target, self)
+ self.silent = silent
+ self.output = ""
+
+ self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
+
+ if notify:
+ self.notify_fun = notify
+ elif extra_verbose:
+ self.notify_fun = self.print_notify_verbose
+ else:
+ self.notify_fun = self.print_notify
+
+ self.options = options if options is not None else []
+
+ self.macros = macros or []
+ self.options.extend(BUILD_OPTIONS)
+ if self.options:
+ self.info("Build Options: %s" % (', '.join(self.options)))
+
+ self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
+
+ self.symbols = None
+ self.labels = None
+ self.has_config = False
+
+ self.build_all = False
+ self.timestamp = time()
+ self.jobs = 1
+
+ self.CHROOT = None
+
+ self.mp_pool = None
+
+ def get_output(self):
+ return self.output
+
+ def print_notify(self, event, silent=False):
+ """ Default command line notification
+ """
+ msg = None
+
+ if event['type'] in ['info', 'debug']:
+ msg = event['message']
+
+ elif event['type'] == 'cc':
+ event['severity'] = event['severity'].title()
+ event['file'] = basename(event['file'])
+ msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
+
+ elif event['type'] == 'progress':
+ if not silent:
+ msg = '%s: %s' % (event['action'].title(), basename(event['file']))
+
+ if msg:
+ print msg
+ self.output += msg + "\n"
+
+ def print_notify_verbose(self, event, silent=False):
+ """ Default command line notification with more verbose mode
+ """
+ if event['type'] in ['info', 'debug']:
+ self.print_notify(event) # standard handle
+
+ elif event['type'] == 'cc':
+ event['severity'] = event['severity'].title()
+ event['file'] = basename(event['file'])
+ event['mcu_name'] = "None"
+ event['toolchain'] = "None"
+ event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
+ event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
+ msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
+ print msg
+ self.output += msg + "\n"
+
+ elif event['type'] == 'progress':
+ self.print_notify(event) # standard handle
+
+ def notify(self, event):
+ """ Little closure for notify functions
+ """
+ return self.notify_fun(event, self.silent)
+
+ def __exit__(self):
+ if self.mp_pool is not None:
+ self.mp_pool.terminate()
+
+ def goanna_parse_line(self, line):
+ if "analyze" in self.options:
+ return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
+ else:
+ return None
+
+ def get_symbols(self):
+ if self.symbols is None:
+ # Target and Toolchain symbols
+ labels = self.get_labels()
+ self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
+ self.symbols.extend(["FEATURE_%s" % t for t in labels['FEATURE']])
+ self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
+
+ # Config support
+ if self.has_config:
+ self.symbols.append('HAVE_MBED_CONFIG_H')
+
+ # Cortex CPU symbols
+ if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
+ self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
+
+ # Symbols defined by the on-line build.system
+ self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
+ if MBED_ORG_USER:
+ self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
+
+ # Add target's symbols
+ self.symbols += self.target.macros
+ self.symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
+ # Add extra symbols passed via 'macros' parameter
+ self.symbols += self.macros
+
+ # Form factor variables
+ if hasattr(self.target, 'supported_form_factors'):
+ self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
+
+ return list(set(self.symbols)) # Return only unique symbols
+
+ def get_labels(self):
+ if self.labels is None:
+ toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
+ toolchain_labels.remove('mbedToolchain')
+ self.labels = {
+ 'TARGET': self.target.get_labels(),
+ 'FEATURE': self.target.features,
+ 'TOOLCHAIN': toolchain_labels
+ }
+ return self.labels
+
+ def need_update(self, target, dependencies):
+ if self.build_all:
+ return True
+
+ if not exists(target):
+ return True
+
+ target_mod_time = stat(target).st_mtime
+
+ for d in dependencies:
+
+ # Some objects are not provided with full path and here we do not have
+ # information about the library paths. Safe option: assume an update
+ if not d or not exists(d):
+ return True
+
+ if stat(d).st_mtime >= target_mod_time:
+ return True
+
+ return False
+
+ def scan_resources(self, path):
+ labels = self.get_labels()
+ resources = Resources(path)
+ self.has_config = False
+
+ """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
+ When topdown is True, the caller can modify the dirnames list in-place
+ (perhaps using del or slice assignment), and walk() will only recurse into
+ the subdirectories whose names remain in dirnames; this can be used to prune
+ the search, impose a specific order of visiting, or even to inform walk()
+ about directories the caller creates or renames before it resumes walk()
+ again. Modifying dirnames when topdown is False is ineffective, because in
+ bottom-up mode the directories in dirnames are generated before dirpath
+ itself is generated.
+ """
+ for root, dirs, files in walk(path):
+ # Remove ignored directories
+ for d in copy(dirs):
+ if d == '.hg':
+ dir_path = join(root, d)
+ resources.repo_dirs.append(dir_path)
+ resources.repo_files.extend(self.scan_repository(dir_path))
+
+ if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
+ (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
+ (d.startswith('FEATURE_') and d[8:] not in labels['FEATURE']) or
+ (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])):
+ dirs.remove(d)
+
+ # Add root to include paths
+ resources.inc_dirs.append(root)
+
+ for file in files:
+ file_path = join(root, file)
+ _, ext = splitext(file)
+ ext = ext.lower()
+
+ if ext == '.s':
+ resources.s_sources.append(file_path)
+
+ elif ext == '.c':
+ resources.c_sources.append(file_path)
+
+ elif ext == '.cpp':
+ resources.cpp_sources.append(file_path)
+
+ elif ext == '.h' or ext == '.hpp':
+ if basename(file_path) == "mbed_config.h":
+ self.has_config = True
+ resources.headers.append(file_path)
+
+ elif ext == '.o':
+ resources.objects.append(file_path)
+
+ elif ext == self.LIBRARY_EXT:
+ resources.libraries.append(file_path)
+ resources.lib_dirs.add(root)
+
+ elif ext == self.LINKER_EXT:
+ if resources.linker_script is not None:
+ self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
+ resources.linker_script = file_path
+
+ elif ext == '.lib':
+ resources.lib_refs.append(file_path)
+
+ elif ext == '.bld':
+ resources.lib_builds.append(file_path)
+
+ elif file == '.hgignore':
+ resources.repo_files.append(file_path)
+
+ elif ext == '.hex':
+ resources.hex_files.append(file_path)
+
+ elif ext == '.bin':
+ resources.bin_files.append(file_path)
+
+ return resources
+
+ def scan_repository(self, path):
+ resources = []
+
+ for root, dirs, files in walk(path):
+ # Remove ignored directories
+ for d in copy(dirs):
+ if d == '.' or d == '..':
+ dirs.remove(d)
+
+ for file in files:
+ file_path = join(root, file)
+ resources.append(file_path)
+
+ return resources
+
+ def copy_files(self, files_paths, trg_path, rel_path=None):
+
+ # Handle a single file
+ if type(files_paths) != ListType: files_paths = [files_paths]
+
+ for source in files_paths:
+ if source is None:
+ files_paths.remove(source)
+
+ for source in files_paths:
+ if rel_path is not None:
+ relative_path = relpath(source, rel_path)
+ else:
+ _, relative_path = split(source)
+
+ target = join(trg_path, relative_path)
+
+ if (target != source) and (self.need_update(target, [source])):
+ self.progress("copy", relative_path)
+ mkdir(dirname(target))
+ copyfile(source, target)
+
+ def relative_object_path(self, build_path, base_dir, source):
+ source_dir, name, _ = split_path(source)
+ obj_dir = join(build_path, relpath(source_dir, base_dir))
+ mkdir(obj_dir)
+ return join(obj_dir, name + '.o')
+
+ def compile_sources(self, resources, build_path, inc_dirs=None):
+ # Web IDE progress bar for project build
+ files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
+ self.to_be_compiled = len(files_to_compile)
+ self.compiled = 0
+
+ #for i in self.build_params:
+ # self.debug(i)
+ # self.debug("%s" % self.build_params[i])
+
+ inc_paths = resources.inc_dirs
+ if inc_dirs is not None:
+ inc_paths.extend(inc_dirs)
+
+ objects = []
+ queue = []
+ prev_dir = None
+
+ # The dependency checking for C/C++ is delegated to the compiler
+ base_path = resources.base_path
+ files_to_compile.sort()
+ for source in files_to_compile:
+ _, name, _ = split_path(source)
+ object = self.relative_object_path(build_path, base_path, source)
+
+ # Avoid multiple mkdir() calls on same work directory
+ work_dir = dirname(object)
+ if work_dir is not prev_dir:
+ prev_dir = work_dir
+ mkdir(work_dir)
+
+ # Queue mode (multiprocessing)
+ commands = self.compile_command(source, object, inc_paths)
+ if commands is not None:
+ queue.append({
+ 'source': source,
+ 'object': object,
+ 'commands': commands,
+ 'work_dir': work_dir,
+ 'chroot': self.CHROOT
+ })
+ else:
+ objects.append(object)
+
+ # Use queues/multiprocessing if cpu count is higher than setting
+ jobs = self.jobs if self.jobs else cpu_count()
+ if jobs > CPU_COUNT_MIN and len(queue) > jobs:
+ return self.compile_queue(queue, objects)
+ else:
+ return self.compile_seq(queue, objects)
+
+ def compile_seq(self, queue, objects):
+ for item in queue:
+ result = compile_worker(item)
+
+ self.compiled += 1
+ self.progress("compile", item['source'], build_update=True)
+ for res in result['results']:
+ self.debug("Command: %s" % ' '.join(res['command']))
+ self.compile_output([
+ res['code'],
+ res['output'],
+ res['command']
+ ])
+ objects.append(result['object'])
+ return objects
+
    def compile_queue(self, queue, objects):
        """Run the queued compile jobs on a multiprocessing Pool,
        collecting each finished job's object file into *objects*.

        The loop polls the async results, draining finished jobs as they
        appear; it aborts the whole pool on the first ToolException or
        when the watchdog budget is exhausted.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count())
        p = Pool(processes=jobs_count)

        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))

        itr = 0
        while True:
            # Watchdog: 30000 iterations with a 10ms sleep per round is
            # roughly the 5 minutes claimed in the exception message.
            itr += 1
            if itr > 30000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            pending = 0
            for r in results:
                # NOTE(review): _ready is a private AsyncResult attribute;
                # the public API is r.ready() — confirm before upgrading
                # Python/multiprocessing.
                if r._ready is True:
                    try:
                        result = r.get()
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.debug("Command: %s" % ' '.join(res['command']))
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException, err:
                        # One job failed: kill all outstanding workers and
                        # propagate the failure to the caller.
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    pending += 1
                    if pending > jobs_count:
                        # More than a pool's worth still running; no point
                        # scanning the rest of the result list this round.
                        break


            if len(results) == 0:
                break

            sleep(0.01)

        results = None
        p.terminate()
        p.join()

        return objects
+
+ def compile_command(self, source, object, includes):
+ # Check dependencies
+ _, ext = splitext(source)
+ ext = ext.lower()
+
+ if ext == '.c' or ext == '.cpp':
+ base, _ = splitext(object)
+ dep_path = base + '.d'
+ deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
+ if len(deps) == 0 or self.need_update(object, deps):
+ if ext == '.c':
+ return self.compile_c(source, object, includes)
+ else:
+ return self.compile_cpp(source, object, includes)
+ elif ext == '.s':
+ deps = [source]
+ if self.need_update(object, deps):
+ return self.assemble(source, object, includes)
+ else:
+ return False
+
+ return None
+
+ def is_not_supported_error(self, output):
+ return "#error directive: [NOT_SUPPORTED]" in output
+
+ def compile_output(self, output=[]):
+ _rc = output[0]
+ _stderr = output[1]
+ command = output[2]
+
+ # Parse output for Warnings and Errors
+ self.parse_output(_stderr)
+ self.debug("Return: %s"% _rc)
+ for error_line in _stderr.splitlines():
+ self.debug("Output: %s"% error_line)
+
+
+ # Check return code
+ if _rc != 0:
+ for line in _stderr.splitlines():
+ self.tool_error(line)
+
+ if self.is_not_supported_error(_stderr):
+ raise NotSupportedException(_stderr)
+ else:
+ raise ToolException(_stderr)
+
+ def compile(self, cc, source, object, includes):
+ _, ext = splitext(source)
+ ext = ext.lower()
+
+ command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source]
+
+ if hasattr(self, "get_dep_opt"):
+ base, _ = splitext(object)
+ dep_path = base + '.d'
+ command.extend(self.get_dep_opt(dep_path))
+
+ if hasattr(self, "cc_extra"):
+ command.extend(self.cc_extra(base))
+
+ return [command]
+
    def compile_c(self, source, object, includes):
        # Compile a C source with the toolchain's C compiler command line.
        return self.compile(self.cc, source, object, includes)
+
    def compile_cpp(self, source, object, includes):
        # Compile a C++ source with the toolchain's C++ compiler command line.
        return self.compile(self.cppc, source, object, includes)
+
+ def build_library(self, objects, dir, name):
+ needed_update = False
+ lib = self.STD_LIB_NAME % name
+ fout = join(dir, lib)
+ if self.need_update(fout, objects):
+ self.info("Library: %s" % lib)
+ self.archive(objects, fout)
+ needed_update = True
+
+ return needed_update
+
+ def link_program(self, r, tmp_path, name):
+ needed_update = False
+ ext = 'bin'
+ if hasattr(self.target, 'OUTPUT_EXT'):
+ ext = self.target.OUTPUT_EXT
+
+ if hasattr(self.target, 'OUTPUT_NAMING'):
+ self.var("binary_naming", self.target.OUTPUT_NAMING)
+ if self.target.OUTPUT_NAMING == "8.3":
+ name = name[0:8]
+ ext = ext[0:3]
+
+ filename = name+'.'+ext
+ elf = join(tmp_path, name + '.elf')
+ bin = join(tmp_path, filename)
+
+ if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
+ needed_update = True
+ self.progress("link", name)
+ self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
+
+ if self.need_update(bin, [elf]):
+ needed_update = True
+ self.progress("elf2bin", name)
+
+ self.binary(r, elf, bin)
+
+ self.var("compile_succeded", True)
+ self.var("binary", filename)
+
+ return bin, needed_update
+
    def default_cmd(self, command):
        """Run *command*, mirror its stderr to the console and through the
        debug/tool_error channels, and raise ToolException on a non-zero
        exit code.

        NOTE: Python 2 print statements; run_cmd is a project helper
        returning (stdout, stderr, returncode).
        """
        _stdout, _stderr, _rc = run_cmd(command)
        # Print all warning / errors from stderr to console output
        for error_line in _stderr.splitlines():
            print error_line

        self.debug("Command: %s"% ' '.join(command))
        self.debug("Return: %s"% _rc)

        for output_line in _stdout.splitlines():
            self.debug("Output: %s"% output_line)
        for error_line in _stderr.splitlines():
            self.debug("Errors: %s"% error_line)

        # Non-zero exit: surface each stderr line via the tool_error
        # notification, then abort the build.
        if _rc != 0:
            for line in _stderr.splitlines():
                self.tool_error(line)
            raise ToolException(_stderr)
+
+ ### NOTIFICATIONS ###
    def info(self, message):
        # Emit an 'info'-level notification to the registered notifier.
        self.notify({'type': 'info', 'message': message})
+
    def debug(self, message):
        """Emit a '[DEBUG]'-prefixed notification, but only in verbose
        mode. A list message is joined with spaces first (ListType is
        Python 2's alias for the list type)."""
        if self.VERBOSE:
            if type(message) is ListType:
                message = ' '.join(message)
            message = "[DEBUG] " + message
            self.notify({'type': 'debug', 'message': message})
+
+ def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
+ self.notify({'type': 'cc',
+ 'severity': severity,
+ 'file': file,
+ 'line': line,
+ 'message': message,
+ 'target_name': target_name,
+ 'toolchain_name': toolchain_name})
+
+ def progress(self, action, file, build_update=False):
+ msg = {'type': 'progress', 'action': action, 'file': file}
+ if build_update:
+ msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
+ self.notify(msg)
+
    def tool_error(self, message):
        # Emit a 'tool_error'-level notification (one per stderr line).
        self.notify({'type': 'tool_error', 'message': message})
+
    def var(self, key, value):
        # Publish a build variable (key/value pair) through the notifier.
        self.notify({'type': 'var', 'key': key, 'val': value})
+
from tools.settings import ARM_BIN
from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from tools.settings import IAR_PATH

# Toolchain id -> directory holding that toolchain's binaries.
# 'uARM' (microlib) shares the standard ARM installation.
TOOLCHAIN_BIN_PATH = {
    'ARM': ARM_BIN,
    'uARM': ARM_BIN,
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}

# NOTE(review): imports placed at the bottom of the module, presumably
# because the concrete toolchain modules import from this package
# (circular import) — confirm before moving them to the top.
from tools.toolchains.arm import ARM_STD, ARM_MICRO
from tools.toolchains.gcc import GCC_ARM, GCC_CR
from tools.toolchains.iar import IAR

# Toolchain id -> concrete toolchain class implementing it.
TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}

# All supported toolchain ids.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
diff --git a/tools/toolchains/arm.py b/tools/toolchains/arm.py
new file mode 100644
index 0000000..e624fcf
--- /dev/null
+++ b/tools/toolchains/arm.py
@@ -0,0 +1,190 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import re
+from os.path import join
+import copy
+
+from tools.toolchains import mbedToolchain
+from tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB
+from tools.hooks import hook_tool
+from tools.settings import GOANNA_PATH
+
class ARM(mbedToolchain):
    """Base class for the Keil/ARM compiler (armcc/armlink) toolchains.

    Concrete flavours differ only in the C library they link against:
    ARM_STD (full standard library) and ARM_MICRO (microlib).
    """
    LINKER_EXT = '.sct'
    LIBRARY_EXT = '.ar'

    STD_LIB_NAME = "%s.ar"
    # armcc diagnostics look like:
    #   "<file>", line <n> (column <c>): Warning|Error: <message>
    # The named groups were missing from the patterns ('(?P' must be
    # followed by '<name>' in Python regexes); they are restored here to
    # match the group('...') lookups in parse_output()/parse_dependencies().
    DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)", line (?P<line>\d+)( \(column (?P<column>\d+)\)|): (?P<severity>Warning|Error): (?P<message>.+)')
    DEP_PATTERN = re.compile('\S+:\s(?P<file>.+)\n')

    DEFAULT_FLAGS = {
        'common': ["--apcs=interwork",
            "--brief_diagnostics"],
        'asm': ['-I"%s"' % ARM_INC],
        'c': ["-c", "--gnu", "-Otime", "--restrict", "--multibyte_chars", "--split_sections", "--md", "--no_depend_system_headers", '-I"%s"' % ARM_INC,
            "--c99", "-D__ASSERT_MSG" ],
        'cxx': ["--cpp", "--no_rtti", "-D__ASSERT_MSG"],
        'ld': [],
    }

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # Map mbed core names onto armcc --cpu values.
        if target.core == "Cortex-M0+":
            cpu = "Cortex-M0"
        elif target.core == "Cortex-M4F":
            cpu = "Cortex-M4.fp"
        elif target.core == "Cortex-M7F":
            cpu = "Cortex-M7.fp.sp"
        else:
            cpu = target.core

        main_cc = join(ARM_BIN, "armcc")

        # Work on a private deep copy so per-instance option tweaks never
        # leak into the class-level defaults.
        self.flags = copy.deepcopy(self.DEFAULT_FLAGS)
        self.flags['common'] += ["--cpu=%s" % cpu]
        if "save-asm" in self.options:
            self.flags['common'].extend(["--asm", "--interleave"])

        if "debug-info" in self.options:
            self.flags['common'].append("-g")
            self.flags['c'].append("-O0")
        else:
            self.flags['c'].append("-O3")

        self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] + self.flags['c']
        if not "analyze" in self.options:
            self.cc = [main_cc] + self.flags['common'] + self.flags['c']
            self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
        else:
            # Static-analysis builds route compilation through Goanna.
            self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c']
            self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c'] + self.flags['cxx']

        self.ld = [join(ARM_BIN, "armlink")]
        self.sys_libs = []

        self.ar = join(ARM_BIN, "armar")
        self.elf2bin = join(ARM_BIN, "fromelf")

    def remove_option(self, option):
        # Strip *option* from every compile-stage command line.
        for tool in [self.asm, self.cc, self.cppc]:
            if option in tool:
                tool.remove(option)

    def assemble(self, source, object, includes):
        # Preprocess first (armcc -E), then assemble the expanded file.
        tempfile = object + '.E.s'
        return [
            self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source],
            self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile])
        ]

    def parse_dependencies(self, dep_path):
        """Return the file names listed in an armcc --depend output file."""
        dependencies = []
        for line in open(dep_path).readlines():
            match = ARM.DEP_PATTERN.match(line)
            if match is not None:
                dependencies.append(match.group('file'))
        return dependencies

    def parse_output(self, output):
        """Scan compiler output for armcc diagnostics (and Goanna findings)
        and forward each through cc_info()."""
        for line in output.splitlines():
            match = ARM.DIAGNOSTIC_PATTERN.match(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message'),
                    target_name=self.target.name,
                    toolchain_name=self.name
                )
            match = self.goanna_parse_line(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message')
                )

    def get_dep_opt(self, dep_path):
        # armcc writes dependency info with --depend <file>.
        return ["--depend", dep_path]

    def archive(self, objects, lib_path):
        self.default_cmd([self.ar, '-r', lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        """Invoke armlink, honoring user library paths, an optional
        scatter file and a target-specific command-line hook."""
        if len(lib_dirs):
            args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--list=.link_totals.txt"]
        else:
            args = ["-o", output, "--info=totals", "--list=.link_totals.txt"]

        if mem_map:
            args.extend(["--scatter", mem_map])

        if hasattr(self.target, "link_cmdline_hook"):
            args = self.target.link_cmdline_hook(self.__class__.__name__, args)

        self.default_cmd(self.ld + args + objects + libraries + self.sys_libs)

    @hook_tool
    def binary(self, resources, elf, bin):
        # fromelf turns the linked ELF image into a raw binary.
        args = [self.elf2bin, '--bin', '-o', bin, elf]

        if hasattr(self.target, "binary_cmdline_hook"):
            args = self.target.binary_cmdline_hook(self.__class__.__name__, args)

        self.default_cmd(args)
+
class ARM_STD(ARM):
    # ARM toolchain flavour linking against the full standard C library.
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
        # Point armlink at the standard ARM C library directory.
        self.ld.append("--libpath=%s" % ARM_LIB)
+
+
class ARM_MICRO(ARM):
    # ARM toolchain flavour linking against microlib (size-optimized libc).
    PATCHED_LIBRARY = False

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # add microlib to the command line flags
        self.asm += ["-D__MICROLIB"]
        self.cc += ["--library_type=microlib", "-D__MICROLIB"]
        self.cppc += ["--library_type=microlib", "-D__MICROLIB"]

        # the exporter uses --library_type flag to set microlib
        self.flags['c'] += ["--library_type=microlib"]
        self.flags['cxx'] += ["--library_type=microlib"]
        self.flags['ld'].append("--library_type=microlib")

        # We had to patch microlib to add C++ support
        # In later releases this patch should have entered mainline
        if ARM_MICRO.PATCHED_LIBRARY:
            self.flags['ld'].append("--noscanlib")

            # System Libraries
            self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])

            if target.core == "Cortex-M3":
                self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ws", "cpprt_w"]])

            elif target.core in ["Cortex-M0", "Cortex-M0+"]:
                self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
        else:
            # Unpatched microlib: fall back to the standard library path.
            self.ld.append("--libpath=%s" % ARM_LIB)
diff --git a/tools/toolchains/gcc.py b/tools/toolchains/gcc.py
new file mode 100644
index 0000000..a6336b5
--- /dev/null
+++ b/tools/toolchains/gcc.py
@@ -0,0 +1,211 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import re
+from os.path import join, basename, splitext
+
+from tools.toolchains import mbedToolchain
+from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
+from tools.settings import GOANNA_PATH
+from tools.hooks import hook_tool
+
class GCC(mbedToolchain):
    """Base class for the GNU (arm-none-eabi) toolchains; concrete
    flavours (GCC_ARM, GCC_CR) differ in install path and extra flags."""
    LINKER_EXT = '.ld'
    LIBRARY_EXT = '.a'

    STD_LIB_NAME = "lib%s.a"
    # Second part of a GCC diagnostic: "<line>:<col>: warning|error: <msg>".
    # The named groups were missing from the pattern ('(?P' must be
    # followed by '<name>' in Python regexes); restored here to match the
    # group('...') lookups in parse_output() below.
    DIAGNOSTIC_PATTERN = re.compile('((?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, tool_path="", extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # Map mbed core names onto gcc -mcpu values.
        if target.core == "Cortex-M0+":
            cpu = "cortex-m0plus"
        elif target.core == "Cortex-M4F":
            cpu = "cortex-m4"
        elif target.core == "Cortex-M7F":
            cpu = "cortex-m7"
        else:
            cpu = target.core.lower()

        self.cpu = ["-mcpu=%s" % cpu]
        if target.core.startswith("Cortex"):
            self.cpu.append("-mthumb")

        # FPU selection for the floating-point cores.
        if target.core == "Cortex-M4F":
            self.cpu.append("-mfpu=fpv4-sp-d16")
            self.cpu.append("-mfloat-abi=softfp")
        elif target.core == "Cortex-M7F":
            self.cpu.append("-mfpu=fpv5-d16")
            self.cpu.append("-mfloat-abi=softfp")

        if target.core == "Cortex-A9":
            self.cpu.append("-mthumb-interwork")
            self.cpu.append("-marm")
            self.cpu.append("-march=armv7-a")
            self.cpu.append("-mfpu=vfpv3")
            self.cpu.append("-mfloat-abi=hard")
            self.cpu.append("-mno-unaligned-access")


        # Note: We are using "-O2" instead of "-Os" to avoid this known GCC bug:
        # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46762
        common_flags = ["-c", "-Wall", "-Wextra",
            "-Wno-unused-parameter", "-Wno-missing-field-initializers",
            "-fmessage-length=0", "-fno-exceptions", "-fno-builtin",
            "-ffunction-sections", "-fdata-sections",
            "-MMD", "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
            ] + self.cpu

        if "save-asm" in self.options:
            common_flags.append("-save-temps")

        if "debug-info" in self.options:
            common_flags.append("-g")
            common_flags.append("-O0")
        else:
            common_flags.append("-O2")

        main_cc = join(tool_path, "arm-none-eabi-gcc")
        main_cppc = join(tool_path, "arm-none-eabi-g++")
        self.asm = [main_cc, "-x", "assembler-with-cpp"] + common_flags
        if not "analyze" in self.options:
            self.cc = [main_cc, "-std=gnu99"] + common_flags
            self.cppc =[main_cppc, "-std=gnu++98", "-fno-rtti"] + common_flags
        else:
            # Static-analysis builds route compilation through Goanna.
            self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "-std=gnu99", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags
            self.cppc= [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cppc.replace('\\', '/'), "-std=gnu++98", "-fno-rtti", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags

        self.ld = [join(tool_path, "arm-none-eabi-gcc"), "-Wl,--gc-sections", "-Wl,--wrap,main"] + self.cpu
        self.sys_libs = ["stdc++", "supc++", "m", "c", "gcc"]

        self.ar = join(tool_path, "arm-none-eabi-ar")
        self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")

    def assemble(self, source, object, includes):
        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]

    def parse_dependencies(self, dep_path):
        """Return the dependency file names from a gcc -MMD .d file
        (skipping the first line, which names the target)."""
        dependencies = []
        for line in open(dep_path).readlines()[1:]:
            file = line.replace('\\\n', '').strip()
            if file:
                # GCC might list more than one dependency on a single line, in this case
                # the dependencies are separated by a space. However, a space might also
                # indicate an actual space character in a dependency path, but in this case
                # the space character is prefixed by a backslash.
                # Temporary replace all '\ ' with a special char that is not used (\a in this
                # case) to keep them from being interpreted by 'split' (they will be converted
                # back later to a space char)
                file = file.replace('\\ ', '\a')
                if file.find(" ") == -1:
                    dependencies.append(file.replace('\a', ' '))
                else:
                    dependencies = dependencies + [f.replace('\a', ' ') for f in file.split(" ")]
        return dependencies

    def is_not_supported_error(self, output):
        return "error: #error [NOT_SUPPORTED]" in output

    def parse_output(self, output):
        # The warning/error notification is multiline: the first line
        # carries the file path, the next one the line/severity/message.
        WHERE, WHAT = 0, 1
        state, file, message = WHERE, None, None
        for line in output.splitlines():
            match = self.goanna_parse_line(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message'),
                    target_name=self.target.name,
                    toolchain_name=self.name
                )
                continue

            # Each line should start with the file information: "filepath: ..."
            # i should point past the file path                              ^
            # avoid the first column in Windows (C:\)
            i = line.find(':', 2)
            if i == -1: continue

            if state == WHERE:
                file = line[:i]
                message = line[i+1:].strip() + ' '
                state = WHAT

            elif state == WHAT:
                match = GCC.DIAGNOSTIC_PATTERN.match(line[i+1:])
                if match is None:
                    state = WHERE
                    continue

                self.cc_info(
                    match.group('severity'),
                    file, match.group('line'),
                    message + match.group('message')
                )

    def archive(self, objects, lib_path):
        self.default_cmd([self.ar, "rcs", lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        """Invoke the gcc link driver with the linker script *mem_map*,
        wrapping the libraries in --start-group/--end-group so the linker
        resolves circular dependencies."""
        libs = []
        for l in libraries:
            # Strip the 'lib' prefix and extension: libfoo.a -> -lfoo.
            name, _ = splitext(basename(l))
            libs.append("-l%s" % name[3:])
        libs.extend(["-l%s" % l for l in self.sys_libs])

        self.default_cmd(self.hook.get_cmdline_linker(self.ld + ["-T%s" % mem_map, "-o", output] +
            objects + ["-L%s" % L for L in lib_dirs] + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]))

    @hook_tool
    def binary(self, resources, elf, bin):
        # objcopy turns the linked ELF image into a raw binary.
        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, "-O", "binary", elf, bin]))
+
+
class GCC_ARM(GCC):
    # GCC toolchain from the GNU ARM Embedded distribution (GCC_ARM_PATH).
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        GCC.__init__(self, target, options, notify, macros, silent, GCC_ARM_PATH, extra_verbose=extra_verbose)

        # Use latest gcc nanolib
        if "thread-safe" not in self.options:
            self.ld.append("--specs=nano.specs")
        # Pull in float printf/scanf support symbols for these targets;
        # note the two target lists use different option spellings
        # ("-u _x" vs "-u_x") — kept as-is.
        if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368"]:
            self.ld.extend(["-u _printf_float", "-u _scanf_float"])
        elif target.name in ["RZ_A1H", "VK_RZ_A1H", "ARCH_MAX", "DISCO_F407VG", "DISCO_F429ZI", "DISCO_F469NI", "NUCLEO_F401RE", "NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "ELMO_F411RE", "MTS_MDOT_F411RE", "MTS_DRAGONFLY_F411RE", "DISCO_F746NG"]:
            self.ld.extend(["-u_printf_float", "-u_scanf_float"])

        self.sys_libs.append("nosys")
+
+
class GCC_CR(GCC):
    # GCC toolchain as shipped with the Code Red / LPCXpresso IDE.
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        GCC.__init__(self, target, options, notify, macros, silent, GCC_CR_PATH, extra_verbose=extra_verbose)

        # Preprocessor defines expected by the Code Red SDK environment.
        additional_compiler_flags = [
            "-D__NEWLIB__", "-D__CODE_RED", "-D__USE_CMSIS", "-DCPP_USE_HEAP",
        ]
        self.cc += additional_compiler_flags
        self.cppc += additional_compiler_flags

        # Use latest gcc nanolib
        self.ld.append("--specs=nano.specs")
        if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368"]:
            self.ld.extend(["-u _printf_float", "-u _scanf_float"])
        self.ld += ["-nostdlib"]
+
diff --git a/tools/toolchains/iar.py b/tools/toolchains/iar.py
new file mode 100644
index 0000000..025503e
--- /dev/null
+++ b/tools/toolchains/iar.py
@@ -0,0 +1,122 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import re
+from os import remove
+from os.path import join, exists
+
+from tools.toolchains import mbedToolchain
+from tools.settings import IAR_PATH
+from tools.settings import GOANNA_PATH
+from tools.hooks import hook_tool
+
class IAR(mbedToolchain):
    """IAR Embedded Workbench toolchain (iccarm/iasmarm/ilinkarm)."""
    LIBRARY_EXT = '.a'
    LINKER_EXT = '.icf'
    STD_LIB_NAME = "%s.a"

    # iccarm diagnostics look like: "<file>",<line>  Warning|Error<message>.
    # The named groups were missing from the pattern ('(?P' must be
    # followed by '<name>' in Python regexes); restored here to match the
    # group('...') lookups in parse_output() below.
    DIAGNOSTIC_PATTERN = re.compile('"(?P<file>[^"]+)",(?P<line>[\d]+)\s+(?P<severity>Warning|Error)(?P<message>.+)')

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
        # iccarm has no dedicated M7F cpu name; select the FPU separately.
        if target.core == "Cortex-M7F":
            cpuchoice = "Cortex-M7"
        else:
            cpuchoice = target.core
        c_flags = [
            "--cpu=%s" % cpuchoice, "--thumb",
            "--dlib_config", join(IAR_PATH, "inc", "c", "DLib_Config_Full.h"),
            "-e", # Enable IAR language extension
            "--no_wrap_diagnostics",
            # Pa050: No need to be notified about "non-native end of line sequence"
            # Pa084: Pointless integer comparison -> checks for the values of an enum, but we use values outside of the enum to notify errors (ie: NC).
            # Pa093: Implicit conversion from float to integer (ie: wait_ms(85.4) -> wait_ms(85))
            # Pa082: Operation involving two values from two registers (ie: (float)(*obj->MR)/(float)(LPC_PWM1->MR0))
            "--diag_suppress=Pa050,Pa084,Pa093,Pa082",
        ]

        if target.core == "Cortex-M7F":
            c_flags.append("--fpu=VFPv5_sp")


        if "debug-info" in self.options:
            c_flags.append("-r")
            c_flags.append("-On")
        else:
            c_flags.append("-Oh")

        IAR_BIN = join(IAR_PATH, "bin")
        main_cc = join(IAR_BIN, "iccarm")
        if target.core == "Cortex-M7F":
            self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", cpuchoice] + ["--fpu", "VFPv5_sp"]
        else:
            self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", cpuchoice]
        if not "analyze" in self.options:
            self.cc = [main_cc] + c_flags
            self.cppc = [main_cc, "--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags
        else:
            # Static-analysis builds route compilation through Goanna.
            self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + c_flags
            self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags
        self.ld = join(IAR_BIN, "ilinkarm")
        self.ar = join(IAR_BIN, "iarchive")
        self.elf2bin = join(IAR_BIN, "ielftool")

    def parse_output(self, output):
        """Scan compiler output for iccarm diagnostics (and Goanna
        findings) and forward each through cc_info()."""
        for line in output.splitlines():
            match = IAR.DIAGNOSTIC_PATTERN.match(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message'),
                    target_name=self.target.name,
                    toolchain_name=self.name
                )
            match = self.goanna_parse_line(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message')
                )

    def get_dep_opt(self, dep_path):
        # iccarm writes dependency info with --dependencies <file>.
        return ["--dependencies", dep_path]

    def cc_extra(self, base):
        # Request an assembly listing file next to the object.
        return ["-l", base + '.s']

    def parse_dependencies(self, dep_path):
        # iccarm's dependency file lists one path per line.
        return [path.strip() for path in open(dep_path).readlines()
                if (path and not path.isspace())]

    def assemble(self, source, object, includes):
        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]

    def archive(self, objects, lib_path):
        # iarchive refuses to overwrite; delete any stale archive first.
        if exists(lib_path):
            remove(lib_path)
        self.default_cmd([self.ar, lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        args = [self.ld, "-o", output, "--config", mem_map, "--skip_dynamic_initialization", "--threaded_lib"]
        self.default_cmd(self.hook.get_cmdline_linker(args + objects + libraries))

    @hook_tool
    def binary(self, resources, elf, bin):
        # ielftool turns the linked ELF image into a raw binary.
        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, '--bin', elf, bin]))
diff --git a/tools/upload_results.py b/tools/upload_results.py
new file mode 100644
index 0000000..695c849
--- /dev/null
+++ b/tools/upload_results.py
@@ -0,0 +1,373 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import sys
+import argparse
+import xml.etree.ElementTree as ET
+import requests
+import urlparse
+
+def create_headers(args):
+ return { 'X-Api-Key': args.api_key }
+
+def finish_command(command, response):
+ print(command, response.status_code, response.reason)
+ print(response.text)
+
+ if response.status_code < 400:
+ sys.exit(0)
+ else:
+ sys.exit(2)
+
+def create_build(args):
+ build = {}
+ build['buildType'] = args.build_type
+ build['number'] = args.build_number
+ build['source'] = args.build_source
+ build['status'] = 'running'
+
+ r = requests.post(urlparse.urljoin(args.url, "api/builds"), headers=create_headers(args), json=build)
+
+ if r.status_code < 400:
+ if args.property_file_format:
+ print("MBED_BUILD_ID=" + r.text)
+ else:
+ print(r.text)
+
+ sys.exit(0)
+ else:
+ sys.exit(2)
+
+def finish_build(args):
+ data = {}
+ data['status'] = 'completed'
+
+ r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
+ finish_command('finish-build', r)
+
+def promote_build(args):
+ data = {}
+ data['buildType'] = 'Release'
+
+ r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
+ finish_command('promote-build', r)
+
+def abort_build(args):
+ data = {}
+ data['status'] = 'aborted'
+
+ r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
+ finish_command('abort-build', r)
+
+def add_project_runs(args):
+ '''
+    --------------------------------------
+    Notes on 'project_run_data' structure:
+    --------------------------------------
+ 'projectRuns' - Tree structure used to keep track of what projects have
+ been logged in different report files. The tree is organized as follows:
+
+ 'projectRuns': { - Root element of tree
+
+ 'hostOs': { - Host OS on which project was built/tested
+ - ex. windows, linux, or mac
+
+ 'platform': { - Platform for which project was built/tested
+ (Corresponds to platform names in targets.py)
+ - ex. K64F, LPC1768, NRF51822, etc.
+
+ 'toolchain': { - Toolchain with which project was built/tested
+ (Corresponds to TOOLCHAIN_CLASSES names in toolchains/__init__.py)
+ - ex. ARM, uARM, GCC_ARM, etc.
+
+                'project': { - Project that was built/tested
+ (Corresponds to test id in tests.py or library id in libraries.py)
+ - For tests, ex. MBED_A1, MBED_11, DTCT_1 etc.
+ - For libraries, ex. MBED, RTX, RTOS, etc.
+
+ },
+ ...
+ },
+ ...
+ },
+ ...
+ }
+ }
+
+ 'platforms_set' - Set of all the platform names mentioned in the given report files
+
+ 'toolchains_set' - Set of all the toolchain names mentioned in the given report files
+
+ 'names_set' - Set of all the project names mentioned in the given report files
+
+ 'hostOses_set' - Set of all the host names given (only given by the command line arguments)
+ '''
+
+ project_run_data = {}
+ project_run_data['projectRuns'] = {}
+ project_run_data['platforms_set'] = set()
+ project_run_data['vendors_set'] = set()
+ project_run_data['toolchains_set'] = set()
+ project_run_data['names_set'] = set()
+ project_run_data['hostOses_set'] = set()
+ project_run_data['hostOses_set'].add(args.host_os)
+
+ if args.build_report:
+ add_report(project_run_data, args.build_report, True, args.build_id, args.host_os)
+
+ if args.test_report:
+ add_report(project_run_data, args.test_report, False, args.build_id, args.host_os)
+
+ ts_data = format_project_run_data(project_run_data, args.limit)
+ total_result = True
+
+ total_parts = len(ts_data)
+ print "Uploading project runs in %d parts" % total_parts
+
+ for index, data in enumerate(ts_data):
+ r = requests.post(urlparse.urljoin(args.url, "api/projectRuns"), headers=create_headers(args), json=data)
+ print("add-project-runs part %d/%d" % (index + 1, total_parts), r.status_code, r.reason)
+ print(r.text)
+
+ if r.status_code >= 400:
+ total_result = False
+
+ if total_result:
+ print "'add-project-runs' completed successfully"
+ sys.exit(0)
+ else:
+ print "'add-project-runs' failed"
+ sys.exit(2)
+
+def prep_ts_data():
+ ts_data = {}
+ ts_data['projectRuns'] = []
+ ts_data['platforms'] = set()
+ ts_data['vendors'] = set()
+ ts_data['toolchains'] = set()
+ ts_data['names'] = set()
+ ts_data['hostOses'] = set()
+ return ts_data
+
+def finish_ts_data(ts_data, project_run_data):
+ ts_data['platforms'] = list(ts_data['platforms'])
+ ts_data['vendors'] = list(ts_data['vendors'])
+ ts_data['toolchains'] = list(ts_data['toolchains'])
+ ts_data['names'] = list(ts_data['names'])
+ ts_data['hostOses'] = list(ts_data['hostOses'])
+
+ # Add all vendors to every projectRun submission
+ # TODO Either add "vendor" to the "project_run_data"
+ # or remove "vendor" entirely from the viewer
+ ts_data['vendors'] = list(project_run_data['vendors_set'])
+
+def format_project_run_data(project_run_data, limit):
+ all_ts_data = []
+ current_limit_count = 0
+
+ ts_data = prep_ts_data()
+ ts_data['projectRuns'] = []
+
+ for hostOs_name, hostOs in project_run_data['projectRuns'].iteritems():
+ for platform_name, platform in hostOs.iteritems():
+ for toolchain_name, toolchain in platform.iteritems():
+ for project_name, project in toolchain.iteritems():
+ if current_limit_count >= limit:
+ finish_ts_data(ts_data, project_run_data)
+ all_ts_data.append(ts_data)
+ ts_data = prep_ts_data()
+ current_limit_count = 0
+
+ ts_data['projectRuns'].append(project)
+ ts_data['platforms'].add(platform_name)
+ ts_data['toolchains'].add(toolchain_name)
+ ts_data['names'].add(project_name)
+ ts_data['hostOses'].add(hostOs_name)
+ current_limit_count += 1
+
+ if current_limit_count > 0:
+ finish_ts_data(ts_data, project_run_data)
+ all_ts_data.append(ts_data)
+
+ return all_ts_data
+
+def find_project_run(projectRuns, project):
+ keys = ['hostOs', 'platform', 'toolchain', 'project']
+
+ elem = projectRuns
+
+ for key in keys:
+ if not project[key] in elem:
+ return None
+
+ elem = elem[project[key]]
+
+ return elem
+
+def add_project_run(projectRuns, project):
+ keys = ['hostOs', 'platform', 'toolchain']
+
+ elem = projectRuns
+
+ for key in keys:
+ if not project[key] in elem:
+ elem[project[key]] = {}
+
+ elem = elem[project[key]]
+
+ elem[project['project']] = project
+
+def update_project_run_results(project_to_update, project, is_build):
+ if is_build:
+ project_to_update['buildPass'] = project['buildPass']
+ project_to_update['buildResult'] = project['buildResult']
+ project_to_update['buildOutput'] = project['buildOutput']
+ else:
+ project_to_update['testPass'] = project['testPass']
+ project_to_update['testResult'] = project['testResult']
+ project_to_update['testOutput'] = project['testOutput']
+
+def update_project_run(projectRuns, project, is_build):
+ found_project = find_project_run(projectRuns, project)
+ if found_project:
+ update_project_run_results(found_project, project, is_build)
+ else:
+ add_project_run(projectRuns, project)
+
+def add_report(project_run_data, report_file, is_build, build_id, host_os):
+ tree = None
+
+ try:
+ tree = ET.parse(report_file)
+ except:
+ print(sys.exc_info()[0])
+ print('Invalid path to report: %s', report_file)
+ sys.exit(1)
+
+ test_suites = tree.getroot()
+
+ for test_suite in test_suites:
+ platform = ""
+ toolchain = ""
+ vendor = ""
+ for properties in test_suite.findall('properties'):
+ for property in properties.findall('property'):
+ if property.attrib['name'] == 'target':
+ platform = property.attrib['value']
+ project_run_data['platforms_set'].add(platform)
+ elif property.attrib['name'] == 'toolchain':
+ toolchain = property.attrib['value']
+ project_run_data['toolchains_set'].add(toolchain)
+ elif property.attrib['name'] == 'vendor':
+ vendor = property.attrib['value']
+ project_run_data['vendors_set'].add(vendor)
+
+ for test_case in test_suite.findall('testcase'):
+ projectRun = {}
+ projectRun['build'] = build_id
+ projectRun['hostOs'] = host_os
+ projectRun['platform'] = platform
+ projectRun['toolchain'] = toolchain
+ projectRun['project'] = test_case.attrib['classname'].split('.')[-1]
+ projectRun['vendor'] = vendor
+
+ project_run_data['names_set'].add(projectRun['project'])
+
+ should_skip = False
+ skips = test_case.findall('skipped')
+
+ if skips:
+ should_skip = skips[0].attrib['message'] == 'SKIP'
+
+ if not should_skip:
+ system_outs = test_case.findall('system-out')
+
+ output = ""
+ if system_outs:
+ output = system_outs[0].text
+
+ if is_build:
+ projectRun['buildOutput'] = output
+ else:
+ projectRun['testOutput'] = output
+
+ errors = test_case.findall('error')
+ failures = test_case.findall('failure')
+ projectRunPass = None
+ result = None
+
+ if errors:
+ projectRunPass = False
+ result = errors[0].attrib['message']
+ elif failures:
+ projectRunPass = False
+ result = failures[0].attrib['message']
+ elif skips:
+ projectRunPass = True
+ result = skips[0].attrib['message']
+ else:
+ projectRunPass = True
+ result = 'OK'
+
+ if is_build:
+ projectRun['buildPass'] = projectRunPass
+ projectRun['buildResult'] = result
+ else:
+ projectRun['testPass'] = projectRunPass
+ projectRun['testResult'] = result
+
+ update_project_run(project_run_data['projectRuns'], projectRun, is_build)
+
+def main(arguments):
+ # Register and parse command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-u', '--url', required=True, help='url to ci site')
+ parser.add_argument('-k', '--api-key', required=True, help='api-key for posting data')
+
+ subparsers = parser.add_subparsers(help='subcommand help')
+
+ create_build_parser = subparsers.add_parser('create-build', help='create a new build')
+ create_build_parser.add_argument('-b', '--build-number', required=True, help='build number')
+ create_build_parser.add_argument('-T', '--build-type', choices=['Nightly', 'Limited', 'Pull_Request', 'Release_Candidate'], required=True, help='type of build')
+ create_build_parser.add_argument('-s', '--build-source', required=True, help='url to source of build')
+ create_build_parser.add_argument('-p', '--property-file-format', action='store_true', help='print result in the property file format')
+ create_build_parser.set_defaults(func=create_build)
+
+ finish_build_parser = subparsers.add_parser('finish-build', help='finish a running build')
+ finish_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
+ finish_build_parser.set_defaults(func=finish_build)
+
+ finish_build_parser = subparsers.add_parser('promote-build', help='promote a build to a release')
+ finish_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
+ finish_build_parser.set_defaults(func=promote_build)
+
+ abort_build_parser = subparsers.add_parser('abort-build', help='abort a running build')
+ abort_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
+ abort_build_parser.set_defaults(func=abort_build)
+
+ add_project_runs_parser = subparsers.add_parser('add-project-runs', help='add project runs to a build')
+ add_project_runs_parser.add_argument('-b', '--build-id', required=True, help='build id')
+ add_project_runs_parser.add_argument('-r', '--build-report', required=False, help='path to junit xml build report')
+ add_project_runs_parser.add_argument('-t', '--test-report', required=False, help='path to junit xml test report')
+ add_project_runs_parser.add_argument('-o', '--host-os', required=True, help='host os on which test was run')
+ add_project_runs_parser.add_argument('-l', '--limit', required=False, type=int, default=1000, help='Limit the number of project runs sent at a time to avoid HTTP errors (default is 1000)')
+ add_project_runs_parser.set_defaults(func=add_project_runs)
+
+ args = parser.parse_args(arguments)
+ args.func(args)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
\ No newline at end of file
diff --git a/tools/utils.py b/tools/utils.py
new file mode 100644
index 0000000..21f0e14
--- /dev/null
+++ b/tools/utils.py
@@ -0,0 +1,172 @@
+"""
+mbed SDK
+Copyright (c) 2011-2013 ARM Limited
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import sys
+import inspect
+import os
+from os import listdir, remove, makedirs
+from shutil import copyfile
+from os.path import isdir, join, exists, split, relpath, splitext
+from subprocess import Popen, PIPE, STDOUT, call
+
+
+def cmd(l, check=True, verbose=False, shell=False, cwd=None):
+ text = l if shell else ' '.join(l)
+ if verbose:
+ print text
+ rc = call(l, shell=shell, cwd=cwd)
+ if check and rc != 0:
+ raise Exception('ERROR %d: "%s"' % (rc, text))
+
+
+def run_cmd(command, wd=None, redirect=False):
+ assert is_cmd_valid(command[0])
+ p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=wd)
+ _stdout, _stderr = p.communicate()
+ return _stdout, _stderr, p.returncode
+
+
+def run_cmd_ext(command):
+ assert is_cmd_valid(command[0])
+ p = Popen(command, stdout=PIPE, stderr=PIPE)
+ _stdout, _stderr = p.communicate()
+ return _stdout, _stderr, p.returncode
+
+
+def is_cmd_valid(cmd):
+ caller = get_caller_name()
+ abspath = find_cmd_abspath(cmd)
+ if not abspath:
+ error("%s: Command '%s' can't be found" % (caller, cmd))
+ if not is_exec(abspath):
+ error("%s: Command '%s' resolves to file '%s' which is not executable" % (caller, cmd, abspath))
+ return True
+
+
+def is_exec(path):
+ return os.access(path, os.X_OK) or os.access(path+'.exe', os.X_OK)
+
+
+def find_cmd_abspath(cmd):
+ """ Returns the absolute path to a command.
+ None is returned if no absolute path was found.
+ """
+ if exists(cmd) or exists(cmd + '.exe'):
+ return os.path.abspath(cmd)
+ if not 'PATH' in os.environ:
+ raise Exception("Can't find command path for current platform ('%s')" % sys.platform)
+ PATH=os.environ['PATH']
+ for path in PATH.split(os.pathsep):
+ abspath = '%s/%s' % (path, cmd)
+ if exists(abspath) or exists(abspath + '.exe'):
+ return abspath
+
+
+def mkdir(path):
+ if not exists(path):
+ makedirs(path)
+
+
+def copy_file(src, dst):
+ """ Implement the behaviour of "shutil.copy(src, dst)" without copying the
+ permissions (this was causing errors with directories mounted with samba)
+ """
+ if isdir(dst):
+ _, file = split(src)
+ dst = join(dst, file)
+ copyfile(src, dst)
+
+
+def delete_dir_files(dir):
+ if not exists(dir):
+ return
+
+ for f in listdir(dir):
+ file = join(dir, f)
+ if not isdir(file):
+ remove(file)
+
+
+def get_caller_name(steps=2):
+ """
+ When called inside a function, it returns the name
+ of the caller of that function.
+ """
+ return inspect.stack()[steps][3]
+
+
+def error(msg):
+ print("ERROR: %s" % msg)
+ sys.exit(1)
+
+
+def rel_path(path, base, dot=False):
+ p = relpath(path, base)
+ if dot and not p.startswith('.'):
+ p = './' + p
+ return p
+
+
+class ToolException(Exception):
+ pass
+
+class NotSupportedException(Exception):
+ pass
+
+def split_path(path):
+ base, file = split(path)
+ name, ext = splitext(file)
+ return base, name, ext
+
+
+def args_error(parser, message):
+ print "\n\n%s\n\n" % message
+ parser.print_help()
+ sys.exit()
+
+
+def construct_enum(**enums):
+ """ Create your own pseudo-enums """
+ return type('Enum', (), enums)
+
+
+def check_required_modules(required_modules, verbose=True):
+    """ Function checks for Python modules which should be "importable" (installed)
+        before the test suite can be used.
+        @return True if all required modules are already installed
+    """
+ import imp
+ not_installed_modules = []
+ for module_name in required_modules:
+ try:
+ imp.find_module(module_name)
+ except ImportError as e:
+ # We also test against a rare case: module is an egg file
+ try:
+ __import__(module_name)
+ except ImportError as e:
+ not_installed_modules.append(module_name)
+ if verbose:
+ print "Error: %s" % e
+
+ if verbose:
+ if not_installed_modules:
+ print "Warning: Module(s) %s not installed. Please install required module(s) before using this script."% (', '.join(not_installed_modules))
+
+ if not_installed_modules:
+ return False
+ else:
+ return True
diff --git a/workspace_tools/.mbedignore b/workspace_tools/.mbedignore
deleted file mode 100644
index f59ec20..0000000
--- a/workspace_tools/.mbedignore
+++ /dev/null
@@ -1 +0,0 @@
-*
\ No newline at end of file
diff --git a/workspace_tools/__init__.py b/workspace_tools/__init__.py
deleted file mode 100644
index 1fa8431..0000000
--- a/workspace_tools/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
diff --git a/workspace_tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin b/workspace_tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin
deleted file mode 100755
index e5c640d..0000000
--- a/workspace_tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin
+++ /dev/null
Binary files differ
diff --git a/workspace_tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin b/workspace_tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin
deleted file mode 100755
index 5a270bc..0000000
--- a/workspace_tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin
+++ /dev/null
Binary files differ
diff --git a/workspace_tools/build.py b/workspace_tools/build.py
deleted file mode 100755
index f61bbfc..0000000
--- a/workspace_tools/build.py
+++ /dev/null
@@ -1,269 +0,0 @@
-#! /usr/bin/env python2
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-LIBRARIES BUILD
-"""
-import sys
-from time import time
-from os.path import join, abspath, dirname
-
-
-# Be sure that the tools directory is in the search path
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-
-from workspace_tools.toolchains import TOOLCHAINS
-from workspace_tools.targets import TARGET_NAMES, TARGET_MAP
-from workspace_tools.options import get_default_options_parser
-from workspace_tools.build_api import build_mbed_libs, build_lib
-from workspace_tools.build_api import mcu_toolchain_matrix
-from workspace_tools.build_api import static_analysis_scan, static_analysis_scan_lib, static_analysis_scan_library
-from workspace_tools.build_api import print_build_results
-from workspace_tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
-
-if __name__ == '__main__':
- start = time()
-
- # Parse Options
- parser = get_default_options_parser()
-
- # Extra libraries
- parser.add_option("-r", "--rtos",
- action="store_true",
- dest="rtos",
- default=False,
- help="Compile the rtos")
-
- parser.add_option("--rpc",
- action="store_true",
- dest="rpc",
- default=False,
- help="Compile the rpc library")
-
- parser.add_option("-e", "--eth",
- action="store_true", dest="eth",
- default=False,
- help="Compile the ethernet library")
-
- parser.add_option("-U", "--usb_host",
- action="store_true",
- dest="usb_host",
- default=False,
- help="Compile the USB Host library")
-
- parser.add_option("-u", "--usb",
- action="store_true",
- dest="usb",
- default=False,
- help="Compile the USB Device library")
-
- parser.add_option("-d", "--dsp",
- action="store_true",
- dest="dsp",
- default=False,
- help="Compile the DSP library")
-
- parser.add_option("-F", "--fat",
- action="store_true",
- dest="fat",
- default=False,
- help="Compile FS and SD card file system library")
-
- parser.add_option("-b", "--ublox",
- action="store_true",
- dest="ublox",
- default=False,
- help="Compile the u-blox library")
-
- parser.add_option("", "--cpputest",
- action="store_true",
- dest="cpputest_lib",
- default=False,
- help="Compiles 'cpputest' unit test library (library should be on the same directory level as mbed repository)")
-
- parser.add_option("-D", "",
- action="append",
- dest="macros",
- help="Add a macro definition")
-
- parser.add_option("-S", "--supported-toolchains",
- action="store_true",
- dest="supported_toolchains",
- default=False,
- help="Displays supported matrix of MCUs and toolchains")
-
- parser.add_option("", "--cppcheck",
- action="store_true",
- dest="cppcheck_validation",
- default=False,
- help="Forces 'cppcheck' static code analysis")
-
- parser.add_option('-f', '--filter',
- dest='general_filter_regex',
- default=None,
- help='For some commands you can use filter to filter out results')
-
- parser.add_option("-j", "--jobs", type="int", dest="jobs",
- default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
-
- parser.add_option("-v", "--verbose",
- action="store_true",
- dest="verbose",
- default=False,
- help="Verbose diagnostic output")
-
- parser.add_option("--silent",
- action="store_true",
- dest="silent",
- default=False,
- help="Silent diagnostic output (no copy, compile notification)")
-
- parser.add_option("-x", "--extra-verbose-notifications",
- action="store_true",
- dest="extra_verbose_notify",
- default=False,
- help="Makes compiler more verbose, CI friendly.")
-
- (options, args) = parser.parse_args()
-
- # Only prints matrix of supported toolchains
- if options.supported_toolchains:
- print mcu_toolchain_matrix(platform_filter=options.general_filter_regex)
- exit(0)
-
- # Get target list
- if options.mcu:
- mcu_list = (options.mcu).split(",")
- for mcu in mcu_list:
- if mcu not in TARGET_NAMES:
- print "Given MCU '%s' not into the supported list:\n%s" % (mcu, TARGET_NAMES)
- sys.exit(1)
- targets = mcu_list
- else:
- targets = TARGET_NAMES
-
- # Get toolchains list
- if options.tool:
- toolchain_list = (options.tool).split(",")
- for tc in toolchain_list:
- if tc not in TOOLCHAINS:
- print "Given toolchain '%s' not into the supported list:\n%s" % (tc, TOOLCHAINS)
- sys.exit(1)
- toolchains = toolchain_list
- else:
- toolchains = TOOLCHAINS
-
- # Get libraries list
- libraries = []
-
- # Additional Libraries
- if options.rtos:
- libraries.extend(["rtx", "rtos"])
- if options.rpc:
- libraries.extend(["rpc"])
- if options.eth:
- libraries.append("eth")
- if options.usb:
- libraries.append("usb")
- if options.usb_host:
- libraries.append("usb_host")
- if options.dsp:
- libraries.extend(["cmsis_dsp", "dsp"])
- if options.fat:
- libraries.extend(["fat"])
- if options.ublox:
- libraries.extend(["rtx", "rtos", "usb_host", "ublox"])
- if options.cpputest_lib:
- libraries.extend(["cpputest"])
-
- # Build results
- failures = []
- successes = []
- skipped = []
-
- # CPPCHECK code validation
- if options.cppcheck_validation:
- for toolchain in toolchains:
- for target in targets:
- try:
- mcu = TARGET_MAP[target]
- # CMSIS and MBED libs analysis
- static_analysis_scan(mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, verbose=options.verbose, jobs=options.jobs)
- for lib_id in libraries:
- # Static check for library
- static_analysis_scan_lib(lib_id, mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
- options=options.options,
- extra_verbose=options.extra_verbose_notify, verbose=options.verbose, jobs=options.jobs, clean=options.clean,
- macros=options.macros)
- pass
- except Exception, e:
- if options.verbose:
- import traceback
- traceback.print_exc(file=sys.stdout)
- sys.exit(1)
- print e
- else:
- # Build
- for toolchain in toolchains:
- for target in targets:
- tt_id = "%s::%s" % (toolchain, target)
- try:
- mcu = TARGET_MAP[target]
- lib_build_res = build_mbed_libs(mcu, toolchain,
- options=options.options,
- extra_verbose=options.extra_verbose_notify,
- verbose=options.verbose,
- silent=options.silent,
- jobs=options.jobs,
- clean=options.clean,
- macros=options.macros)
- for lib_id in libraries:
- build_lib(lib_id, mcu, toolchain,
- options=options.options,
- extra_verbose=options.extra_verbose_notify,
- verbose=options.verbose,
- silent=options.silent,
- clean=options.clean,
- macros=options.macros,
- jobs=options.jobs)
- if lib_build_res:
- successes.append(tt_id)
- else:
- skipped.append(tt_id)
- except Exception, e:
- if options.verbose:
- import traceback
- traceback.print_exc(file=sys.stdout)
- sys.exit(1)
- failures.append(tt_id)
- print e
-
- # Write summary of the builds
- print
- print "Completed in: (%.2f)s" % (time() - start)
- print
-
- for report, report_name in [(successes, "Build successes:"),
- (skipped, "Build skipped:"),
- (failures, "Build failures:"),
- ]:
- if report:
- print print_build_results(report, report_name),
-
- if failures:
- sys.exit(1)
diff --git a/workspace_tools/build_api.py b/workspace_tools/build_api.py
deleted file mode 100644
index 3dca0ad..0000000
--- a/workspace_tools/build_api.py
+++ /dev/null
@@ -1,736 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-import tempfile
-import colorama
-
-
-from types import ListType
-from shutil import rmtree
-from os.path import join, exists, basename
-from time import time
-
-from workspace_tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException
-from workspace_tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON
-from workspace_tools.targets import TARGET_NAMES, TARGET_MAP
-from workspace_tools.libraries import Library
-from workspace_tools.toolchains import TOOLCHAIN_CLASSES
-from jinja2 import FileSystemLoader
-from jinja2.environment import Environment
-
-
-def prep_report(report, target_name, toolchain_name, id_name):
- # Setup report keys
- if not target_name in report:
- report[target_name] = {}
-
- if not toolchain_name in report[target_name]:
- report[target_name][toolchain_name] = {}
-
- if not id_name in report[target_name][toolchain_name]:
- report[target_name][toolchain_name][id_name] = []
-
-def prep_properties(properties, target_name, toolchain_name, vendor_label):
- # Setup test properties
- if not target_name in properties:
- properties[target_name] = {}
-
- if not toolchain_name in properties[target_name]:
- properties[target_name][toolchain_name] = {}
-
- properties[target_name][toolchain_name]["target"] = target_name
- properties[target_name][toolchain_name]["vendor"] = vendor_label
- properties[target_name][toolchain_name]["toolchain"] = toolchain_name
-
-def create_result(target_name, toolchain_name, id_name, description):
- cur_result = {}
- cur_result["target_name"] = target_name
- cur_result["toolchain_name"] = toolchain_name
- cur_result["id"] = id_name
- cur_result["description"] = description
- cur_result["elapsed_time"] = 0
- cur_result["output"] = ""
-
- return cur_result
-
-def add_result_to_report(report, result):
- target = result["target_name"]
- toolchain = result["toolchain_name"]
- id_name = result['id']
- result_wrap = { 0: result }
- report[target][toolchain][id_name].append(result_wrap)
-
-def build_project(src_path, build_path, target, toolchain_name,
- libraries_paths=None, options=None, linker_script=None,
- clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None,
- jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None, extra_verbose=False):
- """ This function builds project. Project can be for example one test / UT
- """
- # Toolchain instance
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose)
- toolchain.VERBOSE = verbose
- toolchain.jobs = jobs
- toolchain.build_all = clean
- src_paths = [src_path] if type(src_path) != ListType else src_path
-
- # We need to remove all paths which are repeated to avoid
- # multiple compilations and linking with the same objects
- src_paths = [src_paths[0]] + list(set(src_paths[1:]))
- PROJECT_BASENAME = basename(src_paths[0])
-
- if name is None:
- # We will use default project name based on project folder name
- name = PROJECT_BASENAME
- toolchain.info("Building project %s (%s, %s)" % (PROJECT_BASENAME.upper(), target.name, toolchain_name))
- else:
- # User used custom global project name to have the same name for the
- toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name))
-
-
- if report != None:
- start = time()
- id_name = project_id.upper()
- description = project_description
- vendor_label = target.extra_labels[0]
- cur_result = None
- prep_report(report, target.name, toolchain_name, id_name)
- cur_result = create_result(target.name, toolchain_name, id_name, description)
-
- if properties != None:
- prep_properties(properties, target.name, toolchain_name, vendor_label)
-
- try:
- # Scan src_path and libraries_paths for resources
- resources = toolchain.scan_resources(src_paths[0])
- for path in src_paths[1:]:
- resources.add(toolchain.scan_resources(path))
- if libraries_paths is not None:
- src_paths.extend(libraries_paths)
- for path in libraries_paths:
- resources.add(toolchain.scan_resources(path))
-
- if linker_script is not None:
- resources.linker_script = linker_script
-
- # Build Directory
- if clean:
- if exists(build_path):
- rmtree(build_path)
- mkdir(build_path)
-
- # We need to add if necessary additional include directories
- if inc_dirs:
- if type(inc_dirs) == ListType:
- resources.inc_dirs.extend(inc_dirs)
- else:
- resources.inc_dirs.append(inc_dirs)
- # Compile Sources
- for path in src_paths:
- src = toolchain.scan_resources(path)
- objects = toolchain.compile_sources(src, build_path, resources.inc_dirs)
- resources.objects.extend(objects)
-
-
- # Link Program
- res, needed_update = toolchain.link_program(resources, build_path, name)
-
- if report != None and needed_update:
- end = time()
- cur_result["elapsed_time"] = end - start
- cur_result["output"] = toolchain.get_output()
- cur_result["result"] = "OK"
-
- add_result_to_report(report, cur_result)
-
- return res
-
- except Exception, e:
- if report != None:
- end = time()
-
- if isinstance(e, NotSupportedException):
- cur_result["result"] = "NOT_SUPPORTED"
- else:
- cur_result["result"] = "FAIL"
-
- cur_result["elapsed_time"] = end - start
-
- toolchain_output = toolchain.get_output()
- if toolchain_output:
- cur_result["output"] += toolchain_output
-
- cur_result["output"] += str(e)
-
- add_result_to_report(report, cur_result)
-
- # Let Exception propagate
- raise e
-
-
-def build_library(src_paths, build_path, target, toolchain_name,
- dependencies_paths=None, options=None, name=None, clean=False,
- notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None,
- jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
- """ src_path: the path of the source directory
- build_path: the path of the build directory
- target: ['LPC1768', 'LPC11U24', 'LPC2368']
- toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR']
- library_paths: List of paths to additional libraries
- clean: Rebuild everything if True
- notify: Notify function for logs
- verbose: Write the actual tools command lines if True
- inc_dirs: additional include directories which should be included in build
- inc_dirs_ext: additional include directories which should be copied to library directory
- """
- if type(src_paths) != ListType:
- src_paths = [src_paths]
-
- # The first path will give the name to the library
- name = basename(src_paths[0])
-
- if report != None:
- start = time()
- id_name = name.upper()
- description = name
- vendor_label = target.extra_labels[0]
- cur_result = None
- prep_report(report, target.name, toolchain_name, id_name)
- cur_result = create_result(target.name, toolchain_name, id_name, description)
-
- if properties != None:
- prep_properties(properties, target.name, toolchain_name, vendor_label)
-
- for src_path in src_paths:
- if not exists(src_path):
- error_msg = "The library source folder does not exist: %s", src_path
-
- if report != None:
- cur_result["output"] = error_msg
- cur_result["result"] = "FAIL"
- add_result_to_report(report, cur_result)
-
- raise Exception(error_msg)
-
- try:
- # Toolchain instance
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
- toolchain.VERBOSE = verbose
- toolchain.jobs = jobs
- toolchain.build_all = clean
-
- toolchain.info("Building library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
-
- # Scan Resources
- resources = []
- for src_path in src_paths:
- resources.append(toolchain.scan_resources(src_path))
-
- # Add extra include directories / files which are required by library
- # This files usually are not in the same directory as source files so
- # previous scan will not include them
- if inc_dirs_ext is not None:
- for inc_ext in inc_dirs_ext:
- resources.append(toolchain.scan_resources(inc_ext))
-
- # Dependencies Include Paths
- dependencies_include_dir = []
- if dependencies_paths is not None:
- for path in dependencies_paths:
- lib_resources = toolchain.scan_resources(path)
- dependencies_include_dir.extend(lib_resources.inc_dirs)
-
- if inc_dirs:
- dependencies_include_dir.extend(inc_dirs)
-
- # Create the desired build directory structure
- bin_path = join(build_path, toolchain.obj_path)
- mkdir(bin_path)
- tmp_path = join(build_path, '.temp', toolchain.obj_path)
- mkdir(tmp_path)
-
- # Copy Headers
- for resource in resources:
- toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
- dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
-
- # Compile Sources
- objects = []
- for resource in resources:
- objects.extend(toolchain.compile_sources(resource, tmp_path, dependencies_include_dir))
-
- needed_update = toolchain.build_library(objects, bin_path, name)
-
- if report != None and needed_update:
- end = time()
- cur_result["elapsed_time"] = end - start
- cur_result["output"] = toolchain.get_output()
- cur_result["result"] = "OK"
-
- add_result_to_report(report, cur_result)
-
- except Exception, e:
- if report != None:
- end = time()
- cur_result["result"] = "FAIL"
- cur_result["elapsed_time"] = end - start
-
- toolchain_output = toolchain.get_output()
- if toolchain_output:
- cur_result["output"] += toolchain_output
-
- cur_result["output"] += str(e)
-
- add_result_to_report(report, cur_result)
-
- # Let Exception propagate
- raise e
-
-def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
- """ Wrapper for build_library function.
- Function builds library in proper directory using all dependencies and macros defined by user.
- """
- lib = Library(lib_id)
- if lib.is_supported(target, toolchain):
- # We need to combine macros from parameter list with macros from library definition
- MACROS = lib.macros if lib.macros else []
- if macros:
- MACROS.extend(macros)
-
- return build_library(lib.source_dir, lib.build_dir, target, toolchain, lib.dependencies, options,
- verbose=verbose,
- silent=silent,
- clean=clean,
- macros=MACROS,
- notify=notify,
- inc_dirs=lib.inc_dirs,
- inc_dirs_ext=lib.inc_dirs_ext,
- jobs=jobs,
- report=report,
- properties=properties,
- extra_verbose=extra_verbose)
- else:
- print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain)
- return False
-
-
-# We do have unique legacy conventions about how we build and package the mbed library
-def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False):
- """ Function returns True is library was built and false if building was skipped """
-
- if report != None:
- start = time()
- id_name = "MBED"
- description = "mbed SDK"
- vendor_label = target.extra_labels[0]
- cur_result = None
- prep_report(report, target.name, toolchain_name, id_name)
- cur_result = create_result(target.name, toolchain_name, id_name, description)
-
- if properties != None:
- prep_properties(properties, target.name, toolchain_name, vendor_label)
-
- # Check toolchain support
- if toolchain_name not in target.supported_toolchains:
- supported_toolchains_text = ", ".join(target.supported_toolchains)
- print '%s target is not yet supported by toolchain %s' % (target.name, toolchain_name)
- print '%s target supports %s toolchain%s' % (target.name, supported_toolchains_text, 's' if len(target.supported_toolchains) > 1 else '')
-
- if report != None:
- cur_result["result"] = "SKIP"
- add_result_to_report(report, cur_result)
-
- return False
-
- try:
- # Toolchain
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose)
- toolchain.VERBOSE = verbose
- toolchain.jobs = jobs
- toolchain.build_all = clean
-
- # Source and Build Paths
- BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
- BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
- mkdir(BUILD_TOOLCHAIN)
-
- TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
- mkdir(TMP_PATH)
-
- # CMSIS
- toolchain.info("Building library %s (%s, %s)"% ('CMSIS', target.name, toolchain_name))
- cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
- resources = toolchain.scan_resources(cmsis_src)
-
- toolchain.copy_files(resources.headers, BUILD_TARGET)
- toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
- toolchain.copy_files(resources.bin_files, BUILD_TOOLCHAIN)
-
- objects = toolchain.compile_sources(resources, TMP_PATH)
- toolchain.copy_files(objects, BUILD_TOOLCHAIN)
-
- # mbed
- toolchain.info("Building library %s (%s, %s)" % ('MBED', target.name, toolchain_name))
-
- # Common Headers
- toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
- toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
-
- # Target specific sources
- HAL_SRC = join(MBED_TARGETS_PATH, "hal")
- hal_implementation = toolchain.scan_resources(HAL_SRC)
- toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files + hal_implementation.libraries, BUILD_TARGET, HAL_SRC)
- incdirs = toolchain.scan_resources(BUILD_TARGET).inc_dirs
- objects = toolchain.compile_sources(hal_implementation, TMP_PATH, [MBED_LIBRARIES] + incdirs)
-
- # Common Sources
- mbed_resources = toolchain.scan_resources(MBED_COMMON)
- objects += toolchain.compile_sources(mbed_resources, TMP_PATH, [MBED_LIBRARIES] + incdirs)
-
- # A number of compiled files need to be copied as objects as opposed to
- # being part of the mbed library, for reasons that have to do with the way
- # the linker search for symbols in archives. These are:
- # - retarget.o: to make sure that the C standard lib symbols get overridden
- # - board.o: mbed_die is weak
- # - mbed_overrides.o: this contains platform overrides of various weak SDK functions
- separate_names, separate_objects = ['retarget.o', 'board.o', 'mbed_overrides.o'], []
-
- for o in objects:
- for name in separate_names:
- if o.endswith(name):
- separate_objects.append(o)
-
- for o in separate_objects:
- objects.remove(o)
-
- needed_update = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed")
-
- for o in separate_objects:
- toolchain.copy_files(o, BUILD_TOOLCHAIN)
-
- if report != None and needed_update:
- end = time()
- cur_result["elapsed_time"] = end - start
- cur_result["output"] = toolchain.get_output()
- cur_result["result"] = "OK"
-
- add_result_to_report(report, cur_result)
-
- return True
-
- except Exception, e:
- if report != None:
- end = time()
- cur_result["result"] = "FAIL"
- cur_result["elapsed_time"] = end - start
-
- toolchain_output = toolchain.get_output()
- if toolchain_output:
- cur_result["output"] += toolchain_output
-
- cur_result["output"] += str(e)
-
- add_result_to_report(report, cur_result)
-
- # Let Exception propagate
- raise e
-
-def get_unique_supported_toolchains():
- """ Get list of all unique toolchains supported by targets """
- unique_supported_toolchains = []
- for target in TARGET_NAMES:
- for toolchain in TARGET_MAP[target].supported_toolchains:
- if toolchain not in unique_supported_toolchains:
- unique_supported_toolchains.append(toolchain)
- return unique_supported_toolchains
-
-
-def mcu_toolchain_matrix(verbose_html=False, platform_filter=None):
- """ Shows target map using prettytable """
- unique_supported_toolchains = get_unique_supported_toolchains()
- from prettytable import PrettyTable # Only use it in this function so building works without extra modules
-
- # All tests status table print
- columns = ["Platform"] + unique_supported_toolchains
- pt = PrettyTable(["Platform"] + unique_supported_toolchains)
- # Align table
- for col in columns:
- pt.align[col] = "c"
- pt.align["Platform"] = "l"
-
- perm_counter = 0
- target_counter = 0
- for target in sorted(TARGET_NAMES):
- if platform_filter is not None:
- # FIlter out platforms using regex
- if re.search(platform_filter, target) is None:
- continue
- target_counter += 1
-
- row = [target] # First column is platform name
- default_toolchain = TARGET_MAP[target].default_toolchain
- for unique_toolchain in unique_supported_toolchains:
- text = "-"
- if default_toolchain == unique_toolchain:
- text = "Default"
- perm_counter += 1
- elif unique_toolchain in TARGET_MAP[target].supported_toolchains:
- text = "Supported"
- perm_counter += 1
- row.append(text)
- pt.add_row(row)
-
- result = pt.get_html_string() if verbose_html else pt.get_string()
- result += "\n"
- result += "*Default - default on-line compiler\n"
- result += "*Supported - supported off-line compiler\n"
- result += "\n"
- result += "Total platforms: %d\n"% (target_counter)
- result += "Total permutations: %d"% (perm_counter)
- return result
-
-
-def get_target_supported_toolchains(target):
- """ Returns target supported toolchains list """
- return TARGET_MAP[target].supported_toolchains if target in TARGET_MAP else None
-
-
-def static_analysis_scan(target, toolchain_name, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
- # Toolchain
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
- toolchain.VERBOSE = verbose
- toolchain.jobs = jobs
- toolchain.build_all = clean
-
- # Source and Build Paths
- BUILD_TARGET = join(MBED_LIBRARIES, "TARGET_" + target.name)
- BUILD_TOOLCHAIN = join(BUILD_TARGET, "TOOLCHAIN_" + toolchain.name)
- mkdir(BUILD_TOOLCHAIN)
-
- TMP_PATH = join(MBED_LIBRARIES, '.temp', toolchain.obj_path)
- mkdir(TMP_PATH)
-
- # CMSIS
- toolchain.info("Static analysis for %s (%s, %s)" % ('CMSIS', target.name, toolchain_name))
- cmsis_src = join(MBED_TARGETS_PATH, "cmsis")
- resources = toolchain.scan_resources(cmsis_src)
-
- # Copy files before analysis
- toolchain.copy_files(resources.headers, BUILD_TARGET)
- toolchain.copy_files(resources.linker_script, BUILD_TOOLCHAIN)
-
- # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
- includes = ["-I%s"% i for i in resources.inc_dirs]
- includes.append("-I%s"% str(BUILD_TARGET))
- c_sources = " ".join(resources.c_sources)
- cpp_sources = " ".join(resources.cpp_sources)
- macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
-
- includes = map(str.strip, includes)
- macros = map(str.strip, macros)
-
- check_cmd = CPPCHECK_CMD
- check_cmd += CPPCHECK_MSG_FORMAT
- check_cmd += includes
- check_cmd += macros
-
- # We need to pass some params via file to avoid "command line too long in some OSs"
- tmp_file = tempfile.NamedTemporaryFile(delete=False)
- tmp_file.writelines(line + '\n' for line in c_sources.split())
- tmp_file.writelines(line + '\n' for line in cpp_sources.split())
- tmp_file.close()
- check_cmd += ["--file-list=%s"% tmp_file.name]
-
- _stdout, _stderr, _rc = run_cmd(check_cmd)
- if verbose:
- print _stdout
- print _stderr
-
- # =========================================================================
-
- # MBED
- toolchain.info("Static analysis for %s (%s, %s)" % ('MBED', target.name, toolchain_name))
-
- # Common Headers
- toolchain.copy_files(toolchain.scan_resources(MBED_API).headers, MBED_LIBRARIES)
- toolchain.copy_files(toolchain.scan_resources(MBED_HAL).headers, MBED_LIBRARIES)
-
- # Target specific sources
- HAL_SRC = join(MBED_TARGETS_PATH, "hal")
- hal_implementation = toolchain.scan_resources(HAL_SRC)
-
- # Copy files before analysis
- toolchain.copy_files(hal_implementation.headers + hal_implementation.hex_files, BUILD_TARGET, HAL_SRC)
- incdirs = toolchain.scan_resources(BUILD_TARGET)
-
- target_includes = ["-I%s" % i for i in incdirs.inc_dirs]
- target_includes.append("-I%s"% str(BUILD_TARGET))
- target_includes.append("-I%s"% str(HAL_SRC))
- target_c_sources = " ".join(incdirs.c_sources)
- target_cpp_sources = " ".join(incdirs.cpp_sources)
- target_macros = ["-D%s"% s for s in toolchain.get_symbols() + toolchain.macros]
-
- # Common Sources
- mbed_resources = toolchain.scan_resources(MBED_COMMON)
-
- # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
- mbed_includes = ["-I%s" % i for i in mbed_resources.inc_dirs]
- mbed_includes.append("-I%s"% str(BUILD_TARGET))
- mbed_includes.append("-I%s"% str(MBED_COMMON))
- mbed_includes.append("-I%s"% str(MBED_API))
- mbed_includes.append("-I%s"% str(MBED_HAL))
- mbed_c_sources = " ".join(mbed_resources.c_sources)
- mbed_cpp_sources = " ".join(mbed_resources.cpp_sources)
-
- target_includes = map(str.strip, target_includes)
- mbed_includes = map(str.strip, mbed_includes)
- target_macros = map(str.strip, target_macros)
-
- check_cmd = CPPCHECK_CMD
- check_cmd += CPPCHECK_MSG_FORMAT
- check_cmd += target_includes
- check_cmd += mbed_includes
- check_cmd += target_macros
-
- # We need to pass some parames via file to avoid "command line too long in some OSs"
- tmp_file = tempfile.NamedTemporaryFile(delete=False)
- tmp_file.writelines(line + '\n' for line in target_c_sources.split())
- tmp_file.writelines(line + '\n' for line in target_cpp_sources.split())
- tmp_file.writelines(line + '\n' for line in mbed_c_sources.split())
- tmp_file.writelines(line + '\n' for line in mbed_cpp_sources.split())
- tmp_file.close()
- check_cmd += ["--file-list=%s"% tmp_file.name]
-
- _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
- if verbose:
- print _stdout
- print _stderr
-
-
-def static_analysis_scan_lib(lib_id, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
- options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, extra_verbose=False):
- lib = Library(lib_id)
- if lib.is_supported(target, toolchain):
- static_analysis_scan_library(lib.source_dir, lib.build_dir, target, toolchain, cppcheck_cmd, cppcheck_msg_format,
- lib.dependencies, options,
- verbose=verbose, clean=clean, macros=macros, notify=notify, jobs=jobs, extra_verbose=extra_verbose)
- else:
- print 'Library "%s" is not yet supported on target %s with toolchain %s'% (lib_id, target.name, toolchain)
-
-
-def static_analysis_scan_library(src_paths, build_path, target, toolchain_name, cppcheck_cmd, cppcheck_msg_format,
- dependencies_paths=None, options=None, name=None, clean=False,
- notify=None, verbose=False, macros=None, jobs=1, extra_verbose=False):
- """ Function scans library (or just some set of sources/headers) for staticly detectable defects """
- if type(src_paths) != ListType:
- src_paths = [src_paths]
-
- for src_path in src_paths:
- if not exists(src_path):
- raise Exception("The library source folder does not exist: %s", src_path)
-
- # Toolchain instance
- toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, extra_verbose=extra_verbose)
- toolchain.VERBOSE = verbose
- toolchain.jobs = jobs
-
- # The first path will give the name to the library
- name = basename(src_paths[0])
- toolchain.info("Static analysis for library %s (%s, %s)" % (name.upper(), target.name, toolchain_name))
-
- # Scan Resources
- resources = []
- for src_path in src_paths:
- resources.append(toolchain.scan_resources(src_path))
-
- # Dependencies Include Paths
- dependencies_include_dir = []
- if dependencies_paths is not None:
- for path in dependencies_paths:
- lib_resources = toolchain.scan_resources(path)
- dependencies_include_dir.extend(lib_resources.inc_dirs)
-
- # Create the desired build directory structure
- bin_path = join(build_path, toolchain.obj_path)
- mkdir(bin_path)
- tmp_path = join(build_path, '.temp', toolchain.obj_path)
- mkdir(tmp_path)
-
- # Gather include paths, c, cpp sources and macros to transfer to cppcheck command line
- includes = ["-I%s" % i for i in dependencies_include_dir + src_paths]
- c_sources = " "
- cpp_sources = " "
- macros = ['-D%s' % s for s in toolchain.get_symbols() + toolchain.macros]
-
- # Copy Headers
- for resource in resources:
- toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path)
- includes += ["-I%s" % i for i in resource.inc_dirs]
- c_sources += " ".join(resource.c_sources) + " "
- cpp_sources += " ".join(resource.cpp_sources) + " "
-
- dependencies_include_dir.extend(toolchain.scan_resources(build_path).inc_dirs)
-
- includes = map(str.strip, includes)
- macros = map(str.strip, macros)
-
- check_cmd = cppcheck_cmd
- check_cmd += cppcheck_msg_format
- check_cmd += includes
- check_cmd += macros
-
- # We need to pass some parameters via file to avoid "command line too long in some OSs"
- # Temporary file is created to store e.g. cppcheck list of files for command line
- tmp_file = tempfile.NamedTemporaryFile(delete=False)
- tmp_file.writelines(line + '\n' for line in c_sources.split())
- tmp_file.writelines(line + '\n' for line in cpp_sources.split())
- tmp_file.close()
- check_cmd += ["--file-list=%s"% tmp_file.name]
-
- # This will allow us to grab result from both stdio and stderr outputs (so we can show them)
- # We assume static code analysis tool is outputting defects on STDERR
- _stdout, _stderr, _rc = run_cmd_ext(check_cmd)
- if verbose:
- print _stdout
- print _stderr
-
-
-def print_build_results(result_list, build_name):
- """ Generate result string for build results """
- result = ""
- if len(result_list) > 0:
- result += build_name + "\n"
- result += "\n".join([" * %s" % f for f in result_list])
- result += "\n"
- return result
-
-def write_build_report(build_report, template_filename, filename):
- build_report_failing = []
- build_report_passing = []
-
- for report in build_report:
- if len(report["failing"]) > 0:
- build_report_failing.append(report)
- else:
- build_report_passing.append(report)
-
- env = Environment(extensions=['jinja2.ext.with_'])
- env.loader = FileSystemLoader('ci_templates')
- template = env.get_template(template_filename)
-
- with open(filename, 'w+') as f:
- f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing))
diff --git a/workspace_tools/build_release.py b/workspace_tools/build_release.py
deleted file mode 100644
index ef28edb..0000000
--- a/workspace_tools/build_release.py
+++ /dev/null
@@ -1,295 +0,0 @@
-#! /usr/bin/env python
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import sys
-from time import time
-from os.path import join, abspath, dirname, normpath
-from optparse import OptionParser
-import json
-
-# Be sure that the tools directory is in the search path
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-from workspace_tools.build_api import build_mbed_libs
-from workspace_tools.build_api import write_build_report
-from workspace_tools.targets import TARGET_MAP, TARGET_NAMES
-from workspace_tools.test_exporters import ReportExporter, ResultExporterType
-from workspace_tools.test_api import SingleTestRunner
-from workspace_tools.test_api import singletest_in_cli_mode
-from workspace_tools.paths import TEST_DIR
-from workspace_tools.tests import TEST_MAP
-
-OFFICIAL_MBED_LIBRARY_BUILD = (
- ('LPC11U24', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
- ('LPC1768', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('UBLOX_C027', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('ARCH_PRO', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('LPC2368', ('ARM', 'GCC_ARM')),
- ('LPC2460', ('GCC_ARM',)),
- ('LPC812', ('uARM','IAR')),
- ('LPC824', ('uARM', 'GCC_ARM', 'IAR', 'GCC_CR')),
- ('SSCI824', ('uARM','GCC_ARM')),
- ('LPC1347', ('ARM','IAR')),
- ('LPC4088', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('LPC4088_DM', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('LPC1114', ('uARM','GCC_ARM', 'GCC_CR', 'IAR')),
- ('LPC11U35_401', ('ARM', 'uARM','GCC_ARM','GCC_CR', 'IAR')),
- ('LPC11U35_501', ('ARM', 'uARM','GCC_ARM','GCC_CR', 'IAR')),
- ('LPC1549', ('uARM','GCC_ARM','GCC_CR', 'IAR')),
- ('XADOW_M0', ('ARM', 'uARM','GCC_ARM','GCC_CR')),
- ('ARCH_GPRS', ('ARM', 'uARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('LPC4337', ('ARM',)),
- ('LPC11U37H_401', ('ARM', 'uARM','GCC_ARM','GCC_CR')),
- ('MICRONFCBOARD', ('ARM', 'uARM','GCC_ARM')),
-
- ('KL05Z', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
- ('KL25Z', ('ARM', 'GCC_ARM', 'IAR')),
- ('KL27Z', ('ARM', 'GCC_ARM', 'IAR')),
- ('KL43Z', ('ARM', 'GCC_ARM')),
- ('KL46Z', ('ARM', 'GCC_ARM', 'IAR')),
- ('K64F', ('ARM', 'GCC_ARM', 'IAR')),
- ('K22F', ('ARM', 'GCC_ARM', 'IAR')),
- ('K20D50M', ('ARM', 'GCC_ARM' , 'IAR')),
- ('TEENSY3_1', ('ARM', 'GCC_ARM')),
-
- ('B96B_F446VE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F030R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F031K6', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F042K6', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F070RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F072RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F091RC', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F103RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F302R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F303K8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F303RE', ('ARM', 'uARM', 'IAR')),
- ('NUCLEO_F334R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F401RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F410RB', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F411RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F446RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('ELMO_F411RE', ('ARM', 'uARM', 'GCC_ARM')),
- ('NUCLEO_L053R8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_L152RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('MTS_MDOT_F405RG', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('MTS_MDOT_F411RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('MTS_DRAGONFLY_F411RE', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('DISCO_L053C8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('DISCO_F334C8', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('DISCO_F429ZI', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('DISCO_F469NI', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('DISCO_F746NG', ('ARM', 'uARM', 'GCC_ARM','IAR')),
- ('DISCO_L476VG', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_L476RG', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
- ('NUCLEO_F746ZG', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
- ('NUCLEO_L031K6', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
- ('NUCLEO_L073RZ', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
-
- ('MOTE_L152RC', ('ARM', 'uARM', 'IAR', 'GCC_ARM')),
-
- ('ARCH_MAX', ('ARM', 'GCC_ARM')),
-
- ('NRF51822', ('ARM', 'GCC_ARM', 'IAR')),
- ('NRF51_DK', ('ARM', 'GCC_ARM', 'IAR')),
- ('NRF51_DONGLE', ('ARM', 'GCC_ARM', 'IAR')),
- ('HRM1017', ('ARM', 'GCC_ARM', 'IAR')),
- ('ARCH_BLE', ('ARM', 'GCC_ARM', 'IAR')),
- ('SEEED_TINY_BLE', ('ARM', 'GCC_ARM', 'IAR')),
- ('RBLAB_NRF51822', ('ARM', 'GCC_ARM')),
- ('RBLAB_BLENANO', ('ARM', 'GCC_ARM')),
- ('WALLBOT_BLE', ('ARM', 'GCC_ARM')),
- ('DELTA_DFCM_NNN40', ('ARM', 'GCC_ARM')),
- ('NRF51_MICROBIT', ('ARM','GCC_ARM')),
- ('NRF51_MICROBIT_B', ('ARM',)),
- ('TY51822R3', ('ARM', 'GCC_ARM')),
-
- ('LPC11U68', ('ARM', 'uARM','GCC_ARM','GCC_CR', 'IAR')),
- ('OC_MBUINO', ('ARM', 'uARM', 'GCC_ARM', 'IAR')),
-
- ('ARM_MPS2_M0' , ('ARM',)),
- ('ARM_MPS2_M0P' , ('ARM',)),
- ('ARM_MPS2_M3' , ('ARM',)),
- ('ARM_MPS2_M4' , ('ARM',)),
- ('ARM_MPS2_M7' , ('ARM',)),
- ('ARM_IOTSS_BEID' , ('ARM',)),
-
- ('RZ_A1H' , ('ARM', 'GCC_ARM')),
-
- ('EFM32ZG_STK3200', ('GCC_ARM', 'uARM')),
- ('EFM32HG_STK3400', ('GCC_ARM', 'uARM')),
- ('EFM32LG_STK3600', ('ARM', 'GCC_ARM', 'uARM')),
- ('EFM32GG_STK3700', ('ARM', 'GCC_ARM', 'uARM')),
- ('EFM32WG_STK3800', ('ARM', 'GCC_ARM', 'uARM')),
- ('EFM32PG_STK3401', ('ARM', 'GCC_ARM', 'uARM')),
-
- ('MAXWSNENV', ('ARM', 'GCC_ARM', 'IAR')),
- ('MAX32600MBED', ('ARM', 'GCC_ARM', 'IAR')),
-
- ('WIZWIKI_W7500', ('ARM', 'uARM')),
- ('WIZWIKI_W7500P',('ARM', 'uARM')),
- ('WIZWIKI_W7500ECO',('ARM', 'uARM')),
-
- ('SAMR21G18A',('ARM', 'uARM', 'GCC_ARM')),
- ('SAMD21J18A',('ARM', 'uARM', 'GCC_ARM')),
- ('SAMD21G18A',('ARM', 'uARM', 'GCC_ARM')),
-
-)
-
-
-if __name__ == '__main__':
- parser = OptionParser()
- parser.add_option('-o', '--official', dest="official_only", default=False, action="store_true",
- help="Build using only the official toolchain for each target")
- parser.add_option("-j", "--jobs", type="int", dest="jobs",
- default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
- parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
- default=False, help="Verbose diagnostic output")
- parser.add_option("-t", "--toolchains", dest="toolchains", help="Use toolchains names separated by comma")
-
- parser.add_option("-p", "--platforms", dest="platforms", default="", help="Build only for the platform namesseparated by comma")
-
- parser.add_option("-L", "--list-config", action="store_true", dest="list_config",
- default=False, help="List the platforms and toolchains in the release in JSON")
-
- parser.add_option("", "--report-build", dest="report_build_file_name", help="Output the build results to an junit xml file")
-
- parser.add_option("", "--build-tests", dest="build_tests", help="Build all tests in the given directories (relative to /libraries/tests)")
-
-
- options, args = parser.parse_args()
-
-
-
- if options.list_config:
- print json.dumps(OFFICIAL_MBED_LIBRARY_BUILD, indent=4)
- sys.exit()
-
- start = time()
- build_report = {}
- build_properties = {}
-
- platforms = None
- if options.platforms != "":
- platforms = set(options.platforms.split(","))
-
- if options.build_tests:
- # Get all paths
- directories = options.build_tests.split(',')
- for i in range(len(directories)):
- directories[i] = normpath(join(TEST_DIR, directories[i]))
-
- test_names = []
-
- for test_id in TEST_MAP.keys():
- # Prevents tests with multiple source dirs from being checked
- if isinstance( TEST_MAP[test_id].source_dir, basestring):
- test_path = normpath(TEST_MAP[test_id].source_dir)
- for directory in directories:
- if directory in test_path:
- test_names.append(test_id)
-
- mut_counter = 1
- mut = {}
- test_spec = {
- "targets": {}
- }
-
- if options.toolchains:
- print "Only building using the following toolchains: %s" % (options.toolchains)
-
- for target_name, toolchain_list in OFFICIAL_MBED_LIBRARY_BUILD:
- toolchains = None
- if platforms is not None and not target_name in platforms:
- print("Excluding %s from release" % target_name)
- continue
-
- if target_name not in TARGET_NAMES:
- print "Target '%s' is not a valid target. Excluding from release"
- continue
-
- if options.official_only:
- toolchains = (getattr(TARGET_MAP[target_name], 'default_toolchain', 'ARM'),)
- else:
- toolchains = toolchain_list
-
- if options.toolchains:
- toolchainSet = set(toolchains)
- toolchains = toolchainSet.intersection(set((options.toolchains).split(',')))
-
- mut[str(mut_counter)] = {
- "mcu": target_name
- }
-
- mut_counter += 1
-
- test_spec["targets"][target_name] = toolchains
-
- single_test = SingleTestRunner(_muts=mut,
- _opts_report_build_file_name=options.report_build_file_name,
- _test_spec=test_spec,
- _opts_test_by_names=",".join(test_names),
- _opts_verbose=options.verbose,
- _opts_only_build_tests=True,
- _opts_suppress_summary=True,
- _opts_jobs=options.jobs,
- _opts_include_non_automated=True,
- _opts_build_report=build_report,
- _opts_build_properties=build_properties)
- # Runs test suite in CLI mode
- test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, new_build_report, new_build_properties = single_test.execute()
- else:
- for target_name, toolchain_list in OFFICIAL_MBED_LIBRARY_BUILD:
- if platforms is not None and not target_name in platforms:
- print("Excluding %s from release" % target_name)
- continue
-
- if target_name not in TARGET_NAMES:
- print "Target '%s' is not a valid target. Excluding from release"
- continue
-
- if options.official_only:
- toolchains = (getattr(TARGET_MAP[target_name], 'default_toolchain', 'ARM'),)
- else:
- toolchains = toolchain_list
-
- if options.toolchains:
- print "Only building using the following toolchains: %s" % (options.toolchains)
- toolchainSet = set(toolchains)
- toolchains = toolchainSet.intersection(set((options.toolchains).split(',')))
-
- for toolchain in toolchains:
- id = "%s::%s" % (target_name, toolchain)
-
- try:
- built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name], toolchain, verbose=options.verbose, jobs=options.jobs, report=build_report, properties=build_properties)
-
- except Exception, e:
- print str(e)
-
- # Write summary of the builds
- if options.report_build_file_name:
- file_report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
- file_report_exporter.report_to_file(build_report, options.report_build_file_name, test_suite_properties=build_properties)
-
- print "\n\nCompleted in: (%.2f)s" % (time() - start)
-
- print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
- status = print_report_exporter.report(build_report)
-
- if not status:
- sys.exit(1)
diff --git a/workspace_tools/build_travis.py b/workspace_tools/build_travis.py
deleted file mode 100644
index 7189dba..0000000
--- a/workspace_tools/build_travis.py
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/usr/bin/env python2
-
-"""
-Travis-CI build script
-
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import os
-import sys
-
-################################################################################
-# Configure builds here
-# "libs" can contain "dsp", "rtos", "eth", "usb_host", "usb", "ublox", "fat"
-
-build_list = (
- { "target": "LPC1768", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "eth", "usb_host", "usb", "ublox", "fat"] },
- { "target": "LPC2368", "toolchains": "GCC_ARM", "libs": ["fat"] },
- { "target": "LPC2460", "toolchains": "GCC_ARM", "libs": ["rtos", "usb_host", "usb", "fat"] },
- { "target": "LPC11U24", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "OC_MBUINO", "toolchains": "GCC_ARM", "libs": ["fat"] },
-
- { "target": "LPC11U24_301", "toolchains": "GCC_ARM", "libs": ["fat"] },
-
- { "target": "B96B_F446VE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_L053R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_L152RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F030R8", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_F031K6", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_F042K6", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_F070RB", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_F072RB", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F091RC", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F103RB", "toolchains": "GCC_ARM", "libs": ["rtos", "fat"] },
- { "target": "NUCLEO_F302R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F303K8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F303RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F334R8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F401RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F410RB", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NUCLEO_L476RG", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_L031K6", "toolchains": "GCC_ARM", "libs": ["dsp"] },
- { "target": "NUCLEO_L073RZ", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NUCLEO_F446RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
-
- { "target": "MOTE_L152RC", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
-
- { "target": "ELMO_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
-
- { "target": "MTS_MDOT_F405RG", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
- { "target": "MTS_MDOT_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
- { "target": "MTS_DRAGONFLY_F411RE", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "ARCH_MAX", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
-
- { "target": "DISCO_F051R8", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "DISCO_F334C8", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "DISCO_F401VC", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "DISCO_F407VG", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "DISCO_F429ZI", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "DISCO_F469NI", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "DISCO_F746NG", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
-
- { "target": "LPC1114", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "LPC11U35_401", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "UBLOX_C027", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "LPC11U35_501", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "LPC11U68", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "LPC11U37H_401", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
-
- { "target": "KL05Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "KL25Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
- { "target": "KL27Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
- { "target": "KL43Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
- { "target": "KL46Z", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
- { "target": "K20D50M", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "TEENSY3_1", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "K64F", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
- { "target": "LPC4088", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb", "fat"] },
- { "target": "ARCH_PRO", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "LPC1549", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NRF51822", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "DELTA_DFCM_NNN40", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "NRF51_DK", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
- { "target": "NRF51_MICROBIT", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "fat"] },
-
- { "target": "EFM32ZG_STK3200", "toolchains": "GCC_ARM", "libs": ["dsp"] },
- { "target": "EFM32HG_STK3400", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
- { "target": "EFM32LG_STK3600", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
- { "target": "EFM32GG_STK3700", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
- { "target": "EFM32WG_STK3800", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos", "usb"] },
- { "target": "EFM32PG_STK3401", "toolchains": "GCC_ARM", "libs": ["dsp", "rtos"] },
-
- { "target": "MAXWSNENV", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "MAX32600MBED", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
-
- { "target": "RZ_A1H", "toolchains": "GCC_ARM", "libs": ["fat"] },
-
- { "target": "SAMR21G18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "SAMD21J18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "SAMD21G18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
- { "target": "SAML21J18A", "toolchains": "GCC_ARM", "libs": ["dsp", "fat"] },
-)
-
-################################################################################
-# Configure example test building (linking against external mbed SDK libraries liek fat or rtos)
-
-linking_list = [
- {"target": "LPC1768",
- "toolchains": "GCC_ARM",
- "tests": {"" : ["MBED_2", "MBED_10", "MBED_11", "MBED_15", "MBED_16", "MBED_17"],
- "eth" : ["NET_1", "NET_2", "NET_3", "NET_4"],
- "fat" : ["MBED_A12", "MBED_19", "PERF_1", "PERF_2", "PERF_3"],
- "rtos" : ["RTOS_1", "RTOS_2", "RTOS_3"],
- "usb" : ["USB_1", "USB_2" ,"USB_3"],
- }
- }
- ]
-
-################################################################################
-
-# Driver
-
-def run_builds(dry_run):
- for build in build_list:
- toolchain_list = build["toolchains"]
- if type(toolchain_list) != type([]): toolchain_list = [toolchain_list]
- for toolchain in toolchain_list:
- cmdline = "python workspace_tools/build.py -m %s -t %s -j 4 -c --silent "% (build["target"], toolchain)
- libs = build.get("libs", [])
- if libs:
- cmdline = cmdline + " ".join(["--" + l for l in libs])
- print "Executing: " + cmdline
- if not dry_run:
- if os.system(cmdline) != 0:
- sys.exit(1)
-
-
-def run_test_linking(dry_run):
- """ Function run make.py commands to build and link simple mbed SDK
- tests against few libraries to make sure there are no simple linking errors.
- """
- for link in linking_list:
- toolchain_list = link["toolchains"]
- if type(toolchain_list) != type([]):
- toolchain_list = [toolchain_list]
- for toolchain in toolchain_list:
- tests = link["tests"]
- # Call make.py for each test group for particular library
- for test_lib in tests:
- test_names = tests[test_lib]
- test_lib_switch = "--" + test_lib if test_lib else ""
- cmdline = "python workspace_tools/make.py -m %s -t %s -c --silent %s -n %s " % (link["target"], toolchain, test_lib_switch, ",".join(test_names))
- print "Executing: " + cmdline
- if not dry_run:
- if os.system(cmdline) != 0:
- sys.exit(1)
-
-def run_test_testsuite(dry_run):
- cmdline = "python workspace_tools/singletest.py --version"
- print "Executing: " + cmdline
- if not dry_run:
- if os.system(cmdline) != 0:
- sys.exit(1)
-
-if __name__ == "__main__":
- run_builds("-s" in sys.argv)
- run_test_linking("-s" in sys.argv)
- run_test_testsuite("-s" in sys.argv)
diff --git a/workspace_tools/buildbot/master.cfg b/workspace_tools/buildbot/master.cfg
deleted file mode 100644
index 0a8a662..0000000
--- a/workspace_tools/buildbot/master.cfg
+++ /dev/null
@@ -1,406 +0,0 @@
-# -*- python -*-
-# ex: set syntax=python:
-
-# This is a sample buildmaster config file. It must be installed as
-# 'master.cfg' in your buildmaster's base directory.
-
-# This is the dictionary that the buildmaster pays attention to. We also use
-# a shorter alias to save typing.
-c = BuildmasterConfig = {}
-
-####### BUILDSLAVES
-
-# The 'slaves' list defines the set of recognized buildslaves. Each element is
-# a BuildSlave object, specifying a unique slave name and password. The same
-# slave name and password must be configured on the slave.
-from buildbot.buildslave import BuildSlave
-c['slaves'] = [BuildSlave("example-slave", "pass"),
- BuildSlave("example-slave-2", "pass"),
- BuildSlave("example-slave-KL25Z", "pass"),
- BuildSlave("example-slave-LPC1768", "pass"),
- BuildSlave("example-slave-LPC11U24", "pass"),
- ]
-
-# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
-# This must match the value configured into the buildslaves (with their
-# --master option)
-c['slavePortnum'] = 9989
-
-####### OFFICIAL_MBED_LIBRARY_BUILD
-
-OFFICIAL_MBED_LIBRARY_BUILD = (
- ('LPC1768', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- ('KL05Z', ('ARM', 'uARM', 'GCC_ARM')),
- ('KL25Z', ('ARM', 'GCC_ARM')),
- ('LPC11U24', ('ARM', 'uARM')),
- ('KL46Z', ('ARM', 'GCC_ARM')),
- ('LPC4088', ('ARM', 'GCC_ARM', 'GCC_CR')),
- ('LPC1347', ('ARM',)),
- ('LPC1549', ('uARM',)),
- ('LPC2368', ('ARM',)),
- ('LPC812', ('uARM',)),
- ('LPC11U35_401', ('ARM', 'uARM')),
- ('LPC1114', ('uARM',)),
- ('NUCLEO_F103RB', ('ARM', 'uARM')),
- ('NUCLEO_L152RE', ('ARM', 'uARM')),
- ('NUCLEO_F401RE', ('ARM', 'uARM')),
- ('NUCLEO_F030R8', ('ARM', 'uARM')),
- ('UBLOX_C027', ('ARM', 'GCC_ARM', 'GCC_CR', 'IAR')),
- # ('NRF51822', ('ARM',)),
-)
-
-# Which hardware platforms are supported for target testing
-OFFICIAL_MBED_TESTBED_SUPPORTED_HARDWARE = (
- # 'KL25Z',
- # 'LPC1768',
- # 'LPC11U24',
-)
-
-####### CHANGESOURCES
-
-# the 'change_source' setting tells the buildmaster how it should find out
-# about source code changes. Here we point to the buildbot clone of pyflakes.
-
-from buildbot.changes.gitpoller import GitPoller
-c['change_source'] = []
-"""
-c['change_source'].append(GitPoller(
- 'git://github.com/buildbot/pyflakes.git',
- workdir='gitpoller-workdir', branch='master',
- pollinterval=300))
-"""
-####### SCHEDULERS
-
-# Configure the Schedulers, which decide how to react to incoming changes. In this
-# case, just kick off a 'runtests' build
-
-from buildbot.schedulers.basic import SingleBranchScheduler
-from buildbot.schedulers.forcesched import ForceScheduler
-from buildbot.changes import filter
-c['schedulers'] = []
-
-# Create builders to generate one target using all assigned toolchains
-release_builder_name = "BuildRelease"
-builder_names = [release_builder_name]
-for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD:
- builder_name = "All_TC_%s" % target_name
- builder_names.append(builder_name)
-c['schedulers'].append(ForceScheduler(name="force", builderNames=builder_names))
-
-####### BUILDERS
-
-# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
-# what steps, and which slaves can execute them. Note that any particular build will
-# only take place on one slave.
-
-from buildbot.process.factory import BuildFactory
-from buildbot.steps.source.git import Git
-from buildbot.steps.shell import ShellCommand
-from buildbot.process.buildstep import LogLineObserver
-import buildbot.status.results
-import re
-import pprint
-
-class TestCommand(ShellCommand):
- failedTestsCount = 0 # FAIL
- passedTestsCount = 0 # OK
- errorsTestsCount = 0 # ERROR
- undefsTestsCount = 0 # UNDEF
- testsResults = []
-
- def __init__(self, stage=None,module=None, moduleset=None, **kwargs):
- ShellCommand.__init__(self, **kwargs)
- self.failedTestsCount = 0
- self.passedTestsCount = 0
- self.errorsTestsCount = 0
- self.tracebackPyCount = 0
- self.testsResults = []
- testFailuresObserver = UnitTestsObserver ()
- self.addLogObserver('stdio', testFailuresObserver)
-
- def createSummary(self, log):
- if self.failedTestsCount >= 0 or self.passedTestsCount >= 0 or self.errorsTestsCount >= 0 or self.undefsTestsCount >= 0:
- self.addHTMLLog ('tests summary', self.createTestsSummary())
-
- def getText(self, cmd, results):
- text = ShellCommand.getText(self, cmd, results)
- text.append("OK: " + str(self.passedTestsCount))
- text.append("FAIL: " + str(self.failedTestsCount))
- text.append("ERROR: " + str(self.errorsTestsCount))
- text.append("UNDEF: " + str(self.undefsTestsCount))
- text.append("Traceback: " + str(self.tracebackPyCount))
- return text
-
- def evaluateCommand(self, cmd):
- if self.failedTestsCount > 0:
- return buildbot.status.results.WARNINGS
- elif self.errorsTestsCount > 0 or self.undefsTestsCount > 0 or self.tracebackPyCount > 0:
- return buildbot.status.results.FAILURE
- return buildbot.status.results.SUCCESS
-
- def find_unique_tc_result_value(self, index):
- """ Get unique values from each row in data parameter """
- result = []
- for tc_result_list in self.testsResults:
- if tc_result_list[index] not in result:
- result.append(tc_result_list[index])
- return result
-
- def html_view_test_result(self, targets, tests, toolchain):
- """ Generates simple result table """
- COLOR_OK = "LimeGreen"
- COLOR_FAIL = "LightCoral"
- COLOR_UNDEF = "LightSlateGray"
- COLOR_NEUTRAL = "Silver"
-
- STATUS_COLORS = { "OK" : COLOR_OK,
- "FAIL" : COLOR_FAIL,
- "UNDEF" : COLOR_UNDEF}
-
- result = "
"
- result += "
" + toolchain + "
"
- for test in tests:
- result += "
" + test + "
"
- result += "
"
-
- for target in targets:
- result += "
" + target + "
"
- for test in tests:
- for tc_result_list in self.testsResults:
- if tc_result_list[1] == target and tc_result_list[2] == toolchain and tc_result_list[3] == test:
- status = tc_result_list[4]
- bgcolor = STATUS_COLORS[status]
- result += "
"
- return html
-
-class BuildObserver(LogLineObserver):
- regroupresult = []
-
- def __init__(self):
- LogLineObserver.__init__(self)
- if len(self.regroupresult) == 0:
- self.regroupresult.append(re.compile("^\[([Ww]arning)\] (.*)"))
- self.regroupresult.append(re.compile("^\[([Ee]rror)\] (.*)"))
-
- def outLineReceived(self, line):
- matched = False
- for r in self.regroupresult:
- result = r.match(line)
- if result:
- self.step.testsResults.append(result.groups())
- if result.group(1) == 'Warning':
- self.step.warningsCount += 1
- elif result.group(1) == 'Error':
- self.step.errorsCount += 1
- matched = True
- #if not matched:
- # [Future-Dev] Other check...
-
-
-####### BUILDERS - mbed project
-git_clone = Git(repourl='https://github.com/mbedmicro/mbed.git', mode='incremental')
-
-# create the build factory for mbed and add the steps to it
-from buildbot.config import BuilderConfig
-
-c['builders'] = []
-
-copy_private_settings = ShellCommand(name = "copy private_settings.py",
- command = "cp ../private_settings.py workspace_tools/private_settings.py",
- haltOnFailure = True,
- description = "Copy private_settings.py")
-
-mbed_build_release = BuildFactory()
-mbed_build_release.addStep(git_clone)
-mbed_build_release.addStep(copy_private_settings)
-
-for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD:
- builder_name = "All_TC_%s" % target_name
- mbed_build = BuildFactory()
- mbed_build.addStep(git_clone)
- mbed_build.addStep(copy_private_settings)
- # Adding all chains for target
- for toolchain in toolchains:
- build_py = BuildCommand(name = "Build %s using %s" % (target_name, toolchain),
- command = "python workspace_tools/build.py -m %s -t %s" % (target_name, toolchain),
- haltOnFailure = True,
- warnOnWarnings = True,
- description = "Building %s using %s" % (target_name, toolchain),
- descriptionDone = "Built %s using %s" % (target_name, toolchain))
-
- mbed_build.addStep(build_py)
- mbed_build_release.addStep(build_py) # For build release we need all toolchains
-
- if target_name in OFFICIAL_MBED_TESTBED_SUPPORTED_HARDWARE:
- copy_example_test_spec_json = ShellCommand(name = "Copy example_test_spec.json",
- command = "cp ../example_test_spec.json workspace_tools/data/example_test_spec.json",
- haltOnFailure = True,
- description = "Copy example_test_spec.json")
-
- autotest_py = ShellCommand(name = "Running autotest.py for %s" % (target_name),
- command = "python workspace_tools/autotest.py workspace_tools/data/example_test_spec.json",
- haltOnFailure = True,
- description = "Running autotest.py")
-
- mbed_build.addStep(copy_example_test_spec_json)
- mbed_build.addStep(autotest_py)
-
- # Add builder with steps for each toolchain
- c['builders'].append(BuilderConfig(name=builder_name,
- slavenames=["example-slave-%s" % (target_name)],
- factory=mbed_build))
- else:
- # Add builder with steps for each toolchain
- c['builders'].append(BuilderConfig(name=builder_name,
- slavenames=["example-slave"],
- factory=mbed_build))
-
-# copy_example_test_spec_json = ShellCommand(name = "Copy example_test_spec.json",
- # command = "cp ../example_test_spec.json workspace_tools/data/example_test_spec.json",
- # haltOnFailure = True,
- # description = "Copy example_test_spec.json")
-
-singletest_py = TestCommand(name = "Running Target Tests",
- command = "python workspace_tools/singletest.py -i workspace_tools/test_spec.json -M workspace_tools/muts_all.json",
- haltOnFailure = True,
- warnOnWarnings = True,
- description = "Running Target Tests",
- descriptionDone = "Target Testing Finished")
-
-mbed_build_release.addStep(singletest_py)
-# Release build collects all building toolchains
-c['builders'].append(BuilderConfig(name=release_builder_name,
- slavenames=["example-slave"],
- factory=mbed_build_release))
-
-####### STATUS TARGETS
-
-# 'status' is a list of Status Targets. The results of each build will be
-# pushed to these targets. buildbot/status/*.py has a variety to choose from,
-# including web pages, email senders, and IRC bots.
-
-c['status'] = []
-
-from buildbot.status import html
-from buildbot.status.web import authz, auth
-
-authz_cfg=authz.Authz(
- # change any of these to True to enable; see the manual for more
- # options
- auth=auth.BasicAuth([("pyflakes","pyflakes")]),
- gracefulShutdown = False,
- forceBuild = 'auth', # use this to test your slave once it is set up
- forceAllBuilds = True,
- pingBuilder = True,
- stopBuild = True,
- stopAllBuilds = True,
- cancelPendingBuild = True,
-)
-c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg, order_console_by_time=True))
-
-####### PROJECT IDENTITY
-
-# the 'title' string will appear at the top of this buildbot
-# installation's html.WebStatus home page (linked to the
-# 'titleURL') and is embedded in the title of the waterfall HTML page.
-
-c['title'] = "Green Tea"
-c['titleURL'] = ""
-
-# the 'buildbotURL' string should point to the location where the buildbot's
-# internal web server (usually the html.WebStatus page) is visible. This
-# typically uses the port number set in the Waterfall 'status' entry, but
-# with an externally-visible host name which the buildbot cannot figure out
-# without some help.
-
-c['buildbotURL'] = "http://localhost:8010/"
-
-####### DB URL
-
-c['db'] = {
- # This specifies what database buildbot uses to store its state. You can leave
- # this at its default for all but the largest installations.
- 'db_url' : "sqlite:///state.sqlite",
- # 'db_url' : "mysql://buildbot:123456@localhost/buildbot_mbed?max_idle=300",
-}
diff --git a/workspace_tools/ci_templates/library_build/build_report.html b/workspace_tools/ci_templates/library_build/build_report.html
deleted file mode 100644
index 1b2b693..0000000
--- a/workspace_tools/ci_templates/library_build/build_report.html
+++ /dev/null
@@ -1,31 +0,0 @@
-
-{% for report in failing_builds %}
-{% include 'tests_build/build_report.html' %}
-{% endfor %}
-
-
{{passing_builds|length}} Passing Builds
-{% for report in passing_builds %}
-{% include 'tests_build/build_report.html' %}
-{% endfor %}
-
-{% include 'scripts.js' %}
diff --git a/workspace_tools/compliance/__init__.py b/workspace_tools/compliance/__init__.py
deleted file mode 100644
index 3840c9e..0000000
--- a/workspace_tools/compliance/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
diff --git a/workspace_tools/compliance/ioper_base.py b/workspace_tools/compliance/ioper_base.py
deleted file mode 100644
index 53a4ed2..0000000
--- a/workspace_tools/compliance/ioper_base.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-
-"""
-
-import sys
-
-try:
- from colorama import Fore
-except:
- pass
-
-COLORAMA = 'colorama' in sys.modules
-
-
-class IOperTestCaseBase():
- """ Interoperability test case base class
- @return list of tuple (severity, Description)
- Example: (result.append((IOperTestSeverity.INFO, ""))
- """
-
- def __init__(self, scope=None):
- self.PASS = 'PASS'
- self.INFO = 'INFO'
- self.ERROR = 'ERROR'
- self.WARN = 'WARN'
-
- self.scope = scope # Default test scope (basic, pedantic, mbed-enabled etc...)
-
- def test(self, param=None):
- result = []
- return result
-
- def RED(self, text):
- return self.color_text(text, color=Fore.RED, delim=Fore.RESET) if COLORAMA else text
-
- def GREEN(self, text):
- return self.color_text(text, color=Fore.GREEN, delim=Fore.RESET) if COLORAMA else text
-
- def YELLOW(self, text):
- return self.color_text(text, color=Fore.YELLOW, delim=Fore.RESET) if COLORAMA else text
-
- def color_text(self, text, color='', delim=''):
- return color + text + color + delim
-
- def COLOR(self, severity, text):
- colors = {
- self.PASS : self.GREEN,
- self.ERROR : self.RED,
- self.WARN : self.YELLOW
- }
- if severity in colors:
- return colors[severity](text)
- return text
diff --git a/workspace_tools/compliance/ioper_runner.py b/workspace_tools/compliance/ioper_runner.py
deleted file mode 100644
index 6b5bf57..0000000
--- a/workspace_tools/compliance/ioper_runner.py
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/env python2
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-
-"""
-
-import sys
-import mbed_lstools
-from prettytable import PrettyTable
-
-try:
- from colorama import init
-except:
- pass
-
-COLORAMA = 'colorama' in sys.modules
-
-from ioper_base import IOperTestCaseBase
-from ioper_test_fs import IOperTest_FileStructure_Basic
-from ioper_test_fs import IOperTest_FileStructure_MbedEnabled
-from ioper_test_target_id import IOperTest_TargetID_Basic
-from ioper_test_target_id import IOperTest_TargetID_MbedEnabled
-
-
-TEST_LIST = [IOperTest_TargetID_Basic('basic'),
- IOperTest_TargetID_MbedEnabled('mbed-enabled'),
- IOperTest_FileStructure_Basic('basic'),
- IOperTest_FileStructure_MbedEnabled('mbed-enabled'),
- IOperTestCaseBase('all'), # Dummy used to add 'all' option
- ]
-
-
-class IOperTestRunner():
- """ Calls all i/face interoperability tests
- """
-
- def __init__(self, scope=None):
- """ Test scope:
- 'pedantic' - all
- 'mbed-enabled' - let's try to check if this device is mbed-enabled
- 'basic' - just simple, passive tests (no device flashing)
- """
- self.requested_scope = scope # Test scope given by user
- self.raw_test_results = {} # Raw test results, can be used by exporters: { Platform: [test results]}
-
- # Test scope definitions
- self.SCOPE_BASIC = 'basic' # Basic tests, sanity checks
- self.SCOPE_MBED_ENABLED = 'mbed-enabled' # Let's try to check if this device is mbed-enabled
- self.SCOPE_PEDANTIC = 'pedantic' # Extensive tests
- self.SCOPE_ALL = 'all' # All tests, equal to highest scope level
-
- # This structure will help us sort test scopes so we can include them
- # e.g. pedantic also includes basic and mbed-enabled tests
- self.scopes = {self.SCOPE_BASIC : 0,
- self.SCOPE_MBED_ENABLED : 1,
- self.SCOPE_PEDANTIC : 2,
- self.SCOPE_ALL : 99,
- }
-
- if COLORAMA:
- init() # colorama.init()
-
- def run(self):
- """ Run tests, calculate overall score and print test results
- """
- mbeds = mbed_lstools.create()
- muts_list = mbeds.list_mbeds()
- test_base = IOperTestCaseBase()
-
- self.raw_test_results = {}
- for i, mut in enumerate(muts_list):
- result = []
- self.raw_test_results[mut['platform_name']] = []
-
- print "MBEDLS: Detected %s, port: %s, mounted: %s"% (mut['platform_name'],
- mut['serial_port'],
- mut['mount_point'])
- print "Running interoperability test suite, scope '%s'" % (self.requested_scope)
- for test_case in TEST_LIST:
- if self.scopes[self.requested_scope] >= self.scopes[test_case.scope]:
- res = test_case.test(param=mut)
- result.extend(res)
- self.raw_test_results[mut['platform_name']].extend(res)
-
- columns = ['Platform', 'Test Case', 'Result', 'Scope', 'Description']
- pt = PrettyTable(columns)
- for col in columns:
- pt.align[col] = 'l'
-
- for tr in result:
- severity, tr_name, tr_scope, text = tr
- tr = (test_base.COLOR(severity, mut['platform_name']),
- test_base.COLOR(severity, tr_name),
- test_base.COLOR(severity, severity),
- test_base.COLOR(severity, tr_scope),
- test_base.COLOR(severity, text))
- pt.add_row(list(tr))
- print pt.get_string(border=True, sortby='Result')
- if i + 1 < len(muts_list):
- print
- return self.raw_test_results
-
-def get_available_oper_test_scopes():
- """ Get list of available test scopes
- """
- scopes = set()
- for oper_test in TEST_LIST:
- if oper_test.scope is not None:
- scopes.add(oper_test.scope)
- return list(scopes)
diff --git a/workspace_tools/compliance/ioper_test_fs.py b/workspace_tools/compliance/ioper_test_fs.py
deleted file mode 100644
index 945855d..0000000
--- a/workspace_tools/compliance/ioper_test_fs.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-
-"""
-
-import os.path
-from ioper_base import IOperTestCaseBase
-
-
-class IOperTest_FileStructure(IOperTestCaseBase):
-
- def __init__(self, scope=None):
- IOperTestCaseBase.__init__(self, scope)
-
- def if_file_exist(self, fname, fail_severity=None):
- file_path = os.path.join(self.param['mount_point'], fname)
- exist = os.path.isfile(file_path)
- tr_name = "FILE_EXIST(%s)" % fname.upper()
- if exist:
- self.result.append((self.PASS, tr_name, self.scope, "File '%s' exists" % file_path))
- else:
- self.result.append((fail_severity if fail_severity else self.ERROR, tr_name, self.scope, "File '%s' not found" % file_path))
-
- def test(self, param=None):
- self.result = []
- if param:
- pass
- return self.result
-
-
-class IOperTest_FileStructure_Basic(IOperTest_FileStructure):
- def __init__(self, scope=None):
- IOperTest_FileStructure.__init__(self, scope)
-
- def test(self, param=None):
- self.param = param
- self.result = []
- if param:
- self.if_file_exist('mbed.htm', self.ERROR)
- return self.result
-
-
-class IOperTest_FileStructure_MbedEnabled(IOperTest_FileStructure):
- def __init__(self, scope=None):
- IOperTest_FileStructure.__init__(self, scope)
-
- def test(self, param=None):
- self.param = param
- self.result = []
- if param:
- self.if_file_exist('mbed.htm', self.ERROR)
- self.if_file_exist('DETAILS.TXT', self.ERROR)
- self.if_file_exist('FAIL.TXT', self.INFO)
- return self.result
diff --git a/workspace_tools/compliance/ioper_test_target_id.py b/workspace_tools/compliance/ioper_test_target_id.py
deleted file mode 100644
index 55fa0d1..0000000
--- a/workspace_tools/compliance/ioper_test_target_id.py
+++ /dev/null
@@ -1,111 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-
-"""
-
-from ioper_base import IOperTestCaseBase
-
-
-class IOperTest_TargetID(IOperTestCaseBase):
- """ tests related to target_id value
- """
-
- def __init__(self, scope=None):
- IOperTestCaseBase.__init__(self, scope)
- self.TARGET_ID_LEN = 24
-
- def test_target_id_format(self, target_id, target_id_name):
- # Expected length == 24, eg. "02400203D94B0E7724B7F3CF"
- result = []
- target_id_len = len(target_id) if target_id else 0
- if target_id_len == self.TARGET_ID_LEN:
- result.append((self.PASS, "TARGET_ID_LEN", self.scope, "%s '%s' is %d chars long " % (target_id_name, target_id, target_id_len)))
- result.append((self.INFO, "FW_VER_STR", self.scope, "%s Version String is %s.%s.%s " % (target_id_name,
- target_id[0:4],
- target_id[4:8],
- target_id[8:24],
- )))
- else:
- result.append((self.ERROR, "TARGET_ID_LEN", self.scope, "%s '%s' is %d chars long. Expected %d chars" % (target_id_name, target_id, target_id_len, self.TARGET_ID_LEN)))
- return result
-
- def test_decode_target_id(self, target_id, target_id_name):
- result = []
- target_id_len = len(target_id) if target_id else 0
- if target_id_len >= 4:
- result.append((self.INFO, "FW_VEN_CODE", self.scope, "%s Vendor Code is '%s'" % (target_id_name, target_id[0:2])))
- result.append((self.INFO, "FW_PLAT_CODE", self.scope, "%s Platform Code is '%s'" % (target_id_name, target_id[2:4])))
- result.append((self.INFO, "FW_VER", self.scope, "%s Firmware Version is '%s'" % (target_id_name, target_id[4:8])))
- result.append((self.INFO, "FW_HASH_SEC", self.scope, "%s Hash of secret is '%s'" % (target_id_name, target_id[8:24])))
- return result
-
- def test(self, param=None):
- result = []
- if param:
- pass
- return result
-
-
-class IOperTest_TargetID_Basic(IOperTest_TargetID):
- """ Basic interoperability tests checking TargetID compliance
- """
-
- def __init__(self, scope=None):
- IOperTest_TargetID.__init__(self, scope)
-
- def test(self, param=None):
- result = []
-
- if param:
- result.append((self.PASS, "TARGET_ID", self.scope, "TargetID '%s' found" % param['target_id']))
-
- # Check if target name can be decoded with mbed-ls
- if param['platform_name']:
- result.append((self.PASS, "TARGET_ID_DECODE", self.scope, "TargetID '%s' decoded as '%s'" % (param['target_id'][0:4], param['platform_name'])))
- else:
- result.append((self.ERROR, "TARGET_ID_DECODE", self.scope, "TargetID '%s'... not decoded" % (param['target_id'] if param['target_id'] else '')))
-
- # Test for USBID and mbed.htm consistency
- if param['target_id_mbed_htm'] == param['target_id_usb_id']:
- result.append((self.PASS, "TARGET_ID_MATCH", self.scope, "TargetID (USBID) and TargetID (mbed.htm) match"))
- else:
- text = "TargetID (USBID) and TargetID (mbed.htm) don't match: '%s' != '%s'" % (param['target_id_usb_id'], param['target_id_mbed_htm'])
- result.append((self.WARN, "TARGET_ID_MATCH", self.scope, text))
- else:
- result.append((self.ERROR, "TARGET_ID", self.scope, "TargetID not found"))
- return result
-
-class IOperTest_TargetID_MbedEnabled(IOperTest_TargetID):
- """ Basic interoperability tests checking TargetID compliance
- """
-
- def __init__(self, scope=None):
- IOperTest_TargetID.__init__(self, scope)
-
- def test(self, param=None):
- result = []
-
- if param:
- # Target ID tests:
- result += self.test_target_id_format(param['target_id_usb_id'], "TargetId (USBID)")
- result += self.test_target_id_format(param['target_id_mbed_htm'], "TargetId (mbed.htm)")
-
- # Some extra info about TargetID itself
- result += self.test_decode_target_id(param['target_id_usb_id'], "TargetId (USBID)")
- result += self.test_decode_target_id(param['target_id_mbed_htm'], "TargetId (mbed.htm)")
- return result
diff --git a/workspace_tools/data/__init__.py b/workspace_tools/data/__init__.py
deleted file mode 100644
index 1fa8431..0000000
--- a/workspace_tools/data/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
diff --git a/workspace_tools/data/rpc/RPCClasses.h b/workspace_tools/data/rpc/RPCClasses.h
deleted file mode 100644
index ab90b53..0000000
--- a/workspace_tools/data/rpc/RPCClasses.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/* mbed Microcontroller Library
- * Copyright (c) 2006-2012 ARM Limited
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-#ifndef MBED_CLASSES_H
-#define MBED_CLASSES_H
-
-#include "rpc.h"
-
-namespace mbed {
-
-{{classes}}
-
-}
-
-#endif
-
diff --git a/workspace_tools/data/rpc/class.cpp b/workspace_tools/data/rpc/class.cpp
deleted file mode 100644
index f783198..0000000
--- a/workspace_tools/data/rpc/class.cpp
+++ /dev/null
@@ -1,24 +0,0 @@
-class Rpc{{name}} : public RPC {
-public:
- Rpc{{name}}({{cons_proto}}) : RPC(name), o({{cons_call}}) {}
-
- {{methods}}
-
- virtual const struct rpc_method *get_rpc_methods() {
- static const rpc_method rpc_methods[] = {
- {{rpc_methods}},
- RPC_METHOD_SUPER(RPC)
- };
- return rpc_methods;
- }
- static struct rpc_class *get_rpc_class() {
- static const rpc_function funcs[] = {
- {"new", rpc_function_caller >},
- RPC_METHOD_END
- };
- static rpc_class c = {"{{name}}", funcs, NULL};
- return &c;
- }
-private:
- {{name}} o;
-};
diff --git a/workspace_tools/data/support.py b/workspace_tools/data/support.py
deleted file mode 100644
index b47380f..0000000
--- a/workspace_tools/data/support.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from workspace_tools.targets import TARGETS
-
-DEFAULT_SUPPORT = {}
-CORTEX_ARM_SUPPORT = {}
-
-for target in TARGETS:
- DEFAULT_SUPPORT[target.name] = target.supported_toolchains
-
- if target.core.startswith('Cortex'):
- CORTEX_ARM_SUPPORT[target.name] = [t for t in target.supported_toolchains
- if (t=='ARM' or t=='uARM')]
diff --git a/workspace_tools/dev/__init__.py b/workspace_tools/dev/__init__.py
deleted file mode 100644
index 1fa8431..0000000
--- a/workspace_tools/dev/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
diff --git a/workspace_tools/dev/dsp_fir.py b/workspace_tools/dev/dsp_fir.py
deleted file mode 100644
index f62c2b4..0000000
--- a/workspace_tools/dev/dsp_fir.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from numpy import sin, arange, pi
-from scipy.signal import lfilter, firwin
-from pylab import figure, plot, grid, show
-
-#------------------------------------------------
-# Create a signal for demonstration.
-#------------------------------------------------
-# 320 samples of (1000Hz + 15000 Hz) at 48 kHz
-sample_rate = 48000.
-nsamples = 320
-
-F_1KHz = 1000.
-A_1KHz = 1.0
-
-F_15KHz = 15000.
-A_15KHz = 0.5
-
-t = arange(nsamples) / sample_rate
-signal = A_1KHz * sin(2*pi*F_1KHz*t) + A_15KHz*sin(2*pi*F_15KHz*t)
-
-#------------------------------------------------
-# Create a FIR filter and apply it to signal.
-#------------------------------------------------
-# The Nyquist rate of the signal.
-nyq_rate = sample_rate / 2.
-
-# The cutoff frequency of the filter: 6KHz
-cutoff_hz = 6000.0
-
-# Length of the filter (number of coefficients, i.e. the filter order + 1)
-numtaps = 29
-
-# Use firwin to create a lowpass FIR filter
-fir_coeff = firwin(numtaps, cutoff_hz/nyq_rate)
-
-# Use lfilter to filter the signal with the FIR filter
-filtered_signal = lfilter(fir_coeff, 1.0, signal)
-
-#------------------------------------------------
-# Plot the original and filtered signals.
-#------------------------------------------------
-
-# The first N-1 samples are "corrupted" by the initial conditions
-warmup = numtaps - 1
-
-# The phase delay of the filtered signal
-delay = (warmup / 2) / sample_rate
-
-figure(1)
-# Plot the original signal
-plot(t, signal)
-
-# Plot the filtered signal, shifted to compensate for the phase delay
-plot(t-delay, filtered_signal, 'r-')
-
-# Plot just the "good" part of the filtered signal. The first N-1
-# samples are "corrupted" by the initial conditions.
-plot(t[warmup:]-delay, filtered_signal[warmup:], 'g', linewidth=4)
-
-grid(True)
-
-show()
-
-#------------------------------------------------
-# Print values
-#------------------------------------------------
-def print_values(label, values):
- var = "float32_t %s[%d]" % (label, len(values))
- print "%-30s = {%s}" % (var, ', '.join(["%+.10f" % x for x in values]))
-
-print_values('signal', signal)
-print_values('fir_coeff', fir_coeff)
-print_values('filtered_signal', filtered_signal)
diff --git a/workspace_tools/dev/intel_hex_utils.py b/workspace_tools/dev/intel_hex_utils.py
deleted file mode 100644
index c60e9c4..0000000
--- a/workspace_tools/dev/intel_hex_utils.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from intelhex import IntelHex
-from cStringIO import StringIO
-
-
-def sections(h):
- start, last_address = None, None
- for a in h.addresses():
- if last_address is None:
- start, last_address = a, a
- continue
-
- if a > last_address + 1:
- yield (start, last_address)
- start = a
-
- last_address = a
-
- if start:
- yield (start, last_address)
-
-
-def print_sections(h):
- for s in sections(h):
- print "[0x%08X - 0x%08X]" % s
-
-
-def decode(record):
- h = IntelHex()
- f = StringIO(record)
- h.loadhex(f)
- h.dump()
diff --git a/workspace_tools/dev/rpc_classes.py b/workspace_tools/dev/rpc_classes.py
deleted file mode 100644
index f082f3b..0000000
--- a/workspace_tools/dev/rpc_classes.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from os.path import join
-from jinja2 import Template
-
-from workspace_tools.paths import TOOLS_DATA, MBED_RPC
-
-RPC_TEMPLATES_PATH = join(TOOLS_DATA, "rpc")
-
-RPC_TEMPLATE = "RPCClasses.h"
-CLASS_TEMPLATE = "class.cpp"
-RPC_CLASSES_PATH = join(MBED_RPC, RPC_TEMPLATE)
-
-
-def get_template(name):
- return Template(open(join(RPC_TEMPLATES_PATH, name)).read())
-
-
-def write_rpc_classes(classes):
- template = get_template(RPC_TEMPLATE)
- open(RPC_CLASSES_PATH, "w").write(template.render({"classes":classes}))
-
-
-RPC_CLASSES = (
- {
- "name": "DigitalOut",
- "cons_args": ["PinName"],
- "methods": [
- (None , "write", ["int"]),
- ("int", "read" , []),
- ]
- },
- {
- "name": "DigitalIn",
- "cons_args": ["PinName"],
- "methods": [
- ("int", "read" , []),
- ]
- },
- {
- "name": "DigitalInOut",
- "cons_args": ["PinName"],
- "methods": [
- ("int", "read" , []),
- (None , "write" , ["int"]),
- (None , "input" , []),
- (None , "output", []),
- ]
- },
- {
- "name": "AnalogIn",
- "required": "ANALOGIN",
- "cons_args": ["PinName"],
- "methods": [
- ("float" , "read" , []),
- ("unsigned short", "read_u16", []),
- ]
- },
- {
- "name": "AnalogOut",
- "required": "ANALOGOUT",
- "cons_args": ["PinName"],
- "methods": [
- ("float", "read" , []),
- (None , "write" , ["float"]),
- (None , "write_u16", ["unsigned short"]),
- ]
- },
- {
- "name": "PwmOut",
- "required": "PWMOUT",
- "cons_args": ["PinName"],
- "methods": [
- ("float", "read" , []),
- (None , "write" , ["float"]),
- (None , "period" , ["float"]),
- (None , "period_ms" , ["int"]),
- (None , "pulsewidth" , ["float"]),
- (None , "pulsewidth_ms", ["int"]),
- ]
- },
- {
- "name": "SPI",
- "required": "SPI",
- "cons_args": ["PinName", "PinName", "PinName"],
- "methods": [
- (None , "format" , ["int", "int"]),
- (None , "frequency", ["int"]),
- ("int", "write" , ["int"]),
- ]
- },
- {
- "name": "Serial",
- "required": "SERIAL",
- "cons_args": ["PinName", "PinName"],
- "methods": [
- (None , "baud" , ["int"]),
- ("int", "readable" , []),
- ("int", "writeable", []),
- ("int", "putc" , ["int"]),
- ("int", "getc" , []),
- ("int", "puts" , ["const char *"]),
- ]
- },
- {
- "name": "Timer",
- "cons_args": [],
- "methods": [
- (None , "start" , []),
- (None , "stop" , []),
- (None , "reset" , []),
- ("float", "read" , []),
- ("int" , "read_ms", []),
- ("int" , "read_us", []),
- ]
- }
-)
-
-
-def get_args_proto(args_types, extra=None):
- args = ["%s a%d" % (s, n) for n, s in enumerate(args_types)]
- if extra:
- args.extend(extra)
- return ', '.join(args)
-
-
-def get_args_call(args):
- return ', '.join(["a%d" % (n) for n in range(len(args))])
-
-
-classes = []
-class_template = get_template(CLASS_TEMPLATE)
-
-for c in RPC_CLASSES:
- c_args = c['cons_args']
- data = {
- 'name': c['name'],
- 'cons_type': ', '.join(c_args + ['const char*']),
- "cons_proto": get_args_proto(c_args, ["const char *name=NULL"]),
- "cons_call": get_args_call(c_args)
- }
-
- c_name = "Rpc" + c['name']
-
- methods = []
- rpc_methods = []
- for r, m, a in c['methods']:
- ret_proto = r if r else "void"
- args_proto = "void"
-
- ret_defin = "return " if r else ""
- args_defin = ""
-
- if a:
- args_proto = get_args_proto(a)
- args_defin = get_args_call(a)
-
- proto = "%s %s(%s)" % (ret_proto, m, args_proto)
- defin = "{%so.%s(%s);}" % (ret_defin, m, args_defin)
- methods.append("%s %s" % (proto, defin))
-
- rpc_method_type = [r] if r else []
- rpc_method_type.append(c_name)
- rpc_method_type.extend(a)
- rpc_methods.append('{"%s", rpc_method_caller<%s, &%s::%s>}' % (m, ', '.join(rpc_method_type), c_name, m))
-
- data['methods'] = "\n ".join(methods)
- data['rpc_methods'] = ",\n ".join(rpc_methods)
-
- class_decl = class_template.render(data)
- if 'required' in c:
- class_decl = "#if DEVICE_%s\n%s\n#endif" % (c['required'], class_decl)
-
- classes.append(class_decl)
-
-write_rpc_classes('\n\n'.join(classes))
diff --git a/workspace_tools/dev/syms.py b/workspace_tools/dev/syms.py
deleted file mode 100644
index 2fdbd2d..0000000
--- a/workspace_tools/dev/syms.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-Utility to find which libraries could define a given symbol
-"""
-from argparse import ArgumentParser
-from os.path import join, splitext
-from os import walk
-from subprocess import Popen, PIPE
-
-
-OBJ_EXT = ['.o', '.a', '.ar']
-
-
-def find_sym_in_lib(sym, obj_path):
- contain_symbol = False
-
- out = Popen(["nm", "-C", obj_path], stdout=PIPE, stderr=PIPE).communicate()[0]
- for line in out.splitlines():
- tokens = line.split()
- n = len(tokens)
- if n == 2:
- sym_type = tokens[0]
- sym_name = tokens[1]
- elif n == 3:
- sym_type = tokens[1]
- sym_name = tokens[2]
- else:
- continue
-
- if sym_type == "U":
- # This object is using this symbol, not defining it
- continue
-
- if sym_name == sym:
- contain_symbol = True
-
- return contain_symbol
-
-
-def find_sym_in_path(sym, dir_path):
- for root, _, files in walk(dir_path):
- for file in files:
-
- _, ext = splitext(file)
- if ext not in OBJ_EXT: continue
-
- path = join(root, file)
- if find_sym_in_lib(sym, path):
- print path
-
-
-if __name__ == '__main__':
- parser = ArgumentParser(description='Find Symbol')
- parser.add_argument('-s', '--sym', required=True,
- help='The symbol to be searched')
- parser.add_argument('-p', '--path', required=True,
- help='The path where to search')
- args = parser.parse_args()
-
- find_sym_in_path(args.sym, args.path)
diff --git a/workspace_tools/export/.hgignore b/workspace_tools/export/.hgignore
deleted file mode 100755
index c309ef5..0000000
--- a/workspace_tools/export/.hgignore
+++ /dev/null
@@ -1,22 +0,0 @@
-syntax: regexp
-\.hgignore$
-\.git$
-\.svn$
-\.orig$
-\.msub$
-\.meta$
-\.ctags
-\.uvproj$
-\.uvopt$
-\.project$
-\.cproject$
-\.launch$
-\.project$
-\.cproject$
-\.launch$
-Makefile$
-\.ewp$
-\.eww$
-\.htm$
-Debug$
-.settings$
diff --git a/workspace_tools/export/README.md b/workspace_tools/export/README.md
deleted file mode 100644
index 1027775..0000000
--- a/workspace_tools/export/README.md
+++ /dev/null
@@ -1,1148 +0,0 @@
-Exporter IDE/Platform Support
------------------------------------
-
-
-
-
Platform
-
codesourcery
-
coide
-
ds5_5
-
emblocks
-
gcc_arm
-
iar
-
kds
-
lpcxpresso
-
uvision
-
-
-
APPNEARME_MICRONFCBOARD
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARCH_BLE
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
ARCH_GPRS
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARCH_MAX
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
ARCH_PRO
-
✓
-
✓
-
✓
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
ARM_MPS2
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARM_MPS2_M0
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARM_MPS2_M0P
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARM_MPS2_M1
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARM_MPS2_M3
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARM_MPS2_M4
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
ARM_MPS2_M7
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
DELTA_DFCM_NNN40
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
DELTA_DFCM_NNN40_OTA
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
DISCO_F051R8
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
DISCO_F100RB
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
DISCO_F303VC
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
DISCO_F334C8
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
DISCO_F401VC
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
DISCO_F407VG
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
DISCO_F429ZI
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
DISCO_L053C8
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
HRM1017
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
K20D50M
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
K22F
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
✓
-
-
-
✓
-
-
-
K64F
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
✓
-
-
-
✓
-
-
-
KL05Z
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
KL25Z
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
KL43Z
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
KL46Z
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
LPC1114
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
LPC11C24
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
✓
-
-
-
LPC11U24
-
-
-
-
-
✓
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
LPC11U24_301
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
LPC11U34_421
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
LPC11U35_401
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
✓
-
-
-
-
-
LPC11U35_501
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
✓
-
-
-
-
-
LPC11U35_Y5_MBUG
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
LPC11U37H_401
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
✓
-
✓
-
-
-
LPC11U37_501
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
LPC11U68
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
✓
-
✓
-
-
-
LPC1347
-
-
-
-
-
-
-
✓
-
-
-
✓
-
-
-
-
-
✓
-
-
-
LPC1549
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
LPC1768
-
✓
-
✓
-
✓
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
LPC2368
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
LPC4088
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
LPC4088_DM
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
LPC4330_M0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
LPC4330_M4
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
✓
-
✓
-
-
-
LPC4337
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
✓
-
-
-
LPC810
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
LPC812
-
-
-
-
-
✓
-
-
-
-
-
✓
-
-
-
-
-
✓
-
-
-
LPC824
-
-
-
-
-
-
-
✓
-
-
-
✓
-
-
-
✓
-
✓
-
-
-
LPCCAPPUCCINO
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
-
-
MTS_DRAGONFLY_F411RE
-
-
-
-
-
-
-
✓
-
-
-
✓
-
-
-
-
-
-
-
-
-
MTS_GAMBIT
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
MTS_MDOT_F405RG
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
MTS_MDOT_F411RE
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
-
-
-
-
NRF51822
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NRF51822_BOOT
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
NRF51822_OTA
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
NRF51822_Y5_MBUG
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
NRF51_DK
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NRF51_DK_BOOT
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
NRF51_DK_OTA
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
NRF51_DONGLE
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F030R8
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F070RB
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F072RB
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F091RC
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F103RB
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F302R8
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F303RE
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F334R8
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F401RE
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_F411RE
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_L053R8
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_L073RZ
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
NUCLEO_L152RE
-
-
-
✓
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
OC_MBUINO
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
RBLAB_BLENANO
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
RBLAB_NRF51822
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
RZ_A1H
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
SEEED_TINY_BLE
-
-
-
-
-
-
-
✓
-
✓
-
✓
-
-
-
-
-
✓
-
-
-
SEEED_TINY_BLE_BOOT
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
SEEED_TINY_BLE_OTA
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
SSCI824
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
STM32F3XX
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
STM32F407
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
-
-
-
-
TEENSY3_1
-
-
-
-
-
-
-
✓
-
✓
-
-
-
-
-
-
-
✓
-
-
-
UBLOX_C027
-
✓
-
✓
-
✓
-
✓
-
✓
-
✓
-
-
-
✓
-
✓
-
-
-
UBLOX_C029
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
WALLBOT_BLE
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-
XADOW_M0
-
-
-
-
-
-
-
✓
-
-
-
-
-
-
-
-
-
-
-
-
-Total IDEs: 9
- Total platforms: 94
- Total permutations: 288
diff --git a/workspace_tools/export/__init__.py b/workspace_tools/export/__init__.py
deleted file mode 100755
index 1b4cd19..0000000
--- a/workspace_tools/export/__init__.py
+++ /dev/null
@@ -1,219 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import os, tempfile
-from os.path import join, exists, basename
-from shutil import copytree, rmtree, copy
-import yaml
-
-from workspace_tools.utils import mkdir
-from workspace_tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio
-from workspace_tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException
-from workspace_tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP
-
-from project_generator_definitions.definitions import ProGenDef
-
-EXPORTERS = {
- 'uvision': uvision4.Uvision4,
- 'uvision5': uvision5.Uvision5,
- 'lpcxpresso': codered.CodeRed,
- 'gcc_arm': gccarm.GccArm,
- 'ds5_5': ds5_5.DS5_5,
- 'iar': iar.IAREmbeddedWorkbench,
- 'emblocks' : emblocks.IntermediateFile,
- 'coide' : coide.CoIDE,
- 'kds' : kds.KDS,
- 'simplicityv3' : simplicityv3.SimplicityV3,
- 'atmelstudio' : atmelstudio.AtmelStudio,
- 'sw4stm32' : sw4stm32.Sw4STM32,
- 'e2studio' : e2studio.E2Studio,
-}
-
-ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN = """
-Sorry, the target %s is not currently supported on the %s toolchain.
-Please refer to Exporting to offline toolchains for more information.
-"""
-
-ERROR_MESSAGE_NOT_EXPORT_LIBS = """
-To export this project please import the export version of the mbed library.
-"""
-
-def online_build_url_resolver(url):
- # TODO: Retrieve the path and name of an online library build URL
- return {'path':'', 'name':''}
-
-
-def export(project_path, project_name, ide, target, destination='/tmp/',
- tempdir=None, clean=True, extra_symbols=None, build_url_resolver=online_build_url_resolver):
- # Convention: we are using capitals for toolchain and target names
- if target is not None:
- target = target.upper()
-
- if tempdir is None:
- tempdir = tempfile.mkdtemp()
-
- use_progen = False
- supported = True
- report = {'success': False, 'errormsg':''}
-
- if ide is None or ide == "zip":
- # Simple ZIP exporter
- try:
- ide = "zip"
- exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
- exporter.scan_and_copy_resources(project_path, tempdir)
- exporter.generate()
- report['success'] = True
- except OldLibrariesException, e:
- report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
- else:
- if ide not in EXPORTERS:
- report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
- else:
- Exporter = EXPORTERS[ide]
- target = EXPORT_MAP.get(target, target)
- try:
- if Exporter.PROGEN_ACTIVE:
- use_progen = True
- except AttributeError:
- pass
- if use_progen:
- if not ProGenDef(ide).is_supported(TARGET_MAP[target].progen['target']):
- supported = False
- else:
- if target not in Exporter.TARGETS:
- supported = False
-
- if supported:
- # target checked, export
- try:
- exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols)
- exporter.scan_and_copy_resources(project_path, tempdir)
- exporter.generate()
- report['success'] = True
- except OldLibrariesException, e:
- report['errormsg'] = ERROR_MESSAGE_NOT_EXPORT_LIBS
- else:
- report['errormsg'] = ERROR_MESSAGE_UNSUPPORTED_TOOLCHAIN % (target, ide)
-
- zip_path = None
- if report['success']:
- # readme.txt to contain more exported data
- exporter_yaml = {
- 'project_generator': {
- 'active' : False,
- }
- }
- if use_progen:
- try:
- import pkg_resources
- version = pkg_resources.get_distribution('project_generator').version
- exporter_yaml['project_generator']['version'] = version
- exporter_yaml['project_generator']['active'] = True;
- exporter_yaml['project_generator_definitions'] = {}
- version = pkg_resources.get_distribution('project_generator_definitions').version
- exporter_yaml['project_generator_definitions']['version'] = version
- except ImportError:
- pass
- with open(os.path.join(tempdir, 'exporter.yaml'), 'w') as outfile:
- yaml.dump(exporter_yaml, outfile, default_flow_style=False)
- # add readme file to every offline export.
- open(os.path.join(tempdir, 'GettingStarted.htm'),'w').write(''% (ide))
- # copy .hgignore file to exported direcotry as well.
- copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'),tempdir)
- zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean)
-
- return zip_path, report
-
-
-###############################################################################
-# Generate project folders following the online conventions
-###############################################################################
-def copy_tree(src, dst, clean=True):
- if exists(dst):
- if clean:
- rmtree(dst)
- else:
- return
-
- copytree(src, dst)
-
-
-def setup_user_prj(user_dir, prj_path, lib_paths=None):
- """
- Setup a project with the same directory structure of the mbed online IDE
- """
- mkdir(user_dir)
-
- # Project Path
- copy_tree(prj_path, join(user_dir, "src"))
-
- # Project Libraries
- user_lib = join(user_dir, "lib")
- mkdir(user_lib)
-
- if lib_paths is not None:
- for lib_path in lib_paths:
- copy_tree(lib_path, join(user_lib, basename(lib_path)))
-
-def mcu_ide_matrix(verbose_html=False, platform_filter=None):
- """ Shows target map using prettytable """
- supported_ides = []
- for key in EXPORTERS.iterkeys():
- supported_ides.append(key)
- supported_ides.sort()
- from prettytable import PrettyTable, ALL # Only use it in this function so building works without extra modules
-
- # All tests status table print
- columns = ["Platform"] + supported_ides
- pt = PrettyTable(columns)
- # Align table
- for col in columns:
- pt.align[col] = "c"
- pt.align["Platform"] = "l"
-
- perm_counter = 0
- target_counter = 0
- for target in sorted(TARGET_NAMES):
- target_counter += 1
-
- row = [target] # First column is platform name
- for ide in supported_ides:
- text = "-"
- if target in EXPORTERS[ide].TARGETS:
- if verbose_html:
- text = "✓"
- else:
- text = "x"
- perm_counter += 1
- row.append(text)
- pt.add_row(row)
-
- pt.border = True
- pt.vrules = ALL
- pt.hrules = ALL
- # creates a html page suitable for a browser
- # result = pt.get_html_string(format=True) if verbose_html else pt.get_string()
- # creates a html page in a shorter format suitable for readme.md
- result = pt.get_html_string() if verbose_html else pt.get_string()
- result += "\n"
- result += "Total IDEs: %d\n"% (len(supported_ides))
- if verbose_html: result += " "
- result += "Total platforms: %d\n"% (target_counter)
- if verbose_html: result += " "
- result += "Total permutations: %d"% (perm_counter)
- if verbose_html: result = result.replace("&", "&")
- return result
diff --git a/workspace_tools/export/atmelstudio.py b/workspace_tools/export/atmelstudio.py
deleted file mode 100644
index 7b69d20..0000000
--- a/workspace_tools/export/atmelstudio.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import uuid
-from exporters import Exporter
-from os.path import splitext, basename, dirname
-
-
-class AtmelStudio(Exporter):
- NAME = 'AtmelStudio'
- TOOLCHAIN = 'GCC_ARM'
-
- TARGETS = [
- 'SAMD21J18A',
- 'SAMR21G18A',
- 'SAMD21G18A',
- 'SAML21J18A',
- 'SAMG55J19',
- ]
-
- DOT_IN_RELATIVE_PATH = True
-
- def generate(self):
-
- source_files = []
- dirs = []
- for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
- r = getattr(self.resources, r_type)
- if r:
- for source in r:
- source_files.append(source[2:])
- dirs.append(dirname(source[2:]))
-
- source_folders = []
- for e in dirs:
- if e and e not in source_folders:
- source_folders.append(e)
-
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- solution_uuid = '{' + str(uuid.uuid4()) + '}'
- project_uuid = '{' + str(uuid.uuid4()) + '}'
-
- ctx = {
- 'target': self.target,
- 'name': self.program_name,
- 'source_files': source_files,
- 'source_folders': source_folders,
- 'object_files': self.resources.objects,
- 'include_paths': self.resources.inc_dirs,
- 'library_paths': self.resources.lib_dirs,
- 'linker_script': self.resources.linker_script,
- 'libraries': libraries,
- 'symbols': self.get_symbols(),
- 'solution_uuid': solution_uuid.upper(),
- 'project_uuid': project_uuid.upper()
- }
- target = self.target.lower()
- self.gen_file('atmelstudio6_2.atsln.tmpl', ctx, '%s.atsln' % self.program_name)
- self.gen_file('atmelstudio6_2.cppproj.tmpl', ctx, '%s.cppproj' % self.program_name)
diff --git a/workspace_tools/export/atmelstudio6_2.atsln.tmpl b/workspace_tools/export/atmelstudio6_2.atsln.tmpl
deleted file mode 100644
index 3c8ea50..0000000
--- a/workspace_tools/export/atmelstudio6_2.atsln.tmpl
+++ /dev/null
@@ -1,20 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 11.00
-# Atmel Studio Solution File, Format Version 11.00
-Project("{{solution_uuid}}") = "{{name}}", "{{name}}.cppproj", "{{project_uuid}}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|ARM = Debug|ARM
- Release|ARM = Release|ARM
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {{project_uuid}}.Debug|ARM.ActiveCfg = Debug|ARM
- {{project_uuid}}.Debug|ARM.Build.0 = Debug|ARM
- {{project_uuid}}.Release|ARM.ActiveCfg = Release|ARM
- {{project_uuid}}.Release|ARM.Build.0 = Release|ARM
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
-EndGlobal
diff --git a/workspace_tools/export/atmelstudio6_2.cppproj.tmpl b/workspace_tools/export/atmelstudio6_2.cppproj.tmpl
deleted file mode 100644
index 98696b3..0000000
--- a/workspace_tools/export/atmelstudio6_2.cppproj.tmpl
+++ /dev/null
@@ -1,176 +0,0 @@
-
-
-
- 2.0
- 6.2
- com.Atmel.ARMGCC.CPP
- {{project_uuid}}
- AT{{target}}
- none
- Executable
- CPP
- $(MSBuildProjectName)
- .elf
- $(MSBuildProjectDirectory)\$(Configuration)
- AtmelStudio6_2
- AtmelStudio6_2
- AtmelStudio6_2
- Native
- true
- false
- true
- true
-
-
- true
-
- 2
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
- True
- True
- True
- True
- True
-
-
- NDEBUG
- {% for s in symbols %}{{s}}
- {% endfor %}
-
-
-
-
- {% for i in include_paths %}../{{i}}
- {% endfor %}
-
-
- Optimize for size (-Os)
- True
- True
- -std=gnu99 -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
-
-
- NDEBUG
- {% for s in symbols %}{{s}}
- {% endfor %}
-
-
-
-
- {% for i in include_paths %}../{{i}}
- {% endfor %}
-
-
- Optimize for size (-Os)
- True
- True
- -std=gnu++98 -fno-rtti -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
-
-
- libm
-
-
-
-
-
-
- True
- {% for p in library_paths %}-L../{{p}} {% endfor %} {% for f in object_files %}../{{f}} {% endfor %} {% for lib in libraries %}-l{{lib}} {% endfor %} -T../{{linker_script}} -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,--cref -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
-
-
- {% for i in include_paths %}../{{i}}
- {% endfor %}
-
-
-
-
-
-
-
-
- True
- True
- True
- True
- True
-
-
- DEBUG
- {% for s in symbols %}{{s}}
- {% endfor %}
-
-
-
-
- {% for i in include_paths %}../{{i}}
- {% endfor %}
-
-
- Optimize (-O1)
- True
- Maximum (-g3)
- True
- -std=gnu99 -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
-
-
- DEBUG
- {% for s in symbols %}{{s}}
- {% endfor %}
-
-
-
-
- {% for i in include_paths %}../{{i}}
- {% endfor %}
-
-
- Optimize (-O1)
- True
- Maximum (-g3)
- True
- -std=gnu++98 -fno-rtti -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP
-
-
- libm
-
-
-
-
-
-
- True
- {% for p in library_paths %}-L../{{p}} {% endfor %} {% for f in object_files %}../{{f}} {% endfor %} {% for lib in libraries %}-l{{lib}} {% endfor %} -T../{{linker_script}} -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,--cref -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
- Default (-g)
-
-
- {% for i in include_paths %}../{{i}}
- {% endfor %}
-
-
- Default (-Wa,-g)
-
-
-
-
- {% for f in source_folders %}
- {% endfor %}
- {% for s in source_files %}
- compile
-
- {% endfor %}
-
-
-
\ No newline at end of file
diff --git a/workspace_tools/export/codered.py b/workspace_tools/export/codered.py
deleted file mode 100755
index c502096..0000000
--- a/workspace_tools/export/codered.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename
-
-
-class CodeRed(Exporter):
- NAME = 'CodeRed'
- TOOLCHAIN = 'GCC_CR'
-
- TARGETS = [
- 'LPC1768',
- 'LPC4088',
- 'LPC4088_DM',
- 'LPC4330_M4',
- 'LPC1114',
- 'LPC11U35_401',
- 'LPC11U35_501',
- 'UBLOX_C027',
- 'ARCH_PRO',
- 'LPC1549',
- 'LPC11U68',
- 'LPCCAPPUCCINO',
- 'LPC824',
- 'LPC11U37H_401',
- ]
-
- def generate(self):
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- ctx = {
- 'name': self.program_name,
- 'include_paths': self.resources.inc_dirs,
- 'linker_script': self.resources.linker_script,
- 'object_files': self.resources.objects,
- 'libraries': libraries,
- 'symbols': self.get_symbols()
- }
- self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project')
- self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
diff --git a/workspace_tools/export/codered_arch_pro_cproject.tmpl b/workspace_tools/export/codered_arch_pro_cproject.tmpl
deleted file mode 100644
index b39438a..0000000
--- a/workspace_tools/export/codered_arch_pro_cproject.tmpl
+++ /dev/null
@@ -1,79 +0,0 @@
-{% extends "codered_cproject_cortexm3_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc176x.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC1768" property_count="5" version="1"/>
-<infoList vendor="NXP">
-<info chip="LPC1768" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC1768" package="lpc17_lqfp100.xml">
-<chip>
-<name>LPC1768</name>
-<family>LPC17xx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
-<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
-<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
-<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
-<peripheralInstance derived_from="LPC17_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
-<peripheralInstance derived_from="LPC17_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
-<peripheralInstance derived_from="LPC17_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
-<peripheralInstance derived_from="LPC17_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
-<peripheralInstance derived_from="LPC17_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
-<peripheralInstance derived_from="LPC17xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
-<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
-<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
-<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
-<peripheralInstance derived_from="LPC17_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
-<peripheralInstance derived_from="LPC17_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
-<peripheralInstance derived_from="LPC17_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
-<peripheralInstance derived_from="LPC17_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
-<peripheralInstance derived_from="LPC17_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
-<peripheralInstance derived_from="LPC17_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
-<peripheralInstance derived_from="LPC17_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
-<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
-<peripheralInstance derived_from="LPC17_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
-<peripheralInstance derived_from="LPC17_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
-<peripheralInstance derived_from="LPC17_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC17_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC17_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
-<peripheralInstance derived_from="LPC1x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
-<peripheralInstance derived_from="LPC17_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
-<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
-<peripheralInstance derived_from="LPC17_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
-<peripheralInstance derived_from="LPC17_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
-<peripheralInstance derived_from="LPC17_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
-<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
-<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
-<peripheralInstance derived_from="LPC17_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m3">Cortex-M3</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_arch_pro_project.tmpl b/workspace_tools/export/codered_arch_pro_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_arch_pro_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_cproject_common.tmpl b/workspace_tools/export/codered_cproject_common.tmpl
deleted file mode 100644
index b71f131..0000000
--- a/workspace_tools/export/codered_cproject_common.tmpl
+++ /dev/null
@@ -1,1850 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% block cpu_config %}{% endblock %}
-
-
-
diff --git a/workspace_tools/export/codered_cproject_cortexm0_common.tmpl b/workspace_tools/export/codered_cproject_cortexm0_common.tmpl
deleted file mode 100644
index 895485f..0000000
--- a/workspace_tools/export/codered_cproject_cortexm0_common.tmpl
+++ /dev/null
@@ -1,3 +0,0 @@
-{% extends "codered_cproject_common.tmpl" %}
-
-{% block core %}cm0{% endblock %}
diff --git a/workspace_tools/export/codered_cproject_cortexm3_common.tmpl b/workspace_tools/export/codered_cproject_cortexm3_common.tmpl
deleted file mode 100644
index 894afaa..0000000
--- a/workspace_tools/export/codered_cproject_cortexm3_common.tmpl
+++ /dev/null
@@ -1,3 +0,0 @@
-{% extends "codered_cproject_common.tmpl" %}
-
-{% block core %}cm3{% endblock %}
diff --git a/workspace_tools/export/codered_lpc1114_cproject.tmpl b/workspace_tools/export/codered_lpc1114_cproject.tmpl
deleted file mode 100644
index ae49cd5..0000000
--- a/workspace_tools/export/codered_lpc1114_cproject.tmpl
+++ /dev/null
@@ -1,48 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC11_12_13_32K_4K.cfx" property_3="NXP" property_4="LPC1114FN/102" property_count="5" version="60100"/>
-<infoList vendor="NXP">
-<info chip="LPC1114FN/102" flash_driver="LPC11_12_13_32K_4K.cfx" match_id="0x0A40902B,0x1A40902B" name="LPC1114FN/102" stub="crt_emu_lpc11_13_nxp">
-<chip>
-<name>LPC1114FN/102</name>
-<family>LPC11xx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash32" location="0x0" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="RamLoc4" location="0x10000000" size="0x1000"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
-<peripheralInstance derived_from="UART" determined="infoFile" id="UART" location="0x40008000"/>
-<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
-<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
-<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
-<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
-<peripheralInstance derived_from="SPI0" determined="infoFile" id="SPI0" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="GPIO0" determined="infoFile" id="GPIO0" location="0x50000000"/>
-<peripheralInstance derived_from="GPIO1" determined="infoFile" id="GPIO1" location="0x50010000"/>
-<peripheralInstance derived_from="GPIO2" determined="infoFile" id="GPIO2" location="0x50020000"/>
-<peripheralInstance derived_from="GPIO3" determined="infoFile" id="GPIO3" location="0x50030000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC11xx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc1114_project.tmpl b/workspace_tools/export/codered_lpc1114_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc1114_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc11u35_401_cproject.tmpl b/workspace_tools/export/codered_lpc11u35_401_cproject.tmpl
deleted file mode 100644
index e874ee6..0000000
--- a/workspace_tools/export/codered_lpc11u35_401_cproject.tmpl
+++ /dev/null
@@ -1,51 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U35/401" property_count="5" version="70002"/>
-<infoList vendor="NXP">
-<info chip="LPC11U35/401" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U35/401" stub="crt_emu_lpc11_13_nxp">
-<chip>
-<name>LPC11U35/401</name>
-<family>LPC11Uxx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
-<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
-<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
-<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
-<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
-<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
-<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
-<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
-<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
-<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
-<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc11u35_401_project.tmpl b/workspace_tools/export/codered_lpc11u35_401_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc11u35_401_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc11u35_501_cproject.tmpl b/workspace_tools/export/codered_lpc11u35_501_cproject.tmpl
deleted file mode 100644
index 622844e..0000000
--- a/workspace_tools/export/codered_lpc11u35_501_cproject.tmpl
+++ /dev/null
@@ -1,51 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U35/501" property_count="5" version="70002"/>
-<infoList vendor="NXP">
-<info chip="LPC11U35/501" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U35/501" stub="crt_emu_lpc11_13_nxp">
-<chip>
-<name>LPC11U35/501</name>
-<family>LPC11Uxx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
-<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
-<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
-<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
-<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
-<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
-<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
-<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
-<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
-<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
-<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc11u35_501_project.tmpl b/workspace_tools/export/codered_lpc11u35_501_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc11u35_501_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc11u37h_401_cproject.tmpl b/workspace_tools/export/codered_lpc11u37h_401_cproject.tmpl
deleted file mode 100644
index 4b9fd6e..0000000
--- a/workspace_tools/export/codered_lpc11u37h_401_cproject.tmpl
+++ /dev/null
@@ -1,51 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U37H/401" property_count="5" version="70002"/>
-<infoList vendor="NXP">
-<info chip="LPC11U37H/401" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U37H/401" stub="crt_emu_lpc11_13_nxp">
-<chip>
-<name>LPC11U37H/401</name>
-<family>LPC11Uxx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
-<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
-<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
-<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
-<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
-<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
-<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
-<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
-<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
-<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
-<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc11u37h_401_project.tmpl b/workspace_tools/export/codered_lpc11u37h_401_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc11u37h_401_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc11u68_cproject.tmpl b/workspace_tools/export/codered_lpc11u68_cproject.tmpl
deleted file mode 100644
index 0af4174..0000000
--- a/workspace_tools/export/codered_lpc11u68_cproject.tmpl
+++ /dev/null
@@ -1,60 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}startup_LPC11U68.cpp{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC11U6x_256K.cfx" property_3="NXP" property_4="LPC11U68" property_count="5" version="70200"/>
-<infoList vendor="NXP">
<info chip="LPC11U68" flash_driver="LPC11U6x_256K.cfx" match_id="0x0" name="LPC11U68" stub="crt_emu_cm3_gen">
<chip>
<name>
LPC11U68</name>
-<family>
LPC11U6x</family>
-<vendor>
NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash256" location="0x0" size="0x40000"/>
-<memoryInstance derived_from="RAM" id="Ram0_32" location="0x10000000" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="Ram1_2" location="0x20000000" size="0x800"/>
-<memoryInstance derived_from="RAM" id="Ram2USB_2" location="0x20004000" size="0x800"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="I2C0" determined="infoFile" id="I2C0" location="0x40000000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
-<peripheralInstance derived_from="USART0" determined="infoFile" id="USART0" location="0x40008000"/>
-<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
-<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
-<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
-<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="I2C1" determined="infoFile" id="I2C1" location="0x40020000"/>
-<peripheralInstance derived_from="RTC" determined="infoFile" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="DMATRIGMUX" determined="infoFile" id="DMATRIGMUX" location="0x40028000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
-<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="USART4" determined="infoFile" id="USART4" location="0x4004c000"/>
-<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
-<peripheralInstance derived_from="GINT0" determined="infoFile" id="GINT0" location="0x4005c000"/>
-<peripheralInstance derived_from="GINT1" determined="infoFile" id="GINT1" location="0x40060000"/>
-<peripheralInstance derived_from="USART1" determined="infoFile" id="USART1" location="0x4006c000"/>
-<peripheralInstance derived_from="USART2" determined="infoFile" id="USART2" location="0x40070000"/>
-<peripheralInstance derived_from="USART3" determined="infoFile" id="USART3" location="0x40074000"/>
-<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
-<peripheralInstance derived_from="CRC" determined="infoFile" id="CRC" location="0x50000000"/>
-<peripheralInstance derived_from="DMA" determined="infoFile" id="DMA" location="0x50004000"/>
-<peripheralInstance derived_from="SCT0" determined="infoFile" id="SCT0" location="0x5000c000"/>
-<peripheralInstance derived_from="SCT1" determined="infoFile" id="SCT1" location="0x5000e000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0xa0000000"/>
-<peripheralInstance derived_from="PINT" determined="infoFile" id="PINT" location="0xa0004000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc11u68_project.tmpl b/workspace_tools/export/codered_lpc11u68_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc11u68_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc1549_cproject.tmpl b/workspace_tools/export/codered_lpc1549_cproject.tmpl
deleted file mode 100644
index 73529ef..0000000
--- a/workspace_tools/export/codered_lpc1549_cproject.tmpl
+++ /dev/null
@@ -1,69 +0,0 @@
-{% extends "codered_cproject_cortexm3_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc15xx.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC15xx_256K.cfx" property_3="NXP" property_4="LPC1549" property_count="5" version="70200"/>
-<infoList vendor="NXP">
-<info chip="LPC1549" connectscript="LPC15RunBootRomConnect.scp" flash_driver="LPC15xx_256K.cfx" match_id="0x0" name="LPC1549" resetscript="LPC15RunBootRomReset.scp" stub="crt_emu_cm3_gen">
-<chip>
-<name>LPC1549</name>
-<family>LPC15xx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash256" location="0x0" size="0x40000"/>
-<memoryInstance derived_from="RAM" id="Ram0_16" location="0x2000000" size="0x4000"/>
-<memoryInstance derived_from="RAM" id="Ram1_16" location="0x2004000" size="0x4000"/>
-<memoryInstance derived_from="RAM" id="Ram2_4" location="0x2008000" size="0x1000"/>
-<peripheralInstance derived_from="LPC15_MPU" determined="infoFile" id="MPU" location="0xe000ed90"/>
-<peripheralInstance derived_from="LPC15_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="LPC15_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="LPC15_ITM" determined="infoFile" id="ITM" location="0xe0000000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x1c000000"/>
-<peripheralInstance derived_from="DMA" determined="infoFile" id="DMA" location="0x1c004000"/>
-<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x1c00c000"/>
-<peripheralInstance derived_from="CRC" determined="infoFile" id="CRC" location="0x1c010000"/>
-<peripheralInstance derived_from="SCT0" determined="infoFile" id="SCT0" location="0x1c018000"/>
-<peripheralInstance derived_from="SCT1" determined="infoFile" id="SCT1" location="0x1c01c000"/>
-<peripheralInstance derived_from="SCT2" determined="infoFile" id="SCT2" location="0x1c020000"/>
-<peripheralInstance derived_from="SCT3" determined="infoFile" id="SCT3" location="0x1c024000"/>
-<peripheralInstance derived_from="ADC0" determined="infoFile" id="ADC0" location="0x40000000"/>
-<peripheralInstance derived_from="DAC" determined="infoFile" id="DAC" location="0x40004000"/>
-<peripheralInstance derived_from="ACMP" determined="infoFile" id="ACMP" location="0x40008000"/>
-<peripheralInstance derived_from="INMUX" determined="infoFile" id="INMUX" location="0x40014000"/>
-<peripheralInstance derived_from="RTC" determined="infoFile" id="RTC" location="0x40028000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x4002c000"/>
-<peripheralInstance derived_from="SWM" determined="infoFile" id="SWM" location="0x40038000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x4003c000"/>
-<peripheralInstance derived_from="USART0" determined="infoFile" id="USART0" location="0x40040000"/>
-<peripheralInstance derived_from="USART1" determined="infoFile" id="USART1" location="0x40044000"/>
-<peripheralInstance derived_from="SPI0" determined="infoFile" id="SPI0" location="0x40048000"/>
-<peripheralInstance derived_from="SPI1" determined="infoFile" id="SPI1" location="0x4004c000"/>
-<peripheralInstance derived_from="I2C0" determined="infoFile" id="I2C0" location="0x40050000"/>
-<peripheralInstance derived_from="QEI" determined="infoFile" id="QEI" location="0x40058000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40074000"/>
-<peripheralInstance derived_from="ADC1" determined="infoFile" id="ADC1" location="0x40080000"/>
-<peripheralInstance derived_from="MRT" determined="infoFile" id="MRT" location="0x400a0000"/>
-<peripheralInstance derived_from="PINT" determined="infoFile" id="PINT" location="0x400a4000"/>
-<peripheralInstance derived_from="GINT0" determined="infoFile" id="GINT0" location="0x400a8000"/>
-<peripheralInstance derived_from="GINT1" determined="infoFile" id="GINT1" location="0x400ac000"/>
-<peripheralInstance derived_from="RIT" determined="infoFile" id="RIT" location="0x400b4000"/>
-<peripheralInstance derived_from="SCTIPU" determined="infoFile" id="SCTIPU" location="0x400b8000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x400bc000"/>
-<peripheralInstance derived_from="USART2" determined="infoFile" id="USART2" location="0x400c0000"/>
-<peripheralInstance derived_from="C-CAN0" determined="infoFile" id="C-CAN0" location="0x400f0000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x400f8000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m3">Cortex-M3</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc1549_project.tmpl b/workspace_tools/export/codered_lpc1549_project.tmpl
deleted file mode 100755
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc1549_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc1768_cproject.tmpl b/workspace_tools/export/codered_lpc1768_cproject.tmpl
deleted file mode 100644
index b39438a..0000000
--- a/workspace_tools/export/codered_lpc1768_cproject.tmpl
+++ /dev/null
@@ -1,79 +0,0 @@
-{% extends "codered_cproject_cortexm3_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc176x.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC1768" property_count="5" version="1"/>
-<infoList vendor="NXP">
-<info chip="LPC1768" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC1768" package="lpc17_lqfp100.xml">
-<chip>
-<name>LPC1768</name>
-<family>LPC17xx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
-<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
-<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
-<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
-<peripheralInstance derived_from="LPC17_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
-<peripheralInstance derived_from="LPC17_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
-<peripheralInstance derived_from="LPC17_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
-<peripheralInstance derived_from="LPC17_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
-<peripheralInstance derived_from="LPC17_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
-<peripheralInstance derived_from="LPC17xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
-<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
-<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
-<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
-<peripheralInstance derived_from="LPC17_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
-<peripheralInstance derived_from="LPC17_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
-<peripheralInstance derived_from="LPC17_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
-<peripheralInstance derived_from="LPC17_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
-<peripheralInstance derived_from="LPC17_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
-<peripheralInstance derived_from="LPC17_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
-<peripheralInstance derived_from="LPC17_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
-<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
-<peripheralInstance derived_from="LPC17_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
-<peripheralInstance derived_from="LPC17_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
-<peripheralInstance derived_from="LPC17_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC17_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC17_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
-<peripheralInstance derived_from="LPC1x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
-<peripheralInstance derived_from="LPC17_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
-<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
-<peripheralInstance derived_from="LPC17_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
-<peripheralInstance derived_from="LPC17_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
-<peripheralInstance derived_from="LPC17_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
-<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
-<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
-<peripheralInstance derived_from="LPC17_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m3">Cortex-M3</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc1768_project.tmpl b/workspace_tools/export/codered_lpc1768_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc1768_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc4088_cproject.tmpl b/workspace_tools/export/codered_lpc4088_cproject.tmpl
deleted file mode 100644
index 35ffa7c..0000000
--- a/workspace_tools/export/codered_lpc4088_cproject.tmpl
+++ /dev/null
@@ -1,1922 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- <?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC177x_8x_407x_8x_512.cfx" property_3="NXP" property_4="LPC4088" property_count="5" version="1"/>
-<infoList vendor="NXP"><info chip="LPC4088" flash_driver="LPC177x_8x_407x_8x_512.cfx" match_id="0x481D3F47" name="LPC4088" stub="crt_emu_cm3_nxp"><chip><name>LPC4088</name>
-<family>LPC407x_8x</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash512" location="0x0" size="0x80000"/>
-<memoryInstance derived_from="RAM" id="RamLoc64" location="0x10000000" size="0x10000"/>
-<memoryInstance derived_from="RAM" id="RamPeriph32" location="0x20000000" size="0x8000"/>
-<prog_flash blocksz="0x1000" location="0x0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
-<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
-<peripheralInstance derived_from="V7M_MPU" id="MPU" location="0xe000ed90"/>
-<peripheralInstance derived_from="V7M_NVIC" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V7M_DCR" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="V7M_ITM" id="ITM" location="0xe0000000"/>
-<peripheralInstance derived_from="FLASHCTRL" id="FLASHCTRL" location="0x200000"/>
-<peripheralInstance derived_from="GPDMA" id="GPDMA" location="0x20080000"/>
-<peripheralInstance derived_from="ETHERNET" id="ETHERNET" location="0x20084000"/>
-<peripheralInstance derived_from="LCD" id="LCD" location="0x20088000"/>
-<peripheralInstance derived_from="USB" id="USB" location="0x2008c000"/>
-<peripheralInstance derived_from="CRC" id="CRC" location="0x20090000"/>
-<peripheralInstance derived_from="GPIO" id="GPIO" location="0x20098000"/>
-<peripheralInstance derived_from="EMC" id="EMC" location="0x2009c000"/>
-<peripheralInstance derived_from="WWDT" id="WWDT" location="0x40000000"/>
-<peripheralInstance derived_from="TIMER0" id="TIMER0" location="0x40004000"/>
-<peripheralInstance derived_from="TIMER1" id="TIMER1" location="0x40008000"/>
-<peripheralInstance derived_from="UART0" id="UART0" location="0x4000c000"/>
-<peripheralInstance derived_from="UART1" id="UART1" location="0x40010000"/>
-<peripheralInstance derived_from="PWM0" id="PWM0" location="0x40014000"/>
-<peripheralInstance derived_from="PWM1" id="PWM1" location="0x40018000"/>
-<peripheralInstance derived_from="I2C0" id="I2C0" location="0x4001c000"/>
-<peripheralInstance derived_from="COMPARATOR" id="COMPARATOR" location="0x40020000"/>
-<peripheralInstance derived_from="RTC" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="GPIOINT" id="GPIOINT" location="0x40028080"/>
-<peripheralInstance derived_from="IOCON" id="IOCON" location="0x4002c000"/>
-<peripheralInstance derived_from="SSP1" id="SSP1" location="0x40030000"/>
-<peripheralInstance derived_from="ADC" id="ADC" location="0x40034000"/>
-<peripheralInstance derived_from="CANAFRAM" id="CANAFRAM" location="0x40038000"/>
-<peripheralInstance derived_from="CANAF" id="CANAF" location="0x4003c000"/>
-<peripheralInstance derived_from="CCAN" id="CCAN" location="0x40040000"/>
-<peripheralInstance derived_from="CAN1" id="CAN1" location="0x40044000"/>
-<peripheralInstance derived_from="CAN2" id="CAN2" location="0x40048000"/>
-<peripheralInstance derived_from="I2C1" id="I2C1" location="0x4005c000"/>
-<peripheralInstance derived_from="SSP0" id="SSP0" location="0x40088000"/>
-<peripheralInstance derived_from="DAC" id="DAC" location="0x4008c000"/>
-<peripheralInstance derived_from="TIMER2" id="TIMER2" location="0x40090000"/>
-<peripheralInstance derived_from="TIMER3" id="TIMER3" location="0x40094000"/>
-<peripheralInstance derived_from="UART2" id="UART2" location="0x40098000"/>
-<peripheralInstance derived_from="UART3" id="UART3" location="0x4009c000"/>
-<peripheralInstance derived_from="I2C2" id="I2C2" location="0x400a0000"/>
-<peripheralInstance derived_from="UART4" id="UART4" location="0x400a4000"/>
-<peripheralInstance derived_from="I2S" id="I2S" location="0x400a8000"/>
-<peripheralInstance derived_from="SSP2" id="SSP2" location="0x400ac000"/>
-<peripheralInstance derived_from="MCPWM" id="MCPWM" location="0x400b8000"/>
-<peripheralInstance derived_from="QEI" id="QEI" location="0x400bc000"/>
-<peripheralInstance derived_from="SDMMC" id="SDMMC" location="0x400c0000"/>
-<peripheralInstance derived_from="SYSCON" id="SYSCON" location="0x400fc000"/>
-</chip>
-<processor><name gcc_name="cortex-m4">Cortex-M4</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpc407x_8x_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>
-
-
-
diff --git a/workspace_tools/export/codered_lpc4088_dm_cproject.tmpl b/workspace_tools/export/codered_lpc4088_dm_cproject.tmpl
deleted file mode 100644
index 35ffa7c..0000000
--- a/workspace_tools/export/codered_lpc4088_dm_cproject.tmpl
+++ /dev/null
@@ -1,1922 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- <?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC177x_8x_407x_8x_512.cfx" property_3="NXP" property_4="LPC4088" property_count="5" version="1"/>
-<infoList vendor="NXP"><info chip="LPC4088" flash_driver="LPC177x_8x_407x_8x_512.cfx" match_id="0x481D3F47" name="LPC4088" stub="crt_emu_cm3_nxp"><chip><name>LPC4088</name>
-<family>LPC407x_8x</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash512" location="0x0" size="0x80000"/>
-<memoryInstance derived_from="RAM" id="RamLoc64" location="0x10000000" size="0x10000"/>
-<memoryInstance derived_from="RAM" id="RamPeriph32" location="0x20000000" size="0x8000"/>
-<prog_flash blocksz="0x1000" location="0x0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
-<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
-<peripheralInstance derived_from="V7M_MPU" id="MPU" location="0xe000ed90"/>
-<peripheralInstance derived_from="V7M_NVIC" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V7M_DCR" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="V7M_ITM" id="ITM" location="0xe0000000"/>
-<peripheralInstance derived_from="FLASHCTRL" id="FLASHCTRL" location="0x200000"/>
-<peripheralInstance derived_from="GPDMA" id="GPDMA" location="0x20080000"/>
-<peripheralInstance derived_from="ETHERNET" id="ETHERNET" location="0x20084000"/>
-<peripheralInstance derived_from="LCD" id="LCD" location="0x20088000"/>
-<peripheralInstance derived_from="USB" id="USB" location="0x2008c000"/>
-<peripheralInstance derived_from="CRC" id="CRC" location="0x20090000"/>
-<peripheralInstance derived_from="GPIO" id="GPIO" location="0x20098000"/>
-<peripheralInstance derived_from="EMC" id="EMC" location="0x2009c000"/>
-<peripheralInstance derived_from="WWDT" id="WWDT" location="0x40000000"/>
-<peripheralInstance derived_from="TIMER0" id="TIMER0" location="0x40004000"/>
-<peripheralInstance derived_from="TIMER1" id="TIMER1" location="0x40008000"/>
-<peripheralInstance derived_from="UART0" id="UART0" location="0x4000c000"/>
-<peripheralInstance derived_from="UART1" id="UART1" location="0x40010000"/>
-<peripheralInstance derived_from="PWM0" id="PWM0" location="0x40014000"/>
-<peripheralInstance derived_from="PWM1" id="PWM1" location="0x40018000"/>
-<peripheralInstance derived_from="I2C0" id="I2C0" location="0x4001c000"/>
-<peripheralInstance derived_from="COMPARATOR" id="COMPARATOR" location="0x40020000"/>
-<peripheralInstance derived_from="RTC" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="GPIOINT" id="GPIOINT" location="0x40028080"/>
-<peripheralInstance derived_from="IOCON" id="IOCON" location="0x4002c000"/>
-<peripheralInstance derived_from="SSP1" id="SSP1" location="0x40030000"/>
-<peripheralInstance derived_from="ADC" id="ADC" location="0x40034000"/>
-<peripheralInstance derived_from="CANAFRAM" id="CANAFRAM" location="0x40038000"/>
-<peripheralInstance derived_from="CANAF" id="CANAF" location="0x4003c000"/>
-<peripheralInstance derived_from="CCAN" id="CCAN" location="0x40040000"/>
-<peripheralInstance derived_from="CAN1" id="CAN1" location="0x40044000"/>
-<peripheralInstance derived_from="CAN2" id="CAN2" location="0x40048000"/>
-<peripheralInstance derived_from="I2C1" id="I2C1" location="0x4005c000"/>
-<peripheralInstance derived_from="SSP0" id="SSP0" location="0x40088000"/>
-<peripheralInstance derived_from="DAC" id="DAC" location="0x4008c000"/>
-<peripheralInstance derived_from="TIMER2" id="TIMER2" location="0x40090000"/>
-<peripheralInstance derived_from="TIMER3" id="TIMER3" location="0x40094000"/>
-<peripheralInstance derived_from="UART2" id="UART2" location="0x40098000"/>
-<peripheralInstance derived_from="UART3" id="UART3" location="0x4009c000"/>
-<peripheralInstance derived_from="I2C2" id="I2C2" location="0x400a0000"/>
-<peripheralInstance derived_from="UART4" id="UART4" location="0x400a4000"/>
-<peripheralInstance derived_from="I2S" id="I2S" location="0x400a8000"/>
-<peripheralInstance derived_from="SSP2" id="SSP2" location="0x400ac000"/>
-<peripheralInstance derived_from="MCPWM" id="MCPWM" location="0x400b8000"/>
-<peripheralInstance derived_from="QEI" id="QEI" location="0x400bc000"/>
-<peripheralInstance derived_from="SDMMC" id="SDMMC" location="0x400c0000"/>
-<peripheralInstance derived_from="SYSCON" id="SYSCON" location="0x400fc000"/>
-</chip>
-<processor><name gcc_name="cortex-m4">Cortex-M4</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpc407x_8x_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>
-
-
-
diff --git a/workspace_tools/export/codered_lpc4088_dm_project.tmpl b/workspace_tools/export/codered_lpc4088_dm_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc4088_dm_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc4088_project.tmpl b/workspace_tools/export/codered_lpc4088_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc4088_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc4330_m4_cproject.tmpl b/workspace_tools/export/codered_lpc4330_m4_cproject.tmpl
deleted file mode 100644
index bd1f896..0000000
--- a/workspace_tools/export/codered_lpc4330_m4_cproject.tmpl
+++ /dev/null
@@ -1,1924 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- <?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC4330" property_count="5" version="1"/>
-<infoList vendor="NXP">
-<info chip="LPC4330" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC4330" package="LPC43_lqfp100.xml">
-<chip>
-<name>LPC4330</name>
-<family>LPC43xx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
-<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
-<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
-<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
-<peripheralInstance derived_from="LPC43_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
-<peripheralInstance derived_from="LPC43_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
-<peripheralInstance derived_from="LPC43_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
-<peripheralInstance derived_from="LPC43_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
-<peripheralInstance derived_from="LPC43_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
-<peripheralInstance derived_from="LPC43xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
-<peripheralInstance derived_from="LPC43xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
-<peripheralInstance derived_from="LPC43xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
-<peripheralInstance derived_from="LPC43xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
-<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
-<peripheralInstance derived_from="LPC43_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
-<peripheralInstance derived_from="LPC43_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
-<peripheralInstance derived_from="LPC43_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
-<peripheralInstance derived_from="LPC43_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
-<peripheralInstance derived_from="LPC43_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
-<peripheralInstance derived_from="LPC43_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
-<peripheralInstance derived_from="LPC43_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
-<peripheralInstance derived_from="LPC43_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
-<peripheralInstance derived_from="LPC43_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
-<peripheralInstance derived_from="LPC43_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
-<peripheralInstance derived_from="LPC43_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
-<peripheralInstance derived_from="LPC43_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
-<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
-<peripheralInstance derived_from="LPC43_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
-<peripheralInstance derived_from="LPC43_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
-<peripheralInstance derived_from="LPC43_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC43_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC43_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
-<peripheralInstance derived_from="LPC4x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
-<peripheralInstance derived_from="LPC43_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
-<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
-<peripheralInstance derived_from="LPC43_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
-<peripheralInstance derived_from="LPC43_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
-<peripheralInstance derived_from="LPC43_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
-<peripheralInstance derived_from="LPC43_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
-<peripheralInstance derived_from="LPC43_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
-<peripheralInstance derived_from="LPC43_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m4">Cortex-M4</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>
-
-
diff --git a/workspace_tools/export/codered_lpc4330_m4_project.tmpl b/workspace_tools/export/codered_lpc4330_m4_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc4330_m4_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpc824_cproject.tmpl b/workspace_tools/export/codered_lpc824_cproject.tmpl
deleted file mode 100644
index ee22cb6..0000000
--- a/workspace_tools/export/codered_lpc824_cproject.tmpl
+++ /dev/null
@@ -1,53 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}startup_LPC824_CR.cpp{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC800_32.cfx" property_3="NXP" property_4="LPC824" property_count="5" version="70200"/>
-<infoList vendor="NXP"><info chip="LPC824" flash_driver="LPC800_32.cfx" match_id="0x0" name="LPC824" stub="crt_emu_cm3_gen"><chip><name>LPC824</name>
-<family>LPC82x</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash32" location="0x0" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40000000"/>
-<peripheralInstance derived_from="MRT" determined="infoFile" id="MRT" location="0x40004000"/>
-<peripheralInstance derived_from="WKT" determined="infoFile" id="WKT" location="0x40008000"/>
-<peripheralInstance derived_from="SWM" determined="infoFile" id="SWM" location="0x4000c000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40020000"/>
-<peripheralInstance derived_from="CMP" determined="infoFile" id="CMP" location="0x40024000"/>
-<peripheralInstance derived_from="DMATRIGMUX" determined="infoFile" id="DMATRIGMUX" location="0x40028000"/>
-<peripheralInstance derived_from="INPUTMUX" determined="infoFile" id="INPUTMUX" location="0x4002c000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="I2C0" determined="infoFile" id="I2C0" location="0x40050000"/>
-<peripheralInstance derived_from="I2C1" determined="infoFile" id="I2C1" location="0x40054000"/>
-<peripheralInstance derived_from="SPI0" determined="infoFile" id="SPI0" location="0x40058000"/>
-<peripheralInstance derived_from="SPI1" determined="infoFile" id="SPI1" location="0x4005c000"/>
-<peripheralInstance derived_from="USART0" determined="infoFile" id="USART0" location="0x40064000"/>
-<peripheralInstance derived_from="USART1" determined="infoFile" id="USART1" location="0x40068000"/>
-<peripheralInstance derived_from="USART2" determined="infoFile" id="USART2" location="0x4006c000"/>
-<peripheralInstance derived_from="I2C2" determined="infoFile" id="I2C2" location="0x40070000"/>
-<peripheralInstance derived_from="I2C3" determined="infoFile" id="I2C3" location="0x40074000"/>
-<peripheralInstance derived_from="CRC" determined="infoFile" id="CRC" location="0x50000000"/>
-<peripheralInstance derived_from="SCT" determined="infoFile" id="SCT" location="0x50004000"/>
-<peripheralInstance derived_from="DMA" determined="infoFile" id="DMA" location="0x50008000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0xa0000000"/>
-<peripheralInstance derived_from="PIN-INT" determined="infoFile" id="PIN-INT" location="0xa0004000"/>
-</chip>
-<processor><name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC82x_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpc824_project.tmpl b/workspace_tools/export/codered_lpc824_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpc824_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_lpccappuccino_cproject.tmpl b/workspace_tools/export/codered_lpccappuccino_cproject.tmpl
deleted file mode 100644
index 69256f8..0000000
--- a/workspace_tools/export/codered_lpccappuccino_cproject.tmpl
+++ /dev/null
@@ -1,51 +0,0 @@
-{% extends "codered_cproject_cortexm0_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc11xx.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_2="LPC11_12_13_64K_8K.cfx" property_3="NXP" property_4="LPC11U37/501" property_count="5" version="70002"/>
-<infoList vendor="NXP">
-<info chip="LPC11U37/501" flash_driver="LPC11_12_13_64K_8K.cfx" match_id="0x0001BC40" name="LPC11U37/501" stub="crt_emu_lpc11_13_nxp">
-<chip>
-<name>LPC11U37/501</name>
-<family>LPC11Uxx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="12MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash64" location="0x0" size="0x10000"/>
-<memoryInstance derived_from="RAM" id="RamLoc8" location="0x10000000" size="0x2000"/>
-<memoryInstance derived_from="RAM" id="RamUsb2" location="0x20004000" size="0x800"/>
-<peripheralInstance derived_from="V6M_NVIC" determined="infoFile" id="NVIC" location="0xe000e000"/>
-<peripheralInstance derived_from="V6M_DCR" determined="infoFile" id="DCR" location="0xe000edf0"/>
-<peripheralInstance derived_from="I2C" determined="infoFile" id="I2C" location="0x40000000"/>
-<peripheralInstance derived_from="WWDT" determined="infoFile" id="WWDT" location="0x40004000"/>
-<peripheralInstance derived_from="USART" determined="infoFile" id="USART" location="0x40008000"/>
-<peripheralInstance derived_from="CT16B0" determined="infoFile" id="CT16B0" location="0x4000c000"/>
-<peripheralInstance derived_from="CT16B1" determined="infoFile" id="CT16B1" location="0x40010000"/>
-<peripheralInstance derived_from="CT32B0" determined="infoFile" id="CT32B0" location="0x40014000"/>
-<peripheralInstance derived_from="CT32B1" determined="infoFile" id="CT32B1" location="0x40018000"/>
-<peripheralInstance derived_from="ADC" determined="infoFile" id="ADC" location="0x4001c000"/>
-<peripheralInstance derived_from="PMU" determined="infoFile" id="PMU" location="0x40038000"/>
-<peripheralInstance derived_from="FLASHCTRL" determined="infoFile" id="FLASHCTRL" location="0x4003c000"/>
-<peripheralInstance derived_from="SSP0" determined="infoFile" id="SSP0" location="0x40040000"/>
-<peripheralInstance derived_from="IOCON" determined="infoFile" id="IOCON" location="0x40044000"/>
-<peripheralInstance derived_from="SYSCON" determined="infoFile" id="SYSCON" location="0x40048000"/>
-<peripheralInstance derived_from="GPIO-PIN-INT" determined="infoFile" id="GPIO-PIN-INT" location="0x4004c000"/>
-<peripheralInstance derived_from="SSP1" determined="infoFile" id="SSP1" location="0x40058000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT0" determined="infoFile" id="GPIO-GROUP-INT0" location="0x4005c000"/>
-<peripheralInstance derived_from="GPIO-GROUP-INT1" determined="infoFile" id="GPIO-GROUP-INT1" location="0x40060000"/>
-<peripheralInstance derived_from="USB" determined="infoFile" id="USB" location="0x40080000"/>
-<peripheralInstance derived_from="GPIO-PORT" determined="infoFile" id="GPIO-PORT" location="0x50000000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m0">Cortex-M0</name>
-<family>Cortex-M</family>
-</processor>
-<link href="LPC11Uxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_lpccappuccino_project.tmpl b/workspace_tools/export/codered_lpccappuccino_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_lpccappuccino_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/codered_project_common.tmpl b/workspace_tools/export/codered_project_common.tmpl
deleted file mode 100644
index 42ef438..0000000
--- a/workspace_tools/export/codered_project_common.tmpl
+++ /dev/null
@@ -1,84 +0,0 @@
-
-
- {{name}}
- This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-Code-Red
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/{{name}}/Debug}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
-
diff --git a/workspace_tools/export/codered_ublox_c027_cproject.tmpl b/workspace_tools/export/codered_ublox_c027_cproject.tmpl
deleted file mode 100644
index b39438a..0000000
--- a/workspace_tools/export/codered_ublox_c027_cproject.tmpl
+++ /dev/null
@@ -1,79 +0,0 @@
-{% extends "codered_cproject_cortexm3_common.tmpl" %}
-
-{% block startup_file %}cr_startup_lpc176x.c{% endblock %}
-
-{% block cpu_config %}<?xml version="1.0" encoding="UTF-8"?>
-<TargetConfig>
-<Properties property_0="" property_1="" property_2="" property_3="NXP" property_4="LPC1768" property_count="5" version="1"/>
-<infoList vendor="NXP">
-<info chip="LPC1768" match_id="0x00013f37,0x26013F37,0x26113F37" name="LPC1768" package="lpc17_lqfp100.xml">
-<chip>
-<name>LPC1768</name>
-<family>LPC17xx</family>
-<vendor>NXP (formerly Philips)</vendor>
-<reset board="None" core="Real" sys="Real"/>
-<clock changeable="TRUE" freq="20MHz" is_accurate="TRUE"/>
-<memory can_program="true" id="Flash" is_ro="true" type="Flash"/>
-<memory id="RAM" type="RAM"/>
-<memory id="Periph" is_volatile="true" type="Peripheral"/>
-<memoryInstance derived_from="Flash" id="MFlash512" location="0x00000000" size="0x80000"/>
-<memoryInstance derived_from="RAM" id="RamLoc32" location="0x10000000" size="0x8000"/>
-<memoryInstance derived_from="RAM" id="RamAHB32" location="0x2007c000" size="0x8000"/>
-<prog_flash blocksz="0x1000" location="0" maxprgbuff="0x1000" progwithcode="TRUE" size="0x10000"/>
-<prog_flash blocksz="0x8000" location="0x10000" maxprgbuff="0x1000" progwithcode="TRUE" size="0x70000"/>
-<peripheralInstance derived_from="LPC17_NVIC" determined="infoFile" id="NVIC" location="0xE000E000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM0&0x1" id="TIMER0" location="0x40004000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM1&0x1" id="TIMER1" location="0x40008000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM2&0x1" id="TIMER2" location="0x40090000"/>
-<peripheralInstance derived_from="TIMER" determined="infoFile" enable="SYSCTL.PCONP.PCTIM3&0x1" id="TIMER3" location="0x40094000"/>
-<peripheralInstance derived_from="LPC17_RIT" determined="infoFile" enable="SYSCTL.PCONP.PCRIT&0x1" id="RIT" location="0x400B0000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO0" location="0x2009C000"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO1" location="0x2009C020"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO2" location="0x2009C040"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO3" location="0x2009C060"/>
-<peripheralInstance derived_from="FGPIO" determined="infoFile" enable="SYSCTL.PCONP.PCGPIO&0x1" id="GPIO4" location="0x2009C080"/>
-<peripheralInstance derived_from="LPC17_I2S" determined="infoFile" enable="SYSCTL.PCONP&0x08000000" id="I2S" location="0x400A8000"/>
-<peripheralInstance derived_from="LPC17_SYSCTL" determined="infoFile" id="SYSCTL" location="0x400FC000"/>
-<peripheralInstance derived_from="LPC17_DAC" determined="infoFile" enable="PCB.PINSEL1.P0_26&0x2=2" id="DAC" location="0x4008C000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART0&0x1" id="UART0" location="0x4000C000"/>
-<peripheralInstance derived_from="LPC17xx_UART_MODEM" determined="infoFile" enable="SYSCTL.PCONP.PCUART1&0x1" id="UART1" location="0x40010000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART2&0x1" id="UART2" location="0x40098000"/>
-<peripheralInstance derived_from="LPC17xx_UART" determined="infoFile" enable="SYSCTL.PCONP.PCUART3&0x1" id="UART3" location="0x4009C000"/>
-<peripheralInstance derived_from="SPI" determined="infoFile" enable="SYSCTL.PCONP.PCSPI&0x1" id="SPI" location="0x40020000"/>
-<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP0&0x1" id="SSP0" location="0x40088000"/>
-<peripheralInstance derived_from="LPC17_SSP" determined="infoFile" enable="SYSCTL.PCONP.PCSSP1&0x1" id="SSP1" location="0x40030000"/>
-<peripheralInstance derived_from="LPC17_ADC" determined="infoFile" enable="SYSCTL.PCONP.PCAD&0x1" id="ADC" location="0x40034000"/>
-<peripheralInstance derived_from="LPC17_USBINTST" determined="infoFile" enable="USBCLKCTL.USBClkCtrl&0x12" id="USBINTSTAT" location="0x400fc1c0"/>
-<peripheralInstance derived_from="LPC17_USB_CLK_CTL" determined="infoFile" id="USBCLKCTL" location="0x5000cff4"/>
-<peripheralInstance derived_from="LPC17_USBDEV" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x12=0x12" id="USBDEV" location="0x5000C200"/>
-<peripheralInstance derived_from="LPC17_PWM" determined="infoFile" enable="SYSCTL.PCONP.PWM1&0x1" id="PWM" location="0x40018000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C0&0x1" id="I2C0" location="0x4001C000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C1&0x1" id="I2C1" location="0x4005C000"/>
-<peripheralInstance derived_from="LPC17_I2C" determined="infoFile" enable="SYSCTL.PCONP.PCI2C2&0x1" id="I2C2" location="0x400A0000"/>
-<peripheralInstance derived_from="LPC17_DMA" determined="infoFile" enable="SYSCTL.PCONP.PCGPDMA&0x1" id="DMA" location="0x50004000"/>
-<peripheralInstance derived_from="LPC17_ENET" determined="infoFile" enable="SYSCTL.PCONP.PCENET&0x1" id="ENET" location="0x50000000"/>
-<peripheralInstance derived_from="CM3_DCR" determined="infoFile" id="DCR" location="0xE000EDF0"/>
-<peripheralInstance derived_from="LPC17_PCB" determined="infoFile" id="PCB" location="0x4002c000"/>
-<peripheralInstance derived_from="LPC17_QEI" determined="infoFile" enable="SYSCTL.PCONP.PCQEI&0x1" id="QEI" location="0x400bc000"/>
-<peripheralInstance derived_from="LPC17_USBHOST" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x11=0x11" id="USBHOST" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC17_USBOTG" determined="infoFile" enable="USBCLKCTL.USBClkSt&0x1c=0x1c" id="USBOTG" location="0x5000C000"/>
-<peripheralInstance derived_from="LPC17_RTC" determined="infoFile" enable="SYSCTL.PCONP.PCRTC&0x1" id="RTC" location="0x40024000"/>
-<peripheralInstance derived_from="MPU" determined="infoFile" id="MPU" location="0xE000ED90"/>
-<peripheralInstance derived_from="LPC1x_WDT" determined="infoFile" id="WDT" location="0x40000000"/>
-<peripheralInstance derived_from="LPC17_FLASHCFG" determined="infoFile" id="FLASHACCEL" location="0x400FC000"/>
-<peripheralInstance derived_from="GPIO_INT" determined="infoFile" id="GPIOINTMAP" location="0x40028080"/>
-<peripheralInstance derived_from="LPC17_CANAFR" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANAFR" location="0x4003C000"/>
-<peripheralInstance derived_from="LPC17_CANCEN" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1|SYSCTL.PCONP.PCCAN2&0x1" id="CANCEN" location="0x40040000"/>
-<peripheralInstance derived_from="LPC17_CANWAKESLEEP" determined="infoFile" id="CANWAKESLEEP" location="0x400FC110"/>
-<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN1&0x1" id="CANCON1" location="0x40044000"/>
-<peripheralInstance derived_from="LPC17_CANCON" determined="infoFile" enable="SYSCTL.PCONP.PCCAN2&0x1" id="CANCON2" location="0x40048000"/>
-<peripheralInstance derived_from="LPC17_MCPWM" determined="infoFile" enable="SYSCTL.PCONP.PCMCPWM&0x1" id="MCPWM" location="0x400B8000"/>
-</chip>
-<processor>
-<name gcc_name="cortex-m3">Cortex-M3</name>
-<family>Cortex-M</family>
-</processor>
-<link href="nxp_lpcxxxx_peripheral.xme" show="embed" type="simple"/>
-</info>
-</infoList>
-</TargetConfig>{% endblock %}
diff --git a/workspace_tools/export/codered_ublox_c027_project.tmpl b/workspace_tools/export/codered_ublox_c027_project.tmpl
deleted file mode 100644
index d77c507..0000000
--- a/workspace_tools/export/codered_ublox_c027_project.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "codered_project_common.tmpl" %}
diff --git a/workspace_tools/export/coide.py b/workspace_tools/export/coide.py
deleted file mode 100755
index 50ecc38..0000000
--- a/workspace_tools/export/coide.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename
-
-
-class CoIDE(Exporter):
- NAME = 'CoIDE'
- TOOLCHAIN = 'GCC_ARM'
-
- TARGETS = [
- 'KL25Z',
- 'KL05Z',
- 'LPC1768',
- 'ARCH_PRO',
- 'ARCH_MAX',
- 'UBLOX_C027',
- 'NUCLEO_L053R8',
- 'NUCLEO_L152RE',
- 'NUCLEO_F030R8',
- 'NUCLEO_F042K6',
- 'NUCLEO_F070RB',
- 'NUCLEO_F072RB',
- 'NUCLEO_F091RC',
- 'NUCLEO_F103RB',
- 'NUCLEO_F302R8',
- 'NUCLEO_F303K8',
- 'NUCLEO_F303RE',
- 'NUCLEO_F334R8',
- 'NUCLEO_F401RE',
- 'NUCLEO_F410RB',
- 'NUCLEO_F411RE',
- 'NUCLEO_F446RE',
- 'DISCO_L053C8',
- 'DISCO_F051R8',
- 'DISCO_F100RB',
- 'DISCO_F303VC',
- 'DISCO_F334C8',
- 'DISCO_F401VC',
- 'DISCO_F407VG',
- 'DISCO_F429ZI',
- 'DISCO_F469NI',
- 'MTS_MDOT_F405RG',
- 'MTS_MDOT_F411RE',
- 'MOTE_L152RC',
- 'NZ32_SC151',
- ]
-
- # seems like CoIDE currently supports only one type
- FILE_TYPES = {
- 'c_sources':'1',
- 'cpp_sources':'1',
- 's_sources':'1'
- }
- FILE_TYPES2 = {
- 'headers':'1'
- }
-
- def generate(self):
- self.resources.win_to_unix()
- source_files = []
- for r_type, n in CoIDE.FILE_TYPES.iteritems():
- for file in getattr(self.resources, r_type):
- source_files.append({
- 'name': basename(file), 'type': n, 'path': file
- })
- header_files = []
- for r_type, n in CoIDE.FILE_TYPES2.iteritems():
- for file in getattr(self.resources, r_type):
- header_files.append({
- 'name': basename(file), 'type': n, 'path': file
- })
-
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- if self.resources.linker_script is None:
- self.resources.linker_script = ''
-
- ctx = {
- 'name': self.program_name,
- 'source_files': source_files,
- 'header_files': header_files,
- 'include_paths': self.resources.inc_dirs,
- 'scatter_file': self.resources.linker_script,
- 'library_paths': self.resources.lib_dirs,
- 'object_files': self.resources.objects,
- 'libraries': libraries,
- 'symbols': self.get_symbols()
- }
- target = self.target.lower()
-
- # Project file
- self.gen_file('coide_%s.coproj.tmpl' % target, ctx, '%s.coproj' % self.program_name)
diff --git a/workspace_tools/export/coide_arch_max.coproj.tmpl b/workspace_tools/export/coide_arch_max.coproj.tmpl
deleted file mode 100644
index a9824a6..0000000
--- a/workspace_tools/export/coide_arch_max.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_arch_pro.coproj.tmpl b/workspace_tools/export/coide_arch_pro.coproj.tmpl
deleted file mode 100644
index 687d38e..0000000
--- a/workspace_tools/export/coide_arch_pro.coproj.tmpl
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f051r8.coproj.tmpl b/workspace_tools/export/coide_disco_f051r8.coproj.tmpl
deleted file mode 100644
index ee4100b..0000000
--- a/workspace_tools/export/coide_disco_f051r8.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f100rb.coproj.tmpl b/workspace_tools/export/coide_disco_f100rb.coproj.tmpl
deleted file mode 100644
index f289ff5..0000000
--- a/workspace_tools/export/coide_disco_f100rb.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f303vc.coproj.tmpl b/workspace_tools/export/coide_disco_f303vc.coproj.tmpl
deleted file mode 100644
index fbe5820..0000000
--- a/workspace_tools/export/coide_disco_f303vc.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f334c8.coproj.tmpl b/workspace_tools/export/coide_disco_f334c8.coproj.tmpl
deleted file mode 100644
index d7dd7ec..0000000
--- a/workspace_tools/export/coide_disco_f334c8.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f401vc.coproj.tmpl b/workspace_tools/export/coide_disco_f401vc.coproj.tmpl
deleted file mode 100644
index 8056335..0000000
--- a/workspace_tools/export/coide_disco_f401vc.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f407vg.coproj.tmpl b/workspace_tools/export/coide_disco_f407vg.coproj.tmpl
deleted file mode 100644
index fbde49a..0000000
--- a/workspace_tools/export/coide_disco_f407vg.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_f429zi.coproj.tmpl b/workspace_tools/export/coide_disco_f429zi.coproj.tmpl
deleted file mode 100644
index cff2c62..0000000
--- a/workspace_tools/export/coide_disco_f429zi.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_disco_l053c8.coproj.tmpl b/workspace_tools/export/coide_disco_l053c8.coproj.tmpl
deleted file mode 100644
index 8857942..0000000
--- a/workspace_tools/export/coide_disco_l053c8.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_kl05z.coproj.tmpl b/workspace_tools/export/coide_kl05z.coproj.tmpl
deleted file mode 100644
index 3127e47..0000000
--- a/workspace_tools/export/coide_kl05z.coproj.tmpl
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_kl25z.coproj.tmpl b/workspace_tools/export/coide_kl25z.coproj.tmpl
deleted file mode 100644
index 7a9b745..0000000
--- a/workspace_tools/export/coide_kl25z.coproj.tmpl
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_lpc1768.coproj.tmpl b/workspace_tools/export/coide_lpc1768.coproj.tmpl
deleted file mode 100644
index 687d38e..0000000
--- a/workspace_tools/export/coide_lpc1768.coproj.tmpl
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_mote_l152rc.coproj.tmpl b/workspace_tools/export/coide_mote_l152rc.coproj.tmpl
deleted file mode 100755
index 91ec35e..0000000
--- a/workspace_tools/export/coide_mote_l152rc.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_mts_mdot_f405rg.coproj.tmpl b/workspace_tools/export/coide_mts_mdot_f405rg.coproj.tmpl
deleted file mode 100644
index a73727f..0000000
--- a/workspace_tools/export/coide_mts_mdot_f405rg.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_mts_mdot_f411re.coproj.tmpl b/workspace_tools/export/coide_mts_mdot_f411re.coproj.tmpl
deleted file mode 100644
index adcd169..0000000
--- a/workspace_tools/export/coide_mts_mdot_f411re.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f030r8.coproj.tmpl b/workspace_tools/export/coide_nucleo_f030r8.coproj.tmpl
deleted file mode 100644
index f6e121f..0000000
--- a/workspace_tools/export/coide_nucleo_f030r8.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f042k6.coproj.tmpl b/workspace_tools/export/coide_nucleo_f042k6.coproj.tmpl
deleted file mode 100644
index 8d150d0..0000000
--- a/workspace_tools/export/coide_nucleo_f042k6.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f070rb.coproj.tmpl b/workspace_tools/export/coide_nucleo_f070rb.coproj.tmpl
deleted file mode 100644
index 9756fd2..0000000
--- a/workspace_tools/export/coide_nucleo_f070rb.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f072rb.coproj.tmpl b/workspace_tools/export/coide_nucleo_f072rb.coproj.tmpl
deleted file mode 100644
index 3687c43..0000000
--- a/workspace_tools/export/coide_nucleo_f072rb.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f091rc.coproj.tmpl b/workspace_tools/export/coide_nucleo_f091rc.coproj.tmpl
deleted file mode 100644
index e77d738..0000000
--- a/workspace_tools/export/coide_nucleo_f091rc.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f103rb.coproj.tmpl b/workspace_tools/export/coide_nucleo_f103rb.coproj.tmpl
deleted file mode 100644
index ce2c8fd..0000000
--- a/workspace_tools/export/coide_nucleo_f103rb.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f302r8.coproj.tmpl b/workspace_tools/export/coide_nucleo_f302r8.coproj.tmpl
deleted file mode 100644
index 56d6cc7..0000000
--- a/workspace_tools/export/coide_nucleo_f302r8.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f303re.coproj.tmpl b/workspace_tools/export/coide_nucleo_f303re.coproj.tmpl
deleted file mode 100644
index 0995181..0000000
--- a/workspace_tools/export/coide_nucleo_f303re.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f334r8.coproj.tmpl b/workspace_tools/export/coide_nucleo_f334r8.coproj.tmpl
deleted file mode 100644
index d7dd7ec..0000000
--- a/workspace_tools/export/coide_nucleo_f334r8.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f401re.coproj.tmpl b/workspace_tools/export/coide_nucleo_f401re.coproj.tmpl
deleted file mode 100644
index ffc886f..0000000
--- a/workspace_tools/export/coide_nucleo_f401re.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f410rb.coproj.tmpl b/workspace_tools/export/coide_nucleo_f410rb.coproj.tmpl
deleted file mode 100644
index 92f3917..0000000
--- a/workspace_tools/export/coide_nucleo_f410rb.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f411re.coproj.tmpl b/workspace_tools/export/coide_nucleo_f411re.coproj.tmpl
deleted file mode 100644
index fb222fb..0000000
--- a/workspace_tools/export/coide_nucleo_f411re.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_f446re.coproj.tmpl b/workspace_tools/export/coide_nucleo_f446re.coproj.tmpl
deleted file mode 100644
index 91bbd30..0000000
--- a/workspace_tools/export/coide_nucleo_f446re.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_l053r8.coproj.tmpl b/workspace_tools/export/coide_nucleo_l053r8.coproj.tmpl
deleted file mode 100644
index fd18d26..0000000
--- a/workspace_tools/export/coide_nucleo_l053r8.coproj.tmpl
+++ /dev/null
@@ -1,168 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nucleo_l152re.coproj.tmpl b/workspace_tools/export/coide_nucleo_l152re.coproj.tmpl
deleted file mode 100644
index cb98166..0000000
--- a/workspace_tools/export/coide_nucleo_l152re.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_nz32_sc151.coproj.tmpl b/workspace_tools/export/coide_nz32_sc151.coproj.tmpl
deleted file mode 100644
index 7341c95..0000000
--- a/workspace_tools/export/coide_nz32_sc151.coproj.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/coide_ublox_c027.coproj.tmpl b/workspace_tools/export/coide_ublox_c027.coproj.tmpl
deleted file mode 100644
index 687d38e..0000000
--- a/workspace_tools/export/coide_ublox_c027.coproj.tmpl
+++ /dev/null
@@ -1,88 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- {% for path in include_paths %} {% endfor %}
-
-
- {% for s in symbols %} {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
- {% for lib in libraries %}
-
- {% endfor %}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {% for file in source_files %}
-
- {% endfor %}
- {% for file in header_files %}
-
- {% endfor %}
-
-
diff --git a/workspace_tools/export/ds5_5.py b/workspace_tools/export/ds5_5.py
deleted file mode 100644
index b6b9d3e..0000000
--- a/workspace_tools/export/ds5_5.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import basename
-
-
-class DS5_5(Exporter):
- NAME = 'DS5'
-
- TARGETS = [
- 'LPC1768',
- 'LPC11U24',
- 'LPC812',
- 'UBLOX_C027',
- 'ARCH_PRO',
- 'RZ_A1H',
- ]
-
- USING_MICROLIB = [
- 'LPC812',
- ]
-
- FILE_TYPES = {
- 'c_sources':'1',
- 'cpp_sources':'8',
- 's_sources':'2'
- }
-
- def get_toolchain(self):
- return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
-
- def generate(self):
- source_files = []
- for r_type, n in DS5_5.FILE_TYPES.iteritems():
- for file in getattr(self.resources, r_type):
- source_files.append({
- 'name': basename(file), 'type': n, 'path': file
- })
-
- ctx = {
- 'name': self.program_name,
- 'include_paths': self.resources.inc_dirs,
- 'scatter_file': self.resources.linker_script,
- 'object_files': self.resources.objects + self.resources.libraries,
- 'source_files': source_files,
- 'symbols': self.get_symbols()
- }
- target = self.target.lower()
-
- # Project file
- self.gen_file('ds5_5_%s.project.tmpl' % target, ctx, '.project')
- self.gen_file('ds5_5_%s.cproject.tmpl' % target, ctx, '.cproject')
- self.gen_file('ds5_5_%s.launch.tmpl' % target, ctx, 'ds5_%s.launch' % target)
diff --git a/workspace_tools/export/ds5_5_arch_pro.cproject.tmpl b/workspace_tools/export/ds5_5_arch_pro.cproject.tmpl
deleted file mode 100644
index 5a5fff4..0000000
--- a/workspace_tools/export/ds5_5_arch_pro.cproject.tmpl
+++ /dev/null
@@ -1,115 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_arch_pro.launch.tmpl b/workspace_tools/export/ds5_5_arch_pro.launch.tmpl
deleted file mode 100644
index 74bc964..0000000
--- a/workspace_tools/export/ds5_5_arch_pro.launch.tmpl
+++ /dev/null
@@ -1,111 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_arch_pro.project.tmpl b/workspace_tools/export/ds5_5_arch_pro.project.tmpl
deleted file mode 100644
index 4f892f3..0000000
--- a/workspace_tools/export/ds5_5_arch_pro.project.tmpl
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
- {{name}}_ds5_lpc1768
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/ds5_lpc1768/Build}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/ds5_5_lpc11u24.cproject.tmpl b/workspace_tools/export/ds5_5_lpc11u24.cproject.tmpl
deleted file mode 100644
index 32adc51..0000000
--- a/workspace_tools/export/ds5_5_lpc11u24.cproject.tmpl
+++ /dev/null
@@ -1,103 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_lpc11u24.launch.tmpl b/workspace_tools/export/ds5_5_lpc11u24.launch.tmpl
deleted file mode 100644
index 868cba3..0000000
--- a/workspace_tools/export/ds5_5_lpc11u24.launch.tmpl
+++ /dev/null
@@ -1,111 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_lpc11u24.project.tmpl b/workspace_tools/export/ds5_5_lpc11u24.project.tmpl
deleted file mode 100644
index 8ad8f0a..0000000
--- a/workspace_tools/export/ds5_5_lpc11u24.project.tmpl
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
- {{name}}_ds5_lpc11u24
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/ds5_lpc11u24/Build}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/ds5_5_lpc1768.cproject.tmpl b/workspace_tools/export/ds5_5_lpc1768.cproject.tmpl
deleted file mode 100644
index 5a5fff4..0000000
--- a/workspace_tools/export/ds5_5_lpc1768.cproject.tmpl
+++ /dev/null
@@ -1,115 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_lpc1768.launch.tmpl b/workspace_tools/export/ds5_5_lpc1768.launch.tmpl
deleted file mode 100644
index 74bc964..0000000
--- a/workspace_tools/export/ds5_5_lpc1768.launch.tmpl
+++ /dev/null
@@ -1,111 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_lpc1768.project.tmpl b/workspace_tools/export/ds5_5_lpc1768.project.tmpl
deleted file mode 100644
index 4f892f3..0000000
--- a/workspace_tools/export/ds5_5_lpc1768.project.tmpl
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
- {{name}}_ds5_lpc1768
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/ds5_lpc1768/Build}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/ds5_5_lpc812.cproject.tmpl b/workspace_tools/export/ds5_5_lpc812.cproject.tmpl
deleted file mode 100644
index 96f0dc9..0000000
--- a/workspace_tools/export/ds5_5_lpc812.cproject.tmpl
+++ /dev/null
@@ -1,103 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_lpc812.launch.tmpl b/workspace_tools/export/ds5_5_lpc812.launch.tmpl
deleted file mode 100644
index 57ab0ba..0000000
--- a/workspace_tools/export/ds5_5_lpc812.launch.tmpl
+++ /dev/null
@@ -1,111 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_lpc812.project.tmpl b/workspace_tools/export/ds5_5_lpc812.project.tmpl
deleted file mode 100644
index 2e9c358..0000000
--- a/workspace_tools/export/ds5_5_lpc812.project.tmpl
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
- {{name}}_ds5_lpc812
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/ds5_lpc812/Build}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/ds5_5_rz_a1h.cproject.tmpl b/workspace_tools/export/ds5_5_rz_a1h.cproject.tmpl
deleted file mode 100644
index 44e66ca..0000000
--- a/workspace_tools/export/ds5_5_rz_a1h.cproject.tmpl
+++ /dev/null
@@ -1,115 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_rz_a1h.launch.tmpl b/workspace_tools/export/ds5_5_rz_a1h.launch.tmpl
deleted file mode 100644
index a4bee75..0000000
--- a/workspace_tools/export/ds5_5_rz_a1h.launch.tmpl
+++ /dev/null
@@ -1,111 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_rz_a1h.project.tmpl b/workspace_tools/export/ds5_5_rz_a1h.project.tmpl
deleted file mode 100644
index eee5209..0000000
--- a/workspace_tools/export/ds5_5_rz_a1h.project.tmpl
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
- {{name}}_ds5_rz_a1h
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/ds5_rz_a1h/Build}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/ds5_5_ublox_c027.cproject.tmpl b/workspace_tools/export/ds5_5_ublox_c027.cproject.tmpl
deleted file mode 100644
index 5a5fff4..0000000
--- a/workspace_tools/export/ds5_5_ublox_c027.cproject.tmpl
+++ /dev/null
@@ -1,115 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_ublox_c027.launch.tmpl b/workspace_tools/export/ds5_5_ublox_c027.launch.tmpl
deleted file mode 100644
index 74bc964..0000000
--- a/workspace_tools/export/ds5_5_ublox_c027.launch.tmpl
+++ /dev/null
@@ -1,111 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/ds5_5_ublox_c027.project.tmpl b/workspace_tools/export/ds5_5_ublox_c027.project.tmpl
deleted file mode 100644
index 4f892f3..0000000
--- a/workspace_tools/export/ds5_5_ublox_c027.project.tmpl
+++ /dev/null
@@ -1,83 +0,0 @@
-
-
- {{name}}_ds5_lpc1768
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
- ?name?
-
-
-
- org.eclipse.cdt.make.core.append_environment
- true
-
-
- org.eclipse.cdt.make.core.autoBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.buildArguments
-
-
-
- org.eclipse.cdt.make.core.buildCommand
- make
-
-
- org.eclipse.cdt.make.core.buildLocation
- ${workspace_loc:/ds5_lpc1768/Build}
-
-
- org.eclipse.cdt.make.core.cleanBuildTarget
- clean
-
-
- org.eclipse.cdt.make.core.contents
- org.eclipse.cdt.make.core.activeConfigSettings
-
-
- org.eclipse.cdt.make.core.enableAutoBuild
- false
-
-
- org.eclipse.cdt.make.core.enableCleanBuild
- true
-
-
- org.eclipse.cdt.make.core.enableFullBuild
- true
-
-
- org.eclipse.cdt.make.core.fullBuildTarget
- all
-
-
- org.eclipse.cdt.make.core.stopOnError
- true
-
-
- org.eclipse.cdt.make.core.useDefaultBuildCmd
- true
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/e2studio.py b/workspace_tools/export/e2studio.py
deleted file mode 100644
index 66cd9de..0000000
--- a/workspace_tools/export/e2studio.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename
-
-
-class E2Studio(Exporter):
- NAME = 'e2 studio'
- TOOLCHAIN = 'GCC_ARM'
-
- TARGETS = [
- 'RZ_A1H',
- ]
-
- def generate(self):
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- ctx = {
- 'name': self.program_name,
- 'include_paths': self.resources.inc_dirs,
- 'linker_script': self.resources.linker_script,
-
- 'object_files': self.resources.objects,
- 'libraries': libraries,
- 'symbols': self.get_symbols()
- }
- self.gen_file('e2studio_%s_project.tmpl' % self.target.lower(), ctx, '.project')
- self.gen_file('e2studio_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
- self.gen_file('e2studio_%s_gdbinit.tmpl' % self.target.lower(), ctx, '.gdbinit')
- self.gen_file('e2studio_launch.tmpl', ctx, '%s OpenOCD.launch' % self.program_name)
diff --git a/workspace_tools/export/e2studio_launch.tmpl b/workspace_tools/export/e2studio_launch.tmpl
deleted file mode 100644
index 2524c4d..0000000
--- a/workspace_tools/export/e2studio_launch.tmpl
+++ /dev/null
@@ -1,59 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/e2studio_rz_a1h_cproject.tmpl b/workspace_tools/export/e2studio_rz_a1h_cproject.tmpl
deleted file mode 100644
index 85dcd99..0000000
--- a/workspace_tools/export/e2studio_rz_a1h_cproject.tmpl
+++ /dev/null
@@ -1,318 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/e2studio_rz_a1h_gdbinit.tmpl b/workspace_tools/export/e2studio_rz_a1h_gdbinit.tmpl
deleted file mode 100644
index a59f78c..0000000
--- a/workspace_tools/export/e2studio_rz_a1h_gdbinit.tmpl
+++ /dev/null
@@ -1,29 +0,0 @@
-define hook-step
-mon cortex_a maskisr on
-end
-
-define hook-stepi
-mon cortex_a maskisr on
-end
-
-define hook-next
-mon cortex_a maskisr on
-end
-
-define hook-nexti
-mon cortex_a maskisr on
-end
-
-define hook-finish
-mon cortex_a maskisr on
-end
-
-define hook-stop
-mon cortex_a maskisr off
-end
-
-define hook-kill
-mon reset init
-end
-
-set mem inaccessible-by-default off
\ No newline at end of file
diff --git a/workspace_tools/export/e2studio_rz_a1h_project.tmpl b/workspace_tools/export/e2studio_rz_a1h_project.tmpl
deleted file mode 100644
index 0bab8dd..0000000
--- a/workspace_tools/export/e2studio_rz_a1h_project.tmpl
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
- {{name}}
- This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-e2studio
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/emblocks.eix.tmpl b/workspace_tools/export/emblocks.eix.tmpl
deleted file mode 100644
index 2bf5b0b..0000000
--- a/workspace_tools/export/emblocks.eix.tmpl
+++ /dev/null
@@ -1,57 +0,0 @@
-
-
-
-
-
-
-
-
- {% for s in include_paths %}
- {% endfor %}
-
-
-
-
-
-
- {% for s in libraries %}
- {% endfor %}
-
-
- {% for s in library_paths %}
- {% endfor %}
-
-
-
- {% for f in source_files %}
- {% endfor %}
-
-
-
diff --git a/workspace_tools/export/emblocks.py b/workspace_tools/export/emblocks.py
deleted file mode 100644
index 88a14d3..0000000
--- a/workspace_tools/export/emblocks.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename
-from workspace_tools.targets import TARGETS
-
-# filter all the GCC_ARM targets out of the target list
-gccTargets = []
-for t in TARGETS:
- if 'GCC_ARM' in t.supported_toolchains:
- gccTargets.append(t.name)
-
-class IntermediateFile(Exporter):
- NAME = 'EmBlocks'
- TOOLCHAIN = 'GCC_ARM'
-
- # we support all GCC targets (is handled on IDE side)
- TARGETS = gccTargets
-
- FILE_TYPES = {
- 'headers': 'h',
- 'c_sources': 'c',
- 's_sources': 'a',
- 'cpp_sources': 'cpp'
- }
-
-
- def generate(self):
- self.resources.win_to_unix()
- source_files = []
- for r_type, n in IntermediateFile.FILE_TYPES.iteritems():
- for file in getattr(self.resources, r_type):
- source_files.append({
- 'name': file, 'type': n
- })
-
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
-
- if self.resources.linker_script is None:
- self.resources.linker_script = ''
-
- ctx = {
- 'name': self.program_name,
- 'target': self.target,
- 'toolchain': self.toolchain.name,
- 'source_files': source_files,
- 'include_paths': self.resources.inc_dirs,
- 'script_file': self.resources.linker_script,
- 'library_paths': self.resources.lib_dirs,
- 'libraries': libraries,
- 'symbols': self.get_symbols(),
- 'object_files': self.resources.objects,
- 'sys_libs': self.toolchain.sys_libs,
- 'cc_org': self.toolchain.cc[1:],
- 'ld_org': self.toolchain.ld[1:],
- 'cppc_org': self.toolchain.cppc[1:]
- }
-
- # EmBlocks intermediate file template
- self.gen_file('emblocks.eix.tmpl', ctx, '%s.eix' % self.program_name)
diff --git a/workspace_tools/export/exporters.py b/workspace_tools/export/exporters.py
deleted file mode 100644
index 181f753..0000000
--- a/workspace_tools/export/exporters.py
+++ /dev/null
@@ -1,173 +0,0 @@
-"""Just a template for subclassing"""
-import uuid, shutil, os, logging, fnmatch
-from os import walk, remove
-from os.path import join, dirname, isdir, split
-from copy import copy
-from jinja2 import Template, FileSystemLoader
-from jinja2.environment import Environment
-from contextlib import closing
-from zipfile import ZipFile, ZIP_DEFLATED
-
-from workspace_tools.utils import mkdir
-from workspace_tools.toolchains import TOOLCHAIN_CLASSES
-from workspace_tools.targets import TARGET_MAP
-
-from project_generator.generate import Generator
-from project_generator.project import Project
-from project_generator.settings import ProjectSettings
-
-class OldLibrariesException(Exception): pass
-
-class Exporter(object):
- TEMPLATE_DIR = dirname(__file__)
- DOT_IN_RELATIVE_PATH = False
-
- def __init__(self, target, inputDir, program_name, build_url_resolver, extra_symbols=None):
- self.inputDir = inputDir
- self.target = target
- self.program_name = program_name
- self.toolchain = TOOLCHAIN_CLASSES[self.get_toolchain()](TARGET_MAP[target])
- self.build_url_resolver = build_url_resolver
- jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
- self.jinja_environment = Environment(loader=jinja_loader)
- self.extra_symbols = extra_symbols
-
- def get_toolchain(self):
- return self.TOOLCHAIN
-
- def __scan_and_copy(self, src_path, trg_path):
- resources = self.toolchain.scan_resources(src_path)
-
- for r_type in ['headers', 's_sources', 'c_sources', 'cpp_sources',
- 'objects', 'libraries', 'linker_script',
- 'lib_builds', 'lib_refs', 'repo_files', 'hex_files', 'bin_files']:
- r = getattr(resources, r_type)
- if r:
- self.toolchain.copy_files(r, trg_path, rel_path=src_path)
- return resources
-
- def progen_get_project_data(self):
- """ Get ProGen project data """
- # provide default data, some tools don't require any additional
- # tool specific settings
- sources = []
- for r_type in ['c_sources', 'cpp_sources', 's_sources']:
- for file in getattr(self.resources, r_type):
- sources.append(file)
-
- project_data = {
- 'common': {
- 'sources': {
- 'Source Files': sources + self.resources.hex_files +
- self.resources.objects + self.resources.libraries,
- },
- 'includes': {
- 'Include Files': self.resources.headers,
- },
- 'target': [TARGET_MAP[self.target].progen['target']],
- 'macros': self.get_symbols(),
- 'export_dir': [self.inputDir],
- 'linker_file': [self.resources.linker_script],
- }
- }
- return project_data
-
- def progen_gen_file(self, tool_name, project_data):
- """ Generate project using ProGen Project API """
- settings = ProjectSettings()
- project = Project(self.program_name, [project_data], settings)
- # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen
- # thinks it is not dict but a file, and adds them to workspace.
- project.project['common']['include_paths'] = self.resources.inc_dirs
- project.generate(tool_name, copied=True)
-
- def __scan_all(self, path):
- resources = []
-
- for root, dirs, files in walk(path):
- for d in copy(dirs):
- if d == '.' or d == '..':
- dirs.remove(d)
-
- for file in files:
- file_path = join(root, file)
- resources.append(file_path)
-
- return resources
-
- def scan_and_copy_resources(self, prj_path, trg_path):
- # Copy only the file for the required target and toolchain
- lib_builds = []
- for src in ['lib', 'src']:
- resources = self.__scan_and_copy(join(prj_path, src), trg_path)
- lib_builds.extend(resources.lib_builds)
-
- # The repository files
- for repo_dir in resources.repo_dirs:
- repo_files = self.__scan_all(repo_dir)
- self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src))
-
- # The libraries builds
- for bld in lib_builds:
- build_url = open(bld).read().strip()
- lib_data = self.build_url_resolver(build_url)
- lib_path = lib_data['path'].rstrip('\\/')
- self.__scan_and_copy(lib_path, join(trg_path, lib_data['name']))
-
- # Create .hg dir in mbed build dir so it's ignored when versioning
- hgdir = join(trg_path, lib_data['name'], '.hg')
- mkdir(hgdir)
- fhandle = file(join(hgdir, 'keep.me'), 'a')
- fhandle.close()
-
- # Final scan of the actual exported resources
- self.resources = self.toolchain.scan_resources(trg_path)
- self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH)
- # Check the existence of a binary build of the mbed library for the desired target
- # This prevents exporting the mbed libraries from source
- # if not self.toolchain.mbed_libs:
- # raise OldLibrariesException()
-
- def gen_file(self, template_file, data, target_file):
- template_path = join(Exporter.TEMPLATE_DIR, template_file)
- template = self.jinja_environment.get_template(template_file)
- target_text = template.render(data)
-
- target_path = join(self.inputDir, target_file)
- logging.debug("Generating: %s" % target_path)
- open(target_path, "w").write(target_text)
-
- def get_symbols(self, add_extra_symbols=True):
- """ This function returns symbols which must be exported.
- Please add / overwrite symbols in each exporter separately
- """
- symbols = self.toolchain.get_symbols()
- # We have extra symbols from e.g. libraries, we want to have them also added to export
- if add_extra_symbols:
- if self.extra_symbols is not None:
- symbols.extend(self.extra_symbols)
- return symbols
-
-def zip_working_directory_and_clean_up(tempdirectory=None, destination=None, program_name=None, clean=True):
- uid = str(uuid.uuid4())
- zipfilename = '%s.zip'%uid
-
- logging.debug("Zipping up %s to %s" % (tempdirectory, join(destination, zipfilename)))
- # make zip
- def zipdir(basedir, archivename):
- assert isdir(basedir)
- fakeroot = program_name + '/'
- with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
- for root, _, files in os.walk(basedir):
- # NOTE: ignore empty directories
- for fn in files:
- absfn = join(root, fn)
- zfn = fakeroot + '/' + absfn[len(basedir)+len(os.sep):]
- z.write(absfn, zfn)
-
- zipdir(tempdirectory, join(destination, zipfilename))
-
- if clean:
- shutil.rmtree(tempdirectory)
-
- return join(destination, zipfilename)
diff --git a/workspace_tools/export/gcc_arm_arch_ble.tmpl b/workspace_tools/export/gcc_arm_arch_ble.tmpl
deleted file mode 100644
index 2f4c03f..0000000
--- a/workspace_tools/export/gcc_arm_arch_ble.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_ARCH_BLE/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_arch_max.tmpl b/workspace_tools/export/gcc_arm_arch_max.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_arch_max.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_arch_pro.tmpl b/workspace_tools/export/gcc_arm_arch_pro.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_arch_pro.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_b96b_f446ve.tmpl b/workspace_tools/export/gcc_arm_b96b_f446ve.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_b96b_f446ve.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_common.tmpl b/workspace_tools/export/gcc_arm_common.tmpl
deleted file mode 100644
index d641b0a..0000000
--- a/workspace_tools/export/gcc_arm_common.tmpl
+++ /dev/null
@@ -1,105 +0,0 @@
-# This file was automagically generated by mbed.org. For more information,
-# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
-
-GCC_BIN =
-PROJECT = {{name}}
-OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
-SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
-INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
-LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
-LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
-LINKER_SCRIPT = {{linker_script}}
-{%- block additional_variables -%}{% endblock %}
-
-###############################################################################
-AS = $(GCC_BIN)arm-none-eabi-as
-CC = $(GCC_BIN)arm-none-eabi-gcc
-CPP = $(GCC_BIN)arm-none-eabi-g++
-LD = $(GCC_BIN)arm-none-eabi-gcc
-OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
-OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
-SIZE = $(GCC_BIN)arm-none-eabi-size
-{%- block additional_executables -%}{% endblock %}
-
-{%- block flags -%}
-
-{% block hardfp %}
-{% if "-mfloat-abi=softfp" in cpu_flags %}
-ifeq ($(HARDFP),1)
- FLOAT_ABI = hard
-else
- FLOAT_ABI = softfp
-endif
-{% endif %}
-{%- endblock %}
-
-CPU = {% block cpu %}{% for cf in cpu_flags %}{{cf|replace("-mfloat-abi=softfp","-mfloat-abi=$(FLOAT_ABI)")}} {% endfor %}{% endblock %}
-CC_FLAGS = {% block cc_flags %}$(CPU) -c -g -fno-common -fmessage-length=0 -Wall -Wextra -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP{% endblock %}
-CC_SYMBOLS = {% block cc_symbols %}{% for s in symbols %}-D{{s}} {% endfor %}{% endblock %}
-
-LD_FLAGS = {%- block ld_flags -%}
-{%- if "-mcpu=cortex-m0" in cpu_flags or "-mcpu=cortex-m0plus" in cpu_flags -%}
-{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -Wl,--wrap,main -Wl,-Map=$(PROJECT).map,--cref
-#LD_FLAGS += -u _printf_float -u _scanf_float
-{%- else -%}
-{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,-Map=$(PROJECT).map,--cref
-{%- endif -%}
-{% endblock %}
-LD_SYS_LIBS = {% block ld_sys_libs %}-lstdc++ -lsupc++ -lm -lc -lgcc -lnosys{% endblock %}
-{% endblock %}
-
-ifeq ($(DEBUG), 1)
- CC_FLAGS += -DDEBUG -O0
-else
- CC_FLAGS += -DNDEBUG -Os
-endif
-
-.PHONY: all clean lst size
-
-{% block target_all -%}
-all: $(PROJECT).bin $(PROJECT).hex size
-{% endblock %}
-
-{% block target_clean -%}
-clean:
- rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
-{% endblock %}
-
-.asm.o:
- $(CC) $(CPU) -c -x assembler-with-cpp -o $@ $<
-.s.o:
- $(CC) $(CPU) -c -x assembler-with-cpp -o $@ $<
-.S.o:
- $(CC) $(CPU) -c -x assembler-with-cpp -o $@ $<
-
-.c.o:
- $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
-
-.cpp.o:
- $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
-
-
-{% block target_project_elf %}
-$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
- $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ -Wl,--start-group $(LIBRARIES) $(LD_SYS_LIBS) -Wl,--end-group
-{% endblock %}
-
-$(PROJECT).bin: $(PROJECT).elf
- $(OBJCOPY) -O binary $< $@
-
-$(PROJECT).hex: $(PROJECT).elf
- @$(OBJCOPY) -O ihex $< $@
-
-$(PROJECT).lst: $(PROJECT).elf
- @$(OBJDUMP) -Sdh $< > $@
-
-lst: $(PROJECT).lst
-
-size: $(PROJECT).elf
- $(SIZE) $(PROJECT).elf
-
-DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
--include $(DEPS)
-
-{% block additional_targets %}{% endblock %}
-
diff --git a/workspace_tools/export/gcc_arm_delta_dfcm_nnn40.tmpl b/workspace_tools/export/gcc_arm_delta_dfcm_nnn40.tmpl
deleted file mode 100644
index c35964e..0000000
--- a/workspace_tools/export/gcc_arm_delta_dfcm_nnn40.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_0_0/s110_nrf51822_7.0.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_disco_f051r8.tmpl b/workspace_tools/export/gcc_arm_disco_f051r8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f051r8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f100rb.tmpl b/workspace_tools/export/gcc_arm_disco_f100rb.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f100rb.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f303vc.tmpl b/workspace_tools/export/gcc_arm_disco_f303vc.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f303vc.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f334c8.tmpl b/workspace_tools/export/gcc_arm_disco_f334c8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f334c8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f401vc.tmpl b/workspace_tools/export/gcc_arm_disco_f401vc.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f401vc.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f407vg.tmpl b/workspace_tools/export/gcc_arm_disco_f407vg.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f407vg.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f429zi.tmpl b/workspace_tools/export/gcc_arm_disco_f429zi.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f429zi.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f469ni.tmpl b/workspace_tools/export/gcc_arm_disco_f469ni.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f469ni.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_f746ng.tmpl b/workspace_tools/export/gcc_arm_disco_f746ng.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_f746ng.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_l053c8.tmpl b/workspace_tools/export/gcc_arm_disco_l053c8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_l053c8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_disco_l476vg.tmpl b/workspace_tools/export/gcc_arm_disco_l476vg.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_disco_l476vg.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_efm32_common.tmpl b/workspace_tools/export/gcc_arm_efm32_common.tmpl
deleted file mode 100644
index c73a8c3..0000000
--- a/workspace_tools/export/gcc_arm_efm32_common.tmpl
+++ /dev/null
@@ -1,115 +0,0 @@
-# This file was automagically generated by mbed.org. For more information,
-# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
-
-GCC_BIN =
-PROJECT = {{name}}
-OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
-SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
-INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
-LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
-LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
-LINKER_SCRIPT = {{linker_script}}
-
-OUT_DIR = bin
-OBJ_FOLDER = $(strip $(OUT_DIR))/
-
-{%- block additional_variables -%}{% endblock %}
-
-###############################################################################
-AS = $(GCC_BIN)arm-none-eabi-as
-CC = $(GCC_BIN)arm-none-eabi-gcc
-CPP = $(GCC_BIN)arm-none-eabi-g++
-LD = $(GCC_BIN)arm-none-eabi-gcc
-OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
-OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
-SIZE = $(GCC_BIN)arm-none-eabi-size
-{%- block additional_executables -%}{% endblock %}
-
-{%- block flags -%}
-
-{% block hardfp %}
-{% if "-mfloat-abi=softfp" in cpu_flags %}
-ifeq ($(HARDFP),1)
- FLOAT_ABI = hard
-else
- FLOAT_ABI = softfp
-endif
-{% endif %}
-{%- endblock %}
-
-CPU = {% block cpu %}{% for cf in cpu_flags %}{{cf|replace("-mfloat-abi=softfp","-mfloat-abi=$(FLOAT_ABI)")}} {% endfor %}{% endblock %}
-CC_FLAGS = {% block cc_flags %}$(CPU) -c -g -fno-common -fmessage-length=0 -Wall -Wextra -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer -MMD -MP{% endblock %}
-CC_SYMBOLS = {% block cc_symbols %}{% for s in symbols %}-D{{s}} {% endfor %}{% endblock %}
-
-ifeq ($(DEBUG), 1)
- CC_FLAGS += -DDEBUG -O0
-else
- CC_FLAGS += -DNDEBUG -Os
-endif
-
-LD_FLAGS = {%- block ld_flags -%}
-{%- if "-mcpu=cortex-m0" in cpu_flags or "-mcpu=cortex-m0plus" in cpu_flags -%}
-{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -Wl,--wrap,main -Wl,-Map=$(OBJ_FOLDER)$(PROJECT).map,--cref
-#LD_FLAGS += -u _printf_float -u _scanf_float
-{%- else -%}
-{{ ' ' }}$(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main -Wl,-Map=$(OBJ_FOLDER)$(PROJECT).map,--cref
-{%- endif -%}
-{% endblock %}
-LD_SYS_LIBS = {% block ld_sys_libs %}-lstdc++ -lsupc++ -lm -lc -lgcc -lnosys{% endblock %}
-{% endblock %}
-
-.PHONY: all clean lst size
-
-{% block target_all -%}
-all: create_outputdir $(OBJ_FOLDER)$(PROJECT).bin $(OBJ_FOLDER)$(PROJECT).hex size
-{% endblock %}
-
-{% block target_create_outputdir -%}
-create_outputdir:
- $(shell mkdir $(OBJ_FOLDER) 2>/dev/null)
-{% endblock %}
-
-{% block target_clean -%}
-clean:
- rm -f $(OBJ_FOLDER)$(PROJECT).bin $(OBJ_FOLDER)$(PROJECT).axf $(OBJ_FOLDER)$(PROJECT).hex $(OBJ_FOLDER)$(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
-{% endblock %}
-
-.s.o:
- $(AS) $(CPU) -o $@ $<
-
-.c.o:
- $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
-
-.cpp.o:
- $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
-
-
-{% block target_project_axf %}
-$(OBJ_FOLDER)$(PROJECT).axf: $(OBJECTS) $(SYS_OBJECTS)
- $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
-
-$(OBJ_FOLDER)$(PROJECT).bin: $(OBJ_FOLDER)$(PROJECT).axf
- @$(OBJCOPY) -O binary $< $@
-
-$(OBJ_FOLDER)$(PROJECT).hex: $(OBJ_FOLDER)$(PROJECT).axf
- @$(OBJCOPY) -O ihex $< $@
-
-$(OBJ_FOLDER)$(PROJECT).lst: $(OBJ_FOLDER)$(PROJECT).axf
- @$(OBJDUMP) -Sdh $< > $@
-
-lst: $(OBJ_FOLDER)$(PROJECT).lst
-
-size: $(OBJ_FOLDER)$(PROJECT).axf
- $(SIZE) $(OBJ_FOLDER)$(PROJECT).axf
-
-DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
--include $(DEPS)
-
-{% block additional_targets %}{% endblock %}
-
diff --git a/workspace_tools/export/gcc_arm_efm32gg_stk3700.tmpl b/workspace_tools/export/gcc_arm_efm32gg_stk3700.tmpl
deleted file mode 100644
index 7187abc..0000000
--- a/workspace_tools/export/gcc_arm_efm32gg_stk3700.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_efm32hg_stk3400.tmpl b/workspace_tools/export/gcc_arm_efm32hg_stk3400.tmpl
deleted file mode 100644
index 7187abc..0000000
--- a/workspace_tools/export/gcc_arm_efm32hg_stk3400.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_efm32lg_stk3600.tmpl b/workspace_tools/export/gcc_arm_efm32lg_stk3600.tmpl
deleted file mode 100644
index 7187abc..0000000
--- a/workspace_tools/export/gcc_arm_efm32lg_stk3600.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_efm32pg_stk3401.tmpl b/workspace_tools/export/gcc_arm_efm32pg_stk3401.tmpl
deleted file mode 100644
index 7187abc..0000000
--- a/workspace_tools/export/gcc_arm_efm32pg_stk3401.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_efm32wg_stk3800.tmpl b/workspace_tools/export/gcc_arm_efm32wg_stk3800.tmpl
deleted file mode 100644
index 7187abc..0000000
--- a/workspace_tools/export/gcc_arm_efm32wg_stk3800.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_efm32zg_stk3200.tmpl b/workspace_tools/export/gcc_arm_efm32zg_stk3200.tmpl
deleted file mode 100644
index 7187abc..0000000
--- a/workspace_tools/export/gcc_arm_efm32zg_stk3200.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_efm32_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_hrm1017.tmpl b/workspace_tools/export/gcc_arm_hrm1017.tmpl
deleted file mode 100644
index 0c6a037..0000000
--- a/workspace_tools/export/gcc_arm_hrm1017.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_HRM1017/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_k20d50m.tmpl b/workspace_tools/export/gcc_arm_k20d50m.tmpl
deleted file mode 100644
index 47ed5cf..0000000
--- a/workspace_tools/export/gcc_arm_k20d50m.tmpl
+++ /dev/null
@@ -1,4 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block hardfp %}{% endblock %}
-{% block cpu %}-mcpu=cortex-m4 -mthumb{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_k22f.tmpl b/workspace_tools/export/gcc_arm_k22f.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_k22f.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_k64f.tmpl b/workspace_tools/export/gcc_arm_k64f.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_k64f.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_kl05z.tmpl b/workspace_tools/export/gcc_arm_kl05z.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_kl05z.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_kl25z.tmpl b/workspace_tools/export/gcc_arm_kl25z.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_kl25z.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_kl43z.tmpl b/workspace_tools/export/gcc_arm_kl43z.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_kl43z.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_kl46z.tmpl b/workspace_tools/export/gcc_arm_kl46z.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_kl46z.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_lpc1114.tmpl b/workspace_tools/export/gcc_arm_lpc1114.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_lpc1114.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_lpc11u24.tmpl b/workspace_tools/export/gcc_arm_lpc11u24.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_lpc11u24.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_lpc11u35_401.tmpl b/workspace_tools/export/gcc_arm_lpc11u35_401.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_lpc11u35_401.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_lpc11u35_501.tmpl b/workspace_tools/export/gcc_arm_lpc11u35_501.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_lpc11u35_501.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_lpc11u37h_401.tmpl b/workspace_tools/export/gcc_arm_lpc11u37h_401.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc11u37h_401.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpc1549.tmpl b/workspace_tools/export/gcc_arm_lpc1549.tmpl
deleted file mode 100644
index 047a815..0000000
--- a/workspace_tools/export/gcc_arm_lpc1549.tmpl
+++ /dev/null
@@ -1,11 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
-
diff --git a/workspace_tools/export/gcc_arm_lpc1768.tmpl b/workspace_tools/export/gcc_arm_lpc1768.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc1768.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpc2368.tmpl b/workspace_tools/export/gcc_arm_lpc2368.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_lpc2368.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_lpc2460.tmpl b/workspace_tools/export/gcc_arm_lpc2460.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_lpc2460.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_lpc4088.tmpl b/workspace_tools/export/gcc_arm_lpc4088.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc4088.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpc4088_dm.tmpl b/workspace_tools/export/gcc_arm_lpc4088_dm.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_lpc4088_dm.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_lpc4330_m4.tmpl b/workspace_tools/export/gcc_arm_lpc4330_m4.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc4330_m4.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpc810.tmpl b/workspace_tools/export/gcc_arm_lpc810.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc810.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpc812.tmpl b/workspace_tools/export/gcc_arm_lpc812.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc812.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpc824.tmpl b/workspace_tools/export/gcc_arm_lpc824.tmpl
deleted file mode 100644
index 58de54b..0000000
--- a/workspace_tools/export/gcc_arm_lpc824.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
\ No newline at end of file
diff --git a/workspace_tools/export/gcc_arm_lpccappuccino.tmpl b/workspace_tools/export/gcc_arm_lpccappuccino.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_lpccappuccino.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_max32600mbed.tmpl b/workspace_tools/export/gcc_arm_max32600mbed.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_max32600mbed.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_maxwsnenv.tmpl b/workspace_tools/export/gcc_arm_maxwsnenv.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_maxwsnenv.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_mote_l152rc.tmpl b/workspace_tools/export/gcc_arm_mote_l152rc.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_mote_l152rc.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_mts_gambit.tmpl b/workspace_tools/export/gcc_arm_mts_gambit.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_mts_gambit.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_mts_mdot_f405rg.tmpl b/workspace_tools/export/gcc_arm_mts_mdot_f405rg.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_mts_mdot_f405rg.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_mts_mdot_f411re.tmpl b/workspace_tools/export/gcc_arm_mts_mdot_f411re.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_mts_mdot_f411re.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nrf51822.tmpl b/workspace_tools/export/gcc_arm_nrf51822.tmpl
deleted file mode 100644
index d7120ca..0000000
--- a/workspace_tools/export/gcc_arm_nrf51822.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s130_nrf51822_1_0_0/s130_nrf51_1.0.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_nrf51_dk.tmpl b/workspace_tools/export/gcc_arm_nrf51_dk.tmpl
deleted file mode 100644
index 2afebf2..0000000
--- a/workspace_tools/export/gcc_arm_nrf51_dk.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_NRF51_DK/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_nrf51_dongle.tmpl b/workspace_tools/export/gcc_arm_nrf51_dongle.tmpl
deleted file mode 100644
index c35964e..0000000
--- a/workspace_tools/export/gcc_arm_nrf51_dongle.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_0_0/s110_nrf51822_7.0.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_nrf51_microbit.tmpl b/workspace_tools/export/gcc_arm_nrf51_microbit.tmpl
deleted file mode 100644
index 8071c9b..0000000
--- a/workspace_tools/export/gcc_arm_nrf51_microbit.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_NRF51_MICROBIT/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_8_0_0/s110_nrf51822_8.0.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f030r8.tmpl b/workspace_tools/export/gcc_arm_nucleo_f030r8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f030r8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f031k6.tmpl b/workspace_tools/export/gcc_arm_nucleo_f031k6.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f031k6.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f042k6.tmpl b/workspace_tools/export/gcc_arm_nucleo_f042k6.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f042k6.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f070rb.tmpl b/workspace_tools/export/gcc_arm_nucleo_f070rb.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f070rb.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f072rb.tmpl b/workspace_tools/export/gcc_arm_nucleo_f072rb.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f072rb.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f091rc.tmpl b/workspace_tools/export/gcc_arm_nucleo_f091rc.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f091rc.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f103rb.tmpl b/workspace_tools/export/gcc_arm_nucleo_f103rb.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f103rb.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f302r8.tmpl b/workspace_tools/export/gcc_arm_nucleo_f302r8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f302r8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f303k8.tmpl b/workspace_tools/export/gcc_arm_nucleo_f303k8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f303k8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f303re.tmpl b/workspace_tools/export/gcc_arm_nucleo_f303re.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f303re.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f334r8.tmpl b/workspace_tools/export/gcc_arm_nucleo_f334r8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f334r8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f401re.tmpl b/workspace_tools/export/gcc_arm_nucleo_f401re.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f401re.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f410rb.tmpl b/workspace_tools/export/gcc_arm_nucleo_f410rb.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f410rb.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f411re.tmpl b/workspace_tools/export/gcc_arm_nucleo_f411re.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f411re.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f446re.tmpl b/workspace_tools/export/gcc_arm_nucleo_f446re.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f446re.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_f746zg.tmpl b/workspace_tools/export/gcc_arm_nucleo_f746zg.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_f746zg.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_l031k6.tmpl b/workspace_tools/export/gcc_arm_nucleo_l031k6.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_l031k6.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_l053r8.tmpl b/workspace_tools/export/gcc_arm_nucleo_l053r8.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_l053r8.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_l073rz.tmpl b/workspace_tools/export/gcc_arm_nucleo_l073rz.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_l073rz.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_l152re.tmpl b/workspace_tools/export/gcc_arm_nucleo_l152re.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_l152re.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nucleo_l476rg.tmpl b/workspace_tools/export/gcc_arm_nucleo_l476rg.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nucleo_l476rg.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_nz32_sc151.tmpl b/workspace_tools/export/gcc_arm_nz32_sc151.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_nz32_sc151.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_rblab_blenano.tmpl b/workspace_tools/export/gcc_arm_rblab_blenano.tmpl
deleted file mode 100644
index c65eb03..0000000
--- a/workspace_tools/export/gcc_arm_rblab_blenano.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_RBLAB_BLENANO/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s130_nrf51822_1_0_0/s130_nrf51_1.0.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_rblab_nrf51822.tmpl b/workspace_tools/export/gcc_arm_rblab_nrf51822.tmpl
deleted file mode 100644
index 6a615fd..0000000
--- a/workspace_tools/export/gcc_arm_rblab_nrf51822.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_RBLAB_NRF51822/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_rz_a1h.tmpl b/workspace_tools/export/gcc_arm_rz_a1h.tmpl
deleted file mode 100644
index 055d0e5..0000000
--- a/workspace_tools/export/gcc_arm_rz_a1h.tmpl
+++ /dev/null
@@ -1,16 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block cc_flags -%}
-$(CPU) -c -g -fno-common -fmessage-length=0 -Wall -Wextra -Wno-unused-parameter -Wno-missing-field-initializers
-CC_FLAGS += -fno-exceptions -fno-builtin -ffunction-sections -fdata-sections -fno-delete-null-pointer-checks -fomit-frame-pointer
-CC_FLAGS += -MMD -MP
-{% endblock %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_samd21g18a.tmpl b/workspace_tools/export/gcc_arm_samd21g18a.tmpl
deleted file mode 100644
index b71f60c..0000000
--- a/workspace_tools/export/gcc_arm_samd21g18a.tmpl
+++ /dev/null
@@ -1,72 +0,0 @@
-# This file was automagically generated by mbed.org. For more information,
-# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
-
-GCC_BIN =
-PROJECT = {{name}}
-OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
-SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
-INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
-LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
-LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
-LINKER_SCRIPT = {{linker_script}}
-
-###############################################################################
-AS = $(GCC_BIN)arm-none-eabi-as
-CC = $(GCC_BIN)arm-none-eabi-gcc
-CPP = $(GCC_BIN)arm-none-eabi-g++
-LD = $(GCC_BIN)arm-none-eabi-g++
-OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
-OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
-SIZE = $(GCC_BIN)arm-none-eabi-size
-
-CPU = -mcpu=cortex-m0plus -mthumb
-CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
-CC_FLAGS += -MMD -MP
-CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
-
-LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
-LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
-LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
-
-ifeq ($(DEBUG), 1)
- CC_FLAGS += -DDEBUG -O0
-else
- CC_FLAGS += -DNDEBUG -Os
-endif
-
-all: $(PROJECT).bin $(PROJECT).hex
-
-clean:
- rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
-
-.s.o:
- $(AS) $(CPU) -o $@ $<
-
-.c.o:
- $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
-
-.cpp.o:
- $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
-
-
-$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
- $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
- $(SIZE) $@
-
-$(PROJECT).bin: $(PROJECT).elf
- @$(OBJCOPY) -O binary $< $@
-
-$(PROJECT).hex: $(PROJECT).elf
- @$(OBJCOPY) -O ihex $< $@
-
-$(PROJECT).lst: $(PROJECT).elf
- @$(OBJDUMP) -Sdh $< > $@
-
-lst: $(PROJECT).lst
-
-size:
- $(SIZE) $(PROJECT).elf
-
-DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
--include $(DEPS)
-
diff --git a/workspace_tools/export/gcc_arm_samd21j18a.tmpl b/workspace_tools/export/gcc_arm_samd21j18a.tmpl
deleted file mode 100644
index b71f60c..0000000
--- a/workspace_tools/export/gcc_arm_samd21j18a.tmpl
+++ /dev/null
@@ -1,72 +0,0 @@
-# This file was automagically generated by mbed.org. For more information,
-# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
-
-GCC_BIN =
-PROJECT = {{name}}
-OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
-SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
-INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
-LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
-LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
-LINKER_SCRIPT = {{linker_script}}
-
-###############################################################################
-AS = $(GCC_BIN)arm-none-eabi-as
-CC = $(GCC_BIN)arm-none-eabi-gcc
-CPP = $(GCC_BIN)arm-none-eabi-g++
-LD = $(GCC_BIN)arm-none-eabi-g++
-OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
-OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
-SIZE = $(GCC_BIN)arm-none-eabi-size
-
-CPU = -mcpu=cortex-m0plus -mthumb
-CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
-CC_FLAGS += -MMD -MP
-CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
-
-LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
-LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
-LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
-
-ifeq ($(DEBUG), 1)
- CC_FLAGS += -DDEBUG -O0
-else
- CC_FLAGS += -DNDEBUG -Os
-endif
-
-all: $(PROJECT).bin $(PROJECT).hex
-
-clean:
- rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
-
-.s.o:
- $(AS) $(CPU) -o $@ $<
-
-.c.o:
- $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
-
-.cpp.o:
- $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
-
-
-$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
- $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
- $(SIZE) $@
-
-$(PROJECT).bin: $(PROJECT).elf
- @$(OBJCOPY) -O binary $< $@
-
-$(PROJECT).hex: $(PROJECT).elf
- @$(OBJCOPY) -O ihex $< $@
-
-$(PROJECT).lst: $(PROJECT).elf
- @$(OBJDUMP) -Sdh $< > $@
-
-lst: $(PROJECT).lst
-
-size:
- $(SIZE) $(PROJECT).elf
-
-DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
--include $(DEPS)
-
diff --git a/workspace_tools/export/gcc_arm_samg55j19.tmpl b/workspace_tools/export/gcc_arm_samg55j19.tmpl
deleted file mode 100644
index 47ed5cf..0000000
--- a/workspace_tools/export/gcc_arm_samg55j19.tmpl
+++ /dev/null
@@ -1,4 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block hardfp %}{% endblock %}
-{% block cpu %}-mcpu=cortex-m4 -mthumb{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_saml21j18a.tmpl b/workspace_tools/export/gcc_arm_saml21j18a.tmpl
deleted file mode 100644
index b71f60c..0000000
--- a/workspace_tools/export/gcc_arm_saml21j18a.tmpl
+++ /dev/null
@@ -1,72 +0,0 @@
-# This file was automagically generated by mbed.org. For more information,
-# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
-
-GCC_BIN =
-PROJECT = {{name}}
-OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
-SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
-INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
-LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
-LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
-LINKER_SCRIPT = {{linker_script}}
-
-###############################################################################
-AS = $(GCC_BIN)arm-none-eabi-as
-CC = $(GCC_BIN)arm-none-eabi-gcc
-CPP = $(GCC_BIN)arm-none-eabi-g++
-LD = $(GCC_BIN)arm-none-eabi-g++
-OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
-OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
-SIZE = $(GCC_BIN)arm-none-eabi-size
-
-CPU = -mcpu=cortex-m0plus -mthumb
-CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
-CC_FLAGS += -MMD -MP
-CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
-
-LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
-LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
-LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
-
-ifeq ($(DEBUG), 1)
- CC_FLAGS += -DDEBUG -O0
-else
- CC_FLAGS += -DNDEBUG -Os
-endif
-
-all: $(PROJECT).bin $(PROJECT).hex
-
-clean:
- rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
-
-.s.o:
- $(AS) $(CPU) -o $@ $<
-
-.c.o:
- $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
-
-.cpp.o:
- $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
-
-
-$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
- $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
- $(SIZE) $@
-
-$(PROJECT).bin: $(PROJECT).elf
- @$(OBJCOPY) -O binary $< $@
-
-$(PROJECT).hex: $(PROJECT).elf
- @$(OBJCOPY) -O ihex $< $@
-
-$(PROJECT).lst: $(PROJECT).elf
- @$(OBJDUMP) -Sdh $< > $@
-
-lst: $(PROJECT).lst
-
-size:
- $(SIZE) $(PROJECT).elf
-
-DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
--include $(DEPS)
-
diff --git a/workspace_tools/export/gcc_arm_samr21g18a.tmpl b/workspace_tools/export/gcc_arm_samr21g18a.tmpl
deleted file mode 100644
index b71f60c..0000000
--- a/workspace_tools/export/gcc_arm_samr21g18a.tmpl
+++ /dev/null
@@ -1,72 +0,0 @@
-# This file was automagically generated by mbed.org. For more information,
-# see http://mbed.org/handbook/Exporting-to-GCC-ARM-Embedded
-
-GCC_BIN =
-PROJECT = {{name}}
-OBJECTS = {% for f in to_be_compiled %}{{f}} {% endfor %}
-SYS_OBJECTS = {% for f in object_files %}{{f}} {% endfor %}
-INCLUDE_PATHS = {% for p in include_paths %}-I{{p}} {% endfor %}
-LIBRARY_PATHS = {% for p in library_paths %}-L{{p}} {% endfor %}
-LIBRARIES = {% for lib in libraries %}-l{{lib}} {% endfor %}
-LINKER_SCRIPT = {{linker_script}}
-
-###############################################################################
-AS = $(GCC_BIN)arm-none-eabi-as
-CC = $(GCC_BIN)arm-none-eabi-gcc
-CPP = $(GCC_BIN)arm-none-eabi-g++
-LD = $(GCC_BIN)arm-none-eabi-g++
-OBJCOPY = $(GCC_BIN)arm-none-eabi-objcopy
-OBJDUMP = $(GCC_BIN)arm-none-eabi-objdump
-SIZE = $(GCC_BIN)arm-none-eabi-size
-
-CPU = -mcpu=cortex-m0plus -mthumb
-CC_FLAGS = $(CPU) -c -g -fno-common -fmessage-length=0 -Wall -fno-exceptions -ffunction-sections -fdata-sections -fomit-frame-pointer
-CC_FLAGS += -MMD -MP
-CC_SYMBOLS = {% for s in symbols %}-D{{s}} {% endfor %}
-
-LD_FLAGS = $(CPU) -Wl,--gc-sections --specs=nano.specs -u _printf_float -u _scanf_float -Wl,--wrap,main
-LD_FLAGS += -Wl,-Map=$(PROJECT).map,--cref
-LD_SYS_LIBS = -lstdc++ -lsupc++ -lm -lgcc -Wl,--start-group -lc -lc -lnosys -Wl,--end-group
-
-ifeq ($(DEBUG), 1)
- CC_FLAGS += -DDEBUG -O0
-else
- CC_FLAGS += -DNDEBUG -Os
-endif
-
-all: $(PROJECT).bin $(PROJECT).hex
-
-clean:
- rm -f $(PROJECT).bin $(PROJECT).elf $(PROJECT).hex $(PROJECT).map $(PROJECT).lst $(OBJECTS) $(DEPS)
-
-.s.o:
- $(AS) $(CPU) -o $@ $<
-
-.c.o:
- $(CC) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu99 $(INCLUDE_PATHS) -o $@ $<
-
-.cpp.o:
- $(CPP) $(CC_FLAGS) $(CC_SYMBOLS) -std=gnu++98 -fno-rtti $(INCLUDE_PATHS) -o $@ $<
-
-
-$(PROJECT).elf: $(OBJECTS) $(SYS_OBJECTS)
- $(LD) $(LD_FLAGS) -T$(LINKER_SCRIPT) $(LIBRARY_PATHS) -o $@ $^ $(LIBRARIES) $(LD_SYS_LIBS) $(LIBRARIES) $(LD_SYS_LIBS)
- $(SIZE) $@
-
-$(PROJECT).bin: $(PROJECT).elf
- @$(OBJCOPY) -O binary $< $@
-
-$(PROJECT).hex: $(PROJECT).elf
- @$(OBJCOPY) -O ihex $< $@
-
-$(PROJECT).lst: $(PROJECT).elf
- @$(OBJDUMP) -Sdh $< > $@
-
-lst: $(PROJECT).lst
-
-size:
- $(SIZE) $(PROJECT).elf
-
-DEPS = $(OBJECTS:.o=.d) $(SYS_OBJECTS:.o=.d)
--include $(DEPS)
-
diff --git a/workspace_tools/export/gcc_arm_seeed_tiny_ble.tmpl b/workspace_tools/export/gcc_arm_seeed_tiny_ble.tmpl
deleted file mode 100644
index 2f4c03f..0000000
--- a/workspace_tools/export/gcc_arm_seeed_tiny_ble.tmpl
+++ /dev/null
@@ -1,14 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block additional_variables %}
-SOFTDEVICE = mbed/TARGET_ARCH_BLE/TARGET_NORDIC/TARGET_MCU_NRF51822/Lib/s110_nrf51822_7_1_0/s110_nrf51822_7.1.0_softdevice.hex
-{% endblock %}
-
-{% block additional_executables %}
-SREC_CAT = srec_cat
-{% endblock %}
-
-{% block additional_targets %}
-merge:
- $(SREC_CAT) $(SOFTDEVICE) -intel $(PROJECT).hex -intel -o combined.hex -intel --line-length=44
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_ssci824.tmpl b/workspace_tools/export/gcc_arm_ssci824.tmpl
deleted file mode 100644
index dbbc6da..0000000
--- a/workspace_tools/export/gcc_arm_ssci824.tmpl
+++ /dev/null
@@ -1,10 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block target_project_elf %}
-{{ super() }}
- @echo ""
- @echo "*****"
- @echo "***** You must modify vector checksum value in *.bin and *.hex files."
- @echo "*****"
- @echo ""
-{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_stm32f407.tmpl b/workspace_tools/export/gcc_arm_stm32f407.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_stm32f407.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gcc_arm_teensy3_1.tmpl b/workspace_tools/export/gcc_arm_teensy3_1.tmpl
deleted file mode 100644
index 47ed5cf..0000000
--- a/workspace_tools/export/gcc_arm_teensy3_1.tmpl
+++ /dev/null
@@ -1,4 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
-
-{% block hardfp %}{% endblock %}
-{% block cpu %}-mcpu=cortex-m4 -mthumb{% endblock %}
diff --git a/workspace_tools/export/gcc_arm_ublox_c027.tmpl b/workspace_tools/export/gcc_arm_ublox_c027.tmpl
deleted file mode 100644
index 6e616cc..0000000
--- a/workspace_tools/export/gcc_arm_ublox_c027.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-{% extends "gcc_arm_common.tmpl" %}
diff --git a/workspace_tools/export/gccarm.py b/workspace_tools/export/gccarm.py
deleted file mode 100755
index 18e67f5..0000000
--- a/workspace_tools/export/gccarm.py
+++ /dev/null
@@ -1,150 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename
-
-
-class GccArm(Exporter):
- NAME = 'GccArm'
- TOOLCHAIN = 'GCC_ARM'
-
- TARGETS = [
- 'LPC1768',
- 'LPC1549',
- 'KL05Z',
- 'KL25Z',
- 'KL43Z',
- 'KL46Z',
- 'K64F',
- 'K22F',
- 'K20D50M',
- 'LPC4088',
- 'LPC4088_DM',
- 'LPC4330_M4',
- 'LPC11U24',
- 'LPC1114',
- 'LPC11U35_401',
- 'LPC11U35_501',
- 'LPC11U37H_401',
- 'LPC810',
- 'LPC812',
- 'LPC824',
- 'SSCI824',
- 'STM32F407',
- 'DISCO_F100RB',
- 'DISCO_F051R8',
- 'DISCO_F407VG',
- 'DISCO_F429ZI',
- 'DISCO_F469NI',
- 'DISCO_F303VC',
- 'DISCO_F746NG',
- 'DISCO_L476VG',
- 'UBLOX_C027',
- 'ARCH_PRO',
- 'NRF51822',
- 'HRM1017',
- 'RBLAB_NRF51822',
- 'RBLAB_BLENANO',
- 'LPC2368',
- 'LPC2460',
- 'LPCCAPPUCCINO',
- 'ARCH_BLE',
- 'MTS_GAMBIT',
- 'ARCH_MAX',
- 'NUCLEO_F401RE',
- 'NUCLEO_F410RB',
- 'NUCLEO_F411RE',
- 'NUCLEO_F446RE',
- 'B96B_F446VE',
- 'ARCH_MAX',
- 'NUCLEO_F030R8',
- 'NUCLEO_F031K6',
- 'NUCLEO_F042K6',
- 'NUCLEO_F070RB',
- 'NUCLEO_F072RB',
- 'NUCLEO_F091RC',
- 'NUCLEO_F103RB',
- 'NUCLEO_F302R8',
- 'NUCLEO_F303K8',
- 'NUCLEO_F303RE',
- 'NUCLEO_F334R8',
- 'NUCLEO_F746ZG',
- 'DISCO_L053C8',
- 'NUCLEO_L031K6',
- 'NUCLEO_L053R8',
- 'NUCLEO_L073RZ',
- 'NUCLEO_L476RG',
- 'DISCO_F334C8',
- 'MAX32600MBED',
- 'MAXWSNENV',
- 'MTS_MDOT_F405RG',
- 'MTS_MDOT_F411RE',
- 'NUCLEO_L152RE',
- 'NRF51_DK',
- 'NRF51_DONGLE',
- 'NRF51_MICROBIT',
- 'SEEED_TINY_BLE',
- 'DISCO_F401VC',
- 'DELTA_DFCM_NNN40',
- 'RZ_A1H',
- 'MOTE_L152RC',
- 'EFM32WG_STK3800',
- 'EFM32LG_STK3600',
- 'EFM32GG_STK3700',
- 'EFM32ZG_STK3200',
- 'EFM32HG_STK3400',
- 'EFM32PG_STK3401',
- 'NZ32_SC151',
- 'SAMR21G18A',
- 'TEENSY3_1',
- 'SAMD21J18A',
- 'SAMD21G18A',
- 'SAML21J18A',
- 'SAMG55J19',
- ]
-
- DOT_IN_RELATIVE_PATH = True
-
- def generate(self):
- # "make" wants Unix paths
- self.resources.win_to_unix()
-
- to_be_compiled = []
- for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
- r = getattr(self.resources, r_type)
- if r:
- for source in r:
- base, ext = splitext(source)
- to_be_compiled.append(base + '.o')
-
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- ctx = {
- 'name': self.program_name,
- 'to_be_compiled': to_be_compiled,
- 'object_files': self.resources.objects,
- 'include_paths': self.resources.inc_dirs,
- 'library_paths': self.resources.lib_dirs,
- 'linker_script': self.resources.linker_script,
- 'libraries': libraries,
- 'symbols': self.get_symbols(),
- 'cpu_flags': self.toolchain.cpu
- }
- self.gen_file('gcc_arm_%s.tmpl' % self.target.lower(), ctx, 'Makefile')
diff --git a/workspace_tools/export/iar.py b/workspace_tools/export/iar.py
deleted file mode 100644
index 3ba8e64..0000000
--- a/workspace_tools/export/iar.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2015 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import re
-import os
-from project_generator_definitions.definitions import ProGenDef
-
-from workspace_tools.export.exporters import Exporter
-from workspace_tools.targets import TARGET_MAP, TARGET_NAMES
-
-# If you wish to add a new target, add it to project_generator_definitions, and then
-# define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
-class IAREmbeddedWorkbench(Exporter):
- """
- Exporter class for IAR Systems. This class uses project generator.
- """
- # These 2 are currently for exporters backward compatiblity
- NAME = 'IAR'
- TOOLCHAIN = 'IAR'
- # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
- PROGEN_ACTIVE = True
-
- # backward compatibility with our scripts
- TARGETS = []
- for target in TARGET_NAMES:
- try:
- if (ProGenDef('iar').is_supported(str(TARGET_MAP[target])) or
- ProGenDef('iar').is_supported(TARGET_MAP[target].progen['target'])):
- TARGETS.append(target)
- except AttributeError:
- # target is not supported yet
- continue
-
- def generate(self):
- """ Generates the project files """
- project_data = self.progen_get_project_data()
- tool_specific = {}
- # Expand tool specific settings by IAR specific settings which are required
- try:
- if TARGET_MAP[self.target].progen['iar']['template']:
- tool_specific['iar'] = TARGET_MAP[self.target].progen['iar']
- except KeyError:
- # use default template
- # by the mbed projects
- tool_specific['iar'] = {
- # We currently don't use misc, template sets those for us
- # 'misc': {
- # 'cxx_flags': ['--no_rtti', '--no_exceptions'],
- # 'c_flags': ['--diag_suppress=Pa050,Pa084,Pa093,Pa082'],
- # 'ld_flags': ['--skip_dynamic_initialization'],
- # },
- 'template': [os.path.join(os.path.dirname(__file__), 'iar_template.ewp.tmpl')],
- }
-
- project_data['tool_specific'] = {}
- project_data['tool_specific'].update(tool_specific)
- self.progen_gen_file('iar_arm', project_data)
-
-# Currently not used, we should reuse folder_name to create virtual folders
-class IarFolder():
- """
- This is a recursive folder object.
- To present the folder structure in the IDE as it is presented on the disk.
- This can be used for uvision as well if you replace the __str__ method.
- Example:
- files: ./main.cpp, ./apis/I2C.h, ./mbed/common/I2C.cpp
- in the project this would look like:
- main.cpp
- common/I2C.cpp
- input:
- folder_level : folder path to current folder
- folder_name : name of current folder
- source_files : list of source_files (all must be in same directory)
- """
- def __init__(self, folder_level, folder_name, source_files):
- self.folder_level = folder_level
- self.folder_name = folder_name
- self.source_files = source_files
- self.sub_folders = {}
-
- def __str__(self):
- """
- converts the folder structue to IAR project format.
- """
- group_start = ""
- group_end = ""
- if self.folder_name != "":
- group_start = "\n%s\n" %(self.folder_name)
- group_end = "\n"
-
- str_content = group_start
- #Add files in current folder
- if self.source_files:
- for src in self.source_files:
- str_content += "\n$PROJ_DIR$/%s\n\n" % src
- #Add sub folders
- if self.sub_folders:
- for folder_name in self.sub_folders.iterkeys():
- str_content += self.sub_folders[folder_name].__str__()
-
- str_content += group_end
- return str_content
-
- def insert_file(self, source_input):
- """
- Inserts a source file into the folder tree
- """
- if self.source_files:
- #All source_files in a IarFolder must be in same directory.
- dir_sources = IarFolder.get_directory(self.source_files[0])
- #Check if sources are already at their deepest level.
- if not self.folder_level == dir_sources:
- _reg_exp = r"^" + re.escape(self.folder_level) + r"[/\\]?([^/\\]+)"
- folder_name = re.match(_reg_exp, dir_sources).group(1)
- self.sub_folders[folder_name] = IarFolder(os.path.join(self.folder_level, folder_name), folder_name, self.source_files)
- self.source_files = []
-
- dir_input = IarFolder.get_directory(source_input)
- if dir_input == self.folder_level:
- self.source_files.append(source_input)
- else:
- _reg_exp = r"^" + re.escape(self.folder_level) + r"[/\\]?([^/\\]+)"
- folder_name = re.match(_reg_exp, dir_input).group(1)
- if self.sub_folders.has_key(folder_name):
- self.sub_folders[folder_name].insert_file(source_input)
- else:
- if self.folder_level == "":
- #Top level exception
- self.sub_folders[folder_name] = IarFolder(folder_name, folder_name, [source_input])
- else:
- self.sub_folders[folder_name] = IarFolder(os.path.join(self.folder_level, folder_name), folder_name, [source_input])
-
- @staticmethod
- def get_directory(file_path):
- """
- Returns the directory of the file
- """
- return os.path.dirname(file_path)
diff --git a/workspace_tools/export/iar_nucleo_f746zg.ewp.tmpl b/workspace_tools/export/iar_nucleo_f746zg.ewp.tmpl
deleted file mode 100644
index 228fbf8..0000000
--- a/workspace_tools/export/iar_nucleo_f746zg.ewp.tmpl
+++ /dev/null
@@ -1,1917 +0,0 @@
-
-
-
- 2
-
- Debug
-
- ARM
-
- 1
-
- General
- 3
-
- 24
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ICCARM
- 2
-
- 31
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- AARM
- 2
-
- 9
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OBJCOPY
- 0
-
- 1
- 1
- 1
-
-
-
-
-
-
-
-
- CUSTOM
- 3
-
-
-
- 0
-
-
-
- BICOMP
- 0
-
-
-
- BUILDACTION
- 1
-
-
-
-
-
-
- ILINK
- 0
-
- 16
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- IARCHIVE
- 0
-
- 0
- 1
- 1
-
-
-
-
-
-
- BILINK
- 0
-
-
-
-
- Release
-
- ARM
-
- 0
-
- General
- 3
-
- 24
- 1
- 0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ICCARM
- 2
-
- 31
- 1
- 0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- AARM
- 2
-
- 9
- 1
- 0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OBJCOPY
- 0
-
- 1
- 1
- 0
-
-
-
-
-
-
-
-
- CUSTOM
- 3
-
-
-
- 0
-
-
-
- BICOMP
- 0
-
-
-
- BUILDACTION
- 1
-
-
-
-
-
-
- ILINK
- 0
-
- 16
- 1
- 0
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- IARCHIVE
- 0
-
- 0
- 1
- 0
-
-
-
-
-
-
- BILINK
- 0
-
-
-
-
-
-
diff --git a/workspace_tools/export/iar_rz_a1h.ewp.tmpl b/workspace_tools/export/iar_rz_a1h.ewp.tmpl
deleted file mode 100644
index 4d5338d..0000000
--- a/workspace_tools/export/iar_rz_a1h.ewp.tmpl
+++ /dev/null
@@ -1,925 +0,0 @@
-
-
-
- 2
-
- Debug
-
- ARM
-
- 1
-
- General
- 3
-
- 21
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ICCARM
- 2
-
- 28
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- AARM
- 2
-
- 8
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OBJCOPY
- 0
-
- 1
- 1
- 1
-
-
-
-
-
-
-
-
- CUSTOM
- 3
-
-
-
-
-
-
- BICOMP
- 0
-
-
-
- BUILDACTION
- 1
-
-
-
-
-
-
- ILINK
- 0
-
- 14
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- IARCHIVE
- 0
-
- 0
- 1
- 1
-
-
-
-
-
-
- BILINK
- 0
-
-
-
-
-$PROJ_DIR$/main.cpp
-
-
-env
-
-$PROJ_DIR$/env\test_env.cpp
-
-
-
-
-
diff --git a/workspace_tools/export/iar_template.ewp.tmpl b/workspace_tools/export/iar_template.ewp.tmpl
deleted file mode 100644
index 332c581..0000000
--- a/workspace_tools/export/iar_template.ewp.tmpl
+++ /dev/null
@@ -1,995 +0,0 @@
-
-
-
- 2
-
- Debug
-
- ARM
-
- 1
-
- General
- 3
-
- 22
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ICCARM
- 2
-
- 30
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- AARM
- 2
-
- 9
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- OBJCOPY
- 0
-
- 1
- 1
- 1
-
-
-
-
-
-
-
-
- CUSTOM
- 3
-
-
-
-
-
-
- BICOMP
- 0
-
-
-
- BUILDACTION
- 1
-
-
-
-
-
-
- ILINK
- 0
-
- 16
- 1
- 1
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- IARCHIVE
- 0
-
- 0
- 1
- 1
-
-
-
-
-
-
- BILINK
- 0
-
-
-
-
-$PROJ_DIR$/main.cpp
-
-
-env
-
-$PROJ_DIR$/env\test_env.cpp
-
-
-
-
-
diff --git a/workspace_tools/export/kds.py b/workspace_tools/export/kds.py
deleted file mode 100644
index 98f4161..0000000
--- a/workspace_tools/export/kds.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename
-
-
-class KDS(Exporter):
- NAME = 'Kinetis Design Studio'
- TOOLCHAIN = 'GCC_ARM'
-
- TARGETS = [
- 'K64F',
- 'K22F',
- ]
-
- def generate(self):
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- ctx = {
- 'name': self.program_name,
- 'include_paths': self.resources.inc_dirs,
- 'linker_script': self.resources.linker_script,
- 'object_files': self.resources.objects,
- 'libraries': libraries,
- 'symbols': self.get_symbols()
- }
- self.gen_file('kds_%s_project.tmpl' % self.target.lower(), ctx, '.project')
- self.gen_file('kds_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
- self.gen_file('kds_launch.tmpl', ctx, '%s.launch' % self.program_name)
diff --git a/workspace_tools/export/kds_k22f_cproject.tmpl b/workspace_tools/export/kds_k22f_cproject.tmpl
deleted file mode 100644
index 18aa0fb..0000000
--- a/workspace_tools/export/kds_k22f_cproject.tmpl
+++ /dev/null
@@ -1,306 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/kds_k22f_project.tmpl b/workspace_tools/export/kds_k22f_project.tmpl
deleted file mode 100644
index 1ab5ab3..0000000
--- a/workspace_tools/export/kds_k22f_project.tmpl
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
- {{name}}
- This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-KDS
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/kds_k64f_cproject.tmpl b/workspace_tools/export/kds_k64f_cproject.tmpl
deleted file mode 100644
index 18aa0fb..0000000
--- a/workspace_tools/export/kds_k64f_cproject.tmpl
+++ /dev/null
@@ -1,306 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/kds_k64f_project.tmpl b/workspace_tools/export/kds_k64f_project.tmpl
deleted file mode 100644
index 1ab5ab3..0000000
--- a/workspace_tools/export/kds_k64f_project.tmpl
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
- {{name}}
- This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-KDS
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
-
-
diff --git a/workspace_tools/export/kds_launch.tmpl b/workspace_tools/export/kds_launch.tmpl
deleted file mode 100644
index 1fe8bbd..0000000
--- a/workspace_tools/export/kds_launch.tmpl
+++ /dev/null
@@ -1,59 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/simplicityv3.py b/workspace_tools/export/simplicityv3.py
deleted file mode 100644
index 6c01dcf..0000000
--- a/workspace_tools/export/simplicityv3.py
+++ /dev/null
@@ -1,191 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import split,splitext, basename
-
-class Folder:
- def __init__(self, name):
- self.name = name
- self.children = []
-
- def contains(self, folderName):
- for child in self.children:
- if child.name == folderName:
- return True
- return False
-
- def __str__(self):
- retval = self.name + " "
- if len(self.children) > 0:
- retval += "[ "
- for child in self.children:
- retval += child.__str__()
- retval += " ]"
-
- return retval
-
- def findChild(self, folderName):
- for child in self.children:
- if child.name == folderName:
- return child
- return None
-
- def addChild(self, folderName):
- if folderName == '':
- return None
-
- if not self.contains(folderName):
- self.children.append(Folder(folderName))
-
- return self.findChild(folderName)
-
-class SimplicityV3(Exporter):
- NAME = 'SimplicityV3'
- TOOLCHAIN = 'GCC_ARM'
-
- TARGETS = [
- 'EFM32GG_STK3700',
- 'EFM32ZG_STK3200',
- 'EFM32LG_STK3600',
- 'EFM32WG_STK3800',
- 'EFM32HG_STK3400',
- 'EFM32PG_STK3401'
- ]
-
- PARTS = {
- 'EFM32GG_STK3700': 'com.silabs.mcu.si32.efm32.efm32gg.efm32gg990f1024',
- 'EFM32ZG_STK3200': 'com.silabs.mcu.si32.efm32.efm32zg.efm32zg222f32',
- 'EFM32LG_STK3600': 'com.silabs.mcu.si32.efm32.efm32lg.efm32lg990f256',
- 'EFM32WG_STK3800': 'com.silabs.mcu.si32.efm32.efm32wg.efm32wg990f256',
- 'EFM32HG_STK3400': 'com.silabs.mcu.si32.efm32.efm32hg.efm32hg322f64',
- 'EFM32PG_STK3401': 'com.silabs.mcu.si32.efm32.efm32pg1b.efm32pg1b200f256gm48'
- }
-
- KITS = {
- 'EFM32GG_STK3700': 'com.silabs.kit.si32.efm32.efm32gg.stk3700',
- 'EFM32ZG_STK3200': 'com.silabs.kit.si32.efm32.efm32zg.stk3200',
- 'EFM32LG_STK3600': 'com.silabs.kit.si32.efm32.efm32lg.stk3600',
- 'EFM32WG_STK3800': 'com.silabs.kit.si32.efm32.efm32wg.stk3800',
- 'EFM32HG_STK3400': 'com.silabs.kit.si32.efm32.efm32hg.slstk3400a',
- 'EFM32PG_STK3401': 'com.silabs.kit.si32.efm32.efm32pg.slstk3401a'
- }
-
- FILE_TYPES = {
- 'c_sources':'1',
- 'cpp_sources':'1',
- 's_sources':'1'
- }
-
- EXCLUDED_LIBS = [
- 'm',
- 'c',
- 'gcc',
- 'nosys',
- 'supc++',
- 'stdc++'
- ]
-
- DOT_IN_RELATIVE_PATH = False
-
- orderedPaths = Folder("Root")
-
- def check_and_add_path(self, path):
- levels = path.split('/')
- base = self.orderedPaths
- for level in levels:
- if base.contains(level):
- base = base.findChild(level)
- else:
- base.addChild(level)
- base = base.findChild(level)
-
-
- def generate(self):
- # "make" wants Unix paths
- self.resources.win_to_unix()
-
- main_files = []
-
- EXCLUDED_LIBS = [
- 'm',
- 'c',
- 'gcc',
- 'nosys',
- 'supc++',
- 'stdc++'
- ]
-
- for r_type in ['s_sources', 'c_sources', 'cpp_sources']:
- r = getattr(self.resources, r_type)
- if r:
- for source in r:
- self.check_and_add_path(split(source)[0])
-
- if not ('/' in source):
- main_files.append(source)
-
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- if l[3:] not in EXCLUDED_LIBS:
- libraries.append(l[3:])
-
- defines = []
- for define in self.get_symbols():
- if '=' in define:
- keyval = define.split('=')
- defines.append( (keyval[0], keyval[1]) )
- else:
- defines.append( (define, '') )
-
- self.check_and_add_path(split(self.resources.linker_script)[0])
-
- ctx = {
- 'name': self.program_name,
- 'main_files': main_files,
- 'recursiveFolders': self.orderedPaths,
- 'object_files': self.resources.objects,
- 'include_paths': self.resources.inc_dirs,
- 'library_paths': self.resources.lib_dirs,
- 'linker_script': self.resources.linker_script,
- 'libraries': libraries,
- 'symbols': self.get_symbols(),
- 'defines': defines,
- 'part': self.PARTS[self.target],
- 'kit': self.KITS[self.target],
- 'loopcount': 0
- }
-
- ## Strip main folder from include paths because ssproj is not capable of handling it
- if '.' in ctx['include_paths']:
- ctx['include_paths'].remove('.')
-
- '''
- Suppress print statements
- print('\n')
- print(self.target)
- print('\n')
- print(ctx)
- print('\n')
- print(self.orderedPaths)
- for path in self.orderedPaths.children:
- print(path.name + "\n")
- for bpath in path.children:
- print("\t" + bpath.name + "\n")
- '''
-
- self.gen_file('simplicityv3_slsproj.tmpl', ctx, '%s.slsproj' % self.program_name)
diff --git a/workspace_tools/export/simplicityv3_slsproj.tmpl b/workspace_tools/export/simplicityv3_slsproj.tmpl
deleted file mode 100644
index 4324eb1..0000000
--- a/workspace_tools/export/simplicityv3_slsproj.tmpl
+++ /dev/null
@@ -1,140 +0,0 @@
-
-
-{# Hierarchically include all folders into the project #}
- {%- for child in recursiveFolders.children recursive %}
-
- {%- if child.children -%}
- {{ loop(child.children) }}
- {%- endif %}
-
- {%- endfor %}
-
-{# Include all source files not belonging to a subfolder separately #}
- {%- for file in main_files -%}
-
- {%- endfor %}
-
-
-
-
-
-
-{# Add all include paths to the managed build compiler, paths relative to project #}
- {%- for path in include_paths %}
-
- {%- endfor %}
-{# Add all mbed-defined #Defines for the preprocessor #}
- {%- for define, value in defines %}
-
- {%- endfor %}
-{# Include all standard libraries that mbed requires #}
-
-
-
-
-
-
-
-{# Include exported libraries #}
- {%- for library in libraries %}
-
- {%- endfor %}
-{# Add library search paths #}
- {%- for path in library_paths %}
-
- {%- endfor %}
-{# Add in separate object files if needed #}
- {%- if object_files %}
-
- {%- endif %}
-{# Manually override linker ordering #}
- {%- if libraries %}
-
- {%- endif %}
-{# Define mbed-specific linker file #}
-
-
-{# Make sure to wrap main in order to get clock initialization done right #}
-
-
-{# For debug build, don't apply optimizations #}
-
-
-
-
-
-
-
-
-{# Add all include paths to the managed build compiler, paths relative to project #}
- {%- for path in include_paths %}
-
- {%- endfor %}
-{# Add all mbed-defined #Defines for the preprocessor #}
- {%- for define, value in defines %}
-
- {%- endfor %}
-{# Include all standard libraries that mbed requires #}
-
-
-
-
-
-
-{# Include exported libraries #}
- {%- for library in libraries %}
-
- {%- endfor %}
-{# Add library search paths #}
- {%- for path in library_paths %}
-
- {%- endfor %}
-{# Add in separate object files if needed #}
- {%- if object_files %}
-
- {%- endif %}
-{# Manually override linker ordering #}
- {%- if libraries %}
-
- {%- endif %}
-{# Define mbed-specific linker file #}
-
-
-{# Make sure to wrap main in order to get clock initialization done right #}
-
-
-{# Use optimize for size on release build #}
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/sw4stm32.py b/workspace_tools/export/sw4stm32.py
deleted file mode 100644
index cc383c3..0000000
--- a/workspace_tools/export/sw4stm32.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2016 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import splitext, basename, join
-from random import randint
-from workspace_tools.utils import mkdir
-
-
-class Sw4STM32(Exporter):
- NAME = 'Sw4STM32'
- TOOLCHAIN = 'GCC_ARM'
-
- BOARDS = {
- # 'DISCO_F051R8': {'name': 'STM32F0DISCOVERY', 'mcuId': 'STM32F051R8Tx'},
- # 'DISCO_F303VC': {'name': 'STM32F3DISCOVERY', 'mcuId': 'STM32F303VCTx'},
- 'DISCO_F334C8': {'name': 'STM32F3348DISCOVERY', 'mcuId': 'STM32F334C8Tx'},
- # 'DISCO_F401VC': {'name': 'STM32F401C-DISCO', 'mcuId': 'STM32F401VCTx'},
- 'DISCO_F407VG': {'name': 'STM32F4DISCOVERY', 'mcuId': 'STM32F407VGTx'},
- 'DISCO_F429ZI': {'name': 'STM32F429I-DISCO', 'mcuId': 'STM32F429ZITx'},
- 'DISCO_F746NG': {'name': 'STM32F746G-DISCO', 'mcuId': 'STM32F746NGHx'},
- 'DISCO_L053C8': {'name': 'STM32L0538DISCOVERY', 'mcuId': 'STM32L053C8Tx'},
- 'DISCO_L476VG': {'name': 'STM32L476G-DISCO', 'mcuId': 'STM32L476VGTx'},
- 'DISCO_F469NI': {'name': 'DISCO-F469NI', 'mcuId': 'STM32F469NIHx'},
- 'NUCLEO_F030R8': {'name': 'NUCLEO-F030R8', 'mcuId': 'STM32F030R8Tx'},
- 'NUCLEO_F070RB': {'name': 'NUCLEO-F070RB', 'mcuId': 'STM32F070RBTx'},
- 'NUCLEO_F072RB': {'name': 'NUCLEO-F072RB', 'mcuId': 'STM32F072RBTx'},
- 'NUCLEO_F091RC': {'name': 'NUCLEO-F091RC', 'mcuId': 'STM32F091RCTx'},
- 'NUCLEO_F103RB': {'name': 'NUCLEO-F103RB', 'mcuId': 'STM32F103RBTx'},
- 'NUCLEO_F302R8': {'name': 'NUCLEO-F302R8', 'mcuId': 'STM32F302R8Tx'},
- 'NUCLEO_F303RE': {'name': 'NUCLEO-F303RE', 'mcuId': 'STM32F303RETx'},
- 'NUCLEO_F334R8': {'name': 'NUCLEO-F334R8', 'mcuId': 'STM32F334R8Tx'},
- 'NUCLEO_F401RE': {'name': 'NUCLEO-F401RE', 'mcuId': 'STM32F401RETx'},
- 'NUCLEO_F411RE': {'name': 'NUCLEO-F411RE', 'mcuId': 'STM32F411RETx'},
- 'NUCLEO_F446RE': {'name': 'NUCLEO-F446RE', 'mcuId': 'STM32F446RETx'},
- 'NUCLEO_L031K6': {'name': 'NUCLEO-L031K6', 'mcuId': 'STM32L031K6Tx'},
- 'NUCLEO_L053R8': {'name': 'NUCLEO-L053R8', 'mcuId': 'STM32L053R8Tx'},
- 'NUCLEO_L073RZ': {'name': 'NUCLEO-L073RZ', 'mcuId': 'STM32L073RZTx'},
- 'NUCLEO_L152RE': {'name': 'NUCLEO-L152RE', 'mcuId': 'STM32L152RETx'},
- 'NUCLEO_L476RG': {'name': 'NUCLEO-L476RG', 'mcuId': 'STM32L476RGTx'},
- 'NUCLEO_F031K6': {'name': 'NUCLEO-F031K6', 'mcuId': 'STM32F031K6Tx'},
- 'NUCLEO_F042K6': {'name': 'NUCLEO-F042K6', 'mcuId': 'STM32F042K6Tx'},
- 'NUCLEO_F303K8': {'name': 'NUCLEO-F303K8', 'mcuId': 'STM32F303K8Tx'},
- 'NUCLEO_F410RB': {'name': 'NUCLEO-F410RB', 'mcuId': 'STM32F410RBTx'},
- }
-
- TARGETS = BOARDS.keys()
-
- def __gen_dir(self, dirname):
- settings = join(self.inputDir, dirname)
- mkdir(settings)
-
- def __generate_uid(self):
- return "%0.9u" % randint(0, 999999999)
-
- def generate(self):
- libraries = []
- for lib in self.resources.libraries:
- l, _ = splitext(basename(lib))
- libraries.append(l[3:])
-
- ctx = {
- 'name': self.program_name,
- 'include_paths': self.resources.inc_dirs,
- 'linker_script': self.resources.linker_script,
- 'library_paths': self.resources.lib_dirs,
- 'object_files': self.resources.objects,
- 'libraries': libraries,
- 'symbols': self.get_symbols(),
- 'board_name': self.BOARDS[self.target.upper()]['name'],
- 'mcu_name': self.BOARDS[self.target.upper()]['mcuId'],
- 'debug_config_uid': self.__generate_uid(),
- 'debug_tool_compiler_uid': self.__generate_uid(),
- 'debug_tool_compiler_input_uid': self.__generate_uid(),
- 'release_config_uid': self.__generate_uid(),
- 'release_tool_compiler_uid': self.__generate_uid(),
- 'release_tool_compiler_input_uid': self.__generate_uid(),
- 'uid': self.__generate_uid()
- }
-
- self.__gen_dir('.settings')
- self.gen_file('sw4stm32_language_settings_commom.tmpl', ctx, '.settings/language.settings.xml')
- self.gen_file('sw4stm32_project_common.tmpl', ctx, '.project')
- self.gen_file('sw4stm32_cproject_common.tmpl', ctx, '.cproject')
diff --git a/workspace_tools/export/sw4stm32_cproject_common.tmpl b/workspace_tools/export/sw4stm32_cproject_common.tmpl
deleted file mode 100644
index 0128f69..0000000
--- a/workspace_tools/export/sw4stm32_cproject_common.tmpl
+++ /dev/null
@@ -1,212 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/sw4stm32_language_settings_commom.tmpl b/workspace_tools/export/sw4stm32_language_settings_commom.tmpl
deleted file mode 100644
index d138720..0000000
--- a/workspace_tools/export/sw4stm32_language_settings_commom.tmpl
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/sw4stm32_project_common.tmpl b/workspace_tools/export/sw4stm32_project_common.tmpl
deleted file mode 100644
index 2e0378c..0000000
--- a/workspace_tools/export/sw4stm32_project_common.tmpl
+++ /dev/null
@@ -1,28 +0,0 @@
-
-
- {{name}}
-
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.genmakebuilder
- clean,full,incremental,
-
-
-
-
- org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
- full,incremental,
-
-
-
-
-
- org.eclipse.cdt.core.cnature
- org.eclipse.cdt.core.ccnature
- org.eclipse.cdt.managedbuilder.core.managedBuildNature
- org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
- fr.ac6.mcu.ide.core.MCUProjectNature
-
-
diff --git a/workspace_tools/export/uvision.uvproj.tmpl b/workspace_tools/export/uvision.uvproj.tmpl
deleted file mode 100644
index ab8827b..0000000
--- a/workspace_tools/export/uvision.uvproj.tmpl
+++ /dev/null
@@ -1,403 +0,0 @@
-
-
-
- 1.1
-
- ###This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-Uvision
-
-
-
- mbed FRDM-KL25Z
- 0x4
- ARM-ADS
-
-
- MKL25Z128xxx4
- Freescale Semiconductor
- IRAM(0x1FFFF000-0x1FFFFFFF) IRAM2(0x20000000-0x20002FFF) IROM(0x0-0x1FFFF) CLOCK(8000000) CPUTYPE("Cortex-M0+") ELITTLE
-
- "STARTUP\Freescale\Kinetis\startup_MKL25Z4.s" ("Freescale MKL25Zxxxxxx4 Startup Code")
- ULP2CM3(-O2510 -S0 -C0 -FO15 -FD20000000 -FC800 -FN1 -FF0MK_P128_48MHZ -FS00 -FL020000)
- 6533
- MKL25Z4.H
-
-
-
-
-
-
-
-
-
- SFD\Freescale\Kinetis\MKL25Z4.sfr
- 0
-
-
-
- Freescale\Kinetis\
- Freescale\Kinetis\
-
- 0
- 0
- 0
- 0
- 1
-
- .\build\
- MBED_11
- 1
- 0
- 0
- 1
- 1
- .\build\
- 1
- 0
- 0
-
- 0
- 0
-
-
- 0
- 0
- 0
- 0
-
-
- 0
- 0
-
-
- 0
- 0
-
-
- 1
- 0
- fromelf --bin --output=@L.bin !L
-
- 0
- 0
-
- 0
-
-
-
- 0
- 0
- 0
- 0
- 0
- 1
- 0
- 0
- 0
- 0
- 3
-
-
-
-
- SARMCM3.DLL
-
- DARMCM1.DLL
- -pCM0+
- SARMCM3.DLL
-
- TARMCM1.DLL
- -pCM0+
-
-
-
- 1
- 0
- 0
- 0
- 16
-
-
- 0
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 0
-
-
- 1
- 1
- 1
- 1
- 1
- 1
- 0
- 1
-
- 0
- 14
-
-
-
-
-
-
-
-
-
-
-
-
-
- BIN\CMSIS_AGDI.dll
-
-
-
-
- 1
- 0
- 0
- 1
- 1
- 4105
-
- BIN\CMSIS_AGDI.dll
- "" ()
-
-
-
-
- 0
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 0
- 1
- 1
- 0
- 1
- 1
- 0
- 0
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 0
- 0
- "Cortex-M0+"
-
- 0
- 0
- 0
- 1
- 1
- 0
- 0
- 0
- 1
- 0
- 8
- 0
- 0
- 0
- 3
- 3
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 1
- 0
- 0
- 0
- 0
- 1
- 0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x1ffff000
- 0x1000
-
-
- 1
- 0x0
- 0x20000
-
-
- 0
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x20000
-
-
- 1
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x20000000
- 0x3000
-
-
- 0
- 0x0
- 0x0
-
-
-
-
-
- 1
- 1
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 1
-
- --gnu --no_rtti
-
-
-
-
-
-
- 1
- 0
- 0
- 0
- 0
- 0
- 0
- 0
-
-
-
-
-
-
-
-
- 0
- 0
- 0
- 0
- 1
- 0
- 0x00000000
- 0x10000000
- mbed\TARGET_KL25Z\TOOLCHAIN_ARM_STD\MKL25Z4.sct
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/uvision4.py b/workspace_tools/export/uvision4.py
deleted file mode 100644
index 7f26bfd..0000000
--- a/workspace_tools/export/uvision4.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from os.path import basename, join, dirname
-from project_generator_definitions.definitions import ProGenDef
-
-from workspace_tools.export.exporters import Exporter
-from workspace_tools.targets import TARGET_MAP, TARGET_NAMES
-
-# If you wish to add a new target, add it to project_generator_definitions, and then
-# define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
-# There are 2 default mbed templates (predefined settings) uvision.uvproj and uvproj_microlib.uvproj.tmpl
-class Uvision4(Exporter):
- """
- Exporter class for uvision. This class uses project generator.
- """
- # These 2 are currently for exporters backward compatiblity
- NAME = 'uVision4'
- TOOLCHAIN = 'ARM'
- # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
- PROGEN_ACTIVE = True
-
- # backward compatibility with our scripts
- TARGETS = []
- for target in TARGET_NAMES:
- try:
- if (ProGenDef('uvision').is_supported(str(TARGET_MAP[target])) or
- ProGenDef('uvision').is_supported(TARGET_MAP[target].progen['target'])):
- TARGETS.append(target)
- except AttributeError:
- # target is not supported yet
- continue
-
- def get_toolchain(self):
- return TARGET_MAP[self.target].default_toolchain
-
- def generate(self):
- """ Generates the project files """
- project_data = self.progen_get_project_data()
- tool_specific = {}
- # Expand tool specific settings by uvision specific settings which are required
- try:
- if TARGET_MAP[self.target].progen['uvision']['template']:
- tool_specific['uvision'] = TARGET_MAP[self.target].progen['uvision']
- except KeyError:
- # use default template
- # by the mbed projects
- tool_specific['uvision'] = {
- 'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')],
- }
-
- project_data['tool_specific'] = {}
- project_data['tool_specific'].update(tool_specific)
-
- # get flags from toolchain and apply
- project_data['tool_specific']['uvision']['misc'] = {}
- project_data['tool_specific']['uvision']['misc']['asm_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['asm']))
- project_data['tool_specific']['uvision']['misc']['c_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['c']))
- # not compatible with c99 flag set in the template
- project_data['tool_specific']['uvision']['misc']['c_flags'].remove("--c99")
- project_data['tool_specific']['uvision']['misc']['cxx_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['ld']))
- project_data['tool_specific']['uvision']['misc']['ld_flags'] = self.toolchain.flags['ld']
-
- i = 0
- for macro in project_data['common']['macros']:
- # armasm does not like floating numbers in macros, timestamp to int
- if macro.startswith('MBED_BUILD_TIMESTAMP'):
- timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
- project_data['common']['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
- # armasm does not even accept MACRO=string
- if macro.startswith('MBED_USERNAME'):
- project_data['common']['macros'].pop(i)
- i += 1
- project_data['common']['macros'].append('__ASSERT_MSG')
- self.progen_gen_file('uvision', project_data)
-
diff --git a/workspace_tools/export/uvision5.py b/workspace_tools/export/uvision5.py
deleted file mode 100644
index a881379..0000000
--- a/workspace_tools/export/uvision5.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2016 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from os.path import basename, join, dirname
-from project_generator_definitions.definitions import ProGenDef
-
-from workspace_tools.export.exporters import Exporter
-from workspace_tools.targets import TARGET_MAP, TARGET_NAMES
-
-# If you wish to add a new target, add it to project_generator_definitions, and then
-# define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``)
-# There are 2 default mbed templates (predefined settings) uvision.uvproj and uvproj_microlib.uvproj.tmpl
-class Uvision5(Exporter):
- """
- Exporter class for uvision5. This class uses project generator.
- """
- # These 2 are currently for exporters backward compatiblity
- NAME = 'uVision5'
- TOOLCHAIN = 'ARM'
- # PROGEN_ACTIVE contains information for exporter scripts that this is using progen
- PROGEN_ACTIVE = True
-
- # backward compatibility with our scripts
- TARGETS = []
- for target in TARGET_NAMES:
- try:
- if (ProGenDef('uvision5').is_supported(str(TARGET_MAP[target])) or
- ProGenDef('uvision5').is_supported(TARGET_MAP[target].progen['target'])):
- TARGETS.append(target)
- except AttributeError:
- # target is not supported yet
- continue
-
- def get_toolchain(self):
- return TARGET_MAP[self.target].default_toolchain
-
- def generate(self):
- """ Generates the project files """
- project_data = self.progen_get_project_data()
- tool_specific = {}
- # Expand tool specific settings by uvision specific settings which are required
- try:
- if TARGET_MAP[self.target].progen['uvision5']['template']:
- tool_specific['uvision5'] = TARGET_MAP[self.target].progen['uvision5']
- except KeyError:
- # use default template
- # by the mbed projects
- tool_specific['uvision5'] = {
- 'template': [join(dirname(__file__), 'uvision.uvproj.tmpl')],
- }
-
- project_data['tool_specific'] = {}
- project_data['tool_specific'].update(tool_specific)
-
- # get flags from toolchain and apply
- project_data['tool_specific']['uvision5']['misc'] = {}
- project_data['tool_specific']['uvision5']['misc']['asm_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['asm']))
- project_data['tool_specific']['uvision5']['misc']['c_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['c']))
- # not compatible with c99 flag set in the template
- project_data['tool_specific']['uvision5']['misc']['c_flags'].remove("--c99")
- project_data['tool_specific']['uvision5']['misc']['cxx_flags'] = list(set(self.toolchain.flags['common'] + self.toolchain.flags['ld']))
- project_data['tool_specific']['uvision5']['misc']['ld_flags'] = self.toolchain.flags['ld']
-
- i = 0
- for macro in project_data['common']['macros']:
- # armasm does not like floating numbers in macros, timestamp to int
- if macro.startswith('MBED_BUILD_TIMESTAMP'):
- timestamp = macro[len('MBED_BUILD_TIMESTAMP='):]
- project_data['common']['macros'][i] = 'MBED_BUILD_TIMESTAMP=' + str(int(float(timestamp)))
- # armasm does not even accept MACRO=string
- if macro.startswith('MBED_USERNAME'):
- project_data['common']['macros'].pop(i)
- i += 1
- project_data['common']['macros'].append('__ASSERT_MSG')
- self.progen_gen_file('uvision5', project_data)
-
diff --git a/workspace_tools/export/uvision_microlib.uvproj.tmpl b/workspace_tools/export/uvision_microlib.uvproj.tmpl
deleted file mode 100644
index fd0d552..0000000
--- a/workspace_tools/export/uvision_microlib.uvproj.tmpl
+++ /dev/null
@@ -1,413 +0,0 @@
-
-
-
- 1.1
-
- ###This file was automagically generated by mbed.org. For more information, see http://mbed.org/handbook/Exporting-To-Uvision
-
-
-
- mbed FRDM-KL05Z
- 0x4
- ARM-ADS
-
-
- MKL05Z32xxx4
- Freescale Semiconductor
- IRAM(0x1FFFFC00-0x1FFFFFFF) IRAM2(0x20000000-0x20000BFF) IROM(0x0-0x07FFF) CLOCK(8000000) CPUTYPE("Cortex-M0+") ELITTLE
-
- "STARTUP\Freescale\Kinetis\startup_MKL05Z4.s" ("Freescale MKL05Zxxxxxx4 Startup Code")
- ULP2CM3(-O2510 -S0 -C0 -FO15 -FD20000000 -FC800 -FN1 -FF0MK_P32_48MHZ -FS00 -FL08000)
- 6544
- MKL05Z4.H
-
-
-
-
-
-
-
-
-
- SFD\Freescale\Kinetis\MKL05Z4.sfr
- 0
-
-
-
- Freescale\Kinetis\
- Freescale\Kinetis\
-
- 0
- 0
- 0
- 0
- 1
-
- .\build\
- MBED_11
- 1
- 0
- 0
- 1
- 1
- .\build\
- 1
- 0
- 0
-
- 0
- 0
-
-
- 0
- 0
- 0
- 0
-
-
- 0
- 0
-
-
- 0
- 0
-
-
- 1
- 0
- fromelf --bin --output=@L.bin !L
-
- 0
- 0
-
- 0
-
-
-
- 0
- 0
- 0
- 0
- 0
- 1
- 0
- 0
- 0
- 0
- 3
-
-
-
-
- SARMCM3.DLL
-
- DARMCM1.DLL
- -pCM0+
- SARMCM3.DLL
-
- TARMCM1.DLL
- -pCM0+
-
-
-
- 1
- 0
- 0
- 0
- 16
-
-
- 0
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 0
-
-
- 1
- 1
- 1
- 1
- 1
- 1
- 0
- 1
-
- 0
- 14
-
-
-
-
-
-
-
-
-
-
-
-
-
- BIN\CMSIS_AGDI.dll
-
-
-
-
- 1
- 0
- 0
- 1
- 1
- 4105
-
- BIN\CMSIS_AGDI.dll
- "" ()
-
-
-
-
- 0
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 0
- 1
- 1
- 0
- 1
- 1
- 0
- 0
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 1
- 0
- 0
- "Cortex-M0+"
-
- 0
- 0
- 0
- 1
- 1
- 0
- 0
- 0
- 1
- 0
- 8
- 1
- 0
- 0
- 3
- 3
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 1
- 0
- 0
- 0
- 0
- 1
- 0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x1ffffc00
- 0x400
-
-
- 1
- 0x0
- 0x8000
-
-
- 0
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x0
-
-
- 1
- 0x0
- 0x8000
-
-
- 1
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x0
- 0x0
-
-
- 0
- 0x1ffffc00
- 0x400
-
-
- 0
- 0x0
- 0x0
-
-
-
-
-
- 1
- 1
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 0
- 1
-
- --gnu --no_rtti
-
-
- .; env; mbed;
-
-
-
- 1
- 0
- 0
- 0
- 0
- 0
- 0
- 0
-
-
-
-
-
-
-
-
- 0
- 0
- 0
- 0
- 1
- 0
- 0x00000000
- 0x10000000
- None
-
-
-
-
-
-
-
-
-
-
-
-
-
- src
-
-
-
- main.cpp
- 8
- main.cpp
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/workspace_tools/export/zip.py b/workspace_tools/export/zip.py
deleted file mode 100644
index b9828a6..0000000
--- a/workspace_tools/export/zip.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from exporters import Exporter
-from os.path import basename
-
-
-class ZIP(Exporter):
- NAME = 'ZIP'
-
- TARGETS = [
- ]
-
- USING_MICROLIB = [
- ]
-
- FILE_TYPES = {
- 'c_sources':'1',
- 'cpp_sources':'8',
- 's_sources':'2'
- }
-
- def get_toolchain(self):
- return 'uARM' if (self.target in self.USING_MICROLIB) else 'ARM'
-
- def generate(self):
- return True
-
\ No newline at end of file
diff --git a/workspace_tools/export_test.py b/workspace_tools/export_test.py
deleted file mode 100644
index 6295d03..0000000
--- a/workspace_tools/export_test.py
+++ /dev/null
@@ -1,323 +0,0 @@
-#!/usr/bin/env python
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import sys
-from os.path import join, abspath, dirname, exists
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-from shutil import move
-
-from workspace_tools.paths import *
-from workspace_tools.utils import mkdir, cmd
-from workspace_tools.export import export, setup_user_prj
-
-
-USR_PRJ_NAME = "usr_prj"
-USER_PRJ = join(EXPORT_WORKSPACE, USR_PRJ_NAME)
-USER_SRC = join(USER_PRJ, "src")
-
-
-def setup_test_user_prj():
- if exists(USER_PRJ):
- print 'Test user project already generated...'
- return
-
- setup_user_prj(USER_PRJ, join(TEST_DIR, "rtos", "mbed", "basic"), [join(LIB_DIR, "rtos"), join(LIB_DIR, "tests", "mbed", "env")])
-
- # FAKE BUILD URL
- open(join(USER_SRC, "mbed.bld"), 'w').write("http://mbed.org/users/mbed_official/code/mbed/builds/976df7c37ad5\n")
-
-
-def fake_build_url_resolver(url):
- # FAKE BUILD URL: Ignore the URL, always return the path to the mbed library
- return {'path':MBED_LIBRARIES, 'name':'mbed'}
-
-
-def test_export(toolchain, target, expected_error=None):
- if toolchain is None and target is None:
- base_dir = join(EXPORT_TMP, "zip")
- else:
- base_dir = join(EXPORT_TMP, toolchain, target)
- temp_dir = join(base_dir, "temp")
- mkdir(temp_dir)
-
- zip_path, report = export(USER_PRJ, USR_PRJ_NAME, toolchain, target, base_dir, temp_dir, False, None, fake_build_url_resolver)
-
- if report['success']:
- move(zip_path, join(EXPORT_DIR, "export_%s_%s.zip" % (toolchain, target)))
- print "[OK]"
- else:
- if expected_error is None:
- print '[ERRROR] %s' % report['errormsg']
- else:
- if (zip_path is None) and (expected_error in report['errormsg']):
- print '[OK]'
- else:
- print '[ERROR]'
- print ' zip:', zip_path
- print ' msg:', report['errormsg']
-
-
-if __name__ == '__main__':
- setup_test_user_prj()
-
- for toolchain, target in [
- ('zip', 'LPC1768'),
-
- ('emblocks', 'LPC1768'),
- ('emblocks', 'LPC1549'),
- ('emblocks', 'LPC1114'),
- ('emblocks', 'LPC11U35_401'),
- ('emblocks', 'LPC11U35_501'),
- ('emblocks', 'LPCCAPPUCCINO'),
- ('emblocks', 'LPC2368'),
- ('emblocks', 'STM32F407'),
- ('emblocks', 'DISCO_F100RB'),
- ('emblocks', 'DISCO_F051R8'),
- ('emblocks', 'DISCO_F407VG'),
- ('emblocks', 'DISCO_F303VC'),
- ('emblocks', 'NRF51822'),
- ('emblocks', 'NUCLEO_F401RE'),
- ('emblocks', 'NUCLEO_F410RB'),
- ('emblocks', 'NUCLEO_F411RE'),
- ('emblocks', 'MTS_MDOT_F405RG'),
- ('emblocks', 'MTS_MDOT_F411RE'),
-
- ('coide', 'KL05Z'),
- ('coide', 'KL25Z'),
- ('coide', 'LPC1768'),
- ('coide', 'ARCH_PRO'),
- ('coide', 'DISCO_F407VG'),
- ('coide', 'NUCLEO_F401RE'),
- ('coide', 'NUCLEO_F410RB'),
- ('coide', 'NUCLEO_F411RE'),
- ('coide', 'DISCO_F429ZI'),
- #('coide', 'DISCO_F469NI'), removed because template not available
- ('coide', 'NUCLEO_F334R8'),
- ('coide', 'MTS_MDOT_F405RG'),
- ('coide', 'MTS_MDOT_F411RE'),
-
- ('uvision', 'LPC1768'),
- ('uvision', 'LPC11U24'),
- ('uvision', 'LPC11U35_401'),
- ('uvision', 'LPC11U35_501'),
- ('uvision', 'KL25Z'),
- ('uvision', 'LPC1347'),
- ('uvision', 'LPC1114'),
- ('uvision', 'LPC4088'),
- ('uvision', 'LPC4088_DM'),
- ('uvision', 'LPC4337'),
- ('uvision', 'LPC824'),
- ('uvision', 'SSCI824'),
- ('uvision', 'HRM1017'),
-
- ('uvision', 'B96B_F446VE'),
- ('uvision', 'NUCLEO_F030R8'),
- ('uvision', 'NUCLEO_F031K6'),
- ('uvision', 'NUCLEO_F042K6'),
- ('uvision', 'NUCLEO_F070RB'),
- ('uvision', 'NUCLEO_F072RB'),
- ('uvision', 'NUCLEO_F091RC'),
- ('uvision', 'NUCLEO_F103RB'),
- ('uvision', 'NUCLEO_F302R8'),
- ('uvision', 'NUCLEO_F303K8'),
- ('uvision', 'NUCLEO_F303RE'),
- ('uvision', 'NUCLEO_F334R8'),
- ('uvision', 'NUCLEO_F401RE'),
- ('uvision', 'NUCLEO_F410RB'),
- ('uvision', 'NUCLEO_F411RE'),
- ('uvision', 'NUCLEO_F446RE'),
- ('uvision', 'NUCLEO_L031K6'),
- ('uvision', 'NUCLEO_L053R8'),
- ('uvision', 'NUCLEO_L073RZ'),
- ('uvision', 'NUCLEO_L152RE'),
- ('uvision', 'NUCLEO_L476RG'),
- ('uvision', 'MTS_MDOT_F405RG'),
- ('uvision', 'MAXWSNENV'),
- ('uvision', 'MAX32600MBED'),
- ('uvision', 'DISCO_F051R8'),
- ('uvision', 'DISCO_F103RB'),
- ('uvision', 'DISCO_F303VC'),
- ('uvision', 'DISCO_L053C8'),
- ('uvision', 'DISCO_F334C8'),
- ('uvision', 'DISCO_F407VG'),
- ('uvision', 'DISCO_F429ZI'),
- ('uvision', 'DISCO_F746NG'),
- ('uvision', 'DISCO_F469NI'),
- ('uvision', 'DISCO_L476VG'),
- ('uvision', 'MOTE_L152RC'),
-
- ('lpcxpresso', 'LPC1768'),
- ('lpcxpresso', 'LPC4088'),
- ('lpcxpresso', 'LPC4088_DM'),
- ('lpcxpresso', 'LPC1114'),
- ('lpcxpresso', 'LPC11U35_401'),
- ('lpcxpresso', 'LPC11U35_501'),
- ('lpcxpresso', 'LPCCAPPUCCINO'),
- ('lpcxpresso', 'LPC1549'),
- ('lpcxpresso', 'LPC11U68'),
-
- # Linux path: /home/emimon01/bin/gcc-arm/bin/
- # Windows path: C:/arm-none-eabi-gcc-4_7/bin/
- ('gcc_arm', 'LPC1768'),
- ('gcc_arm', 'LPC4088_DM'),
- ('gcc_arm', 'LPC1549'),
- ('gcc_arm', 'LPC1114'),
- ('gcc_arm', 'LPC11U35_401'),
- ('gcc_arm', 'LPC11U35_501'),
- ('gcc_arm', 'LPCCAPPUCCINO'),
- ('gcc_arm', 'LPC2368'),
- ('gcc_arm', 'LPC2460'),
- ('gcc_arm', 'LPC824'),
- ('gcc_arm', 'SSCI824'),
-
- ('gcc_arm', 'B96B_F446VE'),
- ('gcc_arm', 'STM32F407'),
- ('gcc_arm', 'DISCO_F100RB'),
- ('gcc_arm', 'DISCO_F051R8'),
- ('gcc_arm', 'DISCO_F407VG'),
- ('gcc_arm', 'DISCO_F303VC'),
- ('gcc_arm', 'DISCO_L053C8'),
- ('gcc_arm', 'DISCO_F334C8'),
- ('gcc_arm', 'DISCO_L053C8'),
- ('gcc_arm', 'DISCO_F429ZI'),
- ('gcc_arm', 'DISCO_F746NG'),
- ('gcc_arm', 'NUCLEO_F031K6'),
- ('gcc_arm', 'NUCLEO_F042K6'),
- ('gcc_arm', 'NRF51822'),
- ('gcc_arm', 'RBLAB_BLENANO'),
- ('gcc_arm', 'HRM1017'),
- ('gcc_arm', 'NUCLEO_F401RE'),
- ('gcc_arm', 'NUCLEO_F410RB'),
- ('gcc_arm', 'NUCLEO_F411RE'),
- ('gcc_arm', 'NUCLEO_F446RE'),
- ('gcc_arm', 'ELMO_F411RE'),
- ('gcc_arm', 'DISCO_F469NI'),
- ('gcc_arm', 'NUCLEO_F334R8'),
- ('gcc_arm', 'NUCLEO_L031K6'),
- ('gcc_arm', 'MAX32600MBED'),
- ('gcc_arm', 'MTS_MDOT_F405RG'),
- ('gcc_arm', 'MTS_MDOT_F411RE'),
- ('gcc_arm', 'RZ_A1H'),
- ('gcc_arm', 'MAXWSNENV'),
- ('gcc_arm', 'MAX32600MBED'),
- ('gcc_arm', 'ARCH_BLE'),
- ('gcc_arm', 'ARCH_MAX'),
- ('gcc_arm', 'ARCH_PRO'),
- ('gcc_arm', 'DELTA_DFCM_NNN40'),
- ('gcc_arm', 'K20D50M'),
- ('gcc_arm', 'K22F'),
- ('gcc_arm', 'K64F'),
- ('gcc_arm', 'KL05Z'),
- ('gcc_arm', 'KL25Z'),
- ('gcc_arm', 'KL43Z'),
- ('gcc_arm', 'KL46Z'),
- ('gcc_arm', 'EFM32GG_STK3700'),
- ('gcc_arm', 'EFM32LG_STK3600'),
- ('gcc_arm', 'EFM32WG_STK3800'),
- ('gcc_arm', 'EFM32ZG_STK3200'),
- ('gcc_arm', 'EFM32HG_STK3400'),
- ('gcc_arm', 'EFM32PG_STK3401'),
-
- ('ds5_5', 'LPC1768'),
- ('ds5_5', 'LPC11U24'),
- ('ds5_5', 'RZ_A1H'),
-
- ('iar', 'LPC1768'),
- ('iar', 'LPC4088_DM'),
- ('iar', 'LPC1347'),
-
- ('iar', 'B96B_F446VE'),
- ('iar', 'NUCLEO_F030R8'),
- ('iar', 'NUCLEO_F031K6'),
- ('iar', 'NUCLEO_F042K6'),
- ('iar', 'NUCLEO_F070RB'),
- ('iar', 'NUCLEO_F072RB'),
- ('iar', 'NUCLEO_F091RC'),
- ('iar', 'NUCLEO_F302R8'),
- ('iar', 'NUCLEO_F303K8'),
- ('iar', 'NUCLEO_F303RE'),
- ('iar', 'NUCLEO_F334R8'),
- ('iar', 'NUCLEO_F401RE'),
- ('iar', 'NUCLEO_F410RB'),
- ('iar', 'NUCLEO_F411RE'),
- ('iar', 'NUCLEO_F446RE'),
- ('iar', 'NUCLEO_L031K6'),
- ('iar', 'NUCLEO_L053R8'),
- ('iar', 'NUCLEO_L073RZ'),
- ('iar', 'NUCLEO_L152RE'),
- ('iar', 'NUCLEO_L476RG'),
- ('iar', 'DISCO_L053C8'),
- ('iar', 'DISCO_F334C8'),
- ('iar', 'DISCO_F429ZI'),
- ('iar', 'DISCO_F469NI'),
- ('iar', 'DISCO_F746NG'),
- ('iar', 'DISCO_L476VG'),
- ('iar', 'STM32F407'),
- ('iar', 'MTS_MDOT_F405RG'),
- ('iar', 'MTS_MDOT_F411RE'),
- ('iar', 'MAXWSNENV'),
- ('iar', 'MAX32600MBED'),
- ('iar', 'MOTE_L152RC'),
- ('iar', 'RZ_A1H'),
-
- # ('sw4stm32', 'DISCO_F051R8'),
- # ('sw4stm32', 'DISCO_F100RB'),
- ('sw4stm32', 'DISCO_F303VC'),
- ('sw4stm32', 'DISCO_F334C8'),
- # ('sw4stm32', 'DISCO_F401VC'),
- ('sw4stm32', 'DISCO_F407VG'),
- ('sw4stm32', 'DISCO_F429ZI'),
- ('sw4stm32', 'DISCO_F469NI'),
- ('sw4stm32', 'DISCO_F746NG'),
- ('sw4stm32', 'DISCO_L053C8'),
- ('sw4stm32', 'DISCO_L476VG'),
- ('sw4stm32', 'NUCLEO_F030R8'),
- ('sw4stm32', 'NUCLEO_F031K6'),
- ('sw4stm32', 'NUCLEO_F042K6'),
- ('sw4stm32', 'NUCLEO_F070RB'),
- ('sw4stm32', 'NUCLEO_F072RB'),
- ('sw4stm32', 'NUCLEO_F091RC'),
- ('sw4stm32', 'NUCLEO_F103RB'),
- ('sw4stm32', 'NUCLEO_F302R8'),
- ('sw4stm32', 'NUCLEO_F303K8'),
- ('sw4stm32', 'NUCLEO_F303RE'),
- ('sw4stm32', 'NUCLEO_F334R8'),
- ('sw4stm32', 'NUCLEO_F401RE'),
- ('sw4stm32', 'NUCLEO_F410RB'),
- ('sw4stm32', 'NUCLEO_F411RE'),
- ('sw4stm32', 'NUCLEO_F446RE'),
- ('sw4stm32', 'NUCLEO_L053R8'),
- ('sw4stm32', 'NUCLEO_L073RZ'),
- ('sw4stm32', 'NUCLEO_L031K6'),
- ('sw4stm32', 'NUCLEO_L152RE'),
- ('sw4stm32', 'NUCLEO_L476RG'),
- ('sw4stm32', 'NUCLEO_F031K6'),
- ('sw4stm32', 'NUCLEO_F042K6'),
- ('sw4stm32', 'NUCLEO_F303K8'),
- ('sw4stm32', 'NUCLEO_F410RB'),
-
- ('e2studio', 'RZ_A1H'),
- # Removed following item to avoid script error
- #(None, None),
- ]:
- print '\n=== Exporting to "%s::%s" ===' % (toolchain, target)
- test_export(toolchain, target)
-
- print "\n=== Test error messages ==="
- test_export('lpcxpresso', 'LPC11U24', expected_error='lpcxpresso')
diff --git a/workspace_tools/hooks.py b/workspace_tools/hooks.py
deleted file mode 100644
index d8018dd..0000000
--- a/workspace_tools/hooks.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Configurable hooks in the build system. Can be used by various platforms
-# to customize the build process.
-
-################################################################################
-# Hooks for the various parts of the build process
-
-# Internal mapping of hooks per tool
-_hooks = {}
-
-# Internal mapping of running hooks
-_running_hooks = {}
-
-# Available hook types
-_hook_types = ["binary", "compile", "link", "assemble"]
-
-# Available hook steps
-_hook_steps = ["pre", "replace", "post"]
-
-# Hook the given function. Use this function as a decorator
-def hook_tool(function):
- tool = function.__name__
- tool_flag = "_" + tool + "_done"
- def wrapper(t_self, *args, **kwargs):
- # if a hook for this tool is already running, it's most likely
- # coming from a derived class, so don't hook the super class version
- if _running_hooks.get(tool, False):
- return function(t_self, *args, **kwargs)
- _running_hooks[tool] = True
- # If this tool isn't hooked, return original function
- if not _hooks.has_key(tool):
- res = function(t_self, *args, **kwargs)
- _running_hooks[tool] = False
- return res
- tooldesc = _hooks[tool]
- setattr(t_self, tool_flag, False)
- # If there is a replace hook, execute the replacement instead
- if tooldesc.has_key("replace"):
- res = tooldesc["replace"](t_self, *args, **kwargs)
- # If the replacement has set the "done" flag, exit now
- # Otherwise continue as usual
- if getattr(t_self, tool_flag, False):
- _running_hooks[tool] = False
- return res
- # Execute pre-function before main function if specified
- if tooldesc.has_key("pre"):
- tooldesc["pre"](t_self, *args, **kwargs)
- # Execute the main function now
- res = function(t_self, *args, **kwargs)
- # Execute post-function after main function if specified
- if tooldesc.has_key("post"):
- post_res = tooldesc["post"](t_self, *args, **kwargs)
- _running_hooks[tool] = False
- return post_res or res
- else:
- _running_hooks[tool] = False
- return res
- return wrapper
-
-class Hook:
- def __init__(self, target, toolchain):
- _hooks.clear()
- self._cmdline_hooks = {}
- self.toolchain = toolchain
- target.init_hooks(self, toolchain.__class__.__name__)
-
- # Hook various functions directly
- def _hook_add(self, hook_type, hook_step, function):
- if not hook_type in _hook_types or not hook_step in _hook_steps:
- return False
- if not hook_type in _hooks:
- _hooks[hook_type] = {}
- _hooks[hook_type][hook_step] = function
- return True
-
- def hook_add_compiler(self, hook_step, function):
- return self._hook_add("compile", hook_step, function)
-
- def hook_add_linker(self, hook_step, function):
- return self._hook_add("link", hook_step, function)
-
- def hook_add_assembler(self, hook_step, function):
- return self._hook_add("assemble", hook_step, function)
-
- def hook_add_binary(self, hook_step, function):
- return self._hook_add("binary", hook_step, function)
-
- # Hook command lines
- def _hook_cmdline(self, hook_type, function):
- if not hook_type in _hook_types:
- return False
- self._cmdline_hooks[hook_type] = function
- return True
-
- def hook_cmdline_compiler(self, function):
- return self._hook_cmdline("compile", function)
-
- def hook_cmdline_linker(self, function):
- return self._hook_cmdline("link", function)
-
- def hook_cmdline_assembler(self, function):
- return self._hook_cmdline("assemble", function)
-
- def hook_cmdline_binary(self, function):
- return self._hook_cmdline("binary", function)
-
- # Return the command line after applying the hook
- def _get_cmdline(self, hook_type, cmdline):
- if self._cmdline_hooks.has_key(hook_type):
- cmdline = self._cmdline_hooks[hook_type](self.toolchain.__class__.__name__, cmdline)
- return cmdline
-
- def get_cmdline_compiler(self, cmdline):
- return self._get_cmdline("compile", cmdline)
-
- def get_cmdline_linker(self, cmdline):
- return self._get_cmdline("link", cmdline)
-
- def get_cmdline_assembler(self, cmdline):
- return self._get_cmdline("assemble", cmdline)
-
- def get_cmdline_binary(self, cmdline):
- return self._get_cmdline("binary", cmdline)
-
-################################################################################
-
diff --git a/workspace_tools/host_tests/__init__.py b/workspace_tools/host_tests/__init__.py
deleted file mode 100644
index b365450..0000000
--- a/workspace_tools/host_tests/__init__.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from host_registry import HostRegistry
-
-# Host test supervisors
-from echo import EchoTest
-from rtc_auto import RTCTest
-from stdio_auto import StdioTest
-from hello_auto import HelloTest
-from detect_auto import DetectPlatformTest
-from default_auto import DefaultAuto
-from dev_null_auto import DevNullTest
-from wait_us_auto import WaitusTest
-from tcpecho_server_auto import TCPEchoServerTest
-from udpecho_server_auto import UDPEchoServerTest
-from tcpecho_client_auto import TCPEchoClientTest
-from udpecho_client_auto import UDPEchoClientTest
-from wfi_auto import WFITest
-from serial_nc_rx_auto import SerialNCRXTest
-from serial_nc_tx_auto import SerialNCTXTest
-
-# Populate registry with supervising objects
-HOSTREGISTRY = HostRegistry()
-HOSTREGISTRY.register_host_test("echo", EchoTest())
-HOSTREGISTRY.register_host_test("default", DefaultAuto())
-HOSTREGISTRY.register_host_test("rtc_auto", RTCTest())
-HOSTREGISTRY.register_host_test("hello_auto", HelloTest())
-HOSTREGISTRY.register_host_test("stdio_auto", StdioTest())
-HOSTREGISTRY.register_host_test("detect_auto", DetectPlatformTest())
-HOSTREGISTRY.register_host_test("default_auto", DefaultAuto())
-HOSTREGISTRY.register_host_test("wait_us_auto", WaitusTest())
-HOSTREGISTRY.register_host_test("dev_null_auto", DevNullTest())
-HOSTREGISTRY.register_host_test("tcpecho_server_auto", TCPEchoServerTest())
-HOSTREGISTRY.register_host_test("udpecho_server_auto", UDPEchoServerTest())
-HOSTREGISTRY.register_host_test("tcpecho_client_auto", TCPEchoClientTest())
-HOSTREGISTRY.register_host_test("udpecho_client_auto", UDPEchoClientTest())
-HOSTREGISTRY.register_host_test("wfi_auto", WFITest())
-HOSTREGISTRY.register_host_test("serial_nc_rx_auto", SerialNCRXTest())
-HOSTREGISTRY.register_host_test("serial_nc_tx_auto", SerialNCTXTest())
-
-###############################################################################
-# Functional interface for test supervisor registry
-###############################################################################
-
-
-def get_host_test(ht_name):
- return HOSTREGISTRY.get_host_test(ht_name)
-
-def is_host_test(ht_name):
- return HOSTREGISTRY.is_host_test(ht_name)
diff --git a/workspace_tools/host_tests/default_auto.py b/workspace_tools/host_tests/default_auto.py
deleted file mode 100644
index 0883d79..0000000
--- a/workspace_tools/host_tests/default_auto.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from sys import stdout
-
-class DefaultAuto():
- """ Simple, basic host test's test runner waiting for serial port
- output from MUT, no supervision over test running in MUT is executed.
- """
- def test(self, selftest):
- result = selftest.RESULT_SUCCESS
- try:
- while True:
- c = selftest.mbed.serial_read(512)
- if c is None:
- return selftest.RESULT_IO_SERIAL
- stdout.write(c)
- stdout.flush()
- except KeyboardInterrupt, _:
- selftest.notify("\r\n[CTRL+C] exit")
- result = selftest.RESULT_ERROR
- return result
diff --git a/workspace_tools/host_tests/detect_auto.py b/workspace_tools/host_tests/detect_auto.py
deleted file mode 100644
index 2999946..0000000
--- a/workspace_tools/host_tests/detect_auto.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-
-class DetectPlatformTest():
- PATTERN_MICRO_NAME = "Target '(\w+)'"
- re_detect_micro_name = re.compile(PATTERN_MICRO_NAME)
-
- def test(self, selftest):
- result = True
-
- c = selftest.mbed.serial_readline() # {{start}} preamble
- if c is None:
- return selftest.RESULT_IO_SERIAL
-
- selftest.notify(c.strip())
- selftest.notify("HOST: Detecting target name...")
-
- c = selftest.mbed.serial_readline()
- if c is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify(c.strip())
-
- # Check for target name
- m = self.re_detect_micro_name.search(c)
- if m and len(m.groups()):
- micro_name = m.groups()[0]
- micro_cmp = selftest.mbed.options.micro == micro_name
- result = result and micro_cmp
- selftest.notify("HOST: MUT Target name '%s', expected '%s'... [%s]"% (micro_name,
- selftest.mbed.options.micro,
- "OK" if micro_cmp else "FAIL"))
-
- for i in range(0, 2):
- c = selftest.mbed.serial_readline()
- if c is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify(c.strip())
-
- return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/dev_null_auto.py b/workspace_tools/host_tests/dev_null_auto.py
deleted file mode 100644
index 4538f6d..0000000
--- a/workspace_tools/host_tests/dev_null_auto.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-class DevNullTest():
-
- def check_readline(self, selftest, text):
- """ Reads line from serial port and checks if text was part of read string
- """
- result = False
- c = selftest.mbed.serial_readline()
- if c and text in c:
- result = True
- return result
-
- def test(self, selftest):
- result = True
- # Test should print some text and later stop printing
- # 'MBED: re-routing stdout to /null'
- res = self.check_readline(selftest, "re-routing stdout to /null")
- if not res:
- # We haven't read preamble line
- result = False
- else:
- # Check if there are printed characters
- str = ''
- for i in range(3):
- c = selftest.mbed.serial_read(32)
- if c is None:
- return selftest.RESULT_IO_SERIAL
- else:
- str += c
- if len(str) > 0:
- result = False
- break
- selftest.notify("Received %d bytes: %s"% (len(str), str))
- return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/echo.py b/workspace_tools/host_tests/echo.py
deleted file mode 100644
index 75e534f..0000000
--- a/workspace_tools/host_tests/echo.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import uuid
-from sys import stdout
-
-class EchoTest():
-
- # Test parameters
- TEST_SERIAL_BAUDRATE = 115200
- TEST_LOOP_COUNT = 50
-
- def test(self, selftest):
- """ This host test will use mbed serial port with
- baudrate 115200 to perform echo test on that port.
- """
- # Custom initialization for echo test
- selftest.mbed.init_serial_params(serial_baud=self.TEST_SERIAL_BAUDRATE)
- selftest.mbed.init_serial()
-
- # Test function, return True or False to get standard test notification on stdout
- selftest.mbed.flush()
- selftest.notify("HOST: Starting the ECHO test")
- result = True
-
- """ This ensures that there are no parasites left in the serial buffer.
- """
- for i in range(0, 2):
- selftest.mbed.serial_write("\n")
- c = selftest.mbed.serial_readline()
-
- for i in range(0, self.TEST_LOOP_COUNT):
- TEST_STRING = str(uuid.uuid4()) + "\n"
- selftest.mbed.serial_write(TEST_STRING)
- c = selftest.mbed.serial_readline()
- if c is None:
- return selftest.RESULT_IO_SERIAL
- if c.strip() != TEST_STRING.strip():
- selftest.notify('HOST: "%s" != "%s"'% (c, TEST_STRING))
- result = False
- else:
- sys.stdout.write('.')
- stdout.flush()
- return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/echo_flow_control.py b/workspace_tools/host_tests/echo_flow_control.py
deleted file mode 100644
index 7ea11e9..0000000
--- a/workspace_tools/host_tests/echo_flow_control.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from host_test import Test
-
-
-class EchoTest(Test):
- def __init__(self):
- Test.__init__(self)
- self.mbed.init_serial()
- self.mbed.extra_serial.rtscts = True
- self.mbed.reset()
-
- def test(self):
- self.mbed.flush()
- self.notify("Starting the ECHO test")
- TEST="longer serial test"
- check = True
- for i in range(1, 100):
- self.mbed.extra_serial.write(TEST + "\n")
- l = self.mbed.extra_serial.readline().strip()
- if not l: continue
-
- if l != TEST:
- check = False
- self.notify('"%s" != "%s"' % (l, TEST))
- else:
- if (i % 10) == 0:
- self.notify('.')
-
- return check
-
-
-if __name__ == '__main__':
- EchoTest().run()
diff --git a/workspace_tools/host_tests/example/BroadcastReceive.py b/workspace_tools/host_tests/example/BroadcastReceive.py
deleted file mode 100644
index 2e846ca..0000000
--- a/workspace_tools/host_tests/example/BroadcastReceive.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-
-BROADCAST_PORT = 58083
-
-s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-s.bind(('0.0.0.0', BROADCAST_PORT))
-
-while True:
- print s.recvfrom(256)
diff --git a/workspace_tools/host_tests/example/BroadcastSend.py b/workspace_tools/host_tests/example/BroadcastSend.py
deleted file mode 100644
index 0a5f8c3..0000000
--- a/workspace_tools/host_tests/example/BroadcastSend.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-from time import sleep, time
-
-BROADCAST_PORT = 58083
-
-s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-s.bind(('', 0))
-s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
-
-while True:
- print "Broadcasting..."
- data = 'Hello World: ' + repr(time()) + '\n'
- s.sendto(data, ('', BROADCAST_PORT))
- sleep(1)
diff --git a/workspace_tools/host_tests/example/MulticastReceive.py b/workspace_tools/host_tests/example/MulticastReceive.py
deleted file mode 100644
index 9001f40..0000000
--- a/workspace_tools/host_tests/example/MulticastReceive.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-import struct
-
-MCAST_GRP = '224.1.1.1'
-MCAST_PORT = 5007
-
-sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
-sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-sock.bind(('', MCAST_PORT))
-mreq = struct.pack("4sl", socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
-
-sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
-
-while True:
- print sock.recv(10240)
diff --git a/workspace_tools/host_tests/example/MulticastSend.py b/workspace_tools/host_tests/example/MulticastSend.py
deleted file mode 100644
index 8efd453..0000000
--- a/workspace_tools/host_tests/example/MulticastSend.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-from time import sleep, time
-
-MCAST_GRP = '224.1.1.1'
-MCAST_PORT = 5007
-
-sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
-sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
-
-while True:
- print "Multicast to group: %s\n" % MCAST_GRP
- data = 'Hello World: ' + repr(time()) + '\n'
- sock.sendto(data, (MCAST_GRP, MCAST_PORT))
- sleep(1)
diff --git a/workspace_tools/host_tests/example/TCPEchoClient.py b/workspace_tools/host_tests/example/TCPEchoClient.py
deleted file mode 100644
index dfa9bfd..0000000
--- a/workspace_tools/host_tests/example/TCPEchoClient.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-
-ECHO_SERVER_ADDRESS = "10.2.202.45"
-ECHO_PORT = 7
-
-s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-s.connect((ECHO_SERVER_ADDRESS, ECHO_PORT))
-
-s.sendall('Hello, world')
-data = s.recv(1024)
-s.close()
-print 'Received', repr(data)
diff --git a/workspace_tools/host_tests/example/TCPEchoServer.py b/workspace_tools/host_tests/example/TCPEchoServer.py
deleted file mode 100644
index 1324edb..0000000
--- a/workspace_tools/host_tests/example/TCPEchoServer.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-
-s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-s.bind(('', 7))
-s.listen(1)
-
-while True:
- conn, addr = s.accept()
- print 'Connected by', addr
- while True:
- data = conn.recv(1024)
- if not data: break
- conn.sendall(data)
- conn.close()
diff --git a/workspace_tools/host_tests/example/UDPEchoClient.py b/workspace_tools/host_tests/example/UDPEchoClient.py
deleted file mode 100644
index 6a6cf8c..0000000
--- a/workspace_tools/host_tests/example/UDPEchoClient.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-
-ECHO_SERVER_ADDRESS = '10.2.202.45'
-ECHO_PORT = 7
-
-sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-
-sock.sendto("Hello World\n", (ECHO_SERVER_ADDRESS, ECHO_PORT))
-response = sock.recv(256)
-sock.close()
-
-print response
diff --git a/workspace_tools/host_tests/example/UDPEchoServer.py b/workspace_tools/host_tests/example/UDPEchoServer.py
deleted file mode 100644
index 3850348..0000000
--- a/workspace_tools/host_tests/example/UDPEchoServer.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-
-ECHO_PORT = 7
-
-sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-sock.bind(('', ECHO_PORT))
-
-while True:
- data, address = sock.recvfrom(256)
- print "datagram from", address
- sock.sendto(data, address)
diff --git a/workspace_tools/host_tests/example/__init__.py b/workspace_tools/host_tests/example/__init__.py
deleted file mode 100644
index 10e7e1d..0000000
--- a/workspace_tools/host_tests/example/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
\ No newline at end of file
diff --git a/workspace_tools/host_tests/hello_auto.py b/workspace_tools/host_tests/hello_auto.py
deleted file mode 100644
index 69b39bf..0000000
--- a/workspace_tools/host_tests/hello_auto.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-class HelloTest():
- HELLO_WORLD = "Hello World"
-
- def test(self, selftest):
- c = selftest.mbed.serial_readline()
- if c is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify("Read %d bytes:"% len(c))
- selftest.notify(c.strip())
-
- result = True
- # Because we can have targetID here let's try to decode
- if len(c) < len(self.HELLO_WORLD):
- result = False
- else:
- result = self.HELLO_WORLD in c
- return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/host_registry.py b/workspace_tools/host_tests/host_registry.py
deleted file mode 100644
index d523848..0000000
--- a/workspace_tools/host_tests/host_registry.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-class HostRegistry:
- """ Class stores registry with host tests and objects representing them
- """
- HOST_TESTS = {} # host_test_name -> host_test_ojbect
-
- def register_host_test(self, ht_name, ht_object):
- if ht_name not in self.HOST_TESTS:
- self.HOST_TESTS[ht_name] = ht_object
-
- def unregister_host_test(self):
- if ht_name in HOST_TESTS:
- self.HOST_TESTS[ht_name] = None
-
- def get_host_test(self, ht_name):
- return self.HOST_TESTS[ht_name] if ht_name in self.HOST_TESTS else None
-
- def is_host_test(self, ht_name):
- return ht_name in self.HOST_TESTS
-
\ No newline at end of file
diff --git a/workspace_tools/host_tests/host_test.py b/workspace_tools/host_tests/host_test.py
deleted file mode 100644
index 103df83..0000000
--- a/workspace_tools/host_tests/host_test.py
+++ /dev/null
@@ -1,426 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-# Check if 'serial' module is installed
-try:
- from serial import Serial
-except ImportError, e:
- print "Error: Can't import 'serial' module: %s"% e
- exit(-1)
-
-import os
-import re
-import types
-from sys import stdout
-from time import sleep, time
-from optparse import OptionParser
-
-import host_tests_plugins
-
-# This is a little tricky. We need to add upper directory to path so
-# we can find packages we want from the same level as other files do
-import sys
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
-from workspace_tools.test_api import get_autodetected_MUTS_list
-from workspace_tools.test_api import get_module_avail
-
-
-class Mbed:
- """ Base class for a host driven test
- """
- def __init__(self):
- parser = OptionParser()
-
- parser.add_option("-m", "--micro",
- dest="micro",
- help="The target microcontroller",
- metavar="MICRO")
-
- parser.add_option("-p", "--port",
- dest="port",
- help="The serial port of the target mbed",
- metavar="PORT")
-
- parser.add_option("-d", "--disk",
- dest="disk",
- help="The target disk path",
- metavar="DISK_PATH")
-
- parser.add_option("-f", "--image-path",
- dest="image_path",
- help="Path with target's image",
- metavar="IMAGE_PATH")
-
- parser.add_option("-c", "--copy",
- dest="copy_method",
- help="Copy method selector",
- metavar="COPY_METHOD")
-
- parser.add_option("-C", "--program_cycle_s",
- dest="program_cycle_s",
- help="Program cycle sleep. Define how many seconds you want wait after copying bianry onto target",
- type="float",
- metavar="COPY_METHOD")
-
- parser.add_option("-t", "--timeout",
- dest="timeout",
- help="Timeout",
- metavar="TIMEOUT")
-
- parser.add_option("-r", "--reset",
- dest="forced_reset_type",
- help="Forces different type of reset")
-
- parser.add_option("-R", "--reset-timeout",
- dest="forced_reset_timeout",
- metavar="NUMBER",
- type="int",
- help="When forcing a reset using option -r you can set up after reset timeout in seconds")
-
- parser.add_option('', '--auto',
- dest='auto_detect',
- metavar=False,
- action="store_true",
- help='Use mbed-ls module to detect all connected mbed devices')
-
- (self.options, _) = parser.parse_args()
-
- self.DEFAULT_RESET_TOUT = 0
- self.DEFAULT_TOUT = 10
-
- if self.options.port is None:
- raise Exception("The serial port of the target mbed have to be provided as command line arguments")
-
- # Options related to copy / reset mbed device
- self.port = self.options.port
- self.disk = self.options.disk
- self.image_path = self.options.image_path.strip('"')
- self.copy_method = self.options.copy_method
- self.program_cycle_s = float(self.options.program_cycle_s)
-
- self.serial = None
- self.serial_baud = 9600
- self.serial_timeout = 1
-
- self.timeout = self.DEFAULT_TOUT if self.options.timeout is None else self.options.timeout
- print 'MBED: Instrumentation: "%s" and disk: "%s"' % (self.port, self.disk)
-
- def init_serial_params(self, serial_baud=9600, serial_timeout=1):
- """ Initialize port parameters.
- This parameters will be used by self.init_serial() function to open serial port
- """
- self.serial_baud = serial_baud
- self.serial_timeout = serial_timeout
-
- def init_serial(self, serial_baud=None, serial_timeout=None):
- """ Initialize serial port.
- Function will return error is port can't be opened or initialized
- """
- # Overload serial port configuration from default to parameters' values if they are specified
- serial_baud = serial_baud if serial_baud is not None else self.serial_baud
- serial_timeout = serial_timeout if serial_timeout is not None else self.serial_timeout
-
- if get_module_avail('mbed_lstools') and self.options.auto_detect:
- # Ensure serial port is up-to-date (try to find it 60 times)
- found = False
-
- for i in range(0, 60):
- print('Looking for %s with MBEDLS' % self.options.micro)
- muts_list = get_autodetected_MUTS_list(platform_name_filter=[self.options.micro])
-
- if 1 in muts_list:
- mut = muts_list[1]
- self.port = mut['port']
- found = True
- break
- else:
- sleep(3)
-
- if not found:
- return False
-
- # Clear serial port
- if self.serial:
- self.serial.close()
- self.serial = None
-
- # We will pool for serial to be re-mounted if it was unmounted after device reset
- result = self.pool_for_serial_init(serial_baud, serial_timeout) # Blocking
-
- # Port can be opened
- if result:
- self.flush()
- return result
-
- def pool_for_serial_init(self, serial_baud, serial_timeout, pooling_loops=40, init_delay=0.5, loop_delay=0.25):
- """ Functions pools for serial port readiness
- """
- result = True
- last_error = None
- # This loop is used to check for serial port availability due to
- # some delays and remounting when devices are being flashed with new software.
- for i in range(pooling_loops):
- sleep(loop_delay if i else init_delay)
- try:
- self.serial = Serial(self.port, baudrate=serial_baud, timeout=serial_timeout)
- except Exception as e:
- result = False
- last_error = "MBED: %s"% str(e)
- stdout.write('.')
- stdout.flush()
- else:
- print "...port ready!"
- result = True
- break
- if not result and last_error:
- print last_error
- return result
-
- def set_serial_timeout(self, timeout):
- """ Wraps self.mbed.serial object timeout property
- """
- result = None
- if self.serial:
- self.serial.timeout = timeout
- result = True
- return result
-
- def serial_read(self, count=1):
- """ Wraps self.mbed.serial object read method
- """
- result = None
- if self.serial:
- try:
- result = self.serial.read(count)
- except:
- result = None
- return result
-
- def serial_readline(self, timeout=5):
- """ Wraps self.mbed.serial object read method to read one line from serial port
- """
- result = ''
- start = time()
- while (time() - start) < timeout:
- if self.serial:
- try:
- c = self.serial.read(1)
- result += c
- except Exception as e:
- print "MBED: %s"% str(e)
- result = None
- break
- if c == '\n':
- break
- return result
-
- def serial_write(self, write_buffer):
- """ Wraps self.mbed.serial object write method
- """
- result = None
- if self.serial:
- try:
- result = self.serial.write(write_buffer)
- except:
- result = None
- return result
-
- def reset_timeout(self, timeout):
- """ Timeout executed just after reset command is issued
- """
- for n in range(0, timeout):
- sleep(1)
-
- def reset(self):
- """ Calls proper reset plugin to do the job.
- Please refer to host_test_plugins functionality
- """
- # Flush serials to get only input after reset
- self.flush()
- if self.options.forced_reset_type:
- result = host_tests_plugins.call_plugin('ResetMethod', self.options.forced_reset_type, disk=self.disk)
- else:
- result = host_tests_plugins.call_plugin('ResetMethod', 'default', serial=self.serial)
- # Give time to wait for the image loading
- reset_tout_s = self.options.forced_reset_timeout if self.options.forced_reset_timeout is not None else self.DEFAULT_RESET_TOUT
- self.reset_timeout(reset_tout_s)
- return result
-
- def copy_image(self, image_path=None, disk=None, copy_method=None):
- """ Closure for copy_image_raw() method.
- Method which is actually copying image to mbed
- """
- # Set closure environment
- image_path = image_path if image_path is not None else self.image_path
- disk = disk if disk is not None else self.disk
- copy_method = copy_method if copy_method is not None else self.copy_method
- # Call proper copy method
- result = self.copy_image_raw(image_path, disk, copy_method)
- return result
-
- def copy_image_raw(self, image_path=None, disk=None, copy_method=None):
- """ Copy file depending on method you want to use. Handles exception
- and return code from shell copy commands.
- """
- # image_path - Where is binary with target's firmware
- if copy_method is not None:
- # We override 'default' method with 'shell' method
- if copy_method == 'default':
- copy_method = 'shell'
- else:
- copy_method = 'shell'
-
- result = host_tests_plugins.call_plugin('CopyMethod', copy_method, image_path=image_path, destination_disk=disk, program_cycle_s=self.program_cycle_s, target_mcu=self.options.micro)
- return result;
-
- def flush(self):
- """ Flush serial ports
- """
- result = False
- if self.serial:
- self.serial.flushInput()
- self.serial.flushOutput()
- result = True
- return result
-
-
-class HostTestResults:
- """ Test results set by host tests
- """
- def __init__(self):
- self.RESULT_SUCCESS = 'success'
- self.RESULT_FAILURE = 'failure'
- self.RESULT_ERROR = 'error'
- self.RESULT_IO_SERIAL = 'ioerr_serial'
- self.RESULT_NO_IMAGE = 'no_image'
- self.RESULT_IOERR_COPY = "ioerr_copy"
- self.RESULT_PASSIVE = "passive"
- self.RESULT_NOT_DETECTED = "not_detected"
- self.RESULT_MBED_ASSERT = "mbed_assert"
-
-
-import workspace_tools.host_tests as host_tests
-
-
-class Test(HostTestResults):
- """ Base class for host test's test runner
- """
- # Select default host_test supervision (replaced after autodetection)
- test_supervisor = host_tests.get_host_test("default")
-
- def __init__(self):
- self.mbed = Mbed()
-
- def detect_test_config(self, verbose=False):
- """ Detects test case configuration
- """
- result = {}
- while True:
- line = self.mbed.serial_readline()
- if "{start}" in line:
- self.notify("HOST: Start test...")
- break
- else:
- # Detect if this is property from TEST_ENV print
- m = re.search('{([\w_]+);([\w\d\+ ]+)}}', line[:-1])
- if m and len(m.groups()) == 2:
- # This is most likely auto-detection property
- result[m.group(1)] = m.group(2)
- if verbose:
- self.notify("HOST: Property '%s' = '%s'"% (m.group(1), m.group(2)))
- else:
- # We can check if this is TArget Id in mbed specific format
- m2 = re.search('^([\$]+)([a-fA-F0-9]+)', line[:-1])
- if m2 and len(m2.groups()) == 2:
- if verbose:
- target_id = m2.group(1) + m2.group(2)
- self.notify("HOST: TargetID '%s'"% target_id)
- self.notify(line[len(target_id):-1])
- else:
- self.notify("HOST: Unknown property: %s"% line.strip())
- return result
-
- def run(self):
- """ Test runner for host test. This function will start executing
- test and forward test result via serial port to test suite
- """
- # Copy image to device
- self.notify("HOST: Copy image onto target...")
- result = self.mbed.copy_image()
- if not result:
- self.print_result(self.RESULT_IOERR_COPY)
-
- # Initialize and open target's serial port (console)
- self.notify("HOST: Initialize serial port...")
- result = self.mbed.init_serial()
- if not result:
- self.print_result(self.RESULT_IO_SERIAL)
-
- # Reset device
- self.notify("HOST: Reset target...")
- result = self.mbed.reset()
- if not result:
- self.print_result(self.RESULT_IO_SERIAL)
-
- # Run test
- try:
- CONFIG = self.detect_test_config(verbose=True) # print CONFIG
-
- if "host_test_name" in CONFIG:
- if host_tests.is_host_test(CONFIG["host_test_name"]):
- self.test_supervisor = host_tests.get_host_test(CONFIG["host_test_name"])
- result = self.test_supervisor.test(self) #result = self.test()
-
- if result is not None:
- self.print_result(result)
- else:
- self.notify("HOST: Passive mode...")
- except Exception, e:
- print str(e)
- self.print_result(self.RESULT_ERROR)
-
- def setup(self):
- """ Setup and check if configuration for test is
- correct. E.g. if serial port can be opened.
- """
- result = True
- if not self.mbed.serial:
- result = False
- self.print_result(self.RESULT_IO_SERIAL)
- return result
-
- def notify(self, message):
- """ On screen notification function
- """
- print message
- stdout.flush()
-
- def print_result(self, result):
- """ Test result unified printing function
- """
- self.notify("\r\n{{%s}}\r\n{{end}}" % result)
-
-
-class DefaultTestSelector(Test):
- """ Test class with serial port initialization
- """
- def __init__(self):
- HostTestResults.__init__(self)
- Test.__init__(self)
-
-if __name__ == '__main__':
- DefaultTestSelector().run()
diff --git a/workspace_tools/host_tests/host_tests_plugins/__init__.py b/workspace_tools/host_tests/host_tests_plugins/__init__.py
deleted file mode 100644
index c05241a..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/__init__.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import host_test_registry
-
-# This plugins provide 'flashing' methods to host test scripts
-import module_copy_mbed
-import module_copy_shell
-import module_copy_silabs
-
-try:
- import module_copy_smart
-except:
- pass
-
-#import module_copy_firefox
-import module_copy_mps2
-
-# Plugins used to reset certain platform
-import module_reset_mbed
-import module_reset_silabs
-import module_reset_mps2
-
-
-# Plugin registry instance
-HOST_TEST_PLUGIN_REGISTRY = host_test_registry.HostTestRegistry()
-
-# Static plugin registration
-# Some plugins are commented out if they are not stable or not commonly used
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_mbed.load_plugin())
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_shell.load_plugin())
-
-try:
- HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_smart.load_plugin())
-except:
- pass
-
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_mbed.load_plugin())
-#HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_firefox.load_plugin())
-
-# Extra platforms support
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_mps2.load_plugin())
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_mps2.load_plugin())
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_silabs.load_plugin())
-HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_silabs.load_plugin())
-
-# TODO: extend plugin loading to files with name module_*.py loaded ad-hoc
-
-###############################################################################
-# Functional interface for host test plugin registry
-###############################################################################
-def call_plugin(type, capability, *args, **kwargs):
- """ Interface to call plugin registry functional way
- """
- return HOST_TEST_PLUGIN_REGISTRY.call_plugin(type, capability, *args, **kwargs)
-
-def get_plugin_caps(type):
- """ Returns list of all capabilities for plugin family with the same type.
- If there are no capabilities empty list is returned
- """
- return HOST_TEST_PLUGIN_REGISTRY.get_plugin_caps(type)
-
-def print_plugin_info():
- """ Prints plugins' information in user friendly way
- """
- print HOST_TEST_PLUGIN_REGISTRY
diff --git a/workspace_tools/host_tests/host_tests_plugins/host_test_plugins.py b/workspace_tools/host_tests/host_tests_plugins/host_test_plugins.py
deleted file mode 100644
index ee60950..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/host_test_plugins.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from os import access, F_OK
-from sys import stdout
-from time import sleep
-from subprocess import call
-
-
-class HostTestPluginBase:
- """ Base class for all plug-ins used with host tests.
- """
- ###########################################################################
- # Interface:
- ###########################################################################
-
- ###########################################################################
- # Interface attributes defining plugin name, type etc.
- ###########################################################################
- name = "HostTestPluginBase" # Plugin name, can be plugin class name
- type = "BasePlugin" # Plugin type: ResetMethod, Copymethod etc.
- capabilities = [] # Capabilities names: what plugin can achieve
- # (e.g. reset using some external command line tool)
- stable = False # Determine if plugin is stable and can be used
-
- ###########################################################################
- # Interface methods
- ###########################################################################
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- return False
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability e.g. may directly just call some command line
- program or execute building pythonic function
- """
- return False
-
- ###########################################################################
- # Interface helper methods - overload only if you need to have custom behaviour
- ###########################################################################
- def print_plugin_error(self, text):
- """ Function prints error in console and exits always with False
- """
- print "Plugin error: %s::%s: %s"% (self.name, self.type, text)
- return False
-
- def print_plugin_info(self, text, NL=True):
- """ Function prints notification in console and exits always with True
- """
- if NL:
- print "Plugin info: %s::%s: %s"% (self.name, self.type, text)
- else:
- print "Plugin info: %s::%s: %s"% (self.name, self.type, text),
- return True
-
- def print_plugin_char(self, char):
- """ Function prints char on stdout
- """
- stdout.write(char)
- stdout.flush()
- return True
-
- def check_mount_point_ready(self, destination_disk, init_delay=0.2, loop_delay=0.25):
- """ Checks if destination_disk is ready and can be accessed by e.g. copy commands
- @init_delay - Initial delay time before first access check
- @loop_delay - pooling delay for access check
- """
- if not access(destination_disk, F_OK):
- self.print_plugin_info("Waiting for mount point '%s' to be ready..."% destination_disk, NL=False)
- sleep(init_delay)
- while not access(destination_disk, F_OK):
- sleep(loop_delay)
- self.print_plugin_char('.')
-
- def check_parameters(self, capabilitity, *args, **kwargs):
- """ This function should be ran each time we call execute()
- to check if none of the required parameters is missing.
- """
- missing_parameters = []
- for parameter in self.required_parameters:
- if parameter not in kwargs:
- missing_parameters.append(parameter)
- if len(missing_parameters) > 0:
- self.print_plugin_error("execute parameter(s) '%s' missing!"% (', '.join(parameter)))
- return False
- return True
-
- def run_command(self, cmd, shell=True):
- """ Runs command from command line.
- """
- result = True
- ret = 0
- try:
- ret = call(cmd, shell=shell)
- if ret:
- self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd))
- return False
- except Exception as e:
- result = False
- self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd))
- self.print_plugin_error(str(e))
- return result
diff --git a/workspace_tools/host_tests/host_tests_plugins/host_test_registry.py b/workspace_tools/host_tests/host_tests_plugins/host_test_registry.py
deleted file mode 100644
index 5237b9a..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/host_test_registry.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-class HostTestRegistry:
- """ Simple class used to register and store
- host test plugins for further usage
- """
- # Here we actually store all the plugins
- PLUGINS = {} # 'Plugin Name' : Plugin Object
-
- def print_error(self, text):
- print "Plugin load failed. Reason: %s"% text
-
- def register_plugin(self, plugin):
- """ Registers and stores plugin inside registry for further use.
- Method also calls plugin's setup() function to configure plugin if needed.
-
- Note: Different groups of plugins may demand different extra parameter. Plugins
- should be at least for one type of plugin configured with the same parameters
- because we do not know which of them will actually use particular parameter.
- """
- # TODO:
- # - check for unique caps for specified type
- if plugin.name not in self.PLUGINS:
- if plugin.setup(): # Setup plugin can be completed without errors
- self.PLUGINS[plugin.name] = plugin
- return True
- else:
- self.print_error("%s setup failed"% plugin.name)
- else:
- self.print_error("%s already loaded"% plugin.name)
- return False
-
- def call_plugin(self, type, capability, *args, **kwargs):
- """ Execute plugin functionality respectively to its purpose
- """
- for plugin_name in self.PLUGINS:
- plugin = self.PLUGINS[plugin_name]
- if plugin.type == type and capability in plugin.capabilities:
- return plugin.execute(capability, *args, **kwargs)
- return False
-
- def get_plugin_caps(self, type):
- """ Returns list of all capabilities for plugin family with the same type.
- If there are no capabilities empty list is returned
- """
- result = []
- for plugin_name in self.PLUGINS:
- plugin = self.PLUGINS[plugin_name]
- if plugin.type == type:
- result.extend(plugin.capabilities)
- return sorted(result)
-
- def load_plugin(self, name):
- """ Used to load module from
- """
- mod = __import__("module_%s"% name)
- return mod
-
- def __str__(self):
- """ User friendly printing method to show hooked plugins
- """
- from prettytable import PrettyTable
- column_names = ['name', 'type', 'capabilities', 'stable']
- pt = PrettyTable(column_names)
- for column in column_names:
- pt.align[column] = 'l'
- for plugin_name in sorted(self.PLUGINS.keys()):
- name = self.PLUGINS[plugin_name].name
- type = self.PLUGINS[plugin_name].type
- stable = self.PLUGINS[plugin_name].stable
- capabilities = ', '.join(self.PLUGINS[plugin_name].capabilities)
- row = [name, type, capabilities, stable]
- pt.add_row(row)
- return pt.get_string()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_firefox.py b/workspace_tools/host_tests/host_tests_plugins/module_copy_firefox.py
deleted file mode 100644
index 360835e..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_copy_firefox.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from os.path import join, basename
-from host_test_plugins import HostTestPluginBase
-
-
-class HostTestPluginCopyMethod_Firefox(HostTestPluginBase):
-
- def file_store_firefox(self, file_path, dest_disk):
- try:
- from selenium import webdriver
- profile = webdriver.FirefoxProfile()
- profile.set_preference('browser.download.folderList', 2) # custom location
- profile.set_preference('browser.download.manager.showWhenStarting', False)
- profile.set_preference('browser.download.dir', dest_disk)
- profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'application/octet-stream')
- # Launch browser with profile and get file
- browser = webdriver.Firefox(profile)
- browser.get(file_path)
- browser.close()
- except:
- return False
- return True
-
- # Plugin interface
- name = 'HostTestPluginCopyMethod_Firefox'
- type = 'CopyMethod'
- capabilities = ['firefox']
- required_parameters = ['image_path', 'destination_disk']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- try:
- from selenium import webdriver
- except ImportError, e:
- self.print_plugin_error("Error: firefox copy method requires selenium library. %s"% e)
- return False
- return True
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capabilitity, *args, **kwargs) is True:
- image_path = kwargs['image_path']
- destination_disk = kwargs['destination_disk']
- # Prepare correct command line parameter values
- image_base_name = basename(image_path)
- destination_path = join(destination_disk, image_base_name)
- if capabilitity == 'firefox':
- self.file_store_firefox(image_path, destination_path)
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginCopyMethod_Firefox()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_mbed.py b/workspace_tools/host_tests/host_tests_plugins/module_copy_mbed.py
deleted file mode 100644
index 913ff3c..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_copy_mbed.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from shutil import copy
-from host_test_plugins import HostTestPluginBase
-from time import sleep
-
-
-class HostTestPluginCopyMethod_Mbed(HostTestPluginBase):
-
- def generic_mbed_copy(self, image_path, destination_disk):
- """ Generic mbed copy method for "mbed enabled" devices.
- It uses standard python shuitl function to copy
- image_file (target specific binary) to device's disk.
- """
- result = True
- if not destination_disk.endswith('/') and not destination_disk.endswith('\\'):
- destination_disk += '/'
- try:
- copy(image_path, destination_disk)
- except Exception, e:
- self.print_plugin_error("shutil.copy('%s', '%s')"% (image_path, destination_disk))
- self.print_plugin_error("Error: %s"% str(e))
- result = False
- return result
-
- # Plugin interface
- name = 'HostTestPluginCopyMethod_Mbed'
- type = 'CopyMethod'
- stable = True
- capabilities = ['shutil', 'default']
- required_parameters = ['image_path', 'destination_disk', 'program_cycle_s']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- return True
-
- def execute(self, capability, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capability, *args, **kwargs) is True:
- # Capability 'default' is a dummy capability
- if capability == 'shutil':
- image_path = kwargs['image_path']
- destination_disk = kwargs['destination_disk']
- program_cycle_s = kwargs['program_cycle_s']
- # Wait for mount point to be ready
- self.check_mount_point_ready(destination_disk) # Blocking
- result = self.generic_mbed_copy(image_path, destination_disk)
-
- # Allow mbed to cycle
- sleep(program_cycle_s)
-
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginCopyMethod_Mbed()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_mps2.py b/workspace_tools/host_tests/host_tests_plugins/module_copy_mps2.py
deleted file mode 100644
index bcfe1d7..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_copy_mps2.py
+++ /dev/null
@@ -1,150 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-import os, shutil
-from os.path import join
-from host_test_plugins import HostTestPluginBase
-from time import sleep
-
-
-class HostTestPluginCopyMethod_MPS2(HostTestPluginBase):
-
- # MPS2 specific flashing / binary setup funcitons
- def mps2_set_board_image_file(self, disk, images_cfg_path, image0file_path, image_name='images.txt'):
- """ This function will alter image cfg file.
- Main goal of this function is to change number of images to 1, comment all
- existing image entries and append at the end of file new entry with test path.
- @return True when all steps succeed.
- """
- MBED_SDK_TEST_STAMP = 'test suite entry'
- image_path = join(disk, images_cfg_path, image_name)
- new_file_lines = [] # New configuration file lines (entries)
-
- # Check each line of the image configuration file
- try:
- with open(image_path, 'r') as file:
- for line in file:
- if re.search('^TOTALIMAGES', line):
- # Check number of total images, should be 1
- new_file_lines.append(re.sub('^TOTALIMAGES:[\t ]*[\d]+', 'TOTALIMAGES: 1', line))
- elif re.search('; - %s[\n\r]*$'% MBED_SDK_TEST_STAMP, line):
- # Look for test suite entries and remove them
- pass # Omit all test suite entries
- elif re.search('^IMAGE[\d]+FILE', line):
- # Check all image entries and mark the ';'
- new_file_lines.append(';' + line) # Comment non test suite lines
- else:
- # Append line to new file
- new_file_lines.append(line)
- except IOError as e:
- return False
-
- # Add new image entry with proper commented stamp
- new_file_lines.append('IMAGE0FILE: %s ; - %s\r\n'% (image0file_path, MBED_SDK_TEST_STAMP))
-
- # Write all lines to file
- try:
- with open(image_path, 'w') as file:
- for line in new_file_lines:
- file.write(line),
- except IOError:
- return False
-
- return True
-
- def mps2_select_core(self, disk, mobo_config_name=""):
- """ Function selects actual core
- """
- # TODO: implement core selection
- pass
-
- def mps2_switch_usb_auto_mounting_after_restart(self, disk, usb_config_name=""):
- """ Function alters configuration to allow USB MSD to be mounted after restarts
- """
- # TODO: implement USB MSD restart detection
- pass
-
- def copy_file(self, file, disk):
- if not file:
- return
-
- _, ext = os.path.splitext(file)
- ext = ext.lower()
- dfile = disk + "/SOFTWARE/mbed" + ext
-
- if os.path.isfile(dfile):
- print('Remove old binary %s' % dfile)
- os.remove(dfile)
-
- shutil.copy(file, dfile)
- return True
-
- def touch_file(self, file):
- """ Touch file and set timestamp to items
- """
- tfile = file+'.tmp'
- fhandle = open(tfile, 'a')
- try:
- fhandle.close()
- finally:
- os.rename(tfile, file)
- return True
-
- # Plugin interface
- name = 'HostTestPluginCopyMethod_MPS2'
- type = 'CopyMethod'
- capabilities = ['mps2-copy']
- required_parameters = ['image_path', 'destination_disk']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- return True
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capabilitity, *args, **kwargs) is True:
- file = kwargs['image_path']
- disk = kwargs['destination_disk']
-
- """ Add a delay in case there a test just finished
- Prevents interface firmware hiccups
- """
- sleep(20)
- if capabilitity == 'mps2-copy' and self.copy_file(file, disk):
- sleep(3)
- if self.touch_file(disk + 'reboot.txt'):
- """ Add a delay after the board was rebooted.
- The actual reboot time is 20 seconds, but using 15 seconds
- allows us to open the COM port and save a board reset.
- This also prevents interface firmware hiccups.
- """
- sleep(7)
- result = True
-
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginCopyMethod_MPS2()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_shell.py b/workspace_tools/host_tests/host_tests_plugins/module_copy_shell.py
deleted file mode 100644
index 18ca062..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_copy_shell.py
+++ /dev/null
@@ -1,74 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import os
-from os.path import join, basename
-from host_test_plugins import HostTestPluginBase
-from time import sleep
-
-
-class HostTestPluginCopyMethod_Shell(HostTestPluginBase):
-
- # Plugin interface
- name = 'HostTestPluginCopyMethod_Shell'
- type = 'CopyMethod'
- stable = True
- capabilities = ['shell', 'cp', 'copy', 'xcopy']
- required_parameters = ['image_path', 'destination_disk', 'program_cycle_s']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- return True
-
- def execute(self, capability, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capability, *args, **kwargs) is True:
- image_path = kwargs['image_path']
- destination_disk = kwargs['destination_disk']
- program_cycle_s = kwargs['program_cycle_s']
- # Wait for mount point to be ready
- self.check_mount_point_ready(destination_disk) # Blocking
- # Prepare correct command line parameter values
- image_base_name = basename(image_path)
- destination_path = join(destination_disk, image_base_name)
- if capability == 'shell':
- if os.name == 'nt': capability = 'copy'
- elif os.name == 'posix': capability = 'cp'
- if capability == 'cp' or capability == 'copy' or capability == 'copy':
- copy_method = capability
- cmd = [copy_method, image_path, destination_path]
- if os.name == 'posix':
- result = self.run_command(cmd, shell=False)
- result = self.run_command(["sync"])
- else:
- result = self.run_command(cmd)
-
- # Allow mbed to cycle
- sleep(program_cycle_s)
-
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginCopyMethod_Shell()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_silabs.py b/workspace_tools/host_tests/host_tests_plugins/module_copy_silabs.py
deleted file mode 100644
index 494bcf4..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_copy_silabs.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from host_test_plugins import HostTestPluginBase
-from time import sleep
-
-
-class HostTestPluginCopyMethod_Silabs(HostTestPluginBase):
-
- # Plugin interface
- name = 'HostTestPluginCopyMethod_Silabs'
- type = 'CopyMethod'
- capabilities = ['eACommander', 'eACommander-usb']
- required_parameters = ['image_path', 'destination_disk', 'program_cycle_s']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- self.EACOMMANDER_CMD = 'eACommander.exe'
- return True
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capabilitity, *args, **kwargs) is True:
- image_path = kwargs['image_path']
- destination_disk = kwargs['destination_disk']
- program_cycle_s = kwargs['program_cycle_s']
- if capabilitity == 'eACommander':
- cmd = [self.EACOMMANDER_CMD,
- '--serialno', destination_disk,
- '--flash', image_path,
- '--resettype', '2', '--reset']
- result = self.run_command(cmd)
- elif capabilitity == 'eACommander-usb':
- cmd = [self.EACOMMANDER_CMD,
- '--usb', destination_disk,
- '--flash', image_path]
- result = self.run_command(cmd)
-
- # Allow mbed to cycle
- sleep(program_cycle_s)
-
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginCopyMethod_Silabs()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_smart.py b/workspace_tools/host_tests/host_tests_plugins/module_copy_smart.py
deleted file mode 100644
index 9fb5970..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_copy_smart.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import os
-import sys
-from os.path import join, basename, exists, abspath, dirname
-from time import sleep
-from host_test_plugins import HostTestPluginBase
-
-sys.path.append(abspath(join(dirname(__file__), "../../../")))
-from workspace_tools.test_api import get_autodetected_MUTS_list
-
-class HostTestPluginCopyMethod_Smart(HostTestPluginBase):
-
- # Plugin interface
- name = 'HostTestPluginCopyMethod_Smart'
- type = 'CopyMethod'
- stable = True
- capabilities = ['smart']
- required_parameters = ['image_path', 'destination_disk', 'target_mcu']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- return True
-
- def execute(self, capability, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capability, *args, **kwargs) is True:
- image_path = kwargs['image_path']
- destination_disk = kwargs['destination_disk']
- target_mcu = kwargs['target_mcu']
- # Wait for mount point to be ready
- self.check_mount_point_ready(destination_disk) # Blocking
- # Prepare correct command line parameter values
- image_base_name = basename(image_path)
- destination_path = join(destination_disk, image_base_name)
- if capability == 'smart':
- if os.name == 'posix':
- cmd = ['cp', image_path, destination_path]
- result = self.run_command(cmd, shell=False)
-
- cmd = ['sync']
- result = self.run_command(cmd, shell=False)
- elif os.name == 'nt':
- cmd = ['copy', image_path, destination_path]
- result = self.run_command(cmd, shell=True)
-
- # Give the OS and filesystem time to settle down
- sleep(3)
-
- platform_name_filter = [target_mcu]
- muts_list = {}
-
- remount_complete = False
-
- for i in range(0, 60):
- print('Looking for %s with MBEDLS' % target_mcu)
- muts_list = get_autodetected_MUTS_list(platform_name_filter=platform_name_filter)
-
- if 1 in muts_list:
- mut = muts_list[1]
- destination_disk = mut['disk']
- destination_path = join(destination_disk, image_base_name)
-
- if mut['mcu'] == 'LPC1768' or mut['mcu'] == 'LPC11U24':
- if exists(destination_disk) and exists(destination_path):
- remount_complete = True
- break;
- else:
- if exists(destination_disk) and not exists(destination_path):
- remount_complete = True
- break;
-
- sleep(1)
-
- if remount_complete:
- print('Remount complete')
- else:
- print('Remount FAILED')
-
- if exists(destination_disk):
- print('Disk exists')
- else:
- print('Disk does not exist')
-
- if exists(destination_path):
- print('Image exists')
- else:
- print('Image does not exist')
-
- result = None
-
-
- return result
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginCopyMethod_Smart()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_reset_mbed.py b/workspace_tools/host_tests/host_tests_plugins/module_reset_mbed.py
deleted file mode 100644
index 0390d84..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_reset_mbed.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from host_test_plugins import HostTestPluginBase
-
-
-class HostTestPluginResetMethod_Mbed(HostTestPluginBase):
-
- def safe_sendBreak(self, serial):
- """ Wraps serial.sendBreak() to avoid serial::serialposix.py exception on Linux
- Traceback (most recent call last):
- File "make.py", line 189, in
- serial.sendBreak()
- File "/usr/lib/python2.7/dist-packages/serial/serialposix.py", line 511, in sendBreak
- termios.tcsendbreak(self.fd, int(duration/0.25))
- error: (32, 'Broken pipe')
- """
- result = True
- try:
- serial.sendBreak()
- except:
- # In linux a termios.error is raised in sendBreak and in setBreak.
- # The following setBreak() is needed to release the reset signal on the target mcu.
- try:
- serial.setBreak(False)
- except:
- result = False
- return result
-
- # Plugin interface
- name = 'HostTestPluginResetMethod_Mbed'
- type = 'ResetMethod'
- stable = True
- capabilities = ['default']
- required_parameters = ['serial']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- return True
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capabilitity, *args, **kwargs) is True:
- if capabilitity == 'default':
- serial = kwargs['serial']
- result = self.safe_sendBreak(serial)
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginResetMethod_Mbed()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_reset_mps2.py b/workspace_tools/host_tests/host_tests_plugins/module_reset_mps2.py
deleted file mode 100644
index 40ff267..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_reset_mps2.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import os
-from host_test_plugins import HostTestPluginBase
-from time import sleep
-
-# Note: This plugin is not fully functional, needs improvements
-
-class HostTestPluginResetMethod_MPS2(HostTestPluginBase):
- """ Plugin used to reset ARM_MPS2 platform
- Supports:
- reboot.txt - startup from standby state, reboots when in run mode.
- shutdown.txt - shutdown from run mode.
- reset.txt - reset FPGA during run mode.
- """
- def touch_file(self, file):
- """ Touch file and set timestamp to items
- """
- tfile = file+'.tmp'
- fhandle = open(tfile, 'a')
- try:
- fhandle.close()
- finally:
- os.rename(tfile, file)
- return True
-
- # Plugin interface
- name = 'HostTestPluginResetMethod_MPS2'
- type = 'ResetMethod'
- capabilities = ['mps2-reboot', 'mps2-reset']
- required_parameters = ['disk']
-
- def setup(self, *args, **kwargs):
- """ Prepare / configure plugin to work.
- This method can receive plugin specific parameters by kwargs and
- ignore other parameters which may affect other plugins.
- """
- return True
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- return True
- result = False
- if self.check_parameters(capabilitity, *args, **kwargs) is True:
- disk = kwargs['disk']
-
- if capabilitity == 'mps2-reboot' and self.touch_file(disk + 'reboot.txt'):
- sleep(20)
- result = True
-
- elif capabilitity == 'mps2-reset' and self.touch_file(disk + 'reboot.txt'):
- sleep(20)
- result = True
-
- return result
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginResetMethod_MPS2()
diff --git a/workspace_tools/host_tests/host_tests_plugins/module_reset_silabs.py b/workspace_tools/host_tests/host_tests_plugins/module_reset_silabs.py
deleted file mode 100644
index 2c05cb2..0000000
--- a/workspace_tools/host_tests/host_tests_plugins/module_reset_silabs.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from host_test_plugins import HostTestPluginBase
-
-
-class HostTestPluginResetMethod_SiLabs(HostTestPluginBase):
-
- # Plugin interface
- name = 'HostTestPluginResetMethod_SiLabs'
- type = 'ResetMethod'
- stable = True
- capabilities = ['eACommander', 'eACommander-usb']
- required_parameters = ['disk']
-
- def setup(self, *args, **kwargs):
- """ Configure plugin, this function should be called before plugin execute() method is used.
- """
- # Note you need to have eACommander.exe on your system path!
- self.EACOMMANDER_CMD = 'eACommander.exe'
- return True
-
- def execute(self, capabilitity, *args, **kwargs):
- """ Executes capability by name.
- Each capability may directly just call some command line
- program or execute building pythonic function
- """
- result = False
- if self.check_parameters(capabilitity, *args, **kwargs) is True:
- disk = kwargs['disk'].rstrip('/\\')
-
- if capabilitity == 'eACommander':
- # For this copy method 'disk' will be 'serialno' for eACommander command line parameters
- # Note: Commands are executed in the order they are specified on the command line
- cmd = [self.EACOMMANDER_CMD,
- '--serialno', disk,
- '--resettype', '2', '--reset',]
- result = self.run_command(cmd)
- elif capabilitity == 'eACommander-usb':
- # For this copy method 'disk' will be 'usb address' for eACommander command line parameters
- # Note: Commands are executed in the order they are specified on the command line
- cmd = [self.EACOMMANDER_CMD,
- '--usb', disk,
- '--resettype', '2', '--reset',]
- result = self.run_command(cmd)
- return result
-
-
-def load_plugin():
- """ Returns plugin available in this module
- """
- return HostTestPluginResetMethod_SiLabs()
diff --git a/workspace_tools/host_tests/mbedrpc.py b/workspace_tools/host_tests/mbedrpc.py
deleted file mode 100644
index ffbdef4..0000000
--- a/workspace_tools/host_tests/mbedrpc.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# mbedRPC.py - mbed RPC interface for Python
-#
-##Copyright (c) 2010 ARM Ltd
-##
-##Permission is hereby granted, free of charge, to any person obtaining a copy
-##of this software and associated documentation files (the "Software"), to deal
-##in the Software without restriction, including without limitation the rights
-##to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-##copies of the Software, and to permit persons to whom the Software is
-##furnished to do so, subject to the following conditions:
-##
-##The above copyright notice and this permission notice shall be included in
-##all copies or substantial portions of the Software.
-##
-##THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-##IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-##FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-##AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-##LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-##OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-##THE SOFTWARE.
-#
-# Example:
-# >from mbedRPC import*
-# >mbed = SerialRPC("COM5",9600)
-# >myled = DigitalOut(mbed,"myled") <--- Where the text in quotations matches your RPC pin definition's second parameter, in this case it could be RpcDigitalOut myled(LED1,"myled");
-# >myled.write(1)
-# >
-
-import serial, urllib2, time
-
-# mbed super class
-class mbed:
- def __init__(self):
- print("This will work as a demo but no transport mechanism has been selected")
-
- def rpc(self, name, method, args):
- print("Superclass method not overridden")
-
-
-# Transport mechanisms, derived from mbed
-class SerialRPC(mbed):
- def __init__(self, port, baud):
- self.ser = serial.Serial(port)
- self.ser.setBaudrate(baud)
-
- def rpc(self, name, method, args):
- # creates the command to be sent serially - /name/method arg1 arg2 arg3 ... argN
- str = "/" + name + "/" + method + " " + " ".join(args) + "\n"
- # prints the command being executed
- print str
- # writes the command to serial
- self.ser.write(str)
- # strips trailing characters from the line just written
- ret_val = self.ser.readline().strip()
- return ret_val
-
-
-class HTTPRPC(mbed):
- def __init__(self, ip):
- self.host = "http://" + ip
-
- def rpc(self, name, method, args):
- response = urllib2.urlopen(self.host + "/rpc/" + name + "/" + method + "%20" + "%20".join(args))
- return response.read().strip()
-
-
-# generic mbed interface super class
-class mbed_interface():
- # initialize an mbed interface with a transport mechanism and pin name
- def __init__(self, this_mbed, mpin):
- self.mbed = this_mbed
- if isinstance(mpin, str):
- self.name = mpin
-
- def __del__(self):
- r = self.mbed.rpc(self.name, "delete", [])
-
- def new(self, class_name, name, pin1, pin2 = "", pin3 = ""):
- args = [arg for arg in [pin1,pin2,pin3,name] if arg != ""]
- r = self.mbed.rpc(class_name, "new", args)
-
- # generic read
- def read(self):
- r = self.mbed.rpc(self.name, "read", [])
- return int(r)
-
-
-# for classes that need write functionality - inherits from the generic reading interface
-class mbed_interface_write(mbed_interface):
- def __init__(self, this_mbed, mpin):
- mbed_interface.__init__(self, this_mbed, mpin)
-
- # generic write
- def write(self, value):
- r = self.mbed.rpc(self.name, "write", [str(value)])
-
-
-# mbed interfaces
-class DigitalOut(mbed_interface_write):
- def __init__(self, this_mbed, mpin):
- mbed_interface_write.__init__(self, this_mbed, mpin)
-
-
-class AnalogIn(mbed_interface):
- def __init__(self, this_mbed, mpin):
- mbed_interface.__init__(self, this_mbed, mpin)
-
- def read_u16(self):
- r = self.mbed.rpc(self.name, "read_u16", [])
- return int(r)
-
-
-class AnalogOut(mbed_interface_write):
- def __init__(self, this_mbed, mpin):
- mbed_interface_write.__init__(self, this_mbed, mpin)
-
- def write_u16(self, value):
- self.mbed.rpc(self.name, "write_u16", [str(value)])
-
- def read(self):
- r = self.mbed.rpc(self.name, "read", [])
- return float(r)
-
-
-class DigitalIn(mbed_interface):
- def __init__(self, this_mbed, mpin):
- mbed_interface.__init__(self, this_mbed, mpin)
-
-
-class PwmOut(mbed_interface_write):
- def __init__(self, this_mbed, mpin):
- mbed_interface_write.__init__(self, this_mbed, mpin)
-
- def read(self):
- r = self.mbed.rpc(self.name, "read", [])
- return r
-
- def period(self, value):
- self.mbed.rpc(self.name, "period", [str(value)])
-
- def period_ms(self, value):
- self.mbed.rpc(self.name, "period_ms", [str(value)])
-
- def period_us(self, value):
- self.mbed.rpc(self.name, "period_us", [str(value)])
-
- def pulsewidth(self, value):
- self.mbed.rpc(self.name, "pulsewidth", [str(value)])
-
- def pulsewidth_ms(self, value):
- self.mbed.rpc(self.name, "pulsewidth_ms", [str(value)])
-
- def pulsewidth_us(self, value):
- self.mbed.rpc(self.name, "pulsewidth_us", [str(value)])
-
-
-class RPCFunction(mbed_interface):
- def __init__(self, this_mbed, name):
- mbed_interface.__init__(self, this_mbed, name)
-
- def run(self, input):
- r = self.mbed.rpc(self.name, "run", [input])
- return r
-
-
-class RPCVariable(mbed_interface_write):
- def __init__(self, this_mbed, name):
- mbed_interface_write.__init__(self, this_mbed, name)
-
- def read(self):
- r = self.mbed.rpc(self.name, "read", [])
- return r
-
-class Timer(mbed_interface):
- def __init__(self, this_mbed, name):
- mbed_interface.__init__(self, this_mbed, name)
-
- def start(self):
- r = self.mbed.rpc(self.name, "start", [])
-
- def stop(self):
- r = self.mbed.rpc(self.name, "stop", [])
-
- def reset(self):
- r = self.mbed.rpc(self.name, "reset", [])
-
- def read(self):
- r = self.mbed.rpc(self.name, "read", [])
- return float(re.search('\d+\.*\d*', r).group(0))
-
- def read_ms(self):
- r = self.mbed.rpc(self.name, "read_ms", [])
- return float(re.search('\d+\.*\d*', r).group(0))
-
- def read_us(self):
- r = self.mbed.rpc(self.name, "read_us", [])
- return float(re.search('\d+\.*\d*', r).group(0))
-
-# Serial
-class Serial():
- def __init__(self, this_mbed, tx, rx=""):
- self.mbed = this_mbed
- if isinstance(tx, str):
- self.name = tx
-
- def __del__(self):
- r = self.mbed.rpc(self.name, "delete", [])
-
- def baud(self, value):
- r = self.mbed.rpc(self.name, "baud", [str(value)])
-
- def putc(self, value):
- r = self.mbed.rpc(self.name, "putc", [str(value)])
-
- def puts(self, value):
- r = self.mbed.rpc(self.name, "puts", ["\"" + str(value) + "\""])
-
- def getc(self):
- r = self.mbed.rpc(self.name, "getc", [])
- return int(r)
-
-
-def wait(s):
- time.sleep(s)
diff --git a/workspace_tools/host_tests/midi.py b/workspace_tools/host_tests/midi.py
deleted file mode 100644
index 67f34ea..0000000
--- a/workspace_tools/host_tests/midi.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from __future__ import print_function
-import sys
-import re
-import time
-import mido
-from mido import Message
-
-
-def test_midi_in(port):
- expected_messages_count=0
- while expected_messages_count < 7:
- for message in port.iter_pending():
- if message.type in ('note_on', 'note_off', 'program_change', 'sysex'):
- yield message
- expected_messages_count+=1
- time.sleep(0.1)
-
-def test_midi_loopback(input_port):
- expected_messages_count=0
- while expected_messages_count < 1:
- for message in input_port.iter_pending():
- print('Test MIDI OUT loopback received {}'.format(message.hex()))
- expected_messages_count+=1
-
-def test_midi_out_loopback(output_port,input_port):
- print("Test MIDI OUT loopback")
- output_port.send(Message('program_change', program=1))
- test_midi_loopback(input_port)
-
- output_port.send(Message('note_on', note=21))
- test_midi_loopback(input_port)
-
- output_port.send(Message('note_off', note=21))
- test_midi_loopback(input_port)
-
- output_port.send(Message('sysex', data=[0x7E,0x7F,0x09,0x01]))
- test_midi_loopback(input_port)
-
- output_port.send(Message('sysex', data=[0x7F,0x7F,0x04,0x01,0x7F,0x7F]))
- test_midi_loopback(input_port)
-
- output_port.send(Message('sysex', data=[0x41,0x10,0x42,0x12,0x40,0x00,0x7F,0x00,0x41]))
- test_midi_loopback(input_port)
-
- output_port.send(Message('sysex', data=[0x41,0x10,0x42,0x12,0x40,0x00,0x04,0x7F,0x3D]))
- test_midi_loopback(input_port)
-
-portname=""
-
-while portname=="":
- print("Wait for MIDI IN plug ...")
- for name in mido.get_input_names():
- matchObj = re.match( r'Mbed', name)
-
- if matchObj:
- portname=name
- time.sleep( 1 )
-
-try:
- input_port = mido.open_input(portname)
- output_port = mido.open_output(portname)
-
- print('Using {}'.format(input_port))
-
- print("Test MIDI IN")
-
- for message in test_midi_in(input_port):
- print('Test MIDI IN received {}'.format(message.hex()))
-
- test_midi_out_loopback(output_port,input_port)
-except KeyboardInterrupt:
- pass
\ No newline at end of file
diff --git a/workspace_tools/host_tests/net_test.py b/workspace_tools/host_tests/net_test.py
deleted file mode 100644
index 01b4541..0000000
--- a/workspace_tools/host_tests/net_test.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from host_test import Test, Simple
-from sys import stdout
-
-class NETTest(Simple):
- def __init__(self):
- Test.__init__(self)
- self.mbed.init_serial(115200)
- self.mbed.reset()
-
-if __name__ == '__main__':
- NETTest().run()
diff --git a/workspace_tools/host_tests/rpc.py b/workspace_tools/host_tests/rpc.py
deleted file mode 100644
index 84b85d2..0000000
--- a/workspace_tools/host_tests/rpc.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from host_test import Test
-from mbedrpc import SerialRPC, DigitalOut, DigitalIn, pin
-
-
-class RpcTest(Test):
- def test(self):
- self.notify("RPC Test")
- s = SerialRPC(self.mbed.port, debug=True)
-
- self.notify("Init remote objects")
-
- p_out = pin("p10")
- p_in = pin("p11")
-
- if hasattr(self.mbed.options, 'micro'):
- if self.mbed.options.micro == 'M0+':
- print "Freedom Board: PTA12 <-> PTC4"
- p_out = pin("PTA12")
- p_in = pin("PTC4")
-
- self.output = DigitalOut(s, p_out);
- self.input = DigitalIn(s, p_in);
-
- self.check = True
- self.write_read_test(1)
- self.write_read_test(0)
- return self.check
-
- def write_read_test(self, v):
- self.notify("Check %d" % v)
- self.output.write(v)
- if self.input.read() != v:
- self.notify("ERROR")
- self.check = False
- else:
- self.notify("OK")
-
-
-if __name__ == '__main__':
- RpcTest().run()
diff --git a/workspace_tools/host_tests/rtc_auto.py b/workspace_tools/host_tests/rtc_auto.py
deleted file mode 100644
index d267936..0000000
--- a/workspace_tools/host_tests/rtc_auto.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-from time import time, strftime, gmtime
-
-class RTCTest():
- PATTERN_RTC_VALUE = "\[(\d+)\] \[(\d+-\d+-\d+ \d+:\d+:\d+ [AaPpMm]{2})\]"
- re_detect_rtc_value = re.compile(PATTERN_RTC_VALUE)
-
- def test(self, selftest):
- test_result = True
- start = time()
- sec_prev = 0
- for i in range(0, 5):
- # Timeout changed from default: we need to wait longer for some boards to start-up
- c = selftest.mbed.serial_readline(timeout=10)
- if c is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify(c.strip())
- delta = time() - start
- m = self.re_detect_rtc_value.search(c)
- if m and len(m.groups()):
- sec = int(m.groups()[0])
- time_str = m.groups()[1]
- correct_time_str = strftime("%Y-%m-%d %H:%M:%S %p", gmtime(float(sec)))
- single_result = time_str == correct_time_str and sec > 0 and sec > sec_prev
- test_result = test_result and single_result
- result_msg = "OK" if single_result else "FAIL"
- selftest.notify("HOST: [%s] [%s] received time %+d sec after %.2f sec... %s"% (sec, time_str, sec - sec_prev, delta, result_msg))
- sec_prev = sec
- else:
- test_result = False
- break
- start = time()
- return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/serial_nc_rx_auto.py b/workspace_tools/host_tests/serial_nc_rx_auto.py
deleted file mode 100644
index 59975db..0000000
--- a/workspace_tools/host_tests/serial_nc_rx_auto.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import uuid
-import time
-import string
-from sys import stdout
-
-class SerialNCRXTest():
-
- def test(self, selftest):
- selftest.mbed.flush();
- # Wait 0.5 seconds to ensure mbed is listening
- time.sleep(0.5)
-
- #handshake with target to sync test start
- selftest.mbed.serial_write("S");
-
- strip_chars = string.whitespace + "\0"
-
- out_str = selftest.mbed.serial_readline()
-
- if not out_str:
- selftest.notify("HOST: No output detected")
- return selftest.RESULT_IO_SERIAL
-
- out_str_stripped = out_str.strip(strip_chars)
-
- if out_str_stripped != "RX OK - Start NC test":
- selftest.notify("HOST: Unexpected output. Expected 'RX OK - Expected' but received '%s'" % out_str_stripped)
- return selftest.RESULT_FAILURE
-
- # Wait 0.5 seconds to ensure mbed is listening
- time.sleep(0.5)
-
- selftest.mbed.serial_write("E");
-
- strip_chars = string.whitespace + "\0"
-
- out_str = selftest.mbed.serial_readline()
-
- if not out_str:
- selftest.notify("HOST: No output detected")
- return selftest.RESULT_IO_SERIAL
-
- out_str_stripped = out_str.strip(strip_chars)
-
- if out_str_stripped != "RX OK - Expected":
- selftest.notify("HOST: Unexpected output. Expected 'RX OK - Expected' but received '%s'" % out_str_stripped)
- return selftest.RESULT_FAILURE
-
- # Wait 0.5 seconds to ensure mbed is listening
- time.sleep(0.5)
-
- # Send character, mbed shouldn't receive
- selftest.mbed.serial_write("U");
-
- out_str = selftest.mbed.serial_readline()
-
- # If no characters received, pass the test
- if not out_str:
- selftest.notify("HOST: No further output detected")
- return selftest.RESULT_SUCCESS
- else:
- out_str_stripped = out_str.strip(strip_chars)
-
- if out_str_stripped == "RX OK - Unexpected":
- selftest.notify("HOST: Unexpected output returned indicating RX still functioning")
- else:
- selftest.notify("HOST: Extraneous output '%s' detected indicating unknown error" % out_str_stripped)
-
- return selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/serial_nc_tx_auto.py b/workspace_tools/host_tests/serial_nc_tx_auto.py
deleted file mode 100644
index 707c476..0000000
--- a/workspace_tools/host_tests/serial_nc_tx_auto.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import uuid
-import time
-import string
-from sys import stdout
-
-class SerialNCTXTest():
-
- def test(self, selftest):
- selftest.mbed.flush();
- # Wait 0.5 seconds to ensure mbed is listening
- time.sleep(0.5)
-
- selftest.mbed.serial_write("S");
-
- strip_chars = string.whitespace + "\0"
-
- out_str = selftest.mbed.serial_readline()
- selftest.notify("HOST: " + out_str)
-
- if not out_str:
- selftest.notify("HOST: No output detected")
- return selftest.RESULT_IO_SERIAL
-
- out_str_stripped = out_str.strip(strip_chars)
-
- if out_str_stripped != "TX OK - Expected":
- selftest.notify("HOST: Unexpected output. Expected 'TX OK - Expected' but received '%s'" % out_str_stripped)
- return selftest.RESULT_FAILURE
-
- out_str = selftest.mbed.serial_readline()
-
- # If no characters received, pass the test
- if not out_str:
- selftest.notify("HOST: No further output detected")
- return selftest.RESULT_SUCCESS
- else:
- out_str_stripped = out_str.strip(strip_chars)
-
- if out_str_stripped == "TX OK - Unexpected":
- selftest.notify("HOST: Unexpected output returned indicating TX still functioning")
- else:
- selftest.notify("HOST: Extraneous output '%s' detected indicating unknown error" % out_str_stripped)
-
- return selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/stdio_auto.py b/workspace_tools/host_tests/stdio_auto.py
deleted file mode 100644
index 1fe1890..0000000
--- a/workspace_tools/host_tests/stdio_auto.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-import random
-from time import time
-
-class StdioTest():
- PATTERN_INT_VALUE = "Your value was: (-?\d+)"
- re_detect_int_value = re.compile(PATTERN_INT_VALUE)
-
- def test(self, selftest):
- test_result = True
-
- c = selftest.mbed.serial_readline() # {{start}} preamble
- if c is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify(c)
-
- for i in range(0, 10):
- random_integer = random.randint(-99999, 99999)
- selftest.notify("HOST: Generated number: " + str(random_integer))
- start = time()
- selftest.mbed.serial_write(str(random_integer) + "\n")
-
- serial_stdio_msg = selftest.mbed.serial_readline()
- if serial_stdio_msg is None:
- return selftest.RESULT_IO_SERIAL
- delay_time = time() - start
- selftest.notify(serial_stdio_msg.strip())
-
- # Searching for reply with scanned values
- m = self.re_detect_int_value.search(serial_stdio_msg)
- if m and len(m.groups()):
- int_value = m.groups()[0]
- int_value_cmp = random_integer == int(int_value)
- test_result = test_result and int_value_cmp
- selftest.notify("HOST: Number %s read after %.3f sec ... [%s]"% (int_value, delay_time, "OK" if int_value_cmp else "FAIL"))
- else:
- test_result = False
- break
- return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/tcpecho_client.py b/workspace_tools/host_tests/tcpecho_client.py
deleted file mode 100644
index 303f002..0000000
--- a/workspace_tools/host_tests/tcpecho_client.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import socket
-import string, random
-from time import time
-
-from private_settings import SERVER_ADDRESS
-
-ECHO_PORT = 7
-
-LEN_PACKET = 127
-N_PACKETS = 5000
-TOT_BITS = float(LEN_PACKET * N_PACKETS * 8) * 2
-MEGA = float(1024 * 1024)
-UPDATE_STEP = (N_PACKETS/10)
-
-class TCP_EchoClient:
- def __init__(self, host):
- self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.s.connect((host, ECHO_PORT))
- self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
-
- def __packet(self):
- # Comment out the checks when measuring the throughput
- # self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
- self.s.send(self.packet)
- data = self.s.recv(LEN_PACKET)
- # assert self.packet == data, "packet error:\n%s\n%s\n" % (self.packet, data)
-
- def test(self):
- start = time()
- for i in range(N_PACKETS):
- if (i % UPDATE_STEP) == 0: print '%.2f%%' % ((float(i)/float(N_PACKETS)) * 100.)
- self.__packet()
- t = time() - start
- print 'Throughput: (%.2f)Mbits/s' % ((TOT_BITS / t)/MEGA)
-
- def __del__(self):
- self.s.close()
-
-while True:
- e = TCP_EchoClient(SERVER_ADDRESS)
- e.test()
diff --git a/workspace_tools/host_tests/tcpecho_client_auto.py b/workspace_tools/host_tests/tcpecho_client_auto.py
deleted file mode 100644
index fe915a1..0000000
--- a/workspace_tools/host_tests/tcpecho_client_auto.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import socket
-from sys import stdout
-from SocketServer import BaseRequestHandler, TCPServer
-
-class TCPEchoClient_Handler(BaseRequestHandler):
- def handle(self):
- """ One handle per connection
- """
- print "HOST: Connection received...",
- count = 1;
- while True:
- data = self.request.recv(1024)
- if not data: break
- self.request.sendall(data)
- if '{{end}}' in str(data):
- print
- print str(data)
- else:
- if not count % 10:
- sys.stdout.write('.')
- count += 1
- stdout.flush()
-
-class TCPEchoClientTest():
- def send_server_ip_port(self, selftest, ip_address, port_no):
- """ Set up network host. Reset target and and send server IP via serial to Mbed
- """
- c = selftest.mbed.serial_readline() # 'TCPCllient waiting for server IP and port...'
- if c is None:
- self.print_result(selftest.RESULT_IO_SERIAL)
- return
-
- selftest.notify(c.strip())
- selftest.notify("HOST: Sending server IP Address to target...")
-
- connection_str = ip_address + ":" + str(port_no) + "\n"
- selftest.mbed.serial_write(connection_str)
- selftest.notify(connection_str)
-
- # Two more strings about connection should be sent by MBED
- for i in range(0, 2):
- c = selftest.mbed.serial_readline()
- if c is None:
- selftest.print_result(self.RESULT_IO_SERIAL)
- return
- selftest.notify(c.strip())
-
- def test(self, selftest):
- # We need to discover SERVEP_IP and set up SERVER_PORT
- # Note: Port 7 is Echo Protocol:
- #
- # Port number rationale:
- #
- # The Echo Protocol is a service in the Internet Protocol Suite defined
- # in RFC 862. It was originally proposed for testing and measurement
- # of round-trip times[citation needed] in IP networks.
- #
- # A host may connect to a server that supports the Echo Protocol using
- # the Transmission Control Protocol (TCP) or the User Datagram Protocol
- # (UDP) on the well-known port number 7. The server sends back an
- # identical copy of the data it received.
- SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
- SERVER_PORT = 7
-
- # Returning none will suppress host test from printing success code
- server = TCPServer((SERVER_IP, SERVER_PORT), TCPEchoClient_Handler)
- print "HOST: Listening for TCP connections: " + SERVER_IP + ":" + str(SERVER_PORT)
- self.send_server_ip_port(selftest, SERVER_IP, SERVER_PORT)
- server.serve_forever()
diff --git a/workspace_tools/host_tests/tcpecho_server.py b/workspace_tools/host_tests/tcpecho_server.py
deleted file mode 100644
index 4a68bd9..0000000
--- a/workspace_tools/host_tests/tcpecho_server.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from SocketServer import BaseRequestHandler, TCPServer
-from time import time
-
-from private_settings import LOCALHOST
-
-MAX_INDEX = 126
-MEGA = float(1024 * 1024)
-
-class TCP_EchoHandler(BaseRequestHandler):
- def handle(self):
- print "\nconnection received"
- start = time()
- bytes = 0
- index = 0
- while True:
- data = self.request.recv(1024)
- if not data: break
-
- bytes += len(data)
- for n in map(ord, data):
- if n != index:
- print "data error %d != %d" % (n , index)
- index += 1
- if index > MAX_INDEX:
- index = 0
-
- self.request.sendall(data)
- t = time() - start
- b = float(bytes * 8) * 2
- print "Throughput: (%.2f)Mbits/s" % ((b/t)/MEGA)
-
-server = TCPServer((LOCALHOST, 7), TCP_EchoHandler)
-print "listening for connections"
-server.serve_forever()
diff --git a/workspace_tools/host_tests/tcpecho_server_auto.py b/workspace_tools/host_tests/tcpecho_server_auto.py
deleted file mode 100644
index 8bc0e30..0000000
--- a/workspace_tools/host_tests/tcpecho_server_auto.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-import sys
-import uuid
-import socket
-from sys import stdout
-
-class TCPEchoServerTest():
- ECHO_SERVER_ADDRESS = ""
- ECHO_PORT = 0
- ECHO_LOOPs = 100
- s = None # Socket
-
- PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
- re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
-
- def test(self, selftest):
- result = False
- c = selftest.mbed.serial_readline()
- if c is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify(c)
-
- m = self.re_detect_server_ip.search(c)
- if m and len(m.groups()):
- self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
- self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
- selftest.notify("HOST: TCP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
-
- # We assume this test fails so can't send 'error' message to server
- try:
- self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.s.connect((self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
- except Exception, e:
- self.s = None
- selftest.notify("HOST: Socket error: %s"% e)
- return selftest.RESULT_ERROR
-
- print 'HOST: Sending %d echo strings...'% self.ECHO_LOOPs,
- for i in range(0, self.ECHO_LOOPs):
- TEST_STRING = str(uuid.uuid4())
- try:
- self.s.sendall(TEST_STRING)
- data = self.s.recv(128)
- except Exception, e:
- self.s = None
- selftest.notify("HOST: Socket error: %s"% e)
- return selftest.RESULT_ERROR
-
- received_str = repr(data)[1:-1]
- if TEST_STRING == received_str: # We need to cut not needed single quotes from the string
- sys.stdout.write('.')
- stdout.flush()
- result = True
- else:
- print "Expected: "
- print "'%s'"% TEST_STRING
- print "received: "
- print "'%s'"% received_str
- result = False
- break
-
- if self.s is not None:
- self.s.close()
- else:
- selftest.notify("HOST: TCP Server not found")
- result = False
- return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/tcpecho_server_loop.py b/workspace_tools/host_tests/tcpecho_server_loop.py
deleted file mode 100644
index df48397..0000000
--- a/workspace_tools/host_tests/tcpecho_server_loop.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-# Be sure that the tools directory is in the search path
-import sys
-from os.path import join, abspath, dirname
-ROOT = abspath(join(dirname(__file__), "..", ".."))
-sys.path.insert(0, ROOT)
-
-from workspace_tools.private_settings import LOCALHOST
-from SocketServer import BaseRequestHandler, TCPServer
-
-
-class TCP_EchoHandler(BaseRequestHandler):
- def handle(self):
- print "\nHandle connection from:", self.client_address
- while True:
- data = self.request.recv(1024)
- if not data: break
- self.request.sendall(data)
- self.request.close()
- print "socket closed"
-
-if __name__ == '__main__':
- server = TCPServer((LOCALHOST, 7), TCP_EchoHandler)
- print "listening for connections on:", (LOCALHOST, 7)
- server.serve_forever()
diff --git a/workspace_tools/host_tests/udp_link_layer_auto.py b/workspace_tools/host_tests/udp_link_layer_auto.py
deleted file mode 100644
index cb0578f..0000000
--- a/workspace_tools/host_tests/udp_link_layer_auto.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-"""
-How to use:
-make.py -m LPC1768 -t ARM -d E:\ -n NET_14
-udp_link_layer_auto.py -p COM20 -d E:\ -t 10
-"""
-
-import re
-import uuid
-import socket
-import thread
-from sys import stdout
-from time import time, sleep
-from host_test import DefaultTest
-from SocketServer import BaseRequestHandler, UDPServer
-
-
-# Received datagrams (with time)
-dict_udp_recv_datagrams = dict()
-
-# Sent datagrams (with time)
-dict_udp_sent_datagrams = dict()
-
-
-class UDPEchoClient_Handler(BaseRequestHandler):
- def handle(self):
- """ One handle per connection
- """
- _data, _socket = self.request
- # Process received datagram
- data_str = repr(_data)[1:-1]
- dict_udp_recv_datagrams[data_str] = time()
-
-
-def udp_packet_recv(threadName, server_ip, server_port):
- """ This function will receive packet stream from mbed device
- """
- server = UDPServer((server_ip, server_port), UDPEchoClient_Handler)
- print "[UDP_COUNTER] Listening for connections... %s:%d"% (server_ip, server_port)
- server.serve_forever()
-
-
-class UDPEchoServerTest(DefaultTest):
- ECHO_SERVER_ADDRESS = "" # UDP IP of datagram bursts
- ECHO_PORT = 0 # UDP port for datagram bursts
- CONTROL_PORT = 23 # TCP port used to get stats from mbed device, e.g. counters
- s = None # Socket
-
- TEST_PACKET_COUNT = 1000 # how many packets should be send
- TEST_STRESS_FACTOR = 0.001 # stress factor: 10 ms
- PACKET_SATURATION_RATIO = 29.9 # Acceptable packet transmission in %
-
- PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
- re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
-
- def get_control_data(self, command="stat\n"):
- BUFFER_SIZE = 256
- try:
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- s.connect((self.ECHO_SERVER_ADDRESS, self.CONTROL_PORT))
- except Exception, e:
- data = None
- s.send(command)
- data = s.recv(BUFFER_SIZE)
- s.close()
- return data
-
- def test(self):
- serial_ip_msg = self.mbed.serial_readline()
- if serial_ip_msg is None:
- return self.RESULT_IO_SERIAL
- stdout.write(serial_ip_msg)
- stdout.flush()
- # Searching for IP address and port prompted by server
- m = self.re_detect_server_ip.search(serial_ip_msg)
- if m and len(m.groups()):
- self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
- self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
- self.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
-
- # Open client socket to burst datagrams to UDP server in mbed
- try:
- self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- except Exception, e:
- self.s = None
- self.notify("HOST: Error: %s"% e)
- return self.RESULT_ERROR
-
- # UDP replied receiver works in background to get echoed datagrams
- SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
- SERVER_PORT = self.ECHO_PORT + 1
- thread.start_new_thread(udp_packet_recv, ("Thread-udp-recv", SERVER_IP, SERVER_PORT))
- sleep(0.5)
-
- # Burst part
- for no in range(self.TEST_PACKET_COUNT):
- TEST_STRING = str(uuid.uuid4())
- payload = str(no) + "__" + TEST_STRING
- self.s.sendto(payload, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
- dict_udp_sent_datagrams[payload] = time()
- sleep(self.TEST_STRESS_FACTOR)
-
- if self.s is not None:
- self.s.close()
-
- # Wait 5 seconds for packets to come
- result = True
- self.notify("HOST: Test Summary:")
- for d in range(5):
- sleep(1.0)
- summary_datagram_success = (float(len(dict_udp_recv_datagrams)) / float(self.TEST_PACKET_COUNT)) * 100.0
- self.notify("HOST: Datagrams received after +%d sec: %.3f%% (%d / %d), stress=%.3f ms"% (d,
- summary_datagram_success,
- len(dict_udp_recv_datagrams),
- self.TEST_PACKET_COUNT,
- self.TEST_STRESS_FACTOR))
- result = result and (summary_datagram_success >= self.PACKET_SATURATION_RATIO)
- stdout.flush()
-
- # Getting control data from test
- self.notify("...")
- self.notify("HOST: Mbed Summary:")
- mbed_stats = self.get_control_data()
- self.notify(mbed_stats)
- return self.RESULT_SUCCESS if result else self.RESULT_FAILURE
-
-
-if __name__ == '__main__':
- UDPEchoServerTest().run()
diff --git a/workspace_tools/host_tests/udpecho_client.py b/workspace_tools/host_tests/udpecho_client.py
deleted file mode 100644
index 1ff833f..0000000
--- a/workspace_tools/host_tests/udpecho_client.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from socket import socket, AF_INET, SOCK_DGRAM
-import string, random
-from time import time
-
-from private_settings import CLIENT_ADDRESS
-
-ECHO_PORT = 7
-
-LEN_PACKET = 127
-N_PACKETS = 5000
-TOT_BITS = float(LEN_PACKET * N_PACKETS * 8) * 2
-MEGA = float(1024 * 1024)
-UPDATE_STEP = (N_PACKETS/10)
-
-class UDP_EchoClient:
- s = socket(AF_INET, SOCK_DGRAM)
-
- def __init__(self, host):
- self.host = host
- self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
-
- def __packet(self):
- # Comment out the checks when measuring the throughput
- # packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
- UDP_EchoClient.s.sendto(packet, (self.host, ECHO_PORT))
- data = UDP_EchoClient.s.recv(LEN_PACKET)
- # assert packet == data, "packet error:\n%s\n%s\n" % (packet, data)
-
- def test(self):
- start = time()
- for i in range(N_PACKETS):
- if (i % UPDATE_STEP) == 0: print '%.2f%%' % ((float(i)/float(N_PACKETS)) * 100.)
- self.__packet()
- t = time() - start
- print 'Throughput: (%.2f)Mbits/s' % ((TOT_BITS / t)/MEGA)
-
-while True:
- e = UDP_EchoClient(CLIENT_ADDRESS)
- e.test()
diff --git a/workspace_tools/host_tests/udpecho_client_auto.py b/workspace_tools/host_tests/udpecho_client_auto.py
deleted file mode 100644
index 7896127..0000000
--- a/workspace_tools/host_tests/udpecho_client_auto.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import socket
-from sys import stdout
-from SocketServer import BaseRequestHandler, UDPServer
-
-class UDPEchoClient_Handler(BaseRequestHandler):
- def handle(self):
- """ One handle per connection
- """
- data, socket = self.request
- socket.sendto(data, self.client_address)
- if '{{end}}' in data:
- print
- print data
- else:
- sys.stdout.write('.')
- stdout.flush()
-
-class UDPEchoClientTest():
-
- def send_server_ip_port(self, selftest, ip_address, port_no):
- c = selftest.mbed.serial_readline() # 'UDPCllient waiting for server IP and port...'
- if c is None:
- selftest.print_result(selftest.RESULT_IO_SERIAL)
- return
- selftest.notify(c.strip())
-
- selftest.notify("HOST: Sending server IP Address to target...")
- connection_str = ip_address + ":" + str(port_no) + "\n"
- selftest.mbed.serial_write(connection_str)
-
- c = selftest.mbed.serial_readline() # 'UDPCllient waiting for server IP and port...'
- if c is None:
- self.print_result(selftest.RESULT_IO_SERIAL)
- return
- selftest.notify(c.strip())
- return selftest.RESULT_PASSIVE
-
- def test(self, selftest):
- # We need to discover SERVEP_IP and set up SERVER_PORT
- # Note: Port 7 is Echo Protocol:
- #
- # Port number rationale:
- #
- # The Echo Protocol is a service in the Internet Protocol Suite defined
- # in RFC 862. It was originally proposed for testing and measurement
- # of round-trip times[citation needed] in IP networks.
- #
- # A host may connect to a server that supports the Echo Protocol using
- # the Transmission Control Protocol (TCP) or the User Datagram Protocol
- # (UDP) on the well-known port number 7. The server sends back an
- # identical copy of the data it received.
- SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
- SERVER_PORT = 7
-
- # Returning none will suppress host test from printing success code
- server = UDPServer((SERVER_IP, SERVER_PORT), UDPEchoClient_Handler)
- print "HOST: Listening for UDP connections..."
- self.send_server_ip_port(selftest, SERVER_IP, SERVER_PORT)
- server.serve_forever()
diff --git a/workspace_tools/host_tests/udpecho_server.py b/workspace_tools/host_tests/udpecho_server.py
deleted file mode 100644
index f607433..0000000
--- a/workspace_tools/host_tests/udpecho_server.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from SocketServer import BaseRequestHandler, UDPServer
-from private_settings import SERVER_ADDRESS
-
-class UDP_EchoHandler(BaseRequestHandler):
- def handle(self):
- data, socket = self.request
- print "client:", self.client_address
- print "data:", data
- socket.sendto(data, self.client_address)
-
-server = UDPServer((SERVER_ADDRESS, 7195), UDP_EchoHandler)
-print "listening for connections"
-server.serve_forever()
diff --git a/workspace_tools/host_tests/udpecho_server_auto.py b/workspace_tools/host_tests/udpecho_server_auto.py
deleted file mode 100644
index a7ee026..0000000
--- a/workspace_tools/host_tests/udpecho_server_auto.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-import sys
-import uuid
-from sys import stdout
-from socket import socket, AF_INET, SOCK_DGRAM
-
-class UDPEchoServerTest():
- ECHO_SERVER_ADDRESS = ""
- ECHO_PORT = 0
- s = None # Socket
-
- PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
- re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
-
- def test(self, selftest):
- result = True
- serial_ip_msg = selftest.mbed.serial_readline()
- if serial_ip_msg is None:
- return selftest.RESULT_IO_SERIAL
- selftest.notify(serial_ip_msg)
- # Searching for IP address and port prompted by server
- m = self.re_detect_server_ip.search(serial_ip_msg)
- if m and len(m.groups()):
- self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
- self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
- selftest.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
-
- # We assume this test fails so can't send 'error' message to server
- try:
- self.s = socket(AF_INET, SOCK_DGRAM)
- except Exception, e:
- self.s = None
- selftest.notify("HOST: Socket error: %s"% e)
- return selftest.RESULT_ERROR
-
- for i in range(0, 100):
- TEST_STRING = str(uuid.uuid4())
- self.s.sendto(TEST_STRING, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
- data = self.s.recv(len(TEST_STRING))
- received_str = repr(data)[1:-1]
- if TEST_STRING != received_str:
- result = False
- break
- sys.stdout.write('.')
- stdout.flush()
- else:
- result = False
-
- if self.s is not None:
- self.s.close()
- return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/wait_us_auto.py b/workspace_tools/host_tests/wait_us_auto.py
deleted file mode 100644
index 2ab66a3..0000000
--- a/workspace_tools/host_tests/wait_us_auto.py
+++ /dev/null
@@ -1,69 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from time import time
-
-class WaitusTest():
- """ This test is reading single characters from stdio
- and measures time between their occurrences.
- """
- TICK_LOOP_COUNTER = 13
- TICK_LOOP_SUCCESSFUL_COUNTS = 10
- DEVIATION = 0.10 # +/-10%
-
- def test(self, selftest):
- test_result = True
- # First character to start test (to know after reset when test starts)
- if selftest.mbed.set_serial_timeout(None) is None:
- return selftest.RESULT_IO_SERIAL
- c = selftest.mbed.serial_read(1)
- if c is None:
- return selftest.RESULT_IO_SERIAL
- if c == '$': # target will printout TargetID e.g.: $$$$1040e649d5c09a09a3f6bc568adef61375c6
- #Read additional 39 bytes of TargetID
- if selftest.mbed.serial_read(39) is None:
- return selftest.RESULT_IO_SERIAL
- c = selftest.mbed.serial_read(1) # Re-read first 'tick'
- if c is None:
- return selftest.RESULT_IO_SERIAL
- start_serial_pool = time()
- start = time()
-
- success_counter = 0
-
- for i in range(0, self.TICK_LOOP_COUNTER):
- c = selftest.mbed.serial_read(1)
- if c is None:
- return selftest.RESULT_IO_SERIAL
- delta = time() - start
- deviation = abs(delta - 1)
- # Round values
- delta = round(delta, 2)
- deviation = round(deviation, 2)
- # Check if time measurements are in given range
- deviation_ok = True if delta > 0 and deviation <= self.DEVIATION else False
- success_counter = success_counter+1 if deviation_ok else 0
- msg = "OK" if deviation_ok else "FAIL"
- selftest.notify("%s in %.2f sec (%.2f) [%s]"% (c, delta, deviation, msg))
- start = time()
- if success_counter >= self.TICK_LOOP_SUCCESSFUL_COUNTS:
- break
- measurement_time = time() - start_serial_pool
- selftest.notify("Consecutive OK timer reads: %d"% success_counter)
- selftest.notify("Completed in %.2f sec" % (measurement_time))
- test_result = True if success_counter >= self.TICK_LOOP_SUCCESSFUL_COUNTS else False
- return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
diff --git a/workspace_tools/host_tests/wfi_auto.py b/workspace_tools/host_tests/wfi_auto.py
deleted file mode 100644
index a62c432..0000000
--- a/workspace_tools/host_tests/wfi_auto.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import sys
-import uuid
-import time
-from sys import stdout
-
-class WFITest():
-
- def test(self, selftest):
- c = selftest.mbed.serial_readline()
-
- if c == None:
- selftest.notify("HOST: No output detected")
- return selftest.RESULT_IO_SERIAL
-
- if c.strip() != "0":
- selftest.notify("HOST: Unexpected output. Expected '0' but received '%s'" % c.strip())
- return selftest.RESULT_FAILURE
-
- # Wait 10 seconds to allow serial prints (indicating failure)
- selftest.mbed.set_serial_timeout(10)
-
- # If no characters received, pass the test
- if not selftest.mbed.serial_readline():
- selftest.notify("HOST: No further output detected")
- return selftest.RESULT_SUCCESS
- else:
- selftest.notify("HOST: Extra output detected")
- return selftest.RESULT_FAILURE
diff --git a/workspace_tools/libraries.py b/workspace_tools/libraries.py
deleted file mode 100644
index c4db6c8..0000000
--- a/workspace_tools/libraries.py
+++ /dev/null
@@ -1,129 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from workspace_tools.paths import *
-from workspace_tools.data.support import *
-from workspace_tools.tests import TEST_MBED_LIB
-
-
-LIBRARIES = [
- # RTOS libraries
- {
- "id": "rtx",
- "source_dir": MBED_RTX,
- "build_dir": RTOS_LIBRARIES,
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "rtos",
- "source_dir": RTOS_ABSTRACTION,
- "build_dir": RTOS_LIBRARIES,
- "dependencies": [MBED_LIBRARIES, MBED_RTX],
- },
-
- # RPC
- {
- "id": "rpc",
- "source_dir": MBED_RPC,
- "build_dir": RPC_LIBRARY,
- "dependencies": [MBED_LIBRARIES],
- },
-
- # USB Device libraries
- {
- "id": "usb",
- "source_dir": USB,
- "build_dir": USB_LIBRARIES,
- "dependencies": [MBED_LIBRARIES],
- },
-
- # USB Host libraries
- {
- "id": "usb_host",
- "source_dir": USB_HOST,
- "build_dir": USB_HOST_LIBRARIES,
- "dependencies": [MBED_LIBRARIES, FAT_FS, MBED_RTX, RTOS_ABSTRACTION],
- },
-
- # DSP libraries
- {
- "id": "cmsis_dsp",
- "source_dir": DSP_CMSIS,
- "build_dir": DSP_LIBRARIES,
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "dsp",
- "source_dir": DSP_ABSTRACTION,
- "build_dir": DSP_LIBRARIES,
- "dependencies": [MBED_LIBRARIES, DSP_CMSIS],
- },
-
- # File system libraries
- {
- "id": "fat",
- "source_dir": [FAT_FS, SD_FS],
- "build_dir": FS_LIBRARY,
- "dependencies": [MBED_LIBRARIES]
- },
-
- # Network libraries
- {
- "id": "eth",
- "source_dir": [ETH_SOURCES, LWIP_SOURCES],
- "build_dir": ETH_LIBRARY,
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES]
- },
-
- {
- "id": "ublox",
- "source_dir": [UBLOX_SOURCES, CELLULAR_SOURCES, CELLULAR_USB_SOURCES, LWIP_SOURCES],
- "build_dir": UBLOX_LIBRARY,
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES],
- },
-
- # Unit Testing library
- {
- "id": "cpputest",
- "source_dir": [CPPUTEST_SRC, CPPUTEST_PLATFORM_SRC, CPPUTEST_TESTRUNNER_SCR],
- "build_dir": CPPUTEST_LIBRARY,
- "dependencies": [MBED_LIBRARIES],
- 'inc_dirs': [CPPUTEST_INC, CPPUTEST_PLATFORM_INC, CPPUTEST_TESTRUNNER_INC, TEST_MBED_LIB],
- 'inc_dirs_ext': [CPPUTEST_INC_EXT],
- 'macros': ["CPPUTEST_USE_MEM_LEAK_DETECTION=0", "CPPUTEST_USE_STD_CPP_LIB=0", "CPPUTEST=1"],
- },
-]
-
-
-LIBRARY_MAP = dict([(library['id'], library) for library in LIBRARIES])
-
-
-class Library:
- DEFAULTS = {
- "supported": DEFAULT_SUPPORT,
- 'dependencies': None,
- 'inc_dirs': None, # Include dirs required by library build
- 'inc_dirs_ext': None, # Include dirs required by others to use with this library
- 'macros': None, # Additional macros you want to define when building library
- }
- def __init__(self, lib_id):
- self.__dict__.update(Library.DEFAULTS)
- self.__dict__.update(LIBRARY_MAP[lib_id])
-
- def is_supported(self, target, toolchain):
- if not hasattr(self, 'supported'):
- return True
- return (target.name in self.supported) and (toolchain in self.supported[target.name])
diff --git a/workspace_tools/make.py b/workspace_tools/make.py
deleted file mode 100755
index e4ade4c..0000000
--- a/workspace_tools/make.py
+++ /dev/null
@@ -1,293 +0,0 @@
-#! /usr/bin/env python2
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-TEST BUILD & RUN
-"""
-import sys
-from time import sleep
-from shutil import copy
-from os.path import join, abspath, dirname
-
-# Be sure that the tools directory is in the search path
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-from workspace_tools.utils import args_error
-from workspace_tools.paths import BUILD_DIR
-from workspace_tools.paths import RTOS_LIBRARIES
-from workspace_tools.paths import RPC_LIBRARY
-from workspace_tools.paths import ETH_LIBRARY
-from workspace_tools.paths import USB_HOST_LIBRARIES, USB_LIBRARIES
-from workspace_tools.paths import DSP_LIBRARIES
-from workspace_tools.paths import FS_LIBRARY
-from workspace_tools.paths import UBLOX_LIBRARY
-from workspace_tools.tests import TESTS, Test, TEST_MAP
-from workspace_tools.tests import TEST_MBED_LIB
-from workspace_tools.targets import TARGET_MAP
-from workspace_tools.options import get_default_options_parser
-from workspace_tools.build_api import build_project
-try:
- import workspace_tools.private_settings as ps
-except:
- ps = object()
-
-
-if __name__ == '__main__':
- # Parse Options
- parser = get_default_options_parser()
- parser.add_option("-p",
- type="int",
- dest="program",
- help="The index of the desired test program: [0-%d]" % (len(TESTS)-1))
-
- parser.add_option("-n",
- dest="program_name",
- help="The name of the desired test program")
-
- parser.add_option("-j", "--jobs",
- type="int",
- dest="jobs",
- default=1,
- help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
-
- parser.add_option("-v", "--verbose",
- action="store_true",
- dest="verbose",
- default=False,
- help="Verbose diagnostic output")
-
- parser.add_option("--silent",
- action="store_true",
- dest="silent",
- default=False,
- help="Silent diagnostic output (no copy, compile notification)")
-
- parser.add_option("-D", "",
- action="append",
- dest="macros",
- help="Add a macro definition")
-
- # Local run
- parser.add_option("--automated", action="store_true", dest="automated",
- default=False, help="Automated test")
- parser.add_option("--host", dest="host_test",
- default=None, help="Host test")
- parser.add_option("--extra", dest="extra",
- default=None, help="Extra files")
- parser.add_option("--peripherals", dest="peripherals",
- default=None, help="Required peripherals")
- parser.add_option("--dep", dest="dependencies",
- default=None, help="Dependencies")
- parser.add_option("--source", dest="source_dir",
- default=None, help="The source (input) directory")
- parser.add_option("--duration", type="int", dest="duration",
- default=None, help="Duration of the test")
- parser.add_option("--build", dest="build_dir",
- default=None, help="The build (output) directory")
- parser.add_option("-d", "--disk", dest="disk",
- default=None, help="The mbed disk")
- parser.add_option("-s", "--serial", dest="serial",
- default=None, help="The mbed serial port")
- parser.add_option("-b", "--baud", type="int", dest="baud",
- default=None, help="The mbed serial baud rate")
- parser.add_option("-L", "--list-tests", action="store_true", dest="list_tests",
- default=False, help="List available tests in order and exit")
-
- # Ideally, all the tests with a single "main" thread can be run with, or
- # without the rtos, eth, usb_host, usb, dsp, fat, ublox
- parser.add_option("--rtos",
- action="store_true", dest="rtos",
- default=False, help="Link with RTOS library")
-
- parser.add_option("--rpc",
- action="store_true", dest="rpc",
- default=False, help="Link with RPC library")
-
- parser.add_option("--eth",
- action="store_true", dest="eth",
- default=False,
- help="Link with Ethernet library")
-
- parser.add_option("--usb_host",
- action="store_true",
- dest="usb_host",
- default=False,
- help="Link with USB Host library")
-
- parser.add_option("--usb",
- action="store_true",
- dest="usb",
- default=False,
- help="Link with USB Device library")
-
- parser.add_option("--dsp",
- action="store_true",
- dest="dsp",
- default=False,
- help="Link with DSP library")
-
- parser.add_option("--fat",
- action="store_true",
- dest="fat",
- default=False,
- help="Link with FS ad SD card file system library")
-
- parser.add_option("--ublox",
- action="store_true",
- dest="ublox",
- default=False,
- help="Link with U-Blox library")
-
- parser.add_option("--testlib",
- action="store_true",
- dest="testlib",
- default=False,
- help="Link with mbed test library")
-
- # Specify a different linker script
- parser.add_option("-l", "--linker", dest="linker_script",
- default=None, help="use the specified linker script")
-
- (options, args) = parser.parse_args()
-
- # Print available tests in order and exit
- if options.list_tests is True:
- print '\n'.join(map(str, sorted(TEST_MAP.values())))
- sys.exit()
-
- # force program to "0" if a source dir is specified
- if options.source_dir is not None:
- p = 0
- n = None
- else:
- # Program Number or name
- p, n = options.program, options.program_name
-
- if n is not None and p is not None:
- args_error(parser, "[ERROR] specify either '-n' or '-p', not both")
- if n:
- # We will transform 'n' to list of 'p' (integers which are test numbers)
- nlist = n.split(',')
- for test_id in nlist:
- if test_id not in TEST_MAP.keys():
- args_error(parser, "[ERROR] Program with name '%s' not found"% test_id)
-
- p = [TEST_MAP[n].n for n in nlist]
- elif p is None or (p < 0) or (p > (len(TESTS)-1)):
- message = "[ERROR] You have to specify one of the following tests:\n"
- message += '\n'.join(map(str, sorted(TEST_MAP.values())))
- args_error(parser, message)
-
- # If 'p' was set via -n to list of numbers make this a single element integer list
- if type(p) != type([]):
- p = [p]
-
- # Target
- if options.mcu is None :
- args_error(parser, "[ERROR] You should specify an MCU")
- mcu = options.mcu
-
- # Toolchain
- if options.tool is None:
- args_error(parser, "[ERROR] You should specify a TOOLCHAIN")
- toolchain = options.tool
-
- # Test
- for test_no in p:
- test = Test(test_no)
- if options.automated is not None: test.automated = options.automated
- if options.dependencies is not None: test.dependencies = options.dependencies
- if options.host_test is not None: test.host_test = options.host_test;
- if options.peripherals is not None: test.peripherals = options.peripherals;
- if options.duration is not None: test.duration = options.duration;
- if options.extra is not None: test.extra_files = options.extra
-
- if not test.is_supported(mcu, toolchain):
- print 'The selected test is not supported on target %s with toolchain %s' % (mcu, toolchain)
- sys.exit()
-
- # Linking with extra libraries
- if options.rtos: test.dependencies.append(RTOS_LIBRARIES)
- if options.rpc: test.dependencies.append(RPC_LIBRARY)
- if options.eth: test.dependencies.append(ETH_LIBRARY)
- if options.usb_host: test.dependencies.append(USB_HOST_LIBRARIES)
- if options.usb: test.dependencies.append(USB_LIBRARIES)
- if options.dsp: test.dependencies.append(DSP_LIBRARIES)
- if options.fat: test.dependencies.append(FS_LIBRARY)
- if options.ublox: test.dependencies.append(UBLOX_LIBRARY)
- if options.testlib: test.dependencies.append(TEST_MBED_LIB)
-
- build_dir = join(BUILD_DIR, "test", mcu, toolchain, test.id)
- if options.source_dir is not None:
- test.source_dir = options.source_dir
- build_dir = options.source_dir
-
- if options.build_dir is not None:
- build_dir = options.build_dir
-
- target = TARGET_MAP[mcu]
- try:
- bin_file = build_project(test.source_dir, build_dir, target, toolchain, test.dependencies, options.options,
- linker_script=options.linker_script,
- clean=options.clean,
- verbose=options.verbose,
- silent=options.silent,
- macros=options.macros,
- jobs=options.jobs)
- print 'Image: %s'% bin_file
-
- if options.disk:
- # Simple copy to the mbed disk
- copy(bin_file, options.disk)
-
- if options.serial:
- # Import pyserial: https://pypi.python.org/pypi/pyserial
- from serial import Serial
-
- sleep(target.program_cycle_s())
-
- serial = Serial(options.serial, timeout = 1)
- if options.baud:
- serial.setBaudrate(options.baud)
-
- serial.flushInput()
- serial.flushOutput()
-
- try:
- serial.sendBreak()
- except:
- # In linux a termios.error is raised in sendBreak and in setBreak.
- # The following setBreak() is needed to release the reset signal on the target mcu.
- try:
- serial.setBreak(False)
- except:
- pass
-
- while True:
- c = serial.read(512)
- sys.stdout.write(c)
- sys.stdout.flush()
-
- except KeyboardInterrupt, e:
- print "\n[CTRL+c] exit"
- except Exception,e:
- if options.verbose:
- import traceback
- traceback.print_exc(file=sys.stdout)
- else:
- print "[ERROR] %s" % str(e)
diff --git a/workspace_tools/options.py b/workspace_tools/options.py
deleted file mode 100644
index 3e4d2e8..0000000
--- a/workspace_tools/options.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from optparse import OptionParser
-from workspace_tools.toolchains import TOOLCHAINS
-from workspace_tools.targets import TARGET_NAMES
-
-
-def get_default_options_parser():
- parser = OptionParser()
-
- targetnames = TARGET_NAMES
- targetnames.sort()
- toolchainlist = list(TOOLCHAINS)
- toolchainlist.sort()
-
- parser.add_option("-m", "--mcu",
- help="build for the given MCU (%s)" % ', '.join(targetnames),
- metavar="MCU")
-
- parser.add_option("-t", "--tool",
- help="build using the given TOOLCHAIN (%s)" % ', '.join(toolchainlist),
- metavar="TOOLCHAIN")
-
- parser.add_option("-c", "--clean", action="store_true", default=False,
- help="clean the build directory")
-
- parser.add_option("-o", "--options", action="append",
- help='Add a build option ("save-asm": save the asm generated by the compiler, "debug-info": generate debugging information, "analyze": run Goanna static code analyzer")')
-
- return parser
diff --git a/workspace_tools/patch.py b/workspace_tools/patch.py
deleted file mode 100644
index 6fda63f..0000000
--- a/workspace_tools/patch.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-http://www.nxp.com/documents/user_manual/UM10360.pdf
-
-32.3.1.1 Criterion for Valid User Code
-The reserved Cortex-M3 exception vector location 7 (offset 0x1C in the vector table)
-should contain the 2's complement of the check-sum of table entries 0 through 6. This
-causes the checksum of the first 8 table entries to be 0. The boot loader code checksums
-the first 8 locations in sector 0 of the flash. If the result is 0, then execution control is
-transferred to the user code.
-"""
-from struct import unpack, pack
-
-
-def patch(bin_path):
- with open(bin_path, 'r+b') as bin:
- # Read entries 0 through 6 (Little Endian 32bits words)
- vector = [unpack(' (len(TESTS)-1)):
- message = "[ERROR] You have to specify one of the following tests:\n"
- message += '\n'.join(map(str, sorted(TEST_MAP.values())))
- args_error(parser, message)
-
- # Project
- if p is None or (p < 0) or (p > (len(TESTS)-1)):
- message = "[ERROR] You have to specify one of the following tests:\n"
- message += '\n'.join(map(str, sorted(TEST_MAP.values())))
- args_error(parser, message)
- test = Test(p)
-
- # Some libraries have extra macros (called by exporter symbols) to we need to pass
- # them to maintain compilation macros integrity between compiled library and
- # header files we might use with it
- lib_symbols = []
- for lib in LIBRARIES:
- if lib['build_dir'] in test.dependencies:
- lib_macros = lib.get('macros', None)
- if lib_macros is not None:
- lib_symbols.extend(lib_macros)
-
- if not options.build:
- # Substitute the library builds with the sources
- # TODO: Substitute also the other library build paths
- if MBED_LIBRARIES in test.dependencies:
- test.dependencies.remove(MBED_LIBRARIES)
- test.dependencies.append(MBED_BASE)
-
- # Build the project with the same directory structure of the mbed online IDE
- project_dir = join(EXPORT_WORKSPACE, test.id)
- setup_user_prj(project_dir, test.source_dir, test.dependencies)
-
- # Export to selected toolchain
- tmp_path, report = export(project_dir, test.id, ide, mcu, EXPORT_WORKSPACE, EXPORT_TMP, extra_symbols=lib_symbols)
- if report['success']:
- zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (test.id, ide, mcu))
- move(tmp_path, zip_path)
- successes.append("%s::%s\t%s"% (mcu, ide, zip_path))
- else:
- failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg']))
-
- # Prints export results
- print
- if len(successes) > 0:
- print "Successful exports:"
- for success in successes:
- print " * %s"% success
- if len(failures) > 0:
- print "Failed exports:"
- for failure in failures:
- print " * %s"% failure
diff --git a/workspace_tools/remove-device-h.py b/workspace_tools/remove-device-h.py
deleted file mode 100644
index bbed161..0000000
--- a/workspace_tools/remove-device-h.py
+++ /dev/null
@@ -1,216 +0,0 @@
-import json
-import os
-import stat
-import re
-from collections import OrderedDict
-from subprocess import Popen
-
-git_processes = []
-
-class MyJSONEncoder(json.JSONEncoder):
- def __init__(self, *args, **kwargs):
- super(MyJSONEncoder, self).__init__(*args, **kwargs)
- self.current_indent = 0
- self.current_indent_str = ""
-
-
- def encode(self, o):
- #Special Processing for lists
- if isinstance(o, (list, tuple)):
- primitives_only = True
- for item in o:
- if isinstance(item, (list, tuple, dict)):
- primitives_only = False
- break
- output = []
- if primitives_only:
- for item in o:
- output.append(json.dumps(item))
- return "[" + ", ".join(output) + "]"
- else:
- self.current_indent += self.indent
- self.current_indent_str = " " * self.current_indent
- for item in o:
- output.append(self.current_indent_str + self.encode(item))
- self.current_indent -= self.indent
- self.current_indent_str = " " * self.current_indent
- return "[\n" + ",\n".join(output) + "\n" + self.current_indent_str + "]"
- elif isinstance(o, dict):
- primitives_only = True
- for item in o.values():
- if isinstance(item, (list, tuple, dict)):
- primitives_only = False
- break
- output = []
- if primitives_only and len(o) < 3:
- for key, value in o.iteritems():
- output.append(json.dumps(key) + ": " + self.encode(value))
- return "{" + ", ".join(output) + "}"
- else:
- self.current_indent += self.indent
- self.current_indent_str = " " * self.current_indent
- for key, value in o.iteritems():
- output.append(self.current_indent_str + json.dumps(key) + ": " + self.encode(value))
- self.current_indent -= self.indent
- self.current_indent_str = " " * self.current_indent
- return "{\n" + ",\n".join(output) + "\n" + self.current_indent_str + "}"
- else:
- return json.dumps(o)
-
-def load(path):
- with open(path, 'r') as f :
- return json.load(f, object_pairs_hook=OrderedDict)
-
-def dump(path, obj):
- with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f :
- os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
- f.write(MyJSONEncoder(indent=4).encode(obj))
- f.write(u'\n')
- f.truncate()
-
-def find(stem, path) :
- for root, directories, files in os.walk(path, followlinks=True) :
- [dir for dir in directories if dir[0] != '.']
- if (stem_match(stem,os.path.basename(os.path.normpath(root))) and
- "device.h" in files) :
- return os.path.join(root, "device.h")
-
-def find_all_devices(path, verbose=False) :
- for root, directories, files in os.walk(path, followlinks=True) :
- [dir for dir in directories if dir[0] != '.']
- if "device.h" in files :
- if verbose : print("[VERBOSE] found a device.h file in {}".format(root))
- yield os.path.join(root, "device.h")
-
-mbed_matcher = re.compile('mbed', re.IGNORECASE)
-def stem_match(stem, thing) :
- return (stem in thing or
- re.sub(mbed_matcher, '', stem) in thing)
-
-attr_matcher = re.compile('^#define\W+DEVICE_(\w+)\W+1.*$')
-def parse_attributes(path) :
- with open(path) as input :
- for line in input :
- m = re.match(attr_matcher, line)
- if m: yield m.group(1)
-
-remove_matcher = re.compile('^#define\W+DEVICE_(\w+)\W+[10].*$')
-def remove_attributes(path) :
- with open(path) as input :
- remainder = filter(lambda l: not re.match(remove_matcher, l), input)
- with open(path,"wb") as output :
- output.truncate(0)
- output.write("// The 'provides' section in 'target.json' is now used"+
- " to create the device's hardware preprocessor switches.\n")
- output.write("// Check the 'provides' section of the target description"+
- " in 'targets.json' for more details.\n")
- output.writelines(remainder)
-
-def user_select(things, message) :
- print(message)
- for thing, number in zip(things, range(len(things))):
- print("{} : {}".format(number, thing))
- selection = None
- while selection is None :
- print("please select an integer [0..{}] or specify all".format(len(things) - 1))
- try :
- i = raw_input()
- if i == "all" :
- selection = "all"
- else :
- selection = int(i)
- if (selection > len(things) or
- selection < 0) :
- print("selection {} out of range".format(selection))
- selection = None
- except (ValueError, SyntaxError) :
- print("selection not understood")
- if selection == "all" :
- return things
- else :
- return [things[selection]]
-
-target_matcher = re.compile("TARGET_")
-def strip_target(str) :
- return re.sub(target_matcher, "", str)
-
-def add_to_targets(targets, device_file, verbose=False, remove=False) :
- if verbose : print("[VERBOSE] trying target {}".format(device_file))
- device = strip_target(os.path.basename(os.path.normpath(os.path.dirname(device_file))))
- if not device :
- print("[WARNING] device {} did not have an associated device.h".format(device))
- else :
- possible_matches = set([key for key in targets.keys() if stem_match(device, key)])
- for key, value in targets.iteritems() :
- for alt in value['extra_labels'] if 'extra_labels' in value else [] :
- if stem_match(device, alt) : possible_matches.add(key)
- for alt in value['extra_labels_add'] if 'extra_labels_add' in value else [] :
- if stem_match(device, alt) : possible_matches.add(key)
- possible_matches = list(possible_matches)
- for match in possible_matches :
- if device == match : possible_matches = [match]
- if not possible_matches :
- print("[WARNING] device {} did not have an associated entry in targets.json".format(device))
- return None
- elif len(possible_matches) > 1 :
- message = ("possible matches for file {}".format(device_file))
- target = user_select(possible_matches, message)
- else :
- target = possible_matches
- attrs = list(parse_attributes(device_file))
- if attrs :
- for t in target :
- targets[t]["device_has"] = sorted(list(set(targets[t].setdefault("device_has",[]) + attrs)))
- if verbose : print("[VERBOSE] target {} now device_has {}".format(t, attrs))
- if remove is True:
- remove_attributes(device_file)
-
-if __name__ == '__main__' :
- import argparse
- parser = argparse.ArgumentParser(description='A helpful little script for converting' +
- ' device.h files to parts of the targets.json file')
- parser.add_argument('-a', '--all', action='store_true',
- help='find and convert all available device.h files in the'+
- ' directory tree starting at the current directory')
- parser.add_argument('-f', '--file', nargs='+', help='specify an individual file to '+
- 'convert from device.h format to a piece of targets.json')
- parser.add_argument('-t', '--target', nargs='+', help='specify an individual target'+
- ' to convert from device.h format to a piece of targets.json')
- parser.add_argument('-v', '--verbose', action='store_true',
- help="print out every target that is updated in the targets.json")
- parser.add_argument('-r', '--rm', action='store_true',
- help="remove the used attributes from a device.h file")
- args = parser.parse_args()
- if not args.target and not args.file and not args.all :
- print("[WARNING] no action specified; auto-formatting targets.json")
-
- targets_file_name = os.path.join(os.curdir, "hal", "targets.json")
- try :
- targets = load(targets_file_name)
- except OSError :
- print("[ERROR] did not find targets.json where I expected it {}".format(targets_file_name))
- exit(1)
- except ValueError :
- print("[ERROR] invalid json found in {}".format(targets_file_name))
- exit(2)
-
- if args.target :
- for target in args.target :
- device_file = find(target, os.curdir)
- if device_file :
- add_to_targets(targets, device_file, verbose=args.verbose, remove=args.rm)
- else :
- print("[WARNING] could not locate a device file for target {}".format(target))
-
- if args.file :
- for file in args.file :
- add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)
-
- if args.all :
- for file in find_all_devices(os.curdir, verbose=args.verbose) :
- add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)
-
- dump(targets_file_name, targets)
-
- for process in git_processes :
- process.wait()
diff --git a/workspace_tools/settings.py b/workspace_tools/settings.py
deleted file mode 100644
index 0a62ed9..0000000
--- a/workspace_tools/settings.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from os.path import join, abspath, dirname
-import logging
-
-ROOT = abspath(join(dirname(__file__), ".."))
-
-# These default settings have two purposes:
-# 1) Give a template for writing local "private_settings.py"
-# 2) Give default initialization fields for the "toolchains.py" constructors
-
-##############################################################################
-# Build System Settings
-##############################################################################
-BUILD_DIR = abspath(join(ROOT, ".build"))
-
-# ARM
-armcc = "standalone" # "keil", or "standalone", or "ds-5"
-
-if armcc == "keil":
- ARM_PATH = "C:/Keil_v5/ARM/ARMCC"
- ARM_BIN = join(ARM_PATH, "bin")
- ARM_INC = join(ARM_PATH, "incldue")
- ARM_LIB = join(ARM_PATH, "lib")
-
-elif armcc == "standalone":
- ARM_PATH = "C:/Program Files (x86)/ARM_Compiler_5.06u1"
- ARM_BIN = join(ARM_PATH, "bin")
- ARM_INC = join(ARM_PATH, "include")
- ARM_LIB = join(ARM_PATH, "lib")
-
-elif armcc == "ds-5":
- ARM_PATH = "C:/Program Files (x86)/DS-5"
- ARM_BIN = join(ARM_PATH, "bin")
- ARM_INC = join(ARM_PATH, "include")
- ARM_LIB = join(ARM_PATH, "lib")
-
-ARM_CPPLIB = join(ARM_LIB, "cpplib")
-MY_ARM_CLIB = join(ARM_PATH, "lib", "microlib")
-
-# GCC ARM
-GCC_ARM_PATH = ""
-
-# GCC CodeRed
-GCC_CR_PATH = "C:/code_red/RedSuite_4.2.0_349/redsuite/Tools/bin"
-
-# IAR
-IAR_PATH = "C:/Program Files (x86)/IAR Systems/Embedded Workbench 7.3/arm"
-
-# Goanna static analyser. Please overload it in private_settings.py
-GOANNA_PATH = "c:/Program Files (x86)/RedLizards/Goanna Central 3.2.3/bin"
-
-# cppcheck path (command) and output message format
-CPPCHECK_CMD = ["cppcheck", "--enable=all"]
-CPPCHECK_MSG_FORMAT = ["--template=[{severity}] {file}@{line}: {id}:{message}"]
-
-BUILD_OPTIONS = []
-
-# mbed.org username
-MBED_ORG_USER = ""
-
-##############################################################################
-# Test System Settings
-##############################################################################
-SERVER_PORT = 59432
-SERVER_ADDRESS = "10.2.200.94"
-LOCALHOST = "10.2.200.94"
-
-MUTs = {
- "1" : {"mcu": "LPC1768",
- "port":"COM41", "disk":'E:\\',
- "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
- },
- "2": {"mcu": "LPC11U24",
- "port":"COM42", "disk":'F:\\',
- "peripherals": ["TMP102", "digital_loop", "port_loop", "SD"]
- },
- "3" : {"mcu": "KL25Z",
- "port":"COM43", "disk":'G:\\',
- "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
- },
-}
-
-##############################################################################
-# Private Settings
-##############################################################################
-try:
- # Allow to overwrite the default settings without the need to edit the
- # settings file stored in the repository
- from workspace_tools.private_settings import *
-except ImportError:
- print '[WARNING] Using default settings. Define your settings in the file "workspace_tools/private_settings.py" or in "./mbed_settings.py"'
diff --git a/workspace_tools/singletest.py b/workspace_tools/singletest.py
deleted file mode 100644
index 6b5054b..0000000
--- a/workspace_tools/singletest.py
+++ /dev/null
@@ -1,262 +0,0 @@
-#!/usr/bin/env python2
-
-"""
-mbed SDK
-Copyright (c) 2011-2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-"""
-
-"""
-File format example: test_spec.json:
-{
- "targets": {
- "KL46Z": ["ARM", "GCC_ARM"],
- "LPC1768": ["ARM", "GCC_ARM", "GCC_CR", "IAR"],
- "LPC11U24": ["uARM"],
- "NRF51822": ["ARM"]
- }
-}
-
-File format example: muts_all.json:
-{
- "1" : {"mcu": "LPC1768",
- "port":"COM4",
- "disk":"J:\\",
- "peripherals": ["TMP102", "digital_loop", "port_loop", "analog_loop", "SD"]
- },
-
- "2" : {"mcu": "KL25Z",
- "port":"COM7",
- "disk":"G:\\",
- "peripherals": ["digital_loop", "port_loop", "analog_loop"]
- }
-}
-"""
-
-
-# Be sure that the tools directory is in the search path
-import sys
-from os.path import join, abspath, dirname
-
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-
-# Check: Extra modules which are required by core test suite
-from workspace_tools.utils import check_required_modules
-check_required_modules(['prettytable', 'serial'])
-
-# Imports related to mbed build api
-from workspace_tools.build_api import mcu_toolchain_matrix
-
-# Imports from TEST API
-from workspace_tools.test_api import SingleTestRunner
-from workspace_tools.test_api import singletest_in_cli_mode
-from workspace_tools.test_api import detect_database_verbose
-from workspace_tools.test_api import get_json_data_from_file
-from workspace_tools.test_api import get_avail_tests_summary_table
-from workspace_tools.test_api import get_default_test_options_parser
-from workspace_tools.test_api import print_muts_configuration_from_json
-from workspace_tools.test_api import print_test_configuration_from_json
-from workspace_tools.test_api import get_autodetected_MUTS_list
-from workspace_tools.test_api import get_autodetected_TEST_SPEC
-from workspace_tools.test_api import get_module_avail
-from workspace_tools.test_exporters import ReportExporter, ResultExporterType
-
-
-# Importing extra modules which can be not installed but if available they can extend test suite functionality
-try:
- import mbed_lstools
- from workspace_tools.compliance.ioper_runner import IOperTestRunner
- from workspace_tools.compliance.ioper_runner import get_available_oper_test_scopes
-except:
- pass
-
-def get_version():
- """ Returns test script version
- """
- single_test_version_major = 1
- single_test_version_minor = 5
- return (single_test_version_major, single_test_version_minor)
-
-
-if __name__ == '__main__':
- # Command line options
- parser = get_default_test_options_parser()
-
- parser.description = """This script allows you to run mbed defined test cases for particular MCU(s) and corresponding toolchain(s)."""
- parser.epilog = """Example: singletest.py -i test_spec.json -M muts_all.json"""
-
- (opts, args) = parser.parse_args()
-
- # Print scrip version
- if opts.version:
- print parser.description
- print parser.epilog
- print "Version %d.%d"% get_version()
- exit(0)
-
- if opts.db_url and opts.verbose_test_configuration_only:
- detect_database_verbose(opts.db_url)
- exit(0)
-
- # Print summary / information about automation test status
- if opts.test_automation_report:
- print get_avail_tests_summary_table(platform_filter=opts.general_filter_regex)
- exit(0)
-
- # Print summary / information about automation test status
- if opts.test_case_report:
- test_case_report_cols = ['id',
- 'automated',
- 'description',
- 'peripherals',
- 'host_test',
- 'duration',
- 'source_dir']
- print get_avail_tests_summary_table(cols=test_case_report_cols,
- result_summary=False,
- join_delim='\n',
- platform_filter=opts.general_filter_regex)
- exit(0)
-
- # Only prints matrix of supported toolchains
- if opts.supported_toolchains:
- print mcu_toolchain_matrix(platform_filter=opts.general_filter_regex)
- exit(0)
-
- test_spec = None
- MUTs = None
-
- if hasattr(opts, 'auto_detect') and opts.auto_detect:
- # If auto_detect attribute is present, we assume other auto-detection
- # parameters like 'toolchains_filter' are also set.
- print "MBEDLS: Detecting connected mbed-enabled devices... "
-
- MUTs = get_autodetected_MUTS_list()
-
- for mut in MUTs.values():
- print "MBEDLS: Detected %s, port: %s, mounted: %s"% (mut['mcu_unique'] if 'mcu_unique' in mut else mut['mcu'],
- mut['port'],
- mut['disk'])
-
- # Set up parameters for test specification filter function (we need to set toolchains per target here)
- use_default_toolchain = 'default' in opts.toolchains_filter.split(',') if opts.toolchains_filter is not None else True
- use_supported_toolchains = 'all' in opts.toolchains_filter.split(',') if opts.toolchains_filter is not None else False
- toolchain_filter = opts.toolchains_filter
- platform_name_filter = opts.general_filter_regex.split(',') if opts.general_filter_regex is not None else opts.general_filter_regex
- # Test specification with information about each target and associated toolchain
- test_spec = get_autodetected_TEST_SPEC(MUTs.values(),
- use_default_toolchain=use_default_toolchain,
- use_supported_toolchains=use_supported_toolchains,
- toolchain_filter=toolchain_filter,
- platform_name_filter=platform_name_filter)
- else:
- # Open file with test specification
- # test_spec_filename tells script which targets and their toolchain(s)
- # should be covered by the test scenario
- opts.auto_detect = False
- test_spec = get_json_data_from_file(opts.test_spec_filename) if opts.test_spec_filename else None
- if test_spec is None:
- if not opts.test_spec_filename:
- parser.print_help()
- exit(-1)
-
- # Get extra MUTs if applicable
- MUTs = get_json_data_from_file(opts.muts_spec_filename) if opts.muts_spec_filename else None
-
- if MUTs is None:
- if not opts.muts_spec_filename:
- parser.print_help()
- exit(-1)
-
- if opts.verbose_test_configuration_only:
- print "MUTs configuration in %s:" % ('auto-detected' if opts.auto_detect else opts.muts_spec_filename)
- if MUTs:
- print print_muts_configuration_from_json(MUTs, platform_filter=opts.general_filter_regex)
- print
- print "Test specification in %s:" % ('auto-detected' if opts.auto_detect else opts.test_spec_filename)
- if test_spec:
- print print_test_configuration_from_json(test_spec)
- exit(0)
-
- if get_module_avail('mbed_lstools'):
- if opts.operability_checks:
- # Check if test scope is valid and run tests
- test_scope = get_available_oper_test_scopes()
- if opts.operability_checks in test_scope:
- tests = IOperTestRunner(scope=opts.operability_checks)
- test_results = tests.run()
-
- # Export results in form of JUnit XML report to separate file
- if opts.report_junit_file_name:
- report_exporter = ReportExporter(ResultExporterType.JUNIT_OPER)
- report_exporter.report_to_file(test_results, opts.report_junit_file_name)
- else:
- print "Unknown interoperability test scope name: '%s'" % (opts.operability_checks)
- print "Available test scopes: %s" % (','.join(["'%s'" % n for n in test_scope]))
-
- exit(0)
-
- # Verbose test specification and MUTs configuration
- if MUTs and opts.verbose:
- print print_muts_configuration_from_json(MUTs)
- if test_spec and opts.verbose:
- print print_test_configuration_from_json(test_spec)
-
- if opts.only_build_tests:
- # We are skipping testing phase, and suppress summary
- opts.suppress_summary = True
-
- single_test = SingleTestRunner(_global_loops_count=opts.test_global_loops_value,
- _test_loops_list=opts.test_loops_list,
- _muts=MUTs,
- _clean=opts.clean,
- _opts_db_url=opts.db_url,
- _opts_log_file_name=opts.log_file_name,
- _opts_report_html_file_name=opts.report_html_file_name,
- _opts_report_junit_file_name=opts.report_junit_file_name,
- _opts_report_build_file_name=opts.report_build_file_name,
- _test_spec=test_spec,
- _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
- _opts_goanna_for_tests=opts.goanna_for_tests,
- _opts_shuffle_test_order=opts.shuffle_test_order,
- _opts_shuffle_test_seed=opts.shuffle_test_seed,
- _opts_test_by_names=opts.test_by_names,
- _opts_peripheral_by_names=opts.peripheral_by_names,
- _opts_test_only_peripheral=opts.test_only_peripheral,
- _opts_test_only_common=opts.test_only_common,
- _opts_verbose_skipped_tests=opts.verbose_skipped_tests,
- _opts_verbose_test_result_only=opts.verbose_test_result_only,
- _opts_verbose=opts.verbose,
- _opts_firmware_global_name=opts.firmware_global_name,
- _opts_only_build_tests=opts.only_build_tests,
- _opts_parallel_test_exec=opts.parallel_test_exec,
- _opts_suppress_summary=opts.suppress_summary,
- _opts_test_x_toolchain_summary=opts.test_x_toolchain_summary,
- _opts_copy_method=opts.copy_method,
- _opts_mut_reset_type=opts.mut_reset_type,
- _opts_jobs=opts.jobs,
- _opts_waterfall_test=opts.waterfall_test,
- _opts_consolidate_waterfall_test=opts.consolidate_waterfall_test,
- _opts_extend_test_timeout=opts.extend_test_timeout,
- _opts_auto_detect=opts.auto_detect)
-
- # Runs test suite in CLI mode
- if (singletest_in_cli_mode(single_test)):
- exit(0)
- else:
- exit(-1)
diff --git a/workspace_tools/size.py b/workspace_tools/size.py
deleted file mode 100644
index 48ed536..0000000
--- a/workspace_tools/size.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import sys
-from os.path import join, abspath, dirname, exists, splitext
-from subprocess import Popen, PIPE
-import csv
-from collections import defaultdict
-
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-from workspace_tools.paths import BUILD_DIR, TOOLS_DATA
-from workspace_tools.settings import GCC_ARM_PATH
-from workspace_tools.tests import TEST_MAP
-from workspace_tools.build_api import build_mbed_libs, build_project
-
-SIZE = join(GCC_ARM_PATH, 'arm-none-eabi-size')
-
-def get_size(path):
- out = Popen([SIZE, path], stdout=PIPE).communicate()[0]
- return map(int, out.splitlines()[1].split()[:4])
-
-def get_percentage(before, after):
- if before == 0:
- return 0 if after == 0 else 100.0
- return float(after - before) / float(before) * 100.0
-
-def human_size(val):
- if val>1024:
- return "%.0fKb" % (float(val)/1024.0)
- return "%d" % val
-
-def print_diff(name, before, after):
- print "%s: (%s -> %s) %.2f%%" % (name, human_size(before) , human_size(after) , get_percentage(before , after))
-
-BENCHMARKS = [
- ("BENCHMARK_1", "CENV"),
- ("BENCHMARK_2", "PRINTF"),
- ("BENCHMARK_3", "FP"),
- ("BENCHMARK_4", "MBED"),
- ("BENCHMARK_5", "ALL"),
-]
-BENCHMARK_DATA_PATH = join(TOOLS_DATA, 'benchmarks.csv')
-
-
-def benchmarks():
- # CSV Data
- csv_data = csv.writer(open(BENCHMARK_DATA_PATH, 'wb'))
- csv_data.writerow(['Toolchain', "Target", "Benchmark", "code", "data", "bss", "flash"])
-
- # Build
- for toolchain in ['ARM', 'uARM', 'GCC_CR', 'GCC_ARM']:
- for mcu in ["LPC1768", "LPC11U24"]:
- # Build Libraries
- build_mbed_libs(mcu, toolchain)
-
- # Build benchmarks
- build_dir = join(BUILD_DIR, "benchmarks", mcu, toolchain)
- for test_id, title in BENCHMARKS:
- # Build Benchmark
- try:
- test = TEST_MAP[test_id]
- path = build_project(test.source_dir, join(build_dir, test_id),
- mcu, toolchain, test.dependencies)
- base, ext = splitext(path)
- # Check Size
- code, data, bss, flash = get_size(base+'.elf')
- csv_data.writerow([toolchain, mcu, title, code, data, bss, flash])
- except Exception, e:
- print "Unable to build %s for toolchain %s targeting %s" % (test_id, toolchain, mcu)
- print e
-
-
-def compare(t1, t2, target):
- if not exists(BENCHMARK_DATA_PATH):
- benchmarks()
- else:
- print "Loading: %s" % BENCHMARK_DATA_PATH
-
- data = csv.reader(open(BENCHMARK_DATA_PATH, 'rb'))
-
- benchmarks_data = defaultdict(dict)
- for (toolchain, mcu, name, code, data, bss, flash) in data:
- if target == mcu:
- for t in [t1, t2]:
- if toolchain == t:
- benchmarks_data[name][t] = map(int, (code, data, bss, flash))
-
- print "%s vs %s for %s" % (t1, t2, target)
- for name, data in benchmarks_data.iteritems():
- try:
- # Check Size
- code_a, data_a, bss_a, flash_a = data[t1]
- code_u, data_u, bss_u, flash_u = data[t2]
-
- print "\n=== %s ===" % name
- print_diff("code", code_a , code_u)
- print_diff("data", data_a , data_u)
- print_diff("bss", bss_a , bss_u)
- print_diff("flash", flash_a , flash_u)
- except Exception, e:
- print "No data for benchmark %s" % (name)
- print e
-
-
-if __name__ == '__main__':
- compare("GCC_CR", "LPC1768")
diff --git a/workspace_tools/synch.py b/workspace_tools/synch.py
deleted file mode 100644
index 9d95034..0000000
--- a/workspace_tools/synch.py
+++ /dev/null
@@ -1,372 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-One repository to update them all
-On mbed.org the mbed SDK is split up in multiple repositories, this script takes
-care of updating them all.
-"""
-import sys
-from copy import copy
-from os import walk, remove, makedirs
-from os.path import join, abspath, dirname, relpath, exists, isfile
-from shutil import copyfile
-from optparse import OptionParser
-import re
-import string
-
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-from workspace_tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR
-from workspace_tools.paths import *
-from workspace_tools.utils import run_cmd
-
-MBED_URL = "mbed.org"
-MBED_USER = "mbed_official"
-
-changed = []
-push_remote = True
-quiet = False
-commit_msg = ''
-
-# Code that does have a mirror in the mbed SDK
-# Tuple data: (repo_name, list_of_code_dirs, [team])
-# team is optional - if not specified, the code is published under mbed_official
-OFFICIAL_CODE = (
- ("mbed-dev" , MBED_BASE),
- ("mbed-rtos", RTOS),
- ("mbed-dsp" , DSP),
- ("mbed-rpc" , MBED_RPC),
-
- ("lwip" , LWIP_SOURCES+"/lwip"),
- ("lwip-sys", LWIP_SOURCES+"/lwip-sys"),
- ("Socket" , LWIP_SOURCES+"/Socket"),
-
- ("lwip-eth" , ETH_SOURCES+"/lwip-eth"),
- ("EthernetInterface", ETH_SOURCES+"/EthernetInterface"),
-
- ("USBDevice", USB),
- ("USBHost" , USB_HOST),
-
- ("CellularModem", CELLULAR_SOURCES),
- ("CellularUSBModem", CELLULAR_USB_SOURCES),
- ("UbloxUSBModem", UBLOX_SOURCES),
- ("UbloxModemHTTPClientTest", [TEST_DIR+"/net/cellular/http/common", TEST_DIR+"/net/cellular/http/ubloxusb"]),
- ("UbloxModemSMSTest", [TEST_DIR+"/net/cellular/sms/common", TEST_DIR+"/net/cellular/sms/ubloxusb"]),
- ("FATFileSystem", FAT_FS, "mbed-official"),
-)
-
-
-# Code that does have dependencies to libraries should point to
-# the latest revision. By default, they point to a specific revision.
-CODE_WITH_DEPENDENCIES = (
- # Libraries
- "EthernetInterface",
-
- # RTOS Examples
- "rtos_basic",
- "rtos_isr",
- "rtos_mail",
- "rtos_mutex",
- "rtos_queue",
- "rtos_semaphore",
- "rtos_signals",
- "rtos_timer",
-
- # Net Examples
- "TCPEchoClient",
- "TCPEchoServer",
- "TCPSocket_HelloWorld",
- "UDPSocket_HelloWorld",
- "UDPEchoClient",
- "UDPEchoServer",
- "BroadcastReceive",
- "BroadcastSend",
-
- # mbed sources
- "mbed-src-program",
-)
-
-# A list of regular expressions that will be checked against each directory
-# name and skipped if they match.
-IGNORE_DIRS = (
-)
-
-IGNORE_FILES = (
- 'COPYING',
- '\.md',
- "\.lib",
- "\.bld"
-)
-
-def ignore_path(name, reg_exps):
- for r in reg_exps:
- if re.search(r, name):
- return True
- return False
-
-class MbedRepository:
- @staticmethod
- def run_and_print(command, cwd):
- stdout, _, _ = run_cmd(command, wd=cwd, redirect=True)
- print(stdout)
-
- def __init__(self, name, team = None):
- self.name = name
- self.path = join(MBED_ORG_PATH, name)
- if team is None:
- self.url = "http://" + MBED_URL + "/users/" + MBED_USER + "/code/%s/"
- else:
- self.url = "http://" + MBED_URL + "/teams/" + team + "/code/%s/"
- if not exists(self.path):
- # Checkout code
- if not exists(MBED_ORG_PATH):
- makedirs(MBED_ORG_PATH)
-
- self.run_and_print(['hg', 'clone', self.url % name], cwd=MBED_ORG_PATH)
-
- else:
- # Update
- self.run_and_print(['hg', 'pull'], cwd=self.path)
- self.run_and_print(['hg', 'update'], cwd=self.path)
-
- def publish(self):
- # The maintainer has to evaluate the changes first and explicitly accept them
- self.run_and_print(['hg', 'addremove'], cwd=self.path)
- stdout, _, _ = run_cmd(['hg', 'status'], wd=self.path)
- if stdout == '':
- print "No changes"
- return False
- print stdout
- if quiet:
- commit = 'Y'
- else:
- commit = raw_input(push_remote and "Do you want to commit and push? Y/N: " or "Do you want to commit? Y/N: ")
- if commit == 'Y':
- args = ['hg', 'commit', '-u', MBED_ORG_USER]
- if commit_msg:
- args = args + ['-m', commit_msg]
- self.run_and_print(args, cwd=self.path)
- if push_remote:
- self.run_and_print(['hg', 'push'], cwd=self.path)
- return True
-
-# Check if a file is a text file or a binary file
-# Taken from http://code.activestate.com/recipes/173220/
-text_characters = "".join(map(chr, range(32, 127)) + list("\n\r\t\b"))
-_null_trans = string.maketrans("", "")
-def is_text_file(filename):
- block_size = 1024
- def istext(s):
- if "\0" in s:
- return 0
-
- if not s: # Empty files are considered text
- return 1
-
- # Get the non-text characters (maps a character to itself then
- # use the 'remove' option to get rid of the text characters.)
- t = s.translate(_null_trans, text_characters)
-
- # If more than 30% non-text characters, then
- # this is considered a binary file
- if float(len(t))/len(s) > 0.30:
- return 0
- return 1
- with open(filename) as f:
- res = istext(f.read(block_size))
- return res
-
-# Return the line ending type for the given file ('cr' or 'crlf')
-def get_line_endings(f):
- examine_size = 1024
- try:
- tf = open(f, "rb")
- lines, ncrlf = tf.readlines(examine_size), 0
- tf.close()
- for l in lines:
- if l.endswith("\r\n"):
- ncrlf = ncrlf + 1
- return 'crlf' if ncrlf > len(lines) >> 1 else 'cr'
- except:
- return 'cr'
-
-# Copy file to destination, but preserve destination line endings if possible
-# This prevents very annoying issues with huge diffs that appear because of
-# differences in line endings
-def copy_with_line_endings(sdk_file, repo_file):
- if not isfile(repo_file):
- copyfile(sdk_file, repo_file)
- return
- is_text = is_text_file(repo_file)
- if is_text:
- sdk_le = get_line_endings(sdk_file)
- repo_le = get_line_endings(repo_file)
- if not is_text or sdk_le == repo_le:
- copyfile(sdk_file, repo_file)
- else:
- print "Converting line endings in '%s' to '%s'" % (abspath(repo_file), repo_le)
- f = open(sdk_file, "rb")
- data = f.read()
- f.close()
- f = open(repo_file, "wb")
- data = data.replace("\r\n", "\n") if repo_le == 'cr' else data.replace('\n','\r\n')
- f.write(data)
- f.close()
-
-def visit_files(path, visit):
- for root, dirs, files in walk(path):
- # Ignore hidden directories
- for d in copy(dirs):
- full = join(root, d)
- if d.startswith('.'):
- dirs.remove(d)
- if ignore_path(full, IGNORE_DIRS):
- print "Skipping '%s'" % full
- dirs.remove(d)
-
- for file in files:
- if ignore_path(file, IGNORE_FILES):
- continue
-
- visit(join(root, file))
-
-
-def update_repo(repo_name, sdk_paths, team_name):
- repo = MbedRepository(repo_name, team_name)
- # copy files from mbed SDK to mbed_official repository
- def visit_mbed_sdk(sdk_file):
- repo_file = join(repo.path, relpath(sdk_file, sdk_path))
-
- repo_dir = dirname(repo_file)
- if not exists(repo_dir):
- makedirs(repo_dir)
-
- copy_with_line_endings(sdk_file, repo_file)
- for sdk_path in sdk_paths:
- visit_files(sdk_path, visit_mbed_sdk)
-
- # remove repository files that do not exist in the mbed SDK
- def visit_repo(repo_file):
- for sdk_path in sdk_paths:
- sdk_file = join(sdk_path, relpath(repo_file, repo.path))
- if exists(sdk_file):
- break
- else:
- remove(repo_file)
- print "remove: %s" % repo_file
- visit_files(repo.path, visit_repo)
-
- if repo.publish():
- changed.append(repo_name)
-
-
-def update_code(repositories):
- for r in repositories:
- repo_name, sdk_dir = r[0], r[1]
- team_name = r[2] if len(r) == 3 else None
- print '\n=== Updating "%s" ===' % repo_name
- sdk_dirs = [sdk_dir] if type(sdk_dir) != type([]) else sdk_dir
- update_repo(repo_name, sdk_dirs, team_name)
-
-def update_single_repo(repo):
- repos = [r for r in OFFICIAL_CODE if r[0] == repo]
- if not repos:
- print "Repository '%s' not found" % repo
- else:
- update_code(repos)
-
-def update_dependencies(repositories):
- for repo_name in repositories:
- print '\n=== Updating "%s" ===' % repo_name
- repo = MbedRepository(repo_name)
-
- # point to the latest libraries
- def visit_repo(repo_file):
- with open(repo_file, "r") as f:
- url = f.read()
- with open(repo_file, "w") as f:
- f.write(url[:(url.rindex('/')+1)])
- visit_files(repo.path, visit_repo, None, MBED_REPO_EXT)
-
- if repo.publish():
- changed.append(repo_name)
-
-
-def update_mbed():
- update_repo("mbed", [join(BUILD_DIR, "mbed")], None)
-
-def do_sync(options):
- global push_remote, quiet, commit_msg, changed
-
- push_remote = not options.nopush
- quiet = options.quiet
- commit_msg = options.msg
- chnaged = []
-
- if options.code:
- update_code(OFFICIAL_CODE)
-
- if options.dependencies:
- update_dependencies(CODE_WITH_DEPENDENCIES)
-
- if options.mbed:
- update_mbed()
-
- if options.repo:
- update_single_repo(options.repo)
-
- if changed:
- print "Repositories with changes:", changed
-
- return changed
-
-if __name__ == '__main__':
- parser = OptionParser()
-
- parser.add_option("-c", "--code",
- action="store_true", default=False,
- help="Update the mbed_official code")
-
- parser.add_option("-d", "--dependencies",
- action="store_true", default=False,
- help="Update the mbed_official code dependencies")
-
- parser.add_option("-m", "--mbed",
- action="store_true", default=False,
- help="Release a build of the mbed library")
-
- parser.add_option("-n", "--nopush",
- action="store_true", default=False,
- help="Commit the changes locally only, don't push them")
-
- parser.add_option("", "--commit_message",
- action="store", type="string", default='', dest='msg',
- help="Commit message to use for all the commits")
-
- parser.add_option("-r", "--repository",
- action="store", type="string", default='', dest='repo',
- help="Synchronize only the given repository")
-
- parser.add_option("-q", "--quiet",
- action="store_true", default=False,
- help="Don't ask for confirmation before commiting or pushing")
-
- (options, args) = parser.parse_args()
-
- do_sync(options)
-
diff --git a/workspace_tools/targets.py b/workspace_tools/targets.py
deleted file mode 100755
index aa02af9..0000000
--- a/workspace_tools/targets.py
+++ /dev/null
@@ -1,385 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2016 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-CORE_LABELS = {
- "ARM7TDMI-S": ["ARM7", "LIKE_CORTEX_ARM7"],
- "Cortex-M0" : ["M0", "CORTEX_M", "LIKE_CORTEX_M0"],
- "Cortex-M0+": ["M0P", "CORTEX_M", "LIKE_CORTEX_M0"],
- "Cortex-M1" : ["M1", "CORTEX_M", "LIKE_CORTEX_M1"],
- "Cortex-M3" : ["M3", "CORTEX_M", "LIKE_CORTEX_M3"],
- "Cortex-M4" : ["M4", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M4"],
- "Cortex-M4F" : ["M4", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M4"],
- "Cortex-M7" : ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7"],
- "Cortex-M7F" : ["M7", "CORTEX_M", "RTOS_M4_M7", "LIKE_CORTEX_M7"],
- "Cortex-A9" : ["A9", "CORTEX_A", "LIKE_CORTEX_A9"]
-}
-
-import os
-import binascii
-import struct
-import shutil
-from workspace_tools.patch import patch
-from paths import TOOLS_BOOTLOADERS
-import json
-import inspect
-import sys
-
-
-########################################################################################################################
-# Generic Target class that reads and interprets the data in targets.json
-
-# A simple class that represents all the exceptions associated with hooking
-class HookError(Exception):
- pass
-
-# A simple decorator used for automatically caching data returned by a function
-caches = {}
-def cached(func):
- def wrapper(*args, **kwargs):
- if not caches.has_key(func):
- caches[func] = func(*args, **kwargs)
- return caches[func]
- return wrapper
-
-class Target:
- # Cumulative attributes can have values appended to them, so they
- # need to be computed differently than regular attributes
- __cumulative_attributes = ['extra_labels', 'macros', 'device_has']
-
- # Utility function: traverse a dictionary and change all the strings in the dictionary to
- # ASCII from Unicode. Needed because the original mbed target definitions were written in
- # Python and used only ASCII strings, but the Python JSON decoder always returns Unicode
- # Based on http://stackoverflow.com/a/13105359
- @staticmethod
- def to_ascii(input):
- if isinstance(input, dict):
- return dict([(Target.to_ascii(key), Target.to_ascii(value)) for key, value in input.iteritems()])
- elif isinstance(input, list):
- return [Target.to_ascii(element) for element in input]
- elif isinstance(input, unicode):
- return input.encode('ascii')
- else:
- return input
-
- # Load the description of JSON target data
- @staticmethod
- @cached
- def get_json_target_data():
- with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../hal/targets.json"), "rt") as f:
- return Target.to_ascii(json.load(f))
-
- # Get the members of this module using Python's "inspect" module
- @staticmethod
- @cached
- def get_module_data():
- return dict([(m[0], m[1]) for m in inspect.getmembers(sys.modules[__name__])])
-
- # Return the order in which target descriptions are searched for attributes
- # This mimics the Python 2.2 method resolution order, which is what the old targets.py module used
- # For more details, check http://makina-corpus.com/blog/metier/2014/python-tutorial-understanding-python-mro-class-search-path
- # The resolution order contains (name, level) tuples, where "name" is the name of the class and "level"
- # is the level in the inheritance hierarchy (the target itself is at level 0, its first parent at level 1,
- # its parent's parent at level 1 and so on)
- def __get_resolution_order(self, target_name, order, level = 0):
- if not target_name in [l[0] for l in order]: # the resolution order can't contain duplicate target names
- order.append((target_name, level))
- parents = self.get_json_target_data()[target_name].get("inherits", [])
- for p in parents:
- order = self.__get_resolution_order(p, order, level + 1)
- return order
-
- # Modify the exporter specification ("progen") by changing all "template" keys to full paths
- @staticmethod
- def __add_paths_to_progen(data):
- out = {}
- for key, value in data.items():
- if isinstance(value, dict):
- out[key] = Target.__add_paths_to_progen(value)
- elif key == "template":
- out[key] = [os.path.join(os.path.dirname(__file__), 'export', v) for v in value]
- else:
- out[key] = value
- return out
-
- # Comute the value of a given target attribute
- def __getattr_helper(self, attrname):
- tdata = self.get_json_target_data()
- if attrname in self.__cumulative_attributes:
- # For a cumulative attribute, figure out when it was defined the last time (in attribute
- # resolution order) then follow the "_add" and "_remove" data fields
- for idx, t in enumerate(self.resolution_order):
- if attrname in tdata[t[0]]: # the attribute was defined at this level in the resolution order
- def_idx = idx
- break
- else:
- raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name))
- # Get the starting value of the attribute
- v = (tdata[self.resolution_order[def_idx][0]][attrname] or [])[:]
- # Traverse the resolution list in high inheritance to low inheritance level, left to right order
- # to figure out all the other classes that change the definition by adding or removing elements
- for idx in xrange(self.resolution_order[def_idx][1] - 1, -1, -1):
- same_level_targets = [t[0] for t in self.resolution_order if t[1] == idx]
- for t in same_level_targets:
- data = tdata[t]
- # Do we have anything to add ?
- if data.has_key(attrname + "_add"):
- v.extend(data[attrname + "_add"])
- # Do we have anything to remove ?
- if data.has_key(attrname + "_remove"):
- # Macros can be defined either without a value (MACRO) or with a value (MACRO=10).
- # When removing, we specify only the name of the macro, without the value. So we need
- # to create a mapping between the macro name and its value. This will work for
- # extra_labels and other type of arrays as well, since they fall into the "macros
- # without a value" category (simple definitions without a value).
- name_def_map = {}
- for crtv in v:
- if crtv.find('=') != -1:
- temp = crtv.split('=')
- if len(temp) != 2:
- raise ValueError("Invalid macro definition '%s'" % crtv)
- name_def_map[temp[0]] = crtv
- else:
- name_def_map[crtv] = crtv
- for e in data[attrname + "_remove"]:
- if not e in name_def_map:
- raise ValueError("Unable to remove '%s' in '%s.%s' since it doesn't exist" % (e, self.name, attrname))
- v.remove(name_def_map[e])
- return v
- # Look for the attribute in the class and its parents, as defined by the resolution order
- v = None
- for t in self.resolution_order:
- data = tdata[t[0]]
- if data.has_key(attrname):
- v = data[attrname]
- break
- else: # Attribute not found
- raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name))
- # 'progen' needs the full path to the template (the path in JSON is relative to workspace_tools/export)
- return v if attrname != "progen" else self.__add_paths_to_progen(v)
-
- # Return the value of an attribute
- # This function only looks for the attribute's value in the cache, the real work of computing the
- # attribute's value is done in the function above (__getattr_helper)
- def __getattr__(self, attrname):
- if not self.attr_cache.has_key(attrname):
- self.attr_cache[attrname] = self.__getattr_helper(attrname)
- return self.attr_cache[attrname]
-
- def __init__(self, name):
- self.name = name
-
- # Compute resolution order once (it will be used later in __getattr__)
- self.resolution_order = self.__get_resolution_order(self.name, [])
-
- # Attribute cache: once an attribute's value is computed, don't compute it again
- self.attr_cache = {}
-
- def program_cycle_s(self):
- try:
- return self.__getattr__("program_cycle_s")
- except AttributeError:
- return 4 if self.is_disk_virtual else 1.5
-
- def get_labels(self):
- return [self.name] + CORE_LABELS[self.core] + self.extra_labels
-
- # For now, this function only allows "post binary" hooks (hooks that are executed after
- # the binary image is extracted from the executable file)
- def init_hooks(self, hook, toolchain_name):
- # If there's no hook, simply return
- try:
- hook_data = self.post_binary_hook
- except AttributeError:
- return
- # A hook was found. The hook's name is in the format "classname.functionname"
- temp = hook_data["function"].split(".")
- if len(temp) != 2:
- raise HookError("Invalid format for hook '%s' in target '%s' (must be 'class_name.function_name')" % (hook_data["function"], self.name))
- class_name, function_name = temp[0], temp[1]
- # "class_name" must refer to a class in this file, so check if the class exists
- mdata = self.get_module_data()
- if not mdata.has_key(class_name) or not inspect.isclass(mdata[class_name]):
- raise HookError("Class '%s' required by '%s' in target '%s' not found in targets.py" % (class_name, hook_data["function"], self.name))
- # "function_name" must refer to a static function inside class "class_name"
- cls = mdata[class_name]
- if (not hasattr(cls, function_name)) or (not inspect.isfunction(getattr(cls, function_name))):
- raise HookError("Static function '%s' required by '%s' in target '%s' not found in class '%s'" % (function_name, hook_data["function"], self.name, class_name))
- # Check if the hook specification also has target restrictions
- toolchain_restrictions = hook_data.get("toolchains", [])
- if toolchain_restrictions and (toolchain_name not in toolchain_restrictions):
- return
- # Finally, hook the requested function
- hook.hook_add_binary("post", getattr(cls, function_name))
-
-########################################################################################################################
-# Target specific code goes in this section
-# This code can be invoked from the target description using the "post_binary_hook" key
-
-class LPCTargetCode:
- @staticmethod
- def lpc_patch(t_self, resources, elf, binf):
- t_self.debug("LPC Patch: %s" % os.path.split(binf)[1])
- patch(binf)
-
-class LPC4088Code:
- @staticmethod
- def binary_hook(t_self, resources, elf, binf):
- if not os.path.isdir(binf):
- # Regular binary file, nothing to do
- LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
- return
- outbin = open(binf + ".temp", "wb")
- partf = open(os.path.join(binf, "ER_IROM1"), "rb")
- # Pad the fist part (internal flash) with 0xFF to 512k
- data = partf.read()
- outbin.write(data)
- outbin.write('\xFF' * (512*1024 - len(data)))
- partf.close()
- # Read and append the second part (external flash) in chunks of fixed size
- chunksize = 128 * 1024
- partf = open(os.path.join(binf, "ER_IROM2"), "rb")
- while True:
- data = partf.read(chunksize)
- outbin.write(data)
- if len(data) < chunksize:
- break
- partf.close()
- outbin.close()
- # Remove the directory with the binary parts and rename the temporary
- # file to 'binf'
- shutil.rmtree(binf, True)
- os.rename(binf + '.temp', binf)
- t_self.debug("Generated custom binary file (internal flash + SPIFI)")
- LPCTargetCode.lpc_patch(t_self, resources, elf, binf)
-
-class TEENSY3_1Code:
- @staticmethod
- def binary_hook(t_self, resources, elf, binf):
- from intelhex import IntelHex
- binh = IntelHex()
- binh.loadbin(binf, offset = 0)
-
- with open(binf.replace(".bin", ".hex"), "w") as f:
- binh.tofile(f, format='hex')
-
-class MTSCode:
- @staticmethod
- def _combine_bins_helper(target_name, t_self, resources, elf, binf):
- loader = os.path.join(TOOLS_BOOTLOADERS, target_name, "bootloader.bin")
- target = binf + ".tmp"
- if not os.path.exists(loader):
- print "Can't find bootloader binary: " + loader
- return
- outbin = open(target, 'w+b')
- part = open(loader, 'rb')
- data = part.read()
- outbin.write(data)
- outbin.write('\xFF' * (64*1024 - len(data)))
- part.close()
- part = open(binf, 'rb')
- data = part.read()
- outbin.write(data)
- part.close()
- outbin.seek(0, 0)
- data = outbin.read()
- outbin.seek(0, 1)
- crc = struct.pack(' platform_name
- """
- result = {}
- for target in TARGETS:
- for detect_code in target.detect_code:
- result[detect_code] = target.name
- return result
diff --git a/workspace_tools/test_api.py b/workspace_tools/test_api.py
deleted file mode 100644
index d80c0c3..0000000
--- a/workspace_tools/test_api.py
+++ /dev/null
@@ -1,1951 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-"""
-
-import os
-import re
-import sys
-import json
-import uuid
-import pprint
-import random
-import optparse
-import datetime
-import threading
-import ctypes
-from types import ListType
-from colorama import Fore, Back, Style
-from prettytable import PrettyTable
-
-from time import sleep, time
-from Queue import Queue, Empty
-from os.path import join, exists, basename
-from threading import Thread, Lock
-from subprocess import Popen, PIPE
-
-# Imports related to mbed build api
-from workspace_tools.tests import TESTS
-from workspace_tools.tests import TEST_MAP
-from workspace_tools.paths import BUILD_DIR
-from workspace_tools.paths import HOST_TESTS
-from workspace_tools.utils import ToolException
-from workspace_tools.utils import NotSupportedException
-from workspace_tools.utils import construct_enum
-from workspace_tools.targets import TARGET_MAP
-from workspace_tools.test_db import BaseDBAccess
-from workspace_tools.build_api import build_project, build_mbed_libs, build_lib
-from workspace_tools.build_api import get_target_supported_toolchains
-from workspace_tools.build_api import write_build_report
-from workspace_tools.build_api import prep_report
-from workspace_tools.build_api import prep_properties
-from workspace_tools.build_api import create_result
-from workspace_tools.build_api import add_result_to_report
-from workspace_tools.libraries import LIBRARIES, LIBRARY_MAP
-from workspace_tools.toolchains import TOOLCHAIN_BIN_PATH
-from workspace_tools.test_exporters import ReportExporter, ResultExporterType
-
-import workspace_tools.host_tests.host_tests_plugins as host_tests_plugins
-
-try:
- import mbed_lstools
- from workspace_tools.compliance.ioper_runner import get_available_oper_test_scopes
-except:
- pass
-
-
-class ProcessObserver(Thread):
- def __init__(self, proc):
- Thread.__init__(self)
- self.proc = proc
- self.queue = Queue()
- self.daemon = True
- self.active = True
- self.start()
-
- def run(self):
- while self.active:
- c = self.proc.stdout.read(1)
- self.queue.put(c)
-
- def stop(self):
- self.active = False
- try:
- self.proc.terminate()
- except Exception, _:
- pass
-
-
-class SingleTestExecutor(threading.Thread):
- """ Example: Single test class in separate thread usage
- """
- def __init__(self, single_test):
- self.single_test = single_test
- threading.Thread.__init__(self)
-
- def run(self):
- start = time()
- # Execute tests depending on options and filter applied
- test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext = self.single_test.execute()
- elapsed_time = time() - start
-
- # Human readable summary
- if not self.single_test.opts_suppress_summary:
- # prints well-formed summary with results (SQL table like)
- print self.single_test.generate_test_summary(test_summary, shuffle_seed)
- if self.single_test.opts_test_x_toolchain_summary:
- # prints well-formed summary with results (SQL table like)
- # table shows text x toolchain test result matrix
- print self.single_test.generate_test_summary_by_target(test_summary, shuffle_seed)
- print "Completed in %.2f sec"% (elapsed_time)
-
-
-class SingleTestRunner(object):
- """ Object wrapper for single test run which may involve multiple MUTs
- """
- RE_DETECT_TESTCASE_RESULT = None
-
- # Return codes for test script
- TEST_RESULT_OK = "OK"
- TEST_RESULT_FAIL = "FAIL"
- TEST_RESULT_ERROR = "ERROR"
- TEST_RESULT_UNDEF = "UNDEF"
- TEST_RESULT_IOERR_COPY = "IOERR_COPY"
- TEST_RESULT_IOERR_DISK = "IOERR_DISK"
- TEST_RESULT_IOERR_SERIAL = "IOERR_SERIAL"
- TEST_RESULT_TIMEOUT = "TIMEOUT"
- TEST_RESULT_NO_IMAGE = "NO_IMAGE"
- TEST_RESULT_MBED_ASSERT = "MBED_ASSERT"
- TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"
- TEST_RESULT_NOT_SUPPORTED = "NOT_SUPPORTED"
-
- GLOBAL_LOOPS_COUNT = 1 # How many times each test should be repeated
- TEST_LOOPS_LIST = [] # We redefine no.of loops per test_id
- TEST_LOOPS_DICT = {} # TEST_LOOPS_LIST in dict format: { test_id : test_loop_count}
-
- muts = {} # MUTs descriptor (from external file)
- test_spec = {} # Test specification (from external file)
-
- # mbed test suite -> SingleTestRunner
- TEST_RESULT_MAPPING = {"success" : TEST_RESULT_OK,
- "failure" : TEST_RESULT_FAIL,
- "error" : TEST_RESULT_ERROR,
- "ioerr_copy" : TEST_RESULT_IOERR_COPY,
- "ioerr_disk" : TEST_RESULT_IOERR_DISK,
- "ioerr_serial" : TEST_RESULT_IOERR_SERIAL,
- "timeout" : TEST_RESULT_TIMEOUT,
- "no_image" : TEST_RESULT_NO_IMAGE,
- "end" : TEST_RESULT_UNDEF,
- "mbed_assert" : TEST_RESULT_MBED_ASSERT,
- "build_failed" : TEST_RESULT_BUILD_FAILED,
- "not_supproted" : TEST_RESULT_NOT_SUPPORTED
- }
-
- def __init__(self,
- _global_loops_count=1,
- _test_loops_list=None,
- _muts={},
- _clean=False,
- _opts_db_url=None,
- _opts_log_file_name=None,
- _opts_report_html_file_name=None,
- _opts_report_junit_file_name=None,
- _opts_report_build_file_name=None,
- _opts_build_report={},
- _opts_build_properties={},
- _test_spec={},
- _opts_goanna_for_mbed_sdk=None,
- _opts_goanna_for_tests=None,
- _opts_shuffle_test_order=False,
- _opts_shuffle_test_seed=None,
- _opts_test_by_names=None,
- _opts_peripheral_by_names=None,
- _opts_test_only_peripheral=False,
- _opts_test_only_common=False,
- _opts_verbose_skipped_tests=False,
- _opts_verbose_test_result_only=False,
- _opts_verbose=False,
- _opts_firmware_global_name=None,
- _opts_only_build_tests=False,
- _opts_parallel_test_exec=False,
- _opts_suppress_summary=False,
- _opts_test_x_toolchain_summary=False,
- _opts_copy_method=None,
- _opts_mut_reset_type=None,
- _opts_jobs=None,
- _opts_waterfall_test=None,
- _opts_consolidate_waterfall_test=None,
- _opts_extend_test_timeout=None,
- _opts_auto_detect=None,
- _opts_include_non_automated=False):
- """ Let's try hard to init this object
- """
- from colorama import init
- init()
-
- PATTERN = "\\{(" + "|".join(self.TEST_RESULT_MAPPING.keys()) + ")\\}"
- self.RE_DETECT_TESTCASE_RESULT = re.compile(PATTERN)
- # Settings related to test loops counters
- try:
- _global_loops_count = int(_global_loops_count)
- except:
- _global_loops_count = 1
- if _global_loops_count < 1:
- _global_loops_count = 1
- self.GLOBAL_LOOPS_COUNT = _global_loops_count
- self.TEST_LOOPS_LIST = _test_loops_list if _test_loops_list else []
- self.TEST_LOOPS_DICT = self.test_loop_list_to_dict(_test_loops_list)
-
- self.shuffle_random_seed = 0.0
- self.SHUFFLE_SEED_ROUND = 10
-
- # MUT list and test specification storage
- self.muts = _muts
- self.test_spec = _test_spec
-
- # Settings passed e.g. from command line
- self.opts_db_url = _opts_db_url
- self.opts_log_file_name = _opts_log_file_name
- self.opts_report_html_file_name = _opts_report_html_file_name
- self.opts_report_junit_file_name = _opts_report_junit_file_name
- self.opts_report_build_file_name = _opts_report_build_file_name
- self.opts_goanna_for_mbed_sdk = _opts_goanna_for_mbed_sdk
- self.opts_goanna_for_tests = _opts_goanna_for_tests
- self.opts_shuffle_test_order = _opts_shuffle_test_order
- self.opts_shuffle_test_seed = _opts_shuffle_test_seed
- self.opts_test_by_names = _opts_test_by_names
- self.opts_peripheral_by_names = _opts_peripheral_by_names
- self.opts_test_only_peripheral = _opts_test_only_peripheral
- self.opts_test_only_common = _opts_test_only_common
- self.opts_verbose_skipped_tests = _opts_verbose_skipped_tests
- self.opts_verbose_test_result_only = _opts_verbose_test_result_only
- self.opts_verbose = _opts_verbose
- self.opts_firmware_global_name = _opts_firmware_global_name
- self.opts_only_build_tests = _opts_only_build_tests
- self.opts_parallel_test_exec = _opts_parallel_test_exec
- self.opts_suppress_summary = _opts_suppress_summary
- self.opts_test_x_toolchain_summary = _opts_test_x_toolchain_summary
- self.opts_copy_method = _opts_copy_method
- self.opts_mut_reset_type = _opts_mut_reset_type
- self.opts_jobs = _opts_jobs if _opts_jobs is not None else 1
- self.opts_waterfall_test = _opts_waterfall_test
- self.opts_consolidate_waterfall_test = _opts_consolidate_waterfall_test
- self.opts_extend_test_timeout = _opts_extend_test_timeout
- self.opts_clean = _clean
- self.opts_auto_detect = _opts_auto_detect
- self.opts_include_non_automated = _opts_include_non_automated
-
- self.build_report = _opts_build_report
- self.build_properties = _opts_build_properties
-
- # File / screen logger initialization
- self.logger = CLITestLogger(file_name=self.opts_log_file_name) # Default test logger
-
- # Database related initializations
- self.db_logger = factory_db_logger(self.opts_db_url)
- self.db_logger_build_id = None # Build ID (database index of build_id table)
- # Let's connect to database to set up credentials and confirm database is ready
- if self.db_logger:
- self.db_logger.connect_url(self.opts_db_url) # Save db access info inside db_logger object
- if self.db_logger.is_connected():
- # Get hostname and uname so we can use it as build description
- # when creating new build_id in external database
- (_hostname, _uname) = self.db_logger.get_hostname()
- _host_location = os.path.dirname(os.path.abspath(__file__))
- build_id_type = None if self.opts_only_build_tests is None else self.db_logger.BUILD_ID_TYPE_BUILD_ONLY
- self.db_logger_build_id = self.db_logger.get_next_build_id(_hostname, desc=_uname, location=_host_location, type=build_id_type)
- self.db_logger.disconnect()
-
- def dump_options(self):
- """ Function returns data structure with common settings passed to SingelTestRunner
- It can be used for example to fill _extra fields in database storing test suite single run data
- Example:
- data = self.dump_options()
- or
- data_str = json.dumps(self.dump_options())
- """
- result = {"db_url" : str(self.opts_db_url),
- "log_file_name" : str(self.opts_log_file_name),
- "shuffle_test_order" : str(self.opts_shuffle_test_order),
- "shuffle_test_seed" : str(self.opts_shuffle_test_seed),
- "test_by_names" : str(self.opts_test_by_names),
- "peripheral_by_names" : str(self.opts_peripheral_by_names),
- "test_only_peripheral" : str(self.opts_test_only_peripheral),
- "test_only_common" : str(self.opts_test_only_common),
- "verbose" : str(self.opts_verbose),
- "firmware_global_name" : str(self.opts_firmware_global_name),
- "only_build_tests" : str(self.opts_only_build_tests),
- "copy_method" : str(self.opts_copy_method),
- "mut_reset_type" : str(self.opts_mut_reset_type),
- "jobs" : str(self.opts_jobs),
- "extend_test_timeout" : str(self.opts_extend_test_timeout),
- "_dummy" : ''
- }
- return result
-
- def shuffle_random_func(self):
- return self.shuffle_random_seed
-
- def is_shuffle_seed_float(self):
- """ return true if function parameter can be converted to float
- """
- result = True
- try:
- float(self.shuffle_random_seed)
- except ValueError:
- result = False
- return result
-
- # This will store target / toolchain specific properties
- test_suite_properties_ext = {} # target : toolchain
- # Here we store test results
- test_summary = []
- # Here we store test results in extended data structure
- test_summary_ext = {}
- execute_thread_slice_lock = Lock()
-
- def execute_thread_slice(self, q, target, toolchains, clean, test_ids, build_report, build_properties):
- for toolchain in toolchains:
- tt_id = "%s::%s" % (toolchain, target)
-
- T = TARGET_MAP[target]
-
- # print target, toolchain
- # Test suite properties returned to external tools like CI
- test_suite_properties = {
- 'jobs': self.opts_jobs,
- 'clean': clean,
- 'target': target,
- 'vendor': T.extra_labels[0],
- 'test_ids': ', '.join(test_ids),
- 'toolchain': toolchain,
- 'shuffle_random_seed': self.shuffle_random_seed
- }
-
-
- # print '=== %s::%s ===' % (target, toolchain)
- # Let's build our test
- if target not in TARGET_MAP:
- print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Target platform not found'% (target))
- continue
-
- build_mbed_libs_options = ["analyze"] if self.opts_goanna_for_mbed_sdk else None
- clean_mbed_libs_options = True if self.opts_goanna_for_mbed_sdk or clean or self.opts_clean else None
-
-
- try:
- build_mbed_libs_result = build_mbed_libs(T,
- toolchain,
- options=build_mbed_libs_options,
- clean=clean_mbed_libs_options,
- verbose=self.opts_verbose,
- jobs=self.opts_jobs,
- report=build_report,
- properties=build_properties)
-
- if not build_mbed_libs_result:
- print self.logger.log_line(self.logger.LogType.NOTIF, 'Skipped tests for %s target. Toolchain %s is not yet supported for this target'% (T.name, toolchain))
- continue
-
- except ToolException:
- print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building MBED libs for %s using %s'% (target, toolchain))
- continue
-
- build_dir = join(BUILD_DIR, "test", target, toolchain)
-
- test_suite_properties['build_mbed_libs_result'] = build_mbed_libs_result
- test_suite_properties['build_dir'] = build_dir
- test_suite_properties['skipped'] = []
-
- # Enumerate through all tests and shuffle test order if requested
- test_map_keys = sorted(TEST_MAP.keys())
-
- if self.opts_shuffle_test_order:
- random.shuffle(test_map_keys, self.shuffle_random_func)
- # Update database with shuffle seed f applicable
- if self.db_logger:
- self.db_logger.reconnect();
- if self.db_logger.is_connected():
- self.db_logger.update_build_id_info(self.db_logger_build_id, _shuffle_seed=self.shuffle_random_func())
- self.db_logger.disconnect();
-
- if self.db_logger:
- self.db_logger.reconnect();
- if self.db_logger.is_connected():
- # Update MUTs and Test Specification in database
- self.db_logger.update_build_id_info(self.db_logger_build_id, _muts=self.muts, _test_spec=self.test_spec)
- # Update Extra information in database (some options passed to test suite)
- self.db_logger.update_build_id_info(self.db_logger_build_id, _extra=json.dumps(self.dump_options()))
- self.db_logger.disconnect();
-
- valid_test_map_keys = self.get_valid_tests(test_map_keys, target, toolchain, test_ids, self.opts_include_non_automated)
- skipped_test_map_keys = self.get_skipped_tests(test_map_keys, valid_test_map_keys)
-
- for skipped_test_id in skipped_test_map_keys:
- test_suite_properties['skipped'].append(skipped_test_id)
-
-
- # First pass through all tests and determine which libraries need to be built
- libraries = []
- for test_id in valid_test_map_keys:
- test = TEST_MAP[test_id]
-
- # Detect which lib should be added to test
- # Some libs have to compiled like RTOS or ETH
- for lib in LIBRARIES:
- if lib['build_dir'] in test.dependencies and lib['id'] not in libraries:
- libraries.append(lib['id'])
-
-
- build_project_options = ["analyze"] if self.opts_goanna_for_tests else None
- clean_project_options = True if self.opts_goanna_for_tests or clean or self.opts_clean else None
-
- # Build all required libraries
- for lib_id in libraries:
- try:
- build_lib(lib_id,
- T,
- toolchain,
- options=build_project_options,
- verbose=self.opts_verbose,
- clean=clean_mbed_libs_options,
- jobs=self.opts_jobs,
- report=build_report,
- properties=build_properties)
-
- except ToolException:
- print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building library %s'% (lib_id))
- continue
-
-
- for test_id in valid_test_map_keys:
- test = TEST_MAP[test_id]
-
- test_suite_properties['test.libs.%s.%s.%s'% (target, toolchain, test_id)] = ', '.join(libraries)
-
- # TODO: move this 2 below loops to separate function
- INC_DIRS = []
- for lib_id in libraries:
- if 'inc_dirs_ext' in LIBRARY_MAP[lib_id] and LIBRARY_MAP[lib_id]['inc_dirs_ext']:
- INC_DIRS.extend(LIBRARY_MAP[lib_id]['inc_dirs_ext'])
-
- MACROS = []
- for lib_id in libraries:
- if 'macros' in LIBRARY_MAP[lib_id] and LIBRARY_MAP[lib_id]['macros']:
- MACROS.extend(LIBRARY_MAP[lib_id]['macros'])
- MACROS.append('TEST_SUITE_TARGET_NAME="%s"'% target)
- MACROS.append('TEST_SUITE_TEST_ID="%s"'% test_id)
- test_uuid = uuid.uuid4()
- MACROS.append('TEST_SUITE_UUID="%s"'% str(test_uuid))
-
- # Prepare extended test results data structure (it can be used to generate detailed test report)
- if target not in self.test_summary_ext:
- self.test_summary_ext[target] = {} # test_summary_ext : toolchain
- if toolchain not in self.test_summary_ext[target]:
- self.test_summary_ext[target][toolchain] = {} # test_summary_ext : toolchain : target
-
- tt_test_id = "%s::%s::%s" % (toolchain, target, test_id) # For logging only
-
- project_name = self.opts_firmware_global_name if self.opts_firmware_global_name else None
- try:
- path = build_project(test.source_dir,
- join(build_dir, test_id),
- T,
- toolchain,
- test.dependencies,
- options=build_project_options,
- clean=clean_project_options,
- verbose=self.opts_verbose,
- name=project_name,
- macros=MACROS,
- inc_dirs=INC_DIRS,
- jobs=self.opts_jobs,
- report=build_report,
- properties=build_properties,
- project_id=test_id,
- project_description=test.get_description())
-
- except Exception, e:
- project_name_str = project_name if project_name is not None else test_id
-
-
- test_result = self.TEST_RESULT_FAIL
-
- if isinstance(e, ToolException):
- print self.logger.log_line(self.logger.LogType.ERROR, 'There were errors while building project %s'% (project_name_str))
- test_result = self.TEST_RESULT_BUILD_FAILED
- elif isinstance(e, NotSupportedException):
- print self.logger.log_line(self.logger.LogType.INFO, 'The project %s is not supported'% (project_name_str))
- test_result = self.TEST_RESULT_NOT_SUPPORTED
-
-
- # Append test results to global test summary
- self.test_summary.append(
- (test_result, target, toolchain, test_id, test.get_description(), 0, 0, '-')
- )
-
- # Add detailed test result to test summary structure
- if test_id not in self.test_summary_ext[target][toolchain]:
- self.test_summary_ext[target][toolchain][test_id] = []
-
- self.test_summary_ext[target][toolchain][test_id].append({ 0: {
- 'result' : test_result,
- 'output' : '',
- 'target_name' : target,
- 'target_name_unique': target,
- 'toolchain_name' : toolchain,
- 'id' : test_id,
- 'description' : test.get_description(),
- 'elapsed_time' : 0,
- 'duration' : 0,
- 'copy_method' : None
- }})
- continue
-
- if self.opts_only_build_tests:
- # With this option we are skipping testing phase
- continue
-
- # Test duration can be increased by global value
- test_duration = test.duration
- if self.opts_extend_test_timeout is not None:
- test_duration += self.opts_extend_test_timeout
-
- # For an automated test the duration act as a timeout after
- # which the test gets interrupted
- test_spec = self.shape_test_request(target, path, test_id, test_duration)
- test_loops = self.get_test_loop_count(test_id)
-
- test_suite_properties['test.duration.%s.%s.%s'% (target, toolchain, test_id)] = test_duration
- test_suite_properties['test.loops.%s.%s.%s'% (target, toolchain, test_id)] = test_loops
- test_suite_properties['test.path.%s.%s.%s'% (target, toolchain, test_id)] = path
-
- # read MUTs, test specification and perform tests
- handle_results = self.handle(test_spec, target, toolchain, test_loops=test_loops)
-
- if handle_results is None:
- continue
-
- for handle_result in handle_results:
- if handle_result:
- single_test_result, detailed_test_results = handle_result
- else:
- continue
-
- # Append test results to global test summary
- if single_test_result is not None:
- self.test_summary.append(single_test_result)
-
- # Add detailed test result to test summary structure
- if target not in self.test_summary_ext[target][toolchain]:
- if test_id not in self.test_summary_ext[target][toolchain]:
- self.test_summary_ext[target][toolchain][test_id] = []
-
- append_test_result = detailed_test_results
-
- # If waterfall and consolidate-waterfall options are enabled,
- # only include the last test result in the report.
- if self.opts_waterfall_test and self.opts_consolidate_waterfall_test:
- append_test_result = {0: detailed_test_results[len(detailed_test_results) - 1]}
-
- self.test_summary_ext[target][toolchain][test_id].append(append_test_result)
-
- test_suite_properties['skipped'] = ', '.join(test_suite_properties['skipped'])
- self.test_suite_properties_ext[target][toolchain] = test_suite_properties
-
- q.put(target + '_'.join(toolchains))
- return
-
- def execute(self):
- clean = self.test_spec.get('clean', False)
- test_ids = self.test_spec.get('test_ids', [])
- q = Queue()
-
- # Generate seed for shuffle if seed is not provided in
- self.shuffle_random_seed = round(random.random(), self.SHUFFLE_SEED_ROUND)
- if self.opts_shuffle_test_seed is not None and self.is_shuffle_seed_float():
- self.shuffle_random_seed = round(float(self.opts_shuffle_test_seed), self.SHUFFLE_SEED_ROUND)
-
-
- if self.opts_parallel_test_exec:
- ###################################################################
- # Experimental, parallel test execution per singletest instance.
- ###################################################################
- execute_threads = [] # Threads used to build mbed SDL, libs, test cases and execute tests
- # Note: We are building here in parallel for each target separately!
- # So we are not building the same thing multiple times and compilers
- # in separate threads do not collide.
- # Inside execute_thread_slice() function function handle() will be called to
- # get information about available MUTs (per target).
- for target, toolchains in self.test_spec['targets'].iteritems():
- self.test_suite_properties_ext[target] = {}
- t = threading.Thread(target=self.execute_thread_slice, args = (q, target, toolchains, clean, test_ids, self.build_report, self.build_properties))
- t.daemon = True
- t.start()
- execute_threads.append(t)
-
- for t in execute_threads:
- q.get() # t.join() would block some threads because we should not wait in any order for thread end
- else:
- # Serialized (not parallel) test execution
- for target, toolchains in self.test_spec['targets'].iteritems():
- if target not in self.test_suite_properties_ext:
- self.test_suite_properties_ext[target] = {}
-
- self.execute_thread_slice(q, target, toolchains, clean, test_ids, self.build_report, self.build_properties)
- q.get()
-
- if self.db_logger:
- self.db_logger.reconnect();
- if self.db_logger.is_connected():
- self.db_logger.update_build_id_info(self.db_logger_build_id, _status_fk=self.db_logger.BUILD_ID_STATUS_COMPLETED)
- self.db_logger.disconnect();
-
- return self.test_summary, self.shuffle_random_seed, self.test_summary_ext, self.test_suite_properties_ext, self.build_report, self.build_properties
-
- def get_valid_tests(self, test_map_keys, target, toolchain, test_ids, include_non_automated):
- valid_test_map_keys = []
-
- for test_id in test_map_keys:
- test = TEST_MAP[test_id]
- if self.opts_test_by_names and test_id not in self.opts_test_by_names.split(','):
- continue
-
- if test_ids and test_id not in test_ids:
- continue
-
- if self.opts_test_only_peripheral and not test.peripherals:
- if self.opts_verbose_skipped_tests:
- print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
- continue
-
- if self.opts_peripheral_by_names and test.peripherals and not len([i for i in test.peripherals if i in self.opts_peripheral_by_names.split(',')]):
- # We will skip tests not forced with -p option
- if self.opts_verbose_skipped_tests:
- print self.logger.log_line(self.logger.LogType.INFO, 'Common test skipped for target %s'% (target))
- continue
-
- if self.opts_test_only_common and test.peripherals:
- if self.opts_verbose_skipped_tests:
- print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral test skipped for target %s'% (target))
- continue
-
- if not include_non_automated and not test.automated:
- if self.opts_verbose_skipped_tests:
- print self.logger.log_line(self.logger.LogType.INFO, 'Non automated test skipped for target %s'% (target))
- continue
-
- if test.is_supported(target, toolchain):
- if test.peripherals is None and self.opts_only_build_tests:
- # When users are using 'build only flag' and test do not have
- # specified peripherals we can allow test building by default
- pass
- elif self.opts_peripheral_by_names and test_id not in self.opts_peripheral_by_names.split(','):
- # If we force peripheral with option -p we expect test
- # to pass even if peripheral is not in MUTs file.
- pass
- elif not self.is_peripherals_available(target, test.peripherals):
- if self.opts_verbose_skipped_tests:
- if test.peripherals:
- print self.logger.log_line(self.logger.LogType.INFO, 'Peripheral %s test skipped for target %s'% (",".join(test.peripherals), target))
- else:
- print self.logger.log_line(self.logger.LogType.INFO, 'Test %s skipped for target %s'% (test_id, target))
- continue
-
- # The test has made it through all the filters, so add it to the valid tests list
- valid_test_map_keys.append(test_id)
-
- return valid_test_map_keys
-
- def get_skipped_tests(self, all_test_map_keys, valid_test_map_keys):
- # NOTE: This will not preserve order
- return list(set(all_test_map_keys) - set(valid_test_map_keys))
-
- def generate_test_summary_by_target(self, test_summary, shuffle_seed=None):
- """ Prints well-formed summary with results (SQL table like)
- table shows text x toolchain test result matrix
- """
- RESULT_INDEX = 0
- TARGET_INDEX = 1
- TOOLCHAIN_INDEX = 2
- TEST_INDEX = 3
- DESC_INDEX = 4
-
- unique_targets = get_unique_value_from_summary(test_summary, TARGET_INDEX)
- unique_tests = get_unique_value_from_summary(test_summary, TEST_INDEX)
- unique_test_desc = get_unique_value_from_summary_ext(test_summary, TEST_INDEX, DESC_INDEX)
- unique_toolchains = get_unique_value_from_summary(test_summary, TOOLCHAIN_INDEX)
-
- result = "Test summary:\n"
- for target in unique_targets:
- result_dict = {} # test : { toolchain : result }
- unique_target_toolchains = []
- for test in test_summary:
- if test[TARGET_INDEX] == target:
- if test[TOOLCHAIN_INDEX] not in unique_target_toolchains:
- unique_target_toolchains.append(test[TOOLCHAIN_INDEX])
- if test[TEST_INDEX] not in result_dict:
- result_dict[test[TEST_INDEX]] = {}
- result_dict[test[TEST_INDEX]][test[TOOLCHAIN_INDEX]] = test[RESULT_INDEX]
-
- pt_cols = ["Target", "Test ID", "Test Description"] + unique_target_toolchains
- pt = PrettyTable(pt_cols)
- for col in pt_cols:
- pt.align[col] = "l"
- pt.padding_width = 1 # One space between column edges and contents (default)
-
- for test in unique_tests:
- if test in result_dict:
- test_results = result_dict[test]
- if test in unique_test_desc:
- row = [target, test, unique_test_desc[test]]
- for toolchain in unique_toolchains:
- if toolchain in test_results:
- row.append(test_results[toolchain])
- pt.add_row(row)
- result += pt.get_string()
- shuffle_seed_text = "Shuffle Seed: %.*f"% (self.SHUFFLE_SEED_ROUND,
- shuffle_seed if shuffle_seed else self.shuffle_random_seed)
- result += "\n%s"% (shuffle_seed_text if self.opts_shuffle_test_order else '')
- return result
-
- def generate_test_summary(self, test_summary, shuffle_seed=None):
- """ Prints well-formed summary with results (SQL table like)
- table shows target x test results matrix across
- """
- success_code = 0 # Success code that can be leter returned to
- result = "Test summary:\n"
- # Pretty table package is used to print results
- pt = PrettyTable(["Result", "Target", "Toolchain", "Test ID", "Test Description",
- "Elapsed Time (sec)", "Timeout (sec)", "Loops"])
- pt.align["Result"] = "l" # Left align
- pt.align["Target"] = "l" # Left align
- pt.align["Toolchain"] = "l" # Left align
- pt.align["Test ID"] = "l" # Left align
- pt.align["Test Description"] = "l" # Left align
- pt.padding_width = 1 # One space between column edges and contents (default)
-
- result_dict = {self.TEST_RESULT_OK : 0,
- self.TEST_RESULT_FAIL : 0,
- self.TEST_RESULT_ERROR : 0,
- self.TEST_RESULT_UNDEF : 0,
- self.TEST_RESULT_IOERR_COPY : 0,
- self.TEST_RESULT_IOERR_DISK : 0,
- self.TEST_RESULT_IOERR_SERIAL : 0,
- self.TEST_RESULT_NO_IMAGE : 0,
- self.TEST_RESULT_TIMEOUT : 0,
- self.TEST_RESULT_MBED_ASSERT : 0,
- self.TEST_RESULT_BUILD_FAILED : 0,
- self.TEST_RESULT_NOT_SUPPORTED : 0
- }
-
- for test in test_summary:
- if test[0] in result_dict:
- result_dict[test[0]] += 1
- pt.add_row(test)
- result += pt.get_string()
- result += "\n"
-
- # Print result count
- result += "Result: " + ' / '.join(['%s %s' % (value, key) for (key, value) in {k: v for k, v in result_dict.items() if v != 0}.iteritems()])
- shuffle_seed_text = "Shuffle Seed: %.*f\n"% (self.SHUFFLE_SEED_ROUND,
- shuffle_seed if shuffle_seed else self.shuffle_random_seed)
- result += "\n%s"% (shuffle_seed_text if self.opts_shuffle_test_order else '')
- return result
-
- def test_loop_list_to_dict(self, test_loops_str):
- """ Transforms test_id=X,test_id=X,test_id=X into dictionary {test_id : test_id_loops_count}
- """
- result = {}
- if test_loops_str:
- test_loops = test_loops_str.split(',')
- for test_loop in test_loops:
- test_loop_count = test_loop.split('=')
- if len(test_loop_count) == 2:
- _test_id, _test_loops = test_loop_count
- try:
- _test_loops = int(_test_loops)
- except:
- continue
- result[_test_id] = _test_loops
- return result
-
- def get_test_loop_count(self, test_id):
- """ This function returns no. of loops per test (deducted by test_id_.
- If test is not in list of redefined loop counts it will use default value.
- """
- result = self.GLOBAL_LOOPS_COUNT
- if test_id in self.TEST_LOOPS_DICT:
- result = self.TEST_LOOPS_DICT[test_id]
- return result
-
- def delete_file(self, file_path):
- """ Remove file from the system
- """
- result = True
- resutl_msg = ""
- try:
- os.remove(file_path)
- except Exception, e:
- resutl_msg = e
- result = False
- return result, resutl_msg
-
- def handle_mut(self, mut, data, target_name, toolchain_name, test_loops=1):
- """ Test is being invoked for given MUT.
- """
- # Get test information, image and test timeout
- test_id = data['test_id']
- test = TEST_MAP[test_id]
- test_description = TEST_MAP[test_id].get_description()
- image = data["image"]
- duration = data.get("duration", 10)
-
- if mut is None:
- print "Error: No Mbed available: MUT[%s]" % data['mcu']
- return None
-
- mcu = mut['mcu']
- copy_method = mut.get('copy_method') # Available board configuration selection e.g. core selection etc.
-
- if self.db_logger:
- self.db_logger.reconnect()
-
- selected_copy_method = self.opts_copy_method if copy_method is None else copy_method
-
- # Tests can be looped so test results must be stored for the same test
- test_all_result = []
- # Test results for one test ran few times
- detailed_test_results = {} # { Loop_number: { results ... } }
-
- for test_index in range(test_loops):
-
- # If mbedls is available and we are auto detecting MUT info,
- # update MUT info (mounting may changed)
- if get_module_avail('mbed_lstools') and self.opts_auto_detect:
- platform_name_filter = [mcu]
- muts_list = {}
- found = False
-
- for i in range(0, 60):
- print('Looking for %s with MBEDLS' % mcu)
- muts_list = get_autodetected_MUTS_list(platform_name_filter=platform_name_filter)
-
- if 1 not in muts_list:
- sleep(3)
- else:
- found = True
- break
-
- if not found:
- print "Error: mbed not found with MBEDLS: %s" % data['mcu']
- return None
- else:
- mut = muts_list[1]
-
- disk = mut.get('disk')
- port = mut.get('port')
-
- if disk is None or port is None:
- return None
-
- target_by_mcu = TARGET_MAP[mut['mcu']]
- target_name_unique = mut['mcu_unique'] if 'mcu_unique' in mut else mut['mcu']
- # Some extra stuff can be declared in MUTs structure
- reset_type = mut.get('reset_type') # reboot.txt, reset.txt, shutdown.txt
- reset_tout = mut.get('reset_tout') # COPY_IMAGE -> RESET_PROC -> SLEEP(RESET_TOUT)
-
- # When the build and test system were separate, this was relative to a
- # base network folder base path: join(NETWORK_BASE_PATH, )
- image_path = image
-
- # Host test execution
- start_host_exec_time = time()
-
- single_test_result = self.TEST_RESULT_UNDEF # single test run result
- _copy_method = selected_copy_method
-
- if not exists(image_path):
- single_test_result = self.TEST_RESULT_NO_IMAGE
- elapsed_time = 0
- single_test_output = self.logger.log_line(self.logger.LogType.ERROR, 'Image file does not exist: %s'% image_path)
- print single_test_output
- else:
- # Host test execution
- start_host_exec_time = time()
-
- host_test_verbose = self.opts_verbose_test_result_only or self.opts_verbose
- host_test_reset = self.opts_mut_reset_type if reset_type is None else reset_type
- host_test_result = self.run_host_test(test.host_test,
- image_path, disk, port, duration,
- micro=target_name,
- verbose=host_test_verbose,
- reset=host_test_reset,
- reset_tout=reset_tout,
- copy_method=selected_copy_method,
- program_cycle_s=target_by_mcu.program_cycle_s())
- single_test_result, single_test_output, single_testduration, single_timeout = host_test_result
-
- # Store test result
- test_all_result.append(single_test_result)
- total_elapsed_time = time() - start_host_exec_time # Test time with copy (flashing) / reset
- elapsed_time = single_testduration # TIme of single test case execution after reset
-
- detailed_test_results[test_index] = {
- 'result' : single_test_result,
- 'output' : single_test_output,
- 'target_name' : target_name,
- 'target_name_unique' : target_name_unique,
- 'toolchain_name' : toolchain_name,
- 'id' : test_id,
- 'description' : test_description,
- 'elapsed_time' : round(elapsed_time, 2),
- 'duration' : single_timeout,
- 'copy_method' : _copy_method,
- }
-
- print self.print_test_result(single_test_result, target_name_unique, toolchain_name,
- test_id, test_description, elapsed_time, single_timeout)
-
- # Update database entries for ongoing test
- if self.db_logger and self.db_logger.is_connected():
- test_type = 'SingleTest'
- self.db_logger.insert_test_entry(self.db_logger_build_id,
- target_name,
- toolchain_name,
- test_type,
- test_id,
- single_test_result,
- single_test_output,
- elapsed_time,
- single_timeout,
- test_index)
-
- # If we perform waterfall test we test until we get OK and we stop testing
- if self.opts_waterfall_test and single_test_result == self.TEST_RESULT_OK:
- break
-
- if self.db_logger:
- self.db_logger.disconnect()
-
- return (self.shape_global_test_loop_result(test_all_result, self.opts_waterfall_test and self.opts_consolidate_waterfall_test),
- target_name_unique,
- toolchain_name,
- test_id,
- test_description,
- round(elapsed_time, 2),
- single_timeout,
- self.shape_test_loop_ok_result_count(test_all_result)), detailed_test_results
-
- def handle(self, test_spec, target_name, toolchain_name, test_loops=1):
- """ Function determines MUT's mbed disk/port and copies binary to
- target.
- """
- handle_results = []
- data = json.loads(test_spec)
-
- # Find a suitable MUT:
- mut = None
- for id, m in self.muts.iteritems():
- if m['mcu'] == data['mcu']:
- mut = m
- handle_result = self.handle_mut(mut, data, target_name, toolchain_name, test_loops=test_loops)
- handle_results.append(handle_result)
-
- return handle_results
-
- def print_test_result(self, test_result, target_name, toolchain_name,
- test_id, test_description, elapsed_time, duration):
- """ Use specific convention to print test result and related data
- """
- tokens = []
- tokens.append("TargetTest")
- tokens.append(target_name)
- tokens.append(toolchain_name)
- tokens.append(test_id)
- tokens.append(test_description)
- separator = "::"
- time_info = " in %.2f of %d sec" % (round(elapsed_time, 2), duration)
- result = separator.join(tokens) + " [" + test_result +"]" + time_info
- return Fore.MAGENTA + result + Fore.RESET
-
- def shape_test_loop_ok_result_count(self, test_all_result):
- """ Reformats list of results to simple string
- """
- test_loop_count = len(test_all_result)
- test_loop_ok_result = test_all_result.count(self.TEST_RESULT_OK)
- return "%d/%d"% (test_loop_ok_result, test_loop_count)
-
- def shape_global_test_loop_result(self, test_all_result, waterfall_and_consolidate):
- """ Reformats list of results to simple string
- """
- result = self.TEST_RESULT_FAIL
-
- if all(test_all_result[0] == res for res in test_all_result):
- result = test_all_result[0]
- elif waterfall_and_consolidate and any(res == self.TEST_RESULT_OK for res in test_all_result):
- result = self.TEST_RESULT_OK
-
- return result
-
- def run_host_test(self, name, image_path, disk, port, duration,
- micro=None, reset=None, reset_tout=None,
- verbose=False, copy_method=None, program_cycle_s=None):
- """ Function creates new process with host test configured with particular test case.
- Function also is pooling for serial port activity from process to catch all data
- printed by test runner and host test during test execution
- """
-
- def get_char_from_queue(obs):
- """ Get character from queue safe way
- """
- try:
- c = obs.queue.get(block=True, timeout=0.5)
- except Empty, _:
- c = None
- return c
-
- def filter_queue_char(c):
- """ Filters out non ASCII characters from serial port
- """
- if ord(c) not in range(128):
- c = ' '
- return c
-
- def get_test_result(output):
- """ Parse test 'output' data
- """
- result = self.TEST_RESULT_TIMEOUT
- for line in "".join(output).splitlines():
- search_result = self.RE_DETECT_TESTCASE_RESULT.search(line)
- if search_result and len(search_result.groups()):
- result = self.TEST_RESULT_MAPPING[search_result.groups(0)[0]]
- break
- return result
-
- def get_auto_property_value(property_name, line):
- """ Scans auto detection line from MUT and returns scanned parameter 'property_name'
- Returns string
- """
- result = None
- if re.search("HOST: Property '%s'"% property_name, line) is not None:
- property = re.search("HOST: Property '%s' = '([\w\d _]+)'"% property_name, line)
- if property is not None and len(property.groups()) == 1:
- result = property.groups()[0]
- return result
-
- # print "{%s} port:%s disk:%s" % (name, port, disk),
- cmd = ["python",
- '%s.py'% name,
- '-d', disk,
- '-f', '"%s"'% image_path,
- '-p', port,
- '-t', str(duration),
- '-C', str(program_cycle_s)]
-
- if get_module_avail('mbed_lstools') and self.opts_auto_detect:
- cmd += ['--auto']
-
- # Add extra parameters to host_test
- if copy_method is not None:
- cmd += ["-c", copy_method]
- if micro is not None:
- cmd += ["-m", micro]
- if reset is not None:
- cmd += ["-r", reset]
- if reset_tout is not None:
- cmd += ["-R", str(reset_tout)]
-
- if verbose:
- print Fore.MAGENTA + "Executing '" + " ".join(cmd) + "'" + Fore.RESET
- print "Test::Output::Start"
-
- proc = Popen(cmd, stdout=PIPE, cwd=HOST_TESTS)
- obs = ProcessObserver(proc)
- update_once_flag = {} # Stores flags checking if some auto-parameter was already set
- line = ''
- output = []
- start_time = time()
- while (time() - start_time) < (2 * duration):
- c = get_char_from_queue(obs)
- if c:
- if verbose:
- sys.stdout.write(c)
- c = filter_queue_char(c)
- output.append(c)
- # Give the mbed under test a way to communicate the end of the test
- if c in ['\n', '\r']:
-
- # Checking for auto-detection information from the test about MUT reset moment
- if 'reset_target' not in update_once_flag and "HOST: Reset target..." in line:
- # We will update this marker only once to prevent multiple time resets
- update_once_flag['reset_target'] = True
- start_time = time()
-
- # Checking for auto-detection information from the test about timeout
- auto_timeout_val = get_auto_property_value('timeout', line)
- if 'timeout' not in update_once_flag and auto_timeout_val is not None:
- # We will update this marker only once to prevent multiple time resets
- update_once_flag['timeout'] = True
- duration = int(auto_timeout_val)
-
- # Detect mbed assert:
- if 'mbed assertation failed: ' in line:
- output.append('{{mbed_assert}}')
- break
-
- # Check for test end
- if '{end}' in line:
- break
- line = ''
- else:
- line += c
- end_time = time()
- testcase_duration = end_time - start_time # Test case duration from reset to {end}
-
- c = get_char_from_queue(obs)
-
- if c:
- if verbose:
- sys.stdout.write(c)
- c = filter_queue_char(c)
- output.append(c)
-
- if verbose:
- print "Test::Output::Finish"
- # Stop test process
- obs.stop()
-
- result = get_test_result(output)
- return (result, "".join(output), testcase_duration, duration)
-
- def is_peripherals_available(self, target_mcu_name, peripherals=None):
- """ Checks if specified target should run specific peripheral test case defined in MUTs file
- """
- if peripherals is not None:
- peripherals = set(peripherals)
- for id, mut in self.muts.iteritems():
- # Target MCU name check
- if mut["mcu"] != target_mcu_name:
- continue
- # Peripherals check
- if peripherals is not None:
- if 'peripherals' not in mut:
- continue
- if not peripherals.issubset(set(mut['peripherals'])):
- continue
- return True
- return False
-
- def shape_test_request(self, mcu, image_path, test_id, duration=10):
- """ Function prepares JSON structure describing test specification
- """
- test_spec = {
- "mcu": mcu,
- "image": image_path,
- "duration": duration,
- "test_id": test_id,
- }
- return json.dumps(test_spec)
-
-
-def get_unique_value_from_summary(test_summary, index):
- """ Gets list of unique target names
- """
- result = []
- for test in test_summary:
- target_name = test[index]
- if target_name not in result:
- result.append(target_name)
- return sorted(result)
-
-
-def get_unique_value_from_summary_ext(test_summary, index_key, index_val):
- """ Gets list of unique target names and return dictionary
- """
- result = {}
- for test in test_summary:
- key = test[index_key]
- val = test[index_val]
- if key not in result:
- result[key] = val
- return result
-
-
-def show_json_file_format_error(json_spec_filename, line, column):
- """ Prints JSON broken content
- """
- with open(json_spec_filename) as data_file:
- line_no = 1
- for json_line in data_file:
- if line_no + 5 >= line: # Print last few lines before error
- print 'Line %d:\t'%line_no + json_line, # Prints line
- if line_no == line:
- print ' ' * len('Line %d:'%line_no) + '\t', '-' * (column-1) + '^'
- break
- line_no += 1
-
-
-def json_format_error_defect_pos(json_error_msg):
- """ Gets first error line and column in JSON file format.
- Parsed from exception thrown by json.loads() string
- """
- result = None
- line, column = 0, 0
- # Line value search
- line_search = re.search('line [0-9]+', json_error_msg)
- if line_search is not None:
- ls = line_search.group().split(' ')
- if len(ls) == 2:
- line = int(ls[1])
- # Column position search
- column_search = re.search('column [0-9]+', json_error_msg)
- if column_search is not None:
- cs = column_search.group().split(' ')
- if len(cs) == 2:
- column = int(cs[1])
- result = [line, column]
- return result
-
-
-def get_json_data_from_file(json_spec_filename, verbose=False):
- """ Loads from file JSON formatted string to data structure
- """
- result = None
- try:
- with open(json_spec_filename) as data_file:
- try:
- result = json.load(data_file)
- except ValueError as json_error_msg:
- result = None
- print 'JSON file %s parsing failed. Reason: %s' % (json_spec_filename, json_error_msg)
- # We can print where error occurred inside JSON file if we can parse exception msg
- json_format_defect_pos = json_format_error_defect_pos(str(json_error_msg))
- if json_format_defect_pos is not None:
- line = json_format_defect_pos[0]
- column = json_format_defect_pos[1]
- print
- show_json_file_format_error(json_spec_filename, line, column)
-
- except IOError as fileopen_error_msg:
- print 'JSON file %s not opened. Reason: %s'% (json_spec_filename, fileopen_error_msg)
- print
- if verbose and result:
- pp = pprint.PrettyPrinter(indent=4)
- pp.pprint(result)
- return result
-
-
-def print_muts_configuration_from_json(json_data, join_delim=", ", platform_filter=None):
- """ Prints MUTs configuration passed to test script for verboseness
- """
- muts_info_cols = []
- # We need to check all unique properties for each defined MUT
- for k in json_data:
- mut_info = json_data[k]
- for mut_property in mut_info:
- if mut_property not in muts_info_cols:
- muts_info_cols.append(mut_property)
-
- # Prepare pretty table object to display all MUTs
- pt_cols = ["index"] + muts_info_cols
- pt = PrettyTable(pt_cols)
- for col in pt_cols:
- pt.align[col] = "l"
-
- # Add rows to pretty print object
- for k in json_data:
- row = [k]
- mut_info = json_data[k]
-
- add_row = True
- if platform_filter and 'mcu' in mut_info:
- add_row = re.search(platform_filter, mut_info['mcu']) is not None
- if add_row:
- for col in muts_info_cols:
- cell_val = mut_info[col] if col in mut_info else None
- if type(cell_val) == ListType:
- cell_val = join_delim.join(cell_val)
- row.append(cell_val)
- pt.add_row(row)
- return pt.get_string()
-
-
-def print_test_configuration_from_json(json_data, join_delim=", "):
- """ Prints test specification configuration passed to test script for verboseness
- """
- toolchains_info_cols = []
- # We need to check all toolchains for each device
- for k in json_data:
- # k should be 'targets'
- targets = json_data[k]
- for target in targets:
- toolchains = targets[target]
- for toolchain in toolchains:
- if toolchain not in toolchains_info_cols:
- toolchains_info_cols.append(toolchain)
-
- # Prepare pretty table object to display test specification
- pt_cols = ["mcu"] + sorted(toolchains_info_cols)
- pt = PrettyTable(pt_cols)
- for col in pt_cols:
- pt.align[col] = "l"
-
- # { target : [conflicted toolchains] }
- toolchain_conflicts = {}
- toolchain_path_conflicts = []
- for k in json_data:
- # k should be 'targets'
- targets = json_data[k]
- for target in targets:
- target_supported_toolchains = get_target_supported_toolchains(target)
- if not target_supported_toolchains:
- target_supported_toolchains = []
- target_name = target if target in TARGET_MAP else "%s*"% target
- row = [target_name]
- toolchains = targets[target]
-
- for toolchain in sorted(toolchains_info_cols):
- # Check for conflicts: target vs toolchain
- conflict = False
- conflict_path = False
- if toolchain in toolchains:
- if toolchain not in target_supported_toolchains:
- conflict = True
- if target not in toolchain_conflicts:
- toolchain_conflicts[target] = []
- toolchain_conflicts[target].append(toolchain)
- # Add marker inside table about target usage / conflict
- cell_val = 'Yes' if toolchain in toolchains else '-'
- if conflict:
- cell_val += '*'
- # Check for conflicts: toolchain vs toolchain path
- if toolchain in TOOLCHAIN_BIN_PATH:
- toolchain_path = TOOLCHAIN_BIN_PATH[toolchain]
- if not os.path.isdir(toolchain_path):
- conflict_path = True
- if toolchain not in toolchain_path_conflicts:
- toolchain_path_conflicts.append(toolchain)
- if conflict_path:
- cell_val += '#'
- row.append(cell_val)
- pt.add_row(row)
-
- # generate result string
- result = pt.get_string() # Test specification table
- if toolchain_conflicts or toolchain_path_conflicts:
- result += "\n"
- result += "Toolchain conflicts:\n"
- for target in toolchain_conflicts:
- if target not in TARGET_MAP:
- result += "\t* Target %s unknown\n"% (target)
- conflict_target_list = join_delim.join(toolchain_conflicts[target])
- sufix = 's' if len(toolchain_conflicts[target]) > 1 else ''
- result += "\t* Target %s does not support %s toolchain%s\n"% (target, conflict_target_list, sufix)
-
- for toolchain in toolchain_path_conflicts:
- # Let's check toolchain configuration
- if toolchain in TOOLCHAIN_BIN_PATH:
- toolchain_path = TOOLCHAIN_BIN_PATH[toolchain]
- if not os.path.isdir(toolchain_path):
- result += "\t# Toolchain %s path not found: %s\n"% (toolchain, toolchain_path)
- return result
-
-
-def get_avail_tests_summary_table(cols=None, result_summary=True, join_delim=',',platform_filter=None):
- """ Generates table summary with all test cases and additional test cases
- information using pretty print functionality. Allows test suite user to
- see test cases
- """
- # get all unique test ID prefixes
- unique_test_id = []
- for test in TESTS:
- split = test['id'].split('_')[:-1]
- test_id_prefix = '_'.join(split)
- if test_id_prefix not in unique_test_id:
- unique_test_id.append(test_id_prefix)
- unique_test_id.sort()
- counter_dict_test_id_types = dict((t, 0) for t in unique_test_id)
- counter_dict_test_id_types_all = dict((t, 0) for t in unique_test_id)
-
- test_properties = ['id',
- 'automated',
- 'description',
- 'peripherals',
- 'host_test',
- 'duration'] if cols is None else cols
-
- # All tests status table print
- pt = PrettyTable(test_properties)
- for col in test_properties:
- pt.align[col] = "l"
- pt.align['duration'] = "r"
-
- counter_all = 0
- counter_automated = 0
- pt.padding_width = 1 # One space between column edges and contents (default)
-
- for test_id in sorted(TEST_MAP.keys()):
- if platform_filter is not None:
- # FIlter out platforms using regex
- if re.search(platform_filter, test_id) is None:
- continue
- row = []
- test = TEST_MAP[test_id]
- split = test_id.split('_')[:-1]
- test_id_prefix = '_'.join(split)
-
- for col in test_properties:
- col_value = test[col]
- if type(test[col]) == ListType:
- col_value = join_delim.join(test[col])
- elif test[col] == None:
- col_value = "-"
-
- row.append(col_value)
- if test['automated'] == True:
- counter_dict_test_id_types[test_id_prefix] += 1
- counter_automated += 1
- pt.add_row(row)
- # Update counters
- counter_all += 1
- counter_dict_test_id_types_all[test_id_prefix] += 1
- result = pt.get_string()
- result += "\n\n"
-
- if result_summary and not platform_filter:
- # Automation result summary
- test_id_cols = ['automated', 'all', 'percent [%]', 'progress']
- pt = PrettyTable(test_id_cols)
- pt.align['automated'] = "r"
- pt.align['all'] = "r"
- pt.align['percent [%]'] = "r"
-
- percent_progress = round(100.0 * counter_automated / float(counter_all), 1)
- str_progress = progress_bar(percent_progress, 75)
- pt.add_row([counter_automated, counter_all, percent_progress, str_progress])
- result += "Automation coverage:\n"
- result += pt.get_string()
- result += "\n\n"
-
- # Test automation coverage table print
- test_id_cols = ['id', 'automated', 'all', 'percent [%]', 'progress']
- pt = PrettyTable(test_id_cols)
- pt.align['id'] = "l"
- pt.align['automated'] = "r"
- pt.align['all'] = "r"
- pt.align['percent [%]'] = "r"
- for unique_id in unique_test_id:
- # print "\t\t%s: %d / %d" % (unique_id, counter_dict_test_id_types[unique_id], counter_dict_test_id_types_all[unique_id])
- percent_progress = round(100.0 * counter_dict_test_id_types[unique_id] / float(counter_dict_test_id_types_all[unique_id]), 1)
- str_progress = progress_bar(percent_progress, 75)
- row = [unique_id,
- counter_dict_test_id_types[unique_id],
- counter_dict_test_id_types_all[unique_id],
- percent_progress,
- "[" + str_progress + "]"]
- pt.add_row(row)
- result += "Test automation coverage:\n"
- result += pt.get_string()
- result += "\n\n"
- return result
-
-
-def progress_bar(percent_progress, saturation=0):
- """ This function creates progress bar with optional simple saturation mark
- """
- step = int(percent_progress / 2) # Scale by to (scale: 1 - 50)
- str_progress = '#' * step + '.' * int(50 - step)
- c = '!' if str_progress[38] == '.' else '|'
- if saturation > 0:
- saturation = saturation / 2
- str_progress = str_progress[:saturation] + c + str_progress[saturation:]
- return str_progress
-
-
-def singletest_in_cli_mode(single_test):
- """ Runs SingleTestRunner object in CLI (Command line interface) mode
-
- @return returns success code (0 == success) for building and running tests
- """
- start = time()
- # Execute tests depending on options and filter applied
- test_summary, shuffle_seed, test_summary_ext, test_suite_properties_ext, build_report, build_properties = single_test.execute()
- elapsed_time = time() - start
-
- # Human readable summary
- if not single_test.opts_suppress_summary:
- # prints well-formed summary with results (SQL table like)
- print single_test.generate_test_summary(test_summary, shuffle_seed)
- if single_test.opts_test_x_toolchain_summary:
- # prints well-formed summary with results (SQL table like)
- # table shows text x toolchain test result matrix
- print single_test.generate_test_summary_by_target(test_summary, shuffle_seed)
-
- print "Completed in %.2f sec"% (elapsed_time)
- print
- # Write summary of the builds
-
- print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build")
- status = print_report_exporter.report(build_report)
-
- # Store extra reports in files
- if single_test.opts_report_html_file_name:
- # Export results in form of HTML report to separate file
- report_exporter = ReportExporter(ResultExporterType.HTML)
- report_exporter.report_to_file(test_summary_ext, single_test.opts_report_html_file_name, test_suite_properties=test_suite_properties_ext)
- if single_test.opts_report_junit_file_name:
- # Export results in form of JUnit XML report to separate file
- report_exporter = ReportExporter(ResultExporterType.JUNIT)
- report_exporter.report_to_file(test_summary_ext, single_test.opts_report_junit_file_name, test_suite_properties=test_suite_properties_ext)
- if single_test.opts_report_build_file_name:
- # Export build results as html report to sparate file
- report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build")
- report_exporter.report_to_file(build_report, single_test.opts_report_build_file_name, test_suite_properties=build_properties)
-
- # Returns True if no build failures of the test projects or their dependencies
- return status
-
-class TestLogger():
- """ Super-class for logging and printing ongoing events for test suite pass
- """
- def __init__(self, store_log=True):
- """ We can control if logger actually stores log in memory
- or just handled all log entries immediately
- """
- self.log = []
- self.log_to_file = False
- self.log_file_name = None
- self.store_log = store_log
-
- self.LogType = construct_enum(INFO='Info',
- WARN='Warning',
- NOTIF='Notification',
- ERROR='Error',
- EXCEPT='Exception')
-
- self.LogToFileAttr = construct_enum(CREATE=1, # Create or overwrite existing log file
- APPEND=2) # Append to existing log file
-
- def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'):
- """ Log one line of text
- """
- log_timestamp = time()
- log_entry = {'log_type' : LogType,
- 'log_timestamp' : log_timestamp,
- 'log_line' : log_line,
- '_future' : None
- }
- # Store log in memory
- if self.store_log:
- self.log.append(log_entry)
- return log_entry
-
-
-class CLITestLogger(TestLogger):
- """ Logger used with CLI (Command line interface) test suite. Logs on screen and to file if needed
- """
- def __init__(self, store_log=True, file_name=None):
- TestLogger.__init__(self)
- self.log_file_name = file_name
- #self.TIMESTAMP_FORMAT = '%y-%m-%d %H:%M:%S' # Full date and time
- self.TIMESTAMP_FORMAT = '%H:%M:%S' # Time only
-
- def log_print(self, log_entry, timestamp=True):
- """ Prints on screen formatted log entry
- """
- ts = log_entry['log_timestamp']
- timestamp_str = datetime.datetime.fromtimestamp(ts).strftime("[%s] "% self.TIMESTAMP_FORMAT) if timestamp else ''
- log_line_str = "%(log_type)s: %(log_line)s"% (log_entry)
- return timestamp_str + log_line_str
-
- def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'):
- """ Logs line, if log file output was specified log line will be appended
- at the end of log file
- """
- log_entry = TestLogger.log_line(self, LogType, log_line)
- log_line_str = self.log_print(log_entry, timestamp)
- if self.log_file_name is not None:
- try:
- with open(self.log_file_name, 'a') as f:
- f.write(log_line_str + line_delim)
- except IOError:
- pass
- return log_line_str
-
-
-def factory_db_logger(db_url):
- """ Factory database driver depending on database type supplied in database connection string db_url
- """
- if db_url is not None:
- from workspace_tools.test_mysql import MySQLDBAccess
- connection_info = BaseDBAccess().parse_db_connection_string(db_url)
- if connection_info is not None:
- (db_type, username, password, host, db_name) = BaseDBAccess().parse_db_connection_string(db_url)
- if db_type == 'mysql':
- return MySQLDBAccess()
- return None
-
-
-def detect_database_verbose(db_url):
- """ uses verbose mode (prints) database detection sequence to check it database connection string is valid
- """
- result = BaseDBAccess().parse_db_connection_string(db_url)
- if result is not None:
- # Parsing passed
- (db_type, username, password, host, db_name) = result
- #print "DB type '%s', user name '%s', password '%s', host '%s', db name '%s'"% result
- # Let's try to connect
- db_ = factory_db_logger(db_url)
- if db_ is not None:
- print "Connecting to database '%s'..."% db_url,
- db_.connect(host, username, password, db_name)
- if db_.is_connected():
- print "ok"
- print "Detecting database..."
- print db_.detect_database(verbose=True)
- print "Disconnecting...",
- db_.disconnect()
- print "done"
- else:
- print "Database type '%s' unknown"% db_type
- else:
- print "Parse error: '%s' - DB Url error"% (db_url)
-
-
-def get_module_avail(module_name):
- """ This function returns True if module_name is already impored module
- """
- return module_name in sys.modules.keys()
-
-
-def get_autodetected_MUTS_list(platform_name_filter=None):
- oldError = None
- if os.name == 'nt':
- # Disable Windows error box temporarily
- oldError = ctypes.windll.kernel32.SetErrorMode(1) #note that SEM_FAILCRITICALERRORS = 1
-
- mbeds = mbed_lstools.create()
- detect_muts_list = mbeds.list_mbeds()
-
- if os.name == 'nt':
- ctypes.windll.kernel32.SetErrorMode(oldError)
-
- return get_autodetected_MUTS(detect_muts_list, platform_name_filter=platform_name_filter)
-
-def get_autodetected_MUTS(mbeds_list, platform_name_filter=None):
- """ Function detects all connected to host mbed-enabled devices and generates artificial MUTS file.
- If function fails to auto-detect devices it will return empty dictionary.
-
- if get_module_avail('mbed_lstools'):
- mbeds = mbed_lstools.create()
- mbeds_list = mbeds.list_mbeds()
-
- @param mbeds_list list of mbeds captured from mbed_lstools
- @param platform_name You can filter 'platform_name' with list of filtered targets from 'platform_name_filter'
- """
- result = {} # Should be in muts_all.json format
- # Align mbeds_list from mbed_lstools to MUT file format (JSON dictionary with muts)
- # mbeds_list = [{'platform_name': 'NUCLEO_F302R8', 'mount_point': 'E:', 'target_id': '07050200623B61125D5EF72A', 'serial_port': u'COM34'}]
- index = 1
- for mut in mbeds_list:
- # Filter the MUTS if a filter is specified
-
- if platform_name_filter and not mut['platform_name'] in platform_name_filter:
- continue
-
- # For mcu_unique - we are assigning 'platform_name_unique' value from mbedls output (if its existing)
- # if not we are creating our own unique value (last few chars from platform's target_id).
- m = {'mcu': mut['platform_name'],
- 'mcu_unique' : mut['platform_name_unique'] if 'platform_name_unique' in mut else "%s[%s]" % (mut['platform_name'], mut['target_id'][-4:]),
- 'port': mut['serial_port'],
- 'disk': mut['mount_point'],
- 'peripherals': [] # No peripheral detection
- }
- if index not in result:
- result[index] = {}
- result[index] = m
- index += 1
- return result
-
-
-def get_autodetected_TEST_SPEC(mbeds_list,
- use_default_toolchain=True,
- use_supported_toolchains=False,
- toolchain_filter=None,
- platform_name_filter=None):
- """ Function detects all connected to host mbed-enabled devices and generates artificial test_spec file.
- If function fails to auto-detect devices it will return empty 'targets' test_spec description.
-
- use_default_toolchain - if True add default toolchain to test_spec
- use_supported_toolchains - if True add all supported toolchains to test_spec
- toolchain_filter - if [...list of toolchains...] add from all toolchains only those in filter to test_spec
- """
- result = {'targets': {} }
-
- for mut in mbeds_list:
- mcu = mut['mcu']
- if platform_name_filter is None or (platform_name_filter and mut['mcu'] in platform_name_filter):
- if mcu in TARGET_MAP:
- default_toolchain = TARGET_MAP[mcu].default_toolchain
- supported_toolchains = TARGET_MAP[mcu].supported_toolchains
-
- # Decide which toolchains should be added to test specification toolchain pool for each target
- toolchains = []
- if use_default_toolchain:
- toolchains.append(default_toolchain)
- if use_supported_toolchains:
- toolchains += supported_toolchains
- if toolchain_filter is not None:
- all_toolchains = supported_toolchains + [default_toolchain]
- for toolchain in toolchain_filter.split(','):
- if toolchain in all_toolchains:
- toolchains.append(toolchain)
-
- result['targets'][mcu] = list(set(toolchains))
- return result
-
-
-def get_default_test_options_parser():
- """ Get common test script options used by CLI, web services etc.
- """
- parser = optparse.OptionParser()
- parser.add_option('-i', '--tests',
- dest='test_spec_filename',
- metavar="FILE",
- help='Points to file with test specification')
-
- parser.add_option('-M', '--MUTS',
- dest='muts_spec_filename',
- metavar="FILE",
- help='Points to file with MUTs specification (overwrites settings.py and private_settings.py)')
-
- parser.add_option("-j", "--jobs",
- dest='jobs',
- metavar="NUMBER",
- type="int",
- help="Define number of compilation jobs. Default value is 1")
-
- if get_module_avail('mbed_lstools'):
- # Additional features available when mbed_lstools is installed on host and imported
- # mbed_lstools allow users to detect connected to host mbed-enabled devices
- parser.add_option('', '--auto',
- dest='auto_detect',
- metavar=False,
- action="store_true",
- help='Use mbed-ls module to detect all connected mbed devices')
-
- parser.add_option('', '--tc',
- dest='toolchains_filter',
- help="Toolchain filter for --auto option. Use toolchains names separated by comma, 'default' or 'all' to select toolchains")
-
- test_scopes = ','.join(["'%s'" % n for n in get_available_oper_test_scopes()])
- parser.add_option('', '--oper',
- dest='operability_checks',
- help='Perform interoperability tests between host and connected mbed devices. Available test scopes are: %s' % test_scopes)
-
- parser.add_option('', '--clean',
- dest='clean',
- metavar=False,
- action="store_true",
- help='Clean the build directory')
-
- parser.add_option('-P', '--only-peripherals',
- dest='test_only_peripheral',
- default=False,
- action="store_true",
- help='Test only peripheral declared for MUT and skip common tests')
-
- parser.add_option('-C', '--only-commons',
- dest='test_only_common',
- default=False,
- action="store_true",
- help='Test only board internals. Skip perpherials tests and perform common tests')
-
- parser.add_option('-n', '--test-by-names',
- dest='test_by_names',
- help='Runs only test enumerated it this switch. Use comma to separate test case names')
-
- parser.add_option('-p', '--peripheral-by-names',
- dest='peripheral_by_names',
- help='Forces discovery of particular peripherals. Use comma to separate peripheral names')
-
- copy_methods = host_tests_plugins.get_plugin_caps('CopyMethod')
- copy_methods_str = "Plugin support: " + ', '.join(copy_methods)
-
- parser.add_option('-c', '--copy-method',
- dest='copy_method',
- help="Select binary copy (flash) method. Default is Python's shutil.copy() method. %s"% copy_methods_str)
-
- reset_methods = host_tests_plugins.get_plugin_caps('ResetMethod')
- reset_methods_str = "Plugin support: " + ', '.join(reset_methods)
-
- parser.add_option('-r', '--reset-type',
- dest='mut_reset_type',
- default=None,
- help='Extra reset method used to reset MUT by host test script. %s'% reset_methods_str)
-
- parser.add_option('-g', '--goanna-for-tests',
- dest='goanna_for_tests',
- metavar=False,
- action="store_true",
- help='Run Goanna static analyse tool for tests. (Project will be rebuilded)')
-
- parser.add_option('-G', '--goanna-for-sdk',
- dest='goanna_for_mbed_sdk',
- metavar=False,
- action="store_true",
- help='Run Goanna static analyse tool for mbed SDK (Project will be rebuilded)')
-
- parser.add_option('-s', '--suppress-summary',
- dest='suppress_summary',
- default=False,
- action="store_true",
- help='Suppresses display of wellformatted table with test results')
-
- parser.add_option('-t', '--test-summary',
- dest='test_x_toolchain_summary',
- default=False,
- action="store_true",
- help='Displays wellformatted table with test x toolchain test result per target')
-
- parser.add_option('-A', '--test-automation-report',
- dest='test_automation_report',
- default=False,
- action="store_true",
- help='Prints information about all tests and exits')
-
- parser.add_option('-R', '--test-case-report',
- dest='test_case_report',
- default=False,
- action="store_true",
- help='Prints information about all test cases and exits')
-
- parser.add_option("-S", "--supported-toolchains",
- action="store_true",
- dest="supported_toolchains",
- default=False,
- help="Displays supported matrix of MCUs and toolchains")
-
- parser.add_option("-O", "--only-build",
- action="store_true",
- dest="only_build_tests",
- default=False,
- help="Only build tests, skips actual test procedures (flashing etc.)")
-
- parser.add_option('', '--parallel',
- dest='parallel_test_exec',
- default=False,
- action="store_true",
- help='Experimental, you execute test runners for connected to your host MUTs in parallel (speeds up test result collection)')
-
- parser.add_option('', '--config',
- dest='verbose_test_configuration_only',
- default=False,
- action="store_true",
- help='Displays full test specification and MUTs configration and exits')
-
- parser.add_option('', '--loops',
- dest='test_loops_list',
- help='Set no. of loops per test. Format: TEST_1=1,TEST_2=2,TEST_3=3')
-
- parser.add_option('', '--global-loops',
- dest='test_global_loops_value',
- help='Set global number of test loops per test. Default value is set 1')
-
- parser.add_option('', '--consolidate-waterfall',
- dest='consolidate_waterfall_test',
- default=False,
- action="store_true",
- help='Used with --waterfall option. Adds only one test to report reflecting outcome of waterfall test.')
-
- parser.add_option('-W', '--waterfall',
- dest='waterfall_test',
- default=False,
- action="store_true",
- help='Used with --loops or --global-loops options. Tests until OK result occurs and assumes test passed')
-
- parser.add_option('-N', '--firmware-name',
- dest='firmware_global_name',
- help='Set global name for all produced projects. Note, proper file extension will be added by buid scripts')
-
- parser.add_option('-u', '--shuffle',
- dest='shuffle_test_order',
- default=False,
- action="store_true",
- help='Shuffles test execution order')
-
- parser.add_option('', '--shuffle-seed',
- dest='shuffle_test_seed',
- default=None,
- help='Shuffle seed (If you want to reproduce your shuffle order please use seed provided in test summary)')
-
- parser.add_option('-f', '--filter',
- dest='general_filter_regex',
- default=None,
- help='For some commands you can use filter to filter out results')
-
- parser.add_option('', '--inc-timeout',
- dest='extend_test_timeout',
- metavar="NUMBER",
- type="int",
- help='You can increase global timeout for each test by specifying additional test timeout in seconds')
-
- parser.add_option('', '--db',
- dest='db_url',
- help='This specifies what database test suite uses to store its state. To pass DB connection info use database connection string. Example: \'mysql://username:password@127.0.0.1/db_name\'')
-
- parser.add_option('-l', '--log',
- dest='log_file_name',
- help='Log events to external file (note not all console entries may be visible in log file)')
-
- parser.add_option('', '--report-html',
- dest='report_html_file_name',
- help='You can log test suite results in form of HTML report')
-
- parser.add_option('', '--report-junit',
- dest='report_junit_file_name',
- help='You can log test suite results in form of JUnit compliant XML report')
-
- parser.add_option("", "--report-build",
- dest="report_build_file_name",
- help="Output the build results to a junit xml file")
-
- parser.add_option('', '--verbose-skipped',
- dest='verbose_skipped_tests',
- default=False,
- action="store_true",
- help='Prints some extra information about skipped tests')
-
- parser.add_option('-V', '--verbose-test-result',
- dest='verbose_test_result_only',
- default=False,
- action="store_true",
- help='Prints test serial output')
-
- parser.add_option('-v', '--verbose',
- dest='verbose',
- default=False,
- action="store_true",
- help='Verbose mode (prints some extra information)')
-
- parser.add_option('', '--version',
- dest='version',
- default=False,
- action="store_true",
- help='Prints script version and exits')
- return parser
diff --git a/workspace_tools/test_db.py b/workspace_tools/test_db.py
deleted file mode 100644
index 2ec301a..0000000
--- a/workspace_tools/test_db.py
+++ /dev/null
@@ -1,165 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-"""
-
-import re
-import json
-
-
-class BaseDBAccess():
- """ Class used to connect with test database and store test results
- """
- def __init__(self):
- self.db_object = None
- self.db_type = None
- # Connection credentials
- self.host = None
- self.user = None
- self.passwd = None
- self.db = None
-
- # Test Suite DB scheme (table names)
- self.TABLE_BUILD_ID = 'mtest_build_id'
- self.TABLE_BUILD_ID_STATUS = 'mtest_build_id_status'
- self.TABLE_BUILD_ID_TYPE = 'mtest_build_id_type'
- self.TABLE_TARGET = 'mtest_target'
- self.TABLE_TEST_ENTRY = 'mtest_test_entry'
- self.TABLE_TEST_ID = 'mtest_test_id'
- self.TABLE_TEST_RESULT = 'mtest_test_result'
- self.TABLE_TEST_TYPE = 'mtest_test_type'
- self.TABLE_TOOLCHAIN = 'mtest_toolchain'
- # Build ID status PKs
- self.BUILD_ID_STATUS_STARTED = 1 # Started
- self.BUILD_ID_STATUS_IN_PROGRESS = 2 # In Progress
- self.BUILD_ID_STATUS_COMPLETED = 3 #Completed
- self.BUILD_ID_STATUS_FAILED = 4 # Failed
- # Build ID type PKs
- self.BUILD_ID_TYPE_TEST = 1 # Test
- self.BUILD_ID_TYPE_BUILD_ONLY = 2 # Build Only
-
- def get_hostname(self):
- """ Useful when creating build_id in database
- Function returns (hostname, uname) which can be used as (build_id_name, build_id_desc)
- """
- # Get hostname from socket
- import socket
- hostname = socket.gethostbyaddr(socket.gethostname())[0]
- # Get uname from platform resources
- import platform
- uname = json.dumps(platform.uname())
- return (hostname, uname)
-
- def get_db_type(self):
- """ Returns database type. E.g. 'mysql', 'sqlLite' etc.
- """
- return self.db_type
-
- def detect_database(self, verbose=False):
- """ detect database and return VERION data structure or string (verbose=True)
- """
- return None
-
- def parse_db_connection_string(self, str):
- """ Parsing SQL DB connection string. String should contain:
- - DB Name, user name, password, URL (DB host), name
- Function should return tuple with parsed (db_type, username, password, host, db_name) or None if error
-
- (db_type, username, password, host, db_name) = self.parse_db_connection_string(db_url)
-
- E.g. connection string: 'mysql://username:password@127.0.0.1/db_name'
- """
- result = None
- if type(str) == type(''):
- PATTERN = '^([\w]+)://([\w]+):([\w]*)@(.*)/([\w]+)'
- result = re.match(PATTERN, str)
- if result is not None:
- result = result.groups() # Tuple (db_name, host, user, passwd, db)
- return result # (db_type, username, password, host, db_name)
-
- def is_connected(self):
- """ Returns True if we are connected to database
- """
- pass
-
- def connect(self, host, user, passwd, db):
- """ Connects to DB and returns DB object
- """
- pass
-
- def connect_url(self, db_url):
- """ Connects to database using db_url (database url parsing),
- store host, username, password, db_name
- """
- pass
-
- def reconnect(self):
- """ Reconnects to DB and returns DB object using stored host name,
- database name and credentials (user name and password)
- """
- pass
-
- def disconnect(self):
- """ Close DB connection
- """
- pass
-
- def escape_string(self, str):
- """ Escapes string so it can be put in SQL query between quotes
- """
- pass
-
- def select_all(self, query):
- """ Execute SELECT query and get all results
- """
- pass
-
- def insert(self, query, commit=True):
- """ Execute INSERT query, define if you want to commit
- """
- pass
-
- def get_next_build_id(self, name, desc='', location='', type=None, status=None):
- """ Insert new build_id (DB unique build like ID number to send all test results)
- """
- pass
-
- def get_table_entry_pk(self, table, column, value, update_db=True):
- """ Checks for entries in tables with two columns (_pk, )
- If update_db is True updates table entry if value in specified column doesn't exist
- """
- pass
-
- def update_table_entry(self, table, column, value):
- """ Updates table entry if value in specified column doesn't exist
- Locks table to perform atomic read + update
- """
- pass
-
- def update_build_id_info(self, build_id, **kw):
- """ Update additional data inside build_id table
- Examples:
- db.update_build_is(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789):
- """
- pass
-
- def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_time, test_timeout, test_loop, test_extra=''):
- """ Inserts test result entry to database. All checks regarding existing
- toolchain names in DB are performed.
- If some data is missing DB will be updated
- """
- pass
diff --git a/workspace_tools/test_exporters.py b/workspace_tools/test_exporters.py
deleted file mode 100644
index 623acd6..0000000
--- a/workspace_tools/test_exporters.py
+++ /dev/null
@@ -1,342 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-"""
-
-from workspace_tools.utils import construct_enum
-
-
-ResultExporterType = construct_enum(HTML='Html_Exporter',
- JUNIT='JUnit_Exporter',
- JUNIT_OPER='JUnit_Exporter_Interoperability',
- BUILD='Build_Exporter',
- PRINT='Print_Exporter')
-
-
-class ReportExporter():
- """ Class exports extended test result Python data structure to
- different formats like HTML, JUnit XML.
-
- Parameter 'test_result_ext' format:
-
- u'uARM': { u'LPC1768': { 'MBED_2': { 0: { 'copy_method': 'shutils.copy()',
- 'duration': 20,
- 'elapsed_time': 1.7929999828338623,
- 'output': 'Host test instrumentation on ...\r\n',
- 'result': 'OK',
- 'target_name': u'LPC1768',
- 'description': 'stdio',
- 'id': u'MBED_2',
- 'toolchain_name': u'uARM'}},
- """
- CSS_STYLE = """
- """
-
- JAVASCRIPT = """
-
- """
-
- def __init__(self, result_exporter_type, package="test"):
- self.result_exporter_type = result_exporter_type
- self.package = package
-
- def report(self, test_summary_ext, test_suite_properties=None):
- """ Invokes report depending on exporter_type set in constructor
- """
- if self.result_exporter_type == ResultExporterType.HTML:
- # HTML exporter
- return self.exporter_html(test_summary_ext, test_suite_properties)
- elif self.result_exporter_type == ResultExporterType.JUNIT:
- # JUNIT exporter for results from test suite
- return self.exporter_junit(test_summary_ext, test_suite_properties)
- elif self.result_exporter_type == ResultExporterType.JUNIT_OPER:
- # JUNIT exporter for interoperability test
- return self.exporter_junit_ioper(test_summary_ext, test_suite_properties)
- elif self.result_exporter_type == ResultExporterType.PRINT:
- # JUNIT exporter for interoperability test
- return self.exporter_print(test_summary_ext)
- return None
-
- def report_to_file(self, test_summary_ext, file_name, test_suite_properties=None):
- """ Stores report to specified file
- """
- report = self.report(test_summary_ext, test_suite_properties=test_suite_properties)
- self.write_to_file(report, file_name)
-
- def write_to_file(self, report, file_name):
- if report is not None:
- with open(file_name, 'w') as f:
- f.write(report)
-
- def get_tooltip_name(self, toolchain, target, test_id, loop_no):
- """ Generate simple unique tool-tip name which can be used.
- For example as HTML
- """% (result_div_style,
- tooltip_name,
- tooltip_name,
- test['result'],
- tooltip_name,
- test['target_name_unique'],
- test['description'],
- test['elapsed_time'],
- test['output'].replace('\n', ' '))
- return result
-
- def get_result_tree(self, test_results):
- """ If test was run in a loop (we got few results from the same test)
- we will show it in a column to see all results.
- This function produces HTML table with corresponding results.
- """
- result = ''
- for i, test_result in enumerate(test_results):
- result += '
'
- test_ids = sorted(test_result.keys())
- for test_no in test_ids:
- test = test_result[test_no]
- result += """
-
%s
-
"""% self.get_result_div_sections(test, "%d_%d" % (test_no, i))
- result += '
'
- return result
-
- def get_all_unique_test_ids(self, test_result_ext):
- """ Gets all unique test ids from all ran tests.
- We need this to create complete list of all test ran.
- """
- result = []
- targets = test_result_ext.keys()
- for target in targets:
- toolchains = test_result_ext[target].keys()
- for toolchain in toolchains:
- tests = test_result_ext[target][toolchain].keys()
- result.extend(tests)
- return sorted(list(set(result)))
-
- #
- # Exporters functions
- #
-
- def exporter_html(self, test_result_ext, test_suite_properties=None):
- """ Export test results in proprietary HTML format.
- """
- result = """
-
- mbed SDK test suite test result report
- %s
- %s
-
-
- """% (self.CSS_STYLE, self.JAVASCRIPT)
-
- unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
- targets = sorted(test_result_ext.keys())
- result += '
'
- for target in targets:
- toolchains = sorted(test_result_ext[target].keys())
- for toolchain in toolchains:
- result += '
'
- result += '
'
-
- tests = sorted(test_result_ext[target][toolchain].keys())
- for test in unique_test_ids:
- result += """
%s
"""% test
- result += """
-
-
%s
-
%s
- """% (toolchain, target)
-
- for test in unique_test_ids:
- test_result = self.get_result_tree(test_result_ext[target][toolchain][test]) if test in tests else ''
- result += '
%s
'% (test_result)
-
- result += '
'
- result += '
'
- result += ''
- return result
-
- def exporter_junit_ioper(self, test_result_ext, test_suite_properties=None):
- from junit_xml import TestSuite, TestCase
- test_suites = []
- test_cases = []
-
- for platform in sorted(test_result_ext.keys()):
- # {platform : ['Platform', 'Result', 'Scope', 'Description'])
- test_cases = []
- for tr_result in test_result_ext[platform]:
- result, name, scope, description = tr_result
-
- classname = 'test.ioper.%s.%s.%s' % (platform, name, scope)
- elapsed_sec = 0
- _stdout = description
- _stderr = ''
- # Test case
- tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
- # Test case extra failure / error info
- if result == 'FAIL':
- tc.add_failure_info(description, _stdout)
- elif result == 'ERROR':
- tc.add_error_info(description, _stdout)
- elif result == 'SKIP' or result == 'NOT_SUPPORTED':
- tc.add_skipped_info(description, _stdout)
-
- test_cases.append(tc)
- ts = TestSuite("test.suite.ioper.%s" % (platform), test_cases)
- test_suites.append(ts)
- return TestSuite.to_xml_string(test_suites)
-
- def exporter_junit(self, test_result_ext, test_suite_properties=None):
- """ Export test results in JUnit XML compliant format
- """
- from junit_xml import TestSuite, TestCase
- test_suites = []
- test_cases = []
-
- targets = sorted(test_result_ext.keys())
- for target in targets:
- toolchains = sorted(test_result_ext[target].keys())
- for toolchain in toolchains:
- test_cases = []
- tests = sorted(test_result_ext[target][toolchain].keys())
- for test in tests:
- test_results = test_result_ext[target][toolchain][test]
- for test_res in test_results:
- test_ids = sorted(test_res.keys())
- for test_no in test_ids:
- test_result = test_res[test_no]
- name = test_result['description']
- classname = '%s.%s.%s.%s'% (self.package, target, toolchain, test_result['id'])
- elapsed_sec = test_result['elapsed_time']
- _stdout = test_result['output']
-
- if 'target_name_unique' in test_result:
- _stderr = test_result['target_name_unique']
- else:
- _stderr = test_result['target_name']
-
- # Test case
- tc = TestCase(name, classname, elapsed_sec, _stdout, _stderr)
-
- # Test case extra failure / error info
- message = test_result['result']
- if test_result['result'] == 'FAIL':
- tc.add_failure_info(message, _stdout)
- elif test_result['result'] == 'SKIP' or test_result["result"] == 'NOT_SUPPORTED':
- tc.add_skipped_info(message, _stdout)
- elif test_result['result'] != 'OK':
- tc.add_error_info(message, _stdout)
-
- test_cases.append(tc)
-
- ts = TestSuite("test.suite.%s.%s"% (target, toolchain), test_cases, properties=test_suite_properties[target][toolchain])
- test_suites.append(ts)
- return TestSuite.to_xml_string(test_suites)
-
- def exporter_print_helper(self, array):
- for item in array:
- print " * %s::%s::%s" % (item["target_name"], item["toolchain_name"], item["id"])
-
- def exporter_print(self, test_result_ext):
- """ Export test results in print format.
- """
- failures = []
- skips = []
- successes = []
-
- unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
- targets = sorted(test_result_ext.keys())
-
- for target in targets:
- toolchains = sorted(test_result_ext[target].keys())
- for toolchain in toolchains:
- tests = sorted(test_result_ext[target][toolchain].keys())
- for test in tests:
- test_runs = test_result_ext[target][toolchain][test]
- for test_runner in test_runs:
- #test_run = test_result_ext[target][toolchain][test][test_run_number][0]
- test_run = test_runner[0]
-
- if test_run["result"] == "FAIL":
- failures.append(test_run)
- elif test_run["result"] == "SKIP" or test_run["result"] == "NOT_SUPPORTED":
- skips.append(test_run)
- elif test_run["result"] == "OK":
- successes.append(test_run)
- else:
- raise Exception("Unhandled result type: %s" % (test_run["result"]))
-
- if successes:
- print "\n\nBuild successes:"
- self.exporter_print_helper(successes)
-
- if skips:
- print "\n\nBuild skips:"
- self.exporter_print_helper(skips)
-
- if failures:
- print "\n\nBuild failures:"
- self.exporter_print_helper(failures)
- return False
- else:
- return True
diff --git a/workspace_tools/test_mysql.py b/workspace_tools/test_mysql.py
deleted file mode 100644
index 1561dab..0000000
--- a/workspace_tools/test_mysql.py
+++ /dev/null
@@ -1,271 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-"""
-
-import re
-import MySQLdb as mdb
-
-# Imports from TEST API
-from workspace_tools.test_db import BaseDBAccess
-
-
-class MySQLDBAccess(BaseDBAccess):
- """ Wrapper for MySQL DB access for common test suite interface
- """
- def __init__(self):
- BaseDBAccess.__init__(self)
- self.DB_TYPE = 'mysql'
-
- def detect_database(self, verbose=False):
- """ detect database and return VERION data structure or string (verbose=True)
- """
- query = 'SHOW VARIABLES LIKE "%version%"'
- rows = self.select_all(query)
- if verbose:
- result = []
- for row in rows:
- result.append("\t%s: %s"% (row['Variable_name'], row['Value']))
- result = "\n".join(result)
- else:
- result = rows
- return result
-
- def parse_db_connection_string(self, str):
- """ Parsing SQL DB connection string. String should contain:
- - DB Name, user name, password, URL (DB host), name
- Function should return tuple with parsed (host, user, passwd, db) or None if error
- E.g. connection string: 'mysql://username:password@127.0.0.1/db_name'
- """
- result = BaseDBAccess().parse_db_connection_string(str)
- if result is not None:
- (db_type, username, password, host, db_name) = result
- if db_type != 'mysql':
- result = None
- return result
-
- def is_connected(self):
- """ Returns True if we are connected to database
- """
- return self.db_object is not None
-
- def connect(self, host, user, passwd, db):
- """ Connects to DB and returns DB object
- """
- try:
- self.db_object = mdb.connect(host=host, user=user, passwd=passwd, db=db)
- # Let's remember connection credentials
- self.db_type = self.DB_TYPE
- self.host = host
- self.user = user
- self.passwd = passwd
- self.db = db
- except mdb.Error, e:
- print "Error %d: %s"% (e.args[0], e.args[1])
- self.db_object = None
- self.db_type = None
- self.host = None
- self.user = None
- self.passwd = None
- self.db = None
-
- def connect_url(self, db_url):
- """ Connects to database using db_url (database url parsing),
- store host, username, password, db_name
- """
- result = self.parse_db_connection_string(db_url)
- if result is not None:
- (db_type, username, password, host, db_name) = result
- if db_type == self.DB_TYPE:
- self.connect(host, username, password, db_name)
-
- def reconnect(self):
- """ Reconnects to DB and returns DB object using stored host name,
- database name and credentials (user name and password)
- """
- self.connect(self.host, self.user, self.passwd, self.db)
-
- def disconnect(self):
- """ Close DB connection
- """
- if self.db_object:
- self.db_object.close()
- self.db_object = None
- self.db_type = None
-
- def escape_string(self, str):
- """ Escapes string so it can be put in SQL query between quotes
- """
- con = self.db_object
- result = con.escape_string(str)
- return result if result else ''
-
- def select_all(self, query):
- """ Execute SELECT query and get all results
- """
- con = self.db_object
- cur = con.cursor(mdb.cursors.DictCursor)
- cur.execute(query)
- rows = cur.fetchall()
- return rows
-
- def insert(self, query, commit=True):
- """ Execute INSERT query, define if you want to commit
- """
- con = self.db_object
- cur = con.cursor()
- cur.execute(query)
- if commit:
- con.commit()
- return cur.lastrowid
-
- def get_next_build_id(self, name, desc='', location='', type=None, status=None):
- """ Insert new build_id (DB unique build like ID number to send all test results)
- """
- if status is None:
- status = self.BUILD_ID_STATUS_STARTED
-
- if type is None:
- type = self.BUILD_ID_TYPE_TEST
-
- query = """INSERT INTO `%s` (%s_name, %s_desc, %s_location, %s_type_fk, %s_status_fk)
- VALUES ('%s', '%s', '%s', %d, %d)"""% (self.TABLE_BUILD_ID,
- self.TABLE_BUILD_ID,
- self.TABLE_BUILD_ID,
- self.TABLE_BUILD_ID,
- self.TABLE_BUILD_ID,
- self.TABLE_BUILD_ID,
- self.escape_string(name),
- self.escape_string(desc),
- self.escape_string(location),
- type,
- status)
- index = self.insert(query) # Provide inserted record PK
- return index
-
- def get_table_entry_pk(self, table, column, value, update_db=True):
- """ Checks for entries in tables with two columns (_pk, )
- If update_db is True updates table entry if value in specified column doesn't exist
- """
- # TODO: table buffering
- result = None
- table_pk = '%s_pk'% table
- query = """SELECT `%s`
- FROM `%s`
- WHERE `%s`='%s'"""% (table_pk,
- table,
- column,
- self.escape_string(value))
- rows = self.select_all(query)
- if len(rows) == 1:
- result = rows[0][table_pk]
- elif len(rows) == 0 and update_db:
- # Update DB with new value
- result = self.update_table_entry(table, column, value)
- return result
-
- def update_table_entry(self, table, column, value):
- """ Updates table entry if value in specified column doesn't exist
- Locks table to perform atomic read + update
- """
- result = None
- con = self.db_object
- cur = con.cursor()
- cur.execute("LOCK TABLES `%s` WRITE"% table)
- table_pk = '%s_pk'% table
- query = """SELECT `%s`
- FROM `%s`
- WHERE `%s`='%s'"""% (table_pk,
- table,
- column,
- self.escape_string(value))
- cur.execute(query)
- rows = cur.fetchall()
- if len(rows) == 0:
- query = """INSERT INTO `%s` (%s)
- VALUES ('%s')"""% (table,
- column,
- self.escape_string(value))
- cur.execute(query)
- result = cur.lastrowid
- con.commit()
- cur.execute("UNLOCK TABLES")
- return result
-
- def update_build_id_info(self, build_id, **kw):
- """ Update additional data inside build_id table
- Examples:
- db.update_build_id_info(build_id, _status_fk=self.BUILD_ID_STATUS_COMPLETED, _shuffle_seed=0.0123456789):
- """
- if len(kw):
- con = self.db_object
- cur = con.cursor()
- # Prepare UPDATE query
- # ["`mtest_build_id_pk`=[value-1]", "`mtest_build_id_name`=[value-2]", "`mtest_build_id_desc`=[value-3]"]
- set_list = []
- for col_sufix in kw:
- assign_str = "`%s%s`='%s'"% (self.TABLE_BUILD_ID, col_sufix, self.escape_string(str(kw[col_sufix])))
- set_list.append(assign_str)
- set_str = ', '.join(set_list)
- query = """UPDATE `%s`
- SET %s
- WHERE `mtest_build_id_pk`=%d"""% (self.TABLE_BUILD_ID,
- set_str,
- build_id)
- cur.execute(query)
- con.commit()
-
- def insert_test_entry(self, build_id, target, toolchain, test_type, test_id, test_result, test_output, test_time, test_timeout, test_loop, test_extra=''):
- """ Inserts test result entry to database. All checks regarding existing
- toolchain names in DB are performed.
- If some data is missing DB will be updated
- """
- # Get all table FK and if entry is new try to insert new value
- target_fk = self.get_table_entry_pk(self.TABLE_TARGET, self.TABLE_TARGET + '_name', target)
- toolchain_fk = self.get_table_entry_pk(self.TABLE_TOOLCHAIN, self.TABLE_TOOLCHAIN + '_name', toolchain)
- test_type_fk = self.get_table_entry_pk(self.TABLE_TEST_TYPE, self.TABLE_TEST_TYPE + '_name', test_type)
- test_id_fk = self.get_table_entry_pk(self.TABLE_TEST_ID, self.TABLE_TEST_ID + '_name', test_id)
- test_result_fk = self.get_table_entry_pk(self.TABLE_TEST_RESULT, self.TABLE_TEST_RESULT + '_name', test_result)
-
- con = self.db_object
- cur = con.cursor()
-
- query = """ INSERT INTO `%s` (`mtest_build_id_fk`,
- `mtest_target_fk`,
- `mtest_toolchain_fk`,
- `mtest_test_type_fk`,
- `mtest_test_id_fk`,
- `mtest_test_result_fk`,
- `mtest_test_output`,
- `mtest_test_time`,
- `mtest_test_timeout`,
- `mtest_test_loop_no`,
- `mtest_test_result_extra`)
- VALUES (%d, %d, %d, %d, %d, %d, '%s', %.2f, %.2f, %d, '%s')"""% (self.TABLE_TEST_ENTRY,
- build_id,
- target_fk,
- toolchain_fk,
- test_type_fk,
- test_id_fk,
- test_result_fk,
- self.escape_string(test_output),
- test_time,
- test_timeout,
- test_loop,
- self.escape_string(test_extra))
- cur.execute(query)
- con.commit()
diff --git a/workspace_tools/test_webapi.py b/workspace_tools/test_webapi.py
deleted file mode 100644
index 59273e8..0000000
--- a/workspace_tools/test_webapi.py
+++ /dev/null
@@ -1,242 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2014 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Author: Przemyslaw Wirkus
-"""
-
-import sys
-import json
-import optparse
-from flask import Flask
-from os.path import join, abspath, dirname
-
-# Be sure that the tools directory is in the search path
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-# Imports related to mbed build api
-from workspace_tools.utils import construct_enum
-from workspace_tools.build_api import mcu_toolchain_matrix
-
-# Imports from TEST API
-from test_api import SingleTestRunner
-from test_api import SingleTestExecutor
-from test_api import get_json_data_from_file
-from test_api import print_muts_configuration_from_json
-from test_api import print_test_configuration_from_json
-from test_api import get_avail_tests_summary_table
-from test_api import get_default_test_options_parser
-
-
-class SingleTestRunnerWebService(SingleTestRunner):
- def __init__(self):
- super(SingleTestRunnerWebService, self).__init__()
-
- # With this lock we should control access to certain resources inside this class
- self.resource_lock = thread.allocate_lock()
-
- self.RestRequest = construct_enum(REST_MUTS='muts',
- REST_TEST_SPEC='test_spec',
- REST_TEST_RESULTS='test_results')
-
- def get_rest_result_template(self, result, command, success_code):
- """ Returns common part of every web service request
- """
- result = {"result" : result,
- "command" : command,
- "success_code": success_code} # 0 - OK, >0 - Error number
- return result
-
- # REST API handlers for Flask framework
- def rest_api_status(self):
- """ Returns current test execution status. E.g. running / finished etc.
- """
- with self.resource_lock:
- pass
-
- def rest_api_config(self):
- """ Returns configuration passed to SingleTest executor
- """
- with self.resource_lock:
- pass
-
- def rest_api_log(self):
- """ Returns current test log """
- with self.resource_lock:
- pass
-
- def rest_api_request_handler(self, request_type):
- """ Returns various data structures. Both static and mutable during test
- """
- result = {}
- success_code = 0
- with self.resource_lock:
- if request_type == self.RestRequest.REST_MUTS:
- result = self.muts # Returns MUTs
- elif request_type == self.RestRequest.REST_TEST_SPEC:
- result = self.test_spec # Returns Test Specification
- elif request_type == self.RestRequest.REST_TEST_RESULTS:
- pass # Returns test results
- else:
- success_code = -1
- return json.dumps(self.get_rest_result_template(result, 'request/' + request_type, success_code), indent=4)
-
-
-def singletest_in_webservice_mode():
- # TODO Implement this web service functionality
- pass
-
-
-def get_default_test_webservice_options_parser():
- """ Get test script web service options used by CLI, webservices etc.
- """
- parser = get_default_test_options_parser()
-
- # Things related to web services offered by test suite scripts
- parser.add_option('', '--rest-api',
- dest='rest_api_enabled',
- default=False,
- action="store_true",
- help='Enables REST API.')
-
- parser.add_option('', '--rest-api-port',
- dest='rest_api_port_no',
- help='Sets port for REST API interface')
-
- return parser
-
-'''
-if __name__ == '__main__':
- # Command line options
- parser = get_default_test_options_parser()
-
- parser.description = """This script allows you to run mbed defined test cases for particular MCU(s) and corresponding toolchain(s)."""
- parser.epilog = """Example: singletest.py -i test_spec.json -M muts_all.json"""
-
- (opts, args) = parser.parse_args()
-
- # Print summary / information about automation test status
- if opts.test_automation_report:
- print get_avail_tests_summary_table()
- exit(0)
-
- # Print summary / information about automation test status
- if opts.test_case_report:
- test_case_report_cols = ['id', 'automated', 'description', 'peripherals', 'host_test', 'duration', 'source_dir']
- print get_avail_tests_summary_table(cols=test_case_report_cols, result_summary=False, join_delim='\n')
- exit(0)
-
- # Only prints matrix of supported toolchains
- if opts.supported_toolchains:
- print mcu_toolchain_matrix(platform_filter=opts.general_filter_regex)
- exit(0)
-
- # Open file with test specification
- # test_spec_filename tells script which targets and their toolchain(s)
- # should be covered by the test scenario
- test_spec = get_json_data_from_file(opts.test_spec_filename) if opts.test_spec_filename else None
- if test_spec is None:
- if not opts.test_spec_filename:
- parser.print_help()
- exit(-1)
-
- # Get extra MUTs if applicable
- MUTs = get_json_data_from_file(opts.muts_spec_filename) if opts.muts_spec_filename else None
-
- if MUTs is None:
- if not opts.muts_spec_filename:
- parser.print_help()
- exit(-1)
-
- # Only prints read MUTs configuration
- if MUTs and opts.verbose_test_configuration_only:
- print "MUTs configuration in %s:"% opts.muts_spec_filename
- print print_muts_configuration_from_json(MUTs)
- print
- print "Test specification in %s:"% opts.test_spec_filename
- print print_test_configuration_from_json(test_spec)
- exit(0)
-
- # Verbose test specification and MUTs configuration
- if MUTs and opts.verbose:
- print print_muts_configuration_from_json(MUTs)
- if test_spec and opts.verbose:
- print print_test_configuration_from_json(test_spec)
-
- if opts.only_build_tests:
- # We are skipping testing phase, and suppress summary
- opts.suppress_summary = True
-
- single_test = SingleTestRunner(_global_loops_count=opts.test_global_loops_value,
- _test_loops_list=opts.test_loops_list,
- _muts=MUTs,
- _test_spec=test_spec,
- _opts_goanna_for_mbed_sdk=opts.goanna_for_mbed_sdk,
- _opts_goanna_for_tests=opts.goanna_for_tests,
- _opts_shuffle_test_order=opts.shuffle_test_order,
- _opts_shuffle_test_seed=opts.shuffle_test_seed,
- _opts_test_by_names=opts.test_by_names,
- _opts_test_only_peripheral=opts.test_only_peripheral,
- _opts_test_only_common=opts.test_only_common,
- _opts_verbose_skipped_tests=opts.verbose_skipped_tests,
- _opts_verbose_test_result_only=opts.verbose_test_result_only,
- _opts_verbose=opts.verbose,
- _opts_firmware_global_name=opts.firmware_global_name,
- _opts_only_build_tests=opts.only_build_tests,
- _opts_suppress_summary=opts.suppress_summary,
- _opts_test_x_toolchain_summary=opts.test_x_toolchain_summary,
- _opts_copy_method=opts.copy_method
- )
-
- try:
- st_exec_thread = SingleTestExecutor(single_test)
- except KeyboardInterrupt, e:
- print "\n[CTRL+c] exit"
- st_exec_thread.start()
-
- if opts.rest_api_enabled:
- # Enable REST API
-
- app = Flask(__name__)
-
- @app.route('/')
- def hello_world():
- return 'Hello World!'
-
- @app.route('/status')
- def rest_api_status():
- return single_test.rest_api_status() # TODO
-
- @app.route('/config')
- def rest_api_config():
- return single_test.rest_api_config() # TODO
-
- @app.route('/log')
- def rest_api_log():
- return single_test.rest_api_log() # TODO
-
- @app.route('/request/') # 'muts', 'test_spec', 'test_results'
- def rest_api_request_handler(request_type):
- result = single_test.rest_api_request_handler(request_type) # TODO
- return result
-
- rest_api_port = int(opts.rest_api_port_no) if opts.rest_api_port_no else 5555
- app.debug = False
- app.run(port=rest_api_port) # Blocking Flask REST API web service
- else:
- st_exec_thread.join()
-
-'''
diff --git a/workspace_tools/tests.py b/workspace_tools/tests.py
deleted file mode 100644
index 924c028..0000000
--- a/workspace_tools/tests.py
+++ /dev/null
@@ -1,1208 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from workspace_tools.paths import *
-from workspace_tools.data.support import *
-
-TEST_CMSIS_LIB = join(TEST_DIR, "cmsis", "lib")
-TEST_MBED_LIB = join(TEST_DIR, "mbed", "env")
-
-PERIPHERALS = join(TEST_DIR, "peripherals")
-BENCHMARKS_DIR = join(TEST_DIR, "benchmarks")
-
-SD = join(TEST_DIR, "sd")
-TMP102 = join(PERIPHERALS, 'TMP102')
-AT30TSE75X = join(PERIPHERALS, 'AT30TSE75X')
-
-"""
-Wiring:
- * Ground:
- * LPC1*: p1
- * KL25Z: GND
-
- * Vout
- * LPC1*: p40
- * KL25Z: P3V3
-
- * TMP102 (I2C):
- * LPC1*: (SDA=p28 , SCL=p27)
- * KL25Z: (SDA=PTC9, SCL=PTC8)
- * MAXWSNENV: (SDA=TP6, SCL=TP5)
-
- * digital_loop (Digital(In|Out|InOut), InterruptIn):
- * Arduino headers: (D0 <-> D7)
- * LPC1549: (D2 <-> D7)
- * LPC1*: (p5 <-> p25 )
- * KL25Z: (PTA5<-> PTC6)
- * NUCLEO_F103RB: (PC_6 <-> PB_8)
- * MAXWSNENV: (TP3 <-> TP4)
- * MAX32600MBED: (P1_0 <-> P4_7)
- * VK_RZ_A1H: (P3_2 <-> P5_6)
-
- * port_loop (Port(In|Out|InOut)):
- * Arduino headers: (D0 <-> D7), (D1 <-> D6)
- * LPC1*: (p5 <-> p25), (p6 <-> p26)
- * KL25Z: (PTA5 <-> PTC6), (PTA4 <-> PTC5)
- * NUCLEO_F103RB: (PC_6 <-> PB_8), (PC_5 <-> PB_9)
- * MAXWSNENV: (TP1 <-> TP3), (TP2 <-> TP4)
- * MAX32600MBED: (P1_0 <-> P4_7), (P1_1 <-> P4_6)
- * VK_RZ_A1H: (P3_2 <-> P5_6), (P3_7 <-> P5_1)
-
- * analog_loop (AnalogIn, AnalogOut):
- * Arduino headers: (A0 <-> A5)
- * LPC1549: (A0 <-> D12)
- * LPC1*: (p17 <-> p18 )
- * KL25Z: (PTE30 <-> PTC2)
-
- * analog_pot (AnalogIn):
- * Arduino headers: (A0, A1)
- * VK_RZ_A1H: (AN0, AN1)
-
- * SD (SPI):
- * LPC1*: (mosi=p11 , miso=p12 , sclk=p13 , cs=p14 )
- * KL25Z: (mosi=PTD2, miso=PTD3, sclk=PTD1, cs=PTD0)
-
- * MMA7660 (I2C):
- * LPC1*: (SDA=p28 , SCL=p27)
-
- * i2c_loop:
- * LPC1768: (p28 <-> p9), (p27 <-> p10)
-
- * i2c_eeprom:
- * LPC1*: (SDA=p28 , SCL=p27)
- * KL25Z: (SDA=PTE0, SCL=PTE1)
- * VK_RZ_A1H:(SDA=P1_1, SCL=P1_0)
-
- * can_transceiver:
- * LPC1768: (RX=p9, TX=p10)
- * LPC1549: (RX=D9, TX=D8)
- * LPC4088: (RX=p9, TX=p10)
- * VK_RZ_A1H:(RX=P5_9, TX=P5_10)
- * NUCLEO_F091RC: (RX=PA_11, TX=PA_12)
- * NUCLEO_F072RB: (RX=PA_11, TX=PA_12)
- * NUCLEO_F042K6: (RX=PA_11, TX=PA_12)
- * NUCLEO_F334R8: (RX=PA_11, TX=PA_12)
- * NUCLEO_F303RE: (RX=PA_11, TX=PA_12)
- * NUCLEO_F303K8: (RX=PA_11, TX=PA_12)
- * NUCLEO_F302R8: (RX=PA_11, TX=PA_12)
- * NUCLEO_F446RE: (RX=PA_11, TX=PA_12)
- * DISCO_F469NI: (RX=PB_8, TX=PB_9)
- * DISCO_F4269ZI: (RX=PA_11, TX=PA_12)
- * NUCLEO_F103RB: (RX=PA_11, TX=PA_12)
- * NUCLEO_F746ZG: (RX=PA_11, TX=PA_12)
- * DISCO_F746NG: (RX=PB_8, TX=PB_9)
- * DISCO_L476VG: (RX=PA_11, TX=PA_12)
- * NUCLEO_L476RG: (RX=PA_11, TX=PA_12)
-
-"""
-TESTS = [
- # Automated MBED tests
- {
- "id": "MBED_A1", "description": "Basic",
- "source_dir": join(TEST_DIR, "mbed", "basic"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- },
- {
- "id": "MBED_A2", "description": "Semihost file system",
- "source_dir": join(TEST_DIR, "mbed", "file"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "mcu": ["LPC1768", "LPC2368", "LPC11U24"]
- },
- {
- "id": "MBED_A3", "description": "C++ STL",
- "source_dir": join(TEST_DIR, "mbed", "stl"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": False,
- },
- {
- "id": "MBED_A4", "description": "I2C TMP102",
- "source_dir": join(TEST_DIR, "mbed", "i2c_TMP102"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, TMP102],
- "automated": True,
- "peripherals": ["TMP102"]
- },
- {
- "id": "MBED_AT30TSE75X", "description": "I2C Temperature Sensor / EEPROM",
- "source_dir": join(TEST_DIR, "mbed", "i2c_at30tse75x"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, AT30TSE75X],
- "automated": False,
- "peripherals": ["AT30TSE75X"]
- },
- {
- "id": "MBED_A5", "description": "DigitalIn DigitalOut",
- "source_dir": join(TEST_DIR, "mbed", "digitalin_digitalout"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "peripherals": ["digital_loop"]
- },
- {
- "id": "MBED_A6", "description": "DigitalInOut",
- "source_dir": join(TEST_DIR, "mbed", "digitalinout"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "peripherals": ["digital_loop"]
- },
- {
- "id": "MBED_A7", "description": "InterruptIn",
- "source_dir": join(TEST_DIR, "mbed", "interruptin"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- "peripherals": ["digital_loop"]
- },
- {
- "id": "MBED_A8", "description": "Analog",
- "source_dir": join(TEST_DIR, "mbed", "analog"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "peripherals": ["analog_loop"],
- "mcu": ["LPC1768", "LPC2368", "LPC2460", "KL25Z", "K64F", "K22F", "LPC4088", "LPC1549",
- "NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303K8", "NUCLEO_F303RE",
- "NUCLEO_F334R8", "NUCLEO_L053R8", "NUCLEO_L073RZ", "NUCLEO_L152RE",
- "NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "DISCO_F407VG", "DISCO_F746NG", "NUCLEO_F746ZG",
- "ARCH_MAX", "MAX32600MBED", "MOTE_L152RC", "B96B_F446VE"]
- },
- {
- "id": "MBED_A9", "description": "Serial Echo at 115200",
- "source_dir": join(TEST_DIR, "mbed", "echo"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- #"host_test": "echo"
- },
- {
- "id": "MBED_A10", "description": "PortOut PortIn",
- "source_dir": join(TEST_DIR, "mbed", "portout_portin"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "peripherals": ["port_loop"],
- "supported": DEFAULT_SUPPORT,
- "automated": True,
- },
- {
- "id": "MBED_A11", "description": "PortInOut",
- "source_dir": join(TEST_DIR, "mbed", "portinout"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "peripherals": ["port_loop"],
- "supported": DEFAULT_SUPPORT,
- "automated": True,
- },
- {
- "id": "MBED_A12", "description": "SD File System",
- "source_dir": join(TEST_DIR, "mbed", "sd"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
- "automated": True,
- "duration": 15,
- "peripherals": ["SD"]
- },
- {
- "id": "MBED_A13", "description": "I2C MMA7660 accelerometer",
- "source_dir": join(TEST_DIR, "mbed", "i2c_MMA7660"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA7660')],
- "automated": True,
- "peripherals": ["MMA7660"]
- },
- {
- "id": "MBED_A14", "description": "I2C Master",
- "source_dir": join(TEST_DIR, "mbed", "i2c_master"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
- },
- {
- "id": "MBED_A15", "description": "I2C Slave",
- "source_dir": join(TEST_DIR, "mbed", "i2c_slave"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
- },
- {
- "id": "MBED_A16", "description": "SPI Master",
- "source_dir": join(TEST_DIR, "mbed", "spi_master"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
- },
- {
- "id": "MBED_A17", "description": "SPI Slave",
- "source_dir": join(TEST_DIR, "mbed", "spi_slave"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
- },
- {
- "id": "MBED_A18", "description": "Interrupt vector relocation",
- "source_dir": join(TEST_DIR, "mbed", "vtor_reloc"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
- "mcu": ["LPC1768"],
- "automated": True,
- },
- {
- "id": "MBED_A19", "description": "I2C EEPROM read/write test",
- "source_dir": join(TEST_DIR, "mbed", "i2c_eeprom"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "peripherals": ["24LC256"],
- "automated": True,
- "duration": 15,
- },
- {
- "id": "MBED_A20", "description": "I2C master/slave test",
- "source_dir": join(TEST_DIR, "mbed", "i2c_master_slave"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
- "mcu": ["LPC1768", "RZ_A1H"],
- "peripherals": ["i2c_loop"]
- },
- {
- "id": "MBED_A21", "description": "Call function before main (mbed_main)",
- "source_dir": join(TEST_DIR, "mbed", "call_before_main"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- },
- {
- "id": "MBED_A22", "description": "SPIFI for LPC4088 (test 1)",
- "source_dir": join(TEST_DIR, "mbed", "spifi1"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "duration": 30,
- "mcu": ["LPC4088","LPC4088_DM"]
- },
- {
- "id": "MBED_A23", "description": "SPIFI for LPC4088 (test 2)",
- "source_dir": join(TEST_DIR, "mbed", "spifi2"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "duration": 30,
- "mcu": ["LPC4088","LPC4088_DM"]
- },
- {
- "id": "MBED_A24", "description": "Serial echo with RTS/CTS flow control",
- "source_dir": join(TEST_DIR, "mbed", "echo_flow_control"),
- "dependencies": [MBED_LIBRARIES],
- "automated": "True",
- "host_test": "echo_flow_control",
- "mcu": ["LPC1768"],
- "peripherals": ["extra_serial"]
- },
- {
- "id": "MBED_A25", "description": "I2C EEPROM line read/write test",
- "source_dir": join(TEST_DIR, "mbed", "i2c_eeprom_line"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "peripherals": ["24LC256"],
- "automated": True,
- "duration": 10,
- },
- {
- "id": "MBED_A26", "description": "AnalogIn potentiometer test",
- "source_dir": join(TEST_DIR, "mbed", "analog_pot"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "peripherals": ["analog_pot"],
- "automated": True,
- "duration": 10,
- },
- {
- "id": "MBED_A27", "description": "CAN loopback test",
- "source_dir": join(TEST_DIR, "mbed", "can_loopback"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "duration": 20,
- "peripherals": ["can_transceiver"],
- "mcu": ["LPC1549", "LPC1768","B96B_F446VE", "VK_RZ_A1H",
- "NUCLEO_F091RC", "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8",
- "NUCLEO_F303RE", "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE",
- "DISCO_F469NI", "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG",
- "DISCO_F746NG", "DISCO_L476VG", "NUCLEO_L476RG"]
- },
- {
- "id": "MBED_BLINKY", "description": "Blinky",
- "source_dir": join(TEST_DIR, "mbed", "blinky"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": False,
- },
- {
- "id": "MBED_BUS", "description": "Blinky BUS",
- "source_dir": join(TEST_DIR, "mbed", "bus"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": False,
- "duration": 15,
- },
-
- {
- "id": "MBED_BUSOUT", "description": "BusOut",
- "source_dir": join(TEST_DIR, "mbed", "bus_out"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "duration": 15,
- },
-
- # Size benchmarks
- {
- "id": "BENCHMARK_1", "description": "Size (c environment)",
- "source_dir": join(BENCHMARKS_DIR, "cenv"),
- "dependencies": [MBED_LIBRARIES]
- },
- {
- "id": "BENCHMARK_2", "description": "Size (float math)",
- "source_dir": join(BENCHMARKS_DIR, "float_math"),
- "dependencies": [MBED_LIBRARIES]
- },
- {
- "id": "BENCHMARK_3", "description": "Size (printf)",
- "source_dir": join(BENCHMARKS_DIR, "printf"),
- "dependencies": [MBED_LIBRARIES]
- },
- {
- "id": "BENCHMARK_4", "description": "Size (mbed libs)",
- "source_dir": join(BENCHMARKS_DIR, "mbed"),
- "dependencies": [MBED_LIBRARIES]
- },
- {
- "id": "BENCHMARK_5", "description": "Size (all)",
- "source_dir": join(BENCHMARKS_DIR, "all"),
- "dependencies": [MBED_LIBRARIES]
- },
-
- # performance related tests
- {
- "id": "PERF_1", "description": "SD Stdio R/W Speed",
- "source_dir": join(TEST_DIR, "mbed", "sd_perf_stdio"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
- "automated": True,
- "duration": 15,
- "peripherals": ["SD"]
- },
- {
- "id": "PERF_2", "description": "SD FileHandle R/W Speed",
- "source_dir": join(TEST_DIR, "mbed", "sd_perf_fhandle"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
- "automated": True,
- "duration": 15,
- "peripherals": ["SD"]
- },
- {
- "id": "PERF_3", "description": "SD FatFS R/W Speed",
- "source_dir": join(TEST_DIR, "mbed", "sd_perf_fatfs"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
- "automated": True,
- "duration": 15,
- "peripherals": ["SD"]
- },
-
-
- # Not automated MBED tests
- {
- "id": "MBED_1", "description": "I2C SRF08",
- "source_dir": join(TEST_DIR, "mbed", "i2c_SRF08"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'SRF08')],
- "peripherals": ["SRF08"]
- },
- {
- "id": "MBED_2", "description": "stdio",
- "source_dir": join(TEST_DIR, "mbed", "stdio"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 20,
- "automated": True,
- #"host_test": "stdio_auto"
- },
- {
- "id": "MBED_3", "description": "PortOut",
- "source_dir": join(TEST_DIR, "mbed", "portout"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_4", "description": "Sleep",
- "source_dir": join(TEST_DIR, "mbed", "sleep"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 30,
- "mcu": ["LPC1768", "LPC11U24", "LPC4088","LPC4088_DM","NRF51822", "LPC11U68"]
- },
- {
- "id": "MBED_5", "description": "PWM",
- "source_dir": join(TEST_DIR, "mbed", "pwm"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB]
- },
- {
- "id": "MBED_6", "description": "SW Reset",
- "source_dir": join(TEST_DIR, "mbed", "reset"),
- "dependencies": [MBED_LIBRARIES],
- "duration": 15
- },
- {
- "id": "MBED_7", "description": "stdio benchmark",
- "source_dir": join(TEST_DIR, "mbed", "stdio_benchmark"),
- "dependencies": [MBED_LIBRARIES],
- "duration": 40
- },
- {
- "id": "MBED_8", "description": "SPI",
- "source_dir": join(TEST_DIR, "mbed", "spi"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_9", "description": "Sleep Timeout",
- "source_dir": join(TEST_DIR, "mbed", "sleep_timeout"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_10", "description": "Hello World",
- "source_dir": join(TEST_DIR, "mbed", "hello"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- #"host_test": "hello_auto",
- },
- {
- "id": "MBED_11", "description": "Ticker Int",
- "source_dir": join(TEST_DIR, "mbed", "ticker"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- #"host_test": "wait_us_auto",
- "duration": 20,
- },
- {
- "id": "MBED_12", "description": "C++",
- "source_dir": join(TEST_DIR, "mbed", "cpp"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True
- },
- {
- "id": "MBED_13", "description": "Heap & Stack",
- "source_dir": join(TEST_DIR, "mbed", "heap_and_stack"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- },
- {
- "id": "MBED_14", "description": "Serial Interrupt",
- "source_dir": join(TEST_DIR, "mbed", "serial_interrupt"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- },
- {
- "id": "MBED_15", "description": "RPC",
- "source_dir": join(TEST_DIR, "mbed", "rpc"),
- "dependencies": [MBED_LIBRARIES, join(LIB_DIR, "rpc"), TEST_MBED_LIB],
- "automated": False,
- "mcu": ["LPC1768"]
- },
- {
- "id": "MBED_16", "description": "RTC",
- "source_dir": join(TEST_DIR, "mbed", "rtc"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "exclude_mcu": ["NRF51822", "NRF51822_BOOT", "NRF51822_OTA", "NRF51822_Y5_MBUG",
- "NRF51_DK", "NRF51_DK_BOOT", "NRF51_DK_OTA",
- "NRF51_MICROBIT", "NRF51_MICROBIT_B", "NRF51_MICROBIT_BOOT",
- "NRF51_MICROBIT_B_BOOT", "NRF51_MICROBIT_B_OTA", "NRF51_MICROBIT_OTA",
- "HRM1017", "HRM1017_BOOT", "HRM1701_OTA",
- "TY51822R3", "TY51822R3_BOOT", "TY51822R3_OTA",
- "NRF15_DONGLE", "NRF15_DONGLE_BOOT", "NRF15_DONGLE_OTA",
- "ARCH_BLE", "ARCH_BLE_BOOT", "ARCH_BLE_OTA",
- "ARCH_LINK", "ARCH_LINK_BOOT", "ARCH_LINK_OTA",
- "RBLAB_BLENANO", "RBLAB_BLENANO_BOOT", "RBLAB_BLENANO_OTA",
- "RBLAB_NRF51822", "RBLAB_NRF51822_BOOT", "RBLAB_NRF51822_OTA",
- "SEEED_TINY_BLE", "SEEED_TINY_BLE_BOOT", "SEEED_TINY_BLE_OTA",
- "WALLBOT_BLE", "WALLBOT_BLE_BOOT", "WALLBOT_BLE_OTA",
- "DELTA_DFCM_NNN40", "DELTA_DFCM_NNN40_BOOT", "DELTA_DFCM_NNN40_OTA",
- "LPC1114"],
- #"host_test": "rtc_auto",
- "duration": 15
- },
- {
- "id": "MBED_17", "description": "Serial Interrupt 2",
- "source_dir": join(TEST_DIR, "mbed", "serial_interrupt_2"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_18", "description": "Local FS Directory",
- "source_dir": join(TEST_DIR, "mbed", "dir"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_19", "description": "SD FS Directory",
- "source_dir": join(TEST_DIR, "mbed", "dir_sd"),
- "dependencies": [MBED_LIBRARIES, FS_LIBRARY],
- "peripherals": ["SD"]
- },
- {
- "id": "MBED_20", "description": "InterruptIn 2",
- "source_dir": join(TEST_DIR, "mbed", "interruptin_2"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_21", "description": "freopen Stream",
- "source_dir": join(TEST_DIR, "mbed", "freopen"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_22", "description": "Semihost",
- "source_dir": join(TEST_DIR, "mbed", "semihost"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "mcu": ["LPC1768", "LPC2368", "LPC11U24"]
- },
- {
- "id": "MBED_23", "description": "Ticker Int us",
- "source_dir": join(TEST_DIR, "mbed", "ticker_2"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- #"host_test": "wait_us_auto"
- },
- {
- "id": "MBED_24", "description": "Timeout Int us",
- "source_dir": join(TEST_DIR, "mbed", "timeout"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- #"host_test": "wait_us_auto"
- },
- {
- "id": "MBED_25", "description": "Time us",
- "source_dir": join(TEST_DIR, "mbed", "time_us"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- #"host_test": "wait_us_auto"
- },
- {
- "id": "MBED_26", "description": "Integer constant division",
- "source_dir": join(TEST_DIR, "mbed", "div"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- },
- {
- "id": "MBED_27", "description": "SPI ADXL345",
- "source_dir": join(TEST_DIR, "mbed", "spi_ADXL345"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'ADXL345')],
- "peripherals": ["ADXL345"]
- },
- {
- "id": "MBED_28", "description": "Interrupt chaining (InterruptManager)",
- "source_dir": join(TEST_DIR, "mbed", "interrupt_chaining"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- },
- {
- "id": "MBED_29", "description": "CAN network test",
- "source_dir": join(TEST_DIR, "mbed", "can"),
- "dependencies": [MBED_LIBRARIES],
- "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
- "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F303RE",
- "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE", "DISCO_F469NI",
- "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG", "DISCO_F746NG",
- "NUCLEO_L476RG"]
- },
- {
- "id": "MBED_30", "description": "CAN network test using interrupts",
- "source_dir": join(TEST_DIR, "mbed", "can_interrupt"),
- "dependencies": [MBED_LIBRARIES],
- "mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
- "NUCLEO_F072RB", "NUCLEO_F042K6", "NUCLEO_F334R8", "NUCLEO_F303RE",
- "NUCLEO_F303K8", "NUCLEO_F302R8", "NUCLEO_F446RE", "DISCO_F469NI",
- "DISCO_F429ZI", "NUCLEO_F103RB", "NUCLEO_F746ZG", "DISCO_F746NG",
- "NUCLEO_L476RG"]
- },
- {
- "id": "MBED_31", "description": "PWM LED test",
- "source_dir": join(TEST_DIR, "mbed", "pwm_led"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_32", "description": "Pin toggling",
- "source_dir": join(TEST_DIR, "mbed", "pin_toggling"),
- "dependencies": [MBED_LIBRARIES],
- },
- {
- "id": "MBED_33", "description": "C string operations",
- "source_dir": join(TEST_DIR, "mbed", "cstring"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 10,
- "automated": False,
- },
- {
- "id": "MBED_34", "description": "Ticker Two callbacks",
- "source_dir": join(TEST_DIR, "mbed", "ticker_3"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- #"host_test": "wait_us_auto"
- },
- {
- "id": "MBED_35", "description": "SPI C12832 display",
- "source_dir": join(TEST_DIR, "mbed", "spi_C12832"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'C12832')],
- "peripherals": ["C12832"],
- "automated": True,
- "duration": 10,
- },
- {
- "id": "MBED_36", "description": "WFI correct behavior",
- "source_dir": join(TEST_DIR, "mbed", "wfi"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": False
- },
- {
- "id": "MBED_37", "description": "Serial NC RX",
- "source_dir": join(TEST_DIR, "mbed", "serial_nc_rx"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True
- },
- {
- "id": "MBED_38", "description": "Serial NC TX",
- "source_dir": join(TEST_DIR, "mbed", "serial_nc_tx"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True
- },
-
- # CMSIS RTOS tests
- {
- "id": "CMSIS_RTOS_1", "description": "Basic",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "basic"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- },
- {
- "id": "CMSIS_RTOS_2", "description": "Mutex",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "mutex"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- "duration": 20
- },
- {
- "id": "CMSIS_RTOS_3", "description": "Semaphore",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "semaphore"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- "duration": 20
- },
- {
- "id": "CMSIS_RTOS_4", "description": "Signals",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "signals"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- },
- {
- "id": "CMSIS_RTOS_5", "description": "Queue",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "queue"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- "duration": 20
- },
- {
- "id": "CMSIS_RTOS_6", "description": "Mail",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "mail"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- "duration": 20
- },
- {
- "id": "CMSIS_RTOS_7", "description": "Timer",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "timer"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- },
- {
- "id": "CMSIS_RTOS_8", "description": "ISR",
- "source_dir": join(TEST_DIR, "rtos", "cmsis", "isr"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
- },
-
- # mbed RTOS tests
- {
- "id": "RTOS_1", "description": "Basic thread",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "basic"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- #"host_test": "wait_us_auto",
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_2", "description": "Mutex resource lock",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "mutex"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "duration": 20,
- "automated": True,
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
- "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_3", "description": "Semaphore resource lock",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "semaphore"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "duration": 20,
- "automated": True,
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
- "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_4", "description": "Signals messaging",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "signals"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
- "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_5", "description": "Queue messaging",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "queue"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
- "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_6", "description": "Mail messaging",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "mail"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
- "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_7", "description": "Timer",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "timer"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- #"host_test": "wait_us_auto",
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
- "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_8", "description": "ISR (Queue)",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "isr"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- "mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
- "KL25Z", "KL05Z", "K64F", "KL46Z",
- "RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
- "NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
- "NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
- "DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
- "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE",
- "EFM32HG_STK3400", "EFM32PG_STK3401", "EFM32LG_STK3600", "EFM32GG_STK3700", "EFM32WG_STK3800",
- "NRF51822", "NRF51_DK", "NRF51_MICROBIT", "SEEED_TINY_BLE"],
- },
- {
- "id": "RTOS_9", "description": "SD File write-read",
- "source_dir": join(TEST_DIR, "rtos", "mbed", "file"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
- "automated": True,
- "peripherals": ["SD"],
- "mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z",
- "KL05Z", "K64F", "KL46Z", "RZ_A1H",
- "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE", "NUCLEO_F410RB", "DISCO_F469NI"],
- },
-
- # Networking Tests
- {
- "id": "NET_1", "description": "TCP client hello world",
- "source_dir": join(TEST_DIR, "net", "helloworld", "tcpclient"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_2", "description": "NIST Internet Time Service",
- "source_dir": join(TEST_DIR, "net", "helloworld", "udpclient"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "duration": 15,
- "automated": True,
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_3", "description": "TCP echo server",
- "source_dir": join(TEST_DIR, "net", "echo", "tcp_server"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- #"host_test" : "tcpecho_server_auto",
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_4", "description": "TCP echo client",
- "source_dir": join(TEST_DIR, "net", "echo", "tcp_client"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- #"host_test": "tcpecho_client_auto",
- "peripherals": ["ethernet"]
- },
- {
- "id": "NET_5", "description": "UDP echo server",
- "source_dir": join(TEST_DIR, "net", "echo", "udp_server"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- #"host_test" : "udpecho_server_auto",
- "peripherals": ["ethernet"]
- },
- {
- "id": "NET_6", "description": "UDP echo client",
- "source_dir": join(TEST_DIR, "net", "echo", "udp_client"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- #"host_test" : "udpecho_client_auto",
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_7", "description": "HTTP client hello world",
- "source_dir": join(TEST_DIR, "net", "protocols", "HTTPClient_HelloWorld"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- "duration": 15,
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_8", "description": "NTP client",
- "source_dir": join(TEST_DIR, "net", "protocols", "NTPClient_HelloWorld"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_9", "description": "Multicast Send",
- "source_dir": join(TEST_DIR, "net", "helloworld", "multicast_send"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_10", "description": "Multicast Receive",
- "source_dir": join(TEST_DIR, "net", "helloworld", "multicast_receive"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_11", "description": "Broadcast Send",
- "source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_send"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_12", "description": "Broadcast Receive",
- "source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_receive"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_13", "description": "TCP client echo loop",
- "source_dir": join(TEST_DIR, "net", "echo", "tcp_client_loop"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
- "automated": True,
- "duration": 15,
- #"host_test": "tcpecho_client_auto",
- "peripherals": ["ethernet"],
- },
- {
- "id": "NET_14", "description": "UDP PHY/Data link layer",
- "source_dir": join(TEST_DIR, "net", "echo", "udp_link_layer"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
- "automated": False,
- "duration": 20,
- "host_test": "udp_link_layer_auto",
- "peripherals": ["ethernet"],
- },
-
- # u-blox tests
- {
- "id": "UB_1", "description": "u-blox USB modem: HTTP client",
- "source_dir": [join(TEST_DIR, "net", "cellular", "http", "ubloxusb"), join(TEST_DIR, "net", "cellular", "http", "common")],
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
- "supported": CORTEX_ARM_SUPPORT,
- },
- {
- "id": "UB_2", "description": "u-blox USB modem: SMS test",
- "source_dir": [join(TEST_DIR, "net", "cellular", "sms", "ubloxusb"), join(TEST_DIR, "net", "cellular", "sms", "common")],
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
- "supported": CORTEX_ARM_SUPPORT,
- },
-
- # USB Tests
- {
- "id": "USB_1", "description": "Mouse",
- "source_dir": join(TEST_DIR, "usb", "device", "basic"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- },
- {
- "id": "USB_2", "description": "Keyboard",
- "source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- },
- {
- "id": "USB_3", "description": "Mouse_Keyboard",
- "source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- },
- {
- "id": "USB_4", "description": "Serial Port",
- "source_dir": join(TEST_DIR, "usb", "device", "serial"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- "supported": CORTEX_ARM_SUPPORT,
- },
- {
- "id": "USB_5", "description": "Generic HID",
- "source_dir": join(TEST_DIR, "usb", "device", "raw_hid"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- },
- {
- "id": "USB_6", "description": "MIDI",
- "source_dir": join(TEST_DIR, "usb", "device", "midi"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- },
- {
- "id": "USB_7", "description": "AUDIO",
- "source_dir": join(TEST_DIR, "usb", "device", "audio"),
- "dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
- },
-
- # CMSIS DSP
- {
- "id": "CMSIS_DSP_1", "description": "FIR",
- "source_dir": join(TEST_DIR, "dsp", "cmsis", "fir_f32"),
- "dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
- },
-
- # mbed DSP
- {
- "id": "DSP_1", "description": "FIR",
- "source_dir": join(TEST_DIR, "dsp", "mbed", "fir_f32"),
- "dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
- },
-
- # KL25Z
- {
- "id": "KL25Z_1", "description": "LPTMR",
- "source_dir": join(TEST_DIR, "KL25Z", "lptmr"),
- "dependencies": [MBED_LIBRARIES],
- "supported": CORTEX_ARM_SUPPORT,
- "mcu": ["KL25Z"],
- },
- {
- "id": "KL25Z_2", "description": "PIT",
- "source_dir": join(TEST_DIR, "KL25Z", "pit"),
- "dependencies": [MBED_LIBRARIES],
- "supported": CORTEX_ARM_SUPPORT,
- "mcu": ["KL25Z"],
- },
- {
- "id": "KL25Z_3", "description": "TSI Touch Sensor",
- "source_dir": join(TEST_DIR, "mbed", "tsi"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'TSI')],
- "mcu": ["KL25Z"],
- },
- {
- "id": "KL25Z_4", "description": "RTC",
- "source_dir": join(TEST_DIR, "KL25Z", "rtc"),
- "dependencies": [MBED_LIBRARIES],
- "mcu": ["KL25Z"],
- },
- {
- "id": "KL25Z_5", "description": "MMA8451Q accelerometer",
- "source_dir": join(TEST_DIR, "mbed", "i2c_MMA8451Q"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA8451Q')],
- "mcu": ["KL25Z", "KL05Z", "KL46Z", "K20D50M"],
- "automated": True,
- "duration": 15,
- },
-
- # Examples
- {
- "id": "EXAMPLE_1", "description": "/dev/null",
- "source_dir": join(TEST_DIR, "mbed", "dev_null"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- #"host_test" : "dev_null_auto",
- },
- {
- "id": "EXAMPLE_2", "description": "FS + RTOS",
- "source_dir": join(TEST_DIR, "mbed", "fs"),
- "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
- },
-
- # CPPUTEST Library provides Unit testing Framework
- #
- # To write TESTs and TEST_GROUPs please add CPPUTEST_LIBRARY to 'dependencies'
- #
- # This will also include:
- # 1. test runner - main function with call to CommandLineTestRunner::RunAllTests(ac, av)
- # 2. Serial console object to print test result on serial port console
- #
-
- # Unit testing with cpputest library
- {
- "id": "UT_1", "description": "Basic",
- "source_dir": join(TEST_DIR, "utest", "basic"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
- {
- "id": "UT_2", "description": "Semihost file system",
- "source_dir": join(TEST_DIR, "utest", "semihost_fs"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- "mcu": ["LPC1768", "LPC2368", "LPC11U24"]
- },
- {
- "id": "UT_3", "description": "General tests",
- "source_dir": join(TEST_DIR, "utest", "general"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
- {
- "id": "UT_BUSIO", "description": "BusIn BusOut",
- "source_dir": join(TEST_DIR, "utest", "bus"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
- {
- "id": "UT_I2C_EEPROM_ASYNCH", "description": "I2C Asynch eeprom",
- "source_dir": join(TEST_DIR, "utest", "i2c_eeprom_asynch"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
- {
- "id": "UT_SERIAL_ASYNCH", "description": "Asynch serial test (req 2 serial peripherals)",
- "source_dir": join(TEST_DIR, "utest", "serial_asynch"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
- {
- "id": "UT_SPI_ASYNCH", "description": "Asynch spi test",
- "source_dir": join(TEST_DIR, "utest", "spi_asynch"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
- {
- "id": "UT_LP_TICKER", "description": "Low power ticker test",
- "source_dir": join(TEST_DIR, "utest", "lp_ticker"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
- "automated": False,
- },
-
- # Tests used for target information purposes
- {
- "id": "DTCT_1", "description": "Simple detect test",
- "source_dir": join(TEST_DIR, "mbed", "detect"),
- "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
- "automated": True,
- #"host_test" : "detect_auto",
- },
-
-]
-
-# Group tests with the same goals into categories
-GROUPS = {
- "core": ["MBED_A1", "MBED_A2", "MBED_A3", "MBED_A18"],
- "digital_io": ["MBED_A5", "MBED_A6", "MBED_A7", "MBED_A10", "MBED_A11"],
- "analog_io": ["MBED_A8"],
- "i2c": ["MBED_A19", "MBED_A20"],
- "spi": ["MBED_A12"],
-}
-GROUPS["rtos"] = [test["id"] for test in TESTS if test["id"].startswith("RTOS_")]
-GROUPS["net"] = [test["id"] for test in TESTS if test["id"].startswith("NET_")]
-GROUPS["automated"] = [test["id"] for test in TESTS if test.get("automated", False)]
-# Look for 'TEST_GROUPS' in private_settings.py and update the GROUPS dictionary
-# with the information in test_groups if found
-try:
- from workspace_tools.private_settings import TEST_GROUPS
-except:
- TEST_GROUPS = {}
-GROUPS.update(TEST_GROUPS)
-
-class Test:
- DEFAULTS = {
- #'mcu': None,
- 'description': None,
- 'dependencies': None,
- 'duration': 10,
- 'host_test': 'host_test',
- 'automated': False,
- 'peripherals': None,
- #'supported': None,
- 'source_dir': None,
- 'extra_files': None
- }
- def __init__(self, n):
- self.n = n
- self.__dict__.update(Test.DEFAULTS)
- self.__dict__.update(TESTS[n])
-
- def is_supported(self, target, toolchain):
- if hasattr(self, 'mcu') and not target in self.mcu:
- return False
- if hasattr(self, 'exclude_mcu') and target in self.exclude_mcu:
- return False
- if not hasattr(self, 'supported'):
- return True
- return (target in self.supported) and (toolchain in self.supported[target])
-
- def get_description(self):
- if self.description:
- return self.description
- else:
- return self.id
-
- def __cmp__(self, other):
- return cmp(self.n, other.n)
-
- def __str__(self):
- return "[%3d] %s: %s" % (self.n, self.id, self.get_description())
-
- def __getitem__(self, key):
- if key == "id": return self.id
- elif key == "mcu": return self.mcu
- elif key == "exclude_mcu": return self.exclude_mcu
- elif key == "dependencies": return self.dependencies
- elif key == "description": return self.description
- elif key == "duration": return self.duration
- elif key == "host_test": return self.host_test
- elif key == "automated": return self.automated
- elif key == "peripherals": return self.peripherals
- elif key == "supported": return self.supported
- elif key == "source_dir": return self.source_dir
- elif key == "extra_files": return self.extra_files
- else:
- return None
-
-TEST_MAP = dict([(test['id'], Test(i)) for i, test in enumerate(TESTS)])
diff --git a/workspace_tools/toolchains/__init__.py b/workspace_tools/toolchains/__init__.py
deleted file mode 100644
index 16b9f4d..0000000
--- a/workspace_tools/toolchains/__init__.py
+++ /dev/null
@@ -1,776 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-import sys
-from os import stat, walk
-from copy import copy
-from time import time, sleep
-from types import ListType
-from shutil import copyfile
-from os.path import join, splitext, exists, relpath, dirname, basename, split
-from inspect import getmro
-
-from multiprocessing import Pool, cpu_count
-from workspace_tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path
-from workspace_tools.settings import BUILD_OPTIONS, MBED_ORG_USER
-import workspace_tools.hooks as hooks
-
-
-#Disables multiprocessing if set to higher number than the host machine CPUs
-CPU_COUNT_MIN = 1
-
-def compile_worker(job):
- results = []
- for command in job['commands']:
- _, _stderr, _rc = run_cmd(command, job['work_dir'])
- results.append({
- 'code': _rc,
- 'output': _stderr,
- 'command': command
- })
-
- return {
- 'source': job['source'],
- 'object': job['object'],
- 'commands': job['commands'],
- 'results': results
- }
-
-class Resources:
- def __init__(self, base_path=None):
- self.base_path = base_path
-
- self.inc_dirs = []
- self.headers = []
-
- self.s_sources = []
- self.c_sources = []
- self.cpp_sources = []
-
- self.lib_dirs = set([])
- self.objects = []
- self.libraries = []
-
- # mbed special files
- self.lib_builds = []
- self.lib_refs = []
-
- self.repo_dirs = []
- self.repo_files = []
-
- self.linker_script = None
-
- # Other files
- self.hex_files = []
- self.bin_files = []
-
- def add(self, resources):
- self.inc_dirs += resources.inc_dirs
- self.headers += resources.headers
-
- self.s_sources += resources.s_sources
- self.c_sources += resources.c_sources
- self.cpp_sources += resources.cpp_sources
-
- self.lib_dirs |= resources.lib_dirs
- self.objects += resources.objects
- self.libraries += resources.libraries
-
- self.lib_builds += resources.lib_builds
- self.lib_refs += resources.lib_refs
-
- self.repo_dirs += resources.repo_dirs
- self.repo_files += resources.repo_files
-
- if resources.linker_script is not None:
- self.linker_script = resources.linker_script
-
- self.hex_files += resources.hex_files
- self.bin_files += resources.bin_files
-
- def relative_to(self, base, dot=False):
- for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
- 'cpp_sources', 'lib_dirs', 'objects', 'libraries',
- 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
- v = [rel_path(f, base, dot) for f in getattr(self, field)]
- setattr(self, field, v)
- if self.linker_script is not None:
- self.linker_script = rel_path(self.linker_script, base, dot)
-
- def win_to_unix(self):
- for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
- 'cpp_sources', 'lib_dirs', 'objects', 'libraries',
- 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']:
- v = [f.replace('\\', '/') for f in getattr(self, field)]
- setattr(self, field, v)
- if self.linker_script is not None:
- self.linker_script = self.linker_script.replace('\\', '/')
-
- def __str__(self):
- s = []
-
- for (label, resources) in (
- ('Include Directories', self.inc_dirs),
- ('Headers', self.headers),
-
- ('Assembly sources', self.s_sources),
- ('C sources', self.c_sources),
- ('C++ sources', self.cpp_sources),
-
- ('Library directories', self.lib_dirs),
- ('Objects', self.objects),
- ('Libraries', self.libraries),
-
- ('Hex files', self.hex_files),
- ('Bin files', self.bin_files),
- ):
- if resources:
- s.append('%s:\n ' % label + '\n '.join(resources))
-
- if self.linker_script:
- s.append('Linker Script: ' + self.linker_script)
-
- return '\n'.join(s)
-
-
-# Support legacy build conventions: the original mbed build system did not have
-# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
-# had the knowledge of a list of these directories to be ignored.
-LEGACY_IGNORE_DIRS = set([
- 'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
- 'ARM', 'GCC_ARM', 'GCC_CR', 'IAR', 'uARM'
-])
-LEGACY_TOOLCHAIN_NAMES = {
- 'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
- 'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
- 'IAR': 'IAR',
-}
-
-
-class mbedToolchain:
- VERBOSE = True
-
- CORTEX_SYMBOLS = {
- "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0"],
- "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS"],
- "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"],
- "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3"],
- "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4"],
- "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1"],
- "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7"],
- "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1"],
- "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"],
- }
-
- GOANNA_FORMAT = "[Goanna] warning [%FILENAME%:%LINENO%] - [%CHECKNAME%(%SEVERITY%)] %MESSAGE%"
- GOANNA_DIAGNOSTIC_PATTERN = re.compile(r'"\[Goanna\] (?Pwarning) \[(?P[^:]+):(?P\d+)\] \- (?P.*)"')
-
- def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
- self.target = target
- self.name = self.__class__.__name__
- self.hook = hooks.Hook(target, self)
- self.silent = silent
- self.output = ""
-
- self.legacy_ignore_dirs = LEGACY_IGNORE_DIRS - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])
-
- if notify:
- self.notify_fun = notify
- elif extra_verbose:
- self.notify_fun = self.print_notify_verbose
- else:
- self.notify_fun = self.print_notify
-
- self.options = options if options is not None else []
-
- self.macros = macros or []
- self.options.extend(BUILD_OPTIONS)
- if self.options:
- self.info("Build Options: %s" % (', '.join(self.options)))
-
- self.obj_path = join("TARGET_"+target.name, "TOOLCHAIN_"+self.name)
-
- self.symbols = None
- self.labels = None
- self.has_config = False
-
- self.build_all = False
- self.timestamp = time()
- self.jobs = 1
-
- self.CHROOT = None
-
- self.mp_pool = None
-
- def get_output(self):
- return self.output
-
- def print_notify(self, event, silent=False):
- """ Default command line notification
- """
- msg = None
-
- if event['type'] in ['info', 'debug']:
- msg = event['message']
-
- elif event['type'] == 'cc':
- event['severity'] = event['severity'].title()
- event['file'] = basename(event['file'])
- msg = '[%(severity)s] %(file)s@%(line)s: %(message)s' % event
-
- elif event['type'] == 'progress':
- if not silent:
- msg = '%s: %s' % (event['action'].title(), basename(event['file']))
-
- if msg:
- print msg
- self.output += msg + "\n"
-
- def print_notify_verbose(self, event, silent=False):
- """ Default command line notification with more verbose mode
- """
- if event['type'] in ['info', 'debug']:
- self.print_notify(event) # standard handle
-
- elif event['type'] == 'cc':
- event['severity'] = event['severity'].title()
- event['file'] = basename(event['file'])
- event['mcu_name'] = "None"
- event['toolchain'] = "None"
- event['target_name'] = event['target_name'].upper() if event['target_name'] else "Unknown"
- event['toolchain_name'] = event['toolchain_name'].upper() if event['toolchain_name'] else "Unknown"
- msg = '[%(severity)s] %(target_name)s::%(toolchain_name)s::%(file)s@%(line)s: %(message)s' % event
- print msg
- self.output += msg + "\n"
-
- elif event['type'] == 'progress':
- self.print_notify(event) # standard handle
-
- def notify(self, event):
- """ Little closure for notify functions
- """
- return self.notify_fun(event, self.silent)
-
- def __exit__(self):
- if self.mp_pool is not None:
- self.mp_pool.terminate()
-
- def goanna_parse_line(self, line):
- if "analyze" in self.options:
- return self.GOANNA_DIAGNOSTIC_PATTERN.match(line)
- else:
- return None
-
- def get_symbols(self):
- if self.symbols is None:
- # Target and Toolchain symbols
- labels = self.get_labels()
- self.symbols = ["TARGET_%s" % t for t in labels['TARGET']]
- self.symbols.extend(["FEATURE_%s" % t for t in labels['FEATURE']])
- self.symbols.extend(["TOOLCHAIN_%s" % t for t in labels['TOOLCHAIN']])
-
- # Config support
- if self.has_config:
- self.symbols.append('HAVE_MBED_CONFIG_H')
-
- # Cortex CPU symbols
- if self.target.core in mbedToolchain.CORTEX_SYMBOLS:
- self.symbols.extend(mbedToolchain.CORTEX_SYMBOLS[self.target.core])
-
- # Symbols defined by the on-line build.system
- self.symbols.extend(['MBED_BUILD_TIMESTAMP=%s' % self.timestamp, 'TARGET_LIKE_MBED', '__MBED__=1'])
- if MBED_ORG_USER:
- self.symbols.append('MBED_USERNAME=' + MBED_ORG_USER)
-
- # Add target's symbols
- self.symbols += self.target.macros
- self.symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has]
- # Add extra symbols passed via 'macros' parameter
- self.symbols += self.macros
-
- # Form factor variables
- if hasattr(self.target, 'supported_form_factors'):
- self.symbols.extend(["TARGET_FF_%s" % t for t in self.target.supported_form_factors])
-
- return list(set(self.symbols)) # Return only unique symbols
-
- def get_labels(self):
- if self.labels is None:
- toolchain_labels = [c.__name__ for c in getmro(self.__class__)]
- toolchain_labels.remove('mbedToolchain')
- self.labels = {
- 'TARGET': self.target.get_labels(),
- 'FEATURE': self.target.features,
- 'TOOLCHAIN': toolchain_labels
- }
- return self.labels
-
- def need_update(self, target, dependencies):
- if self.build_all:
- return True
-
- if not exists(target):
- return True
-
- target_mod_time = stat(target).st_mtime
-
- for d in dependencies:
-
- # Some objects are not provided with full path and here we do not have
- # information about the library paths. Safe option: assume an update
- if not d or not exists(d):
- return True
-
- if stat(d).st_mtime >= target_mod_time:
- return True
-
- return False
-
- def scan_resources(self, path):
- labels = self.get_labels()
- resources = Resources(path)
- self.has_config = False
-
- """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
- When topdown is True, the caller can modify the dirnames list in-place
- (perhaps using del or slice assignment), and walk() will only recurse into
- the subdirectories whose names remain in dirnames; this can be used to prune
- the search, impose a specific order of visiting, or even to inform walk()
- about directories the caller creates or renames before it resumes walk()
- again. Modifying dirnames when topdown is False is ineffective, because in
- bottom-up mode the directories in dirnames are generated before dirpath
- itself is generated.
- """
- for root, dirs, files in walk(path):
- # Remove ignored directories
- for d in copy(dirs):
- if d == '.hg':
- dir_path = join(root, d)
- resources.repo_dirs.append(dir_path)
- resources.repo_files.extend(self.scan_repository(dir_path))
-
- if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
- (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
- (d.startswith('FEATURE_') and d[8:] not in labels['FEATURE']) or
- (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])):
- dirs.remove(d)
-
- # Add root to include paths
- resources.inc_dirs.append(root)
-
- for file in files:
- file_path = join(root, file)
- _, ext = splitext(file)
- ext = ext.lower()
-
- if ext == '.s':
- resources.s_sources.append(file_path)
-
- elif ext == '.c':
- resources.c_sources.append(file_path)
-
- elif ext == '.cpp':
- resources.cpp_sources.append(file_path)
-
- elif ext == '.h' or ext == '.hpp':
- if basename(file_path) == "mbed_config.h":
- self.has_config = True
- resources.headers.append(file_path)
-
- elif ext == '.o':
- resources.objects.append(file_path)
-
- elif ext == self.LIBRARY_EXT:
- resources.libraries.append(file_path)
- resources.lib_dirs.add(root)
-
- elif ext == self.LINKER_EXT:
- if resources.linker_script is not None:
- self.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
- resources.linker_script = file_path
-
- elif ext == '.lib':
- resources.lib_refs.append(file_path)
-
- elif ext == '.bld':
- resources.lib_builds.append(file_path)
-
- elif file == '.hgignore':
- resources.repo_files.append(file_path)
-
- elif ext == '.hex':
- resources.hex_files.append(file_path)
-
- elif ext == '.bin':
- resources.bin_files.append(file_path)
-
- return resources
-
- def scan_repository(self, path):
- resources = []
-
- for root, dirs, files in walk(path):
- # Remove ignored directories
- for d in copy(dirs):
- if d == '.' or d == '..':
- dirs.remove(d)
-
- for file in files:
- file_path = join(root, file)
- resources.append(file_path)
-
- return resources
-
- def copy_files(self, files_paths, trg_path, rel_path=None):
-
- # Handle a single file
- if type(files_paths) != ListType: files_paths = [files_paths]
-
- for source in files_paths:
- if source is None:
- files_paths.remove(source)
-
- for source in files_paths:
- if rel_path is not None:
- relative_path = relpath(source, rel_path)
- else:
- _, relative_path = split(source)
-
- target = join(trg_path, relative_path)
-
- if (target != source) and (self.need_update(target, [source])):
- self.progress("copy", relative_path)
- mkdir(dirname(target))
- copyfile(source, target)
-
- def relative_object_path(self, build_path, base_dir, source):
- source_dir, name, _ = split_path(source)
- obj_dir = join(build_path, relpath(source_dir, base_dir))
- mkdir(obj_dir)
- return join(obj_dir, name + '.o')
-
- def compile_sources(self, resources, build_path, inc_dirs=None):
- # Web IDE progress bar for project build
- files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
- self.to_be_compiled = len(files_to_compile)
- self.compiled = 0
-
- #for i in self.build_params:
- # self.debug(i)
- # self.debug("%s" % self.build_params[i])
-
- inc_paths = resources.inc_dirs
- if inc_dirs is not None:
- inc_paths.extend(inc_dirs)
-
- objects = []
- queue = []
- prev_dir = None
-
- # The dependency checking for C/C++ is delegated to the compiler
- base_path = resources.base_path
- files_to_compile.sort()
- for source in files_to_compile:
- _, name, _ = split_path(source)
- object = self.relative_object_path(build_path, base_path, source)
-
- # Avoid multiple mkdir() calls on same work directory
- work_dir = dirname(object)
- if work_dir is not prev_dir:
- prev_dir = work_dir
- mkdir(work_dir)
-
- # Queue mode (multiprocessing)
- commands = self.compile_command(source, object, inc_paths)
- if commands is not None:
- queue.append({
- 'source': source,
- 'object': object,
- 'commands': commands,
- 'work_dir': work_dir,
- 'chroot': self.CHROOT
- })
- else:
- objects.append(object)
-
- # Use queues/multiprocessing if cpu count is higher than setting
- jobs = self.jobs if self.jobs else cpu_count()
- if jobs > CPU_COUNT_MIN and len(queue) > jobs:
- return self.compile_queue(queue, objects)
- else:
- return self.compile_seq(queue, objects)
-
- def compile_seq(self, queue, objects):
- for item in queue:
- result = compile_worker(item)
-
- self.compiled += 1
- self.progress("compile", item['source'], build_update=True)
- for res in result['results']:
- self.debug("Command: %s" % ' '.join(res['command']))
- self.compile_output([
- res['code'],
- res['output'],
- res['command']
- ])
- objects.append(result['object'])
- return objects
-
    def compile_queue(self, queue, objects):
        """Compile queued jobs on a multiprocessing Pool, polling for results.

        Appends each finished object file to *objects* and returns the list.
        Raises ToolException when a job fails or the pool exceeds its time
        budget.
        """
        jobs_count = int(self.jobs if self.jobs else cpu_count())
        p = Pool(processes=jobs_count)

        results = []
        for i in range(len(queue)):
            results.append(p.apply_async(compile_worker, [queue[i]]))

        itr = 0
        while True:
            itr += 1
            # 30000 iterations x the 0.01s sleep below ~= 5 minute budget
            if itr > 30000:
                p.terminate()
                p.join()
                raise ToolException("Compile did not finish in 5 minutes")

            pending = 0
            for r in results:
                # NOTE(review): r._ready is a private AsyncResult attribute;
                # the public API is r.ready() -- confirm before changing.
                if r._ready is True:
                    try:
                        result = r.get()
                        # NOTE(review): removing from 'results' while iterating
                        # it skips the next element this pass; the outer while
                        # loop re-scans, so finished jobs are still collected.
                        results.remove(r)

                        self.compiled += 1
                        self.progress("compile", result['source'], build_update=True)
                        for res in result['results']:
                            self.debug("Command: %s" % ' '.join(res['command']))
                            self.compile_output([
                                res['code'],
                                res['output'],
                                res['command']
                            ])
                        objects.append(result['object'])
                    except ToolException, err:
                        # Abort the whole pool on the first failed job
                        p.terminate()
                        p.join()
                        raise ToolException(err)
                else:
                    pending += 1
                    # Enough jobs still running to saturate the pool; stop scanning
                    if pending > jobs_count:
                        break


            if len(results) == 0:
                break

            sleep(0.01)

        results = None
        p.terminate()
        p.join()

        return objects
-
- def compile_command(self, source, object, includes):
- # Check dependencies
- _, ext = splitext(source)
- ext = ext.lower()
-
- if ext == '.c' or ext == '.cpp':
- base, _ = splitext(object)
- dep_path = base + '.d'
- deps = self.parse_dependencies(dep_path) if (exists(dep_path)) else []
- if len(deps) == 0 or self.need_update(object, deps):
- if ext == '.c':
- return self.compile_c(source, object, includes)
- else:
- return self.compile_cpp(source, object, includes)
- elif ext == '.s':
- deps = [source]
- if self.need_update(object, deps):
- return self.assemble(source, object, includes)
- else:
- return False
-
- return None
-
- def is_not_supported_error(self, output):
- return "#error directive: [NOT_SUPPORTED]" in output
-
- def compile_output(self, output=[]):
- _rc = output[0]
- _stderr = output[1]
- command = output[2]
-
- # Parse output for Warnings and Errors
- self.parse_output(_stderr)
- self.debug("Return: %s"% _rc)
- for error_line in _stderr.splitlines():
- self.debug("Output: %s"% error_line)
-
-
- # Check return code
- if _rc != 0:
- for line in _stderr.splitlines():
- self.tool_error(line)
-
- if self.is_not_supported_error(_stderr):
- raise NotSupportedException(_stderr)
- else:
- raise ToolException(_stderr)
-
- def compile(self, cc, source, object, includes):
- _, ext = splitext(source)
- ext = ext.lower()
-
- command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source]
-
- if hasattr(self, "get_dep_opt"):
- base, _ = splitext(object)
- dep_path = base + '.d'
- command.extend(self.get_dep_opt(dep_path))
-
- if hasattr(self, "cc_extra"):
- command.extend(self.cc_extra(base))
-
- return [command]
-
    def compile_c(self, source, object, includes):
        # Delegate to compile() using the C compiler command line (self.cc).
        return self.compile(self.cc, source, object, includes)
-
    def compile_cpp(self, source, object, includes):
        # Delegate to compile() using the C++ compiler command line (self.cppc).
        return self.compile(self.cppc, source, object, includes)
-
- def build_library(self, objects, dir, name):
- needed_update = False
- lib = self.STD_LIB_NAME % name
- fout = join(dir, lib)
- if self.need_update(fout, objects):
- self.info("Library: %s" % lib)
- self.archive(objects, fout)
- needed_update = True
-
- return needed_update
-
- def link_program(self, r, tmp_path, name):
- needed_update = False
- ext = 'bin'
- if hasattr(self.target, 'OUTPUT_EXT'):
- ext = self.target.OUTPUT_EXT
-
- if hasattr(self.target, 'OUTPUT_NAMING'):
- self.var("binary_naming", self.target.OUTPUT_NAMING)
- if self.target.OUTPUT_NAMING == "8.3":
- name = name[0:8]
- ext = ext[0:3]
-
- filename = name+'.'+ext
- elf = join(tmp_path, name + '.elf')
- bin = join(tmp_path, filename)
-
- if self.need_update(elf, r.objects + r.libraries + [r.linker_script]):
- needed_update = True
- self.progress("link", name)
- self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
-
- if self.need_update(bin, [elf]):
- needed_update = True
- self.progress("elf2bin", name)
-
- self.binary(r, elf, bin)
-
- self.var("compile_succeded", True)
- self.var("binary", filename)
-
- return bin, needed_update
-
    def default_cmd(self, command):
        """Run *command*, echo and log its output, raise ToolException on a
        non-zero return code."""
        _stdout, _stderr, _rc = run_cmd(command)
        # Print all warnings / errors from stderr to console output
        for error_line in _stderr.splitlines():
            print error_line

        self.debug("Command: %s"% ' '.join(command))
        self.debug("Return: %s"% _rc)

        # stderr lines are logged a second time here, as 'Errors' debug records
        for output_line in _stdout.splitlines():
            self.debug("Output: %s"% output_line)
        for error_line in _stderr.splitlines():
            self.debug("Errors: %s"% error_line)

        if _rc != 0:
            for line in _stderr.splitlines():
                self.tool_error(line)
            raise ToolException(_stderr)
-
- ### NOTIFICATIONS ###
    def info(self, message):
        # Emit an informational notification to the registered notify handler.
        self.notify({'type': 'info', 'message': message})
-
    def debug(self, message):
        # Emit a debug notification, but only when verbose mode is enabled.
        if self.VERBOSE:
            # message may be a command list (Python 2 types.ListType);
            # flatten it to a single space-joined string
            if type(message) is ListType:
                message = ' '.join(message)
            message = "[DEBUG] " + message
            self.notify({'type': 'debug', 'message': message})
-
    def cc_info(self, severity, file, line, message, target_name=None, toolchain_name=None):
        # Forward one compiler diagnostic (warning/error location + text)
        # as a structured 'cc' notification.
        self.notify({'type': 'cc',
                     'severity': severity,
                     'file': file,
                     'line': line,
                     'message': message,
                     'target_name': target_name,
                     'toolchain_name': toolchain_name})
-
- def progress(self, action, file, build_update=False):
- msg = {'type': 'progress', 'action': action, 'file': file}
- if build_update:
- msg['percent'] = 100. * float(self.compiled) / float(self.to_be_compiled)
- self.notify(msg)
-
    def tool_error(self, message):
        # Emit a tool-error notification (compiler/linker reported failure).
        self.notify({'type': 'tool_error', 'message': message})
-
    def var(self, key, value):
        # Publish a build variable (key/value) to the notify handler.
        self.notify({'type': 'var', 'key': key, 'val': value})
-
from workspace_tools.settings import ARM_BIN
from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH
from workspace_tools.settings import IAR_PATH

# Toolchain id -> installation bin directory (taken from local settings).
# 'ARM' and 'uARM' share the same armcc installation.
TOOLCHAIN_BIN_PATH = {
    'ARM': ARM_BIN,
    'uARM': ARM_BIN,
    'GCC_ARM': GCC_ARM_PATH,
    'GCC_CR': GCC_CR_PATH,
    'IAR': IAR_PATH
}

from workspace_tools.toolchains.arm import ARM_STD, ARM_MICRO
from workspace_tools.toolchains.gcc import GCC_ARM, GCC_CR
from workspace_tools.toolchains.iar import IAR

# Toolchain id -> driver class implementing mbedToolchain for it.
TOOLCHAIN_CLASSES = {
    'ARM': ARM_STD,
    'uARM': ARM_MICRO,
    'GCC_ARM': GCC_ARM,
    'GCC_CR': GCC_CR,
    'IAR': IAR
}

# Set of all supported toolchain identifiers.
TOOLCHAINS = set(TOOLCHAIN_CLASSES.keys())
diff --git a/workspace_tools/toolchains/arm.py b/workspace_tools/toolchains/arm.py
deleted file mode 100644
index 447cabb..0000000
--- a/workspace_tools/toolchains/arm.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import re
-from os.path import join
-import copy
-
-from workspace_tools.toolchains import mbedToolchain
-from workspace_tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB
-from workspace_tools.hooks import hook_tool
-from workspace_tools.settings import GOANNA_PATH
-
class ARM(mbedToolchain):
    """Toolchain driver for the ARM compiler (armcc/armlink/armar/fromelf)."""
    LINKER_EXT = '.sct'
    LIBRARY_EXT = '.ar'

    STD_LIB_NAME = "%s.ar"
    # Matches armcc diagnostics such as:
    #   "main.cpp", line 12 (column 3): Warning: ...
    # The (?P<name>...) group names were reconstructed from the .group()
    # calls in parse_output()/parse_dependencies(); the previous text had
    # lost them (e.g. '(?P[^"]+)') and would not even compile as a regex.
    DIAGNOSTIC_PATTERN = re.compile(r'"(?P<file>[^"]+)", line (?P<line>\d+)( \(column (?P<column>\d+)\)|): (?P<severity>Warning|Error): (?P<message>.+)')
    # One dependency per line of the .d file: "<target>: <file>"
    DEP_PATTERN = re.compile(r'\S+:\s(?P<file>.+)\n')

    DEFAULT_FLAGS = {
        'common': ["--apcs=interwork",
                   "--brief_diagnostics"],
        'asm': ['-I"%s"' % ARM_INC],
        'c': ["-c", "--gnu", "-Otime", "--restrict", "--multibyte_chars", "--split_sections", "--md", "--no_depend_system_headers", '-I"%s"' % ARM_INC,
              "--c99", "-D__ASSERT_MSG"],
        'cxx': ["--cpp", "--no_rtti", "-D__ASSERT_MSG"],
        'ld': [],
    }

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # Map mbed core names onto armcc --cpu values
        if target.core == "Cortex-M0+":
            cpu = "Cortex-M0"
        elif target.core == "Cortex-M4F":
            cpu = "Cortex-M4.fp"
        elif target.core == "Cortex-M7F":
            cpu = "Cortex-M7.fp.sp"
        else:
            cpu = target.core

        main_cc = join(ARM_BIN, "armcc")

        # Deep-copy so per-instance flag tweaks never leak into the class default
        self.flags = copy.deepcopy(self.DEFAULT_FLAGS)
        self.flags['common'] += ["--cpu=%s" % cpu]
        if "save-asm" in self.options:
            self.flags['common'].extend(["--asm", "--interleave"])

        if "debug-info" in self.options:
            self.flags['common'].append("-g")
            self.flags['c'].append("-O0")
        else:
            self.flags['c'].append("-O3")

        self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] + self.flags['c']
        if not "analyze" in self.options:
            self.cc = [main_cc] + self.flags['common'] + self.flags['c']
            self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx']
        else:
            # Route compilation through the Goanna static-analysis wrappers
            self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c']
            self.cppc = [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cc.replace('\\', '/'), "--dialect=armcc", '--output-format="%s"' % self.GOANNA_FORMAT] + self.flags['common'] + self.flags['c'] + self.flags['cxx']

        self.ld = [join(ARM_BIN, "armlink")]
        self.sys_libs = []

        self.ar = join(ARM_BIN, "armar")
        self.elf2bin = join(ARM_BIN, "fromelf")

    def remove_option(self, option):
        # Strip a flag from all compile/assemble command lines (not the linker)
        for tool in [self.asm, self.cc, self.cppc]:
            if option in tool:
                tool.remove(option)

    def assemble(self, source, object, includes):
        # Preprocess first, then assemble the preprocessed output
        tempfile = object + '.E.s'
        return [
            self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source],
            self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile])
        ]

    def parse_dependencies(self, dep_path):
        """Return the list of files the .d file at *dep_path* records."""
        dependencies = []
        for line in open(dep_path).readlines():
            match = ARM.DEP_PATTERN.match(line)
            if match is not None:
                dependencies.append(match.group('file'))
        return dependencies

    def parse_output(self, output):
        """Convert armcc (and optional Goanna) diagnostics into cc_info events."""
        for line in output.splitlines():
            match = ARM.DIAGNOSTIC_PATTERN.match(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message'),
                    target_name=self.target.name,
                    toolchain_name=self.name
                )
            match = self.goanna_parse_line(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message')
                )

    def get_dep_opt(self, dep_path):
        # armcc flag that writes the dependency file
        return ["--depend", dep_path]

    def archive(self, objects, lib_path):
        self.default_cmd([self.ar, '-r', lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        if len(lib_dirs):
            args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--list=.link_totals.txt"]
        else:
            args = ["-o", output, "--info=totals", "--list=.link_totals.txt"]

        if mem_map:
            # Scatter file drives the memory layout
            args.extend(["--scatter", mem_map])

        # Give the target a chance to rewrite the linker command line
        if hasattr(self.target, "link_cmdline_hook"):
            args = self.target.link_cmdline_hook(self.__class__.__name__, args)

        self.default_cmd(self.ld + args + objects + libraries + self.sys_libs)

    @hook_tool
    def binary(self, resources, elf, bin):
        # fromelf converts the linked ELF into a raw binary image
        args = [self.elf2bin, '--bin', '-o', bin, elf]

        if hasattr(self.target, "binary_cmdline_hook"):
            args = self.target.binary_cmdline_hook(self.__class__.__name__, args)

        self.default_cmd(args)
-
class ARM_STD(ARM):
    # ARM toolchain variant linked against the standard ARM C library.
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
        # Search the standard library directory at link time
        self.ld.append("--libpath=%s" % ARM_LIB)
-
-
class ARM_MICRO(ARM):
    # ARM toolchain variant linked against microlib (uARM).
    PATCHED_LIBRARY = False

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # add microlib to the command line flags
        self.asm += ["-D__MICROLIB"]
        self.cc += ["--library_type=microlib", "-D__MICROLIB"]
        self.cppc += ["--library_type=microlib", "-D__MICROLIB"]

        # the exporter uses --library_type flag to set microlib
        self.flags['c'] += ["--library_type=microlib"]
        self.flags['cxx'] += ["--library_type=microlib"]
        self.flags['ld'].append("--library_type=microlib")

        # We had to patch microlib to add C++ support
        # In later releases this patch should have entered mainline
        if ARM_MICRO.PATCHED_LIBRARY:
            self.flags['ld'].append("--noscanlib")

        # System Libraries (patched microlib C runtime pieces)
        self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])

        # Core-specific C++ support libraries
        if target.core == "Cortex-M3":
            self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ws", "cpprt_w"]])

        elif target.core in ["Cortex-M0", "Cortex-M0+"]:
            self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
        else:
            # Fall back to the standard library search path for other cores
            self.ld.append("--libpath=%s" % ARM_LIB)
diff --git a/workspace_tools/toolchains/gcc.py b/workspace_tools/toolchains/gcc.py
deleted file mode 100644
index a6cb063..0000000
--- a/workspace_tools/toolchains/gcc.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import re
-from os.path import join, basename, splitext
-
-from workspace_tools.toolchains import mbedToolchain
-from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH
-from workspace_tools.settings import GOANNA_PATH
-from workspace_tools.hooks import hook_tool
-
class GCC(mbedToolchain):
    """Base driver for GNU ARM embedded toolchains (arm-none-eabi-*)."""
    LINKER_EXT = '.ld'
    LIBRARY_EXT = '.a'

    STD_LIB_NAME = "lib%s.a"
    # Second line of a two-line GCC diagnostic: "<line>:<col>: warning: msg".
    # The (?P<name>...) group names were reconstructed from the .group()
    # calls in parse_output(); the previous text had lost them and was an
    # invalid regular expression.
    DIAGNOSTIC_PATTERN = re.compile(r'((?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, tool_path="", extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)

        # Map mbed core names onto GCC -mcpu values
        if target.core == "Cortex-M0+":
            cpu = "cortex-m0plus"
        elif target.core == "Cortex-M4F":
            cpu = "cortex-m4"
        elif target.core == "Cortex-M7F":
            cpu = "cortex-m7"
        else:
            cpu = target.core.lower()

        self.cpu = ["-mcpu=%s" % cpu]
        if target.core.startswith("Cortex"):
            self.cpu.append("-mthumb")

        # FPU selection: single-precision FPv4 on M4F, FPv5 on M7F
        if target.core == "Cortex-M4F":
            self.cpu.append("-mfpu=fpv4-sp-d16")
            self.cpu.append("-mfloat-abi=softfp")
        elif target.core == "Cortex-M7F":
            self.cpu.append("-mfpu=fpv5-d16")
            self.cpu.append("-mfloat-abi=softfp")

        if target.core == "Cortex-A9":
            self.cpu.append("-mthumb-interwork")
            self.cpu.append("-marm")
            self.cpu.append("-march=armv7-a")
            self.cpu.append("-mfpu=vfpv3")
            self.cpu.append("-mfloat-abi=hard")
            self.cpu.append("-mno-unaligned-access")

        # Note: We are using "-O2" instead of "-Os" to avoid this known GCC bug:
        # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46762
        common_flags = ["-c", "-Wall", "-Wextra",
                        "-Wno-unused-parameter", "-Wno-missing-field-initializers",
                        "-fmessage-length=0", "-fno-exceptions", "-fno-builtin",
                        "-ffunction-sections", "-fdata-sections",
                        "-MMD", "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
                        ] + self.cpu

        if "save-asm" in self.options:
            common_flags.append("-save-temps")

        if "debug-info" in self.options:
            common_flags.append("-g")
            common_flags.append("-O0")
        else:
            common_flags.append("-O2")

        main_cc = join(tool_path, "arm-none-eabi-gcc")
        main_cppc = join(tool_path, "arm-none-eabi-g++")
        self.asm = [main_cc, "-x", "assembler-with-cpp"] + common_flags
        if not "analyze" in self.options:
            self.cc = [main_cc, "-std=gnu99"] + common_flags
            self.cppc = [main_cppc, "-std=gnu++98", "-fno-rtti"] + common_flags
        else:
            # Route compilation through the Goanna static-analysis wrappers
            self.cc = [join(GOANNA_PATH, "goannacc"), "--with-cc=" + main_cc.replace('\\', '/'), "-std=gnu99", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags
            self.cppc = [join(GOANNA_PATH, "goannac++"), "--with-cxx=" + main_cppc.replace('\\', '/'), "-std=gnu++98", "-fno-rtti", "--dialect=gnu", '--output-format="%s"' % self.GOANNA_FORMAT] + common_flags

        self.ld = [join(tool_path, "arm-none-eabi-gcc"), "-Wl,--gc-sections", "-Wl,--wrap,main"] + self.cpu
        self.sys_libs = ["stdc++", "supc++", "m", "c", "gcc"]

        self.ar = join(tool_path, "arm-none-eabi-ar")
        self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")

    def assemble(self, source, object, includes):
        # Single-step: gcc preprocesses and assembles in one invocation
        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]

    def parse_dependencies(self, dep_path):
        """Return the dependency files listed in a gcc -MMD .d file."""
        dependencies = []
        for line in open(dep_path).readlines()[1:]:
            file = line.replace('\\\n', '').strip()
            if file:
                # GCC might list more than one dependency on a single line, in this case
                # the dependencies are separated by a space. However, a space might also
                # indicate an actual space character in a dependency path, but in this case
                # the space character is prefixed by a backslash.
                # Temporary replace all '\ ' with a special char that is not used (\a in this
                # case) to keep them from being interpreted by 'split' (they will be converted
                # back later to a space char)
                file = file.replace('\\ ', '\a')
                if file.find(" ") == -1:
                    dependencies.append(file.replace('\a', ' '))
                else:
                    dependencies = dependencies + [f.replace('\a', ' ') for f in file.split(" ")]
        return dependencies

    def is_not_supported_error(self, output):
        # GCC phrases the [NOT_SUPPORTED] marker differently from armcc
        return "error: #error [NOT_SUPPORTED]" in output

    def parse_output(self, output):
        # The warning/error notification is multiline:
        # line 1 gives the file, line 2 the location/severity/message.
        WHERE, WHAT = 0, 1
        state, file, message = WHERE, None, None
        for line in output.splitlines():
            match = self.goanna_parse_line(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message'),
                    target_name=self.target.name,
                    toolchain_name=self.name
                )
                continue

            # Each line should start with the file information: "filepath: ..."
            # i should point past the file path                           ^
            # avoid the first column in Windows (C:\)
            i = line.find(':', 2)
            if i == -1: continue

            if state == WHERE:
                file = line[:i]
                message = line[i+1:].strip() + ' '
                state = WHAT

            elif state == WHAT:
                match = GCC.DIAGNOSTIC_PATTERN.match(line[i+1:])
                if match is None:
                    state = WHERE
                    continue

                self.cc_info(
                    match.group('severity'),
                    file, match.group('line'),
                    message + match.group('message')
                )

    def archive(self, objects, lib_path):
        self.default_cmd([self.ar, "rcs", lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        # "libfoo.a" -> "-lfoo"; system libraries appended last
        libs = []
        for l in libraries:
            name, _ = splitext(basename(l))
            libs.append("-l%s" % name[3:])
        libs.extend(["-l%s" % l for l in self.sys_libs])

        # --start/end-group lets the linker resolve circular references
        self.default_cmd(self.hook.get_cmdline_linker(self.ld + ["-T%s" % mem_map, "-o", output] +
            objects + ["-L%s" % L for L in lib_dirs] + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]))

    @hook_tool
    def binary(self, resources, elf, bin):
        # objcopy converts the linked ELF into a raw binary image
        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, "-O", "binary", elf, bin]))
-
-
class GCC_ARM(GCC):
    # GCC toolchain from the official GNU ARM embedded distribution.
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        GCC.__init__(self, target, options, notify, macros, silent, GCC_ARM_PATH, extra_verbose=extra_verbose)

        # Use latest gcc nanolib
        if "thread-safe" not in self.options:
            self.ld.append("--specs=nano.specs")
        # These targets need newlib-nano's float printf/scanf stubs pulled in
        if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368"]:
            self.ld.extend(["-u _printf_float", "-u _scanf_float"])
        elif target.name in ["RZ_A1H", "VK_RZ_A1H", "ARCH_MAX", "DISCO_F407VG", "DISCO_F429ZI", "DISCO_F469NI", "NUCLEO_F401RE", "NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "ELMO_F411RE", "MTS_MDOT_F411RE", "MTS_DRAGONFLY_F411RE", "DISCO_F746NG"]:
            self.ld.extend(["-u_printf_float", "-u_scanf_float"])

        self.sys_libs.append("nosys")
-
-
class GCC_CR(GCC):
    # GCC toolchain from the Code Red (LPCXpresso) distribution.
    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        GCC.__init__(self, target, options, notify, macros, silent, GCC_CR_PATH, extra_verbose=extra_verbose)

        # Defines expected by the Code Red runtime/headers
        additional_compiler_flags = [
            "-D__NEWLIB__", "-D__CODE_RED", "-D__USE_CMSIS", "-DCPP_USE_HEAP",
        ]
        self.cc += additional_compiler_flags
        self.cppc += additional_compiler_flags

        # Use latest gcc nanolib
        self.ld.append("--specs=nano.specs")
        # These targets need newlib-nano's float printf/scanf stubs pulled in
        if target.name in ["LPC1768", "LPC4088", "LPC4088_DM", "LPC4330", "UBLOX_C027", "LPC2368"]:
            self.ld.extend(["-u _printf_float", "-u _scanf_float"])
        self.ld += ["-nostdlib"]
-
diff --git a/workspace_tools/toolchains/iar.py b/workspace_tools/toolchains/iar.py
deleted file mode 100644
index aba1dd7..0000000
--- a/workspace_tools/toolchains/iar.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import re
-from os import remove
-from os.path import join, exists
-
-from workspace_tools.toolchains import mbedToolchain
-from workspace_tools.settings import IAR_PATH
-from workspace_tools.settings import GOANNA_PATH
-from workspace_tools.hooks import hook_tool
-
class IAR(mbedToolchain):
    """Toolchain driver for IAR Embedded Workbench (iccarm/iasmarm/ilinkarm)."""
    LIBRARY_EXT = '.a'
    LINKER_EXT = '.icf'
    STD_LIB_NAME = "%s.a"

    # Matches iccarm diagnostics: "file",<line>  Warning|Error<message>.
    # The (?P<name>...) group names were reconstructed from the .group()
    # calls in parse_output(); the previous text had lost them and was an
    # invalid regular expression.
    DIAGNOSTIC_PATTERN = re.compile(r'"(?P<file>[^"]+)",(?P<line>[\d]+)\s+(?P<severity>Warning|Error)(?P<message>.+)')

    def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
        mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
        # iccarm has no distinct M7F cpu name; select the FPU separately below
        if target.core == "Cortex-M7F":
            cpuchoice = "Cortex-M7"
        else:
            cpuchoice = target.core
        c_flags = [
            "--cpu=%s" % cpuchoice, "--thumb",
            "--dlib_config", join(IAR_PATH, "inc", "c", "DLib_Config_Full.h"),
            "-e", # Enable IAR language extension
            "--no_wrap_diagnostics",
            # Pa050: No need to be notified about "non-native end of line sequence"
            # Pa084: Pointless integer comparison -> checks for the values of an enum, but we use values outside of the enum to notify errors (ie: NC).
            # Pa093: Implicit conversion from float to integer (ie: wait_ms(85.4) -> wait_ms(85))
            # Pa082: Operation involving two values from two registers (ie: (float)(*obj->MR)/(float)(LPC_PWM1->MR0))
            "--diag_suppress=Pa050,Pa084,Pa093,Pa082",
        ]

        if target.core == "Cortex-M7F":
            c_flags.append("--fpu=VFPv5_sp")

        if "debug-info" in self.options:
            c_flags.append("-r")
            c_flags.append("-On")
        else:
            c_flags.append("-Oh")

        IAR_BIN = join(IAR_PATH, "bin")
        main_cc = join(IAR_BIN, "iccarm")
        if target.core == "Cortex-M7F":
            self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", cpuchoice] + ["--fpu", "VFPv5_sp"]
        else:
            self.asm = [join(IAR_BIN, "iasmarm")] + ["--cpu", cpuchoice]
        if not "analyze" in self.options:
            self.cc = [main_cc] + c_flags
            self.cppc = [main_cc, "--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags
        else:
            # Route compilation through the Goanna static-analysis wrappers
            self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + c_flags
            self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags
        self.ld = join(IAR_BIN, "ilinkarm")
        self.ar = join(IAR_BIN, "iarchive")
        self.elf2bin = join(IAR_BIN, "ielftool")

    def parse_output(self, output):
        """Convert iccarm (and optional Goanna) diagnostics into cc_info events."""
        for line in output.splitlines():
            match = IAR.DIAGNOSTIC_PATTERN.match(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message'),
                    target_name=self.target.name,
                    toolchain_name=self.name
                )
            match = self.goanna_parse_line(line)
            if match is not None:
                self.cc_info(
                    match.group('severity').lower(),
                    match.group('file'),
                    match.group('line'),
                    match.group('message')
                )

    def get_dep_opt(self, dep_path):
        # iccarm flag that writes the dependency file
        return ["--dependencies", dep_path]

    def cc_extra(self, base):
        # Emit an assembly listing next to the object file
        return ["-l", base + '.s']

    def parse_dependencies(self, dep_path):
        # The IAR dependency file is simply one path per line
        return [path.strip() for path in open(dep_path).readlines()
                if (path and not path.isspace())]

    def assemble(self, source, object, includes):
        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]

    def archive(self, objects, lib_path):
        # iarchive refuses to overwrite; remove any stale archive first
        if exists(lib_path):
            remove(lib_path)
        self.default_cmd([self.ar, lib_path] + objects)

    def link(self, output, objects, libraries, lib_dirs, mem_map):
        args = [self.ld, "-o", output, "--config", mem_map, "--skip_dynamic_initialization", "--threaded_lib"]
        self.default_cmd(self.hook.get_cmdline_linker(args + objects + libraries))

    @hook_tool
    def binary(self, resources, elf, bin):
        # ielftool converts the linked ELF into a raw binary image
        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, '--bin', elf, bin]))
diff --git a/workspace_tools/upload_results.py b/workspace_tools/upload_results.py
deleted file mode 100644
index 695c849..0000000
--- a/workspace_tools/upload_results.py
+++ /dev/null
@@ -1,373 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import sys
-import argparse
-import xml.etree.ElementTree as ET
-import requests
-import urlparse
-
-def create_headers(args):
- return { 'X-Api-Key': args.api_key }
-
-def finish_command(command, response):
- print(command, response.status_code, response.reason)
- print(response.text)
-
- if response.status_code < 400:
- sys.exit(0)
- else:
- sys.exit(2)
-
-def create_build(args):
- build = {}
- build['buildType'] = args.build_type
- build['number'] = args.build_number
- build['source'] = args.build_source
- build['status'] = 'running'
-
- r = requests.post(urlparse.urljoin(args.url, "api/builds"), headers=create_headers(args), json=build)
-
- if r.status_code < 400:
- if args.property_file_format:
- print("MBED_BUILD_ID=" + r.text)
- else:
- print(r.text)
-
- sys.exit(0)
- else:
- sys.exit(2)
-
-def finish_build(args):
- data = {}
- data['status'] = 'completed'
-
- r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
- finish_command('finish-build', r)
-
-def promote_build(args):
- data = {}
- data['buildType'] = 'Release'
-
- r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
- finish_command('promote-build', r)
-
-def abort_build(args):
- data = {}
- data['status'] = 'aborted'
-
- r = requests.put(urlparse.urljoin(args.url, "api/builds/" + args.build_id), headers=create_headers(args), json=data)
- finish_command('abort-build', r)
-
-def add_project_runs(args):
- '''
- -------------------------------------
- Notes on 'project_run_data' structure:
- --------------------------------------
- 'projectRuns' - Tree structure used to keep track of what projects have
- been logged in different report files. The tree is organized as follows:
-
- 'projectRuns': { - Root element of tree
-
- 'hostOs': { - Host OS on which project was built/tested
- - ex. windows, linux, or mac
-
- 'platform': { - Platform for which project was built/tested
- (Corresponds to platform names in targets.py)
- - ex. K64F, LPC1768, NRF51822, etc.
-
- 'toolchain': { - Toolchain with which project was built/tested
- (Corresponds to TOOLCHAIN_CLASSES names in toolchains/__init__.py)
- - ex. ARM, uARM, GCC_ARM, etc.
-
- 'project': { - Project that was build/tested
- (Corresponds to test id in tests.py or library id in libraries.py)
- - For tests, ex. MBED_A1, MBED_11, DTCT_1 etc.
- - For libraries, ex. MBED, RTX, RTOS, etc.
-
- },
- ...
- },
- ...
- },
- ...
- }
- }
-
- 'platforms_set' - Set of all the platform names mentioned in the given report files
-
- 'toolchains_set' - Set of all the toolchain names mentioned in the given report files
-
- 'names_set' - Set of all the project names mentioned in the given report files
-
- 'hostOses_set' - Set of all the host names given (only given by the command line arguments)
- '''
-
- project_run_data = {}
- project_run_data['projectRuns'] = {}
- project_run_data['platforms_set'] = set()
- project_run_data['vendors_set'] = set()
- project_run_data['toolchains_set'] = set()
- project_run_data['names_set'] = set()
- project_run_data['hostOses_set'] = set()
- project_run_data['hostOses_set'].add(args.host_os)
-
- if args.build_report:
- add_report(project_run_data, args.build_report, True, args.build_id, args.host_os)
-
- if args.test_report:
- add_report(project_run_data, args.test_report, False, args.build_id, args.host_os)
-
- ts_data = format_project_run_data(project_run_data, args.limit)
- total_result = True
-
- total_parts = len(ts_data)
- print "Uploading project runs in %d parts" % total_parts
-
- for index, data in enumerate(ts_data):
- r = requests.post(urlparse.urljoin(args.url, "api/projectRuns"), headers=create_headers(args), json=data)
- print("add-project-runs part %d/%d" % (index + 1, total_parts), r.status_code, r.reason)
- print(r.text)
-
- if r.status_code >= 400:
- total_result = False
-
- if total_result:
- print "'add-project-runs' completed successfully"
- sys.exit(0)
- else:
- print "'add-project-runs' failed"
- sys.exit(2)
-
-def prep_ts_data():
- ts_data = {}
- ts_data['projectRuns'] = []
- ts_data['platforms'] = set()
- ts_data['vendors'] = set()
- ts_data['toolchains'] = set()
- ts_data['names'] = set()
- ts_data['hostOses'] = set()
- return ts_data
-
-def finish_ts_data(ts_data, project_run_data):
- ts_data['platforms'] = list(ts_data['platforms'])
- ts_data['vendors'] = list(ts_data['vendors'])
- ts_data['toolchains'] = list(ts_data['toolchains'])
- ts_data['names'] = list(ts_data['names'])
- ts_data['hostOses'] = list(ts_data['hostOses'])
-
- # Add all vendors to every projectRun submission
- # TODO Either add "vendor" to the "project_run_data"
- # or remove "vendor" entirely from the viewer
- ts_data['vendors'] = list(project_run_data['vendors_set'])
-
-def format_project_run_data(project_run_data, limit):
- all_ts_data = []
- current_limit_count = 0
-
- ts_data = prep_ts_data()
- ts_data['projectRuns'] = []
-
- for hostOs_name, hostOs in project_run_data['projectRuns'].iteritems():
- for platform_name, platform in hostOs.iteritems():
- for toolchain_name, toolchain in platform.iteritems():
- for project_name, project in toolchain.iteritems():
- if current_limit_count >= limit:
- finish_ts_data(ts_data, project_run_data)
- all_ts_data.append(ts_data)
- ts_data = prep_ts_data()
- current_limit_count = 0
-
- ts_data['projectRuns'].append(project)
- ts_data['platforms'].add(platform_name)
- ts_data['toolchains'].add(toolchain_name)
- ts_data['names'].add(project_name)
- ts_data['hostOses'].add(hostOs_name)
- current_limit_count += 1
-
- if current_limit_count > 0:
- finish_ts_data(ts_data, project_run_data)
- all_ts_data.append(ts_data)
-
- return all_ts_data
-
-def find_project_run(projectRuns, project):
- keys = ['hostOs', 'platform', 'toolchain', 'project']
-
- elem = projectRuns
-
- for key in keys:
- if not project[key] in elem:
- return None
-
- elem = elem[project[key]]
-
- return elem
-
-def add_project_run(projectRuns, project):
- keys = ['hostOs', 'platform', 'toolchain']
-
- elem = projectRuns
-
- for key in keys:
- if not project[key] in elem:
- elem[project[key]] = {}
-
- elem = elem[project[key]]
-
- elem[project['project']] = project
-
-def update_project_run_results(project_to_update, project, is_build):
- if is_build:
- project_to_update['buildPass'] = project['buildPass']
- project_to_update['buildResult'] = project['buildResult']
- project_to_update['buildOutput'] = project['buildOutput']
- else:
- project_to_update['testPass'] = project['testPass']
- project_to_update['testResult'] = project['testResult']
- project_to_update['testOutput'] = project['testOutput']
-
-def update_project_run(projectRuns, project, is_build):
- found_project = find_project_run(projectRuns, project)
- if found_project:
- update_project_run_results(found_project, project, is_build)
- else:
- add_project_run(projectRuns, project)
-
-def add_report(project_run_data, report_file, is_build, build_id, host_os):
- tree = None
-
- try:
- tree = ET.parse(report_file)
- except:
- print(sys.exc_info()[0])
- print('Invalid path to report: %s', report_file)
- sys.exit(1)
-
- test_suites = tree.getroot()
-
- for test_suite in test_suites:
- platform = ""
- toolchain = ""
- vendor = ""
- for properties in test_suite.findall('properties'):
- for property in properties.findall('property'):
- if property.attrib['name'] == 'target':
- platform = property.attrib['value']
- project_run_data['platforms_set'].add(platform)
- elif property.attrib['name'] == 'toolchain':
- toolchain = property.attrib['value']
- project_run_data['toolchains_set'].add(toolchain)
- elif property.attrib['name'] == 'vendor':
- vendor = property.attrib['value']
- project_run_data['vendors_set'].add(vendor)
-
- for test_case in test_suite.findall('testcase'):
- projectRun = {}
- projectRun['build'] = build_id
- projectRun['hostOs'] = host_os
- projectRun['platform'] = platform
- projectRun['toolchain'] = toolchain
- projectRun['project'] = test_case.attrib['classname'].split('.')[-1]
- projectRun['vendor'] = vendor
-
- project_run_data['names_set'].add(projectRun['project'])
-
- should_skip = False
- skips = test_case.findall('skipped')
-
- if skips:
- should_skip = skips[0].attrib['message'] == 'SKIP'
-
- if not should_skip:
- system_outs = test_case.findall('system-out')
-
- output = ""
- if system_outs:
- output = system_outs[0].text
-
- if is_build:
- projectRun['buildOutput'] = output
- else:
- projectRun['testOutput'] = output
-
- errors = test_case.findall('error')
- failures = test_case.findall('failure')
- projectRunPass = None
- result = None
-
- if errors:
- projectRunPass = False
- result = errors[0].attrib['message']
- elif failures:
- projectRunPass = False
- result = failures[0].attrib['message']
- elif skips:
- projectRunPass = True
- result = skips[0].attrib['message']
- else:
- projectRunPass = True
- result = 'OK'
-
- if is_build:
- projectRun['buildPass'] = projectRunPass
- projectRun['buildResult'] = result
- else:
- projectRun['testPass'] = projectRunPass
- projectRun['testResult'] = result
-
- update_project_run(project_run_data['projectRuns'], projectRun, is_build)
-
-def main(arguments):
- # Register and parse command line arguments
- parser = argparse.ArgumentParser()
- parser.add_argument('-u', '--url', required=True, help='url to ci site')
- parser.add_argument('-k', '--api-key', required=True, help='api-key for posting data')
-
- subparsers = parser.add_subparsers(help='subcommand help')
-
- create_build_parser = subparsers.add_parser('create-build', help='create a new build')
- create_build_parser.add_argument('-b', '--build-number', required=True, help='build number')
- create_build_parser.add_argument('-T', '--build-type', choices=['Nightly', 'Limited', 'Pull_Request', 'Release_Candidate'], required=True, help='type of build')
- create_build_parser.add_argument('-s', '--build-source', required=True, help='url to source of build')
- create_build_parser.add_argument('-p', '--property-file-format', action='store_true', help='print result in the property file format')
- create_build_parser.set_defaults(func=create_build)
-
- finish_build_parser = subparsers.add_parser('finish-build', help='finish a running build')
- finish_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
- finish_build_parser.set_defaults(func=finish_build)
-
- finish_build_parser = subparsers.add_parser('promote-build', help='promote a build to a release')
- finish_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
- finish_build_parser.set_defaults(func=promote_build)
-
- abort_build_parser = subparsers.add_parser('abort-build', help='abort a running build')
- abort_build_parser.add_argument('-b', '--build-id', required=True, help='build id')
- abort_build_parser.set_defaults(func=abort_build)
-
- add_project_runs_parser = subparsers.add_parser('add-project-runs', help='add project runs to a build')
- add_project_runs_parser.add_argument('-b', '--build-id', required=True, help='build id')
- add_project_runs_parser.add_argument('-r', '--build-report', required=False, help='path to junit xml build report')
- add_project_runs_parser.add_argument('-t', '--test-report', required=False, help='path to junit xml test report')
- add_project_runs_parser.add_argument('-o', '--host-os', required=True, help='host os on which test was run')
- add_project_runs_parser.add_argument('-l', '--limit', required=False, type=int, default=1000, help='Limit the number of project runs sent at a time to avoid HTTP errors (default is 1000)')
- add_project_runs_parser.set_defaults(func=add_project_runs)
-
- args = parser.parse_args(arguments)
- args.func(args)
-
-if __name__ == '__main__':
- main(sys.argv[1:])
\ No newline at end of file
diff --git a/workspace_tools/utils.py b/workspace_tools/utils.py
deleted file mode 100644
index 21f0e14..0000000
--- a/workspace_tools/utils.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""
-mbed SDK
-Copyright (c) 2011-2013 ARM Limited
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import sys
-import inspect
-import os
-from os import listdir, remove, makedirs
-from shutil import copyfile
-from os.path import isdir, join, exists, split, relpath, splitext
-from subprocess import Popen, PIPE, STDOUT, call
-
-
-def cmd(l, check=True, verbose=False, shell=False, cwd=None):
- text = l if shell else ' '.join(l)
- if verbose:
- print text
- rc = call(l, shell=shell, cwd=cwd)
- if check and rc != 0:
- raise Exception('ERROR %d: "%s"' % (rc, text))
-
-
-def run_cmd(command, wd=None, redirect=False):
- assert is_cmd_valid(command[0])
- p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=wd)
- _stdout, _stderr = p.communicate()
- return _stdout, _stderr, p.returncode
-
-
-def run_cmd_ext(command):
- assert is_cmd_valid(command[0])
- p = Popen(command, stdout=PIPE, stderr=PIPE)
- _stdout, _stderr = p.communicate()
- return _stdout, _stderr, p.returncode
-
-
-def is_cmd_valid(cmd):
- caller = get_caller_name()
- abspath = find_cmd_abspath(cmd)
- if not abspath:
- error("%s: Command '%s' can't be found" % (caller, cmd))
- if not is_exec(abspath):
- error("%s: Command '%s' resolves to file '%s' which is not executable" % (caller, cmd, abspath))
- return True
-
-
-def is_exec(path):
- return os.access(path, os.X_OK) or os.access(path+'.exe', os.X_OK)
-
-
-def find_cmd_abspath(cmd):
- """ Returns the absolute path to a command.
- None is returned if no absolute path was found.
- """
- if exists(cmd) or exists(cmd + '.exe'):
- return os.path.abspath(cmd)
- if not 'PATH' in os.environ:
- raise Exception("Can't find command path for current platform ('%s')" % sys.platform)
- PATH=os.environ['PATH']
- for path in PATH.split(os.pathsep):
- abspath = '%s/%s' % (path, cmd)
- if exists(abspath) or exists(abspath + '.exe'):
- return abspath
-
-
-def mkdir(path):
- if not exists(path):
- makedirs(path)
-
-
-def copy_file(src, dst):
- """ Implement the behaviour of "shutil.copy(src, dst)" without copying the
- permissions (this was causing errors with directories mounted with samba)
- """
- if isdir(dst):
- _, file = split(src)
- dst = join(dst, file)
- copyfile(src, dst)
-
-
-def delete_dir_files(dir):
- if not exists(dir):
- return
-
- for f in listdir(dir):
- file = join(dir, f)
- if not isdir(file):
- remove(file)
-
-
-def get_caller_name(steps=2):
- """
- When called inside a function, it returns the name
- of the caller of that function.
- """
- return inspect.stack()[steps][3]
-
-
-def error(msg):
- print("ERROR: %s" % msg)
- sys.exit(1)
-
-
-def rel_path(path, base, dot=False):
- p = relpath(path, base)
- if dot and not p.startswith('.'):
- p = './' + p
- return p
-
-
-class ToolException(Exception):
- pass
-
-class NotSupportedException(Exception):
- pass
-
-def split_path(path):
- base, file = split(path)
- name, ext = splitext(file)
- return base, name, ext
-
-
-def args_error(parser, message):
- print "\n\n%s\n\n" % message
- parser.print_help()
- sys.exit()
-
-
-def construct_enum(**enums):
- """ Create your own pseudo-enums """
- return type('Enum', (), enums)
-
-
-def check_required_modules(required_modules, verbose=True):
- """ Function checks for Python modules which should be "importable" (installed)
- before test suite can be used.
- @return returns True if all modules are installed already
- """
- import imp
- not_installed_modules = []
- for module_name in required_modules:
- try:
- imp.find_module(module_name)
- except ImportError as e:
- # We also test against a rare case: module is an egg file
- try:
- __import__(module_name)
- except ImportError as e:
- not_installed_modules.append(module_name)
- if verbose:
- print "Error: %s" % e
-
- if verbose:
- if not_installed_modules:
- print "Warning: Module(s) %s not installed. Please install required module(s) before using this script."% (', '.join(not_installed_modules))
-
- if not_installed_modules:
- return False
- else:
- return True