diff --git a/.gitignore b/.gitignore
index 77539e148e8..7ba092e071e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,7 +7,7 @@ dist
MANIFEST

# Private settings
-private_settings.py
+mbed_settings.py

# Default Build Directory
.build/
diff --git a/.travis.yml b/.travis.yml
index 5b21d775468..b1880c0babe 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,7 +1,7 @@
python:
- "2.7"
-script: "python workspace_tools/build_travis.py"
+script: "python tools/build_travis.py"
before_install:
- sudo add-apt-repository -y ppa:terry.guo/gcc-arm-embedded
- sudo apt-get update -qq
diff --git a/MANIFEST.in b/MANIFEST.in
index 17993153c5e..346f3498019 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,3 @@
-graft workspace_tools
-recursive-exclude workspace_tools *.pyc
+graft tools
+recursive-exclude tools *.pyc
include LICENSE
diff --git a/README.md b/README.md
index 1bca69e4015..147cdcdc8ac 100644
--- a/README.md
+++ b/README.md
@@ -157,7 +157,7 @@ Develop
```
> "venv/Scripts/activate"
> pip install -r requirements.txt
-> cd workspace_tools
+> cd tools
> ... do things ...
> "venv/Scripts/deactivate"
```
diff --git a/docs/BUILDING.md b/docs/BUILDING.md
index f84e3f6460a..8f02c1a0cfb 100644
--- a/docs/BUILDING.md
+++ b/docs/BUILDING.md
@@ -130,11 +130,11 @@ Checking out files: 100% (3994/3994), done.
```
$ cd mbed
$ ls
-LICENSE MANIFEST.in README.md libraries setup.py travis workspace_tools
+LICENSE MANIFEST.in README.md libraries setup.py travis tools
```
Directory structure we are interested in:
```
- mbed/workspace_tools/ - test suite scripts, build scripts etc.
+ mbed/tools/ - test suite scripts, build scripts etc.
  mbed/libraries/tests/ - mbed SDK tests,
  mbed/libraries/tests/mbed/ - tests for mbed SDK and peripherals tests,
  mbed/libraries/tests/net/echo/ - tests for Ethernet interface,
@@ -153,9 +153,9 @@ Workspace tools are set of Python scripts used off-line by Mbed SDK team to:

Before we can run our first test we need to configure our test environment a little!
Now we need to tell workspace tools where our compilers are.
-* Please to go ```mbed/workspace_tools/``` directory and create empty file called ```private_settings.py```.
+* Please go to the ```mbed``` directory and create an empty file called ```mbed_settings.py```.
```
-$ touch private_settings.py
+$ touch mbed_settings.py
```
* Populate this file with the Python code below:
```python
@@ -203,13 +203,13 @@ GCC_CR_PATH = "C:/Work/toolchains/LPCXpresso_6.1.4_194/lpcxpresso/tools/bin"
IAR_PATH = "C:/Work/toolchains/iar_6_5/arm"
```
-Note: Settings in ```private_settings.py``` will overwrite variables with default values in ```mbed/workspace_tools/settings.py``` file.
+Note: Settings in ```mbed_settings.py``` will override the default values in the ```mbed/default_settings.py``` file.

## Build Mbed SDK library from sources
-Let's build mbed SDK library off-line from sources using your compiler. We've already cloned mbed SDK sources, we've also installed compilers and added their paths to ```private_settings.py```.
+Let's build the mbed SDK library off-line from sources using your compiler. We've already cloned the mbed SDK sources, installed the compilers and added their paths to ```mbed_settings.py```.
We should now be ready to use the workspace tools script ```build.py``` to compile and build the mbed SDK from sources.
-We are still using console. You should be already in ```mbed/workspace_tools/``` directory if not go to ```mbed/workspace_tools/``` and type below command:
+We are still using the console.
You should already be in the ```mbed/tools/``` directory; if not, go to ```mbed/tools/``` and type the command below:
```
$ python build.py -m LPC1768 -t ARM
```
@@ -276,7 +276,7 @@ Build successes:

### build.py script

-Build script located in mbed/workspace_tools/ is our core script solution to drive compilation, linking and building process for:
+The build script located in mbed/tools/ is our core script for driving the compilation, linking and build process for:

* mbed SDK (with libs like Ethernet, RTOS, USB, USB host).
* Tests, which can also be linked with libraries like RTOS or Ethernet.
@@ -426,7 +426,7 @@ $ python build.py -t uARM -m NUCLEO_F334R8 --cppcheck
```

# make.py script
-```make.py``` is a ```mbed/workspace_tools/``` script used to build tests (we call them sometimes 'programs') one by one manually. This script allows you to flash board, execute and test it. However, this script is deprecated and will not be described here. Instead please use ```singletest.py``` file to build mbed SDK, tests and run automation for test cases included in ```mbedmicro/mbed```.
+```make.py``` is a ```mbed/tools/``` script used to build tests (we sometimes call them 'programs') one by one, manually. This script allows you to flash a board, then execute and test it. However, this script is deprecated and will not be described here. Instead, please use ```singletest.py``` to build the mbed SDK and tests, and to run the automation for the test cases included in ```mbedmicro/mbed```.

Note: the ```make.py``` script depends on an already built mbed SDK and library sources, so you need to pre-build the mbed SDK and other libraries (such as the RTOS library) to link a 'program' (test) with the mbed SDK and RTOS library. To pre-build the mbed SDK please use the ```build.py``` script.

Just for the sake of example, please see a few ways to use ```make.py``` together with the Freedom K64F board below.
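For instance, a single named test could be built for the Freedom K64F with GCC_ARM like this (the `-m`/`-t`/`-n` switches are the same ones exercised by `tools/build_travis.py` later in this diff; the test name is a placeholder):
```
$ python tools/make.py -m K64F -t GCC_ARM -n EXAMPLE_1
```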
diff --git a/docs/COMMITTERS.md b/docs/COMMITTERS.md
index 473c7492297..4e431e0c8fb 100644
--- a/docs/COMMITTERS.md
+++ b/docs/COMMITTERS.md
@@ -198,7 +198,7 @@ $ astyle.exe --style=kr --indent=spaces=4 --indent-switches $(FULL_CURRENT_PATH)
```

## Python coding rules & coding guidelines
-Some of our tools in workspace_tools are written in ```Python 2.7```. In case of developing tools for python we prefer to keep similar code styles across all Python source code. Please note that not all rules must be enforced. For example we do not limit you to 80 characters per line, just be sure your code can fit to widescreen display.
+Some of our tools in the ```tools``` directory are written in ```Python 2.7```. When developing tools in Python we prefer to keep similar code styles across all Python source code. Please note that not all rules must be enforced. For example, we do not limit you to 80 characters per line; just be sure your code fits on a widescreen display.

Please stay compatible with ```Python 2.7```, but nothing stops you from writing your code so that in the future it will be Python 3 friendly.

@@ -211,7 +211,7 @@ Some general guidelines:
* Please document your code, write comments and ```doc``` sections for each function or class you implement.

### Static Code Analyzers for Python
-If you are old-school developer for sure you remember tools like lint. "lint was the name originally given to a particular program that flagged some suspicious and non-portable constructs (likely to be bugs) in C language source code." Now lint-like programs are used to check similar code issues for multiple languages, also for Python. Please do use them if you want to commit new code to workspace_tools and other mbed SDK Python tooling.
+If you are an old-school developer you surely remember tools like lint. "lint was the name originally given to a particular program that flagged some suspicious and non-portable constructs (likely to be bugs) in C language source code." Now lint-like programs are used to check for similar code issues in multiple languages, including Python. Please do use them if you want to commit new code to tools and other mbed SDK Python tooling.

Below is a list of Python lint tools you may want to use:

@@ -254,7 +254,7 @@ class HostRegistry:
```

## Testing
-Please refer to TESTING.md document for detais regarding mbed SDK test suite and build scripts included in ```mbed/workspace_tools/```.
+Please refer to the TESTING.md document for details regarding the mbed SDK test suite and the build scripts included in ```mbed/tools/```.

## Before pull request checklist
* Your pull request description section contains:
diff --git a/docs/TESTING.md b/docs/TESTING.md
index 76980b4e224..6c867351fd9 100644
--- a/docs/TESTING.md
+++ b/docs/TESTING.md
@@ -6,7 +6,7 @@ Test suit allows users to run locally on their machines Mbed SDK’s tests inclu
Each test is supervised by a Python script called “host test”, which will, at a minimum, monitor the test's execution. The test suite uses the build script API to compile and build each test's sources together with the libraries the test requires, such as CMSIS, Mbed, Ethernet and USB.

## What is host test?
-Test suite supports test supervisor concept. This concept is realized by separate Python script called ```host test```. Host tests can be found in ```mbed/workspace_tools/host_tests/``` directory. Note: In newer mbed versions (mbed OS) host tests will be separate library.
+The test suite supports a test supervisor concept, realized by a separate Python script called a ```host test```. Host tests can be found in the ```mbed/tools/host_tests/``` directory. Note: in newer mbed versions (mbed OS) host tests will be a separate library.

The host test script is executed in parallel with the test runner to monitor test execution. A basic host test just monitors the device's default serial port for test results returned by the test runner. Simple tests will print their result on the serial port. In other cases the host test can, for example, judge from the results returned by the test runner whether the test passed or failed. It all depends on the test itself.

In some cases the host test can be a TCP server echoing packets from the test runner and judging packet loss. In other cases it can just check whether values returned by an accelerometer are plausible (a sanity check).

## Test suite core: singletest.py script

-```singletest.py``` script located in ```mbed/workspace_tools/``` is a test suite script which allows users to compile, build tests and test runners (also supports CppUTest unit test library). Script also is responsible for test execution on devices selected by configuration files.
+The ```singletest.py``` script located in ```mbed/tools/``` is a test suite script which allows users to compile and build tests and test runners (it also supports the CppUTest unit test library). The script is also responsible for test execution on the devices selected by configuration files.

### Parameters of singletest.py

@@ -37,7 +37,7 @@ After connecting boards to our host machine (PC) we can check which serial ports
* ```NUCLEO_F103RB``` serial port is on ```COM11``` and disk drive is ```I:```.
If you are working under Linux your port and disk could look like ```/dev/ttyACM5``` and ```/media/usb5```.

-This information is needed to create ```muts_all.json``` configuration file.
You can create it in ```mbed/workspace_tools/``` directory: +This information is needed to create ```muts_all.json``` configuration file. You can create it in ```mbed/tools/``` directory: ``` $ touch muts_all.json ``` @@ -67,8 +67,8 @@ Its name will be passed to ```singletest.py``` script after ```-M``` (MUTs speci Note: We will leave field ```peripherals``` empty for the sake of this example. We will explain it later. All you need to do now is to properly fill fields ```mcu```, ```port``` and ```disk```. -Note: Please make sure files muts_all.json and test_spec.json are in workspace_tools/ directory. We will assume in this example they are. -Where to find ```mcu``` names? You can use option ```-S``` of ```build.py``` script (in ```mbed/workspace_tools/``` directory) to check all supported off-line MCUs names. +Note: Please make sure files muts_all.json and test_spec.json are in tools/ directory. We will assume in this example they are. +Where to find ```mcu``` names? You can use option ```-S``` of ```build.py``` script (in ```mbed/tools/``` directory) to check all supported off-line MCUs names. Note: If you update mbed device firmware or even disconnect / reconnect mbed device you may find that serial port / disk configuration changed. You need to update configuration file accordingly or you will face connection problems and obviously tests will not run. @@ -172,9 +172,9 @@ For our example purposes let's assume we only have Keil ARM compiler, so let's c ``` #### Run your tests -After you configure all your MUTs and compilers you are ready to run tests. Make sure your devices are connected and your configuration files reflect your current configuration (serial ports, devices). Go to workspace_tools directory in your mbed location. +After you configure all your MUTs and compilers you are ready to run tests. Make sure your devices are connected and your configuration files reflect your current configuration (serial ports, devices). Go to tools directory in your mbed location. ``` -$ cd workspace_tools/ +$ cd tools/ ``` and execute test suite script. ``` @@ -244,7 +244,7 @@ In below example we would like to have all test binaries called ```firmware.bin` ``` $ python singletest.py -i test_spec.json -M muts_all.json --firmware-name firmware ``` -* Where to find test list? Tests are defined in file ```tests.py``` in ```mbed/workspace_tools/``` directory. ```singletest.py``` uses test metadata in ```tests.py``` to resolve libraries dependencies and build tests for proper platforms and peripherals. Option ```-R``` can be used to get test names and direct path and test configuration. +* Where to find test list? Tests are defined in file ```tests.py``` in ```mbed/tools/``` directory. ```singletest.py``` uses test metadata in ```tests.py``` to resolve libraries dependencies and build tests for proper platforms and peripherals. Option ```-R``` can be used to get test names and direct path and test configuration. ``` $ python singletest.py -R +-------------+-----------+---------------------------------------+--------------+-------------------+----------+--------------------------------------------------------+ @@ -344,7 +344,7 @@ test_spec.json: ``` Note: * Please make sure device is connected before we will start running tests. -* Please make sure files ```muts_all.json``` and ```test_spec.json``` are in ```mbed/workspace_tools/``` directory. +* Please make sure files ```muts_all.json``` and ```test_spec.json``` are in ```mbed/tools/``` directory. 
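Before launching ```singletest.py``` it can be worth checking that both JSON files at least parse. A minimal standalone sketch (not part of the test suite) that validates the MUT fields described above:
```python
import json

# Hedged sketch: parse both configuration files and confirm every MUT entry
# carries the fields singletest.py relies on ("mcu", "port", "disk").
for name in ("muts_all.json", "test_spec.json"):
    with open(name) as f:
        json.load(f)  # raises ValueError if the file is malformed JSON

with open("muts_all.json") as f:
    for index, mut in json.load(f).items():
        for key in ("mcu", "port", "disk"):
            if not mut.get(key):
                print "MUT %s is missing '%s'" % (index, key)
```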
Now you can call the test suite and execute the tests:
```
$ python singletest.py -i test_spec.json -M muts_all.json
```
@@ -451,7 +451,7 @@ We want to create directory structure similar to one below:
└───mbed
    ├───libraries
    ├───travis
-   └───workspace_tools
+   └───tools
```

Please go to the directory with your project. For example, it could be c:\Projects\Project.
@@ -492,7 +492,7 @@ $ git clone https://github.com/mbedmicro/mbed.git
$ hg clone https://mbed.org/users/rgrover1/code/cpputest/
```

-After above three steps you should have proper directory structure. All you need to do now is to configure your ```private_settings.py``` in ```mbed/workspace_tools/``` directory. Please refer to mbed SDK build script documentation for details.
+After the above three steps you should have the proper directory structure. All you need to do now is to configure your ```mbed_settings.py``` in the ```mbed``` directory. Please refer to the mbed SDK build script documentation for details.

## CppUTest with mbed port
To make sure you actually have the CppUTest library with the mbed SDK port, you can go to the CppUTest ```armcc``` platform directory:
@@ -577,7 +577,7 @@ utest
```

## Define unit tests in mbed SDK test suite structure
-All tests defined in test suite are described in ```mbed/workspace_tools/tests.py``` file. This file stores data structure ```TESTS``` which is a list of simple structures describing each test. Below you can find example of ```TESTS``` structure which is configuring one of the unit tests.
+All tests defined in the test suite are described in the ```mbed/tools/tests.py``` file. This file stores the data structure ```TESTS```, which is a list of simple structures describing each test. Below you can find an example of a ```TESTS``` entry which configures one of the unit tests.
```
.
.
diff --git a/docs/mbed_targets.md b/docs/mbed_targets.md
index d7f1d052dce..c61cd98ffe6 100644
--- a/docs/mbed_targets.md
+++ b/docs/mbed_targets.md
@@ -1,6 +1,6 @@
# Adding and configuring mbed targets

-mbed uses JSON as a description language for its build targets. The JSON description of mbed targets can be found in `workspace_tools/targets.json`. To better understand how a target is defined, we'll use this example (taken from `targets.json`):
+mbed uses JSON as a description language for its build targets. The JSON description of mbed targets can be found in `tools/targets.json`. To better understand how a target is defined, we'll use this example (taken from `targets.json`):

```
"TEENSY3_1": {
@@ -173,4 +173,4 @@ This property is used to pass additional data to the project generator (used to
```
The `target` property of `progen` specifies the target name that must be used for the exporter (if different than the mbed target name).
-For each exporter, a template for exporting can also be specified. In this example, the template used for generating a uVision project file is in a file called `uvision_microlib.uvproj.tmpl`. It is assumed that all the templates are located in `workspace_tools/export`.
+For each exporter, a template for exporting can also be specified. In this example, the template used for generating a uVision project file is in a file called `uvision_microlib.uvproj.tmpl`. It is assumed that all the templates are located in `tools/export`.
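The `progen` lookup that `docs/mbed_targets.md` describes is easy to illustrate. A hedged sketch (not a script shipped in `tools/`; it assumes it is run from the repository root, and only the target name and the `progen`/`target` keys are taken from the example above):
```python
import json

# Read the JSON target descriptions introduced in docs/mbed_targets.md.
with open("tools/targets.json") as f:
    targets = json.load(f)

# Each top-level key is an mbed target name, e.g. "TEENSY3_1".
progen = targets.get("TEENSY3_1", {}).get("progen", {})

# 'target' is the exporter-specific name for this target (if it differs from
# the mbed name); export templates are expected to live under tools/export.
print progen.get("target")
```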
diff --git a/setup.py b/setup.py index f2e8642210a..8dc00b79c0c 100644 --- a/setup.py +++ b/setup.py @@ -16,17 +16,17 @@ OWNER_NAMES = 'emilmont, bogdanm' OWNER_EMAILS = 'Emilio.Monti@arm.com, Bogdan.Marinescu@arm.com' -# If private_settings.py exists in workspace_tools, read it in a temporary file +# If mbed_settings.py exists in tools, read it in a temporary file # so it can be restored later -private_settings = join('workspace_tools', 'private_settings.py') +mbed_settings = join('mbed_settings.py') backup = None -if isfile(private_settings): +if isfile(mbed_settings): backup = TemporaryFile() - with open(private_settings, "rb") as f: + with open(mbed_settings, "rb") as f: copyfileobj(f, backup) -# Create the correct private_settings.py for the distribution -with open(private_settings, "wt") as f: +# Create the correct mbed_settings.py for the distribution +with open(mbed_settings, "wt") as f: f.write("from mbed_settings import *\n") setup(name='mbed-tools', @@ -42,8 +42,8 @@ license=LICENSE, install_requires=["PrettyTable>=0.7.2", "PySerial>=2.7", "IntelHex>=1.3", "colorama>=0.3.3", "Jinja2>=2.7.3", "project-generator>=0.8.11,<0.9.0", "junit-xml", "requests", "pyYAML"]) -# Restore previous private_settings if needed +# Restore previous mbed_settings if needed if backup: backup.seek(0) - with open(private_settings, "wb") as f: + with open(mbed_settings, "wb") as f: copyfileobj(backup, f) diff --git a/workspace_tools/.mbedignore b/tools/.mbedignore similarity index 100% rename from workspace_tools/.mbedignore rename to tools/.mbedignore diff --git a/workspace_tools/__init__.py b/tools/__init__.py similarity index 100% rename from workspace_tools/__init__.py rename to tools/__init__.py diff --git a/workspace_tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin b/tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin rename to tools/bootloaders/MTS_DRAGONFLY_F411RE/bootloader.bin diff --git a/workspace_tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin b/tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin rename to tools/bootloaders/MTS_MDOT_F411RE/bootloader.bin diff --git a/workspace_tools/build.py b/tools/build.py old mode 100755 new mode 100644 similarity index 82% rename from workspace_tools/build.py rename to tools/build.py index f61bbfc9e72..6fb01df5321 --- a/workspace_tools/build.py +++ b/tools/build.py @@ -27,14 +27,14 @@ sys.path.insert(0, ROOT) -from workspace_tools.toolchains import TOOLCHAINS -from workspace_tools.targets import TARGET_NAMES, TARGET_MAP -from workspace_tools.options import get_default_options_parser -from workspace_tools.build_api import build_mbed_libs, build_lib -from workspace_tools.build_api import mcu_toolchain_matrix -from workspace_tools.build_api import static_analysis_scan, static_analysis_scan_lib, static_analysis_scan_library -from workspace_tools.build_api import print_build_results -from workspace_tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT +from tools.toolchains import TOOLCHAINS +from tools.targets import TARGET_NAMES, TARGET_MAP +from tools.options import get_default_options_parser +from tools.build_api import build_library, build_mbed_libs, build_lib +from tools.build_api import mcu_toolchain_matrix +from tools.build_api import static_analysis_scan, static_analysis_scan_lib, 
static_analysis_scan_library +from tools.build_api import print_build_results +from tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT if __name__ == '__main__': start = time() @@ -42,6 +42,15 @@ # Parse Options parser = get_default_options_parser() + parser.add_option("--source", dest="source_dir", + default=None, help="The source (input) directory", action="append") + + parser.add_option("--build", dest="build_dir", + default=None, help="The build (output) directory") + + parser.add_option("--no-archive", dest="no_archive", action="store_true", + default=False, help="Do not produce archive (.ar) file, but rather .o") + # Extra libraries parser.add_option("-r", "--rtos", action="store_true", @@ -119,7 +128,7 @@ help='For some commands you can use filter to filter out results') parser.add_option("-j", "--jobs", type="int", dest="jobs", - default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs") + default=0, help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)") parser.add_option("-v", "--verbose", action="store_true", @@ -183,7 +192,7 @@ if options.usb_host: libraries.append("usb_host") if options.dsp: - libraries.extend(["cmsis_dsp", "dsp"]) + libraries.extend(["dsp"]) if options.fat: libraries.extend(["fat"]) if options.ublox: @@ -224,7 +233,18 @@ tt_id = "%s::%s" % (toolchain, target) try: mcu = TARGET_MAP[target] - lib_build_res = build_mbed_libs(mcu, toolchain, + if options.source_dir: + lib_build_res = build_library(options.source_dir, options.build_dir, mcu, toolchain, + options=options.options, + extra_verbose=options.extra_verbose_notify, + verbose=options.verbose, + silent=options.silent, + jobs=options.jobs, + clean=options.clean, + archive=(not options.no_archive), + macros=options.macros) + else: + lib_build_res = build_mbed_libs(mcu, toolchain, options=options.options, extra_verbose=options.extra_verbose_notify, verbose=options.verbose, @@ -232,6 +252,7 @@ jobs=options.jobs, clean=options.clean, macros=options.macros) + for lib_id in libraries: build_lib(lib_id, mcu, toolchain, options=options.options, diff --git a/workspace_tools/build_api.py b/tools/build_api.py similarity index 68% rename from workspace_tools/build_api.py rename to tools/build_api.py index 3dca0adbaec..e00f9c61d89 100644 --- a/workspace_tools/build_api.py +++ b/tools/build_api.py @@ -19,20 +19,22 @@ import tempfile import colorama - +from copy import copy from types import ListType from shutil import rmtree -from os.path import join, exists, basename +from os.path import join, exists, basename, abspath, normpath +from os import getcwd, walk from time import time +import fnmatch -from workspace_tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException -from workspace_tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON -from workspace_tools.targets import TARGET_NAMES, TARGET_MAP -from workspace_tools.libraries import Library -from workspace_tools.toolchains import TOOLCHAIN_CLASSES +from tools.utils import mkdir, run_cmd, run_cmd_ext, NotSupportedException, ToolException +from tools.paths import MBED_TARGETS_PATH, MBED_LIBRARIES, MBED_API, MBED_HAL, MBED_COMMON +from tools.targets import TARGET_NAMES, TARGET_MAP +from tools.libraries import Library +from tools.toolchains import TOOLCHAIN_CLASSES from jinja2 import FileSystemLoader from jinja2.environment import Environment - +from tools.config import Config def prep_report(report, target_name, toolchain_name, 
id_name): # Setup report keys @@ -75,37 +77,90 @@ def add_result_to_report(report, result): result_wrap = { 0: result } report[target][toolchain][id_name].append(result_wrap) +def get_config(src_path, target, toolchain_name): + # Convert src_path to a list if needed + src_paths = [src_path] if type(src_path) != ListType else src_path + # We need to remove all paths which are repeated to avoid + # multiple compilations and linking with the same objects + src_paths = [src_paths[0]] + list(set(src_paths[1:])) + + # Create configuration object + config = Config(target, src_paths) + + # If the 'target' argument is a string, convert it to a target instance + if isinstance(target, str): + try: + target = TARGET_MAP[target] + except KeyError: + raise KeyError("Target '%s' not found" % target) + + # Toolchain instance + try: + toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options=None, notify=None, macros=None, silent=True, extra_verbose=False) + except KeyError as e: + raise KeyError("Toolchain %s not supported" % toolchain_name) + + # Scan src_path for config files + resources = toolchain.scan_resources(src_paths[0]) + for path in src_paths[1:]: + resources.add(toolchain.scan_resources(path)) + + config.add_config_files(resources.json_files) + return config.get_config_data() + def build_project(src_path, build_path, target, toolchain_name, libraries_paths=None, options=None, linker_script=None, clean=False, notify=None, verbose=False, name=None, macros=None, inc_dirs=None, - jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None, extra_verbose=False): + jobs=1, silent=False, report=None, properties=None, project_id=None, project_description=None, + extra_verbose=False, config=None): """ This function builds project. Project can be for example one test / UT """ - # Toolchain instance - toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose) - toolchain.VERBOSE = verbose - toolchain.jobs = jobs - toolchain.build_all = clean + + # Convert src_path to a list if needed src_paths = [src_path] if type(src_path) != ListType else src_path # We need to remove all paths which are repeated to avoid # multiple compilations and linking with the same objects src_paths = [src_paths[0]] + list(set(src_paths[1:])) - PROJECT_BASENAME = basename(src_paths[0]) + first_src_path = src_paths[0] if src_paths[0] != "." 
and src_paths[0] != "./" else getcwd() + abs_path = abspath(first_src_path) + project_name = basename(normpath(abs_path)) + + # If the configuration object was not yet created, create it now + config = config or Config(target, src_paths) + + # If the 'target' argument is a string, convert it to a target instance + if isinstance(target, str): + try: + target = TARGET_MAP[target] + except KeyError: + raise KeyError("Target '%s' not found" % target) + + # Toolchain instance + try: + toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, notify, macros, silent, extra_verbose=extra_verbose) + except KeyError as e: + raise KeyError("Toolchain %s not supported" % toolchain_name) + + toolchain.VERBOSE = verbose + toolchain.jobs = jobs + toolchain.build_all = clean if name is None: # We will use default project name based on project folder name - name = PROJECT_BASENAME - toolchain.info("Building project %s (%s, %s)" % (PROJECT_BASENAME.upper(), target.name, toolchain_name)) + name = project_name + toolchain.info("Building project %s (%s, %s)" % (project_name, target.name, toolchain_name)) else: # User used custom global project name to have the same name for the - toolchain.info("Building project %s to %s (%s, %s)" % (PROJECT_BASENAME.upper(), name, target.name, toolchain_name)) + toolchain.info("Building project %s to %s (%s, %s)" % (project_name, name, target.name, toolchain_name)) if report != None: start = time() - id_name = project_id.upper() - description = project_description + + # If project_id is specified, use that over the default name + id_name = project_id.upper() if project_id else name.upper() + description = project_description if project_description else name vendor_label = target.extra_labels[0] cur_result = None prep_report(report, target.name, toolchain_name, id_name) @@ -139,17 +194,22 @@ def build_project(src_path, build_path, target, toolchain_name, resources.inc_dirs.extend(inc_dirs) else: resources.inc_dirs.append(inc_dirs) + + # Update the configuration with any .json files found while scanning + config.add_config_files(resources.json_files) + # And add the configuration macros to the toolchain + toolchain.add_macros(config.get_config_data_macros()) + # Compile Sources for path in src_paths: src = toolchain.scan_resources(path) objects = toolchain.compile_sources(src, build_path, resources.inc_dirs) resources.objects.extend(objects) - # Link Program - res, needed_update = toolchain.link_program(resources, build_path, name) + res, _ = toolchain.link_program(resources, build_path, name) - if report != None and needed_update: + if report != None: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() @@ -170,6 +230,155 @@ def build_project(src_path, build_path, target, toolchain_name, cur_result["elapsed_time"] = end - start + toolchain_output = toolchain.get_output() + if toolchain_output: + cur_result["output"] += toolchain_output + + add_result_to_report(report, cur_result) + + # Let Exception propagate + raise e + +def build_library(src_paths, build_path, target, toolchain_name, + dependencies_paths=None, options=None, name=None, clean=False, archive=True, + notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None, + jobs=1, silent=False, report=None, properties=None, extra_verbose=False, + project_id=None): + """ src_path: the path of the source directory + build_path: the path of the build directory + target: ['LPC1768', 'LPC11U24', 'LPC2368'] + toolchain: ['ARM', 'uARM', 'GCC_ARM', 'GCC_CR'] + 
library_paths: List of paths to additional libraries + clean: Rebuild everything if True + notify: Notify function for logs + verbose: Write the actual tools command lines if True + inc_dirs: additional include directories which should be included in build + inc_dirs_ext: additional include directories which should be copied to library directory + """ + if type(src_paths) != ListType: + src_paths = [src_paths] + + # The first path will give the name to the library + project_name = basename(src_paths[0] if src_paths[0] != "." and src_paths[0] != "./" else getcwd()) + if name is None: + # We will use default project name based on project folder name + name = project_name + + if report != None: + start = time() + + # If project_id is specified, use that over the default name + id_name = project_id.upper() if project_id else name.upper() + description = name + vendor_label = target.extra_labels[0] + cur_result = None + prep_report(report, target.name, toolchain_name, id_name) + cur_result = create_result(target.name, toolchain_name, id_name, description) + + if properties != None: + prep_properties(properties, target.name, toolchain_name, vendor_label) + + for src_path in src_paths: + if not exists(src_path): + error_msg = "The library source folder does not exist: %s", src_path + + if report != None: + cur_result["output"] = error_msg + cur_result["result"] = "FAIL" + add_result_to_report(report, cur_result) + + raise Exception(error_msg) + + try: + # Toolchain instance + toolchain = TOOLCHAIN_CLASSES[toolchain_name](target, options, macros=macros, notify=notify, silent=silent, extra_verbose=extra_verbose) + toolchain.VERBOSE = verbose + toolchain.jobs = jobs + toolchain.build_all = clean + + toolchain.info("Building library %s (%s, %s)" % (name, target.name, toolchain_name)) + + # Scan Resources + resources = None + for path in src_paths: + # Scan resources + resource = toolchain.scan_resources(path) + + # Copy headers, objects and static libraries - all files needed for static lib + toolchain.copy_files(resource.headers, build_path, rel_path=resource.base_path) + toolchain.copy_files(resource.objects, build_path, rel_path=resource.base_path) + toolchain.copy_files(resource.libraries, build_path, rel_path=resource.base_path) + if resource.linker_script: + toolchain.copy_files(resource.linker_script, build_path, rel_path=resource.base_path) + + # Extend resources collection + if not resources: + resources = resource + else: + resources.add(resource) + + # We need to add if necessary additional include directories + if inc_dirs: + if type(inc_dirs) == ListType: + resources.inc_dirs.extend(inc_dirs) + else: + resources.inc_dirs.append(inc_dirs) + + # Add extra include directories / files which are required by library + # This files usually are not in the same directory as source files so + # previous scan will not include them + if inc_dirs_ext is not None: + for inc_ext in inc_dirs_ext: + resources.add(toolchain.scan_resources(inc_ext)) + + # Dependencies Include Paths + if dependencies_paths is not None: + for path in dependencies_paths: + lib_resources = toolchain.scan_resources(path) + resources.inc_dirs.extend(lib_resources.inc_dirs) + + if archive: + # Use temp path when building archive + tmp_path = join(build_path, '.temp') + mkdir(tmp_path) + else: + tmp_path = build_path + + # Handle configuration + config = Config(target) + # Update the configuration with any .json files found while scanning + config.add_config_files(resources.json_files) + # And add the configuration macros to the 
toolchain + toolchain.add_macros(config.get_config_data_macros()) + + # Compile Sources + for path in src_paths: + src = toolchain.scan_resources(path) + objects = toolchain.compile_sources(src, abspath(tmp_path), resources.inc_dirs) + resources.objects.extend(objects) + + if archive: + toolchain.build_library(objects, build_path, name) + + if report != None: + end = time() + cur_result["elapsed_time"] = end - start + cur_result["output"] = toolchain.get_output() + cur_result["result"] = "OK" + + add_result_to_report(report, cur_result) + + except Exception, e: + if report != None: + end = time() + + if isinstance(e, ToolException): + cur_result["result"] = "FAIL" + elif isinstance(e, NotSupportedException): + cur_result["result"] = "NOT_SUPPORTED" + + cur_result["elapsed_time"] = end - start + toolchain_output = toolchain.get_output() if toolchain_output: cur_result["output"] += toolchain_output @@ -181,11 +390,32 @@ def build_project(src_path, build_path, target, toolchain_name, # Let Exception propagate raise e +###################### +### Legacy methods ### +###################### -def build_library(src_paths, build_path, target, toolchain_name, - dependencies_paths=None, options=None, name=None, clean=False, - notify=None, verbose=False, macros=None, inc_dirs=None, inc_dirs_ext=None, - jobs=1, silent=False, report=None, properties=None, extra_verbose=False): +def build_lib(lib_id, target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False): + """ Legacy method for building mbed libraries + Function builds library in proper directory using all dependencies and macros defined by user. + """ + lib = Library(lib_id) + if not lib.is_supported(target, toolchain_name): + print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain) + return False + + # We need to combine macros from parameter list with macros from library definition + MACROS = lib.macros if lib.macros else [] + if macros: + macros.extend(MACROS) + else: + macros = MACROS + + src_paths = lib.source_dir + build_path = lib.build_dir + dependencies_paths = lib.dependencies + inc_dirs = lib.inc_dirs + inc_dirs_ext = lib.inc_dirs_ext + """ src_path: the path of the source directory build_path: the path of the build directory target: ['LPC1768', 'LPC11U24', 'LPC2368'] @@ -300,34 +530,6 @@ def build_library(src_paths, build_path, target, toolchain_name, # Let Exception propagate raise e -def build_lib(lib_id, target, toolchain, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False): - """ Wrapper for build_library function. - Function builds library in proper directory using all dependencies and macros defined by user. 
- """ - lib = Library(lib_id) - if lib.is_supported(target, toolchain): - # We need to combine macros from parameter list with macros from library definition - MACROS = lib.macros if lib.macros else [] - if macros: - MACROS.extend(macros) - - return build_library(lib.source_dir, lib.build_dir, target, toolchain, lib.dependencies, options, - verbose=verbose, - silent=silent, - clean=clean, - macros=MACROS, - notify=notify, - inc_dirs=lib.inc_dirs, - inc_dirs_ext=lib.inc_dirs_ext, - jobs=jobs, - report=report, - properties=properties, - extra_verbose=extra_verbose) - else: - print 'Library "%s" is not yet supported on target %s with toolchain %s' % (lib_id, target.name, toolchain) - return False - - # We do have unique legacy conventions about how we build and package the mbed library def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=False, macros=None, notify=None, jobs=1, silent=False, report=None, properties=None, extra_verbose=False): """ Function returns True is library was built and false if building was skipped """ @@ -417,12 +619,12 @@ def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=F for o in separate_objects: objects.remove(o) - needed_update = toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed") + toolchain.build_library(objects, BUILD_TOOLCHAIN, "mbed") for o in separate_objects: toolchain.copy_files(o, BUILD_TOOLCHAIN) - if report != None and needed_update: + if report != None: end = time() cur_result["elapsed_time"] = end - start cur_result["output"] = toolchain.get_output() @@ -449,6 +651,7 @@ def build_mbed_libs(target, toolchain_name, options=None, verbose=False, clean=F # Let Exception propagate raise e + def get_unique_supported_toolchains(): """ Get list of all unique toolchains supported by targets """ unique_supported_toolchains = [] @@ -734,3 +937,63 @@ def write_build_report(build_report, template_filename, filename): with open(filename, 'w+') as f: f.write(template.render(failing_builds=build_report_failing, passing_builds=build_report_passing)) + + +def scan_for_source_paths(path, exclude_paths=None): + ignorepatterns = [] + paths = [] + + def is_ignored(file_path): + for pattern in ignorepatterns: + if fnmatch.fnmatch(file_path, pattern): + return True + return False + + + """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]]) + When topdown is True, the caller can modify the dirnames list in-place + (perhaps using del or slice assignment), and walk() will only recurse into + the subdirectories whose names remain in dirnames; this can be used to prune + the search, impose a specific order of visiting, or even to inform walk() + about directories the caller creates or renames before it resumes walk() + again. Modifying dirnames when topdown is False is ineffective, because in + bottom-up mode the directories in dirnames are generated before dirpath + itself is generated. 
+ """ + for root, dirs, files in walk(path, followlinks=True): + # Remove ignored directories + # Check if folder contains .mbedignore + if ".mbedignore" in files : + with open (join(root,".mbedignore"), "r") as f: + lines=f.readlines() + lines = [l.strip() for l in lines] # Strip whitespaces + lines = [l for l in lines if l != ""] # Strip empty lines + lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines + # Append root path to glob patterns + # and append patterns to ignorepatterns + ignorepatterns.extend([join(root,line.strip()) for line in lines]) + + for d in copy(dirs): + dir_path = join(root, d) + + # Always ignore hidden directories + if d.startswith('.'): + dirs.remove(d) + + # Remove dirs that already match the ignorepatterns + # to avoid travelling into them and to prevent them + # on appearing in include path. + if is_ignored(join(dir_path,"")): + dirs.remove(d) + + if exclude_paths: + for exclude_path in exclude_paths: + rel_path = relpath(dir_path, exclude_path) + if not (rel_path.startswith('..')): + dirs.remove(d) + break + + # Add root to include paths + paths.append(root) + + return paths diff --git a/workspace_tools/build_release.py b/tools/build_release.py similarity index 96% rename from workspace_tools/build_release.py rename to tools/build_release.py index ef28edba54c..d9d3683a17e 100644 --- a/workspace_tools/build_release.py +++ b/tools/build_release.py @@ -25,14 +25,14 @@ ROOT = abspath(join(dirname(__file__), "..")) sys.path.insert(0, ROOT) -from workspace_tools.build_api import build_mbed_libs -from workspace_tools.build_api import write_build_report -from workspace_tools.targets import TARGET_MAP, TARGET_NAMES -from workspace_tools.test_exporters import ReportExporter, ResultExporterType -from workspace_tools.test_api import SingleTestRunner -from workspace_tools.test_api import singletest_in_cli_mode -from workspace_tools.paths import TEST_DIR -from workspace_tools.tests import TEST_MAP +from tools.build_api import build_mbed_libs +from tools.build_api import write_build_report +from tools.targets import TARGET_MAP, TARGET_NAMES +from tools.test_exporters import ReportExporter, ResultExporterType +from tools.test_api import SingleTestRunner +from tools.test_api import singletest_in_cli_mode +from tools.paths import TEST_DIR +from tools.tests import TEST_MAP OFFICIAL_MBED_LIBRARY_BUILD = ( ('LPC11U24', ('ARM', 'uARM', 'GCC_ARM', 'IAR')), diff --git a/workspace_tools/build_travis.py b/tools/build_travis.py similarity index 96% rename from workspace_tools/build_travis.py rename to tools/build_travis.py index 7189dba8399..07d55741115 100644 --- a/workspace_tools/build_travis.py +++ b/tools/build_travis.py @@ -139,7 +139,7 @@ def run_builds(dry_run): toolchain_list = build["toolchains"] if type(toolchain_list) != type([]): toolchain_list = [toolchain_list] for toolchain in toolchain_list: - cmdline = "python workspace_tools/build.py -m %s -t %s -j 4 -c --silent "% (build["target"], toolchain) + cmdline = "python tools/build.py -m %s -t %s -j 4 -c --silent "% (build["target"], toolchain) libs = build.get("libs", []) if libs: cmdline = cmdline + " ".join(["--" + l for l in libs]) @@ -163,14 +163,14 @@ def run_test_linking(dry_run): for test_lib in tests: test_names = tests[test_lib] test_lib_switch = "--" + test_lib if test_lib else "" - cmdline = "python workspace_tools/make.py -m %s -t %s -c --silent %s -n %s " % (link["target"], toolchain, test_lib_switch, ",".join(test_names)) + cmdline = "python tools/make.py -m %s -t %s -c --silent %s -n %s " 
% (link["target"], toolchain, test_lib_switch, ",".join(test_names)) print "Executing: " + cmdline if not dry_run: if os.system(cmdline) != 0: sys.exit(1) def run_test_testsuite(dry_run): - cmdline = "python workspace_tools/singletest.py --version" + cmdline = "python tools/singletest.py --version" print "Executing: " + cmdline if not dry_run: if os.system(cmdline) != 0: diff --git a/workspace_tools/buildbot/master.cfg b/tools/buildbot/master.cfg similarity index 94% rename from workspace_tools/buildbot/master.cfg rename to tools/buildbot/master.cfg index 0a8a6620ccc..54d3a324a29 100644 --- a/workspace_tools/buildbot/master.cfg +++ b/tools/buildbot/master.cfg @@ -286,24 +286,24 @@ from buildbot.config import BuilderConfig c['builders'] = [] -copy_private_settings = ShellCommand(name = "copy private_settings.py", - command = "cp ../private_settings.py workspace_tools/private_settings.py", +copy_mbed_settings = ShellCommand(name = "copy mbed_settings.py", + command = "cp ../mbed_settings.py mbed_settings.py", haltOnFailure = True, - description = "Copy private_settings.py") + description = "Copy mbed_settings.py") mbed_build_release = BuildFactory() mbed_build_release.addStep(git_clone) -mbed_build_release.addStep(copy_private_settings) +mbed_build_release.addStep(copy_mbed_settings) for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD: builder_name = "All_TC_%s" % target_name mbed_build = BuildFactory() mbed_build.addStep(git_clone) - mbed_build.addStep(copy_private_settings) + mbed_build.addStep(copy_mbed_settings) # Adding all chains for target for toolchain in toolchains: build_py = BuildCommand(name = "Build %s using %s" % (target_name, toolchain), - command = "python workspace_tools/build.py -m %s -t %s" % (target_name, toolchain), + command = "python tools/build.py -m %s -t %s" % (target_name, toolchain), haltOnFailure = True, warnOnWarnings = True, description = "Building %s using %s" % (target_name, toolchain), @@ -314,12 +314,12 @@ for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD: if target_name in OFFICIAL_MBED_TESTBED_SUPPORTED_HARDWARE: copy_example_test_spec_json = ShellCommand(name = "Copy example_test_spec.json", - command = "cp ../example_test_spec.json workspace_tools/data/example_test_spec.json", + command = "cp ../example_test_spec.json tools/data/example_test_spec.json", haltOnFailure = True, description = "Copy example_test_spec.json") autotest_py = ShellCommand(name = "Running autotest.py for %s" % (target_name), - command = "python workspace_tools/autotest.py workspace_tools/data/example_test_spec.json", + command = "python tools/autotest.py tools/data/example_test_spec.json", haltOnFailure = True, description = "Running autotest.py") @@ -337,12 +337,12 @@ for target_name, toolchains in OFFICIAL_MBED_LIBRARY_BUILD: factory=mbed_build)) # copy_example_test_spec_json = ShellCommand(name = "Copy example_test_spec.json", - # command = "cp ../example_test_spec.json workspace_tools/data/example_test_spec.json", + # command = "cp ../example_test_spec.json tools/data/example_test_spec.json", # haltOnFailure = True, # description = "Copy example_test_spec.json") singletest_py = TestCommand(name = "Running Target Tests", - command = "python workspace_tools/singletest.py -i workspace_tools/test_spec.json -M workspace_tools/muts_all.json", + command = "python tools/singletest.py -i tools/test_spec.json -M tools/muts_all.json", haltOnFailure = True, warnOnWarnings = True, description = "Running Target Tests", diff --git 
a/workspace_tools/ci_templates/library_build/build_report.html b/tools/ci_templates/library_build/build_report.html similarity index 100% rename from workspace_tools/ci_templates/library_build/build_report.html rename to tools/ci_templates/library_build/build_report.html diff --git a/workspace_tools/ci_templates/library_build/build_report_table.html b/tools/ci_templates/library_build/build_report_table.html similarity index 100% rename from workspace_tools/ci_templates/library_build/build_report_table.html rename to tools/ci_templates/library_build/build_report_table.html diff --git a/workspace_tools/ci_templates/library_build/report.html b/tools/ci_templates/library_build/report.html similarity index 100% rename from workspace_tools/ci_templates/library_build/report.html rename to tools/ci_templates/library_build/report.html diff --git a/workspace_tools/ci_templates/scripts.js b/tools/ci_templates/scripts.js similarity index 100% rename from workspace_tools/ci_templates/scripts.js rename to tools/ci_templates/scripts.js diff --git a/workspace_tools/ci_templates/tests_build/build_report.html b/tools/ci_templates/tests_build/build_report.html similarity index 100% rename from workspace_tools/ci_templates/tests_build/build_report.html rename to tools/ci_templates/tests_build/build_report.html diff --git a/workspace_tools/ci_templates/tests_build/build_report_table.html b/tools/ci_templates/tests_build/build_report_table.html similarity index 100% rename from workspace_tools/ci_templates/tests_build/build_report_table.html rename to tools/ci_templates/tests_build/build_report_table.html diff --git a/workspace_tools/ci_templates/tests_build/report.html b/tools/ci_templates/tests_build/report.html similarity index 100% rename from workspace_tools/ci_templates/tests_build/report.html rename to tools/ci_templates/tests_build/report.html diff --git a/workspace_tools/compliance/__init__.py b/tools/compliance/__init__.py similarity index 100% rename from workspace_tools/compliance/__init__.py rename to tools/compliance/__init__.py diff --git a/workspace_tools/compliance/ioper_base.py b/tools/compliance/ioper_base.py similarity index 100% rename from workspace_tools/compliance/ioper_base.py rename to tools/compliance/ioper_base.py diff --git a/workspace_tools/compliance/ioper_runner.py b/tools/compliance/ioper_runner.py similarity index 100% rename from workspace_tools/compliance/ioper_runner.py rename to tools/compliance/ioper_runner.py diff --git a/workspace_tools/compliance/ioper_test_fs.py b/tools/compliance/ioper_test_fs.py similarity index 100% rename from workspace_tools/compliance/ioper_test_fs.py rename to tools/compliance/ioper_test_fs.py diff --git a/workspace_tools/compliance/ioper_test_target_id.py b/tools/compliance/ioper_test_target_id.py similarity index 100% rename from workspace_tools/compliance/ioper_test_target_id.py rename to tools/compliance/ioper_test_target_id.py diff --git a/tools/config.py b/tools/config.py new file mode 100644 index 00000000000..a152e62cd18 --- /dev/null +++ b/tools/config.py @@ -0,0 +1,325 @@ +""" +mbed SDK +Copyright (c) 2016 ARM Limited + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +# Implementation of mbed configuration mechanism +from copy import deepcopy +from collections import OrderedDict +from tools.utils import json_file_to_dict, ToolException +from tools.targets import Target +import os + +# Base class for all configuration exceptions +class ConfigException(Exception): + pass + +# This class keeps information about a single configuration parameter +class ConfigParameter: + # name: the name of the configuration parameter + # data: the data associated with the configuration parameter + # unit_name: the unit (target/library/application) that defines this parameter + # unit_ kind: the kind of the unit ("target", "library" or "application") + def __init__(self, name, data, unit_name, unit_kind): + self.name = self.get_full_name(name, unit_name, unit_kind, allow_prefix = False) + self.defined_by = self.get_display_name(unit_name, unit_kind) + self.set_by = self.defined_by + self.help_text = data.get("help", None) + self.value = data.get("value", None) + self.required = data.get("required", False) + self.macro_name = data.get("macro_name", "MBED_CONF_%s" % self.sanitize(self.name.upper())) + + # Return the full (prefixed) name of a parameter. + # If the parameter already has a prefix, check if it is valid + # name: the simple (unqualified) name of the parameter + # unit_name: the unit (target/library/application) that defines this parameter + # unit_kind: the kind of the unit ("target", "library" or "application") + # label: the name of the label in the 'target_config_overrides' section (optional) + # allow_prefix: True to allo the original name to have a prefix, False otherwise + @staticmethod + def get_full_name(name, unit_name, unit_kind, label = None, allow_prefix = True): + if name.find('.') == -1: # the name is not prefixed + if unit_kind == "target": + prefix = "target." + elif unit_kind == "application": + prefix = "app." + else: + prefix = unit_name + '.' 
+ return prefix + name + # The name has a prefix, so check if it is valid + if not allow_prefix: + raise ConfigException("Invalid parameter name '%s' in '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind, label))) + temp = name.split(".") + # Check if the parameter syntax is correct (must be unit_name.parameter_name) + if len(temp) != 2: + raise ConfigException("Invalid parameter name '%s' in '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind, label))) + prefix = temp[0] + # Check if the given parameter prefix matches the expected prefix + if (unit_kind == "library" and prefix != unit_name) or (unit_kind == "target" and prefix != "target"): + raise ConfigException("Invalid prefix '%s' for parameter name '%s' in '%s'" % (prefix, name, ConfigParameter.get_display_name(unit_name, unit_kind, label))) + return name + + # Return the name displayed for a unit when interogating the origin + # and the last set place of a parameter + # unit_name: the unit (target/library/application) that defines this parameter + # unit_kind: the kind of the unit ("target", "library" or "application") + # label: the name of the label in the 'target_config_overrides' section (optional) + @staticmethod + def get_display_name(unit_name, unit_kind, label = None): + if unit_kind == "target": + return "target:" + unit_name + elif unit_kind == "application": + return "application%s" % ("[%s]" % label if label else "") + else: # library + return "library:%s%s" % (unit_name, "[%s]" % label if label else "") + + # "Sanitize" a name so that it is a valid C macro name + # Currently it simply replaces '.' and '-' with '_' + # name: the un-sanitized name. + @staticmethod + def sanitize(name): + return name.replace('.', '_').replace('-', '_') + + # Sets a value for this parameter, remember the place where it was set + # value: the value of the parameter + # unit_name: the unit (target/library/application) that defines this parameter + # unit_ kind: the kind of the unit ("target", "library" or "application") + # label: the name of the label in the 'target_config_overrides' section (optional) + def set_value(self, value, unit_name, unit_kind, label = None): + self.value = value + self.set_by = self.get_display_name(unit_name, unit_kind, label) + + # Return the string representation of this configuration parameter + def __str__(self): + if self.value is not None: + return '%s = %s (macro name: "%s")' % (self.name, self.value, self.macro_name) + else: + return '%s has no value' % self.name + + # Return a verbose description of this configuration paramater as a string + def get_verbose_description(self): + desc = "Name: %s%s\n" % (self.name, " (required parameter)" if self.required else "") + if self.help_text: + desc += " Description: %s\n" % self.help_text + desc += " Defined by: %s\n" % self.defined_by + if not self.value: + return desc + " No value set" + desc += " Macro name: %s\n" % self.macro_name + desc += " Value: %s (set by %s)" % (self.value, self.set_by) + return desc + +# A representation of a configuration macro. 
It handles both macros without a value (MACRO) +# and with a value (MACRO=VALUE) +class ConfigMacro: + def __init__(self, name, unit_name, unit_kind): + self.name = name + self.defined_by = ConfigParameter.get_display_name(unit_name, unit_kind) + if name.find("=") != -1: + tmp = name.split("=") + if len(tmp) != 2: + raise ValueError("Invalid macro definition '%s' in '%s'" % (name, self.defined_by)) + self.macro_name = tmp[0] + else: + self.macro_name = name + +# 'Config' implements the mbed configuration mechanism +class Config: + # Libraries and applications have different names for their configuration files + __mbed_app_config_name = "mbed_app.json" + __mbed_lib_config_name = "mbed_lib.json" + + # Allowed keys in configuration dictionaries + # (targets can have any kind of keys, so this validation is not applicable to them) + __allowed_keys = { + "library": set(["name", "config", "target_overrides", "macros", "__config_path"]), + "application": set(["config", "custom_targets", "target_overrides", "macros", "__config_path"]) + } + + # The initialization arguments for Config are: + # target: the name of the mbed target used for this configuration instance + # top_level_dirs: a list of top level source directories (where mbed_abb_config.json could be found) + # __init__ will look for the application configuration file in top_level_dirs. + # If found once, it'll parse it and check if it has a custom_targets function. + # If it does, it'll update the list of targets if need. + # If found more than once, an exception is raised + # top_level_dirs can be None (in this case, mbed_app_config.json will not be searched) + def __init__(self, target, top_level_dirs = []): + app_config_location = None + for s in (top_level_dirs or []): + full_path = os.path.join(s, self.__mbed_app_config_name) + if os.path.isfile(full_path): + if app_config_location is not None: + raise ConfigException("Duplicate '%s' file in '%s' and '%s'" % (self.__mbed_app_config_name, app_config_location, full_path)) + else: + app_config_location = full_path + self.app_config_data = json_file_to_dict(app_config_location) if app_config_location else {} + # Check the keys in the application configuration data + unknown_keys = set(self.app_config_data.keys()) - self.__allowed_keys["application"] + if unknown_keys: + raise ConfigException("Unknown key(s) '%s' in %s" % (",".join(unknown_keys), self.__mbed_app_config_name)) + # Update the list of targets with the ones defined in the application config, if applicable + Target.add_py_targets(self.app_config_data.get("custom_targets", {})) + self.lib_config_data = {} + # Make sure that each config is processed only once + self.processed_configs = {} + self.target = target if isinstance(target, str) else target.name + self.target_labels = Target.get_target(self.target).get_labels() + + # Add one or more configuration files + def add_config_files(self, flist): + for f in flist: + if not f.endswith(self.__mbed_lib_config_name): + continue + full_path = os.path.normpath(os.path.abspath(f)) + # Check that we didn't already process this file + if self.processed_configs.has_key(full_path): + continue + self.processed_configs[full_path] = True + # Read the library configuration and add a "__full_config_path" attribute to it + cfg = json_file_to_dict(f) + cfg["__config_path"] = full_path + # If there's already a configuration for a module with the same name, exit with error + if self.lib_config_data.has_key(cfg["name"]): + raise ConfigException("Library name '%s' is not unique (defined in '%s' and 
'%s')" % (cfg["name"], full_path, self.lib_config_data[cfg["name"]]["__config_path"])) + self.lib_config_data[cfg["name"]] = cfg + + # Helper function: process a "config" section in either a target, a library or the application + # data: a dictionary with the configuration parameters + # params: storage for the discovered configuration parameters + # unit_name: the unit (target/library/application) that defines this parameter + # unit_kind: the kind of the unit ("target", "library" or "application") + def _process_config_parameters(self, data, params, unit_name, unit_kind): + for name, v in data.items(): + full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind) + # If the parameter was already defined, raise an error + if full_name in params: + raise ConfigException("Parameter name '%s' defined in both '%s' and '%s'" % (name, ConfigParameter.get_display_name(unit_name, unit_kind), params[full_name].defined_by)) + # Otherwise add it to the list of known parameters + # If "v" is not a dictionary, this is a shortcut definition, otherwise it is a full definition + params[full_name] = ConfigParameter(name, v if isinstance(v, dict) else {"value": v}, unit_name, unit_kind) + return params + + # Helper function: process "config" and "target_overrides" sections in a given dictionary + # data: the configuration data of the library/application + # params: storage for the discovered configuration parameters + # unit_name: the unit (library/application) that defines this parameter + # unit_kind: the kind of the unit ("library" or "application") + def _process_config_and_overrides(self, data, params, unit_name, unit_kind): + self._process_config_parameters(data.get("config", {}), params, unit_name, unit_kind) + for label, overrides in data.get("target_overrides", {}).items(): + # If the label is defined by the target or it has the special value "*", process the overrides + if (label == '*') or (label in self.target_labels): + for name, v in overrides.items(): + # Get the full name of the parameter + full_name = ConfigParameter.get_full_name(name, unit_name, unit_kind, label) + # If an attempt is made to override a parameter that isn't defined, raise an error + if not full_name in params: + raise ConfigException("Attempt to override undefined parameter '%s' in '%s'" % (full_name, ConfigParameter.get_display_name(unit_name, unit_kind, label))) + params[full_name].set_value(v, unit_name, unit_kind, label) + return params +
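
A hypothetical `mbed_lib.json` (editorial sketch, shown here as a Python dict) that exercises the override logic above; `mylib`, the values and the `K64F` label are invented:

```python
# Illustration only; names and values are made up.
mylib_json = {
    "name": "mylib",
    "config": {"buffer-size": 256},     # shortcut form of {"value": 256}
    "target_overrides": {
        "*":    {"buffer-size": 128},   # matches every target
        "K64F": {"buffer-size": 512},   # matches only when "K64F" is a target label
    },
}
```

+ # Read and interpret configuration data defined by targets + def get_target_config_data(self): + # We consider the resolution order for our target and sort it by level reversed, + # so that we first look at the top level target (the parent), then its direct children, + # then the children's children and so on, until we reach self.target + # TODO: this might not work so well in some multiple inheritance scenarios + # At each step, look at two keys of the target data: + # - config: used to define new configuration parameters + # - overrides: used to override already defined configuration parameters + params, json_data = {}, Target.get_json_target_data() + resolution_order = [e[0] for e in sorted(Target.get_target(self.target).resolution_order, key = lambda e: e[1], reverse = True)] + for tname in resolution_order: + # Read the target data directly from its description + t = json_data[tname] + # Process definitions first + self._process_config_parameters(t.get("config", {}), params, tname, "target") + # Then process overrides + for name, 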
v in t.get("overrides", {}).items(): + full_name = ConfigParameter.get_full_name(name, tname, "target") + # If the parameter name is not defined or if there isn't a path from this target to the target where the + # parameter was defined in the target inheritance tree, raise an error + # We need to use 'defined_by[7:]' to remove the "target:" prefix from defined_by + if (not full_name in params) or (not params[full_name].defined_by[7:] in Target.get_target(tname).resolution_order_names): + raise ConfigException("Attempt to override undefined parameter '%s' in '%s'" % (name, ConfigParameter.get_display_name(tname, "target"))) + # Otherwise update the value of the parameter + params[full_name].set_value(v, tname, "target") + return params + + # Helper function: process a macro definition, checking for incompatible duplicate definitions + # mlist: list of macro names to process + # macros: dictionary with currently discovered macros + # unit_name: the unit (library/application) that defines this macro + # unit_kind: the kind of the unit ("library" or "application") + def _process_macros(self, mlist, macros, unit_name, unit_kind): + for mname in mlist: + m = ConfigMacro(mname, unit_name, unit_kind) + if (m.macro_name in macros) and (macros[m.macro_name].name != mname): + # Found an incompatible definition of the macro in another module, so raise an error + full_unit_name = ConfigParameter.get_display_name(unit_name, unit_kind) + raise ConfigException("Macro '%s' defined in both '%s' and '%s' with incompatible values" % (m.macro_name, macros[m.macro_name].defined_by, full_unit_name)) + macros[m.macro_name] = m + + # Read and interpret configuration data defined by libraries + # It is assumed that "add_config_files" above was already called and the library configuration data + # exists in self.lib_config_data + def get_lib_config_data(self): + all_params, macros = {}, {} + for lib_name, lib_data in self.lib_config_data.items(): + unknown_keys = set(lib_data.keys()) - self.__allowed_keys["library"] + if unknown_keys: + raise ConfigException("Unknown key(s) '%s' in %s" % (",".join(unknown_keys), lib_name)) + all_params.update(self._process_config_and_overrides(lib_data, {}, lib_name, "library")) + self._process_macros(lib_data.get("macros", []), macros, lib_name, "library") + return all_params, macros + + # Read and interpret the configuration data defined by the application + # The application can override any configuration parameter, as well as define its own configuration data + # params: the dictionary with configuration parameters found so far (in the target and in libraries) + # macros: the dictionary of macros discovered so far + def get_app_config_data(self, params, macros): + app_cfg = self.app_config_data + # The application can have a "config" and a "target_overrides" section just like a library + self._process_config_and_overrides(app_cfg, params, "app", "application") + # The application can also define macros + self._process_macros(app_cfg.get("macros", []), macros, "app", "application") + + # Return the configuration data in two parts: + # - params: a dictionary with (name, ConfigParameter) entries + # - macros: the list of macros defined with "macros" in libraries and in the application + def get_config_data(self): + all_params = self.get_target_config_data() + lib_params, macros = self.get_lib_config_data() + all_params.update(lib_params) + self.get_app_config_data(all_params, macros) + return all_params, [m.name for m in macros.values()] +
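
A short editorial sketch of the macro merging rule enforced by `_process_macros` above; the macro names are invented and `cfg` is the hypothetical instance from the earlier sketch:

```python
# Illustration only; a macro may be repeated only with an identical definition.
macros = {}
cfg._process_macros(["MYLIB_TRACE=1"], macros, "mylib", "library")
cfg._process_macros(["MYLIB_TRACE=1"], macros, "app", "application")    # OK: identical
# cfg._process_macros(["MYLIB_TRACE=2"], macros, "app", "application")  # raises ConfigException
```

+ # Helper: verify if there are any 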
required parameters without a value in 'params' + def _check_required_parameters(self, params): + for p in params.values(): + if p.required and (p.value is None): + raise ConfigException("Required parameter '%s' defined by '%s' doesn't have a value" % (p.name, p.defined_by)) + + # Return the macro definitions generated for a dictionary of configuration parameters + # params: a dictionary of (name, ConfigParameter instance) mappings + @staticmethod + def parameters_to_macros(params): + return ['%s=%s' % (m.macro_name, m.value) for m in params.values() if m.value is not None] + + # Return the configuration data converted to a list of C macros + def get_config_data_macros(self): + params, macros = self.get_config_data() + self._check_required_parameters(params) + return macros + self.parameters_to_macros(params)
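
Putting the pieces together, a hypothetical end-to-end sketch (editorial, not part of the changeset) that renders the merged configuration as compiler command-line defines:

```python
# Illustration only; target name, paths and the -D rendering are examples.
cfg = Config("K64F", ["source"])
cfg.add_config_files(["mylib/mbed_lib.json"])
for macro in cfg.get_config_data_macros():
    print "-D%s" % macro   # one entry per macro plus one NAME=VALUE per set parameter
```

diff --git a/workspace_tools/data/__init__.py b/tools/data/__init__.py similarity index 100% rename from workspace_tools/data/__init__.py rename to tools/data/__init__.py diff --git a/workspace_tools/data/rpc/RPCClasses.h b/tools/data/rpc/RPCClasses.h similarity index 100% rename from workspace_tools/data/rpc/RPCClasses.h rename to tools/data/rpc/RPCClasses.h diff --git a/workspace_tools/data/rpc/class.cpp b/tools/data/rpc/class.cpp similarity index 100% rename from workspace_tools/data/rpc/class.cpp rename to tools/data/rpc/class.cpp diff --git a/workspace_tools/data/support.py b/tools/data/support.py similarity index 95% rename from workspace_tools/data/support.py rename to tools/data/support.py index b47380f4d91..7af37746f75 100644 --- a/workspace_tools/data/support.py +++ b/tools/data/support.py @@ -14,7 +14,7 @@ See the License for the specific language governing permissions and limitations under the License. """ -from workspace_tools.targets import TARGETS +from tools.targets import TARGETS DEFAULT_SUPPORT = {} CORTEX_ARM_SUPPORT = {} diff --git a/workspace_tools/dev/__init__.py b/tools/dev/__init__.py similarity index 100% rename from workspace_tools/dev/__init__.py rename to tools/dev/__init__.py diff --git a/workspace_tools/dev/dsp_fir.py b/tools/dev/dsp_fir.py similarity index 100% rename from workspace_tools/dev/dsp_fir.py rename to tools/dev/dsp_fir.py diff --git a/workspace_tools/dev/intel_hex_utils.py b/tools/dev/intel_hex_utils.py similarity index 100% rename from workspace_tools/dev/intel_hex_utils.py rename to tools/dev/intel_hex_utils.py diff --git a/workspace_tools/dev/rpc_classes.py b/tools/dev/rpc_classes.py similarity index 98% rename from workspace_tools/dev/rpc_classes.py rename to tools/dev/rpc_classes.py index f082f3b9da6..46fd902b2fb 100644 --- a/workspace_tools/dev/rpc_classes.py +++ b/tools/dev/rpc_classes.py @@ -17,7 +17,7 @@ from os.path import join from jinja2 import Template -from workspace_tools.paths import TOOLS_DATA, MBED_RPC +from tools.paths import TOOLS_DATA, MBED_RPC RPC_TEMPLATES_PATH = join(TOOLS_DATA, "rpc") diff --git a/workspace_tools/dev/syms.py b/tools/dev/syms.py similarity index 100% rename from workspace_tools/dev/syms.py rename to tools/dev/syms.py diff --git a/workspace_tools/export/.hgignore b/tools/export/.hgignore old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/export/.hgignore rename to tools/export/.hgignore diff --git a/workspace_tools/export/README.md b/tools/export/README.md similarity index 100% rename from workspace_tools/export/README.md rename to tools/export/README.md diff --git a/workspace_tools/export/__init__.py b/tools/export/__init__.py old mode 100755 new mode 100644 similarity 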
index 90% rename from workspace_tools/export/__init__.py rename to tools/export/__init__.py index 1b4cd197ba3..dcb28d48031 --- a/workspace_tools/export/__init__.py +++ b/tools/export/__init__.py @@ -19,10 +19,10 @@ from shutil import copytree, rmtree, copy import yaml -from workspace_tools.utils import mkdir -from workspace_tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio -from workspace_tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException -from workspace_tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP +from tools.utils import mkdir +from tools.export import uvision4, uvision5, codered, gccarm, ds5_5, iar, emblocks, coide, kds, zip, simplicityv3, atmelstudio, sw4stm32, e2studio +from tools.export.exporters import zip_working_directory_and_clean_up, OldLibrariesException +from tools.targets import TARGET_NAMES, EXPORT_MAP, TARGET_MAP from project_generator_definitions.definitions import ProGenDef @@ -57,7 +57,7 @@ def online_build_url_resolver(url): def export(project_path, project_name, ide, target, destination='/tmp/', - tempdir=None, clean=True, extra_symbols=None, build_url_resolver=online_build_url_resolver): + tempdir=None, clean=True, extra_symbols=None, zip=True, relative=False, build_url_resolver=online_build_url_resolver): # Convention: we are using capitals for toolchain and target names if target is not None: target = target.upper() @@ -74,7 +74,7 @@ try: ide = "zip" exporter = zip.ZIP(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols) - exporter.scan_and_copy_resources(project_path, tempdir) + exporter.scan_and_copy_resources(project_path, tempdir, relative) exporter.generate() report['success'] = True except OldLibrariesException, e: @@ -101,7 +101,7 @@ # target checked, export try: exporter = Exporter(target, tempdir, project_name, build_url_resolver, extra_symbols=extra_symbols) - exporter.scan_and_copy_resources(project_path, tempdir) + exporter.scan_and_copy_resources(project_path, tempdir, relative) exporter.generate() report['success'] = True except OldLibrariesException, e: @@ -133,8 +133,12 @@ # add readme file to every offline export. open(os.path.join(tempdir, 'GettingStarted.htm'),'w').write(''% (ide)) # copy .hgignore file to exported directory as well. 
- copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'),tempdir) - zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean) + if exists(os.path.join(exporter.TEMPLATE_DIR,'.hgignore')): + copy(os.path.join(exporter.TEMPLATE_DIR,'.hgignore'), tempdir) + if zip: + zip_path = zip_working_directory_and_clean_up(tempdir, destination, project_name, clean) + else: + zip_path = destination return zip_path, report
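
A hypothetical call of the extended `export()` signature above (editorial sketch; the IDE name, target and paths are invented examples):

```python
# Illustration only; arguments are made-up examples.
from tools.export import export

path, report = export("source", "my_project", "uvision4", "K64F",
                      destination="exported", zip=False, relative=True)
# report['success'] indicates whether generation worked; with zip=False the
# zipping step is skipped and the returned path is simply `destination`.
```

diff --git a/workspace_tools/export/atmelstudio.py b/tools/export/atmelstudio.py similarity index 100% rename from workspace_tools/export/atmelstudio.py rename to tools/export/atmelstudio.py diff --git a/workspace_tools/export/atmelstudio6_2.atsln.tmpl b/tools/export/atmelstudio6_2.atsln.tmpl similarity index 100% rename from workspace_tools/export/atmelstudio6_2.atsln.tmpl rename to tools/export/atmelstudio6_2.atsln.tmpl diff --git a/workspace_tools/export/atmelstudio6_2.cppproj.tmpl b/tools/export/atmelstudio6_2.cppproj.tmpl similarity index 100% rename from workspace_tools/export/atmelstudio6_2.cppproj.tmpl rename to tools/export/atmelstudio6_2.cppproj.tmpl diff --git a/workspace_tools/export/codered.py b/tools/export/codered.py old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/export/codered.py rename to tools/export/codered.py diff --git a/workspace_tools/export/codered_arch_pro_cproject.tmpl b/tools/export/codered_arch_pro_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_arch_pro_cproject.tmpl rename to tools/export/codered_arch_pro_cproject.tmpl diff --git a/workspace_tools/export/codered_arch_pro_project.tmpl b/tools/export/codered_arch_pro_project.tmpl similarity index 100% rename from workspace_tools/export/codered_arch_pro_project.tmpl rename to tools/export/codered_arch_pro_project.tmpl diff --git a/workspace_tools/export/codered_cproject_common.tmpl b/tools/export/codered_cproject_common.tmpl similarity index 100% rename from workspace_tools/export/codered_cproject_common.tmpl rename to tools/export/codered_cproject_common.tmpl diff --git a/workspace_tools/export/codered_cproject_cortexm0_common.tmpl b/tools/export/codered_cproject_cortexm0_common.tmpl similarity index 100% rename from workspace_tools/export/codered_cproject_cortexm0_common.tmpl rename to tools/export/codered_cproject_cortexm0_common.tmpl diff --git a/workspace_tools/export/codered_cproject_cortexm3_common.tmpl b/tools/export/codered_cproject_cortexm3_common.tmpl similarity index 100% rename from workspace_tools/export/codered_cproject_cortexm3_common.tmpl rename to tools/export/codered_cproject_cortexm3_common.tmpl diff --git a/workspace_tools/export/codered_lpc1114_cproject.tmpl b/tools/export/codered_lpc1114_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc1114_cproject.tmpl rename to tools/export/codered_lpc1114_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc1114_project.tmpl b/tools/export/codered_lpc1114_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc1114_project.tmpl rename to tools/export/codered_lpc1114_project.tmpl diff --git a/workspace_tools/export/codered_lpc11u35_401_cproject.tmpl b/tools/export/codered_lpc11u35_401_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u35_401_cproject.tmpl rename to tools/export/codered_lpc11u35_401_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc11u35_401_project.tmpl b/tools/export/codered_lpc11u35_401_project.tmpl similarity index 100% rename 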
from workspace_tools/export/codered_lpc11u35_401_project.tmpl rename to tools/export/codered_lpc11u35_401_project.tmpl diff --git a/workspace_tools/export/codered_lpc11u35_501_cproject.tmpl b/tools/export/codered_lpc11u35_501_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u35_501_cproject.tmpl rename to tools/export/codered_lpc11u35_501_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc11u35_501_project.tmpl b/tools/export/codered_lpc11u35_501_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u35_501_project.tmpl rename to tools/export/codered_lpc11u35_501_project.tmpl diff --git a/workspace_tools/export/codered_lpc11u37h_401_cproject.tmpl b/tools/export/codered_lpc11u37h_401_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u37h_401_cproject.tmpl rename to tools/export/codered_lpc11u37h_401_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc11u37h_401_project.tmpl b/tools/export/codered_lpc11u37h_401_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u37h_401_project.tmpl rename to tools/export/codered_lpc11u37h_401_project.tmpl diff --git a/workspace_tools/export/codered_lpc11u68_cproject.tmpl b/tools/export/codered_lpc11u68_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u68_cproject.tmpl rename to tools/export/codered_lpc11u68_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc11u68_project.tmpl b/tools/export/codered_lpc11u68_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc11u68_project.tmpl rename to tools/export/codered_lpc11u68_project.tmpl diff --git a/workspace_tools/export/codered_lpc1549_cproject.tmpl b/tools/export/codered_lpc1549_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc1549_cproject.tmpl rename to tools/export/codered_lpc1549_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc1549_project.tmpl b/tools/export/codered_lpc1549_project.tmpl old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/export/codered_lpc1549_project.tmpl rename to tools/export/codered_lpc1549_project.tmpl diff --git a/workspace_tools/export/codered_lpc1768_cproject.tmpl b/tools/export/codered_lpc1768_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc1768_cproject.tmpl rename to tools/export/codered_lpc1768_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc1768_project.tmpl b/tools/export/codered_lpc1768_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc1768_project.tmpl rename to tools/export/codered_lpc1768_project.tmpl diff --git a/workspace_tools/export/codered_lpc4088_cproject.tmpl b/tools/export/codered_lpc4088_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc4088_cproject.tmpl rename to tools/export/codered_lpc4088_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc4088_dm_cproject.tmpl b/tools/export/codered_lpc4088_dm_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc4088_dm_cproject.tmpl rename to tools/export/codered_lpc4088_dm_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc4088_dm_project.tmpl b/tools/export/codered_lpc4088_dm_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc4088_dm_project.tmpl rename to tools/export/codered_lpc4088_dm_project.tmpl diff --git 
a/workspace_tools/export/codered_lpc4088_project.tmpl b/tools/export/codered_lpc4088_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc4088_project.tmpl rename to tools/export/codered_lpc4088_project.tmpl diff --git a/workspace_tools/export/codered_lpc4330_m4_cproject.tmpl b/tools/export/codered_lpc4330_m4_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc4330_m4_cproject.tmpl rename to tools/export/codered_lpc4330_m4_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc4330_m4_project.tmpl b/tools/export/codered_lpc4330_m4_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc4330_m4_project.tmpl rename to tools/export/codered_lpc4330_m4_project.tmpl diff --git a/workspace_tools/export/codered_lpc824_cproject.tmpl b/tools/export/codered_lpc824_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc824_cproject.tmpl rename to tools/export/codered_lpc824_cproject.tmpl diff --git a/workspace_tools/export/codered_lpc824_project.tmpl b/tools/export/codered_lpc824_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpc824_project.tmpl rename to tools/export/codered_lpc824_project.tmpl diff --git a/workspace_tools/export/codered_lpccappuccino_cproject.tmpl b/tools/export/codered_lpccappuccino_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_lpccappuccino_cproject.tmpl rename to tools/export/codered_lpccappuccino_cproject.tmpl diff --git a/workspace_tools/export/codered_lpccappuccino_project.tmpl b/tools/export/codered_lpccappuccino_project.tmpl similarity index 100% rename from workspace_tools/export/codered_lpccappuccino_project.tmpl rename to tools/export/codered_lpccappuccino_project.tmpl diff --git a/workspace_tools/export/codered_project_common.tmpl b/tools/export/codered_project_common.tmpl similarity index 100% rename from workspace_tools/export/codered_project_common.tmpl rename to tools/export/codered_project_common.tmpl diff --git a/workspace_tools/export/codered_ublox_c027_cproject.tmpl b/tools/export/codered_ublox_c027_cproject.tmpl similarity index 100% rename from workspace_tools/export/codered_ublox_c027_cproject.tmpl rename to tools/export/codered_ublox_c027_cproject.tmpl diff --git a/workspace_tools/export/codered_ublox_c027_project.tmpl b/tools/export/codered_ublox_c027_project.tmpl similarity index 100% rename from workspace_tools/export/codered_ublox_c027_project.tmpl rename to tools/export/codered_ublox_c027_project.tmpl diff --git a/workspace_tools/export/coide.py b/tools/export/coide.py old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/export/coide.py rename to tools/export/coide.py diff --git a/workspace_tools/export/coide_arch_max.coproj.tmpl b/tools/export/coide_arch_max.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_arch_max.coproj.tmpl rename to tools/export/coide_arch_max.coproj.tmpl diff --git a/workspace_tools/export/coide_arch_pro.coproj.tmpl b/tools/export/coide_arch_pro.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_arch_pro.coproj.tmpl rename to tools/export/coide_arch_pro.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_f051r8.coproj.tmpl b/tools/export/coide_disco_f051r8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f051r8.coproj.tmpl rename to tools/export/coide_disco_f051r8.coproj.tmpl diff --git 
a/workspace_tools/export/coide_disco_f100rb.coproj.tmpl b/tools/export/coide_disco_f100rb.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f100rb.coproj.tmpl rename to tools/export/coide_disco_f100rb.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_f303vc.coproj.tmpl b/tools/export/coide_disco_f303vc.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f303vc.coproj.tmpl rename to tools/export/coide_disco_f303vc.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_f334c8.coproj.tmpl b/tools/export/coide_disco_f334c8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f334c8.coproj.tmpl rename to tools/export/coide_disco_f334c8.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_f401vc.coproj.tmpl b/tools/export/coide_disco_f401vc.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f401vc.coproj.tmpl rename to tools/export/coide_disco_f401vc.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_f407vg.coproj.tmpl b/tools/export/coide_disco_f407vg.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f407vg.coproj.tmpl rename to tools/export/coide_disco_f407vg.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_f429zi.coproj.tmpl b/tools/export/coide_disco_f429zi.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_f429zi.coproj.tmpl rename to tools/export/coide_disco_f429zi.coproj.tmpl diff --git a/workspace_tools/export/coide_disco_l053c8.coproj.tmpl b/tools/export/coide_disco_l053c8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_disco_l053c8.coproj.tmpl rename to tools/export/coide_disco_l053c8.coproj.tmpl diff --git a/workspace_tools/export/coide_kl05z.coproj.tmpl b/tools/export/coide_kl05z.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_kl05z.coproj.tmpl rename to tools/export/coide_kl05z.coproj.tmpl diff --git a/workspace_tools/export/coide_kl25z.coproj.tmpl b/tools/export/coide_kl25z.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_kl25z.coproj.tmpl rename to tools/export/coide_kl25z.coproj.tmpl diff --git a/workspace_tools/export/coide_lpc1768.coproj.tmpl b/tools/export/coide_lpc1768.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_lpc1768.coproj.tmpl rename to tools/export/coide_lpc1768.coproj.tmpl diff --git a/workspace_tools/export/coide_mote_l152rc.coproj.tmpl b/tools/export/coide_mote_l152rc.coproj.tmpl old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/export/coide_mote_l152rc.coproj.tmpl rename to tools/export/coide_mote_l152rc.coproj.tmpl diff --git a/workspace_tools/export/coide_mts_mdot_f405rg.coproj.tmpl b/tools/export/coide_mts_mdot_f405rg.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_mts_mdot_f405rg.coproj.tmpl rename to tools/export/coide_mts_mdot_f405rg.coproj.tmpl diff --git a/workspace_tools/export/coide_mts_mdot_f411re.coproj.tmpl b/tools/export/coide_mts_mdot_f411re.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_mts_mdot_f411re.coproj.tmpl rename to tools/export/coide_mts_mdot_f411re.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f030r8.coproj.tmpl b/tools/export/coide_nucleo_f030r8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f030r8.coproj.tmpl rename to tools/export/coide_nucleo_f030r8.coproj.tmpl diff --git 
a/workspace_tools/export/coide_nucleo_f042k6.coproj.tmpl b/tools/export/coide_nucleo_f042k6.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f042k6.coproj.tmpl rename to tools/export/coide_nucleo_f042k6.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f070rb.coproj.tmpl b/tools/export/coide_nucleo_f070rb.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f070rb.coproj.tmpl rename to tools/export/coide_nucleo_f070rb.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f072rb.coproj.tmpl b/tools/export/coide_nucleo_f072rb.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f072rb.coproj.tmpl rename to tools/export/coide_nucleo_f072rb.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f091rc.coproj.tmpl b/tools/export/coide_nucleo_f091rc.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f091rc.coproj.tmpl rename to tools/export/coide_nucleo_f091rc.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f103rb.coproj.tmpl b/tools/export/coide_nucleo_f103rb.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f103rb.coproj.tmpl rename to tools/export/coide_nucleo_f103rb.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f302r8.coproj.tmpl b/tools/export/coide_nucleo_f302r8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f302r8.coproj.tmpl rename to tools/export/coide_nucleo_f302r8.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f303re.coproj.tmpl b/tools/export/coide_nucleo_f303re.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f303re.coproj.tmpl rename to tools/export/coide_nucleo_f303re.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f334r8.coproj.tmpl b/tools/export/coide_nucleo_f334r8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f334r8.coproj.tmpl rename to tools/export/coide_nucleo_f334r8.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f401re.coproj.tmpl b/tools/export/coide_nucleo_f401re.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f401re.coproj.tmpl rename to tools/export/coide_nucleo_f401re.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f410rb.coproj.tmpl b/tools/export/coide_nucleo_f410rb.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f410rb.coproj.tmpl rename to tools/export/coide_nucleo_f410rb.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f411re.coproj.tmpl b/tools/export/coide_nucleo_f411re.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f411re.coproj.tmpl rename to tools/export/coide_nucleo_f411re.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_f446re.coproj.tmpl b/tools/export/coide_nucleo_f446re.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_f446re.coproj.tmpl rename to tools/export/coide_nucleo_f446re.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_l053r8.coproj.tmpl b/tools/export/coide_nucleo_l053r8.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nucleo_l053r8.coproj.tmpl rename to tools/export/coide_nucleo_l053r8.coproj.tmpl diff --git a/workspace_tools/export/coide_nucleo_l152re.coproj.tmpl b/tools/export/coide_nucleo_l152re.coproj.tmpl similarity index 100% rename from 
workspace_tools/export/coide_nucleo_l152re.coproj.tmpl rename to tools/export/coide_nucleo_l152re.coproj.tmpl diff --git a/workspace_tools/export/coide_nz32_sc151.coproj.tmpl b/tools/export/coide_nz32_sc151.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_nz32_sc151.coproj.tmpl rename to tools/export/coide_nz32_sc151.coproj.tmpl diff --git a/workspace_tools/export/coide_ublox_c027.coproj.tmpl b/tools/export/coide_ublox_c027.coproj.tmpl similarity index 100% rename from workspace_tools/export/coide_ublox_c027.coproj.tmpl rename to tools/export/coide_ublox_c027.coproj.tmpl diff --git a/workspace_tools/export/ds5_5.py b/tools/export/ds5_5.py similarity index 100% rename from workspace_tools/export/ds5_5.py rename to tools/export/ds5_5.py diff --git a/workspace_tools/export/ds5_5_arch_pro.cproject.tmpl b/tools/export/ds5_5_arch_pro.cproject.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_arch_pro.cproject.tmpl rename to tools/export/ds5_5_arch_pro.cproject.tmpl diff --git a/workspace_tools/export/ds5_5_arch_pro.launch.tmpl b/tools/export/ds5_5_arch_pro.launch.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_arch_pro.launch.tmpl rename to tools/export/ds5_5_arch_pro.launch.tmpl diff --git a/workspace_tools/export/ds5_5_arch_pro.project.tmpl b/tools/export/ds5_5_arch_pro.project.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_arch_pro.project.tmpl rename to tools/export/ds5_5_arch_pro.project.tmpl diff --git a/workspace_tools/export/ds5_5_lpc11u24.cproject.tmpl b/tools/export/ds5_5_lpc11u24.cproject.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc11u24.cproject.tmpl rename to tools/export/ds5_5_lpc11u24.cproject.tmpl diff --git a/workspace_tools/export/ds5_5_lpc11u24.launch.tmpl b/tools/export/ds5_5_lpc11u24.launch.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc11u24.launch.tmpl rename to tools/export/ds5_5_lpc11u24.launch.tmpl diff --git a/workspace_tools/export/ds5_5_lpc11u24.project.tmpl b/tools/export/ds5_5_lpc11u24.project.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc11u24.project.tmpl rename to tools/export/ds5_5_lpc11u24.project.tmpl diff --git a/workspace_tools/export/ds5_5_lpc1768.cproject.tmpl b/tools/export/ds5_5_lpc1768.cproject.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc1768.cproject.tmpl rename to tools/export/ds5_5_lpc1768.cproject.tmpl diff --git a/workspace_tools/export/ds5_5_lpc1768.launch.tmpl b/tools/export/ds5_5_lpc1768.launch.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc1768.launch.tmpl rename to tools/export/ds5_5_lpc1768.launch.tmpl diff --git a/workspace_tools/export/ds5_5_lpc1768.project.tmpl b/tools/export/ds5_5_lpc1768.project.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc1768.project.tmpl rename to tools/export/ds5_5_lpc1768.project.tmpl diff --git a/workspace_tools/export/ds5_5_lpc812.cproject.tmpl b/tools/export/ds5_5_lpc812.cproject.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc812.cproject.tmpl rename to tools/export/ds5_5_lpc812.cproject.tmpl diff --git a/workspace_tools/export/ds5_5_lpc812.launch.tmpl b/tools/export/ds5_5_lpc812.launch.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_lpc812.launch.tmpl rename to tools/export/ds5_5_lpc812.launch.tmpl diff --git a/workspace_tools/export/ds5_5_lpc812.project.tmpl b/tools/export/ds5_5_lpc812.project.tmpl similarity index 100% rename from 
workspace_tools/export/ds5_5_lpc812.project.tmpl rename to tools/export/ds5_5_lpc812.project.tmpl diff --git a/workspace_tools/export/ds5_5_rz_a1h.cproject.tmpl b/tools/export/ds5_5_rz_a1h.cproject.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_rz_a1h.cproject.tmpl rename to tools/export/ds5_5_rz_a1h.cproject.tmpl diff --git a/workspace_tools/export/ds5_5_rz_a1h.launch.tmpl b/tools/export/ds5_5_rz_a1h.launch.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_rz_a1h.launch.tmpl rename to tools/export/ds5_5_rz_a1h.launch.tmpl diff --git a/workspace_tools/export/ds5_5_rz_a1h.project.tmpl b/tools/export/ds5_5_rz_a1h.project.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_rz_a1h.project.tmpl rename to tools/export/ds5_5_rz_a1h.project.tmpl diff --git a/workspace_tools/export/ds5_5_ublox_c027.cproject.tmpl b/tools/export/ds5_5_ublox_c027.cproject.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_ublox_c027.cproject.tmpl rename to tools/export/ds5_5_ublox_c027.cproject.tmpl diff --git a/workspace_tools/export/ds5_5_ublox_c027.launch.tmpl b/tools/export/ds5_5_ublox_c027.launch.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_ublox_c027.launch.tmpl rename to tools/export/ds5_5_ublox_c027.launch.tmpl diff --git a/workspace_tools/export/ds5_5_ublox_c027.project.tmpl b/tools/export/ds5_5_ublox_c027.project.tmpl similarity index 100% rename from workspace_tools/export/ds5_5_ublox_c027.project.tmpl rename to tools/export/ds5_5_ublox_c027.project.tmpl diff --git a/workspace_tools/export/e2studio.py b/tools/export/e2studio.py similarity index 100% rename from workspace_tools/export/e2studio.py rename to tools/export/e2studio.py diff --git a/workspace_tools/export/e2studio_launch.tmpl b/tools/export/e2studio_launch.tmpl similarity index 100% rename from workspace_tools/export/e2studio_launch.tmpl rename to tools/export/e2studio_launch.tmpl diff --git a/workspace_tools/export/e2studio_rz_a1h_cproject.tmpl b/tools/export/e2studio_rz_a1h_cproject.tmpl similarity index 100% rename from workspace_tools/export/e2studio_rz_a1h_cproject.tmpl rename to tools/export/e2studio_rz_a1h_cproject.tmpl diff --git a/workspace_tools/export/e2studio_rz_a1h_gdbinit.tmpl b/tools/export/e2studio_rz_a1h_gdbinit.tmpl similarity index 100% rename from workspace_tools/export/e2studio_rz_a1h_gdbinit.tmpl rename to tools/export/e2studio_rz_a1h_gdbinit.tmpl diff --git a/workspace_tools/export/e2studio_rz_a1h_project.tmpl b/tools/export/e2studio_rz_a1h_project.tmpl similarity index 100% rename from workspace_tools/export/e2studio_rz_a1h_project.tmpl rename to tools/export/e2studio_rz_a1h_project.tmpl diff --git a/workspace_tools/export/emblocks.eix.tmpl b/tools/export/emblocks.eix.tmpl similarity index 100% rename from workspace_tools/export/emblocks.eix.tmpl rename to tools/export/emblocks.eix.tmpl diff --git a/workspace_tools/export/emblocks.py b/tools/export/emblocks.py similarity index 98% rename from workspace_tools/export/emblocks.py rename to tools/export/emblocks.py index 88a14d3354a..d964b3dc96b 100644 --- a/workspace_tools/export/emblocks.py +++ b/tools/export/emblocks.py @@ -16,7 +16,7 @@ """ from exporters import Exporter from os.path import splitext, basename -from workspace_tools.targets import TARGETS +from tools.targets import TARGETS # filter all the GCC_ARM targets out of the target list gccTargets = [] diff --git a/workspace_tools/export/exporters.py b/tools/export/exporters.py similarity index 71% rename from 
workspace_tools/export/exporters.py rename to tools/export/exporters.py index 181f753bd9f..f2ff2e541be 100644 --- a/workspace_tools/export/exporters.py +++ b/tools/export/exporters.py @@ -7,15 +7,18 @@ from jinja2.environment import Environment from contextlib import closing from zipfile import ZipFile, ZIP_DEFLATED +from operator import add -from workspace_tools.utils import mkdir -from workspace_tools.toolchains import TOOLCHAIN_CLASSES -from workspace_tools.targets import TARGET_MAP +from tools.utils import mkdir +from tools.toolchains import TOOLCHAIN_CLASSES +from tools.targets import TARGET_MAP from project_generator.generate import Generator from project_generator.project import Project from project_generator.settings import ProjectSettings +from tools.config import Config + class OldLibrariesException(Exception): pass class Exporter(object): @@ -31,6 +34,7 @@ def __init__(self, target, inputDir, program_name, build_url_resolver, extra_sym jinja_loader = FileSystemLoader(os.path.dirname(os.path.abspath(__file__))) self.jinja_environment = Environment(loader=jinja_loader) self.extra_symbols = extra_symbols + self.config_macros = [] def get_toolchain(self): return self.TOOLCHAIN @@ -46,24 +50,40 @@ def __scan_and_copy(self, src_path, trg_path): self.toolchain.copy_files(r, trg_path, rel_path=src_path) return resources + @staticmethod + def _get_dir_grouped_files(files): + """ Get grouped files based on the dirname """ + files_grouped = {} + for file in files: + rel_path = os.path.relpath(file, os.getcwd()) + dir_path = os.path.dirname(rel_path) + if dir_path == '': + # all files within the current dir go into Source_Files + dir_path = 'Source_Files' + if not dir_path in files_grouped.keys(): + files_grouped[dir_path] = [] + files_grouped[dir_path].append(file) + return files_grouped + def progen_get_project_data(self): """ Get ProGen project data """ # provide default data, some tools don't require any additional # tool specific settings - sources = [] + code_files = [] for r_type in ['c_sources', 'cpp_sources', 's_sources']: + for file in getattr(self.resources, r_type): - sources.append(file) + code_files.append(file) + + sources_files = code_files + self.resources.hex_files + self.resources.objects + \ + self.resources.libraries + sources_grouped = Exporter._get_dir_grouped_files(sources_files) + headers_grouped = Exporter._get_dir_grouped_files(self.resources.headers) project_data = { 'common': { - 'sources': { - 'Source Files': sources + self.resources.hex_files + - self.resources.objects + self.resources.libraries, - }, - 'includes': { - 'Include Files': self.resources.headers, - }, + 'sources': sources_grouped, + 'includes': headers_grouped, + 'build_dir':'.build', 'target': [TARGET_MAP[self.target].progen['target']], 'macros': self.get_symbols(), 'export_dir': [self.inputDir], @@ -73,7 +93,7 @@ def progen_get_project_data(self): return project_data
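
A small editorial sketch of how `_get_dir_grouped_files` above buckets files (paths are invented and resolved relative to the current working directory):

```python
# Illustration only: files in the current directory fall into 'Source_Files',
# everything else is grouped under its directory name.
Exporter._get_dir_grouped_files(["main.cpp", "drivers/uart.c", "drivers/spi.c"])
# -> {'Source_Files': ['main.cpp'],
#     'drivers': ['drivers/uart.c', 'drivers/spi.c']}
```

def progen_gen_file(self, tool_name, project_data): """ Generate project using ProGen Project API """ settings = ProjectSettings() project = Project(self.program_name, [project_data], settings) # TODO: Fix this, the inc_dirs are not valid (our scripts copy files), therefore progen @@ -95,17 +115,20 @@ def __scan_all(self, path): return resources - def scan_and_copy_resources(self, prj_path, trg_path): + def scan_and_copy_resources(self, prj_paths, trg_path, relative=False): # Copy only the file for the required target and toolchain lib_builds = [] + # Create the configuration 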
object + cfg = Config(self.target, prj_paths) for src in ['lib', 'src']: - resources = self.__scan_and_copy(join(prj_path, src), trg_path) + resources = reduce(add, [self.__scan_and_copy(join(path, src), trg_path) for path in prj_paths]) lib_builds.extend(resources.lib_builds) # The repository files for repo_dir in resources.repo_dirs: repo_files = self.__scan_all(repo_dir) - self.toolchain.copy_files(repo_files, trg_path, rel_path=join(prj_path, src)) + for path in prj_paths: + self.toolchain.copy_files(repo_files, trg_path, rel_path=join(path, src)) # The libraries builds for bld in lib_builds: @@ -120,9 +143,17 @@ def scan_and_copy_resources(self, prj_path, trg_path): fhandle = file(join(hgdir, 'keep.me'), 'a') fhandle.close() - # Final scan of the actual exported resources - self.resources = self.toolchain.scan_resources(trg_path) - self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH) + if not relative: + # Final scan of the actual exported resources + self.resources = self.toolchain.scan_resources(trg_path) + self.resources.relative_to(trg_path, self.DOT_IN_RELATIVE_PATH) + else: + # use the project source dirs (not the destination) + self.resources = reduce(add, [self.toolchain.scan_resources(path) for path in prj_paths]) + # Add all JSON files discovered during scanning to the configuration object + cfg.add_config_files(self.resources.json_files) + # Get data from the configuration system + self.config_macros = cfg.get_config_data_macros() # Check the existence of a binary build of the mbed library for the desired target # This prevents exporting the mbed libraries from source # if not self.toolchain.mbed_libs:
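
An editorial sketch of the new `relative` flow above (the exporter instance and paths are invented): the source trees are scanned in place, the discovered JSON config files feed the `Config` object, and the resulting macros surface through `get_symbols()`:

```python
# Illustration only; assumes an already constructed Exporter subclass instance.
exporter.scan_and_copy_resources(["source"], "export_tmp", relative=True)
symbols = exporter.get_symbols()   # now includes exporter.config_macros
```

@@ -141,7 +172,7 @@ def get_symbols(self, add_extra_symbols=True): """ This function returns symbols which must be exported. Please add / overwrite symbols in each exporter separately """ - symbols = self.toolchain.get_symbols() + symbols = self.toolchain.get_symbols() + self.config_macros # We have extra symbols from e.g. 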
libraries, we want to have them also added to export if add_extra_symbols: if self.extra_symbols is not None: diff --git a/workspace_tools/export/gcc_arm_arch_ble.tmpl b/tools/export/gcc_arm_arch_ble.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_arch_ble.tmpl rename to tools/export/gcc_arm_arch_ble.tmpl diff --git a/workspace_tools/export/gcc_arm_arch_max.tmpl b/tools/export/gcc_arm_arch_max.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_arch_max.tmpl rename to tools/export/gcc_arm_arch_max.tmpl diff --git a/workspace_tools/export/gcc_arm_arch_pro.tmpl b/tools/export/gcc_arm_arch_pro.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_arch_pro.tmpl rename to tools/export/gcc_arm_arch_pro.tmpl diff --git a/workspace_tools/export/gcc_arm_b96b_f446ve.tmpl b/tools/export/gcc_arm_b96b_f446ve.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_b96b_f446ve.tmpl rename to tools/export/gcc_arm_b96b_f446ve.tmpl diff --git a/workspace_tools/export/gcc_arm_common.tmpl b/tools/export/gcc_arm_common.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_common.tmpl rename to tools/export/gcc_arm_common.tmpl diff --git a/workspace_tools/export/gcc_arm_delta_dfcm_nnn40.tmpl b/tools/export/gcc_arm_delta_dfcm_nnn40.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_delta_dfcm_nnn40.tmpl rename to tools/export/gcc_arm_delta_dfcm_nnn40.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f051r8.tmpl b/tools/export/gcc_arm_disco_f051r8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f051r8.tmpl rename to tools/export/gcc_arm_disco_f051r8.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f100rb.tmpl b/tools/export/gcc_arm_disco_f100rb.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f100rb.tmpl rename to tools/export/gcc_arm_disco_f100rb.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f303vc.tmpl b/tools/export/gcc_arm_disco_f303vc.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f303vc.tmpl rename to tools/export/gcc_arm_disco_f303vc.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f334c8.tmpl b/tools/export/gcc_arm_disco_f334c8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f334c8.tmpl rename to tools/export/gcc_arm_disco_f334c8.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f401vc.tmpl b/tools/export/gcc_arm_disco_f401vc.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f401vc.tmpl rename to tools/export/gcc_arm_disco_f401vc.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f407vg.tmpl b/tools/export/gcc_arm_disco_f407vg.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f407vg.tmpl rename to tools/export/gcc_arm_disco_f407vg.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f429zi.tmpl b/tools/export/gcc_arm_disco_f429zi.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f429zi.tmpl rename to tools/export/gcc_arm_disco_f429zi.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f469ni.tmpl b/tools/export/gcc_arm_disco_f469ni.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f469ni.tmpl rename to tools/export/gcc_arm_disco_f469ni.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_f746ng.tmpl b/tools/export/gcc_arm_disco_f746ng.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_f746ng.tmpl rename to 
tools/export/gcc_arm_disco_f746ng.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_l053c8.tmpl b/tools/export/gcc_arm_disco_l053c8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_l053c8.tmpl rename to tools/export/gcc_arm_disco_l053c8.tmpl diff --git a/workspace_tools/export/gcc_arm_disco_l476vg.tmpl b/tools/export/gcc_arm_disco_l476vg.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_disco_l476vg.tmpl rename to tools/export/gcc_arm_disco_l476vg.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32_common.tmpl b/tools/export/gcc_arm_efm32_common.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32_common.tmpl rename to tools/export/gcc_arm_efm32_common.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32gg_stk3700.tmpl b/tools/export/gcc_arm_efm32gg_stk3700.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32gg_stk3700.tmpl rename to tools/export/gcc_arm_efm32gg_stk3700.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32hg_stk3400.tmpl b/tools/export/gcc_arm_efm32hg_stk3400.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32hg_stk3400.tmpl rename to tools/export/gcc_arm_efm32hg_stk3400.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32lg_stk3600.tmpl b/tools/export/gcc_arm_efm32lg_stk3600.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32lg_stk3600.tmpl rename to tools/export/gcc_arm_efm32lg_stk3600.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32pg_stk3401.tmpl b/tools/export/gcc_arm_efm32pg_stk3401.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32pg_stk3401.tmpl rename to tools/export/gcc_arm_efm32pg_stk3401.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32wg_stk3800.tmpl b/tools/export/gcc_arm_efm32wg_stk3800.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32wg_stk3800.tmpl rename to tools/export/gcc_arm_efm32wg_stk3800.tmpl diff --git a/workspace_tools/export/gcc_arm_efm32zg_stk3200.tmpl b/tools/export/gcc_arm_efm32zg_stk3200.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_efm32zg_stk3200.tmpl rename to tools/export/gcc_arm_efm32zg_stk3200.tmpl diff --git a/workspace_tools/export/gcc_arm_hrm1017.tmpl b/tools/export/gcc_arm_hrm1017.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_hrm1017.tmpl rename to tools/export/gcc_arm_hrm1017.tmpl diff --git a/workspace_tools/export/gcc_arm_k20d50m.tmpl b/tools/export/gcc_arm_k20d50m.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_k20d50m.tmpl rename to tools/export/gcc_arm_k20d50m.tmpl diff --git a/workspace_tools/export/gcc_arm_k22f.tmpl b/tools/export/gcc_arm_k22f.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_k22f.tmpl rename to tools/export/gcc_arm_k22f.tmpl diff --git a/workspace_tools/export/gcc_arm_k64f.tmpl b/tools/export/gcc_arm_k64f.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_k64f.tmpl rename to tools/export/gcc_arm_k64f.tmpl diff --git a/workspace_tools/export/gcc_arm_kl05z.tmpl b/tools/export/gcc_arm_kl05z.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_kl05z.tmpl rename to tools/export/gcc_arm_kl05z.tmpl diff --git a/workspace_tools/export/gcc_arm_kl25z.tmpl b/tools/export/gcc_arm_kl25z.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_kl25z.tmpl rename to tools/export/gcc_arm_kl25z.tmpl diff --git a/workspace_tools/export/gcc_arm_kl43z.tmpl 
b/tools/export/gcc_arm_kl43z.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_kl43z.tmpl rename to tools/export/gcc_arm_kl43z.tmpl diff --git a/workspace_tools/export/gcc_arm_kl46z.tmpl b/tools/export/gcc_arm_kl46z.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_kl46z.tmpl rename to tools/export/gcc_arm_kl46z.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc1114.tmpl b/tools/export/gcc_arm_lpc1114.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc1114.tmpl rename to tools/export/gcc_arm_lpc1114.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc11u24.tmpl b/tools/export/gcc_arm_lpc11u24.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc11u24.tmpl rename to tools/export/gcc_arm_lpc11u24.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc11u35_401.tmpl b/tools/export/gcc_arm_lpc11u35_401.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc11u35_401.tmpl rename to tools/export/gcc_arm_lpc11u35_401.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc11u35_501.tmpl b/tools/export/gcc_arm_lpc11u35_501.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc11u35_501.tmpl rename to tools/export/gcc_arm_lpc11u35_501.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc11u37h_401.tmpl b/tools/export/gcc_arm_lpc11u37h_401.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc11u37h_401.tmpl rename to tools/export/gcc_arm_lpc11u37h_401.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc1549.tmpl b/tools/export/gcc_arm_lpc1549.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc1549.tmpl rename to tools/export/gcc_arm_lpc1549.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc1768.tmpl b/tools/export/gcc_arm_lpc1768.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc1768.tmpl rename to tools/export/gcc_arm_lpc1768.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc2368.tmpl b/tools/export/gcc_arm_lpc2368.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc2368.tmpl rename to tools/export/gcc_arm_lpc2368.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc2460.tmpl b/tools/export/gcc_arm_lpc2460.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc2460.tmpl rename to tools/export/gcc_arm_lpc2460.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc4088.tmpl b/tools/export/gcc_arm_lpc4088.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc4088.tmpl rename to tools/export/gcc_arm_lpc4088.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc4088_dm.tmpl b/tools/export/gcc_arm_lpc4088_dm.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc4088_dm.tmpl rename to tools/export/gcc_arm_lpc4088_dm.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc4330_m4.tmpl b/tools/export/gcc_arm_lpc4330_m4.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc4330_m4.tmpl rename to tools/export/gcc_arm_lpc4330_m4.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc810.tmpl b/tools/export/gcc_arm_lpc810.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc810.tmpl rename to tools/export/gcc_arm_lpc810.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc812.tmpl b/tools/export/gcc_arm_lpc812.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpc812.tmpl rename to tools/export/gcc_arm_lpc812.tmpl diff --git a/workspace_tools/export/gcc_arm_lpc824.tmpl b/tools/export/gcc_arm_lpc824.tmpl similarity 
index 100% rename from workspace_tools/export/gcc_arm_lpc824.tmpl rename to tools/export/gcc_arm_lpc824.tmpl diff --git a/workspace_tools/export/gcc_arm_lpccappuccino.tmpl b/tools/export/gcc_arm_lpccappuccino.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_lpccappuccino.tmpl rename to tools/export/gcc_arm_lpccappuccino.tmpl diff --git a/workspace_tools/export/gcc_arm_max32600mbed.tmpl b/tools/export/gcc_arm_max32600mbed.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_max32600mbed.tmpl rename to tools/export/gcc_arm_max32600mbed.tmpl diff --git a/workspace_tools/export/gcc_arm_maxwsnenv.tmpl b/tools/export/gcc_arm_maxwsnenv.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_maxwsnenv.tmpl rename to tools/export/gcc_arm_maxwsnenv.tmpl diff --git a/workspace_tools/export/gcc_arm_mote_l152rc.tmpl b/tools/export/gcc_arm_mote_l152rc.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_mote_l152rc.tmpl rename to tools/export/gcc_arm_mote_l152rc.tmpl diff --git a/workspace_tools/export/gcc_arm_mts_gambit.tmpl b/tools/export/gcc_arm_mts_gambit.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_mts_gambit.tmpl rename to tools/export/gcc_arm_mts_gambit.tmpl diff --git a/workspace_tools/export/gcc_arm_mts_mdot_f405rg.tmpl b/tools/export/gcc_arm_mts_mdot_f405rg.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_mts_mdot_f405rg.tmpl rename to tools/export/gcc_arm_mts_mdot_f405rg.tmpl diff --git a/workspace_tools/export/gcc_arm_mts_mdot_f411re.tmpl b/tools/export/gcc_arm_mts_mdot_f411re.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_mts_mdot_f411re.tmpl rename to tools/export/gcc_arm_mts_mdot_f411re.tmpl diff --git a/workspace_tools/export/gcc_arm_nrf51822.tmpl b/tools/export/gcc_arm_nrf51822.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nrf51822.tmpl rename to tools/export/gcc_arm_nrf51822.tmpl diff --git a/workspace_tools/export/gcc_arm_nrf51_dk.tmpl b/tools/export/gcc_arm_nrf51_dk.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nrf51_dk.tmpl rename to tools/export/gcc_arm_nrf51_dk.tmpl diff --git a/workspace_tools/export/gcc_arm_nrf51_dongle.tmpl b/tools/export/gcc_arm_nrf51_dongle.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nrf51_dongle.tmpl rename to tools/export/gcc_arm_nrf51_dongle.tmpl diff --git a/workspace_tools/export/gcc_arm_nrf51_microbit.tmpl b/tools/export/gcc_arm_nrf51_microbit.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nrf51_microbit.tmpl rename to tools/export/gcc_arm_nrf51_microbit.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f030r8.tmpl b/tools/export/gcc_arm_nucleo_f030r8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f030r8.tmpl rename to tools/export/gcc_arm_nucleo_f030r8.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f031k6.tmpl b/tools/export/gcc_arm_nucleo_f031k6.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f031k6.tmpl rename to tools/export/gcc_arm_nucleo_f031k6.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f042k6.tmpl b/tools/export/gcc_arm_nucleo_f042k6.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f042k6.tmpl rename to tools/export/gcc_arm_nucleo_f042k6.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f070rb.tmpl b/tools/export/gcc_arm_nucleo_f070rb.tmpl similarity index 100% rename from 
workspace_tools/export/gcc_arm_nucleo_f070rb.tmpl rename to tools/export/gcc_arm_nucleo_f070rb.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f072rb.tmpl b/tools/export/gcc_arm_nucleo_f072rb.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f072rb.tmpl rename to tools/export/gcc_arm_nucleo_f072rb.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f091rc.tmpl b/tools/export/gcc_arm_nucleo_f091rc.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f091rc.tmpl rename to tools/export/gcc_arm_nucleo_f091rc.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f103rb.tmpl b/tools/export/gcc_arm_nucleo_f103rb.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f103rb.tmpl rename to tools/export/gcc_arm_nucleo_f103rb.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f302r8.tmpl b/tools/export/gcc_arm_nucleo_f302r8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f302r8.tmpl rename to tools/export/gcc_arm_nucleo_f302r8.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f303k8.tmpl b/tools/export/gcc_arm_nucleo_f303k8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f303k8.tmpl rename to tools/export/gcc_arm_nucleo_f303k8.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f303re.tmpl b/tools/export/gcc_arm_nucleo_f303re.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f303re.tmpl rename to tools/export/gcc_arm_nucleo_f303re.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f334r8.tmpl b/tools/export/gcc_arm_nucleo_f334r8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f334r8.tmpl rename to tools/export/gcc_arm_nucleo_f334r8.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f401re.tmpl b/tools/export/gcc_arm_nucleo_f401re.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f401re.tmpl rename to tools/export/gcc_arm_nucleo_f401re.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f410rb.tmpl b/tools/export/gcc_arm_nucleo_f410rb.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f410rb.tmpl rename to tools/export/gcc_arm_nucleo_f410rb.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f411re.tmpl b/tools/export/gcc_arm_nucleo_f411re.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f411re.tmpl rename to tools/export/gcc_arm_nucleo_f411re.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f446re.tmpl b/tools/export/gcc_arm_nucleo_f446re.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f446re.tmpl rename to tools/export/gcc_arm_nucleo_f446re.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_f746zg.tmpl b/tools/export/gcc_arm_nucleo_f746zg.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_f746zg.tmpl rename to tools/export/gcc_arm_nucleo_f746zg.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_l031k6.tmpl b/tools/export/gcc_arm_nucleo_l031k6.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_l031k6.tmpl rename to tools/export/gcc_arm_nucleo_l031k6.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_l053r8.tmpl b/tools/export/gcc_arm_nucleo_l053r8.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_l053r8.tmpl rename to tools/export/gcc_arm_nucleo_l053r8.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_l073rz.tmpl b/tools/export/gcc_arm_nucleo_l073rz.tmpl 
similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_l073rz.tmpl rename to tools/export/gcc_arm_nucleo_l073rz.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_l152re.tmpl b/tools/export/gcc_arm_nucleo_l152re.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_l152re.tmpl rename to tools/export/gcc_arm_nucleo_l152re.tmpl diff --git a/workspace_tools/export/gcc_arm_nucleo_l476rg.tmpl b/tools/export/gcc_arm_nucleo_l476rg.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nucleo_l476rg.tmpl rename to tools/export/gcc_arm_nucleo_l476rg.tmpl diff --git a/workspace_tools/export/gcc_arm_nz32_sc151.tmpl b/tools/export/gcc_arm_nz32_sc151.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_nz32_sc151.tmpl rename to tools/export/gcc_arm_nz32_sc151.tmpl diff --git a/workspace_tools/export/gcc_arm_rblab_blenano.tmpl b/tools/export/gcc_arm_rblab_blenano.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_rblab_blenano.tmpl rename to tools/export/gcc_arm_rblab_blenano.tmpl diff --git a/workspace_tools/export/gcc_arm_rblab_nrf51822.tmpl b/tools/export/gcc_arm_rblab_nrf51822.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_rblab_nrf51822.tmpl rename to tools/export/gcc_arm_rblab_nrf51822.tmpl diff --git a/workspace_tools/export/gcc_arm_rz_a1h.tmpl b/tools/export/gcc_arm_rz_a1h.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_rz_a1h.tmpl rename to tools/export/gcc_arm_rz_a1h.tmpl diff --git a/workspace_tools/export/gcc_arm_samd21g18a.tmpl b/tools/export/gcc_arm_samd21g18a.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_samd21g18a.tmpl rename to tools/export/gcc_arm_samd21g18a.tmpl diff --git a/workspace_tools/export/gcc_arm_samd21j18a.tmpl b/tools/export/gcc_arm_samd21j18a.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_samd21j18a.tmpl rename to tools/export/gcc_arm_samd21j18a.tmpl diff --git a/workspace_tools/export/gcc_arm_samg55j19.tmpl b/tools/export/gcc_arm_samg55j19.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_samg55j19.tmpl rename to tools/export/gcc_arm_samg55j19.tmpl diff --git a/workspace_tools/export/gcc_arm_saml21j18a.tmpl b/tools/export/gcc_arm_saml21j18a.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_saml21j18a.tmpl rename to tools/export/gcc_arm_saml21j18a.tmpl diff --git a/workspace_tools/export/gcc_arm_samr21g18a.tmpl b/tools/export/gcc_arm_samr21g18a.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_samr21g18a.tmpl rename to tools/export/gcc_arm_samr21g18a.tmpl diff --git a/workspace_tools/export/gcc_arm_seeed_tiny_ble.tmpl b/tools/export/gcc_arm_seeed_tiny_ble.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_seeed_tiny_ble.tmpl rename to tools/export/gcc_arm_seeed_tiny_ble.tmpl diff --git a/workspace_tools/export/gcc_arm_ssci824.tmpl b/tools/export/gcc_arm_ssci824.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_ssci824.tmpl rename to tools/export/gcc_arm_ssci824.tmpl diff --git a/workspace_tools/export/gcc_arm_stm32f407.tmpl b/tools/export/gcc_arm_stm32f407.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_stm32f407.tmpl rename to tools/export/gcc_arm_stm32f407.tmpl diff --git a/workspace_tools/export/gcc_arm_teensy3_1.tmpl b/tools/export/gcc_arm_teensy3_1.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_teensy3_1.tmpl rename to 
tools/export/gcc_arm_teensy3_1.tmpl diff --git a/workspace_tools/export/gcc_arm_ublox_c027.tmpl b/tools/export/gcc_arm_ublox_c027.tmpl similarity index 100% rename from workspace_tools/export/gcc_arm_ublox_c027.tmpl rename to tools/export/gcc_arm_ublox_c027.tmpl diff --git a/workspace_tools/export/gccarm.py b/tools/export/gccarm.py old mode 100755 new mode 100644 similarity index 100% rename from workspace_tools/export/gccarm.py rename to tools/export/gccarm.py diff --git a/workspace_tools/export/iar.py b/tools/export/iar.py similarity index 96% rename from workspace_tools/export/iar.py rename to tools/export/iar.py index 3ba8e64b0c2..73565da47ae 100644 --- a/workspace_tools/export/iar.py +++ b/tools/export/iar.py @@ -18,8 +18,8 @@ import os from project_generator_definitions.definitions import ProGenDef -from workspace_tools.export.exporters import Exporter -from workspace_tools.targets import TARGET_MAP, TARGET_NAMES +from tools.export.exporters import Exporter +from tools.targets import TARGET_MAP, TARGET_NAMES # If you wish to add a new target, add it to project_generator_definitions, and then # define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``) @@ -67,6 +67,7 @@ def generate(self): project_data['tool_specific'] = {} project_data['tool_specific'].update(tool_specific) + project_data['common']['build_dir'] = os.path.join(project_data['common']['build_dir'], 'iar_arm') self.progen_gen_file('iar_arm', project_data) # Currently not used, we should reuse folder_name to create virtual folders diff --git a/workspace_tools/export/iar_nucleo_f746zg.ewp.tmpl b/tools/export/iar_nucleo_f746zg.ewp.tmpl similarity index 100% rename from workspace_tools/export/iar_nucleo_f746zg.ewp.tmpl rename to tools/export/iar_nucleo_f746zg.ewp.tmpl diff --git a/workspace_tools/export/iar_rz_a1h.ewp.tmpl b/tools/export/iar_rz_a1h.ewp.tmpl similarity index 100% rename from workspace_tools/export/iar_rz_a1h.ewp.tmpl rename to tools/export/iar_rz_a1h.ewp.tmpl diff --git a/workspace_tools/export/iar_template.ewp.tmpl b/tools/export/iar_template.ewp.tmpl similarity index 100% rename from workspace_tools/export/iar_template.ewp.tmpl rename to tools/export/iar_template.ewp.tmpl diff --git a/workspace_tools/export/kds.py b/tools/export/kds.py similarity index 100% rename from workspace_tools/export/kds.py rename to tools/export/kds.py diff --git a/workspace_tools/export/kds_k22f_cproject.tmpl b/tools/export/kds_k22f_cproject.tmpl similarity index 100% rename from workspace_tools/export/kds_k22f_cproject.tmpl rename to tools/export/kds_k22f_cproject.tmpl diff --git a/workspace_tools/export/kds_k22f_project.tmpl b/tools/export/kds_k22f_project.tmpl similarity index 100% rename from workspace_tools/export/kds_k22f_project.tmpl rename to tools/export/kds_k22f_project.tmpl diff --git a/workspace_tools/export/kds_k64f_cproject.tmpl b/tools/export/kds_k64f_cproject.tmpl similarity index 100% rename from workspace_tools/export/kds_k64f_cproject.tmpl rename to tools/export/kds_k64f_cproject.tmpl diff --git a/workspace_tools/export/kds_k64f_project.tmpl b/tools/export/kds_k64f_project.tmpl similarity index 100% rename from workspace_tools/export/kds_k64f_project.tmpl rename to tools/export/kds_k64f_project.tmpl diff --git a/workspace_tools/export/kds_launch.tmpl b/tools/export/kds_launch.tmpl similarity index 100% rename from workspace_tools/export/kds_launch.tmpl rename to tools/export/kds_launch.tmpl diff --git a/workspace_tools/export/simplicityv3.py b/tools/export/simplicityv3.py 
similarity index 100% rename from workspace_tools/export/simplicityv3.py rename to tools/export/simplicityv3.py diff --git a/workspace_tools/export/simplicityv3_slsproj.tmpl b/tools/export/simplicityv3_slsproj.tmpl similarity index 100% rename from workspace_tools/export/simplicityv3_slsproj.tmpl rename to tools/export/simplicityv3_slsproj.tmpl diff --git a/workspace_tools/export/sw4stm32.py b/tools/export/sw4stm32.py similarity index 99% rename from workspace_tools/export/sw4stm32.py rename to tools/export/sw4stm32.py index cc383c3670b..0d9e8e881db 100644 --- a/workspace_tools/export/sw4stm32.py +++ b/tools/export/sw4stm32.py @@ -17,7 +17,7 @@ from exporters import Exporter from os.path import splitext, basename, join from random import randint -from workspace_tools.utils import mkdir +from tools.utils import mkdir class Sw4STM32(Exporter): diff --git a/workspace_tools/export/sw4stm32_cproject_common.tmpl b/tools/export/sw4stm32_cproject_common.tmpl similarity index 100% rename from workspace_tools/export/sw4stm32_cproject_common.tmpl rename to tools/export/sw4stm32_cproject_common.tmpl diff --git a/workspace_tools/export/sw4stm32_language_settings_commom.tmpl b/tools/export/sw4stm32_language_settings_commom.tmpl similarity index 100% rename from workspace_tools/export/sw4stm32_language_settings_commom.tmpl rename to tools/export/sw4stm32_language_settings_commom.tmpl diff --git a/workspace_tools/export/sw4stm32_project_common.tmpl b/tools/export/sw4stm32_project_common.tmpl similarity index 100% rename from workspace_tools/export/sw4stm32_project_common.tmpl rename to tools/export/sw4stm32_project_common.tmpl diff --git a/workspace_tools/export/uvision.uvproj.tmpl b/tools/export/uvision.uvproj.tmpl similarity index 100% rename from workspace_tools/export/uvision.uvproj.tmpl rename to tools/export/uvision.uvproj.tmpl diff --git a/workspace_tools/export/uvision4.py b/tools/export/uvision4.py similarity index 95% rename from workspace_tools/export/uvision4.py rename to tools/export/uvision4.py index 7f26bfde2da..fe47ada320d 100644 --- a/workspace_tools/export/uvision4.py +++ b/tools/export/uvision4.py @@ -17,8 +17,8 @@ from os.path import basename, join, dirname from project_generator_definitions.definitions import ProGenDef -from workspace_tools.export.exporters import Exporter -from workspace_tools.targets import TARGET_MAP, TARGET_NAMES +from tools.export.exporters import Exporter +from tools.targets import TARGET_MAP, TARGET_NAMES # If you wish to add a new target, add it to project_generator_definitions, and then # define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``) @@ -85,5 +85,6 @@ def generate(self): project_data['common']['macros'].pop(i) i += 1 project_data['common']['macros'].append('__ASSERT_MSG') + project_data['common']['build_dir'] = join(project_data['common']['build_dir'], 'uvision4') self.progen_gen_file('uvision', project_data) diff --git a/workspace_tools/export/uvision5.py b/tools/export/uvision5.py similarity index 97% rename from workspace_tools/export/uvision5.py rename to tools/export/uvision5.py index a8813799a0c..7662ffdfb2c 100644 --- a/workspace_tools/export/uvision5.py +++ b/tools/export/uvision5.py @@ -17,8 +17,8 @@ from os.path import basename, join, dirname from project_generator_definitions.definitions import ProGenDef -from workspace_tools.export.exporters import Exporter -from workspace_tools.targets import TARGET_MAP, TARGET_NAMES +from tools.export.exporters import Exporter +from tools.targets import TARGET_MAP, 
TARGET_NAMES # If you wish to add a new target, add it to project_generator_definitions, and then # define progen_target name in the target class (`` self.progen_target = 'my_target_name' ``) diff --git a/workspace_tools/export/uvision_microlib.uvproj.tmpl b/tools/export/uvision_microlib.uvproj.tmpl similarity index 100% rename from workspace_tools/export/uvision_microlib.uvproj.tmpl rename to tools/export/uvision_microlib.uvproj.tmpl diff --git a/workspace_tools/export/zip.py b/tools/export/zip.py similarity index 100% rename from workspace_tools/export/zip.py rename to tools/export/zip.py diff --git a/workspace_tools/export_test.py b/tools/export_test.py similarity index 98% rename from workspace_tools/export_test.py rename to tools/export_test.py index 6295d03d0eb..fdb990c5416 100644 --- a/workspace_tools/export_test.py +++ b/tools/export_test.py @@ -22,9 +22,9 @@ from shutil import move -from workspace_tools.paths import * -from workspace_tools.utils import mkdir, cmd -from workspace_tools.export import export, setup_user_prj +from tools.paths import * +from tools.utils import mkdir, cmd +from tools.export import export, setup_user_prj USR_PRJ_NAME = "usr_prj" diff --git a/tools/get_config.py b/tools/get_config.py new file mode 100644 index 00000000000..6d8af838176 --- /dev/null +++ b/tools/get_config.py @@ -0,0 +1,94 @@ +#! /usr/bin/env python2 +""" +mbed SDK +Copyright (c) 2011-2013 ARM Limited + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" +import sys +from os.path import isdir, abspath, dirname, join +from os import _exit + +# Be sure that the tools directory is in the search path +ROOT = abspath(join(dirname(__file__), "..")) +sys.path.insert(0, ROOT) + +from tools.utils import args_error +from tools.options import get_default_options_parser +from tools.build_api import get_config +from config import Config +try: + import tools.private_settings as ps +except: + ps = object() + +if __name__ == '__main__': + # Parse Options + parser = get_default_options_parser(add_clean=False, add_options=False) + parser.add_option("--source", dest="source_dir", + default=None, help="The source (input) directory", action="append") + parser.add_option("--prefix", dest="prefix", action="append", + default=None, help="Restrict listing to parameters that have this prefix") + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", + default=False, help="Verbose diagnostic output") + + (options, args) = parser.parse_args() + + for path in options.source_dir : + if not isdir(path) : + args_error(parser, "[ERROR] you passed \"{}\" to --source, which does not exist". 
+ format(path)) + # Target + if options.mcu is None : + args_error(parser, "[ERROR] You should specify an MCU") + target = options.mcu + + # Toolchain + if options.tool is None: + args_error(parser, "[ERROR] You should specify a TOOLCHAIN") + toolchain = options.tool + + options.prefix = options.prefix or [""] + + try: + params, macros = get_config(options.source_dir, target, toolchain) + if not params and not macros: + print "No configuration data available." + _exit(0) + if params: + print "Configuration parameters" + print "------------------------" + for p in params: + for s in options.prefix: + if p.startswith(s): + print(str(params[p]) if not options.verbose else params[p].get_verbose_description()) + break + print "" + + print "Macros" + print "------" + if macros: + print 'Defined with "macros":', macros + print "Generated from configuration parameters:", Config.parameters_to_macros(params) + + except KeyboardInterrupt, e: + print "\n[CTRL+c] exit" + except Exception,e: + if options.verbose: + import traceback + traceback.print_exc(file=sys.stdout) + else: + print "[ERROR] %s" % str(e) + + sys.exit(1) diff --git a/workspace_tools/hooks.py b/tools/hooks.py similarity index 100% rename from workspace_tools/hooks.py rename to tools/hooks.py diff --git a/workspace_tools/host_tests/__init__.py b/tools/host_tests/__init__.py similarity index 100% rename from workspace_tools/host_tests/__init__.py rename to tools/host_tests/__init__.py diff --git a/workspace_tools/host_tests/default_auto.py b/tools/host_tests/default_auto.py similarity index 100% rename from workspace_tools/host_tests/default_auto.py rename to tools/host_tests/default_auto.py diff --git a/workspace_tools/host_tests/detect_auto.py b/tools/host_tests/detect_auto.py similarity index 100% rename from workspace_tools/host_tests/detect_auto.py rename to tools/host_tests/detect_auto.py diff --git a/workspace_tools/host_tests/dev_null_auto.py b/tools/host_tests/dev_null_auto.py similarity index 100% rename from workspace_tools/host_tests/dev_null_auto.py rename to tools/host_tests/dev_null_auto.py diff --git a/workspace_tools/host_tests/echo.py b/tools/host_tests/echo.py similarity index 100% rename from workspace_tools/host_tests/echo.py rename to tools/host_tests/echo.py diff --git a/workspace_tools/host_tests/echo_flow_control.py b/tools/host_tests/echo_flow_control.py similarity index 100% rename from workspace_tools/host_tests/echo_flow_control.py rename to tools/host_tests/echo_flow_control.py diff --git a/workspace_tools/host_tests/example/BroadcastReceive.py b/tools/host_tests/example/BroadcastReceive.py similarity index 100% rename from workspace_tools/host_tests/example/BroadcastReceive.py rename to tools/host_tests/example/BroadcastReceive.py diff --git a/workspace_tools/host_tests/example/BroadcastSend.py b/tools/host_tests/example/BroadcastSend.py similarity index 100% rename from workspace_tools/host_tests/example/BroadcastSend.py rename to tools/host_tests/example/BroadcastSend.py diff --git a/workspace_tools/host_tests/example/MulticastReceive.py b/tools/host_tests/example/MulticastReceive.py similarity index 100% rename from workspace_tools/host_tests/example/MulticastReceive.py rename to tools/host_tests/example/MulticastReceive.py diff --git a/workspace_tools/host_tests/example/MulticastSend.py b/tools/host_tests/example/MulticastSend.py similarity index 100% rename from workspace_tools/host_tests/example/MulticastSend.py rename to tools/host_tests/example/MulticastSend.py diff --git 
a/workspace_tools/host_tests/example/TCPEchoClient.py b/tools/host_tests/example/TCPEchoClient.py similarity index 100% rename from workspace_tools/host_tests/example/TCPEchoClient.py rename to tools/host_tests/example/TCPEchoClient.py diff --git a/workspace_tools/host_tests/example/TCPEchoServer.py b/tools/host_tests/example/TCPEchoServer.py similarity index 100% rename from workspace_tools/host_tests/example/TCPEchoServer.py rename to tools/host_tests/example/TCPEchoServer.py diff --git a/workspace_tools/host_tests/example/UDPEchoClient.py b/tools/host_tests/example/UDPEchoClient.py similarity index 100% rename from workspace_tools/host_tests/example/UDPEchoClient.py rename to tools/host_tests/example/UDPEchoClient.py diff --git a/workspace_tools/host_tests/example/UDPEchoServer.py b/tools/host_tests/example/UDPEchoServer.py similarity index 100% rename from workspace_tools/host_tests/example/UDPEchoServer.py rename to tools/host_tests/example/UDPEchoServer.py diff --git a/workspace_tools/host_tests/example/__init__.py b/tools/host_tests/example/__init__.py similarity index 100% rename from workspace_tools/host_tests/example/__init__.py rename to tools/host_tests/example/__init__.py diff --git a/workspace_tools/host_tests/hello_auto.py b/tools/host_tests/hello_auto.py similarity index 100% rename from workspace_tools/host_tests/hello_auto.py rename to tools/host_tests/hello_auto.py diff --git a/workspace_tools/host_tests/host_registry.py b/tools/host_tests/host_registry.py similarity index 100% rename from workspace_tools/host_tests/host_registry.py rename to tools/host_tests/host_registry.py diff --git a/workspace_tools/host_tests/host_test.py b/tools/host_tests/host_test.py similarity index 98% rename from workspace_tools/host_tests/host_test.py rename to tools/host_tests/host_test.py index 103df839247..fe611cb1456 100644 --- a/workspace_tools/host_tests/host_test.py +++ b/tools/host_tests/host_test.py @@ -35,8 +35,8 @@ # we can find packages we want from the same level as other files do import sys sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))) -from workspace_tools.test_api import get_autodetected_MUTS_list -from workspace_tools.test_api import get_module_avail +from tools.test_api import get_autodetected_MUTS_list +from tools.test_api import get_module_avail class Mbed: @@ -313,7 +313,7 @@ def __init__(self): self.RESULT_MBED_ASSERT = "mbed_assert" -import workspace_tools.host_tests as host_tests +import tools.host_tests as host_tests class Test(HostTestResults): diff --git a/workspace_tools/host_tests/host_tests_plugins/__init__.py b/tools/host_tests/host_tests_plugins/__init__.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/__init__.py rename to tools/host_tests/host_tests_plugins/__init__.py diff --git a/workspace_tools/host_tests/host_tests_plugins/host_test_plugins.py b/tools/host_tests/host_tests_plugins/host_test_plugins.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/host_test_plugins.py rename to tools/host_tests/host_tests_plugins/host_test_plugins.py diff --git a/workspace_tools/host_tests/host_tests_plugins/host_test_registry.py b/tools/host_tests/host_tests_plugins/host_test_registry.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/host_test_registry.py rename to tools/host_tests/host_tests_plugins/host_test_registry.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_firefox.py 
b/tools/host_tests/host_tests_plugins/module_copy_firefox.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_copy_firefox.py rename to tools/host_tests/host_tests_plugins/module_copy_firefox.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_mbed.py b/tools/host_tests/host_tests_plugins/module_copy_mbed.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_copy_mbed.py rename to tools/host_tests/host_tests_plugins/module_copy_mbed.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_mps2.py b/tools/host_tests/host_tests_plugins/module_copy_mps2.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_copy_mps2.py rename to tools/host_tests/host_tests_plugins/module_copy_mps2.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_shell.py b/tools/host_tests/host_tests_plugins/module_copy_shell.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_copy_shell.py rename to tools/host_tests/host_tests_plugins/module_copy_shell.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_silabs.py b/tools/host_tests/host_tests_plugins/module_copy_silabs.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_copy_silabs.py rename to tools/host_tests/host_tests_plugins/module_copy_silabs.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_copy_smart.py b/tools/host_tests/host_tests_plugins/module_copy_smart.py similarity index 98% rename from workspace_tools/host_tests/host_tests_plugins/module_copy_smart.py rename to tools/host_tests/host_tests_plugins/module_copy_smart.py index 9fb5970d461..1af9eaf70aa 100644 --- a/workspace_tools/host_tests/host_tests_plugins/module_copy_smart.py +++ b/tools/host_tests/host_tests_plugins/module_copy_smart.py @@ -22,7 +22,7 @@ from host_test_plugins import HostTestPluginBase sys.path.append(abspath(join(dirname(__file__), "../../../"))) -from workspace_tools.test_api import get_autodetected_MUTS_list +from tools.test_api import get_autodetected_MUTS_list class HostTestPluginCopyMethod_Smart(HostTestPluginBase): diff --git a/workspace_tools/host_tests/host_tests_plugins/module_reset_mbed.py b/tools/host_tests/host_tests_plugins/module_reset_mbed.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_reset_mbed.py rename to tools/host_tests/host_tests_plugins/module_reset_mbed.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_reset_mps2.py b/tools/host_tests/host_tests_plugins/module_reset_mps2.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_reset_mps2.py rename to tools/host_tests/host_tests_plugins/module_reset_mps2.py diff --git a/workspace_tools/host_tests/host_tests_plugins/module_reset_silabs.py b/tools/host_tests/host_tests_plugins/module_reset_silabs.py similarity index 100% rename from workspace_tools/host_tests/host_tests_plugins/module_reset_silabs.py rename to tools/host_tests/host_tests_plugins/module_reset_silabs.py diff --git a/workspace_tools/host_tests/mbedrpc.py b/tools/host_tests/mbedrpc.py similarity index 100% rename from workspace_tools/host_tests/mbedrpc.py rename to tools/host_tests/mbedrpc.py diff --git a/workspace_tools/host_tests/midi.py b/tools/host_tests/midi.py similarity index 100% rename from workspace_tools/host_tests/midi.py rename to tools/host_tests/midi.py diff --git 
a/workspace_tools/host_tests/net_test.py b/tools/host_tests/net_test.py similarity index 100% rename from workspace_tools/host_tests/net_test.py rename to tools/host_tests/net_test.py diff --git a/workspace_tools/host_tests/rpc.py b/tools/host_tests/rpc.py similarity index 100% rename from workspace_tools/host_tests/rpc.py rename to tools/host_tests/rpc.py diff --git a/workspace_tools/host_tests/rtc_auto.py b/tools/host_tests/rtc_auto.py similarity index 100% rename from workspace_tools/host_tests/rtc_auto.py rename to tools/host_tests/rtc_auto.py diff --git a/workspace_tools/host_tests/serial_nc_rx_auto.py b/tools/host_tests/serial_nc_rx_auto.py similarity index 100% rename from workspace_tools/host_tests/serial_nc_rx_auto.py rename to tools/host_tests/serial_nc_rx_auto.py diff --git a/workspace_tools/host_tests/serial_nc_tx_auto.py b/tools/host_tests/serial_nc_tx_auto.py similarity index 100% rename from workspace_tools/host_tests/serial_nc_tx_auto.py rename to tools/host_tests/serial_nc_tx_auto.py diff --git a/workspace_tools/host_tests/stdio_auto.py b/tools/host_tests/stdio_auto.py similarity index 100% rename from workspace_tools/host_tests/stdio_auto.py rename to tools/host_tests/stdio_auto.py diff --git a/workspace_tools/host_tests/tcpecho_client.py b/tools/host_tests/tcpecho_client.py similarity index 97% rename from workspace_tools/host_tests/tcpecho_client.py rename to tools/host_tests/tcpecho_client.py index 303f002ab88..6122a1c8be2 100644 --- a/workspace_tools/host_tests/tcpecho_client.py +++ b/tools/host_tests/tcpecho_client.py @@ -18,7 +18,7 @@ import string, random from time import time -from private_settings import SERVER_ADDRESS +from mbed_settings import SERVER_ADDRESS ECHO_PORT = 7 diff --git a/workspace_tools/host_tests/tcpecho_client_auto.py b/tools/host_tests/tcpecho_client_auto.py similarity index 100% rename from workspace_tools/host_tests/tcpecho_client_auto.py rename to tools/host_tests/tcpecho_client_auto.py diff --git a/workspace_tools/host_tests/tcpecho_server.py b/tools/host_tests/tcpecho_server.py similarity index 97% rename from workspace_tools/host_tests/tcpecho_server.py rename to tools/host_tests/tcpecho_server.py index 4a68bd9ee72..e98da638906 100644 --- a/workspace_tools/host_tests/tcpecho_server.py +++ b/tools/host_tests/tcpecho_server.py @@ -17,7 +17,7 @@ from SocketServer import BaseRequestHandler, TCPServer from time import time -from private_settings import LOCALHOST +from mbed_settings import LOCALHOST MAX_INDEX = 126 MEGA = float(1024 * 1024) diff --git a/workspace_tools/host_tests/tcpecho_server_auto.py b/tools/host_tests/tcpecho_server_auto.py similarity index 100% rename from workspace_tools/host_tests/tcpecho_server_auto.py rename to tools/host_tests/tcpecho_server_auto.py diff --git a/workspace_tools/host_tests/tcpecho_server_loop.py b/tools/host_tests/tcpecho_server_loop.py similarity index 95% rename from workspace_tools/host_tests/tcpecho_server_loop.py rename to tools/host_tests/tcpecho_server_loop.py index df483974aa8..cc11ed0d969 100644 --- a/workspace_tools/host_tests/tcpecho_server_loop.py +++ b/tools/host_tests/tcpecho_server_loop.py @@ -20,7 +20,7 @@ ROOT = abspath(join(dirname(__file__), "..", "..")) sys.path.insert(0, ROOT) -from workspace_tools.private_settings import LOCALHOST +from mbed_settings import LOCALHOST from SocketServer import BaseRequestHandler, TCPServer diff --git a/workspace_tools/host_tests/udp_link_layer_auto.py b/tools/host_tests/udp_link_layer_auto.py similarity index 100% rename from 
workspace_tools/host_tests/udp_link_layer_auto.py rename to tools/host_tests/udp_link_layer_auto.py diff --git a/workspace_tools/host_tests/udpecho_client.py b/tools/host_tests/udpecho_client.py similarity index 97% rename from workspace_tools/host_tests/udpecho_client.py rename to tools/host_tests/udpecho_client.py index 1ff833f1750..af326016365 100644 --- a/workspace_tools/host_tests/udpecho_client.py +++ b/tools/host_tests/udpecho_client.py @@ -18,7 +18,7 @@ import string, random from time import time -from private_settings import CLIENT_ADDRESS +from mbed_settings import CLIENT_ADDRESS ECHO_PORT = 7 diff --git a/workspace_tools/host_tests/udpecho_client_auto.py b/tools/host_tests/udpecho_client_auto.py similarity index 100% rename from workspace_tools/host_tests/udpecho_client_auto.py rename to tools/host_tests/udpecho_client_auto.py diff --git a/workspace_tools/host_tests/udpecho_server.py b/tools/host_tests/udpecho_server.py similarity index 95% rename from workspace_tools/host_tests/udpecho_server.py rename to tools/host_tests/udpecho_server.py index f6074332e4e..7f7e899f2b3 100644 --- a/workspace_tools/host_tests/udpecho_server.py +++ b/tools/host_tests/udpecho_server.py @@ -15,7 +15,7 @@ limitations under the License. """ from SocketServer import BaseRequestHandler, UDPServer -from private_settings import SERVER_ADDRESS +from mbed_settings import SERVER_ADDRESS class UDP_EchoHandler(BaseRequestHandler): def handle(self): diff --git a/workspace_tools/host_tests/udpecho_server_auto.py b/tools/host_tests/udpecho_server_auto.py similarity index 100% rename from workspace_tools/host_tests/udpecho_server_auto.py rename to tools/host_tests/udpecho_server_auto.py diff --git a/workspace_tools/host_tests/wait_us_auto.py b/tools/host_tests/wait_us_auto.py similarity index 100% rename from workspace_tools/host_tests/wait_us_auto.py rename to tools/host_tests/wait_us_auto.py diff --git a/workspace_tools/host_tests/wfi_auto.py b/tools/host_tests/wfi_auto.py similarity index 100% rename from workspace_tools/host_tests/wfi_auto.py rename to tools/host_tests/wfi_auto.py diff --git a/workspace_tools/libraries.py b/tools/libraries.py similarity index 90% rename from workspace_tools/libraries.py rename to tools/libraries.py index c4db6c8e794..4d964600ce5 100644 --- a/workspace_tools/libraries.py +++ b/tools/libraries.py @@ -14,9 +14,9 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from workspace_tools.paths import * -from workspace_tools.data.support import * -from workspace_tools.tests import TEST_MBED_LIB +from tools.paths import * +from tools.data.support import * +from tools.tests import TEST_MBED_LIB LIBRARIES = [ @@ -59,17 +59,11 @@ }, # DSP libraries - { - "id": "cmsis_dsp", - "source_dir": DSP_CMSIS, - "build_dir": DSP_LIBRARIES, - "dependencies": [MBED_LIBRARIES], - }, { "id": "dsp", - "source_dir": DSP_ABSTRACTION, + "source_dir": [DSP_ABSTRACTION, DSP_CMSIS], "build_dir": DSP_LIBRARIES, - "dependencies": [MBED_LIBRARIES, DSP_CMSIS], + "dependencies": [MBED_LIBRARIES] }, # File system libraries diff --git a/workspace_tools/make.py b/tools/make.py old mode 100755 new mode 100644 similarity index 87% rename from workspace_tools/make.py rename to tools/make.py index e4ade4cbbf4..1d081b5cc51 --- a/workspace_tools/make.py +++ b/tools/make.py @@ -21,32 +21,31 @@ import sys from time import sleep from shutil import copy -from os.path import join, abspath, dirname +from os.path import join, abspath, dirname, isfile, isdir # Be sure that the tools directory is in the search path ROOT = abspath(join(dirname(__file__), "..")) sys.path.insert(0, ROOT) -from workspace_tools.utils import args_error -from workspace_tools.paths import BUILD_DIR -from workspace_tools.paths import RTOS_LIBRARIES -from workspace_tools.paths import RPC_LIBRARY -from workspace_tools.paths import ETH_LIBRARY -from workspace_tools.paths import USB_HOST_LIBRARIES, USB_LIBRARIES -from workspace_tools.paths import DSP_LIBRARIES -from workspace_tools.paths import FS_LIBRARY -from workspace_tools.paths import UBLOX_LIBRARY -from workspace_tools.tests import TESTS, Test, TEST_MAP -from workspace_tools.tests import TEST_MBED_LIB -from workspace_tools.targets import TARGET_MAP -from workspace_tools.options import get_default_options_parser -from workspace_tools.build_api import build_project +from tools.utils import args_error +from tools.paths import BUILD_DIR +from tools.paths import RTOS_LIBRARIES +from tools.paths import RPC_LIBRARY +from tools.paths import ETH_LIBRARY +from tools.paths import USB_HOST_LIBRARIES, USB_LIBRARIES +from tools.paths import DSP_LIBRARIES +from tools.paths import FS_LIBRARY +from tools.paths import UBLOX_LIBRARY +from tools.tests import TESTS, Test, TEST_MAP +from tools.tests import TEST_MBED_LIB +from tools.targets import TARGET_MAP +from tools.options import get_default_options_parser +from tools.build_api import build_project try: - import workspace_tools.private_settings as ps + import tools.private_settings as ps except: ps = object() - if __name__ == '__main__': # Parse Options parser = get_default_options_parser() @@ -62,8 +61,8 @@ parser.add_option("-j", "--jobs", type="int", dest="jobs", - default=1, - help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs") + default=0, + help="Number of concurrent jobs. 
Default: 0/auto (based on host machine's number of CPUs)") parser.add_option("-v", "--verbose", action="store_true", @@ -94,11 +93,13 @@ parser.add_option("--dep", dest="dependencies", default=None, help="Dependencies") parser.add_option("--source", dest="source_dir", - default=None, help="The source (input) directory") + default=None, help="The source (input) directory", action="append") parser.add_option("--duration", type="int", dest="duration", default=None, help="Duration of the test") parser.add_option("--build", dest="build_dir", default=None, help="The build (output) directory") + parser.add_option("-N", "--artifact-name", dest="artifact_name", + default=None, help="The built project's name") parser.add_option("-d", "--disk", dest="disk", default=None, help="The mbed disk") parser.add_option("-s", "--serial", dest="serial", @@ -165,6 +166,12 @@ (options, args) = parser.parse_args() + if options.source_dir: + for path in options.source_dir : + if not isfile(path) and not isdir(path) : + args_error(parser, "[ERROR] you passed \"{}\" to --source, which does not exist". + format(path)) + # Print available tests in order and exit if options.list_tests is True: print '\n'.join(map(str, sorted(TEST_MAP.values()))) @@ -240,15 +247,15 @@ if options.build_dir is not None: build_dir = options.build_dir - target = TARGET_MAP[mcu] try: - bin_file = build_project(test.source_dir, build_dir, target, toolchain, test.dependencies, options.options, + bin_file = build_project(test.source_dir, build_dir, mcu, toolchain, test.dependencies, options.options, linker_script=options.linker_script, clean=options.clean, verbose=options.verbose, silent=options.silent, macros=options.macros, - jobs=options.jobs) + jobs=options.jobs, + name=options.artifact_name) print 'Image: %s'% bin_file if options.disk: @@ -259,7 +266,7 @@ # Import pyserial: https://pypi.python.org/pypi/pyserial from serial import Serial - sleep(target.program_cycle_s()) + sleep(TARGET_MAP[mcu].program_cycle_s()) serial = Serial(options.serial, timeout = 1) if options.baud: @@ -291,3 +298,5 @@ traceback.print_exc(file=sys.stdout) else: print "[ERROR] %s" % str(e) + + sys.exit(1) diff --git a/tools/memap.py b/tools/memap.py new file mode 100644 index 00000000000..b465cb44f3c --- /dev/null +++ b/tools/memap.py @@ -0,0 +1,529 @@ +#!/usr/bin/env python +# pylint: disable=too-many-arguments, too-many-locals, too-many-branches, too-many-lines, line-too-long, too-many-nested-blocks, too-many-public-methods, too-many-instance-attributes +# pylint: disable=invalid-name, missing-docstring + +# Memory Map File Analyser for ARM mbed OS + +import sys +import os +import re +import csv +import json +import argparse +from prettytable import PrettyTable + +debug = False + +class MemapParser(object): + + def __init__(self): + """ + General initialization + """ + + # list of all modules and their sections + self.modules = dict() + + self.misc_flash_sections = ('.interrupts', '.flash_config') + + self.other_sections = ('.interrupts_ram', '.init', '.ARM.extab', \ + '.ARM.exidx', '.ARM.attributes', '.eh_frame', \ + '.init_array', '.fini_array', '.jcr', '.stab', \ + '.stabstr', '.ARM.exidx', '.ARM') + + # sections to print info (generic for all toolchains) + self.sections = ('.text', '.data', '.bss', '.heap', '.stack') + + # sections must be defined in this order so irrelevant ones are filtered out + self.all_sections = self.sections + self.other_sections + \ + self.misc_flash_sections + ('unknown', 'OUTPUT') + + self.print_sections = ('.text', '.data', '.bss') + + # list of
all object files and mapping to module names + self.object_to_module = dict() + + def module_add(self, module_name, size, section): + """ + Adds a module / section to the list + """ + + if module_name in self.modules: + self.modules[module_name][section] += size + else: + temp_dic = dict() + for section_idx in self.all_sections: + temp_dic[section_idx] = 0 + temp_dic[section] = size + self.modules[module_name] = temp_dic + + def check_new_section_gcc(self, line): + """ + Check whether a new section in a map file has been detected (only applies to gcc) + """ + + for i in self.all_sections: + if line.startswith(i): + return i # should be the name of the section (assuming it's a known one) + + if line.startswith('.'): + return 'unknown' # all others are classified as unknown + else: + return False # everything else means no change in section + + def path_object_to_module_name(self, txt): + """ + Parses path to object file and extracts module / object data + """ + + txt = txt.replace('\\', '/') + rex_mbed_os_name = r'^.+mbed-os\/(.+)\/(.+\.o)$' + test_rex_mbed_os_name = re.match(rex_mbed_os_name, txt) + + if test_rex_mbed_os_name: + + object_name = test_rex_mbed_os_name.group(2) + data = test_rex_mbed_os_name.group(1).split('/') + ndata = len(data) + + if ndata == 1: + module_name = data[0] + else: + module_name = data[0] + '/' + data[1] + + return [module_name, object_name] + else: + return ['Misc', ""] + + + def parse_section_gcc(self, line): + """ + Parse data from a section of gcc map file + """ + # examples + # 0x00004308 0x7c ./.build/K64F/GCC_ARM/mbed-os/hal/targets/hal/TARGET_Freescale/TARGET_KPSDK_MCUS/spi_api.o + # .text 0x00000608 0x198 ./.build/K64F/GCC_ARM/mbed-os/core/mbed-rtos/rtx/TARGET_CORTEX_M/TARGET_RTOS_M4_M7/TOOLCHAIN_GCC/HAL_CM4.o + rex_address_len_name = r'^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$' + + test_address_len_name = re.match(rex_address_len_name, line) + + if test_address_len_name: + + if int(test_address_len_name.group(2), 16) == 0: # size == 0 + return ["", 0] # no valid entry + else: + m_name, m_object = self.path_object_to_module_name(test_address_len_name.group(3)) + m_size = int(test_address_len_name.group(2), 16) + return [m_name, m_size] + + else: # special corner case for *fill* sections + # example + # *fill* 0x0000abe4 0x4 + rex_address_len = r'^\s+\*fill\*\s+0x(\w{8,16})\s+0x(\w+).*$' + test_address_len = re.match(rex_address_len, line) + + if test_address_len: + if int(test_address_len.group(2), 16) == 0: # size == 0 + return ["", 0] # no valid entry + else: + m_name = 'Fill' + m_size = int(test_address_len.group(2), 16) + return [m_name, m_size] + else: + return ["", 0] # no valid entry + + def parse_map_file_gcc(self, file_desc): + """ + Main logic to decode gcc map files + """ + + current_section = 'unknown' + + with file_desc as infile: + + # Search area to parse + for line in infile: + if line.startswith('Linker script and memory map'): + current_section = "unknown" + break + + # Start decoding the map file + for line in infile: + + change_section = self.check_new_section_gcc(line) + + if change_section == "OUTPUT": # finish parsing file: exit + break + elif change_section != False: + current_section = change_section + + [module_name, module_size] = self.parse_section_gcc(line) + + if module_size == 0 or module_name == "": + pass + else: + self.module_add(module_name, module_size, current_section) + + if debug: + print "Line: %s" % line, + print "Module: %s\tSection: %s\tSize: %s" % (module_name, current_section, module_size) + raw_input("----------") +
+ def parse_section_armcc(self, line): + """ + Parse data from an armcc map file + """ + # Examples of armcc map file: + # Base_Addr Size Type Attr Idx E Section Name Object + # 0x00000000 0x00000400 Data RO 11222 RESET startup_MK64F12.o + # 0x00000410 0x00000008 Code RO 49364 * !!!main c_w.l(__main.o) + rex_armcc = r'^\s+0x(\w{8})\s+0x(\w{8})\s+(\w+)\s+(\w+)\s+(\d+)\s+[*]?.+\s+(.+)$' + + test_rex_armcc = re.match(rex_armcc, line) + + if test_rex_armcc: + + size = int(test_rex_armcc.group(2), 16) + + if test_rex_armcc.group(4) == 'RO': + section = '.text' + else: + if test_rex_armcc.group(3) == 'Data': + section = '.data' + elif test_rex_armcc.group(3) == 'Zero': + section = '.bss' + else: + print "BUG armcc map parser" + raw_input() + + # lookup object in dictionary and return module name + object_name = test_rex_armcc.group(6) + if object_name in self.object_to_module: + module_name = self.object_to_module[object_name] + else: + module_name = 'Misc' + + return [module_name, size, section] + + else: + return ["", 0, ""] # no valid entry + + def parse_section_iar(self, line): + """ + Parse data from an IAR map file + """ + # Examples of IAR map file: + # Section Kind Address Size Object + # .intvec ro code 0x00000000 0x198 startup_MK64F12.o [15] + # .rodata const 0x00000198 0x0 zero_init3.o [133] + # .iar.init_table const 0x00008384 0x2c - Linker created - + # Initializer bytes const 0x00000198 0xb2 + # .data inited 0x20000000 0xd4 driverAtmelRFInterface.o [70] + # .bss zero 0x20000598 0x318 RTX_Conf_CM.o [4] + # .iar.dynexit uninit 0x20001448 0x204 + # HEAP uninit 0x20001650 0x10000 + rex_iar = r'^\s+(.+)\s+(zero|const|ro code|inited|uninit)\s+0x(\w{8})\s+0x(\w+)\s+(.+)\s.+$' + + test_rex_iar = re.match(rex_iar, line) + + if test_rex_iar: + + size = int(test_rex_iar.group(4), 16) + + if test_rex_iar.group(2) == 'const' or test_rex_iar.group(2) == 'ro code': + section = '.text' + elif test_rex_iar.group(2) == 'zero' or test_rex_iar.group(2) == 'uninit': + if test_rex_iar.group(1)[0:4] == 'HEAP': + section = '.heap' + elif test_rex_iar.group(1)[0:6] == 'CSTACK': + section = '.stack' + else: + section = '.bss' # default section + + elif test_rex_iar.group(2) == 'inited': + section = '.data' + else: + print "BUG IAR map parser" + raw_input() + + # lookup object in dictionary and return module name + object_name = test_rex_iar.group(5) + if object_name in self.object_to_module: + module_name = self.object_to_module[object_name] + else: + module_name = 'Misc' + + return [module_name, size, section] + + else: + return ["", 0, ""] # no valid entry + + def parse_map_file_armcc(self, file_desc): + """ + Main logic to decode armcc map files + """ + + with file_desc as infile: + + # Search area to parse + for line in infile: + if line.startswith(' Base Addr Size'): + break + + # Start decoding the map file + for line in infile: + + [name, size, section] = self.parse_section_armcc(line) + + if size == 0 or name == "" or section == "": + pass + else: + self.module_add(name, size, section) + + def parse_map_file_iar(self, file_desc): + """ + Main logic to decode IAR map files + """ + + with file_desc as infile: + + # Search area to parse + for line in infile: + if line.startswith(' Section '): + break + + # Start decoding the map file + for line in infile: + + [name, size, section] = self.parse_section_iar(line) + + if size == 0 or name == "" or section == "": + pass + else: + self.module_add(name, size, section) + + def search_objects(self, path, toolchain): + """ + Check whether the specified map
file matches with the toolchain. + Searches for object files and creates mapping: object --> module + """ + + path = path.replace('\\', '/') + + # check location of map file + rex = r'^(.+\/)' + re.escape(toolchain) + r'\/(.+\.map)$' + test_rex = re.match(rex, path) + + if test_rex: + search_path = test_rex.group(1) + toolchain + '/mbed-os/' + else: + # It looks like this is not an mbed OS project + # object-to-module mapping cannot be generated + print "Warning: specified toolchain doesn't match with path to the memory map file." + return + + for root, dir, obj_files in os.walk(search_path): + for obj_file in obj_files: + if obj_file.endswith(".o"): + module_name, object_name = self.path_object_to_module_name(os.path.join(root, obj_file)) + + if object_name in self.object_to_module: + if debug: + print "WARNING: multiple usages of object file: %s" % object_name + print " Current: %s" % self.object_to_module[object_name] + print " New: %s" % module_name + print " " + else: + self.object_to_module.update({object_name:module_name}) + + def generate_output(self, export_format, file_output=None): + """ + Generates summary of memory map data + + Parameters + export_format: output format ('json', 'csv-ci' or 'table') + file_output: output file name (writes to stdout when None) + """ + + try: + if file_output: + file_desc = open(file_output, 'wb') + else: + file_desc = sys.stdout + except IOError as error: + print "I/O error({0}): {1}".format(error.errno, error.strerror) + return False + + # Calculate misc flash sections + misc_flash_mem = 0 + for i in self.modules: + for k in self.misc_flash_sections: + if self.modules[i][k]: + misc_flash_mem += self.modules[i][k] + + # Create table + columns = ['Module'] + for i in list(self.print_sections): + columns.append(i) + + table = PrettyTable(columns) + table.align["Module"] = "l" + + subtotal = dict() + for k in self.sections: + subtotal[k] = 0 + + json_obj = [] + for i in sorted(self.modules): + + row = [] + row.append(i) + + for k in self.sections: + subtotal[k] += self.modules[i][k] + + for k in self.print_sections: + row.append(self.modules[i][k]) + + json_obj.append({"module":i, "size":{\ + k:self.modules[i][k] for k in self.print_sections}}) + + table.add_row(row) + + subtotal_row = ['Subtotals'] + for k in self.print_sections: + subtotal_row.append(subtotal[k]) + + table.add_row(subtotal_row) + + if export_format == 'json': + json_obj.append({\ + 'summary':{\ + 'static_ram':(subtotal['.data']+subtotal['.bss']),\ + 'heap':(subtotal['.heap']),\ + 'stack':(subtotal['.stack']),\ + 'total_ram':(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']),\ + 'total_flash':(subtotal['.text']+subtotal['.data']+misc_flash_mem),}}) + + file_desc.write(json.dumps(json_obj, indent=4)) + file_desc.write('\n') + + elif export_format == 'csv-ci': # CSV format for the CI system + + csv_writer = csv.writer(file_desc, delimiter=',', quoting=csv.QUOTE_NONE) + + csv_module_section = [] + csv_sizes = [] + for i in sorted(self.modules): + for k in self.print_sections: + csv_module_section += [i+k] + csv_sizes += [self.modules[i][k]] + + csv_module_section += ['static_ram'] + csv_sizes += [subtotal['.data']+subtotal['.bss']] + + csv_module_section += ['heap'] + csv_sizes += [subtotal['.heap']] + + csv_module_section += ['stack'] + csv_sizes += [subtotal['.stack']] + + csv_module_section += ['total_ram'] + csv_sizes += [subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']] + + csv_module_section += ['total_flash'] + csv_sizes +=
[subtotal['.text']+subtotal['.data']+misc_flash_mem] + + csv_writer.writerow(csv_module_section) + csv_writer.writerow(csv_sizes) + + else: # default format is 'table' + file_desc.write(table.get_string()) + file_desc.write('\n') + file_desc.write("Static RAM memory (data + bss): %s\n" % (str(subtotal['.data']+subtotal['.bss']))) + file_desc.write("Heap: %s\n" % str(subtotal['.heap'])) + file_desc.write("Stack: %s\n" % str(subtotal['.stack'])) + file_desc.write("Total RAM memory (data + bss + heap + stack): %s\n" % (str(subtotal['.data']+subtotal['.bss']+subtotal['.heap']+subtotal['.stack']))) + file_desc.write("Total Flash memory (text + data + misc): %s\n" % (str(subtotal['.text']+subtotal['.data']+misc_flash_mem))) + + if file_desc is not sys.stdout: + file_desc.close() + + return True + + def parse(self, mapfile, toolchain): + """ + Parse and decode map file depending on the toolchain + """ + + try: + file_input = open(mapfile, 'rt') + except IOError as error: + print "I/O error({0}): {1}".format(error.errno, error.strerror) + return False + + if toolchain == "ARM" or toolchain == "ARM_STD" or toolchain == "ARM_MICRO": + self.search_objects(os.path.abspath(mapfile), "ARM") + self.parse_map_file_armcc(file_input) + elif toolchain == "GCC_ARM": + self.parse_map_file_gcc(file_input) + elif toolchain == "IAR": + self.search_objects(os.path.abspath(mapfile), toolchain) + self.parse_map_file_iar(file_input) + else: + return False + + file_input.close() + + return True + +def main(): + + version = '0.3.10' + + # Parser handling + parser = argparse.ArgumentParser(description="Memory Map File Analyser for ARM mbed OS\nversion %s" % version) + + parser.add_argument('file', help='memory map file') + + parser.add_argument('-t', '--toolchain', dest='toolchain', help='select a toolchain used to build the memory map file (ARM, GCC_ARM, IAR)',\ + required=True) + + parser.add_argument('-o', '--output', help='output file name', required=False) + + parser.add_argument('-e', '--export', dest='export', required=False,\ + help="export format (examples: 'json', 'csv-ci', 'table': default)") + + parser.add_argument('-v', '--version', action='version', version=version) + + # Parse/run command + if len(sys.argv) <= 1: + parser.print_help() + sys.exit(1) + + + args, remainder = parser.parse_known_args() + + # Create memap object + memap = MemapParser() + + # Parse and decode a map file + if args.file and args.toolchain: + if memap.parse(args.file, args.toolchain) is False: + print "Unknown toolchain for memory statistics %s" % args.toolchain + sys.exit(0) + + # default export format is table + if not args.export: + args.export = 'table' + + # Write output to a file + if args.output != None: + memap.generate_output(args.export, args.output) + else: # Write output to the screen + memap.generate_output(args.export) + + sys.exit(0) + +if __name__ == "__main__": + main() diff --git a/workspace_tools/options.py b/tools/options.py similarity index 65% rename from workspace_tools/options.py rename to tools/options.py index 3e4d2e859c1..250e8e19484 100644 --- a/workspace_tools/options.py +++ b/tools/options.py @@ -15,11 +15,11 @@ limitations under the License.
""" from optparse import OptionParser -from workspace_tools.toolchains import TOOLCHAINS -from workspace_tools.targets import TARGET_NAMES +from tools.toolchains import TOOLCHAINS +from tools.targets import TARGET_NAMES -def get_default_options_parser(): +def get_default_options_parser(add_clean=True, add_options=True): parser = OptionParser() targetnames = TARGET_NAMES @@ -35,10 +35,12 @@ def get_default_options_parser(): help="build using the given TOOLCHAIN (%s)" % ', '.join(toolchainlist), metavar="TOOLCHAIN") - parser.add_option("-c", "--clean", action="store_true", default=False, - help="clean the build directory") + if add_clean: + parser.add_option("-c", "--clean", action="store_true", default=False, + help="clean the build directory") - parser.add_option("-o", "--options", action="append", - help='Add a build option ("save-asm": save the asm generated by the compiler, "debug-info": generate debugging information, "analyze": run Goanna static code analyzer")') + if add_options: + parser.add_option("-o", "--options", action="append", + help='Add a build option ("save-asm": save the asm generated by the compiler, "debug-info": generate debugging information, "analyze": run Goanna static code analyzer")') return parser diff --git a/workspace_tools/patch.py b/tools/patch.py similarity index 100% rename from workspace_tools/patch.py rename to tools/patch.py diff --git a/workspace_tools/paths.py b/tools/paths.py similarity index 97% rename from workspace_tools/paths.py rename to tools/paths.py index 849995fb97f..cdff2798d17 100644 --- a/workspace_tools/paths.py +++ b/tools/paths.py @@ -26,7 +26,7 @@ # Embedded Libraries Sources LIB_DIR = join(ROOT, "libraries") -TOOLS = join(ROOT, "workspace_tools") +TOOLS = join(ROOT, "tools") TOOLS_DATA = join(TOOLS, "data") TOOLS_BOOTLOADERS = join(TOOLS, "bootloaders") @@ -42,7 +42,7 @@ # Tests TEST_DIR = join(LIB_DIR, "tests") -HOST_TESTS = join(ROOT, "workspace_tools", "host_tests") +HOST_TESTS = join(ROOT, "tools", "host_tests") # mbed RPC MBED_RPC = join(LIB_DIR, "rpc") diff --git a/tools/project.py b/tools/project.py new file mode 100644 index 00000000000..b28d50dfaed --- /dev/null +++ b/tools/project.py @@ -0,0 +1,231 @@ +import sys +from os.path import join, abspath, dirname, exists, basename +ROOT = abspath(join(dirname(__file__), "..")) +sys.path.insert(0, ROOT) + +from shutil import move, rmtree +from optparse import OptionParser +from os import path + +from tools.paths import EXPORT_DIR, EXPORT_WORKSPACE, EXPORT_TMP +from tools.paths import MBED_BASE, MBED_LIBRARIES +from tools.export import export, setup_user_prj, EXPORTERS, mcu_ide_matrix +from tools.utils import args_error, mkdir +from tools.tests import TESTS, Test, TEST_MAP +from tools.targets import TARGET_NAMES +from tools.libraries import LIBRARIES + +try: + import tools.private_settings as ps +except: + ps = object() + + +if __name__ == '__main__': + # Parse Options + parser = OptionParser() + + targetnames = TARGET_NAMES + targetnames.sort() + toolchainlist = EXPORTERS.keys() + toolchainlist.sort() + + parser.add_option("-m", "--mcu", + metavar="MCU", + default='LPC1768', + help="generate project for the given MCU (%s)"% ', '.join(targetnames)) + + parser.add_option("-i", + dest="ide", + default='uvision', + help="The target IDE: %s"% str(toolchainlist)) + + parser.add_option("-c", "--clean", + action="store_true", + default=False, + help="clean the export directory") + + parser.add_option("-p", + type="int", + dest="program", + help="The index of the desired test program: 
[0-%d]"% (len(TESTS)-1)) + + parser.add_option("-n", + dest="program_name", + help="The name of the desired test program") + + parser.add_option("-b", + dest="build", + action="store_true", + default=False, + help="use the mbed library build, instead of the sources") + + parser.add_option("-L", "--list-tests", + action="store_true", + dest="list_tests", + default=False, + help="list available programs in order and exit") + + parser.add_option("-S", "--list-matrix", + action="store_true", + dest="supported_ides", + default=False, + help="displays supported matrix of MCUs and IDEs") + + parser.add_option("-E", + action="store_true", + dest="supported_ides_html", + default=False, + help="writes tools/export/README.md") + + parser.add_option("--source", + action="append", + dest="source_dir", + default=None, + help="The source (input) directory") + + parser.add_option("-D", "", + action="append", + dest="macros", + help="Add a macro definition") + + (options, args) = parser.parse_args() + + # Print available tests in order and exit + if options.list_tests is True: + print '\n'.join(map(str, sorted(TEST_MAP.values()))) + sys.exit() + + # Only prints matrix of supported IDEs + if options.supported_ides: + print mcu_ide_matrix() + exit(0) + + # Only prints matrix of supported IDEs + if options.supported_ides_html: + html = mcu_ide_matrix(verbose_html=True) + try: + with open("./export/README.md","w") as f: + f.write("Exporter IDE/Platform Support\n") + f.write("-----------------------------------\n") + f.write("\n") + f.write(html) + except IOError as e: + print "I/O error({0}): {1}".format(e.errno, e.strerror) + except: + print "Unexpected error:", sys.exc_info()[0] + raise + exit(0) + + # Clean Export Directory + if options.clean: + if exists(EXPORT_DIR): + rmtree(EXPORT_DIR) + + # Target + if options.mcu is None : + args_error(parser, "[ERROR] You should specify an MCU") + mcus = options.mcu + + # IDE + if options.ide is None: + args_error(parser, "[ERROR] You should specify an IDE") + ide = options.ide + + # Export results + successes = [] + failures = [] + zip = True + clean = True + + # source_dir = use relative paths, otherwise sources are copied + sources_relative = True if options.source_dir else False + + for mcu in mcus.split(','): + # Program Number or name + p, n, src, ide = options.program, options.program_name, options.source_dir, options.ide + + if src is not None: + # --source is used to generate IDE files to toolchain directly in the source tree and doesn't generate zip file + project_dir = options.source_dir + project_name = n if n else "Unnamed_Project" + project_temp = path.join(options.source_dir[0], 'projectfiles', ide) + mkdir(project_temp) + lib_symbols = [] + if options.macros: + lib_symbols += options.macros + zip = False # don't create zip + clean = False # don't cleanup because we use the actual source tree to generate IDE files + else: + if n is not None and p is not None: + args_error(parser, "[ERROR] specify either '-n' or '-p', not both") + if n: + if not n in TEST_MAP.keys(): + # Check if there is an alias for this in private_settings.py + if getattr(ps, "test_alias", None) is not None: + alias = ps.test_alias.get(n, "") + if not alias in TEST_MAP.keys(): + args_error(parser, "[ERROR] Program with name '%s' not found" % n) + else: + n = alias + else: + args_error(parser, "[ERROR] Program with name '%s' not found" % n) + p = TEST_MAP[n].n + + if p is None or (p < 0) or (p > (len(TESTS)-1)): + message = "[ERROR] You have to specify one of the following tests:\n" + 
message += '\n'.join(map(str, sorted(TEST_MAP.values()))) + args_error(parser, message) + + # Project + if p is None or (p < 0) or (p > (len(TESTS)-1)): + message = "[ERROR] You have to specify one of the following tests:\n" + message += '\n'.join(map(str, sorted(TEST_MAP.values()))) + args_error(parser, message) + test = Test(p) + + # Some libraries have extra macros (called by exporter symbols) to we need to pass + # them to maintain compilation macros integrity between compiled library and + # header files we might use with it + lib_symbols = [] + if options.macros: + lib_symbols += options.macros + for lib in LIBRARIES: + if lib['build_dir'] in test.dependencies: + lib_macros = lib.get('macros', None) + if lib_macros is not None: + lib_symbols.extend(lib_macros) + + if not options.build: + # Substitute the library builds with the sources + # TODO: Substitute also the other library build paths + if MBED_LIBRARIES in test.dependencies: + test.dependencies.remove(MBED_LIBRARIES) + test.dependencies.append(MBED_BASE) + + # Build the project with the same directory structure of the mbed online IDE + project_name = test.id + project_dir = join(EXPORT_WORKSPACE, project_name) + project_temp = EXPORT_TMP + setup_user_prj(project_dir, test.source_dir, test.dependencies) + + # Export to selected toolchain + tmp_path, report = export(project_dir, project_name, ide, mcu, project_dir, project_temp, clean=clean, zip=zip, extra_symbols=lib_symbols, relative=sources_relative) + if report['success']: + zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (project_name, ide, mcu)) + if zip: + move(tmp_path, zip_path) + successes.append("%s::%s\t%s"% (mcu, ide, zip_path)) + else: + failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg'])) + + # Prints export results + print + if len(successes) > 0: + print "Successful exports:" + for success in successes: + print " * %s"% success + if len(failures) > 0: + print "Failed exports:" + for failure in failures: + print " * %s"% failure diff --git a/workspace_tools/remove-device-h.py b/tools/remove-device-h.py similarity index 100% rename from workspace_tools/remove-device-h.py rename to tools/remove-device-h.py diff --git a/workspace_tools/settings.py b/tools/settings.py similarity index 92% rename from workspace_tools/settings.py rename to tools/settings.py index 0a62ed9c81f..f50c69e8e27 100644 --- a/workspace_tools/settings.py +++ b/tools/settings.py @@ -18,7 +18,7 @@ ROOT = abspath(join(dirname(__file__), "..")) # These default settings have two purposes: -# 1) Give a template for writing local "private_settings.py" +# 1) Give a template for writing local "mbed_settings.py" # 2) Give default initialization fields for the "toolchains.py" constructors ############################################################################## @@ -59,7 +59,7 @@ # IAR IAR_PATH = "C:/Program Files (x86)/IAR Systems/Embedded Workbench 7.3/arm" -# Goanna static analyser. Please overload it in private_settings.py +# Goanna static analyser. Please overload it in mbed_settings.py GOANNA_PATH = "c:/Program Files (x86)/RedLizards/Goanna Central 3.2.3/bin" # cppcheck path (command) and output message format @@ -99,6 +99,6 @@ try: # Allow to overwrite the default settings without the need to edit the # settings file stored in the repository - from workspace_tools.private_settings import * + from mbed_settings import * except ImportError: - print '[WARNING] Using default settings. 
Define your settings in the file "workspace_tools/private_settings.py" or in "./mbed_settings.py"' + print '[WARNING] Using default settings. Define your settings in the file "./mbed_settings.py"' diff --git a/workspace_tools/singletest.py b/tools/singletest.py similarity index 91% rename from workspace_tools/singletest.py rename to tools/singletest.py index 6b5054baca3..058b96d4d17 100644 --- a/workspace_tools/singletest.py +++ b/tools/singletest.py @@ -56,32 +56,32 @@ # Check: Extra modules which are required by core test suite -from workspace_tools.utils import check_required_modules +from tools.utils import check_required_modules check_required_modules(['prettytable', 'serial']) # Imports related to mbed build api -from workspace_tools.build_api import mcu_toolchain_matrix +from tools.build_api import mcu_toolchain_matrix # Imports from TEST API -from workspace_tools.test_api import SingleTestRunner -from workspace_tools.test_api import singletest_in_cli_mode -from workspace_tools.test_api import detect_database_verbose -from workspace_tools.test_api import get_json_data_from_file -from workspace_tools.test_api import get_avail_tests_summary_table -from workspace_tools.test_api import get_default_test_options_parser -from workspace_tools.test_api import print_muts_configuration_from_json -from workspace_tools.test_api import print_test_configuration_from_json -from workspace_tools.test_api import get_autodetected_MUTS_list -from workspace_tools.test_api import get_autodetected_TEST_SPEC -from workspace_tools.test_api import get_module_avail -from workspace_tools.test_exporters import ReportExporter, ResultExporterType +from tools.test_api import SingleTestRunner +from tools.test_api import singletest_in_cli_mode +from tools.test_api import detect_database_verbose +from tools.test_api import get_json_data_from_file +from tools.test_api import get_avail_tests_summary_table +from tools.test_api import get_default_test_options_parser +from tools.test_api import print_muts_configuration_from_json +from tools.test_api import print_test_configuration_from_json +from tools.test_api import get_autodetected_MUTS_list +from tools.test_api import get_autodetected_TEST_SPEC +from tools.test_api import get_module_avail +from tools.test_exporters import ReportExporter, ResultExporterType # Importing extra modules which can be not installed but if available they can extend test suite functionality try: import mbed_lstools - from workspace_tools.compliance.ioper_runner import IOperTestRunner - from workspace_tools.compliance.ioper_runner import get_available_oper_test_scopes + from tools.compliance.ioper_runner import IOperTestRunner + from tools.compliance.ioper_runner import get_available_oper_test_scopes except: pass diff --git a/workspace_tools/size.py b/tools/size.py similarity index 94% rename from workspace_tools/size.py rename to tools/size.py index 48ed5366be7..0e19ae395ea 100644 --- a/workspace_tools/size.py +++ b/tools/size.py @@ -23,10 +23,10 @@ ROOT = abspath(join(dirname(__file__), "..")) sys.path.insert(0, ROOT) -from workspace_tools.paths import BUILD_DIR, TOOLS_DATA -from workspace_tools.settings import GCC_ARM_PATH -from workspace_tools.tests import TEST_MAP -from workspace_tools.build_api import build_mbed_libs, build_project +from tools.paths import BUILD_DIR, TOOLS_DATA +from tools.settings import GCC_ARM_PATH +from tools.tests import TEST_MAP +from tools.build_api import build_mbed_libs, build_project SIZE = join(GCC_ARM_PATH, 'arm-none-eabi-size') diff --git 
a/workspace_tools/synch.py b/tools/synch.py similarity index 98% rename from workspace_tools/synch.py rename to tools/synch.py index 9d95034e64b..4efeb2c3ea1 100644 --- a/workspace_tools/synch.py +++ b/tools/synch.py @@ -31,9 +31,9 @@ ROOT = abspath(join(dirname(__file__), "..")) sys.path.insert(0, ROOT) -from workspace_tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR -from workspace_tools.paths import * -from workspace_tools.utils import run_cmd +from tools.settings import MBED_ORG_PATH, MBED_ORG_USER, BUILD_DIR +from tools.paths import * +from tools.utils import run_cmd MBED_URL = "mbed.org" MBED_USER = "mbed_official" diff --git a/workspace_tools/targets.py b/tools/targets.py old mode 100755 new mode 100644 similarity index 83% rename from workspace_tools/targets.py rename to tools/targets.py index aa02af962d6..e29b5c34bf0 --- a/workspace_tools/targets.py +++ b/tools/targets.py @@ -32,12 +32,12 @@ import binascii import struct import shutil -from workspace_tools.patch import patch +from tools.patch import patch from paths import TOOLS_BOOTLOADERS import json import inspect import sys - +from tools.utils import json_file_to_dict ######################################################################################################################## # Generic Target class that reads and interprets the data in targets.json @@ -58,29 +58,19 @@ def wrapper(*args, **kwargs): class Target: # Cumulative attributes can have values appended to them, so they # need to be computed differently than regular attributes - __cumulative_attributes = ['extra_labels', 'macros', 'device_has'] + __cumulative_attributes = ['extra_labels', 'macros', 'device_has', 'features'] - # Utility function: traverse a dictionary and change all the strings in the dictionary to - # ASCII from Unicode. 
Needed because the original mbed target definitions were written in - # Python and used only ASCII strings, but the Python JSON decoder always returns Unicode - # Based on http://stackoverflow.com/a/13105359 - @staticmethod - def to_ascii(input): - if isinstance(input, dict): - return dict([(Target.to_ascii(key), Target.to_ascii(value)) for key, value in input.iteritems()]) - elif isinstance(input, list): - return [Target.to_ascii(element) for element in input] - elif isinstance(input, unicode): - return input.encode('ascii') - else: - return input + # {target_name: target_instance} map for all the targets in the system + __target_map = {} + + # List of targets that were added dynamically using "add_py_targets" (see below) + __py_targets = set() # Load the description of JSON target data @staticmethod @cached def get_json_target_data(): - with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../hal/targets.json"), "rt") as f: - return Target.to_ascii(json.load(f)) + return json_file_to_dict(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'hal', 'targets.json')) # Get the members of this module using Python's "inspect" module @staticmethod @@ -168,25 +158,62 @@ def __getattr_helper(self, attrname): break else: # Attribute not found raise AttributeError("Attribute '%s' not found in target '%s'" % (attrname, self.name)) - # 'progen' needs the full path to the template (the path in JSON is relative to workspace_tools/export) + # 'progen' needs the full path to the template (the path in JSON is relative to tools/export) return v if attrname != "progen" else self.__add_paths_to_progen(v) # Return the value of an attribute - # This function only looks for the attribute's value in the cache, the real work of computing the - # attribute's value is done in the function above (__getattr_helper) + # This function only computes the attribute's value once, then adds it to the instance attributes + # (in __dict__), so the next time it is returned directly def __getattr__(self, attrname): - if not self.attr_cache.has_key(attrname): - self.attr_cache[attrname] = self.__getattr_helper(attrname) - return self.attr_cache[attrname] + v = self.__getattr_helper(attrname) + self.__dict__[attrname] = v + return v + + # Add one or more new target(s) represented as a Python dictionary in 'new_targets' + # It it an error to add a target with a name that exists in "targets.json" + # However, it is OK to add a target that was previously added via "add_py_targets" + # (this makes testing easier without changing the regular semantics) + @staticmethod + def add_py_targets(new_targets): + crt_data = Target.get_json_target_data() + # First add all elemnts to the internal dictionary + for tk, tv in new_targets.items(): + if crt_data.has_key(tk) and (not tk in Target.__py_targets): + raise Exception("Attempt to add target '%s' that already exists" % tk) + crt_data[tk] = tv + Target.__py_targets.add(tk) + # Then create the new instances and update global variables if needed + for tk, tv in new_targets.items(): + # Is the target already created? + old_target = Target.__target_map.get(tk, None) + # Instantiate this target. 
If it is public, update the data in + # in TARGETS, TARGET_MAP, TARGET_NAMES + new_target = Target(tk) + if tv.get("public", True): + if old_target: # remove the old target from TARGETS and TARGET_NAMES + TARGETS.remove(old_target) + TARGET_NAMES.remove(tk) + # Add the new target + TARGETS.append(new_target) + TARGET_MAP[tk] = new_target + TARGET_NAMES.append(tk) + # Update the target cache + Target.__target_map[tk] = new_target + + # Return the target instance starting from the target name + @staticmethod + def get_target(name): + if not Target.__target_map.has_key(name): + Target.__target_map[name] = Target(name) + return Target.__target_map[name] def __init__(self, name): self.name = name # Compute resolution order once (it will be used later in __getattr__) self.resolution_order = self.__get_resolution_order(self.name, []) - - # Attribute cache: once an attribute's value is computed, don't compute it again - self.attr_cache = {} + # Create also a list with only the names of the targets in the resolution order + self.resolution_order_names = [t[0] for t in self.resolution_order] def program_cycle_s(self): try: @@ -195,7 +222,12 @@ def program_cycle_s(self): return 4 if self.is_disk_virtual else 1.5 def get_labels(self): - return [self.name] + CORE_LABELS[self.core] + self.extra_labels + labels = [self.name] + CORE_LABELS[self.core] + self.extra_labels + # Automatically define UVISOR_UNSUPPORTED if the target doesn't specifically + # define UVISOR_SUPPORTED + if not "UVISOR_SUPPORTED" in labels: + labels.append("UVISOR_UNSUPPORTED") + return labels # For now, this function only allows "post binary" hooks (hooks that are executed after # the binary image is extracted from the executable file) @@ -364,7 +396,7 @@ def binary_hook(t_self, resources, elf, binf): ######################################################################################################################## # Instantiate all public targets -TARGETS = [Target(name) for name, value in Target.get_json_target_data().items() if value.get("public", True)] +TARGETS = [Target.get_target(name) for name, value in Target.get_json_target_data().items() if value.get("public", True)] # Map each target name to its unique instance TARGET_MAP = dict([(t.name, t) for t in TARGETS]) diff --git a/tools/test.py b/tools/test.py new file mode 100644 index 00000000000..1c7fbcd5d2c --- /dev/null +++ b/tools/test.py @@ -0,0 +1,202 @@ +#! /usr/bin/env python2 +""" +mbed SDK +Copyright (c) 2011-2013 ARM Limited + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ + +TEST BUILD & RUN +""" +import sys +import os +import json + +ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +sys.path.insert(0, ROOT) + +from tools.test_api import test_path_to_name, find_tests, print_tests, build_tests, test_spec_from_test_builds +from tools.options import get_default_options_parser +from tools.build_api import build_project, build_library +from tools.targets import TARGET_MAP +from tools.utils import mkdir +from tools.test_exporters import ReportExporter, ResultExporterType + +if __name__ == '__main__': + try: + # Parse Options + parser = get_default_options_parser() + + parser.add_option("-D", "", + action="append", + dest="macros", + help="Add a macro definition") + + parser.add_option("-j", "--jobs", + type="int", + dest="jobs", + default=0, + help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)") + + parser.add_option("--source", dest="source_dir", + default=None, help="The source (input) directory (for sources other than tests). Defaults to current directory.", action="append") + + parser.add_option("--build", dest="build_dir", + default=None, help="The build (output) directory") + + parser.add_option("-l", "--list", action="store_true", dest="list", + default=False, help="List (recursively) available tests in order and exit") + + parser.add_option("-p", "--paths", dest="paths", + default=None, help="Limit the tests to those within the specified comma separated list of paths") + + format_choices = ["list", "json"] + format_default_choice = "list" + format_help = "Change the format in which tests are listed. Choices include: %s. Default: %s" % (", ".join(format_choices), format_default_choice) + parser.add_option("-f", "--format", type="choice", dest="format", + choices=format_choices, default=format_default_choice, help=format_help) + + parser.add_option("--continue-on-build-fail", action="store_true", dest="continue_on_build_fail", + default=None, help="Continue trying to build all tests if a build failure occurs") + + parser.add_option("-n", "--names", dest="names", + default=None, help="Limit the tests to a comma separated list of names") + + parser.add_option("--test-spec", dest="test_spec", + default=None, help="Destination path for a test spec file that can be used by the Greentea automated test tool") + + parser.add_option("--build-report-junit", dest="build_report_junit", + default=None, help="Destination path for a build report in the JUnit xml format") + + parser.add_option("-v", "--verbose", + action="store_true", + dest="verbose", + default=False, + help="Verbose diagnostic output") + + (options, args) = parser.parse_args() + + # Filter tests by path if specified + if options.paths: + all_paths = options.paths.split(",") + else: + all_paths = ["."] + + all_tests = {} + tests = {} + + # Find all tests in the relevant paths + for path in all_paths: + all_tests.update(find_tests(path)) + + # Filter tests by name if specified + if options.names: + all_names = options.names.split(",") + + all_tests_keys = all_tests.keys() + for name in all_names: + if name in all_tests_keys: + tests[name] = all_tests[name] + else: + print "[Warning] Test with name '%s' was not found in the available tests" % (name) + else: + tests = all_tests + + if options.list: + # Print available tests in order and exit + print_tests(tests, options.format) + sys.exit(0) + else: + # Build all tests + if not options.build_dir: + print "[ERROR] You must specify a build path" + sys.exit(1) + + base_source_paths = 
options.source_dir + + # Default base source path is the current directory + if not base_source_paths: + base_source_paths = ['.'] + + + target = TARGET_MAP[options.mcu] + + build_report = {} + build_properties = {} + + library_build_success = True + try: + # Build sources + build_library(base_source_paths, options.build_dir, target, options.tool, + options=options.options, + jobs=options.jobs, + clean=options.clean, + report=build_report, + properties=build_properties, + name="mbed-build", + macros=options.macros, + verbose=options.verbose, + archive=False) + except Exception, e: + library_build_success = False + print "Failed to build library" + + if library_build_success: + # Build all the tests + test_build_success, test_build = build_tests(tests, [options.build_dir], options.build_dir, target, options.tool, + options=options.options, + clean=options.clean, + report=build_report, + properties=build_properties, + macros=options.macros, + verbose=options.verbose, + jobs=options.jobs, + continue_on_build_fail=options.continue_on_build_fail) + + # If a path to a test spec is provided, write it to a file + if options.test_spec: + test_spec_data = test_spec_from_test_builds(test_build) + + # Create the target dir for the test spec if necessary + # mkdir will not create the dir if it already exists + test_spec_dir = os.path.dirname(options.test_spec) + if test_spec_dir: + mkdir(test_spec_dir) + + try: + with open(options.test_spec, 'w') as f: + f.write(json.dumps(test_spec_data, indent=2)) + except IOError, e: + print "[ERROR] Error writing test spec to file" + print e + + # If a path to a JUnit build report spec is provided, write it to a file + if options.build_report_junit: + report_exporter = ReportExporter(ResultExporterType.JUNIT, package="build") + report_exporter.report_to_file(build_report, options.build_report_junit, test_suite_properties=build_properties) + + print_report_exporter = ReportExporter(ResultExporterType.PRINT, package="build") + status = print_report_exporter.report(build_report) + + if status: + sys.exit(0) + else: + sys.exit(1) + + except KeyboardInterrupt, e: + print "\n[CTRL+c] exit" + except Exception,e: + import traceback + traceback.print_exc(file=sys.stdout) + print "[ERROR] %s" % str(e) + sys.exit(1) diff --git a/workspace_tools/test_api.py b/tools/test_api.py similarity index 92% rename from workspace_tools/test_api.py rename to tools/test_api.py index d80c0c3f07a..9cdedf9dc96 100644 --- a/workspace_tools/test_api.py +++ b/tools/test_api.py @@ -39,31 +39,32 @@ from subprocess import Popen, PIPE # Imports related to mbed build api -from workspace_tools.tests import TESTS -from workspace_tools.tests import TEST_MAP -from workspace_tools.paths import BUILD_DIR -from workspace_tools.paths import HOST_TESTS -from workspace_tools.utils import ToolException -from workspace_tools.utils import NotSupportedException -from workspace_tools.utils import construct_enum -from workspace_tools.targets import TARGET_MAP -from workspace_tools.test_db import BaseDBAccess -from workspace_tools.build_api import build_project, build_mbed_libs, build_lib -from workspace_tools.build_api import get_target_supported_toolchains -from workspace_tools.build_api import write_build_report -from workspace_tools.build_api import prep_report -from workspace_tools.build_api import prep_properties -from workspace_tools.build_api import create_result -from workspace_tools.build_api import add_result_to_report -from workspace_tools.libraries import LIBRARIES, LIBRARY_MAP -from 
workspace_tools.toolchains import TOOLCHAIN_BIN_PATH -from workspace_tools.test_exporters import ReportExporter, ResultExporterType - -import workspace_tools.host_tests.host_tests_plugins as host_tests_plugins +from tools.tests import TESTS +from tools.tests import TEST_MAP +from tools.paths import BUILD_DIR +from tools.paths import HOST_TESTS +from tools.utils import ToolException +from tools.utils import NotSupportedException +from tools.utils import construct_enum +from tools.targets import TARGET_MAP +from tools.test_db import BaseDBAccess +from tools.build_api import build_project, build_mbed_libs, build_lib +from tools.build_api import get_target_supported_toolchains +from tools.build_api import write_build_report +from tools.build_api import prep_report +from tools.build_api import prep_properties +from tools.build_api import create_result +from tools.build_api import add_result_to_report +from tools.build_api import scan_for_source_paths +from tools.libraries import LIBRARIES, LIBRARY_MAP +from tools.toolchains import TOOLCHAIN_BIN_PATH +from tools.test_exporters import ReportExporter, ResultExporterType + +import tools.host_tests.host_tests_plugins as host_tests_plugins try: import mbed_lstools - from workspace_tools.compliance.ioper_runner import get_available_oper_test_scopes + from tools.compliance.ioper_runner import get_available_oper_test_scopes except: pass @@ -1592,7 +1593,7 @@ def factory_db_logger(db_url): """ Factory database driver depending on database type supplied in database connection string db_url """ if db_url is not None: - from workspace_tools.test_mysql import MySQLDBAccess + from tools.test_mysql import MySQLDBAccess connection_info = BaseDBAccess().parse_db_connection_string(db_url) if connection_info is not None: (db_type, username, password, host, db_name) = BaseDBAccess().parse_db_connection_string(db_url) @@ -1949,3 +1950,167 @@ def get_default_test_options_parser(): action="store_true", help='Prints script version and exits') return parser + +def test_path_to_name(path): + """Change all slashes in a path into hyphens + This creates a unique cross-platform test name based on the path + This can eventually be overriden by a to-be-determined meta-data mechanism""" + name_parts = [] + head, tail = os.path.split(path) + while (tail and tail != "."): + name_parts.insert(0, tail) + head, tail = os.path.split(head) + + return "-".join(name_parts) + +def find_tests(base_dir): + """Given any directory, walk through the subdirectories and find all tests""" + + def is_subdir(path, directory): + path = os.path.realpath(path) + directory = os.path.realpath(directory) + relative = os.path.relpath(path, directory) + return not (relative.startswith(os.pardir + os.sep) and relative.startswith(os.pardir)) + + def find_tests_in_tests_directory(directory): + """Given a 'TESTS' directory, return a dictionary of test names and test paths. + The formate of the dictionary is {"test-name": "./path/to/test"}""" + tests = {} + + for d in os.listdir(directory): + # dir name host_tests is reserved for host python scripts. 
+ if d != "host_tests": + # Loop on test case directories + for td in os.listdir(os.path.join(directory, d)): + # Add test case to the results if it is a directory and not "host_tests" + if td != "host_tests": + test_case_path = os.path.join(directory, d, td) + if os.path.isdir(test_case_path): + tests[test_path_to_name(test_case_path)] = test_case_path + + return tests + + tests_path = 'TESTS' + + # Determine if "base_dir" is already a "TESTS" directory + _, top_folder = os.path.split(base_dir) + + if top_folder == tests_path: + # Already pointing at a "TESTS" directory + return find_tests_in_tests_directory(base_dir) + else: + # Not pointing at a "TESTS" directory, so go find one! + tests = {} + + dirs = scan_for_source_paths(base_dir) + + test_and_sub_dirs = [x for x in dirs if tests_path in x] + test_dirs = [] + for potential_test_dir in test_and_sub_dirs: + good_to_add = True + if test_dirs: + for test_dir in test_dirs: + if is_subdir(potential_test_dir, test_dir): + good_to_add = False + break + + if good_to_add: + test_dirs.append(potential_test_dir) + + # Only look at valid paths + for path in test_dirs: + # Get the tests inside of the "TESTS" directory + new_tests = find_tests_in_tests_directory(path) + if new_tests: + tests.update(new_tests) + + return tests + +def print_tests(tests, format="list"): + """Given a dictionary of tests (as returned from "find_tests"), print them + in the specified format""" + if format == "list": + for test_name, test_path in tests.iteritems(): + print "Test Case:" + print " Name: %s" % test_name + print " Path: %s" % test_path + elif format == "json": + print json.dumps(tests, indent=2) + else: + print "Unknown format '%s'" % format + sys.exit(1) + +def build_tests(tests, base_source_paths, build_path, target, toolchain_name, + options=None, clean=False, notify=None, verbose=False, jobs=1, + macros=None, silent=False, report=None, properties=None, + continue_on_build_fail=False): + """Given the data structure from 'find_tests' and the typical build parameters, + build all the tests + + Returns a tuple of the build result (True or False) followed by the test + build data structure""" + + test_build = { + "platform": target.name, + "toolchain": toolchain_name, + "base_path": build_path, + "baud_rate": 9600, + "binary_type": "bootable", + "tests": {} + } + + result = True + + for test_name, test_path in tests.iteritems(): + test_build_path = os.path.join(build_path, test_path) + src_path = base_source_paths + [test_path] + + try: + bin_file = build_project(src_path, test_build_path, target, toolchain_name, + options=options, + jobs=jobs, + clean=clean, + macros=macros, + name=test_name, + report=report, + properties=properties, + verbose=verbose) + + except Exception, e: + result = False + + if continue_on_build_fail: + continue + else: + break + + # If a clean build was carried out last time, disable it for the next build. + # Otherwise the previously built test will be deleted. 
+ if clean: + clean = False + + # Normalize the path + bin_file = os.path.normpath(bin_file) + + test_build['tests'][test_name] = { + "binaries": [ + { + "path": bin_file + } + ] + } + + print 'Image: %s'% bin_file + + test_builds = {} + test_builds["%s-%s" % (target.name, toolchain_name)] = test_build + + + return result, test_builds + + +def test_spec_from_test_builds(test_builds): + return { + "builds": test_builds + } + \ No newline at end of file diff --git a/workspace_tools/test_db.py b/tools/test_db.py similarity index 100% rename from workspace_tools/test_db.py rename to tools/test_db.py diff --git a/workspace_tools/test_exporters.py b/tools/test_exporters.py similarity index 95% rename from workspace_tools/test_exporters.py rename to tools/test_exporters.py index 623acd6f4de..857a6bfcd3a 100644 --- a/workspace_tools/test_exporters.py +++ b/tools/test_exporters.py @@ -17,8 +17,8 @@ Author: Przemyslaw Wirkus """ -from workspace_tools.utils import construct_enum - +from tools.utils import construct_enum, mkdir +import os ResultExporterType = construct_enum(HTML='Html_Exporter', JUNIT='JUnit_Exporter', @@ -72,7 +72,8 @@ def __init__(self, result_exporter_type, package="test"): self.result_exporter_type = result_exporter_type self.package = package - def report(self, test_summary_ext, test_suite_properties=None): + def report(self, test_summary_ext, test_suite_properties=None, + print_log_for_failures=True): """ Invokes report depending on exporter_type set in constructor """ if self.result_exporter_type == ResultExporterType.HTML: @@ -86,7 +87,7 @@ def report(self, test_summary_ext, test_suite_properties=None): return self.exporter_junit_ioper(test_summary_ext, test_suite_properties) elif self.result_exporter_type == ResultExporterType.PRINT: # JUNIT exporter for interoperability test - return self.exporter_print(test_summary_ext) + return self.exporter_print(test_summary_ext, print_log_for_failures=print_log_for_failures) return None def report_to_file(self, test_summary_ext, file_name, test_suite_properties=None): @@ -97,6 +98,9 @@ def report_to_file(self, test_summary_ext, file_name, test_suite_properties=None def write_to_file(self, report, file_name): if report is not None: + dirname = os.path.dirname(file_name) + if dirname: + mkdir(dirname) with open(file_name, 'w') as f: f.write(report) @@ -293,11 +297,15 @@ def exporter_junit(self, test_result_ext, test_suite_properties=None): test_suites.append(ts) return TestSuite.to_xml_string(test_suites) - def exporter_print_helper(self, array): + def exporter_print_helper(self, array, print_log=False): for item in array: print " * %s::%s::%s" % (item["target_name"], item["toolchain_name"], item["id"]) + if print_log: + log_lines = item["output"].split("\n") + for log_line in log_lines: + print " %s" % log_line - def exporter_print(self, test_result_ext): + def exporter_print(self, test_result_ext, print_log_for_failures=False): """ Export test results in print format. 
""" failures = [] @@ -336,7 +344,7 @@ def exporter_print(self, test_result_ext): if failures: print "\n\nBuild failures:" - self.exporter_print_helper(failures) + self.exporter_print_helper(failures, print_log=print_log_for_failures) return False else: return True diff --git a/workspace_tools/test_mysql.py b/tools/test_mysql.py similarity index 99% rename from workspace_tools/test_mysql.py rename to tools/test_mysql.py index 1561dab32fd..4f00ab6dd9b 100644 --- a/workspace_tools/test_mysql.py +++ b/tools/test_mysql.py @@ -21,7 +21,7 @@ import MySQLdb as mdb # Imports from TEST API -from workspace_tools.test_db import BaseDBAccess +from tools.test_db import BaseDBAccess class MySQLDBAccess(BaseDBAccess): diff --git a/workspace_tools/test_webapi.py b/tools/test_webapi.py similarity index 98% rename from workspace_tools/test_webapi.py rename to tools/test_webapi.py index 59273e80d2e..ffed0e46243 100644 --- a/workspace_tools/test_webapi.py +++ b/tools/test_webapi.py @@ -28,8 +28,8 @@ sys.path.insert(0, ROOT) # Imports related to mbed build api -from workspace_tools.utils import construct_enum -from workspace_tools.build_api import mcu_toolchain_matrix +from tools.utils import construct_enum +from tools.build_api import mcu_toolchain_matrix # Imports from TEST API from test_api import SingleTestRunner diff --git a/workspace_tools/tests.py b/tools/tests.py similarity index 99% rename from workspace_tools/tests.py rename to tools/tests.py index 924c0280633..fab1bbf8fd4 100644 --- a/workspace_tools/tests.py +++ b/tools/tests.py @@ -14,8 +14,8 @@ See the License for the specific language governing permissions and limitations under the License. """ -from workspace_tools.paths import * -from workspace_tools.data.support import * +from tools.paths import * +from tools.data.support import * TEST_CMSIS_LIB = join(TEST_DIR, "cmsis", "lib") TEST_MBED_LIB = join(TEST_DIR, "mbed", "env") @@ -1142,10 +1142,10 @@ GROUPS["rtos"] = [test["id"] for test in TESTS if test["id"].startswith("RTOS_")] GROUPS["net"] = [test["id"] for test in TESTS if test["id"].startswith("NET_")] GROUPS["automated"] = [test["id"] for test in TESTS if test.get("automated", False)] -# Look for 'TEST_GROUPS' in private_settings.py and update the GROUPS dictionary +# Look for 'TEST_GROUPS' in mbed_settings.py and update the GROUPS dictionary # with the information in test_groups if found try: - from workspace_tools.private_settings import TEST_GROUPS + from mbed_settings import TEST_GROUPS except: TEST_GROUPS = {} GROUPS.update(TEST_GROUPS) diff --git a/workspace_tools/toolchains/__init__.py b/tools/toolchains/__init__.py similarity index 80% rename from workspace_tools/toolchains/__init__.py rename to tools/toolchains/__init__.py index 16b9f4dfe2c..8f55af6800e 100644 --- a/workspace_tools/toolchains/__init__.py +++ b/tools/toolchains/__init__.py @@ -17,18 +17,21 @@ import re import sys -from os import stat, walk +from os import stat, walk, getcwd, sep from copy import copy from time import time, sleep from types import ListType from shutil import copyfile -from os.path import join, splitext, exists, relpath, dirname, basename, split +from os.path import join, splitext, exists, relpath, dirname, basename, split, abspath from inspect import getmro from multiprocessing import Pool, cpu_count -from workspace_tools.utils import run_cmd, mkdir, rel_path, ToolException, NotSupportedException, split_path -from workspace_tools.settings import BUILD_OPTIONS, MBED_ORG_USER -import workspace_tools.hooks as hooks +from tools.utils import run_cmd, 
mkdir, rel_path, ToolException, NotSupportedException, split_path +from tools.settings import BUILD_OPTIONS, MBED_ORG_USER +import tools.hooks as hooks +from tools.memap import MemapParser +from hashlib import md5 +import fnmatch #Disables multiprocessing if set to higher number than the host machine CPUs @@ -51,6 +54,7 @@ def compile_worker(job): 'results': results } + class Resources: def __init__(self, base_path=None): self.base_path = base_path @@ -78,6 +82,19 @@ def __init__(self, base_path=None): # Other files self.hex_files = [] self.bin_files = [] + self.json_files = [] + + def __add__(self, resources): + if resources is None: + return self + else: + return self.add(resources) + + def __radd__(self, resources): + if resources is None: + return self + else: + return self.add(resources) def add(self, resources): self.inc_dirs += resources.inc_dirs @@ -102,11 +119,15 @@ def add(self, resources): self.hex_files += resources.hex_files self.bin_files += resources.bin_files + self.json_files += resources.json_files + + return self def relative_to(self, base, dot=False): for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', 'cpp_sources', 'lib_dirs', 'objects', 'libraries', - 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']: + 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', + 'hex_files', 'bin_files', 'json_files']: v = [rel_path(f, base, dot) for f in getattr(self, field)] setattr(self, field, v) if self.linker_script is not None: @@ -115,7 +136,8 @@ def relative_to(self, base, dot=False): def win_to_unix(self): for field in ['inc_dirs', 'headers', 's_sources', 'c_sources', 'cpp_sources', 'lib_dirs', 'objects', 'libraries', - 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', 'hex_files', 'bin_files']: + 'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files', + 'hex_files', 'bin_files', 'json_files']: v = [f.replace('\\', '/') for f in getattr(self, field)] setattr(self, field, v) if self.linker_script is not None: @@ -147,7 +169,6 @@ def __str__(self): return '\n'.join(s) - # Support legacy build conventions: the original mbed build system did not have # standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but # had the knowledge of a list of these directories to be ignored. 
@@ -164,16 +185,17 @@ def __str__(self): class mbedToolchain: VERBOSE = True + ignorepatterns = [] CORTEX_SYMBOLS = { - "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0"], - "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS"], + "Cortex-M0" : ["__CORTEX_M0", "ARM_MATH_CM0", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], + "Cortex-M0+": ["__CORTEX_M0PLUS", "ARM_MATH_CM0PLUS", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], "Cortex-M1" : ["__CORTEX_M3", "ARM_MATH_CM1"], - "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3"], - "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4"], - "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1"], - "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7"], - "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1"], + "Cortex-M3" : ["__CORTEX_M3", "ARM_MATH_CM3", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], + "Cortex-M4" : ["__CORTEX_M4", "ARM_MATH_CM4", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], + "Cortex-M4F" : ["__CORTEX_M4", "ARM_MATH_CM4", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], + "Cortex-M7" : ["__CORTEX_M7", "ARM_MATH_CM7", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], + "Cortex-M7F" : ["__CORTEX_M7", "ARM_MATH_CM7", "__FPU_PRESENT=1", "__CMSIS_RTOS", "__MBED_CMSIS_RTOS_CM"], "Cortex-A9" : ["__CORTEX_A9", "ARM_MATH_CA9", "__FPU_PRESENT", "__CMSIS_RTOS", "__EVAL", "__MBED_CMSIS_RTOS_CA9"], } @@ -210,6 +232,7 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False, self.has_config = False self.build_all = False + self.build_dir = None self.timestamp = time() self.jobs = 1 @@ -217,6 +240,9 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False, self.mp_pool = None + if 'UVISOR_PRESENT=1' in self.macros: + self.target.core = re.sub(r"F$", '', self.target.core) + def get_output(self): return self.output @@ -299,7 +325,10 @@ def get_symbols(self): # Add target's symbols self.symbols += self.target.macros + # Add target's hardware self.symbols += ["DEVICE_" + data + "=1" for data in self.target.device_has] + # Add target's features + self.symbols += ["FEATURE_" + data + "=1" for data in self.target.features] # Add extra symbols passed via 'macros' parameter self.symbols += self.macros @@ -309,12 +338,16 @@ def get_symbols(self): return list(set(self.symbols)) # Return only unique symbols + # Extend the internal list of macros + def add_macros(self, new_macros): + self.macros.extend(new_macros) + def get_labels(self): if self.labels is None: toolchain_labels = [c.__name__ for c in getmro(self.__class__)] toolchain_labels.remove('mbedToolchain') self.labels = { - 'TARGET': self.target.get_labels(), + 'TARGET': self.target.get_labels() + ["DEBUG" if "debug-info" in self.options else "RELEASE"], 'FEATURE': self.target.features, 'TOOLCHAIN': toolchain_labels } @@ -341,7 +374,13 @@ def need_update(self, target, dependencies): return False - def scan_resources(self, path): + def is_ignored(self, file_path): + for pattern in self.ignorepatterns: + if fnmatch.fnmatch(file_path, pattern): + return True + return False + + def scan_resources(self, path, exclude_paths=None): labels = self.get_labels() resources = Resources(path) self.has_config = False @@ -356,25 +395,55 @@ def scan_resources(self, path): bottom-up mode the directories in dirnames are generated before dirpath itself is generated. 
""" - for root, dirs, files in walk(path): + for root, dirs, files in walk(path, followlinks=True): # Remove ignored directories + # Check if folder contains .mbedignore + if ".mbedignore" in files : + with open (join(root,".mbedignore"), "r") as f: + lines=f.readlines() + lines = [l.strip() for l in lines] # Strip whitespaces + lines = [l for l in lines if l != ""] # Strip empty lines + lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines + # Append root path to glob patterns + # and append patterns to ignorepatterns + self.ignorepatterns.extend([join(root,line.strip()) for line in lines]) + for d in copy(dirs): + dir_path = join(root, d) if d == '.hg': - dir_path = join(root, d) resources.repo_dirs.append(dir_path) resources.repo_files.extend(self.scan_repository(dir_path)) if ((d.startswith('.') or d in self.legacy_ignore_dirs) or (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or (d.startswith('FEATURE_') and d[8:] not in labels['FEATURE']) or - (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN'])): + (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or + (d == 'TESTS')): + dirs.remove(d) + + + # Remove dirs that already match the ignorepatterns + # to avoid travelling into them and to prevent them + # on appearing in include path. + if self.is_ignored(join(dir_path,"")): dirs.remove(d) + if exclude_paths: + for exclude_path in exclude_paths: + rel_path = relpath(dir_path, exclude_path) + if not (rel_path.startswith('..')): + dirs.remove(d) + break + # Add root to include paths resources.inc_dirs.append(root) for file in files: file_path = join(root, file) + + if self.is_ignored(file_path): + continue + _, ext = splitext(file) ext = ext.lower() @@ -419,6 +488,9 @@ def scan_resources(self, path): elif ext == '.bin': resources.bin_files.append(file_path) + elif ext == '.json': + resources.json_files.append(file_path) + return resources def scan_repository(self, path): @@ -460,23 +532,40 @@ def copy_files(self, files_paths, trg_path, rel_path=None): def relative_object_path(self, build_path, base_dir, source): source_dir, name, _ = split_path(source) + obj_dir = join(build_path, relpath(source_dir, base_dir)) mkdir(obj_dir) return join(obj_dir, name + '.o') + def get_inc_file(self, includes): + include_file = join(self.build_dir, ".includes_%s.txt" % self.inc_md5) + if not exists(include_file): + with open(include_file, "wb") as f: + cmd_list = [] + for c in includes: + if c: + cmd_list.append(('-I%s' % c).replace("\\", "/")) + string = " ".join(cmd_list) + f.write(string) + return include_file + def compile_sources(self, resources, build_path, inc_dirs=None): # Web IDE progress bar for project build files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources self.to_be_compiled = len(files_to_compile) self.compiled = 0 - #for i in self.build_params: - # self.debug(i) - # self.debug("%s" % self.build_params[i]) - inc_paths = resources.inc_dirs if inc_dirs is not None: inc_paths.extend(inc_dirs) + # De-duplicate include paths + inc_paths = set(inc_paths) + # Sort include paths for consistency + inc_paths = sorted(set(inc_paths)) + # Unique id of all include paths + self.inc_md5 = md5(' '.join(inc_paths)).hexdigest() + # Where to store response files + self.build_dir = build_path objects = [] queue = [] @@ -484,17 +573,14 @@ def compile_sources(self, resources, build_path, inc_dirs=None): # The dependency checking for C/C++ is delegated to the compiler base_path = resources.base_path + # Sort compile 
queue for consistency files_to_compile.sort() + work_dir = getcwd() + for source in files_to_compile: _, name, _ = split_path(source) object = self.relative_object_path(build_path, base_path, source) - # Avoid multiple mkdir() calls on same work directory - work_dir = dirname(object) - if work_dir is not prev_dir: - prev_dir = work_dir - mkdir(work_dir) - # Queue mode (multiprocessing) commands = self.compile_command(source, object, inc_paths) if commands is not None: @@ -542,7 +628,7 @@ def compile_queue(self, queue, objects): itr = 0 while True: itr += 1 - if itr > 30000: + if itr > 180000: p.terminate() p.join() raise ToolException("Compile did not finish in 5 minutes") @@ -633,28 +719,6 @@ def compile_output(self, output=[]): else: raise ToolException(_stderr) - def compile(self, cc, source, object, includes): - _, ext = splitext(source) - ext = ext.lower() - - command = cc + ['-D%s' % s for s in self.get_symbols()] + ["-I%s" % i for i in includes] + ["-o", object, source] - - if hasattr(self, "get_dep_opt"): - base, _ = splitext(object) - dep_path = base + '.d' - command.extend(self.get_dep_opt(dep_path)) - - if hasattr(self, "cc_extra"): - command.extend(self.cc_extra(base)) - - return [command] - - def compile_c(self, source, object, includes): - return self.compile(self.cc, source, object, includes) - - def compile_cpp(self, source, object, includes): - return self.compile(self.cppc, source, object, includes) - def build_library(self, objects, dir, name): needed_update = False lib = self.STD_LIB_NAME % name @@ -677,10 +741,16 @@ def link_program(self, r, tmp_path, name): if self.target.OUTPUT_NAMING == "8.3": name = name[0:8] ext = ext[0:3] - + + # Create destination directory + head, tail = split(name) + new_path = join(tmp_path, head) + mkdir(new_path) + filename = name+'.'+ext elf = join(tmp_path, name + '.elf') bin = join(tmp_path, filename) + map = join(tmp_path, name + '.map') if self.need_update(elf, r.objects + r.libraries + [r.linker_script]): needed_update = True @@ -693,18 +763,20 @@ def link_program(self, r, tmp_path, name): self.binary(r, elf, bin) + self.mem_stats(map) + self.var("compile_succeded", True) self.var("binary", filename) return bin, needed_update def default_cmd(self, command): + self.debug("Command: %s"% ' '.join(command)) _stdout, _stderr, _rc = run_cmd(command) # Print all warning / erros from stderr to console output for error_line in _stderr.splitlines(): print error_line - self.debug("Command: %s"% ' '.join(command)) self.debug("Return: %s"% _rc) for output_line in _stdout.splitlines(): @@ -749,9 +821,33 @@ def tool_error(self, message): def var(self, key, value): self.notify({'type': 'var', 'key': key, 'val': value}) -from workspace_tools.settings import ARM_BIN -from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH -from workspace_tools.settings import IAR_PATH + def mem_stats(self, map): + # Creates parser object + toolchain = self.__class__.__name__ + + # Create memap object + memap = MemapParser() + + # Parse and decode a map file + if memap.parse(abspath(map), toolchain) is False: + self.info("Unknown toolchain for memory statistics %s" % toolchain) + return + + # Write output to stdout in text (pretty table) format + memap.generate_output('table') + + # Write output to file in JSON format + map_out = splitext(map)[0] + "_map.json" + memap.generate_output('json', map_out) + + # Write output to file in CSV format for the CI + map_csv = splitext(map)[0] + "_map.csv" + memap.generate_output('csv-ci', map_csv) + + +from tools.settings 
import ARM_BIN +from tools.settings import GCC_ARM_PATH, GCC_CR_PATH +from tools.settings import IAR_PATH TOOLCHAIN_BIN_PATH = { 'ARM': ARM_BIN, @@ -761,9 +857,9 @@ def var(self, key, value): 'IAR': IAR_PATH } -from workspace_tools.toolchains.arm import ARM_STD, ARM_MICRO -from workspace_tools.toolchains.gcc import GCC_ARM, GCC_CR -from workspace_tools.toolchains.iar import IAR +from tools.toolchains.arm import ARM_STD, ARM_MICRO +from tools.toolchains.gcc import GCC_ARM, GCC_CR +from tools.toolchains.iar import IAR TOOLCHAIN_CLASSES = { 'ARM': ARM_STD, diff --git a/workspace_tools/toolchains/arm.py b/tools/toolchains/arm.py similarity index 54% rename from workspace_tools/toolchains/arm.py rename to tools/toolchains/arm.py index 447cabbfee7..5be4487c4b9 100644 --- a/workspace_tools/toolchains/arm.py +++ b/tools/toolchains/arm.py @@ -15,13 +15,13 @@ limitations under the License. """ import re -from os.path import join -import copy +from os.path import join, dirname, splitext, basename, exists -from workspace_tools.toolchains import mbedToolchain -from workspace_tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB -from workspace_tools.hooks import hook_tool -from workspace_tools.settings import GOANNA_PATH +from tools.toolchains import mbedToolchain +from tools.settings import ARM_BIN, ARM_INC, ARM_LIB, MY_ARM_CLIB, ARM_CPPLIB, GOANNA_PATH +from tools.hooks import hook_tool +from tools.utils import mkdir +import copy class ARM(mbedToolchain): LINKER_EXT = '.sct' @@ -31,13 +31,14 @@ class ARM(mbedToolchain): DIAGNOSTIC_PATTERN = re.compile('"(?P[^"]+)", line (?P\d+)( \(column (?P\d+)\)|): (?PWarning|Error): (?P.+)') DEP_PATTERN = re.compile('\S+:\s(?P.+)\n') + DEFAULT_FLAGS = { - 'common': ["--apcs=interwork", - "--brief_diagnostics"], - 'asm': ['-I"%s"' % ARM_INC], - 'c': ["-c", "--gnu", "-Otime", "--restrict", "--multibyte_chars", "--split_sections", "--md", "--no_depend_system_headers", '-I"%s"' % ARM_INC, - "--c99", "-D__ASSERT_MSG" ], - 'cxx': ["--cpp", "--no_rtti", "-D__ASSERT_MSG"], + 'common': ["-c", "--gnu", + "-Otime", "--split_sections", "--apcs=interwork", + "--brief_diagnostics", "--restrict", "--multibyte_chars", "-I", "\""+ARM_INC+"\""], + 'asm': [], + 'c': ["--md", "--no_depend_system_headers", "--c99", "-D__ASSERT_MSG"], + 'cxx': ["--cpp", "--no_rtti"], 'ld': [], } @@ -66,7 +67,7 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False, else: self.flags['c'].append("-O3") - self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] + self.flags['c'] + self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] if not "analyze" in self.options: self.cc = [main_cc] + self.flags['common'] + self.flags['c'] self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx'] @@ -80,19 +81,6 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False, self.ar = join(ARM_BIN, "armar") self.elf2bin = join(ARM_BIN, "fromelf") - def remove_option(self, option): - for tool in [self.asm, self.cc, self.cppc]: - if option in tool: - tool.remove(option) - - def assemble(self, source, object, includes): - # Preprocess first, then assemble - tempfile = object + '.E.s' - return [ - self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-E", "-o", tempfile, source], - self.hook.get_cmdline_assembler(self.asm + ["-o", object, tempfile]) - ] - def parse_dependencies(self, dep_path): dependencies = [] for line in open(dep_path).readlines(): @@ -122,39 
+110,119 @@ def parse_output(self, output): match.group('message') ) - def get_dep_opt(self, dep_path): + def get_dep_option(self, object): + base, _ = splitext(object) + dep_path = base + '.d' return ["--depend", dep_path] - def archive(self, objects, lib_path): - self.default_cmd([self.ar, '-r', lib_path] + objects) + def get_compile_options(self, defines, includes): + return ['-D%s' % d for d in defines] + ['--via', self.get_inc_file(includes)] + + @hook_tool + def assemble(self, source, object, includes): + # Preprocess first, then assemble + dir = join(dirname(object), '.temp') + mkdir(dir) + tempfile = join(dir, basename(object) + '.E.s') + + # Build preprocess assemble command + cmd_pre = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-E", "-o", tempfile, source] + + # Build main assemble command + cmd = self.asm + ["-o", object, tempfile] + + # Call cmdline hook + cmd_pre = self.hook.get_cmdline_assembler(cmd_pre) + cmd = self.hook.get_cmdline_assembler(cmd) + + # Return command array, don't execute + return [cmd_pre, cmd] + + @hook_tool + def compile(self, cc, source, object, includes): + # Build compile command + cmd = cc + self.get_compile_options(self.get_symbols(), includes) + + cmd.extend(self.get_dep_option(object)) + + cmd.extend(["-o", object, source]) + + # Call cmdline hook + cmd = self.hook.get_cmdline_compiler(cmd) + return [cmd] + + def compile_c(self, source, object, includes): + return self.compile(self.cc, source, object, includes) + + def compile_cpp(self, source, object, includes): + return self.compile(self.cppc, source, object, includes) + + @hook_tool def link(self, output, objects, libraries, lib_dirs, mem_map): + map_file = splitext(output)[0] + ".map" if len(lib_dirs): - args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--list=.link_totals.txt"] + args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--map", "--list=%s" % map_file] else: - args = ["-o", output, "--info=totals", "--list=.link_totals.txt"] + args = ["-o", output, "--info=totals", "--map", "--list=%s" % map_file] if mem_map: args.extend(["--scatter", mem_map]) - if hasattr(self.target, "link_cmdline_hook"): - args = self.target.link_cmdline_hook(self.__class__.__name__, args) + # Build linker command + cmd = self.ld + args + objects + libraries + self.sys_libs + + # Call cmdline hook + cmd = self.hook.get_cmdline_linker(cmd) + + # Split link command to linker executable + response file + link_files = join(dirname(output), ".link_files.txt") + with open(link_files, "wb") as f: + cmd_linker = cmd[0] + cmd_list = [] + for c in cmd[1:]: + if c: + cmd_list.append(('"%s"' % c) if not c.startswith('-') else c) + string = " ".join(cmd_list).replace("\\", "/") + f.write(string) + + # Exec command + self.default_cmd([cmd_linker, '--via', link_files]) - self.default_cmd(self.ld + args + objects + libraries + self.sys_libs) + @hook_tool + def archive(self, objects, lib_path): + archive_files = join(dirname(lib_path), ".archive_files.txt") + with open(archive_files, "wb") as f: + o_list = [] + for o in objects: + o_list.append('"%s"' % o) + string = " ".join(o_list).replace("\\", "/") + f.write(string) + + # Exec command + self.default_cmd([self.ar, '-r', lib_path, '--via', archive_files]) @hook_tool def binary(self, resources, elf, bin): - args = [self.elf2bin, '--bin', '-o', bin, elf] + # Build binary command + cmd = [self.elf2bin, '--bin', '-o', bin, elf] + + # Call cmdline hook + cmd = self.hook.get_cmdline_binary(cmd) - if 
@@ -163,20 +231,24 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
         ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
 
-        # add microlib to the command line flags
-        self.asm += ["-D__MICROLIB"]
-        self.cc += ["--library_type=microlib", "-D__MICROLIB"]
-        self.cppc += ["--library_type=microlib", "-D__MICROLIB"]
+        # Extend flags
+        self.flags['common'].extend(["-D__MICROLIB"])
+        self.flags['c'].extend(["--library_type=microlib"])
+        self.flags['ld'].extend(["--library_type=microlib"])
 
-        # the exporter uses --library_type flag to set microlib
-        self.flags['c'] += ["--library_type=microlib"]
-        self.flags['cxx'] += ["--library_type=microlib"]
-        self.flags['ld'].append("--library_type=microlib")
+        # Run-time values
+        self.asm += ["-D__MICROLIB"]
+        self.cc += ["-D__MICROLIB", "--library_type=microlib"]
+        self.cppc += ["-D__MICROLIB", "--library_type=microlib"]
+        self.ld += ["--library_type=microlib"]
 
         # We had to patch microlib to add C++ support
         # In later releases this patch should have entered mainline
         if ARM_MICRO.PATCHED_LIBRARY:
-            self.flags['ld'].append("--noscanlib")
+            # Extend flags
+            self.flags['ld'].extend(["--noscanlib"])
+            # Run-time values
+            self.ld += ["--noscanlib"]
 
             # System Libraries
             self.sys_libs.extend([join(MY_ARM_CLIB, lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])
@@ -187,4 +259,7 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False,
         elif target.core in ["Cortex-M0", "Cortex-M0+"]:
             self.sys_libs.extend([join(ARM_CPPLIB, lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
         else:
-            self.ld.append("--libpath=%s" % ARM_LIB)
+            # Extend flags
+            self.flags['ld'].extend(["--libpath", ARM_LIB])
+            # Run-time values
+            self.ld.extend(["--libpath", ARM_LIB])
diff --git a/workspace_tools/toolchains/gcc.py b/tools/toolchains/gcc.py
similarity index 66%
rename from workspace_tools/toolchains/gcc.py
rename to tools/toolchains/gcc.py
index a6cb0633355..58ed5dd4b3b 100644
--- a/workspace_tools/toolchains/gcc.py
+++ b/tools/toolchains/gcc.py
@@ -15,19 +15,19 @@
    limitations under the License.
 """
 import re
-from os.path import join, basename, splitext
+from os.path import join, basename, splitext, dirname, exists
 
-from workspace_tools.toolchains import mbedToolchain
-from workspace_tools.settings import GCC_ARM_PATH, GCC_CR_PATH
-from workspace_tools.settings import GOANNA_PATH
-from workspace_tools.hooks import hook_tool
+from tools.toolchains import mbedToolchain
+from tools.settings import GCC_ARM_PATH, GCC_CR_PATH
+from tools.settings import GOANNA_PATH
+from tools.hooks import hook_tool
 
 class GCC(mbedToolchain):
     LINKER_EXT = '.ld'
     LIBRARY_EXT = '.a'
 
     STD_LIB_NAME = "lib%s.a"
-    DIAGNOSTIC_PATTERN = re.compile('((?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')
+    DIAGNOSTIC_PATTERN = re.compile('((?P<file>[^:]+):(?P<line>\d+):)(\d+:)? (?P<severity>warning|error): (?P<message>.+)')
 
     def __init__(self, target, options=None, notify=None, macros=None, silent=False, tool_path="", extra_verbose=False):
         mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
@@ -67,7 +67,7 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False,
             "-Wno-unused-parameter", "-Wno-missing-field-initializers",
             "-fmessage-length=0", "-fno-exceptions", "-fno-builtin",
             "-ffunction-sections", "-fdata-sections",
-            "-MMD", "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
+            "-fno-delete-null-pointer-checks", "-fomit-frame-pointer"
         ] + self.cpu
 
         if "save-asm" in self.options:
@@ -95,12 +95,11 @@ def __init__(self, target, options=None, notify=None, macros=None, silent=False,
         self.ar = join(tool_path, "arm-none-eabi-ar")
         self.elf2bin = join(tool_path, "arm-none-eabi-objcopy")
 
-    def assemble(self, source, object, includes):
-        return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])]
-
     def parse_dependencies(self, dep_path):
         dependencies = []
-        for line in open(dep_path).readlines()[1:]:
+        buff = open(dep_path).readlines()
+        buff[0] = re.sub('^(.*?)\: ', '', buff[0])
+        for line in buff:
             file = line.replace('\\\n', '').strip()
             if file:
                 # GCC might list more than one dependency on a single line, in this case
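The reworked `parse_dependencies()` above no longer throws away the whole first line of the `.d` file (which the new `get_dep_option()` in the next hunk requests via `-MD -MF`); it strips only the `target:` prefix, so a dependency listed on that first line is kept. A small sketch of the round trip on a hypothetical `main.d` (contents illustrative):

```python
import re

# Hypothetical contents of a GCC-generated main.d dependency file
dep_text = ("main.o: main.c \\\n"
            " mbed.h rtos.h \\\n"
            " FastPWM.h\n")

buff = dep_text.splitlines(True)
# Strip the "main.o: " target prefix, as parse_dependencies() above does
buff[0] = re.sub('^(.*?)\: ', '', buff[0])

dependencies = []
for line in buff:
    file = line.replace('\\\n', '').strip()
    if file:
        # GCC may list several dependencies on a single line
        dependencies.extend(file.split())

print dependencies  # ['main.c', 'mbed.h', 'rtos.h', 'FastPWM.h']
```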
@@ -137,45 +136,115 @@ def parse_output(self, output):
                 )
                 continue
 
-            # Each line should start with the file information: "filepath: ..."
-            # i should point past the file path              ^
-            # avoid the first column in Windows (C:\)
-            i = line.find(':', 2)
-            if i == -1: continue
-
-            if state == WHERE:
-                file = line[:i]
-                message = line[i+1:].strip() + ' '
-                state = WHAT
-
-            elif state == WHAT:
-                match = GCC.DIAGNOSTIC_PATTERN.match(line[i+1:])
-                if match is None:
-                    state = WHERE
-                    continue
+            match = GCC.DIAGNOSTIC_PATTERN.match(line)
+            if match is not None:
                 self.cc_info(
-                    match.group('severity'),
-                    file, match.group('line'),
-                    message + match.group('message')
+                    match.group('severity').lower(),
+                    match.group('file'),
+                    match.group('line'),
+                    match.group('message'),
+                    target_name=self.target.name,
+                    toolchain_name=self.name
                 )
 
-    def archive(self, objects, lib_path):
-        self.default_cmd([self.ar, "rcs", lib_path] + objects)
+    def get_dep_option(self, object):
+        base, _ = splitext(object)
+        dep_path = base + '.d'
+        return ["-MD", "-MF", dep_path]
+
+    def get_compile_options(self, defines, includes):
+        return ['-D%s' % d for d in defines] + ['@%s' % self.get_inc_file(includes)]
+
+    @hook_tool
+    def assemble(self, source, object, includes):
+        # Build assemble command
+        cmd = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-o", object, source]
+
+        # Call cmdline hook
+        cmd = self.hook.get_cmdline_assembler(cmd)
+
+        # Return command array, don't execute
+        return [cmd]
+
+    @hook_tool
+    def compile(self, cc, source, object, includes):
+        # Build compile command
+        cmd = cc + self.get_compile_options(self.get_symbols(), includes)
+
+        cmd.extend(self.get_dep_option(object))
+
+        cmd.extend(["-o", object, source])
+
+        # Call cmdline hook
+        cmd = self.hook.get_cmdline_compiler(cmd)
+
+        return [cmd]
+
+    def compile_c(self, source, object, includes):
+        return self.compile(self.cc, source, object, includes)
+
+    def compile_cpp(self, source, object, includes):
+        return self.compile(self.cppc, source, object, includes)
 
+    @hook_tool
     def link(self, output, objects, libraries, lib_dirs, mem_map):
         libs = []
         for l in libraries:
             name, _ = splitext(basename(l))
             libs.append("-l%s" % name[3:])
         libs.extend(["-l%s" % l for l in self.sys_libs])
+
+        # Build linker command
+        map_file = splitext(output)[0] + ".map"
+        cmd = self.ld + ["-o", output, "-Wl,-Map=%s" % map_file] + objects + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]
+        if mem_map:
+            cmd.extend(['-T', mem_map])
+
+        for L in lib_dirs:
+            cmd.extend(['-L', L])
+        cmd.extend(libs)
+
+        # Call cmdline hook
+        cmd = self.hook.get_cmdline_linker(cmd)
+
+        # Split link command to linker executable + response file
+        link_files = join(dirname(output), ".link_files.txt")
+        with open(link_files, "wb") as f:
+            cmd_linker = cmd[0]
+            cmd_list = []
+            for c in cmd[1:]:
+                if c:
+                    cmd_list.append(('"%s"' % c) if not c.startswith('-') else c)
+            string = " ".join(cmd_list).replace("\\", "/")
+            f.write(string)
+
+        # Exec command
+        self.default_cmd([cmd_linker, "@%s" % link_files])
 
-        self.default_cmd(self.hook.get_cmdline_linker(self.ld + ["-T%s" % mem_map, "-o", output] +
-            objects + ["-L%s" % L for L in lib_dirs] + ["-Wl,--start-group"] + libs + ["-Wl,--end-group"]))
+    @hook_tool
+    def archive(self, objects, lib_path):
+        archive_files = join(dirname(lib_path), ".archive_files.txt")
+        with open(archive_files, "wb") as f:
+            o_list = []
+            for o in objects:
+                o_list.append('"%s"' % o)
+            string = " ".join(o_list).replace("\\", "/")
+            f.write(string)
+
+        # Exec command
+        self.default_cmd([self.ar, 'rcs', lib_path, "@%s" % archive_files])
 
     @hook_tool
     def binary(self, resources, elf, bin):
-        self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, "-O", "binary", elf, bin]))
+        # Build binary command
+        cmd = [self.elf2bin, "-O", "binary", elf, bin]
+
+        # Call cmdline hook
+        cmd = self.hook.get_cmdline_binary(cmd)
+
+        # Exec command
+        self.default_cmd(cmd)
 
 
 class GCC_ARM(GCC):
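Note that all three toolchains now build their compile options the same way: `get_compile_options()` turns the symbol list into `-D` options and pushes the include paths into a response file returned by `get_inc_file()` (defined in the shared `mbedToolchain` base class, outside this diff), referenced as `--via file` for armcc, `@file` for GCC, and `-f file` for IAR below. A sketch of what such a helper could look like; the file name, quoting and caching details here are assumptions, not the actual base-class implementation:

```python
from os.path import join

def write_inc_file(build_dir, includes):
    # Dump one -I option per include directory into a response file
    # and return its path; the compiler command line then stays short
    # no matter how many include directories the build collects.
    inc_file = join(build_dir, ".includes.txt")
    with open(inc_file, "wb") as f:
        f.write(" ".join(('-I%s' % i).replace("\\", "/") for i in includes if i))
    return inc_file

# The compiler invocation then carries a single short option, e.g.:
#   armcc ... --via BUILD/.includes.txt
#   arm-none-eabi-gcc ... @BUILD/.includes.txt
#   iccarm ... -f BUILD/.includes.txt
```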
"--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions", "--guard_calls"] + c_flags + self.cc = [join(GOANNA_PATH, "goannacc"), '--with-cc="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT, "--vla"] + c_flags + self.cppc = [join(GOANNA_PATH, "goannac++"), '--with-cxx="%s"' % main_cc.replace('\\', '/'), "--dialect=iar-arm", '--output-format="%s"' % self.GOANNA_FORMAT] + ["--c++", "--no_rtti", "--no_exceptions"] + c_flags self.ld = join(IAR_BIN, "ilinkarm") self.ar = join(IAR_BIN, "iarchive") self.elf2bin = join(IAR_BIN, "ielftool") + def parse_dependencies(self, dep_path): + return [path.strip() for path in open(dep_path).readlines() + if (path and not path.isspace())] + def parse_output(self, output): for line in output.splitlines(): match = IAR.DIAGNOSTIC_PATTERN.match(line) @@ -95,28 +100,99 @@ def parse_output(self, output): match.group('message') ) - def get_dep_opt(self, dep_path): + def get_dep_option(self, object): + base, _ = splitext(object) + dep_path = base + '.d' return ["--dependencies", dep_path] - def cc_extra(self, base): - return ["-l", base + '.s'] + def cc_extra(self, object): + base, _ = splitext(object) + return ["-l", base + '.s.txt'] - def parse_dependencies(self, dep_path): - return [path.strip() for path in open(dep_path).readlines() - if (path and not path.isspace())] + def get_compile_options(self, defines, includes): + return ['-D%s' % d for d in defines] + ['-f', self.get_inc_file(includes)] + @hook_tool def assemble(self, source, object, includes): - return [self.hook.get_cmdline_assembler(self.asm + ['-D%s' % s for s in self.get_symbols() + self.macros] + ["-I%s" % i for i in includes] + ["-o", object, source])] + # Build assemble command + cmd = self.asm + self.get_compile_options(self.get_symbols(), includes) + ["-o", object, source] + + # Call cmdline hook + cmd = self.hook.get_cmdline_assembler(cmd) + + # Return command array, don't execute + return [cmd] + + @hook_tool + def compile(self, cc, source, object, includes): + # Build compile command + cmd = cc + self.get_compile_options(self.get_symbols(), includes) + + cmd.extend(self.get_dep_option(object)) + cmd.extend(self.cc_extra(object)) + + cmd.extend(["-o", object, source]) + + # Call cmdline hook + cmd = self.hook.get_cmdline_compiler(cmd) + + return [cmd] + + def compile_c(self, source, object, includes): + return self.compile(self.cc, source, object, includes) + + def compile_cpp(self, source, object, includes): + return self.compile(self.cppc, source, object, includes) + + @hook_tool + def link(self, output, objects, libraries, lib_dirs, mem_map): + # Build linker command + map_file = splitext(output)[0] + ".map" + cmd = [self.ld, "-o", output, "--skip_dynamic_initialization", "--map=%s" % map_file] + objects + libraries + + if mem_map: + cmd.extend(["--config", mem_map]) + + # Call cmdline hook + cmd = self.hook.get_cmdline_linker(cmd) + + # Split link command to linker executable + response file + link_files = join(dirname(output), ".link_files.txt") + with open(link_files, "wb") as f: + cmd_linker = cmd[0] + cmd_list = [] + for c in cmd[1:]: + if c: + cmd_list.append(('"%s"' % c) if not c.startswith('-') else c) + string = " ".join(cmd_list).replace("\\", "/") + f.write(string) + + # Exec command + self.default_cmd([cmd_linker, '-f', link_files]) + + @hook_tool def archive(self, objects, lib_path): + archive_files = join(dirname(lib_path), ".archive_files.txt") + with open(archive_files, 
"wb") as f: + o_list = [] + for o in objects: + o_list.append('"%s"' % o) + string = " ".join(o_list).replace("\\", "/") + f.write(string) + if exists(lib_path): remove(lib_path) - self.default_cmd([self.ar, lib_path] + objects) - def link(self, output, objects, libraries, lib_dirs, mem_map): - args = [self.ld, "-o", output, "--config", mem_map, "--skip_dynamic_initialization", "--threaded_lib"] - self.default_cmd(self.hook.get_cmdline_linker(args + objects + libraries)) + self.default_cmd([self.ar, lib_path, '-f', archive_files]) @hook_tool def binary(self, resources, elf, bin): - self.default_cmd(self.hook.get_cmdline_binary([self.elf2bin, '--bin', elf, bin])) + # Build binary command + cmd = [self.elf2bin, "--bin", elf, bin] + + # Call cmdline hook + cmd = self.hook.get_cmdline_binary(cmd) + + # Exec command + self.default_cmd(cmd) diff --git a/workspace_tools/upload_results.py b/tools/upload_results.py similarity index 99% rename from workspace_tools/upload_results.py rename to tools/upload_results.py index 695c849df1d..f07efb11acf 100644 --- a/workspace_tools/upload_results.py +++ b/tools/upload_results.py @@ -370,4 +370,4 @@ def main(arguments): args.func(args) if __name__ == '__main__': - main(sys.argv[1:]) \ No newline at end of file + main(sys.argv[1:]) diff --git a/workspace_tools/utils.py b/tools/utils.py similarity index 79% rename from workspace_tools/utils.py rename to tools/utils.py index 21f0e1496b0..861366cf9e5 100644 --- a/workspace_tools/utils.py +++ b/tools/utils.py @@ -21,7 +21,8 @@ from shutil import copyfile from os.path import isdir, join, exists, split, relpath, splitext from subprocess import Popen, PIPE, STDOUT, call - +import json +from collections import OrderedDict def cmd(l, check=True, verbose=False, shell=False, cwd=None): text = l if shell else ' '.join(l) @@ -34,8 +35,12 @@ def cmd(l, check=True, verbose=False, shell=False, cwd=None): def run_cmd(command, wd=None, redirect=False): assert is_cmd_valid(command[0]) - p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=wd) - _stdout, _stderr = p.communicate() + try: + p = Popen(command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=wd) + _stdout, _stderr = p.communicate() + except: + print "[OS ERROR] Command: "+(' '.join(command)) + raise return _stdout, _stderr, p.returncode @@ -170,3 +175,23 @@ def check_required_modules(required_modules, verbose=True): return False else: return True + +# Utility function: traverse a dictionary and change all the strings in the dictionary to +# ASCII from Unicode. Useful when reading ASCII JSON data, because the JSON decoder always +# returns Unicode string. +# Based on http://stackoverflow.com/a/13105359 +def dict_to_ascii(input): + if isinstance(input, dict): + return OrderedDict([(dict_to_ascii(key), dict_to_ascii(value)) for key, value in input.iteritems()]) + elif isinstance(input, list): + return [dict_to_ascii(element) for element in input] + elif isinstance(input, unicode): + return input.encode('ascii') + else: + return input + +# Read a JSON file and return its Python representation, transforming all the strings from Unicode +# to ASCII. The order of keys in the JSON file is preserved. 
diff --git a/workspace_tools/project.py b/workspace_tools/project.py
deleted file mode 100644
index 7bf165b0bd0..00000000000
--- a/workspace_tools/project.py
+++ /dev/null
@@ -1,196 +0,0 @@
-import sys
-from os.path import join, abspath, dirname, exists
-ROOT = abspath(join(dirname(__file__), ".."))
-sys.path.insert(0, ROOT)
-
-from shutil import move, rmtree
-from optparse import OptionParser
-
-from workspace_tools.paths import EXPORT_DIR, EXPORT_WORKSPACE, EXPORT_TMP
-from workspace_tools.paths import MBED_BASE, MBED_LIBRARIES
-from workspace_tools.export import export, setup_user_prj, EXPORTERS, mcu_ide_matrix
-from workspace_tools.utils import args_error
-from workspace_tools.tests import TESTS, Test, TEST_MAP
-from workspace_tools.targets import TARGET_NAMES
-from workspace_tools.libraries import LIBRARIES
-
-try:
-    import workspace_tools.private_settings as ps
-except:
-    ps = object()
-
-
-if __name__ == '__main__':
-    # Parse Options
-    parser = OptionParser()
-
-    targetnames = TARGET_NAMES
-    targetnames.sort()
-    toolchainlist = EXPORTERS.keys()
-    toolchainlist.sort()
-
-    parser.add_option("-m", "--mcu",
-                      metavar="MCU",
-                      default='LPC1768',
-                      help="generate project for the given MCU (%s)"% ', '.join(targetnames))
-
-    parser.add_option("-i",
-                      dest="ide",
-                      default='uvision',
-                      help="The target IDE: %s"% str(toolchainlist))
-
-    parser.add_option("-c", "--clean",
-                      action="store_true",
-                      default=False,
-                      help="clean the export directory")
-
-    parser.add_option("-p",
-                      type="int",
-                      dest="program",
-                      help="The index of the desired test program: [0-%d]"% (len(TESTS)-1))
-
-    parser.add_option("-n",
-                      dest="program_name",
-                      help="The name of the desired test program")
-
-    parser.add_option("-b",
-                      dest="build",
-                      action="store_true",
-                      default=False,
-                      help="use the mbed library build, instead of the sources")
-
-    parser.add_option("-L", "--list-tests",
-                      action="store_true",
-                      dest="list_tests",
-                      default=False,
-                      help="list available programs in order and exit")
-
-    parser.add_option("-S", "--list-matrix",
-                      action="store_true",
-                      dest="supported_ides",
-                      default=False,
-                      help="displays supported matrix of MCUs and IDEs")
-
-    parser.add_option("-E",
-                      action="store_true",
-                      dest="supported_ides_html",
-                      default=False,
-                      help="writes workspace_tools/export/README.md")
-
-    (options, args) = parser.parse_args()
-
-    # Print available tests in order and exit
-    if options.list_tests is True:
-        print '\n'.join(map(str, sorted(TEST_MAP.values())))
-        sys.exit()
-
-    # Only prints matrix of supported IDEs
-    if options.supported_ides:
-        print mcu_ide_matrix()
-        exit(0)
-
-    # Only prints matrix of supported IDEs
-    if options.supported_ides_html:
-        html = mcu_ide_matrix(verbose_html=True)
-        try:
-            with open("./export/README.md","w") as f:
-                f.write("Exporter IDE/Platform Support\n")
-                f.write("-----------------------------------\n")
-                f.write("\n")
-                f.write(html)
-        except IOError as e:
-            print "I/O error({0}): {1}".format(e.errno, e.strerror)
-        except:
-            print "Unexpected error:", sys.exc_info()[0]
-            raise
-        exit(0)
-
-    # Clean Export Directory
-    if options.clean:
-        if exists(EXPORT_DIR):
-            rmtree(EXPORT_DIR)
-
-    # Target
-    if options.mcu is None :
-        args_error(parser, "[ERROR] You should specify an MCU")
-    mcus = options.mcu
-
-    # IDE
-    if options.ide is None:
-        args_error(parser, "[ERROR] You should specify an IDE")
-    ide = options.ide
-
-    # Export results
-    successes = []
-    failures = []
-
-    for mcu in mcus.split(','):
-        # Program Number or name
-        p, n = options.program, options.program_name
-
-        if n is not None and p is not None:
-            args_error(parser, "[ERROR] specify either '-n' or '-p', not both")
-        if n:
-            if not n in TEST_MAP.keys():
-                # Check if there is an alias for this in private_settings.py
-                if getattr(ps, "test_alias", None) is not None:
-                    alias = ps.test_alias.get(n, "")
-                    if not alias in TEST_MAP.keys():
-                        args_error(parser, "[ERROR] Program with name '%s' not found" % n)
-                    else:
-                        n = alias
-                else:
-                    args_error(parser, "[ERROR] Program with name '%s' not found" % n)
-            p = TEST_MAP[n].n
-        if p is None or (p < 0) or (p > (len(TESTS)-1)):
-            message = "[ERROR] You have to specify one of the following tests:\n"
-            message += '\n'.join(map(str, sorted(TEST_MAP.values())))
-            args_error(parser, message)
-
-        # Project
-        if p is None or (p < 0) or (p > (len(TESTS)-1)):
-            message = "[ERROR] You have to specify one of the following tests:\n"
-            message += '\n'.join(map(str, sorted(TEST_MAP.values())))
-            args_error(parser, message)
-        test = Test(p)
-
-        # Some libraries have extra macros (called by exporter symbols) to we need to pass
-        # them to maintain compilation macros integrity between compiled library and
-        # header files we might use with it
-        lib_symbols = []
-        for lib in LIBRARIES:
-            if lib['build_dir'] in test.dependencies:
-                lib_macros = lib.get('macros', None)
-                if lib_macros is not None:
-                    lib_symbols.extend(lib_macros)
-
-        if not options.build:
-            # Substitute the library builds with the sources
-            # TODO: Substitute also the other library build paths
-            if MBED_LIBRARIES in test.dependencies:
-                test.dependencies.remove(MBED_LIBRARIES)
-                test.dependencies.append(MBED_BASE)
-
-        # Build the project with the same directory structure of the mbed online IDE
-        project_dir = join(EXPORT_WORKSPACE, test.id)
-        setup_user_prj(project_dir, test.source_dir, test.dependencies)
-
-        # Export to selected toolchain
-        tmp_path, report = export(project_dir, test.id, ide, mcu, EXPORT_WORKSPACE, EXPORT_TMP, extra_symbols=lib_symbols)
-        if report['success']:
-            zip_path = join(EXPORT_DIR, "%s_%s_%s.zip" % (test.id, ide, mcu))
-            move(tmp_path, zip_path)
-            successes.append("%s::%s\t%s"% (mcu, ide, zip_path))
-        else:
-            failures.append("%s::%s\t%s"% (mcu, ide, report['errormsg']))
-
-    # Prints export results
-    print
-    if len(successes) > 0:
-        print "Successful exports:"
-        for success in successes:
-            print "   * %s"% success
-    if len(failures) > 0:
-        print "Failed exports:"
-        for failure in failures:
-            print "   * %s"% failure