diff --git a/CI/setup_test_env.py b/CI/setup_test_env.py
index d143b6f..2edf49f 100644
--- a/CI/setup_test_env.py
+++ b/CI/setup_test_env.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
 import os
diff --git a/CI/tests_runner.py b/CI/tests_runner.py
index 0190d19..f68c2f5 100644
--- a/CI/tests_runner.py
+++ b/CI/tests_runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
 """
diff --git a/README.md b/README.md
index cca195d..1c4a55e 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
 
 Small cross-platform Python app that can create and update [PlatformIO](https://platformio.org) projects from [STM32CubeMX](https://www.st.com/en/development-tools/stm32cubemx.html) `.ioc` files.
 
-It uses the STM32CubeMX to generate a HAL-framework-based code and alongside creates the PlatformIO project with compatible parameters to bind them both together.
+It uses STM32CubeMX to generate HAL-framework-based code and alongside creates a PlatformIO project with compatible parameters to glue them both together.
 
 The [GUI version](/docs/GUI/README.md) is available, too (but read this main introduction first, please).
 
@@ -21,26 +21,36 @@ The [GUI version](/docs/GUI/README.md) is available, too (but read this main int
 ## Features
 
- - Originate the new full-fledged project in a single directory starting only from an `.ioc` file
- - Seamlessly update an existing project after the hardware changes by CubeMX
- - Quickly check the current state
+ - Originate a new full-fledged project in a single directory starting only from an `.ioc` file
+ - Seamlessly update an existing project after making hardware changes in CubeMX
+ - Quickly check the project's current state
  - Inspect tools (CubeMX, PlatformIO, etc.)
- - Clean-up the project
+ - Easily clean up the project
  - *[optional]* Automatically run your favorite editor or initiate a build in the end
  - *[optional]* GUI edition (see [the dedicated README](/docs/GUI/README.md) file) (please, read this main introduction first)
 
 
 ## Requirements:
 
-**OS:** macOS, Linux, Windows 7-10
+**OS:** Linux, macOS, Windows (a few latest versions, 7 and above)
 
 **Python:** 3.6+
 
-The app introduces zero dependencies by itself. Of course, you need to have all the necessary tools installed in order to perform the operations:
- - STM32CubeMX with the desired downloaded frameworks (F0, F1, etc.). All recent versions are fine (5.x, 6.x)
- - Java (JRE, Java runtime environment) for the CubeMX. Starting from CubeMX v6.3.0 Java is included in the bundle (in form of a `jre` folder sitting alongside the executable) so you don't need to install it by yourself from now on. Hence, it can be omitted in the `stm32pio.ini` config file **except Windows** where it is still **highly recommended** to run CubeMX via `java.exe`. As mentioned, Java exists already, the only difference is that it is still will be listed in the default `stm32pio.ini` config. You can refer to STM32CubeMX own documentation to obtain more information on current situation if, suddenly, something will change in this regard
- - PlatformIO (4.2.0 and above) CLI (most likely is already present if you have installed it via some package manager (`pip`, `apt`, `brew`, etc.) or need to be installed as a "command line extension" from the PlatformIO IDE (see its [docs](https://docs.platformio.org/en/latest/core/installation.html#piocore-install-shell-commands) for more information))
+The app introduces zero dependencies by itself.
+Of course, you need to have all the necessary tools installed on your machine in order to perform the operations:
+ - STM32CubeMX. All recent versions are fine (5.x, 6.x).
+
+   - CubeMX is written in Java, so a Java Runtime Environment (JRE) is required. Starting from CubeMX 6.3.0 it is included in the CubeMX installation bundle. If you are using an older version, either upgrade or install the JRE manually.
+
+   - The STM32CubeMX CLI (which is used by stm32pio) can be invoked directly (by calling the executable file) or through Java. The first case is obviously simpler and is the default way of operating on UNIX and macOS. On Windows, however, the latter is the only working option (for some reason), so a Java executable (either a command or a path) should be specified. As mentioned above, the way to obtain it differs depending on the CubeMX version, but the default settings do their best to figure out an appropriate setup, and most likely everything will just work out of the box.
-If you, for some reasons, don't want to (or cannot) install (i.e. register in PATH) command line versions of these applications in your system, you can always specify the direct paths to them overriding the default values in the project configuration file `stm32pio.ini`. Check the [config reference](/docs/CONFIG.md) to see all possible ways of telling stm32pio where the tools are residing on your machine.
+   - CubeMX embedded software packages of your choice (F0, F1, etc.) should be added into CubeMX. In case of their absence or a version mismatch, you will probably be prompted by CubeMX during the code generation stage.
+
+     For more information on how STM32CubeMX functions, please refer to its manual (shipped with the installation bundle) or the community forum.
+
+ - PlatformIO CLI. Its presence in your system depends on how you're using it:
+   - If you have obtained it via some package manager like `pip`, `conda`, `apt`, `brew`, `choco`, etc., most likely the `platformio` command is already in your `PATH` environment variable and you can start it from a command line. In this case you're good to go.
+   - If you're using PlatformIO IDE, the CLI extension should be installed in addition to your existing setup. See [PlatformIO docs](https://docs.platformio.org/en/latest/core/installation.html#piocore-install-shell-commands) for more information on how to do that.
+
+Either way, for every tool listed above, a direct path to the corresponding executable can be specified in case you cannot or don't want to register them in your `PATH`. Check the [config reference](/docs/CONFIG.md) to see all possible ways of telling stm32pio where the tools reside on your machine.
 
 ## Documentation
@@ -51,15 +61,7 @@ If you, for some reasons, don't want to (or cannot) install (i.e. register in PA
 
 ## Installation
 
-As a normal Python package the app can be run in a completely portable way by downloading (or cloning) the snapshot of this repository and invoking the main script:
-```shell script
-stm32pio-repo/ $ python3 stm32pio/cli/app.py
-stm32pio-repo/ $ python3 -m stm32pio.cli # or as the Python module
-any-path/ $ python3 path/to/stm32pio-repo/stm32pio/cli/app.py
-```
-Note: we will assume `python3` and `pip3` hereinafter.
-
-However, it's handier to install the utility to be able to run from anywhere. The PyPI distribution is available:
+The most straightforward way is to get the PyPI distribution:
 ```shell script
 $ pip install stm32pio
 ```
@@ -69,49 +71,59 @@ To uninstall run
 $ pip uninstall stm32pio
 ```
 
+As a normal Python package, the app can also be run in a completely portable way. Simply download or clone this repository and launch the main script:
+```shell script
+stm32pio-repo/ $ python stm32pio/cli/app.py  # call the file...
+stm32pio-repo/ $ python -m stm32pio.cli  # ...or run it as a Python module
+stm32pio-repo/ $ python -m stm32pio.cli.app
+any-path/ $ python path/to/stm32pio-repo/stm32pio/cli/app.py  # the script can be invoked from anywhere
+```
+
 
 ## Usage
 
 You can always run
 ```shell script
-$ stm32pio --help
+$ stm32pio
 ```
 to see help on available commands.
 
-Basically, you need to follow such a workflow (refer to the [example](/examples/cli) which explains the same just illustrating it with some screenshots/command snippets):
- 1. Create the CubeMX project (`.ioc` file) like you're used to, set up your hardware configuration, but after all save it with the compatible parameters
- 2. Run stm32pio that automatically invokes CubeMX to generate a code, creates the PlatformIO project, patches the `platformio.ini` file.
- 3. Work with your project normally as you wish, build/upload/debug etc.
- 4. When necessary, come back to the hardware configuration in the CubeMX, then run stm32pio again to re-generate the code
+Essentially, you need to follow this workflow:
+ 1. Create a new CubeMX project, set up your hardware configuration, and save it with compatible parameters. You'll end up with an `.ioc` file.
+ 2. Run stm32pio, which automatically invokes CubeMX to generate the code, establishes a new PlatformIO project with compatible parameters and applies the patch.
+ 3. Work with your PlatformIO project normally as you like: build/upload/debug, etc.
+ 4. When necessary, come back to the hardware configuration in CubeMX, then run stm32pio again to re-generate the code.
+
+Refer to the [example](/examples/cli) guide, which explains the same concepts in more detail and illustrates them with some screenshots/command snippets.
 
-See the [commands reference](/docs/CLI/COMMANDS.md) file listing the complete help about the available commands/options. On the first run, stm32pio will create a config file `stm32pio.ini`, syntax of which is similar to the `platformio.ini`. You can also create this config without any following operations by initializing the project:
+See the [commands reference](/docs/CLI/COMMANDS.md) for the complete help on available commands/options. On the first run in your project, stm32pio will create a config file `stm32pio.ini`, the syntax of which is similar to `platformio.ini`. You can also create such a config without any further operations by initializing the project:
```shell script
-$ stm32pio init -d path/to/project
+path/to/project $ stm32pio init
```
-It may be useful to tweak some parameters before proceeding. See the [config reference](/docs/CONFIG.md) showing meanings for every key.
+It might be useful to tweak some parameters before proceeding. See the [config reference](/docs/CONFIG.md) showing the meaning of every key.
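+
+To put the workflow above into commands, a typical session might look like this (the project path, the `fan_controller.ioc` file and the `nucleo_f031k6` board are just examples, substitute your own):
+```shell script
+path/to/project $ ls
+fan_controller.ioc
+path/to/project $ stm32pio new -b nucleo_f031k6  # generate CubeMX code, create the PlatformIO project, patch platformio.ini
+path/to/project $ pio run  # then build with PlatformIO as usual
+```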
 
 ## Troubleshooting
 
-If you're stuck and the basic logs doesn't clear the situation, try the following:
- - Run the same command in the verbose mode using the `-v` key:
+If you've encountered a problem and the basic logs don't clear up the situation, try the following:
+ - Run the same command in verbose mode by adding the `-v` key:
   ```shell script
-  $ stm32pio -v [command] [options]
+  $ stm32pio -v ...
   ```
-  This will unlock additional logs which might help to clarify
- - Validate your environment, i.e. check whether the stm32pio can find all the essential tools on your machine:
+  This will unlock extra logs that help to clarify what's wrong
 - Validate your environment, i.e. check whether stm32pio can find all the required tools on your machine:
   ```shell script
-  $ stm32pio validate -d path/to/project
+  $ stm32pio validate
   ```
-  This will print the report about the current set up according to your config `stm32pio.ini` file.
- - Use the dynamic help feature which outputs information specifically about the requested command, e.g.:
+  This will print a small report about the current setup according to your `stm32pio.ini` config file.
+ - Use the dynamic help feature that outputs information specifically about the requested command, for example:
   ```shell script
-  $ stm32pio new -h
+  $ stm32pio new -h  # "new" command manual
   ```
 
 
 ## Restrictions
 
- - The tool doesn't check for different parameters' compatibility, e.g. CPU/IO/etc frequencies, allocated memory and so on. It simply eases your workflow with these 2 programs (PlatformIO and STM32CubeMX) a little bit.
- - In order to add CubeMX middlewares to your build the manual adjustments should be applied, the stm32pio doesn't handle them automatically. For example, FreeRTOS can be added via PlatformIO' `lib` feature or be directly compiled in its own directory using `lib_extra_dirs` option:
+ - The tool doesn't check various parameters like clocks/pinout/peripherals/memory for compatibility. It just eases your workflow with those 2 programs (PlatformIO and STM32CubeMX) a little bit.
+ - In order to introduce some CubeMX middleware into the target build, manual adjustments should be applied; stm32pio will not handle them automatically. Tell PlatformIO what to link, set the necessary build flags, etc. For example, FreeRTOS can be added via the PlatformIO `lib` feature or be directly compiled in its own directory using the `lib_extra_dirs` option:
  ```ini
  lib_extra_dirs = Middlewares/Third_Party/FreeRTOS
  ```
-  You also need to move all `.c`/`.h` files to the appropriate folders respectively. See PlatformIO documentation for more information.
+  You also need to move all `.c`/`.h` sources into the corresponding directories. See the PlatformIO documentation for more information.
diff --git a/docs/CONFIG.md b/docs/CONFIG.md
index 96c86dc..7d3d772 100644
--- a/docs/CONFIG.md
+++ b/docs/CONFIG.md
@@ -29,6 +29,7 @@ Note: this is not an only source of the program settings but more like a "public
 > - [`ioc_file`](#ioc_file)
 > - [`cleanup_ignore`](#cleanup_ignore)
 > - [`cleanup_use_git`](#cleanup_use_git)
+> - [`inspect_ioc`](#inspect_ioc)
 > - [`last_error`](#last_error)
 
 
@@ -93,6 +94,11 @@ Boolean controlling the `clean` method: whether to utilize custom ignore list an
 #### Default
 `False`
 
+### `inspect_ioc`
+If true, the `.ioc` file will be analyzed during certain operations for potential incompatibilities with the recommended options (see the CLI example) and for board/MCU values that don't match `platformio.ini`.
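+Since this is a regular config option, the check can be turned off for a particular project, for example:
+```ini
+[project]
+inspect_ioc = False
+```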
+#### Default +`True` + ### `last_error` This isn't really a "parameter" and initially doesn't exist at all but appears in your config after some error occurs. This will contain an error string and its Python traceback so you can examine it later to solve the problem. This will be automatically cleared after the next successful operation on this project. #### Default diff --git a/docs/logging/logging.drawio b/docs/logging/logging.drawio index 14707c8..e223596 100644 --- a/docs/logging/logging.drawio +++ b/docs/logging/logging.drawio @@ -1 +1 @@ -7V1bl9o2EP41nLYPcGzLNx4XNiQ5Jc02254kTxwBwrhrW44sskt/fSVfwLYEOAVf2JA8rDWWbFkz82k0MxI9MPZf3hIYrj/gJfJ6mrJ86YH7nqYBdaixP5yyTSiaZVoJxSHuMqGpe8Kj+y9KiUpK3bhLFBUqUow96oZF4gIHAVrQAg0Sgp+L1VbYK741hA4SCI8L6InUz+6SrhOqrVl7+jvkOuvszao5TO74MKucfkm0hkv8nCOBNz0wJhjT5Mp/GSOPj142Lkm7yYG7u44RFNAqDTbr6fDTw8PceTv85v/+bXtH/GF/1zm6zb4YLdkApEVM6Bo7OIDemz11RPAmWCL+WIWV9nWmGIeMqDLiP4jSbcpNuKGYkdbU99K76MWlX3jzgZGWvubu3L+kT44L27SwwgGdQN/1OGGMN8RFhHX8D8SGdJR8BO/5wcFJSRFruUhrDZ+ev9w/WZOvH/8e9eH97DNxJ7sRoZA4iB6rqO6YyMQfYR9RsmUNCfIgdb8XewJTMXR29facYhcps+SMO/J2TfkOvU36KoqI7zI2CBzFG+q5ARrvVCQbzzH2MInrAPZ/wl8/cghcumh/L8AB53lECX5CuQar+B9/kOt5sgctYbTeCcl3RKjLtGoK58h7wJFLXRywe767XMYylVW481yH35hjSrHPbsCUsGB9YhwvyNHz2qXoMYQxQ58ZAqXflQqeqmXldCx4m4hVdwOHlQAvrWHI7/gvDgewAXyO9EFI8HKzoO8XvIujkCQXxToL7PswWM74wM5c3rVV3I3RQcnjX4heciRRdDLYtFMZ3GagkpafcwiUktZ58FGU86VNChOqeS0wwUadbHONePFr/t6+WVxqHF5AVXjRzoSXuOkdIXCbqxBiJqtR7skPnJATPcMoiF5fNUqzSqmBrilHG7CLpA974dt9zBnoBwT0i6gPtNDFgw11vcEDwf8wrJtix0HkbglDpqC//tbTTI+N+WjOWGs6NFbuuN4sfpsqCPlehA/BTU5cK4jQWbCglodaU0Rc0C0JMOi14QLoDi4oA1s/Dg2s8ICIyz6dTyRluNDaxgv5CFeFC9AGXKhmWSTBcbhgc9vRBmfDxbFBzKGFHznxhfltw+3v0cff99eCSJ/Q+7INMYeRuxhg9rYZM2g8Zn2dkpey8bOzig5AhESUfgA1bBE1hhLQAEZdoKG3Chp5yMjjhWrYAwMYedTQQY5yBDo6iRyVDY1WkEMwNLShWT8QiGZDGQge3n/kD4m4z4KvfHKoULYdVgT7s2gzZzbEAkX8u/8i7LEXQY9Y3D8xy2S8Q5DYqQK4BMXOFJULXZRIO7+cJwoynf+AOXIWsOysj4yDQBeBZTdB5JHFrM0cMbqJLPb/Rhark9Cii9AirWe2gSyGWTIxgHLcJjE042iDeqBIPwlF480cffjCnxNwPqUocwOkqm6TPjAlgCQzdeoDJHFJ2X2/STcxx2zKnDmL4aag1UW/xBQ7D26IuCuiZlfDeao0rLBoUGU+yNpcDdqRoYVhKIxnBmOLDfG2IwIXT1xsTg1skQtHh9krubI9tKK9Cp7uzKFNkkGrdfFXRkRL5KImcyTXBohZ3EzGRc+dDx7Two2dVdgpcwBK+alegJ9StFVtgZ8edhw3cAYMphP366+/ZCz+pX7Yu6yHVaIvplIT6snHd3hwfB8pQdB/BwMmXqSBCeWyIysJadU2snLTzL5C06xty+woBpw0zRJhbi1irpywze7dKIR0sWbKNcHEh5Ren14ZravV8KZWF1KrYUW1ymUTtbDiUa0bwy+UGqBVxVG7VRzVfsTkG6QR9ujqbD8VtI2loEPR9Sv3HoGqupVId9M+a9MCJeE7kXZjWGVpvXDazdFRzKn+YDDoxc7pOI3mD0FgmfbRkn+5kEmYphfmkwhTkpD4d3BlLUOOohrU7S8z1VLMYedmzvvLVAl4gLrAw2g3Fnad87Jc6quGu5KMibYMMSBGlMR5ebWbmGfOxh30rNFsFkAfzWY9676BKfo891cFd+YuCN3IDG3c8mIvpWRGVSU7N6h8npIZB5WsaX/cZVVJ4o9rWJWuZh3ZmEpIQp7yilarKiEG5jKVaNKNdlF1kLjRmtUGIMbJbtt7urO957yMXEUtSlv7+3uMWzDkUqAtCYbIKzYVDDnazWpOvHit8M33ur5AUHeAfCzhodG0FeMWD7mUZkniIfLMylbjIVk3r32FIOqSZJZqVpeyrKTu61JjidiSzeryik1tVj/azWteIgj6IFkjNKsO+u0IgNe7RgB619YI9tWgb9ctGV2yNU9eUW8VtcXdcqfWCA0G+89TL6PCQkGX7V2rDc3ta9wq0k31qhqv01uN1+livK6gSzAMB6PNaoVIlMtMvJbVg6BgkvmrYQXT2lUw7RVpWNVgnd7KDlDVKm1NHjaQG2O3e6pBmnf1c4mX3c6hJ0PdaFy8dDHw3Ins9cvOEpI1dcOTxC2h41JKXDV6rbcavdbF6PU0WeR8xuTpClVI5igAsk2B9fmlxNj1nx+mjPD27/fCYGbeF9ePD2LND13ZH0S5Nh10I+28RfFGzhFcPDkxo2T+qvhld1G4d4XBrLByXzhrR2l/7teU8pNm7/iQaJPFMlAG7gIHK5eJAOEuIEZdQgrZH05n08Yk8qHn9YP+iskvK2rMMJiYlq0qZv+ZNcPPfQ9u8Yb2Vc0ehNwPVZSSd8j7jvg3SlxXJZedoqhDAC4kSWbpMC9VsiaWuZzsugTJusaodDePkdOrhqX1ls6RKx8HNTQasKnEIPhVnCJ56ZV5ltTY2oxhdcjqep3HSOqVg+fn7i37n/ov7L4yGzgOLhuVn/NgSBEIhiIQNHoypHWNe1a6ueHN0JpS+PPSozRBA69k3SVYLPLDWIwGp1Gz3RT6/CSaVx61cPKhWWEO7aZGVY3aJjLdvNM720GxE0itAb+kcfpI1ds5hj8KLeXNvdLT1xo9yDD7VZsuzMxnWugFbMmZ623DS9WodUvwYoLyfKcXLPQ
KLdQmAiXG6YNVX7NNb5a5ZLVs05vaq0GObtolploROECr2WRZN39OvVSUMhZKTpdpVi9bzld4/XpZdb0AWl2Bm6cN+Fesl0CwUdrWS6tDlvaV+8Csyomg7QS9bLsc9GrAPrZE+/h2hlPCjvKh56om2s7NHuJki+B8Y5aUWaraOq/EtJfx9H3yoRHPUTnAqarsKTm3UyjX9J6YJXKQcQKLL8AKUPZW6ZIpVG304ERbDOzHuUevnRWq8GucdfKCFfe/Xp1MT/sfAQdv/gM= \ No newline at end of file +7V1rc5u4Gv41mXbPjD3mYmx/jJOm7W66zWm20/ZTRrZlWxtAVOBczq8/EkjcJGxqc01z9kxjhAAhPe+j94Z0Zlw4T+8J8Laf8AraZ/po9XRmXJ7puq5NpvQPK3nmJSPTiEo2BK2iMi0puEX/g7xwxEt3aAX9TMUAYztAXrZwiV0XLoNMGSAEP2arrbGdfaoHNlAquF0CWy79hlbBNiqd6pOk/ANEm614smbNojMOEJX5m/hbsMKPqSLj3ZlxQTAOol/O0wW0We+Jfomuuyo4GzeMQDcoc8Hs/vH75f3k6sfnr/MBuLz7RtDVQNzmAdg7/sZ+sIKE8DYHz6Ij8C6wkQsv4n4enRnzNXaDC2xjEtYx6H9X7OHzDQErBJNzLnbpbeZ+QPA9TF2wDv/HboRsW3WjFfC3cMWf9gBJgOjQXIMFtG+wjwKEXXrOQasVa2Zc4dxGG3ZigYMAO/QE4AVL2iZInzHfBo5NjzX683GLAnjrgSV70UcKY/5eHIuaLo55X7BrfFoduRt6ZLCjLfDYGedpw6RgCB59c+gRvNotg49L1sS5R6If2TpL7DjAXd2xjr1DrGnrsBlzeWzFONE3hE+pIj7W7yF2YECeaRVxdsxxxyVPM/nxYwrGvGibRvCIFwIuOZv41gm46A+OrwLojq/hvf3h9s+HOfD//IreDa63A03CFFxRWeOHmARbvMEusN8lpXOCd+4qBkBS5xpjj4/FvzAInvlggV2As6MLn1DwnV0+HPOjH6kzl0/8zuHBszhw6eumLmKHP9LnksvCI3Fd9H7spQpFVPQB3pEl3CeXnCUDQDYw2FdRV6OAQBsE6CHbklOGdG8zM/ThGLqHMAU3gUMbb4ZnumXTV5gv6I8N+3FD8L+UQ67xZkOFkZ+lz48rvJUu8aJL7mhrFPVDMTXMsfVHfCGJz/ESJsJsAgC8362fO8a88w/QfoCMNZIicen5CniMLmgn+Kz3KLgGSwJBQAFJy9b0Hxezm25oR7v079sVXIOdzYoWcAseECZ/pNobNUE0KycLCdKLSCmFananK+Agm0HvgoIJhc38Gz5WRB56ljwG+khmD2ukoI9xbewxbZU+UuQR8wXrLE3QSsgCU3HuBhJE35vNNSdRyt6B9ik7BOdMyUlPsbTsCrG+PYGTdtvr2Zebm8Xm/eyn89fP53PizAalKcloiJL2tTLFSEcxQApnopDN0gOB9XNaRR/qMmkkJR9dOhihxKfuultQKltCnxGKQ/+E6ud5IUko2IzfjJMoEz/gh4QTUtBihyL+YT1+Qzt8jYnz8XOmnQUMGdPpmmDnLt1SJeP+Q2gH50XyAG/lNaUF8NFyGErpF/oyF8C2qZ7J9D4mDgYDcGg5hFLmR0LKfi4iub5elCLEtY28D7xiHeRoTGVy1GcKcpzWRo6zjpKjNfx9+NGU+VFZz2qTHs1CeqQ2javkPdbHgzXvZMZ79BRwvLDzDMNkmEgxaPYMqx3+W448qXUZIHeHd779HLaFalsOM7RisvSzbBk1+jBbXuwW8NP30E+wgkxjgy4kILQfuXa3wfQ5w+Gwl0ypTfRQbiKytE4hywoI0jQ6R5DCUOmV8Tk5ngoTnaZxGrR6oSZaxxiu1GK9QVQsVSarwvT8Z0vpKzZYjzBAB1TqYdrizSqHzOlJRTJwgAtCS7rLJqVp5U1KFSkYNZmUSgzoMghOGKykhA4JJJQn2AjZzA1aZk7JjW6CRhsN/1OpW8OGD8xLX84tIeah5Y7Yz3MClvdMqA+BKYu8vdCyc85cG67ZAw76eoVLl0RAkXy/ccUKsDvNQddSIFcB3HiKqx65kyqRG3qeQ7yWtEKzVPmKzm6hU+WrU8EztmMrdwsLX10KnnQ+3TAFm+oEkd/37RuBojfVem1TSn+OfAccWyM773nusWdWwUUNO2Yn7arWenndugUVWS0fs7I6cgEYSuvI4aX0xcBzqoJHTd3AT935hhWkMDbLYmw8yoEkumECmbhlx6PIGPfQQDMo0f2ChRYe5T1f/cPk6ERMnja1zAqnltvQW/QBuHRmJcwe6xOPayVp3KxLpWw5vtZAeP64IT0sONOSgiOgmxPXBiRJ7RJrecB/Yd6uZMRb5FYh62mIqMfEbBMRwvnSFiImr4iQETFuFRF6/yaFDBwSdLwUQBitmATjfDahkUs2PVA/l5xakw3RvyCP9sLRGjkqG0er+YtoNdtAq9Wh2XZUklyHU22Shqx2ALCdNnlVKN6jubeWZMub+WuxykvkeyBYbqllfIWJA4KgINe2Wu/rLbThMgjTZVlqBfA8gj2CQMCyLtaKhvTH9Zp3tE9kk900FSa7VZvJ3rLntbcmu66XFfypkm5aYwJdYoI9cZUhT5r3Kw6w3NLn2TAIU6dstCCAPMeRFYYtYj9HKVsvKcwiVIhG4ixKaR/30UPesLSrM8LLJmaKGETz0r633SWlfbNDQzrdvum443uaD1crplFDNY3W5vket6yI91ewxmUFy+qWYI0lwcomQfNMlKK4UkH2ca+kThFvaljqeqO81iU9ikRZdcVJt6RHzpoUkhIbml2PvubFYdy2NBhyQt/rd/gv4zv8kZZzRiqot7YP8dXU+xrqP5KyFaF+dcXWQv172/0LlsRPx+66JaHp5mFTQila9Sk17X6H2WPJUmSfqb9fHHVLsuR0tJduSship5jQmhU74WHvvtjVJD6iZw+Lj9Yp8RHt7rMtIcmDwphoVhzEIL8aEy/OmDDymQ2tGxPT3nBvx1QeU7Gejrqi2S3OllfYOWRMpAORnabyfCKQyqKIP2RuhMunL37VvLrEq2zUz+xW1M+Uo34ZWVIm+8x36zUkfirnh9sYRRk//ZJCxSTXsBTq7Uph099KVCiGZWOEZuUxwqNSR7XJKAu+WbWpoGp4me3Cq+kPL1qA19ToBrxmosUNwsuU490VJ5D2ajpR2OcNzyavCSdHSnvZkLnZrZC5uWe1qUKd7joyqr5hcv9CBE/lrDBUK5PU5xmTw+z//XRNC95//Sh1pvAAISdcpDXddXmfVMBksNCVFXuswtVk5mB5vwkHSuUzCx927nuJOw6IgzV6YkM75+253AaBFy2HeEX/v1y5oyFaYneNWA4uc0PR0hUIAP3Dyn0WZXCAbQ/cwZqinh6yz02vrMlUG1mDR3oZfhzY4BnvgoGmT4ce84UVf6yRd5/l3IajkTYzjIqQZOUWD9MUdrnK7VXFeoJKIE36GELXO8HhZWPoZkc0Nmuc1dgGs3ED
KpscsT92YX3lUnPJJCLdpPxS+72afAaaSPtsbfaZdEjvGw2p0bmXN/av0twNLimdNVD5dzxHcon0WYllNUAmcpKC42/CH+Lbns9/pb/zyYH0qKWKMX3a3TJepngvAupcHU9BBDOZCFSLEQu+qJ4H+vglzwmLEVcn8GO9NYE/LS9ML57OmQG4C5AtT+c9Nv0kpUm9KOW4wdnXaveDg/Tcm5a57AYyVomptxOCWDZ4LZDf9sw7Ed+bxICseI2Hvd20Z+aNdyZA7HvaZCOBwgWj83sO9GknlkqoZZKbz1ULiWuqCb22D/HFno9dmNBPVOwz3JLS8huml7LB+67Qi2Xk5zszo9iXuEJrIhQ0lrMLfidTwMqP0qRtU6BDzNEvU2BSOr+nI37E6TTvR2xA3ieyvId7HI2El+9vCX9sR5GcVGcyfHnabzq5lxdJCbmFwqyikSyqa1cjprO8W0CXuSCOWGXIoLa4sKwtvg6WcrA0rfWxkiOJF9cfo7fyw23F1CNVdnhyxjqnct08kwNvhQMnDXEFQ2HktW9TkU6hNbpy9VSOlYTh3Jc+FJqZ9yabikWKahsL9WYQ8gc9J6w0dY0zO//ROfwJOJ4NfdkqfsvMYlpjhVgmLAwfFekQ/p7Vrw5uw/R77Kpzyl4rZfsxJ4MvZeceSVwVQl0owflFWZT7JKq+5qlvMxe9VXvkrNBJmttItl4/aTrNRWk8HDRZ1J2rlTRZjFaXgRfNrHpbvFvavoTOxeuw/Q1HFBD0ny0rBJ53xp2h0SattuwMPUQu3d5b9TTKkBZyGivSGupahlSNl3az1ws5w6DjNus/aZSNtESLfLRGGrLtWglpvLfxItxJExKCiWCGiCpSqZvtMoQ5jZDWDYpQLNCg4AhlvmRtHKF3VK9IYiUhQUxiLaOZ1Kf6FjxXj0LZBc9P3pv5KBepLkX2jGpdpHs75VTmSmFbFJbb9P4L3OxsEHHbgO9PL5SkzGb3+83HVEiZJYFvwhZxixK56Izvc++Dh3BL6UojPIXMOEto8aTA8dpG3gdesRKKzCNtxkRf4kilHhXXrJ4lZW/9CZMoV58H3o542Ge7lgOHjaa78L3iqbKN9VdOGsu8Fa2NddVQjlXe4doGUv7iTtmvoxOGdx2qQ0kiSOIVO6T8dGr4xvlPO0yFa1/1eV5tPpBqN9emnbN4wdJnjLJRzTinL+PCqikys2eDLEUUDVOrgb17vAuJL3VtB8Npsll5mrhND49XVZE0ekgwE4ZEXWM6wyeq4rAa/wc= \ No newline at end of file diff --git a/docs/logging/schema_cli.drawio b/docs/logging/schema_cli.drawio new file mode 100644 index 0000000..1943a74 --- /dev/null +++ b/docs/logging/schema_cli.drawio @@ -0,0 +1 @@ +7Vxrc9u2Ev01mqZ3RhrxoddHS47j9Lq3nridJp88kAhRqEGCAUHb6q+/CxJ8QxIjUZLlNpmxiQVIAtizB9jF0h1r5r1+4ihY/cocTDtm33ntWNcd0zSN0Rh+SclaSfq2lUhcTpxEZuSCB/I3VsK+kkbEwWGpoWCMChKUhQvm+3ghSjLEOXspN1syWn5rgFxcEzwsEK1L/ySOWCXSsTnK5beYuKv0zcZwktR4KG2sRhKukMNeCiLrY8eaccZEcuW9zjCVs5fOS3LfzYbarGMc+6LJDZOnl6/XT6Obb7/9Me2i68c/Obnppo95RjRSIw6FgzlXfRbrdCJYJCjx8Syb537Hmi6ZL2aMMh63seD/jXz51OXIITiv85kPj5mGgrMnXLhhGf+TDyKU6h7koHCFHfW2Z8wFAdXcoTmm9ywkgjAf6jziOLKbWYMrSlxZMWdCMA8qkBIsoE8Y3jFdCY9C2YDLlxUR+CFACznQF4CxGpfComGmZTUX8p4QmhPfhZIlSysUyBrv1ZVW0EMvod0LOHOihfi8kF2cBjy5KLdZMM9DvvMoJ/aRyK4t425M67pN9QQjxK8FkdL1J8w8LPgamqS1A4U7ZXmGrcovBRgr0aqI4L4SImU5bvboHFxwofC1AbqDO/xEbx9+eZ6i8Jc/yMfu3apr1DCFHbA1VWRcrJjLfEQ/5tIpZ5HvZADI29wxFihd/IWFWCtloUiwsnbxKxFf5e29gSp9K9Rcv6onx4V1WvBhuIWbZPFbsS6/LS6l9yXjk4PaaKLpHLCIL/A2u1QsKRB3sdjW0NSjgGOKBHku9+QQlW7tZok+PMsMCANwc9yjzO11zCGFIUzncOHKi3vO/gIOuWOuC8aoauH9WYMPtVuC5JZH6I2mfWymlj0Y/pzdyLM6JZEmLBcApOZ9+D2SzDu9xfQZS9bIRemtVw4KJF3AJIRy9gBc3QXHSAAgQbaEHz6TD3Vhon34/cHBSxRRKZrjFXomjP9c6G/ShbRbFVvIkb6JlAqolk+6QR6hEnozABOJu/k//NISeZhl8uia/Tp7DPsa+hgcjT3GZ6WPAnlkfCEny0hpJWaBcVp3jzmBccu15iBK2aroENhBXMlNTnGJBdkNkXN7ACdFq7vJl/v7uftp8t377/f1Ffcm3caUZJ2Ikrb1ssBIezFAAWepUK7S3RTrV9DE7Jl10sgln31QRmzxhadGc6CyBQ4loXjwK95+Xm0kCQ2bqYcpEpXmh8KYcGIKmkck4R854/cw4UvGvc+/lfq5gSEzOl1y5j0We6pl3N85THDVJHfwVnWnNEchWfRiK/0Cg5khSmGfKfd90hwsCeDYc4itLEyMVF7OE7u+mzcixCUlwa1qeAxytMZ1cjQnGnIcH40cJ2+UHIe9fw4/2nV+1LYbnpMe7Y30CD6Nr+U9OcfdpZpkyXtQhbwgnjzLsiUmCgxarpGt45/NyBO8S0H8iEUhXcd9gd2WJx2tjCzDMlsmnd7NlrNojn/9GscJHCx3bNjHHMX+o9rduQze0+v1LpIpjZEZ201ClsNDyLIFgrStN0eQqaNyUc7naH8qzPc0J6fB4UVsE4f7OK7gsd4TMEudy6pxPX9fAX1lDuseDmgXrB4XPd7y5lAGPcEkhYd8FHvSb9mltIdVl1JHCtaRXEotBsw6CA5QVi4BlWAOPCE1RGUYtMmaUtFujkZKev9pNaxB8bOM0jcLS6Tr0CLidD3laPEkjXoXmMrI2wotWgnmUryUL9gZ601DujwBSi32mzVsAbvjCnSHGuRqgJstce0jd9QmcuPIc4zXhl5omSr/RefbQqcuVqeDZ+bHth4WTmN1BXjCeurKDTbsCZK474efUhT91G7UtrDpr5BvV2GrT6uR5wuOzGq46MSB2dF5t9Zm8731GbbIevuYNN0jbwBD4z1yfCsMDK0LDQJwdUVYePK9FBQwNiljbNCvgCR5YA6ZrGf7o8gaXKCDZgHR/YCHFpeqka/Lw2T/QEwetrRMNi4tD3G06Bb5sLJy6Y9dEo8bDWncPtaW8sznayc4nt9PpbsNZ9zQcFLoVsz1BJakD4m
dWeE/sG63ovEzcmtq60WI6HVinxMRafDlXIgY/YuIOiIGZ0WEeXmLQgkOOTreCyCss7gEg2o2oVVJNt3RvpKceiQf4vIOeYx3jtYkUHlytNo/iFb7HGgdvqHVtt+QXHtjY1SErLEDsG/a5dWheMvO/WxJtqqbP3ZWeU3CAInFCjzjG8Y9JMSGXNt2o68PmOKFiNNlZWoFCgLOAk6QkFkXS01HLif0Wg20j+ouu21rXPbh0Vz2M0deL9ZlN82mhj/W0s3ZmMCsMcGWc5WeSpoPWz5geYD3USzi1ClK5hzxdXayIrHF6TpJ2XpPxyzpFuIk5yxaax/Xz3xnd5+TUYVxIltlQmVqSKf60ZVaVYvfWylRZXaV/Zt2Wi58/rTxNLP2nVULqrBGFVXYg7oqjGPFSvVHXvUV+QB7ArvtFPIbgW1ekRdQHNat9oNMOIQWDlkucTynKduFW2x8Z7LJPyN34JAT5abzWDHB95KfULNWjU03zk/QZoPq8hOOdmSdrqNvLV1+bBc3TsNRtsPaki5vXkK+vF4LTd2vgzNF9woimNV1x7ROEBXQOHt7Mdfe3y99wW5EkQQF8bsqW/6gD5VmzF8SN+6RYn7ik47Kug/Rc5zgmnWiYpbtZslPJp0jfVB0EEHWv7acSNOvcaTWucxats+Sm78V2WMRTb64oN0g4gELZQ518qWIPw+DzStq6t5kIYwTnOsfpMvqamcMTJ0qBxpVWkdT5GCj21ia1/4B6l0yOY35pzA4bLpHelPqG1Sy1A27rjxjeCS3b0sEvS0rhMmZv2Prs/qDkvrMvsb2dK6idSz1WfU/sBJ/ZNZnYhWPPYuJhrWpVQ58cb1r6MvX/PCN+3udwso73Y3R+jbMbbxbX4bRjr6gmP+xnWS7lv/NIuvj/wE= \ No newline at end of file diff --git a/docs/logging/schema_cli.png b/docs/logging/schema_cli.png new file mode 100644 index 0000000..9199831 Binary files /dev/null and b/docs/logging/schema_cli.png differ diff --git a/examples/embedding/embedding.py b/examples/embedding/embedding.py index aedd532..e9df67a 100644 --- a/examples/embedding/embedding.py +++ b/examples/embedding/embedding.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python # -*- coding: utf-8 -*- # Import the core library containing the main class - Stm32pio - representing a single project diff --git a/stm32pio/cli/__main__.py b/stm32pio/cli/__main__.py index b7df651..e6551c3 100644 --- a/stm32pio/cli/__main__.py +++ b/stm32pio/cli/__main__.py @@ -1,13 +1,6 @@ -import pathlib import sys -MODULE_PATH = pathlib.Path(__file__).parent # module path, e.g. root/stm32pio/cli/ -ROOT_PATH = MODULE_PATH.parent.parent # repo's or the site-package's entry root -try: - import stm32pio.cli.app -except ModuleNotFoundError: - sys.path.append(str(ROOT_PATH)) # hack to run the app as 'python path/to/__main__.py' - import stm32pio.cli.app +import stm32pio.cli.app if __name__ == '__main__': diff --git a/stm32pio/cli/app.py b/stm32pio/cli/app.py index c266425..325fd33 100755 --- a/stm32pio/cli/app.py +++ b/stm32pio/cli/app.py @@ -1,138 +1,132 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import inspect import logging -import pathlib import sys +from pathlib import Path from typing import Optional, List -MODULE_PATH = pathlib.Path(__file__).parent # module path, e.g. root/stm32pio/cli/ -ROOT_PATH = MODULE_PATH.parent.parent # repo's or the site-package's entry root +MODULE_PATH = Path(__file__).parent # module path, e.g. root/stm32pio/cli/ + +# Package root, e.g. root/ (so if ran from sources, this is a repository folder; +# if the package is installed, it is a site-package entry +ROOT_PATH = MODULE_PATH.parent.parent + try: - import stm32pio.core.settings - import stm32pio.core.logging - import stm32pio.core.project - import stm32pio.core.util + import stm32pio.core.log except ModuleNotFoundError: sys.path.append(str(ROOT_PATH)) # hack to be able to run the app as 'python path/to/app.py' - import stm32pio.core.settings - import stm32pio.core.logging +finally: + import stm32pio.core.log import stm32pio.core.project + import stm32pio.core.settings import stm32pio.core.util +board_hint = "Type 'pio boards' or go to https://platformio.org to find an appropriate identifier" +no_board_message = f"PlatformIO board is not specified, it will be needed on PlatformIO project creation. 
{board_hint}" +init_message = f"project has been initialized. You can now edit {stm32pio.core.settings.config_file_name} config file" + + def parse_args(args: List[str]) -> Optional[argparse.Namespace]: """ - Dedicated function to parse the arguments given via CLI. - - Args: - args: list of strings CLI arguments + Parse command line arguments. - Returns: - argparse.Namespace or None if no arguments were given + :param args: list of CLI arguments + :return: argparse.Namespace or None if no arguments were given """ - root_parser = argparse.ArgumentParser(description=inspect.cleandoc(''' - Automation of creating and updating STM32CubeMX-PlatformIO projects. Requirements: Python 3.6+, STM32CubeMX, - Java, PlatformIO CLI. Visit https://github.com/ussserrr/stm32pio for more information. Use 'stm32pio [command] - -h' to see help on the particular command''')) + root = argparse.ArgumentParser(description=inspect.cleandoc(''' + Small cross-platform Python app that can create and update PlatformIO projects from STM32CubeMX .ioc files. It + uses STM32CubeMX to generate a HAL-framework-based code and alongside creates PlatformIO project with compatible + parameters to stick them both together. Both CLI and GUI editions are available. Visit + https://github.com/ussserrr/stm32pio for more information. Use 'stm32pio [command] -h' to see help on the + particular command''')) # Global arguments (there is also an automatically added '-h, --help' option) - root_parser.add_argument('--version', action='version', version=f"stm32pio {stm32pio.core.util.get_version()}") - root_parser.add_argument('-v', '--verbose', help="enable verbose output (default: INFO)", action='count', default=0) - - subparsers = root_parser.add_subparsers(dest='subcommand', title='subcommands', description="valid subcommands", - help="available actions") - - parser_init = subparsers.add_parser('init', - help="create config .ini file to check and tweak parameters before proceeding") - parser_generate = subparsers.add_parser('generate', help="generate CubeMX code only") - parser_pio_init = subparsers.add_parser('pio_init', help="create new compatible PlatformIO project") - parser_patch = subparsers.add_parser('patch', - help="tweak the project so the CubeMX and PlatformIO could work together") - parser_new = subparsers.add_parser('new', - help="generate CubeMX code, create PlatformIO project, glue them together") - parser_status = subparsers.add_parser('status', help="get the description of the current project state") - parser_clean = subparsers.add_parser('clean', help="clean-up the project (by default, it will ask you about the " - "files to delete)") - parser_validate = subparsers.add_parser('validate', help="verify current environment based on the config values") - parser_gui = subparsers.add_parser('gui', help="start the graphical version of the application. 
All arguments will " - "be passed forward, see its own --help for more information") - - # Common subparsers options - for parser in [parser_init, parser_generate, parser_pio_init, parser_patch, parser_new, parser_status, - parser_validate, parser_clean, parser_gui]: - parser.add_argument('-d', '--directory', dest='path', default=pathlib.Path.cwd(), - help="path to the project (current directory, if not given)") - for parser in [parser_init, parser_pio_init, parser_new, parser_gui]: - parser.add_argument('-b', '--board', dest='board', default='', help="PlatformIO identifier of the board") - for parser in [parser_init, parser_generate, parser_new]: - parser.add_argument('-e', '--start-editor', dest='editor', - help="use specified editor to open the PlatformIO project (e.g. subl, code, atom, etc.)") - for parser in [parser_generate, parser_new]: - parser.add_argument('-c', '--with-build', action='store_true', help="build the project after generation") - for parser in [parser_init, parser_clean, parser_new]: - parser.add_argument('-s', '--store-content', action='store_true', - help="save current folder contents as a cleanup ignore list and exit") - parser_clean.add_argument('-q', '--quiet', action='store_true', - help="suppress the caution about the content removal (be sure of what you are doing!)") + root.add_argument('--version', action='version', version=f"stm32pio {stm32pio.core.util.get_version()}") + root.add_argument('-v', '--verbose', help="enable verbose output (default level: INFO)", action='count', default=1) + + sub = root.add_subparsers(dest='command', title='commands', description="valid commands", help="available actions") + + # Primary operations + init = sub.add_parser('init', help="create config .INI file to check and tweak parameters before proceeding") + generate = sub.add_parser('generate', help="generate CubeMX code only") + pio_init = sub.add_parser('pio_init', help="create new compatible PlatformIO project") + patch = sub.add_parser('patch', help="tweak the project so both CubeMX and PlatformIO could work together") + new = sub.add_parser('new', help="generate CubeMX code, create PlatformIO project and glue them together") + status = sub.add_parser('status', help="inspect the project current state") + validate = sub.add_parser('validate', help="verify current environment based on the config values") + clean = sub.add_parser('clean', help="clean-up the project (by default, no files will be deleted immediately " + "without your confirmation)") + gui = sub.add_parser('gui', help="start the graphical version of the application. All arguments will " + "be passed forward, see its own --help for more information") + + # Assign options to commands + for command in [init, generate, pio_init, patch, new, status, validate, clean, gui]: + command.add_argument('-d', '--directory', dest='path', default=Path.cwd(), + help="path to the project (current directory, if not given)") + for command in [init, pio_init, new, gui]: + command.add_argument('-b', '--board', dest='board', default='', help="PlatformIO board name. " + board_hint) + for command in [init, generate, new]: + command.add_argument('-e', '--start-editor', dest='editor', + help="start the specified editor after an action (e.g. 
subl, code, atom, etc.)") + for command in [generate, new]: + command.add_argument('-c', '--with-build', action='store_true', help="build the project after code generation") + for command in [init, new]: + command.add_argument('-s', '--store-content', action='store_true', + help="save folder initial contents as a cleanup ignore list") + clean.add_argument('-s', '--store-content', action='store_true', + help="save project folder contents as a cleanup ignore list and exit") + clean.add_argument('-q', '--quiet', action='store_true', + help="suppress the caution about the content removal (be sure of what you are doing!)") if len(args) == 0: - root_parser.print_help() + root.print_help() return None - return root_parser.parse_args(args) + return root.parse_args(args) -def setup_logging(verbose: int = 0, dummy: bool = False) -> logging.Logger: +def setup_logging(verbose: int = 1, dummy: bool = False) -> logging.Logger: """ - Configure and return some root logger. The corresponding adapters for every project will be dependent on this. - - Args: - verbose: verbosity counter (currently only 2 levels are supported: NORMAL, VERBOSE) - dummy: create a NullHandler logger if true - - Returns: - logging.Logger instance + Prepare a logging setup suitable for a CLI application. Keep in mind, though, that Python ``logging`` module, in + general, mutates some internal global state, so be careful not to invoke this procedure twice in a single "session" + to avoid any unwanted interfering and possible slowdowns. + + :param verbose: verbosity counter (currently only 2 levels are supported: NORMAL, VERBOSE (starts from 1)) + :param dummy: if True, the function will create a "/dev/null" logger instead (no operation) + :return: configured and ready-to-use root logger instance. Corresponding logging adapters for every project will be + dependent on this """ if dummy: logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) else: logger = logging.getLogger('stm32pio') - logger.setLevel(logging.DEBUG if verbose else logging.INFO) + logger.setLevel(logging.DEBUG if verbose == 2 else logging.INFO) handler = logging.StreamHandler() - formatter = stm32pio.core.logging.DispatchingFormatter( - verbosity=stm32pio.core.logging.Verbosity.VERBOSE if verbose else stm32pio.core.logging.Verbosity.NORMAL, - general={ - stm32pio.core.logging.Verbosity.NORMAL: logging.Formatter("%(levelname)-8s %(message)s"), - stm32pio.core.logging.Verbosity.VERBOSE: logging.Formatter( - f"%(levelname)-8s %(funcName)-{stm32pio.core.settings.log_fieldwidth_function}s %(message)s") - }) + formatter = stm32pio.core.log.DispatchingFormatter(verbosity=stm32pio.core.log.Verbosity(verbose)) handler.setFormatter(formatter) logger.addHandler(handler) - logger.debug("debug logging enabled") + logger.debug("debug logging enabled") # will be printed only in verbose mode return logger def main(sys_argv: List[str] = None, should_setup_logging: bool = True) -> int: """ - Can be used as a high-level wrapper to perform independent tasks. + Entry point to the CLI edition of application. Since this is a highest-order wrapper, it can be used to + programmatically the application (for testing, embedding, etc.). 
Example: - Example: ret_code = stm32pio.app.main(sys_argv=['new', '-d', '~/path/to/project', '-b', 'nucleo_f031k6', '--with-build']) - Args: - sys_argv: list of strings CLI arguments - should_setup_logging: if this is true, the preferable default logging schema would be applied, otherwise it is a - caller responsibility to provide (or do not) some logging configuration. The latter can be useful when the - outer code makes sequential calls to this API so it is unwanted to append the logging handlers every time - (e.g. when unit-testing) - - Returns: - 0 on success, -1 otherwise + :param sys_argv: list of CLI arguments + :param should_setup_logging: if True, a reasonable default logging schema would be applied, otherwise it is on + caller to resolve (or not) some logging configuration. The latter can be useful when an outer code makes sequential + calls to this API so it is unwanted to append logging handlers every time (e.g. when unit-testing) + :return: 0 on success, -1 otherwise """ if sys_argv is None: @@ -140,12 +134,12 @@ def main(sys_argv: List[str] = None, should_setup_logging: bool = True) -> int: args = parse_args(sys_argv) - if args is not None and args.subcommand == 'gui': + if args is not None and args.command == 'gui': gui_args = [arg for arg in sys_argv if arg != 'gui'] import stm32pio.gui.app as gui app = gui.create_app(sys_argv=gui_args) return app.exec_() - elif args is not None and args.subcommand is not None: + elif args is not None and args.command is not None: logger = setup_logging(verbose=args.verbose, dummy=not should_setup_logging) else: print("\nNo arguments were given, exiting...") @@ -153,50 +147,54 @@ def main(sys_argv: List[str] = None, should_setup_logging: bool = True) -> int: project = None - # Main routine + # Wrap the main routine into try...except to gently handle possible error (API is designed to throw in certain + # situations when it doesn't make much sense to continue with the met conditions) try: - if args.subcommand == 'init': + if args.command == 'init': project = stm32pio.core.project.Stm32pio(args.path, parameters={'project': {'board': args.board}}, - instance_options={'save_on_destruction': True}) + save_on_destruction=True) if args.store_content: - project.config.save_content_as_ignore_list() + project.config.set_content_as_ignore_list() if project.config.get('project', 'board') == '': - logger.warning("PlatformIO board identifier is not specified, it will be needed on PlatformIO project " - "creation. Type 'pio boards' or go to https://platformio.org to find an appropriate " - "identifier") - logger.info(f"project has been initialized. 
You can now edit {stm32pio.core.settings.config_file_name} " - "config file") + logger.warning(no_board_message) + project.inspect_ioc_config() + logger.info(init_message) if args.editor: project.start_editor(args.editor) - elif args.subcommand == 'generate': + elif args.command == 'generate': project = stm32pio.core.project.Stm32pio(args.path) + if project.config.get('project', 'inspect_ioc', fallback='0').lower() in stm32pio.core.settings.yes_options: + project.inspect_ioc_config() project.generate_code() if args.with_build: project.build() if args.editor: project.start_editor(args.editor) - elif args.subcommand == 'pio_init': + elif args.command == 'pio_init': project = stm32pio.core.project.Stm32pio(args.path, parameters={'project': {'board': args.board}}, - instance_options={'save_on_destruction': True}) + save_on_destruction=True) + if project.config.get('project', 'inspect_ioc', fallback='0').lower() in stm32pio.core.settings.yes_options: + project.inspect_ioc_config() project.pio_init() - elif args.subcommand == 'patch': + elif args.command == 'patch': project = stm32pio.core.project.Stm32pio(args.path) + if project.config.get('project', 'inspect_ioc', fallback='0').lower() in stm32pio.core.settings.yes_options: + project.inspect_ioc_config() project.patch() - elif args.subcommand == 'new': + elif args.command == 'new': project = stm32pio.core.project.Stm32pio(args.path, parameters={'project': {'board': args.board}}, - instance_options={'save_on_destruction': True}) + save_on_destruction=True) if args.store_content: - project.config.save_content_as_ignore_list() + project.config.set_content_as_ignore_list() if project.config.get('project', 'board') == '': - logger.info(f"project has been initialized. You can now edit {stm32pio.core.settings.config_file_name} " - "config file") - raise Exception("PlatformIO board identifier is not specified, it is needed for PlatformIO project " - "creation. Type 'pio boards' or go to https://platformio.org to find an appropriate " - "identifier") + logger.info(init_message) + raise Exception(no_board_message) + if project.config.get('project', 'inspect_ioc', fallback='0').lower() in stm32pio.core.settings.yes_options: + project.inspect_ioc_config() project.generate_code() project.pio_init() project.patch() @@ -205,26 +203,24 @@ def main(sys_argv: List[str] = None, should_setup_logging: bool = True) -> int: if args.editor: project.start_editor(args.editor) - elif args.subcommand == 'status': + elif args.command == 'status': project = stm32pio.core.project.Stm32pio(args.path) print(project.state) - elif args.subcommand == 'validate': + elif args.command == 'validate': project = stm32pio.core.project.Stm32pio(args.path) print(project.validate_environment()) - elif args.subcommand == 'clean': + elif args.command == 'clean': project = stm32pio.core.project.Stm32pio(args.path) if args.store_content: - project.config.save_content_as_ignore_list() + project.config.set_content_as_ignore_list() + project.config.save() else: - project.clean(quiet_on_cli=args.quiet) + project.clean(quiet=args.quiet) - # Global errors catching. Core library is designed to throw the exception in cases when there is no sense to - # proceed. 
Of course this also suppose to handle any unexpected behavior, too - except Exception: - stm32pio.core.logging.log_current_exception( - logger, config=project.config if (project is not None and hasattr(project, 'config')) else None) + except (Exception,): + stm32pio.core.log.log_current_exception(logger, config=project.config if project is not None else None) return -1 return 0 diff --git a/stm32pio/core/clean.py b/stm32pio/core/clean.py new file mode 100644 index 0000000..0c47816 --- /dev/null +++ b/stm32pio/core/clean.py @@ -0,0 +1,101 @@ +""" +Various ways to remove artifacts from the project folder +""" + +import logging +import shutil +import subprocess +from abc import ABC, abstractmethod +from pathlib import Path +from typing import List + +import stm32pio.core.settings +from stm32pio.core.log import Logger, LogPipe +from stm32pio.core.util import get_folder_contents + + +# TODO: Python 3.8: also see typing.Protocol (https://stackoverflow.com/a/66056490/7782943) +class ICleanStrategy(ABC): + """Common interface for different cleaners""" + + def __init__(self, path: Path, logger: Logger, ask_confirmation: bool): + """ + :param path: working directory + :param logger: logging.Logger-compatible object + :param ask_confirmation: if True, the full removal list will be shown and the user will be asked (on CLI) + to proceed + """ + self.path = path + self.logger = logger + self.ask_confirmation = ask_confirmation + + @abstractmethod + def clean(self): + """Concrete implementation""" + raise NotImplementedError + + +class DefaultStrategyI(ICleanStrategy): + """Custom algorithm to perform a cleanup. Can be supplied with an optional ignore list""" + + def __init__(self, path: Path, logger: Logger, ask_confirmation: bool = True, ignore_list: List[Path] = None): + """ + :param ignore_list: list of *concrete paths* to not remove + """ + super().__init__(path, logger, ask_confirmation) + self.ignore_list = ignore_list + + def clean(self): + """Deletes everything except the ignore list entries""" + + removal_list = get_folder_contents(self.path, ignore_list=self.ignore_list) + if len(removal_list): + if self.ask_confirmation: + removal_str = '\n'.join(f' {path.relative_to(self.path)}' for path in removal_list) + while True: + reply = input(f"These files/folders will be deleted:\n{removal_str}\nAre you sure? (y/n) ") + if reply.lower() in stm32pio.core.settings.yes_options: + break + elif reply.lower() in stm32pio.core.settings.no_options: + return + + for entry in removal_list: + if entry.is_dir(): + shutil.rmtree(entry) # this can delete non-empty directories + self.logger.debug(f'del "{entry.relative_to(self.path)}"/') + elif entry.is_file(): + entry.unlink() + self.logger.debug(f'del "{entry.relative_to(self.path)}"') + self.logger.info("project has been cleaned") + else: + self.logger.info("no files/folders to remove") + + +class GitStrategyI(ICleanStrategy): + """Delegate the entire task to the Git. 
See its docs for ``git clean`` command for more information""" + + def __init__(self, path: Path, logger: Logger, ask_confirmation: bool = True, exe_cmd: str = 'git', + clean_args: List[str] = None): + """ + :param exe_cmd: command or a path to executable + :param clean_args: ``git clean`` command arguments + """ + super().__init__(path, logger, ask_confirmation) + self.exe_cmd = exe_cmd + self.clean_args = clean_args if clean_args is not None else [ + '-d', '--force', # recurse into untracked directories + '-X' # remove only files ignored by Git + ] + + def clean(self): + """Run subprocess with appropriate arguments""" + # Remove files listed in .gitignore (see git clean --help for more information) + command = [self.exe_cmd, 'clean'] + self.clean_args + if self.ask_confirmation: + command.append('--interactive') + if not self.logger.isEnabledFor(logging.DEBUG): + command.append('--quiet') + with LogPipe(self.logger, logging.INFO) as log: + # TODO: Python 3.6 compatibility: str(self.path) + subprocess.run(command, check=True, cwd=str(self.path), stdout=log.pipe, stderr=log.pipe) + self.logger.info("Done", from_subprocess=True) # fake diff --git a/stm32pio/core/config.py b/stm32pio/core/config.py index f8343e1..50edb1b 100644 --- a/stm32pio/core/config.py +++ b/stm32pio/core/config.py @@ -1,96 +1,86 @@ """ -Config entity suitable for a usage in conjunction with the main Stm32pio class. - -Not to be confused with the settings.py module! +Tweaked native ConfigParser entity attached to every stm32pio project. """ import collections.abc -import configparser -import copy -import io import logging -import pathlib -from typing import Mapping, Any, Union, List +from configparser import ConfigParser +from io import StringIO +from pathlib import Path +from typing import Mapping, Any, Union, List # TODO: 3.9+: List is not needed anymore, just use standard list -import stm32pio.core.util +import stm32pio.core.log import stm32pio.core.settings +import stm32pio.core.util + +ConfigMapping = Mapping[str, Mapping[str, Any]] # in Python, *anything* can be converted to a string :) -class Config(configparser.ConfigParser): - """ - This is basically a ConfigParser "on steroids" that can be tweaked even more later, actually. It supplements the - parent with such features as additional getters/setters (ignore list), pretty printer, smart merging and more. - """ - def __init__(self, location: pathlib.Path, name: str = stm32pio.core.settings.config_file_name, - defaults: Mapping[str, Mapping[str, Any]] = stm32pio.core.settings.config_default, - runtime_parameters: Mapping[str, Mapping[str, Any]] = None, logger: logging.Logger = None): +class ProjectConfig(ConfigParser): + """An ordinary ConfigParser with some additional functionality: getters, pretty printing, smart merging, ...""" + + def __init__(self, location: Path, logger: 'stm32pio.core.log.Logger', + name: str = stm32pio.core.settings.config_file_name, + defaults: ConfigMapping = stm32pio.core.settings.config_default, + runtime_parameters: ConfigMapping = None): """ - Prepare config for the project. Order (priorities) of values retrieval (masking) (i.e. 
higher levels - overwrites lower but only if a value is non-empty): - - default dict (settings.py module) => config file stm32pio.ini => user-given (runtime) values - (via CLI or another way) - - Args: - location: path to the folder which contain (or should contain in the future) the config file - name: file name of the config - defaults: mapping with the default values for the config (see schema above) - runtime_parameters: another mapping to write (see schema above) - logger: optional logging.Logger instance (or compatible one) + Setup a config for the project. Order of values masking (higher level non-empty values overwrites lower ones): + + default dict (settings.py) => project file stm32pio.ini => runtime provided values + + :param location: folder which contains (or should contain in the future) a config file (typically a project + directory) + :param logger: logging.Logger-compatible object + :param name: config file name (typically with .INI extension) + :param defaults: some mapping providing default values for the config + :param runtime_parameters: another source of values with the highest priority """ + super().__init__(interpolation=None) self.logger = logger - self.location = location - self.name = name self.path = location / name - # Fill with default values ... - self.read_dict(copy.deepcopy(defaults)) + # 1. Fill with default values ... + self.read_dict(defaults) - # ... then merge with the user's config file values (if exist)... - if self.logger is not None: - self.logger.debug(f"searching for {name}...") + # 2. ... then merge with project's config file (if exist)... + self.logger.debug(f"searching for {name} config...") self.merge_with(self.path, reason="compared to default") - # ... finally merge with the given in this session CLI parameters - if runtime_parameters is not None and len(runtime_parameters): + # 3. ... finally merge with some given runtime parameters (like from CLI or GUI) + if runtime_parameters is not None: self.merge_with(runtime_parameters, reason="CLI keys") - def get_ignore_list(self, section: str, option: str, raw: bool = False) -> Union[str, List[pathlib.Path]]: - """Custom getter based on the ConfigParser API""" - if raw: - return self.get(section, option, fallback='') - else: - ignore_list = [] - for entry in filter(lambda line: len(line) != 0, # non-empty lines only - self.get(section, option, fallback='').splitlines()): - ignore_list.extend(self.location.glob(entry)) - return ignore_list + def get_ignore_list(self, section: str, option: str) -> List[Path]: + """Custom getter similar to what built-in ones are providing (like ``getint()``/``getboolean()``...)""" + ignore_list = [] + for entry in filter(lambda line: len(line) != 0, # non-empty lines only + self.get(section, option, fallback='').splitlines()): + ignore_list.extend(self.path.parent.glob(entry)) + return ignore_list - def save_content_as_ignore_list(self): + def set_content_as_ignore_list(self): """ - Set all siblings of the config file path (non-recursively) to the [project]cleanup_ignore key and - save the entire config file + When invoked, snapshotting the config directory' current content (relative, non-recursive) and sets it as the + ``cleanup_ignore`` config option. 
""" - self.set('project', 'cleanup_ignore', - '\n'.join(str(path.relative_to(self.location)) for path in self.location.iterdir())) - self.save() - self.logger.info( - f"folder contents has been saved to the {self.name} [project] section as 'cleanup_ignore'") + location = self.path.parent + self.set('project', 'cleanup_ignore', '\n'.join(str(path.relative_to(location)) for path in location.iterdir())) + self.logger.info("folder current contents has been set as cleanup_ignore list") - def _log_whats_changed(self, compared_to: Mapping[str, Mapping[str, Any]], + def _log_whats_changed(self, compared_to: ConfigMapping, log_string: str = "these config parameters will be overridden", reason: str = None) -> None: """ - Compare the current configuration with the given mapping forming the resulting string for logging. + Print a diff between the current state and given mapping (only in DEBUG mode). - Args: - compared_to: compare the current state with this argument - log_string: prefix to put before the diff - reason: optional comment about the merging cause + :param compared_to: some mapping (with same shape as ConfigParser) to compare + :param log_string: prefix to put before the diff + :param reason: optional comment about a cause of the requested merge """ - if self.logger is not None and self.logger.isEnabledFor(logging.DEBUG): + + if self.logger.isEnabledFor(logging.DEBUG): whats_changed = [] for section in compared_to.keys(): for key, new_value in compared_to[section].items(): @@ -108,20 +98,15 @@ def _log_whats_changed(self, compared_to: Mapping[str, Mapping[str, Any]], log_string += f":\n{overridden}" self.logger.debug(log_string) - def merge_with(self, another: Union[pathlib.Path, Mapping[str, Mapping[str, Any]]], reason: str = None) -> None: + def merge_with(self, another: Union[Path, ConfigMapping], reason: str = None) -> None: """ - Merge itself with some external thing. It is safe because the empty given values will not overwrite existing - ones. + Merge itself with some external thing. Behavior is safe: empty values will not overwrite existing ones. - Args: - another: whether Path or Mapping (in the same form as the config) - reason: optional short description. This lays nicely with the logging (if enabled) - - Raises: - TypeError: on incompatible input argument (see above) + :param another: path to config or a mapping + :param reason: optional short description of a merge reason """ - if isinstance(another, pathlib.Path): - temp_config = configparser.ConfigParser(interpolation=None) + if isinstance(another, Path): + temp_config = ConfigParser(interpolation=None) temp_config.read(another) temp_config_dict_cleaned = stm32pio.core.util.cleanup_mapping(temp_config) self._log_whats_changed(temp_config_dict_cleaned, reason=reason, @@ -131,45 +116,33 @@ def merge_with(self, another: Union[pathlib.Path, Mapping[str, Mapping[str, Any] self._log_whats_changed(another, reason=reason) self.read_dict(stm32pio.core.util.cleanup_mapping(another)) else: - raise TypeError(f"Cannot merge the given value of type {type(another)} to the config {self.path}. This " - "type isn't supported") + raise TypeError(f"Cannot merge with value of type {type(another)}") - def save(self, parameters: Mapping[str, Mapping[str, Any]] = None) -> int: + def save(self, also_set_this: ConfigMapping = None) -> int: """ - Preliminarily, updates the config with the given 'parameters' dictionary. It should has the following format: - - { - 'project': { - 'board': 'nucleo_f031k6', - 'ioc_file': 'fan_controller.ioc' - }, - ... 
- } - - Then writes itself to the file 'path' and logs using the logger. + Flush the config to file. - Returns: - 0 on success, -1 otherwise + :param also_set_this: optional mapping (with same shape as ConfigParser) to populate the config with + :return: 0 on success, -1 otherwise """ - if parameters is not None and len(parameters): - self.merge_with(parameters, reason="config file saving was requested") + if also_set_this is not None and len(also_set_this): + self.merge_with(also_set_this, reason="config file save was requested") try: with self.path.open(mode='w') as config_file: self.write(config_file) - if self.logger is not None: - self.logger.debug(f"{self.name} config file has been saved") + self.logger.debug(f"{self.path.name} config file has been saved") return 0 except Exception as e: - if self.logger is not None: - self.logger.warning(f"cannot save the config: {e}", exc_info= - self.logger.isEnabledFor(stm32pio.core.settings.show_traceback_threshold_level)) + self.logger.warning( + f"cannot save config: {e}", + exc_info=self.logger.isEnabledFor(stm32pio.core.settings.show_traceback_threshold_level)) return -1 def __str__(self) -> str: - """String representation""" - fake_file = io.StringIO() + """String representation (same as it will be stored in file)""" + fake_file = StringIO() self.write(fake_file) printed = fake_file.getvalue() fake_file.close() diff --git a/stm32pio/core/cubemx.py b/stm32pio/core/cubemx.py new file mode 100644 index 0000000..66f83f5 --- /dev/null +++ b/stm32pio/core/cubemx.py @@ -0,0 +1,224 @@ +""" +Module outsourcing most of the STM32CubeMX-related logic. +""" + +import difflib +import logging +import subprocess +import tempfile +from configparser import ConfigParser +from io import StringIO +from pathlib import Path +from string import Template +from typing import Tuple + +import stm32pio.core.log +import stm32pio.core.settings +import stm32pio.core.util + + +class IocConfig(ConfigParser): + """ + .ioc file structure is actually very similar to traditional INI-style configs and can be managed by the + ``ConfigParser`` with small tweaks + """ + + fake_section_name = 'ioc' + header = '' + + def __init__(self, parent_path: Path, file_name: str, logger: stm32pio.core.log.Logger): + """ + Concentrate a CubeMX .ioc-file-related logic. As such file is a fundamental piece of every stm32pio project, + this constructor throws in case of an absent or incorrect one. + + :param parent_path: project folder + :param file_name: expected file name + :param logger: logging.Logger-compatible object + """ + super().__init__(interpolation=None) + self.logger = logger + self.path, content = self._find_ioc_file(parent_path, file_name) + self.optionxform = lambda option: option # do not modify keys + self.read_string(f'[{IocConfig.fake_section_name}]\n' + content) # ConfigParser cannot handle headless configs + self.header = stm32pio.core.util.extract_header_comment(content, comment_symbol='#') + + def _find_ioc_file(self, parent_path: Path, file_name: str) -> Tuple[Path, str]: + """ + Find and perform a basic correctness check of a CubeMX project .ioc file. Different scenarios are considered. + Read and return raw string content for further usage. + + :param parent_path: project folder + :param file_name: expected file name + :return: absolute path and the file content + """ + + if file_name: # if file is given, check its existence... 
+ result_file = parent_path.joinpath(file_name).resolve(strict=True) + self.logger.debug(f"using '{result_file.name}' file") + else: # ...otherwise search for a file in the containing directory + self.logger.debug("searching for .ioc file...") + candidates = list(parent_path.glob('*.ioc')) + if len(candidates) == 0: # TODO: Python 3.8: assignment expression feature + raise FileNotFoundError("CubeMX project .ioc file") + elif len(candidates) == 1: + self.logger.debug(f"'{candidates[0].name}' is selected") + else: + self.logger.warning(f"there are multiple .ioc files, '{candidates[0].name}' is selected") + result_file = candidates[0] + + try: + content = result_file.read_text() # should be a non-empty text file + if len(content) == 0: + raise ValueError("file is empty") + except Exception as e: + raise Exception("file is incorrect") from e + else: + return result_file, content + + def save(self): + """ + Save the config back to its file (by overwriting it). This trying to introduce as little changes to the original + content as possible, even prepending the initial "do not modify" notice + """ + fake_file = StringIO() + self.write(fake_file, space_around_delimiters=False) + config_text = fake_file.getvalue() + self.path.write_text( + (self.header if self.header else '') + # restore a header + config_text[config_text.index('\n') + 1:-1]) # remove fake section (first line) and last \n + fake_file.close() + + def inspect(self, platformio_board: str = None, platformio_mcu: str = None): + """ + Report some info about the .ioc file current state. This method looks only for options that should be *actively* + tweaked (i.e. changed from the default values by a user) in order for project to be compatible with PlatformIO + (see README and CLI usage example). + + :param platformio_board: name to compare (i.e. nucleo_f031k6) + :param platformio_mcu: name to compare (i.e. STM32F031K6T6) + """ + + s = IocConfig.fake_section_name # just for a short variable name + + def w(message: str): + self.logger.warning(self.path.name + ': ' + message) + + if self.get(s, 'ProjectManager.TargetToolchain', fallback='') != 'Other Toolchains (GPDSC)': + w('It is recommended to use value "Other Toolchains (GPDSC)" for parameter ' + '"Project Manager –> Project -> Toolchain/IDE"') + + if self.getint(s, 'ProjectManager.LibraryCopy', fallback=None) != 1: + w('It is recommended to set parameter ' + '"Project Manager –> Code Generator –> Copy only the necessary library files"') + + if not self.getboolean(s, 'ProjectManager.CoupleFile', fallback=False): + w('It is recommended to set parameter "Project Manager –> ' + 'Code Generator –> Generate peripheral initialization as a pair of \'.c/.h\' files per peripheral"') + + similarity_threshold = 0.8 + + if self.get(s, 'board', fallback='') == 'custom' and platformio_mcu: + device_id = self.get(s, 'ProjectManager.DeviceId', fallback='') + if difflib.SequenceMatcher(a=device_id.lower(), b=platformio_mcu.lower()).ratio() < similarity_threshold: + self.logger.warning("Probably, there is a mismatch between CubeMX and PlatformIO MCUs:\n\t" + f"{device_id} (CubeMX) vs. {platformio_mcu} (PlatformIO)") + elif self.get(s, 'board', fallback='') != 'custom' and platformio_board: + board = self.get(s, 'board', fallback='') + if difflib.SequenceMatcher(a=board.lower(), b=platformio_board.lower()).ratio() < similarity_threshold: + self.logger.warning("Probably, there is a mismatch between CubeMX and PlatformIO boards:\n\t" + f"{board} (CubeMX) vs. 
{platformio_board} (PlatformIO)") + + +class CubeMX: + """Interface to interact with the STM32CubeMX program""" + + def __init__(self, work_dir: Path, ioc_file_name: str, exe_cmd: str, logger: stm32pio.core.log.Logger, + java_cmd: str = None): + """ + :param work_dir: project folder + :param ioc_file_name: expected .ioc file name (can be found automatically if not given) + :param exe_cmd: path to the CubeMX executable binary + :param logger: logging.Logger-compatible object + :param java_cmd: optional JRE executable (newer CubeMX versions doesn't need that) + """ + self.logger = logger + self.work_dir = work_dir + self.ioc = IocConfig(work_dir, ioc_file_name, logger) # represents project .ioc file config + self.exe_cmd = exe_cmd + self.java_cmd = java_cmd + + def execute_script(self, script_content: str) -> Tuple[subprocess.CompletedProcess, str]: + """ + CubeMX can be fed with the script file to read commands from (see PDF manual). This method manages a temp file + for such script. + + :param script_content: multi-line string + :return: tuple of subprocess.CompletedProcess instance and recorded STDOUT output + """ + + # TODO: This, probably, needs to be investigated in the future + # Use mkstemp() instead of higher-level API for compatibility with Windows (see tempfile docs for more details) + cubemx_script_file, cubemx_script_name = tempfile.mkstemp() + + # We should remove a temp directory yourself, so do not let any exception break our plans + try: + # buffering=0 leads to the immediate flushing on writing + with open(cubemx_script_file, mode='w+b', buffering=0) as cubemx_script: + cubemx_script.write(script_content.encode()) # should encode since mode='w+b' + + command_arr = [] + # CubeMX can be invoked directly or through the JRE + if self.java_cmd and (self.java_cmd.lower() not in stm32pio.core.settings.none_options): + command_arr += [self.java_cmd, '-jar'] + command_arr += [self.exe_cmd, '-q', # read commands from file + cubemx_script_name, '-s'] # no splash screen + with stm32pio.core.log.LogPipe(self.logger, logging.DEBUG, accumulate=True) as log: + completed_process = subprocess.run(command_arr, stdout=log.pipe, stderr=log.pipe) + std_output = log.value + + except Exception as e: + raise e # re-raise an exception after the 'finally' block + else: + return completed_process, std_output + finally: + Path(cubemx_script_name).unlink() + + def generate_code(self, script_template: str) -> int: + """ + Initiate a code generation process using CubeMX CLI. As of yet, results of this action differs when invoked from + CLI and GUI (folders structure). 
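To make the `inspect()` similarity check above more tangible, here is the same `difflib` comparison in isolation; the 0.8 threshold mirrors the code above, while the concrete board strings are only illustrative:

```python
import difflib

def probably_same(a: str, b: str, threshold: float = 0.8) -> bool:
    """Fuzzy name comparison in the spirit of IocConfig.inspect()."""
    return difflib.SequenceMatcher(a=a.lower(), b=b.lower()).ratio() >= threshold

print(probably_same('NUCLEO-F031K6', 'nucleo_f031k6'))   # True: naming style differs, hardware matches
print(probably_same('NUCLEO-F031K6', 'nucleo_f103rb'))   # False: likely a mismatch worth warning about
```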
+ + :param script_template: string.Template compatible string to fill with necessary paths + :return: completed process return code + """ + + self.logger.info("starting to generate a code from the CubeMX .ioc file...") + + cubemx_script_template = Template(script_template) + # It's important to wrap paths into quotation marks as they can contain whitespaces + cubemx_script_content = cubemx_script_template.substitute(ioc_file_absolute_path=f'"{self.ioc.path}"', + project_dir_absolute_path=f'"{self.work_dir}"') + completed_process, std_output = self.execute_script(cubemx_script_content) + + error_msg = "code generation error" + if completed_process.returncode == 0: + if stm32pio.core.settings.cubemx_str_indicating_success in std_output: + self.logger.info("successful code generation") + return completed_process.returncode + else: # strictly speaking, here we're just guessing + error_lines = [line for line in std_output.splitlines(keepends=True) + if stm32pio.core.settings.cubemx_str_indicating_error in line] + if len(error_lines): + self.logger.error(''.join(error_lines), from_subprocess=True) + raise Exception(error_msg) + else: + self.logger.warning("Unclear CubeMX code generation results (neither error or success symptoms " + "were found in logs). Keep going but there might be errors...") + return completed_process.returncode + else: + # Most likely, Java error (e.g. no CubeMX is present) + self.logger.error(f"Return code is {completed_process.returncode}", from_subprocess=True) + if not self.logger.isEnabledFor(logging.DEBUG): + # In DEBUG mode the output has already been printed + self.logger.error(f"Output:\n{std_output}", from_subprocess=True) + raise Exception(error_msg) diff --git a/stm32pio/core/log.py b/stm32pio/core/log.py new file mode 100644 index 0000000..b9e3523 --- /dev/null +++ b/stm32pio/core/log.py @@ -0,0 +1,252 @@ +""" +Logging is an important part of the application since almost all of its output is flowing through the Python ``logging`` +library in some way or another providing a feedback to users. It is an intended behavior allowing us not just quickly +customize the output format but also redirect it to different "sinks" in any possible combinations. It has been +developed and taken such a form mostly during the GUI milestone to provide some sleek asynchronous way of piping all +text-based application output to GUI components while preserving a backwards compatibility with the CLI API. + +This module consists of several handy utilities for various uses throughout the app. 
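The path-quoting detail in `generate_code()` above is easy to get wrong, so here is the substitution step on its own. The commands inside the template are placeholders; the real default script is taken from the project config (`cubemx_script_content`), not from this sketch:

```python
from pathlib import Path
from string import Template

# Placeholder script using the same placeholder names the code above substitutes
script = Template('config load $ioc_file_absolute_path\n'
                  'generate code $project_dir_absolute_path\n'
                  'exit\n')

ioc = Path('fan_controller.ioc').resolve()
content = script.substitute(
    ioc_file_absolute_path=f'"{ioc}"',                 # quotes protect paths containing whitespace
    project_dir_absolute_path=f'"{ioc.parent}"')
print(content)
```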
+""" + +import enum +import logging +import os +from contextlib import AbstractContextManager +from copy import copy +from threading import Thread +from traceback import format_exc as format_exception +from typing import Any, MutableMapping, Tuple, Mapping, Optional, List, Union + +import stm32pio.core.config +import stm32pio.core.settings + + +_module_logger = logging.getLogger(__name__) # this module logger + +logging_levels = { # GUI is using this map to adjust its representation + logging.getLevelName(logging.CRITICAL): logging.CRITICAL, + logging.getLevelName(logging.ERROR): logging.ERROR, + logging.getLevelName(logging.WARNING): logging.WARNING, + logging.getLevelName(logging.INFO): logging.INFO, + logging.getLevelName(logging.DEBUG): logging.DEBUG, + logging.getLevelName(logging.NOTSET): logging.NOTSET +} + +# Do not add or remove any information from the message and simply pass it "as-is" +as_is_formatter = logging.Formatter('%(message)s') + + +@enum.unique +class Verbosity(enum.IntEnum): + """ + Global logging verbosity levels available for the application. Each one determines how every LogRecord will be + formatted (independent from its level) + """ + NORMAL = enum.auto() # note: starts from 1 + VERBOSE = enum.auto() + + +@enum.unique +class SpecialLogEvent(enum.Enum): + """ + Identifiers for the special logging cases when a log request should be treated differently compared to a normal + situations + """ + FROM_SUBPROCESS = 'from_subprocess' + + +class ProjectLogger(logging.LoggerAdapter): + """ + Wrapper around the actual Logger to supply some contextual information to every LogRecord. Usage example: + + self.logger = ProjectLogger(logging.getLogger('some_singleton_logger_for_all_projects'), project_id=id(self)) + """ + + def __init__(self, underlying_logger: logging.Logger, project_id: int): + super().__init__(logger=underlying_logger, extra=dict(project_id=project_id)) + + # TODO: kwargs can utilize Python 3.8+ TypedDict, doesn't it?.. + def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> Tuple[Any, MutableMapping[str, Any]]: + """Inject a context data (both from the adapter and the log call)""" + + # 1. Attach the common, per-project-scoped context + if 'extra' in kwargs: + kwargs['extra'].update(self.extra) + else: + kwargs['extra'] = copy(self.extra) # be careful! Only works for plain objects + + # 2. Inject a logging-call-scoped context + flags = [case for case in SpecialLogEvent if case.value in kwargs and bool(kwargs[case.value])] + if len(flags) > 1: + _module_logger.warning(f"More than 1 special logging event flag is set for a single record \"{msg}\". " + "A first one will be chosen") + if len(flags) > 0: + # We should use *something* as a key anyway, so why not a SpecialLogEvent name? + kwargs['extra'][SpecialLogEvent.__name__] = flags[0] + for case in flags: # clear our "custom" keys since the kwargs argument cannot contain any unwanted values + del kwargs[case.value] + + return msg, kwargs + + +Logger = Union[logging.Logger, logging.LoggerAdapter] # used as a type hint for all loggers throughout the app + + +class DispatchingFormatter(logging.Formatter): + """ + Wrapper around the ordinary logging.Formatter allowing to have multiple formatters for different purposes. General + arguments schema: + + { + verbosity=Verbosity.NORMAL, + general={ + Verbosity.NORMAL: logging.Formatter(...) + Verbosity.VERBOSE: logging.Formatter(...) + ... + }, + special={ + 'case_1': { + Verbosity.NORMAL: logging.Formatter(...) + ... + }, + ... 
+ } + } + """ + + # General-purpose logging formatters + GENERAL_FORMATTERS_DEFAULT = { + Verbosity.NORMAL: logging.Formatter("%(levelname)-8s %(message)s"), + Verbosity.VERBOSE: logging.Formatter( + f"%(levelname)-8s %(module)s %(funcName)-{stm32pio.core.settings.log_fieldwidth_function}s %(message)s") + } + + # Logging formatters for some "special" cases. Currently, only "from_subprocess" is defined + SPECIAL_FORMATTERS_DEFAULT = { + SpecialLogEvent.FROM_SUBPROCESS: { + level: as_is_formatter for level in Verbosity + } + } + + def __init__(self, verbosity: Verbosity = Verbosity.NORMAL, + general: Mapping[Verbosity, logging.Formatter] = None, + special: Mapping[SpecialLogEvent, Mapping[Verbosity, logging.Formatter]] = None): + super().__init__() # will be '%(message)s' + self.verbosity = verbosity + self.general = DispatchingFormatter.GENERAL_FORMATTERS_DEFAULT if general is None else general + self.special = DispatchingFormatter.SPECIAL_FORMATTERS_DEFAULT if special is None else special + + def find_formatter_for(self, record: logging.LogRecord) ->\ + Tuple[Optional[SpecialLogEvent], Optional[logging.Formatter]]: + """Find and return an appropriate formatter""" + special_case = getattr(record, SpecialLogEvent.__name__) if hasattr(record, SpecialLogEvent.__name__) else None + if special_case is not None: + return special_case, self.special.get(special_case, {}).get(self.verbosity) + return None, self.general.get(self.verbosity) + + def format(self, record: logging.LogRecord) -> str: + """Dispatch a request to a suitable formatter""" + case, formatter = self.find_formatter_for(record) + if formatter is not None: + return formatter.format(record) + else: + _module_logger.warning(f"No formatter found for logging event {case}, verbosity {self.verbosity}. " + "Falling back to default one") + return super().format(record) + + +class LogPipeRC: + """Small class suitable for passing to a caller on the LogPipe context manager enter""" + + accumulator: List[str] = [] # accumulating all incoming messages + + def __init__(self, fd: int): + """ + :param fd: writable end of os.pipe + """ + self.pipe = fd + + @property + def value(self): + return ''.join(self.accumulator) + + +class LogPipe(Thread, AbstractContextManager): + """ + Thread combined with the context manager providing a nice way to temporarily redirect some stream output into the + ``logging`` module. One straightforward application is to suppress a given subprocess' STDOUT/STDERR and wrap them + into a conventional logging mechanism of your app. It can also accumulate such output to an internal variable for + further usage + """ + + def __init__(self, logger: Logger = None, level: int = logging.INFO, accumulate: bool = False): + """ + :param logger: logger to flow a streaming lines to + :param level: logging level to log a messages with + :param accumulate: whether to store a copy of incoming information + """ + + super().__init__() # initialize both ancestors (refer to MRO) + + self.logger = logger + self.level = level + self.accumulate = accumulate + + self.fd_read, self.fd_write = os.pipe() # create 2 ends of the pipe and setup the reading one + self.pipe_reader = os.fdopen(self.fd_read) + + self.rc = LogPipeRC(self.fd_write) # RC stands for "remote control" + + def __enter__(self) -> LogPipeRC: + """Start the thread and return the consuming end of pipe. 
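The dispatching itself is small once stripped of the verbosity matrix; a minimal per-record formatter switch in the same spirit (a simplified illustration, not the class above):

```python
import logging

plain = logging.Formatter('%(message)s')
detailed = logging.Formatter('%(levelname)-8s %(funcName)-10s %(message)s')

class MiniDispatchingFormatter(logging.Formatter):
    """Pick a formatter per record: subprocess output is passed through untouched."""
    def format(self, record):
        chosen = plain if getattr(record, 'from_subprocess', False) else detailed
        return chosen.format(record)

handler = logging.StreamHandler()
handler.setFormatter(MiniDispatchingFormatter())
logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("starting PlatformIO project build...")                       # detailed format
logger.info("Processing nucleo_f031k6...", extra={'from_subprocess': True})  # "as is"
```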
The caller should feed its data to that input now""" + self.start() + return self.rc + + def run(self): + """Routine of the thread: absorb everything""" + for line in iter(self.pipe_reader.readline, ''): # stop iteration when the empty string will occur + if self.accumulate: + self.rc.accumulator.append(line) # accumulate the string + if self.logger: + self.logger.log(self.level, line.strip('\n'), from_subprocess=True) # mark the message origin + self.pipe_reader.close() + + def __exit__(self, exc_type, exc_val, exc_tb): + """Any exception will be passed forward. The following tear-down process will be done anyway""" + os.close(self.fd_write) + + +def log_current_exception(logger: Logger, show_traceback: bool = None, + config: stm32pio.core.config.ProjectConfig = None) -> None: + """ + When called from inside a ``try...except`` block, will smartly report error details depending on a context. In + verbose mode (as determined based on the given logger) additionally prints the traceback, otherwise only a message + will be shown. This can be overridden by ``show_traceback`` flag. If project config is given, traceback always will + be put in it instead of printing. + + :param logger: instance to output the message + :param show_traceback: reset default behavior (ignored if ``config`` given) + :param config: ``ProjectConfig`` instance + """ + + if show_traceback is None: + show_traceback = logger.isEnabledFor(stm32pio.core.settings.show_traceback_threshold_level) + + # We do not explicitly retrieve an exception info via sys.exc_info() as it immediately stores a reference to the + # current Python frame/variables possibly causing some weird errors and memory leaks (objects are not garbage + # collected). See https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/ for more information. + lines = format_exception().splitlines() + message = lines[-1] + if message.startswith('Exception: ') and not show_traceback: + message = message[len('Exception: '):] + traceback = '\n'.join(lines[:-1]) + + if config is not None: + logger.error(message) + config_saved = config.save({'project': {'last_error': f"{message}\n{traceback}"}}) == 0 + if config_saved: + logger.info(f"Traceback has been saved to {config.path.name}. It will be cleared on next successful run") + else: + logger.warning(f"Traceback has not been saved to {config.path.name}") + else: + logger.error(f"{message}\n{traceback}" if show_traceback else message) diff --git a/stm32pio/core/logging.py b/stm32pio/core/logging.py deleted file mode 100644 index cb2f72d..0000000 --- a/stm32pio/core/logging.py +++ /dev/null @@ -1,235 +0,0 @@ -""" -Logging is an important part of the application because almost all of its output is flowing through the logging Python -library in some way or another. It is an intended behavior allowing us not just quickly customize the output shape but -also redirect it to different "sinks" in many possible combinations. It was developed mostly during the GUI milestone -to provide some sleek way of piping the text application output to the GUI components while preserving the backwards -compatibility with the CLI API. - -So this module contains several entities helping with common logging problems that we faced during the development. 
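The `os.pipe()` plus reader-thread trick used by `LogPipe` can be exercised in isolation like this (a bare-bones sketch without the accumulation and context-manager conveniences):

```python
import logging
import os
import subprocess
import sys
import threading

logging.basicConfig(level=logging.DEBUG, format='%(levelname)-8s %(message)s')
logger = logging.getLogger('demo')

read_fd, write_fd = os.pipe()    # writable end goes to the subprocess, readable end to us
reader = os.fdopen(read_fd)

def pump():
    for line in iter(reader.readline, ''):   # '' means the write end has been closed
        logger.debug(line.rstrip('\n'))
    reader.close()

thread = threading.Thread(target=pump)
thread.start()

subprocess.run([sys.executable, '-c', 'print("hello from a subprocess")'],
               stdout=write_fd, stderr=write_fd)
os.close(write_fd)               # unblock readline() so the reader thread can finish
thread.join()
```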
-""" - -import contextlib -import enum -import logging -import os -import threading -import traceback -import warnings -from typing import Any, MutableMapping, Tuple, Mapping, Optional - -from stm32pio.core.config import Config -from stm32pio.core.settings import show_traceback_threshold_level - - -module_logger = logging.getLogger(__name__) # this module logger - -logging_levels = { # for exposing the levels to the GUI - logging.getLevelName(logging.CRITICAL): logging.CRITICAL, - logging.getLevelName(logging.ERROR): logging.ERROR, - logging.getLevelName(logging.WARNING): logging.WARNING, - logging.getLevelName(logging.INFO): logging.INFO, - logging.getLevelName(logging.DEBUG): logging.DEBUG, - logging.getLevelName(logging.NOTSET): logging.NOTSET -} - - -def log_current_exception(logger: logging.Logger, show_traceback: bool = None, config: Config = None) -> None: - """ - Print format is: - - ExceptionName: message - [optional] traceback - - We do not explicitly retrieve an exception info via sys.exc_info() as it immediately stores a reference to the - current Python frame and/or variables causing some possible weird errors (objects are not GC'ed) and memory leaks. - See https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/ for more information. - - Args: - logger: the logging.Logger (or compatible) instance to use - show_traceback: whether print the traceback or not. Ignored if the config is given (will output it there anyway) - config: stm32pio Config instance to save. The traceback will be written to its corresponding INI file - - Returns: - None - """ - - if show_traceback is None: - show_traceback = logger.isEnabledFor(show_traceback_threshold_level) - - exc_full_str = traceback.format_exc() - exc_str = exc_full_str.splitlines()[-1] - if exc_str.startswith('Exception') and not show_traceback: - exc_str = exc_str[len('Exception: '):] # meaningless information - exc_tb = ''.join(exc_full_str.splitlines(keepends=True)[:-1]) - - if config is not None: - logger.error(exc_str) - retcode = config.save({'project': {'last_error': f"{exc_str}\n{exc_tb}"}}) - if retcode == 0: - logger.info(f"Traceback has been saved to the {config.path.name}. It will be cleared on the next " - "successful run") - else: - logger.warning(f"Traceback has not been saved to the {config.path.name}") - else: - if show_traceback: - logger.error(f"{exc_str}\n{exc_tb}") - else: - logger.error(exc_str) - - -class ProjectLoggerAdapter(logging.LoggerAdapter): - """ - Use this as a logger for every project: - - project.logger = stm32pio.util.ProjectLoggerAdapter(logging.getLogger('some_singleton_logger_for_all_projects'), - { 'project_id': id(project) }) - - It will automatically mix in 'project_id' (and any other property) to every LogRecord (whether you supply 'extra' in - your log call or not) - """ - def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> Tuple[Any, MutableMapping[str, Any]]: - """Inject context data (both from the adapter and the log call)""" - if 'extra' in kwargs: - kwargs['extra'].update(self.extra) - else: - kwargs['extra'] = self.extra - return msg, kwargs - - -# Currently available verbosity levels. 
Verbosity determines how every LogRecord will be formatted (regardless its -# logging level) -@enum.unique -class Verbosity(enum.IntEnum): - NORMAL = enum.auto() - VERBOSE = enum.auto() - - -# Do not add or remove any information from the message and simply pass it "as-is" -as_is_formatter = logging.Formatter('%(message)s') - - -class DispatchingFormatter(logging.Formatter): - """ - The wrapper around the ordinary logging.Formatter allowing to have multiple formatters for different purposes. - General arguments schema: - - { - verbosity=Verbosity.NORMAL, - general={ - Verbosity.NORMAL: logging.Formatter(...) - Verbosity.VERBOSE: logging.Formatter(...) - ... - }, - special={ - 'case_1': { - Verbosity.NORMAL: logging.Formatter(...) - ... - }, - ... - } - } - """ - - # Mapping of logging formatters for "special". Currently, only "from_subprocess" is defined. It's good to hide such - # implementation details as much as possible though they are still tweakable from the outer code - special_formatters = { - 'from_subprocess': { - level: as_is_formatter for level in Verbosity - } - } - - def __init__(self, *args, general: Mapping[Verbosity, logging.Formatter] = None, - special: Mapping[str, Mapping[Verbosity, logging.Formatter]] = None, - verbosity: Verbosity = Verbosity.NORMAL, **kwargs): - - super().__init__(*args, **kwargs) # will be '%(message)s' if no arguments were given - - self.verbosity = verbosity - self._warn_was_shown = False - - if general is not None: - self.general = general - else: - warnings.warn("'general' argument for DispatchingFormatter was not provided. It contains formatters for " - "all logging events except special ones and should be a dict with verbosity levels keys and " - "logging.Formatter values") - self.general = {} - - if special is not None: - self.special = special - else: - self.special = DispatchingFormatter.special_formatters # use defaults - - def find_formatter_for(self, record: logging.LogRecord, verbosity: Verbosity) -> Optional[logging.Formatter]: - """Determine and return the appropriate formatter""" - special_formatter = next((self.special[case] for case in self.special.keys() if hasattr(record, case)), None) - if special_formatter is not None: - return special_formatter.get(verbosity) - else: - return self.general.get(verbosity) - - def format(self, record: logging.LogRecord) -> str: - """ - Overridden method. Allows to specify a verbosity level on the per-record basis, not only globally - """ - formatter = self.find_formatter_for(record, - record.verbosity if hasattr(record, 'verbosity') else self.verbosity) - if formatter is not None: - return formatter.format(record) - else: - if not self._warn_was_shown: - self._warn_was_shown = True - module_logger.warning("No formatter found, use default one hereinafter") - return super().format(record) - - -class LogPipeRC: - """Small class suitable for passing to the caller when the LogPipe context manager is invoked""" - value = '' # string accumulating all incoming messages - - def __init__(self, fd: int): - self.pipe = fd # writable half of os.pipe - - -class LogPipe(threading.Thread, contextlib.AbstractContextManager): - """ - The thread combined with a context manager to provide a nice way to temporarily redirect something's stream output - into the logging module. One straightforward application is to suppress subprocess STDOUT and/or STDERR streams and - wrap them into the logging mechanism as it is now for any other message in your app. 
Also, store the incoming - messages in the string for using it after an execution - """ - - def __init__(self, logger: logging.Logger, level: int, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.logger = logger - self.level = level - - self.fd_read, self.fd_write = os.pipe() # create 2 ends of the pipe and setup the reading one - self.pipe_reader = os.fdopen(self.fd_read) - - self.rc = LogPipeRC(self.fd_write) # RC stands for "remote control" - - def __enter__(self) -> LogPipeRC: - """ - Activate the thread and return the consuming end of the pipe so the invoking code can use it to feed its - messages from now on - """ - self.start() - return self.rc - - def run(self): - """ - Routine of the thread, logging everything - """ - for line in iter(self.pipe_reader.readline, ''): # stops the iterator when empty string will occur - self.rc.value += line # accumulate the string - self.logger.log(self.level, line.strip('\n'), extra={'from_subprocess': True}) # mark the message origin - self.pipe_reader.close() - - def __exit__(self, exc_type, exc_val, exc_tb): - """ - The exception will be passed forward, if present, so we don't need to do something with that. The following - tear-down process will be done anyway - """ - os.close(self.fd_write) diff --git a/stm32pio/core/pio.py b/stm32pio/core/pio.py new file mode 100644 index 0000000..0f9d0bf --- /dev/null +++ b/stm32pio/core/pio.py @@ -0,0 +1,250 @@ +""" +This module encapsulates logic needed by stm32pio to interact with PlatformIO CLI. It is called ``pio.py`` to prevent +possible import confusions with the real ``platformio.py`` package. +""" + +import json +import logging +import subprocess +import configparser +from copy import copy +from io import StringIO +from pathlib import Path +from time import time +from typing import List + +import stm32pio.core.log +import stm32pio.core.settings +import stm32pio.core.util + + +class PlatformioINI(configparser.ConfigParser): + """ + ``platformio.ini`` file is a generic INI-style config and can be parsed using builtin ``configparser`` module. The + real capabilities of this file implemented by PlatformIO is very sophisticated but for our purposes it is enough to + use just a basic ``ConfigParser``. This class is intended to be used as a part of ``PlatformIO`` class. + """ + + header = '' + patch_config_exception = None + + def __init__(self, path: Path, patch_content: str, logger: stm32pio.core.log.Logger): + """ + Majority of properties might become invalid if will be changed after construction so they are intended to be set + "once and for all" at the construction stage. In case they should be dynamic, one should reimplement them as + ``@property`` with proper getters/setters/etc. + + :param path: path to the platformio.ini file. It will NOT be read on initialization but lazy evaluated during + requested operations + :param patch_content: INI-style string that should be merged with the platformio.ini file + :param logger: logging.Logger-compatible object + """ + self.logger = logger + self.path = path + try: + self.patch_config = configparser.ConfigParser(interpolation=None) + self.patch_config.read_string(patch_content) + except Exception as e: + self.patch_config = None + self.patch_config_exception = e + super().__init__(interpolation=None) + + def sync(self) -> int: + """ + Clean itself and re-read the config from self.path. 
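Both `IocConfig.save()` and `PlatformioINI` above rely on `stm32pio.core.util.extract_header_comment`, which is not part of this diff. Presumably it collects the leading comment lines of a file, roughly like the hypothetical sketch below (the real helper may behave differently):

```python
def extract_header_comment(content: str, comment_symbol: str) -> str:
    """Hypothetical reimplementation: keep the leading run of comment lines (the file 'header')."""
    header = ''
    for line in content.splitlines(keepends=True):
        if line.startswith(comment_symbol):
            header += line
        else:
            break
    return header

print(extract_header_comment('; PlatformIO Project Configuration File\n;\n[env]\n', comment_symbol=';'))
```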
Store first N consecutive lines starting with ; as a header + + :return: number of sections after readout (excluding DEFAULT) + """ + for section in self.sections(): + self.remove_section(section) + content = self.path.read_text() + self.read_string(content) + if not self.header: + self.header = stm32pio.core.util.extract_header_comment(content, comment_symbol=';') + return len(self.sections()) + + @property + def is_initialized(self) -> bool: + """Config considered to be initialized when the file is present, correct and not empty""" + return self.sync() > 0 + + @property + def is_patched(self) -> bool: + """The config is patched when it contains all pairs from a given earlier patch""" + + if self.patch_config_exception is not None: + raise Exception("Cannot determine is project patched: desired patch content is invalid (should satisfy " + "INI-format requirements)") from self.patch_config_exception + + try: + if not self.is_initialized: + self.logger.warning(f"{self.path.name} file is empty") + return False + except FileNotFoundError as e: + raise Exception(f"Cannot determine is project patched: {self.path.name} file not found") from e + except Exception as e: + raise Exception(f"Cannot determine is project patched: {self.path.name} file is incorrect") from e + + for patch_section in self.patch_config.sections(): + if self.has_section(patch_section): + for patch_key, patch_value in self.patch_config.items(patch_section): + platformio_ini_value = self.get(patch_section, patch_key, fallback=None) + if platformio_ini_value != patch_value: + self.logger.debug(f"[{patch_section}]{patch_key}: patch value is\n {patch_value}\nbut " + f"{self.path.name} contains\n {platformio_ini_value}") + return False + else: + self.logger.debug(f"{self.path.name} has no '{patch_section}' section") + return False + return True + + def patch(self) -> None: + """ + Apply a given earlier patch. This will try to restore the initial platformio.ini header but all other comments + throughout the file will be lost + """ + + if self.is_patched: + self.logger.info(f"{self.path.name} has been already patched") + else: + self.logger.debug(f"patching {self.path.name} file...") + + for patch_section in self.patch_config.sections(): # merge 2 configs + if not self.has_section(patch_section): + self.logger.debug(f"[{patch_section}] section was added") + self.add_section(patch_section) + for patch_key, patch_value in self.patch_config.items(patch_section): + self.logger.debug(f"set [{patch_section}]{patch_key} = {patch_value}") + self.set(patch_section, patch_key, patch_value) + + fake_file = StringIO() + self.write(fake_file) + config_text = fake_file.getvalue() + self.path.write_text( + ((self.header + '\n') if self.header else '') + # restore a header + config_text[:-1]) # omit trailing \n + fake_file.close() + self.logger.debug(f"{self.path.name} has been patched") + + +class PlatformIO: + """ + Interface to execute some [related to application] PlatformIO CLI commands. It also creates a PlatformioINI instance + so the hierarchy is nice-looking and reflects real objects relations. + """ + + def __init__(self, project_path: Path, exe_cmd: str, patch_content: str, logger: stm32pio.core.log.Logger): + """ + :param exe_cmd: PlatformIO CLI command or a path to the executable. This shouldn't be an arbitrary shell command + :param project_path: Project folder. 
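The `is_patched` comparison above reduces to "does the base config already contain every pair from the patch". A self-contained illustration follows; the patch values mirror the project's usual include/src redirection but should be treated as examples:

```python
from configparser import ConfigParser

def contains_patch(base: ConfigParser, patch: ConfigParser) -> bool:
    """True if every (section, key, value) from the patch is already present in the base config."""
    return all(base.get(section, key, fallback=None) == value
               for section in patch.sections()
               for key, value in patch.items(section))

base = ConfigParser(interpolation=None)
base.read_string('[env:nucleo_f031k6]\nplatform = ststm32\n')

patch = ConfigParser(interpolation=None)
patch.read_string('[platformio]\ninclude_dir = Inc\nsrc_dir = Src\n')

print(contains_patch(base, patch))   # False: not patched yet
base.read_dict({'platformio': {'include_dir': 'Inc', 'src_dir': 'Src'}})
print(contains_patch(base, patch))   # True
```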
Typically, same as the stm32pio project directory + :param patch_content: INI-style string that should be merged with the platformio.ini file + :param logger: logging.Logger-compatible object + """ + self.project_path = project_path + self.exe_cmd = exe_cmd + self.logger = logger + self.ini = PlatformioINI(project_path / 'platformio.ini', patch_content, logger) + + def init(self, board: str) -> int: + """ + Initialize a new project (can also be safely run on an existing project, too). Actual command: ``platformio + project init``. + + :param board: PlatformIO name of the board (e.g. nucleo_f031k6) + :return: Return code of the executed command + """ + + self.logger.info("starting PlatformIO project initialization...") + + try: + if self.ini.sync(): + self.logger.warning(f"{self.ini.path.name} file already exist") + # else: file is empty – PlatformIO should overwrite it + except FileNotFoundError: + pass # no file – PlatformIO will create it + except configparser.Error: + self.logger.warning(f"{self.ini.path.name} file is incorrect, trying to proceed...") + + command_arr = [self.exe_cmd, 'project', 'init', + '--project-dir', str(self.project_path), + '--board', board, + '--project-option', 'framework=stm32cube', + '--project-option', 'board_build.stm32cube.custom_config_header=yes'] # see #26 + if not self.logger.isEnabledFor(logging.DEBUG): + command_arr.append('--silent') + + process = subprocess.run(command_arr, encoding='utf-8', stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + + error_msg = "PlatformIO project initialization error" + if process.returncode == 0: # PlatformIO returns 0 even on some errors (e.g. no '--board' argument) + if 'error' in process.stdout.lower(): # strictly speaking, here we're just guessing + self.logger.error(process.stdout, from_subprocess=True) + raise Exception(error_msg) + self.logger.debug(process.stdout, from_subprocess=True) + self.logger.info("successful PlatformIO project initialization") + return process.returncode + else: + self.logger.error(f"return code: {process.returncode}. Output:\n\n{process.stdout}", from_subprocess=True) + raise Exception(error_msg) + + def build(self) -> int: + """ + Initiate a build (``platformio run`` command). + + :return: Return code of the executed command + """ + + self.logger.info("starting PlatformIO project build...") + + command_arr = [self.exe_cmd, 'run', '--project-dir', str(self.project_path)] + + log_level = logging.DEBUG + if not self.logger.isEnabledFor(logging.DEBUG): + command_arr.append('--silent') + log_level = logging.WARNING # in silent mode PlatformIO producing only warnings, if any + + with stm32pio.core.log.LogPipe(self.logger, log_level) as log: + process = subprocess.run(command_arr, stdout=log.pipe, stderr=log.pipe) + + if process.returncode == 0: + self.logger.info("successful PlatformIO build") + else: + self.logger.error("PlatformIO build error") + + return process.returncode + + +_pio_boards_cache: List[str] = [] +_pio_boards_cache_fetched_at: float = 0 + + +# TODO: probably some lock should be acquired preventing of more than 1 execution at a time (e.g. from threads) +# Is there some std lib implementation of temp cache? No, look at 3rd-party alternative, just like lru_cache: +# https://github.com/tkem/cachetools +def get_boards(platformio_cmd: str = stm32pio.core.settings.config_default['app']['platformio_cmd']) -> List[str]: + """ + Obtain PlatformIO boards list (string identifiers only). As we interested only in STM32 ones, cut off all of the + others. 
Additionally, establish a short-time "cache" for quick serving of sequential calls. + + IMPORTANT NOTE: PlatformIO can go online from time to time when decided that its own cache is out of + date. So it may take some time to execute on the first run after a long break. + + :param platformio_cmd: path or command of PlatformIO executable + :return: list of STM32 PlatformIO boards codes + """ + + global _pio_boards_cache_fetched_at, _pio_boards_cache + + cache_is_empty = len(_pio_boards_cache) == 0 + current_time = time() + cache_is_outdated = current_time - _pio_boards_cache_fetched_at >= stm32pio.core.settings.pio_boards_cache_lifetime + + if cache_is_empty or cache_is_outdated: + process = subprocess.run([platformio_cmd, 'boards', '--json-output', 'stm32cube'], + stdout=subprocess.PIPE, check=True) + _pio_boards_cache = [board['id'] for board in json.loads(process.stdout)] + _pio_boards_cache_fetched_at = current_time + + # We don't know what a caller will ended up doing with that list. Simple copy is a sufficient solution for us since + # copy(list[string]) basically equals deepcopy(list[string]) as strings are immutable in Python + return copy(_pio_boards_cache) diff --git a/stm32pio/core/project.py b/stm32pio/core/project.py index 686670f..4f3bdd4 100644 --- a/stm32pio/core/project.py +++ b/stm32pio/core/project.py @@ -1,609 +1,211 @@ """ -Core class representing a single stm32pio project. +Core class representing a single STM32CubeMX + PlatformIO project. This interface should be sufficient enough for most +3rd-party applications (CLI, GUI, embedding, ...). """ -import collections -import configparser -import contextlib -import copy import logging +import os import pathlib -import shlex -import shutil -import string -import subprocess -import tempfile import weakref -from typing import Mapping, Any, Union, Tuple +from typing import Mapping, Any, Union -import stm32pio.core.logging -import stm32pio.core.settings +import stm32pio.core.config +import stm32pio.core.clean +import stm32pio.core.cubemx +import stm32pio.core.log +import stm32pio.core.pio +import stm32pio.core.state import stm32pio.core.util import stm32pio.core.validate -import stm32pio.core.config -from stm32pio.core.state import ProjectStage, ProjectState class Stm32pio: """ - Main class. - - Represents a single project, encapsulating a file system path to the project (the primary mandatory identifier) and - some parameters in a configparser .ini file. As stm32pio can be installed via pip and has no global config we also - storing global parameters (such as Java or STM32CubeMX invoking commands) in this config .ini file so the user can - specify settings on a per-project basis. The config can be saved in a non-disturbing way automatically on the - instance destruction (e.g. by garbage collecting it) (use save_on_destruction=True flag), otherwise a user should - explicitly save the config if he wants to (using config.save() method). - - The typical life-cycle consists of the new project creation, passing mandatory 'dirty_path' argument. Optional - 'parameters' dictionary will be merged into the project config. 'instance_options' controls how the runtime entity - will behave in some aspects (logging, destructing). Then it is possible to perform API operations. + Main class reflects a single stm32pio project. Normally, this will automatically set such members for you: + ``logger`` – powerful and flexible tool on top of the builtin ``logging`` module providing to client code all + possible features it might need. 
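The module-level cache in `get_boards()` above is just a timestamp plus a stored list; the pattern in isolation looks like this (the lifetime value here is arbitrary, the real one comes from `stm32pio.core.settings`):

```python
from time import time
from typing import List

_cache: List[str] = []
_fetched_at: float = 0.0
CACHE_LIFETIME = 5.0   # seconds, illustrative only

def fetch_boards() -> List[str]:
    """Stand-in for the 'platformio boards --json-output stm32cube' subprocess call."""
    return ['nucleo_f031k6', 'bluepill_f103c8', 'disco_f407vg']

def get_boards_cached() -> List[str]:
    global _cache, _fetched_at
    if not _cache or time() - _fetched_at >= CACHE_LIFETIME:
        _cache = fetch_boards()
        _fetched_at = time()
    return list(_cache)   # hand out a copy so callers cannot mutate the cached list

print(get_boards_cached())   # first call populates the cache
print(get_boards_cached())   # served from the cache until CACHE_LIFETIME expires
```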
In case this is not enough it can be easily overridden on initialization. + This is your primary source of getting a feedback from the internals about the project state and operations. + Default implementation will use builtin id() procedure creating a unique identifier allowing to distinguish + several projects on logging events + Refer to: ``log.py`` + ``config`` – tweaked ``ConfigParser`` instance containing merged settings from multiple sources. + Refer to: ``config.py`` + ``cubemx``, ``platformio`` – service classes handling requests to corresponding programs when there is a need to. + See ``cubemx.py``, ``pio.py`` + Most of the times you'll ended up using only methods and not touching these attributes manually. Majority of methods + are just thin wrappers around some other module – it helps both in leaner architecture and providing convenient set + of available actions (i.e. user needs to know only the single interface to leverage whole functionality and an + action can be invoked even having only a name of it – by utilizing getattr()) """ - INSTANCE_OPTIONS_DEFAULTS = { # TODO: use Python 3.8 TypedDict (or maybe some other more appropriate feature) - 'save_on_destruction': False, - 'logger': None - } - - # TODO: is instance_options ugly? - def __init__(self, dirty_path: Union[str, pathlib.Path], parameters: Mapping[str, Any] = None, - instance_options: Mapping[str, Any] = None): - """ - Args: - dirty_path: path to the project (required) - parameters: additional project parameters to set on initialization stage (format is same as for project' - config (see settings.py), passed values will be merged) - instance_options: - some parameters related more to the instance itself rather than a project's "business logic": - save_on_destruction (bool=True): register or not the finalizer that saves the config to a file - logger (logging.Logger=None): if an external logger is given, it will be used, otherwise the new - ProjectLoggerAdapter for 'stm32pio.projects' prefix will be created automatically (unique for - every instance) + def __init__(self, path: Union[str, bytes, os.PathLike], parameters: Mapping = None, + save_on_destruction: bool = False, logger: stm32pio.core.log.Logger = None): """ + Minimal requirement for the file system directory to be considered as project is to have a CubeMX .ioc file so + it (or its containing directory) is the primary identifier that should be supplied on initialization. In case of + multiple .ioc files at one folder, the given one or the first available will be picked. 
- if parameters is None: - parameters = {} + :param path: relative or absolute path to the .ioc file or its parent + :param parameters: config-compatible parameters mapping to merge + :param save_on_destruction: if True, the config will be flushed to file automatically on instance destruction + :param logger: override an internal logger + """ - if instance_options is None: - instance_options = copy.copy(Stm32pio.INSTANCE_OPTIONS_DEFAULTS) - else: - # Create a shallow copy of the argument, a mutable mapping, as we probably going to add some pairs to it - instance_options = dict(instance_options) - # Insert missing pairs but do not touch any extra ones if there is any - for key, value in copy.copy(Stm32pio.INSTANCE_OPTIONS_DEFAULTS).items(): - if key not in instance_options: - instance_options[key] = value - - # The individual loggers for every single project allows to fine-tune the output when the multiple projects are - # created by the third-party code - if instance_options['logger'] is not None: - self.logger = instance_options['logger'] + if logger is not None: + self.logger = logger else: + # Individual loggers for every single project allows to fine-tune the output when multiple projects are + # created by some client code. Here we utilize id() for this underlying_logger = logging.getLogger('stm32pio.projects') - self.logger = stm32pio.core.logging.ProjectLoggerAdapter(underlying_logger, {'project_id': id(self)}) - - # The path is a primary entity of the project so we process it first and foremost. Handle 'path/to/proj', - # 'path/to/proj/', '.', '../proj', etc., make the path absolute and check for existence. Also, the .ioc file can - # be specified instead of the directory. In this case it is assumed that the parent path is an actual project - # path and the provided .ioc file is used on a priority basis - path = pathlib.Path(dirty_path).expanduser().resolve(strict=True) - ioc_file = None - if path.is_file() and path.suffix == '.ioc': # if .ioc file was supplied instead of the directory - ioc_file = path - path = path.parent - elif not path.is_dir(): - raise Exception(f"the supplied project path {path} is not a directory. It should be a directory with an " - ".ioc file or an .ioc file itself") - self.path = path - - self.config = stm32pio.core.config.Config(self.path, runtime_parameters=parameters, logger=self.logger) - - self.ioc_file = self._find_ioc_file(explicit_file=ioc_file) - self.config.set('project', 'ioc_file', self.ioc_file.name) # save only the name of file to the config - - if len(self.config.get('project', 'cleanup_ignore', fallback='')) == 0: + self.logger = stm32pio.core.log.ProjectLogger(underlying_logger, project_id=id(self)) + + ioc_or_dir = pathlib.Path(path).expanduser().resolve(strict=True) + explicit_ioc_file_name = None + if ioc_or_dir.is_file() and ioc_or_dir.suffix == '.ioc': # if .ioc file was supplied instead of the directory + explicit_ioc_file_name = ioc_or_dir.name + ioc_or_dir = ioc_or_dir.parent + self.logger.debug(f"explicit '{explicit_ioc_file_name}' file provided") + elif not ioc_or_dir.is_dir(): + raise ValueError(f"project path '{ioc_or_dir}' is incorrect. 
It should be a directory with an .ioc file or" + "an .ioc file itself") + self.path = ioc_or_dir + + self.config = stm32pio.core.config.ProjectConfig(self.path, self.logger, runtime_parameters=parameters) + + self.cubemx = stm32pio.core.cubemx.CubeMX( + work_dir=self.path, + ioc_file_name=explicit_ioc_file_name or self.config.get('project', 'ioc_file'), + exe_cmd=self.config.get('app', 'cubemx_cmd'), + java_cmd=self.config.get('app', 'java_cmd'), + logger=self.logger + ) + self.config.set('project', 'ioc_file', self.cubemx.ioc.path.name) # save only the name of file to the config + + self.platformio = stm32pio.core.pio.PlatformIO( + project_path=self.path, + exe_cmd=self.config.get('app', 'platformio_cmd'), + patch_content=self.config.get('project', 'platformio_ini_patch_content'), + logger=self.logger + ) + + if not len(self.config.get('project', 'cleanup_ignore', fallback='')): # By-default, we preserve only the .ioc file on cleanup - self.config.set('project', 'cleanup_ignore', self.ioc_file.name) + self.config.set('project', 'cleanup_ignore', self.cubemx.ioc.path.name) if len(self.config.get('project', 'last_error', fallback='')): self.config.set('project', 'last_error', '') # reset last error self.config.save() - # Put away unnecessary processing as the string still will be formed even if the logging level doesn't allow a - # propagation of this message if self.logger.isEnabledFor(logging.DEBUG): self.logger.debug(f"resolved config:\n{self.config}") - # Save the config on an instance destruction - if instance_options['save_on_destruction']: + if save_on_destruction: self._finalizer = weakref.finalize(self, self.config.save) - - def __repr__(self): - """String representation of the project (use an absolute path for this)""" + def __repr__(self) -> str: + """Short string representation of the project – just use an absolute path""" return f"Stm32pio project: {self.path}" - @property - def state(self) -> ProjectState: - """Constructing and returning the current state of the project (tweaked dict, see ProjectState docs)""" - - pio_is_initialized = False - with contextlib.suppress(Exception): # we just want to know the status and don't care about the details - # Is present, is correct and is not empty - pio_is_initialized = len(self.platformio_ini_config.sections()) != 0 - - platformio_ini_is_patched = False - if pio_is_initialized: # make no sense to proceed if there is something happened in the first place - with contextlib.suppress(Exception): # we just want to know the status and don't care about the details - platformio_ini_is_patched = self.platformio_ini_is_patched - - inc_dir = self.path / 'Inc' - src_dir = self.path / 'Src' - include_dir = self.path / 'include' - pio_dir = self.path / '.pio' - - # Create the temporary ordered dictionary and fill it with the conditions results arrays - stages_conditions = collections.OrderedDict() - stages_conditions[ProjectStage.UNDEFINED] = [True] - stages_conditions[ProjectStage.EMPTY] = [self.ioc_file.is_file()] - stages_conditions[ProjectStage.INITIALIZED] = [self.config.path.is_file()] - stages_conditions[ProjectStage.GENERATED] = [inc_dir.is_dir() and len(list(inc_dir.iterdir())) > 0, - src_dir.is_dir() and len(list(src_dir.iterdir())) > 0] - stages_conditions[ProjectStage.PIO_INITIALIZED] = [pio_is_initialized] - stages_conditions[ProjectStage.PATCHED] = [platformio_ini_is_patched, not include_dir.is_dir()] - # Hidden folder. 
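Put together, the refactored class keeps the old high-level workflow. A typical client-code session might look like the sketch below; the path and board name are hypothetical, and every step still requires CubeMX/PlatformIO to be installed and reachable:

```python
import logging
from stm32pio.core.project import Stm32pio

logging.basicConfig(level=logging.INFO)

project = Stm32pio('~/projects/fan_controller/fan_controller.ioc',
                   parameters={'project': {'board': 'nucleo_f031k6'}})

print(project.state)      # mapping of project stages to booleans
project.generate_code()   # CubeMX: .ioc -> HAL code
project.pio_init()        # PlatformIO: create platformio.ini and friends
project.patch()           # make both structures agree (platformio.ini patch, folders cleanup)
project.build()           # platformio run
project.save_config()     # flush stm32pio.ini explicitly (or pass save_on_destruction=True)
```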
Can be not visible in your file manager and cause a confusion - stages_conditions[ProjectStage.BUILT] = [pio_dir.is_dir() and - any([item.is_file() for item in pio_dir.rglob('*firmware*')])] - - # Fold arrays and save results in ProjectState instance - conditions_results = ProjectState() - for state, conditions in stages_conditions.items(): - conditions_results[state] = all(condition is True for condition in conditions) - - return conditions_results - - - def _find_ioc_file(self, explicit_file: pathlib.Path = None) -> pathlib.Path: - """ - Find, check (that this is a non-empty text file) and return an .ioc file. If there are more than one - return - first. - - Args: - explicit_file: if provided, just check it and return, no search will be performed - - Returns: - absolute path to the .ioc file - - Raises: - FileNotFoundError: no .ioc file is present - ValueError: .ioc file is empty - """ - - # 1. If explicit file was provided use it - if explicit_file is not None: - self.logger.debug(f"using explicitly provided '{explicit_file.name}' file") - result_file = explicit_file - - else: - # 2. Check the value from the config file - ioc_file = self.config.get('project', 'ioc_file', fallback=None) # TODO: Python 3.8 walrus operator (elif ...) - if ioc_file: - ioc_file = self.path.joinpath(ioc_file).resolve(strict=True) - self.logger.debug(f"using '{ioc_file.name}' file from the INI config") - result_file = ioc_file - - # 3. Otherwise search for an appropriate file by yourself - else: - self.logger.debug("searching for any .ioc file...") - candidates = list(self.path.glob('*.ioc')) - if len(candidates) == 0: # TODO: good candidate for the new Python 3.8 assignment expression feature :) - raise FileNotFoundError("CubeMX project .ioc file") - elif len(candidates) == 1: - self.logger.debug(f"'{candidates[0].name}' is selected") - result_file = candidates[0] - else: - self.logger.warning(f"there are multiple .ioc files, '{candidates[0].name}' is selected") - result_file = candidates[0] - - # Check for the file correctness - try: - content = result_file.read_text() # should be a text file - if len(content) == 0: - raise ValueError("the file is empty") - return result_file - except Exception as e: - raise Exception(f"{result_file.name} is incorrect") from e - + def state(self) -> 'stm32pio.core.state.ProjectState': + """Getter for the read-only ``state`` attribute. Evaluate, construct and return the project state""" + return stm32pio.core.state.ProjectState(self) def save_config(self, parameters: Mapping[str, Mapping[str, Any]] = None) -> int: """ - Pass the call to the config instance. This method exist primarily for the consistency in available project - actions. - """ - return self.config.save(parameters) - + Flush the config to its associated file. - def _cubemx_execute_script(self, script_content: str) -> Tuple[subprocess.CompletedProcess, str]: + :param parameters: mapping to merge into the result + :return: 0 on success, -1 otherwise """ - Call the STM32CubeMX app as 'java -jar' or directly to generate a code from the .ioc file. Pass the commands in - a temp file. 
- - Returns: - A tuple consisting of the subprocess.CompletedProcess and the full CubeMX output (both stdout and stderr - combined) - """ - - # Use mkstemp() instead of the higher-level API for the compatibility with the Windows (see tempfile docs for - # more details) - cubemx_script_file, cubemx_script_name = tempfile.mkstemp() - - # We must remove the temp directory, so do not let any exception break our plans - try: - # buffering=0 leads to the immediate flushing on writing - with open(cubemx_script_file, mode='w+b', buffering=0) as cubemx_script: - cubemx_script.write(script_content.encode()) # should encode, since mode='w+b' - - command_arr = [] - java_cmd = self.config.get('app', 'java_cmd') - # CubeMX can be invoked directly, without a need in Java command - if java_cmd and (java_cmd.lower() not in stm32pio.core.settings.none_options): - command_arr += [java_cmd, '-jar'] - # -q: read the commands from the file, -s: silent performance - command_arr += [self.config.get('app', 'cubemx_cmd'), '-q', cubemx_script_name, '-s'] - # Redirect the output of the subprocess into the logging module (with DEBUG level) - with stm32pio.core.logging.LogPipe(self.logger, logging.DEBUG) as log: - completed_process = subprocess.run(command_arr, stdout=log.pipe, stderr=log.pipe) - std_output = log.value - - except Exception as e: - raise e # re-raise an exception after the 'finally' block - else: - return completed_process, std_output - finally: - pathlib.Path(cubemx_script_name).unlink() - + return self.config.save(parameters) def generate_code(self) -> int: """ - Fill in the STM32CubeMX code generation script template from the project config and run it. - - Returns: - completed process return code + Invoke CubeMX with the predefined script. - Raises: - Exception: if the run failed (propagates from the inner call), if the return code is not 0, if any string - indicating error was detected in the process output + :return: subprocess return code """ - - self.logger.info("starting to generate a code from the CubeMX .ioc file...") - - cubemx_script_template = string.Template(self.config.get('project', 'cubemx_script_content')) - # It's important to wrap paths into the quotation marks as they can contain whitespaces - cubemx_script_content = cubemx_script_template.substitute(ioc_file_absolute_path=f'"{self.ioc_file}"', - project_dir_absolute_path=f'"{self.path}"') - completed_process, std_output = self._cubemx_execute_script(cubemx_script_content) - - error_msg = "code generation error" - if completed_process.returncode == 0: - if stm32pio.core.settings.cubemx_str_indicating_success in std_output: - self.logger.info("successful code generation") - return completed_process.returncode - else: # guessing - error_lines = [line for line in std_output.splitlines(keepends=True) - if stm32pio.core.settings.cubemx_str_indicating_error in line] - if len(error_lines): - self.logger.error(error_lines, extra={ 'from_subprocess': True }) - raise Exception(error_msg) - else: - self.logger.warning("Undefined result from the CubeMX (neither error or success symptoms were " - "found in the logs). Keep going but there might be an error") - return completed_process.returncode - else: - # Most likely the 'java' error (e.g. 
no CubeMX is present) - self.logger.error(f"Return code is {completed_process.returncode}", extra={ 'from_subprocess': True }) - if not self.logger.isEnabledFor(logging.DEBUG): - # In DEBUG mode the output has already been printed - self.logger.error(f"Output:\n{std_output}", extra={ 'from_subprocess': True }) - raise Exception(error_msg) - + return self.cubemx.generate_code(script_template=self.config.get('project', 'cubemx_script_content')) def pio_init(self) -> int: """ - Call PlatformIO CLI to initialize a new project. It uses parameters (path, board) collected earlier so the - confirmation about data presence is lying on the invoking code. + Invoke PlatformIO CLI to setup a new project with appropriate parameters. - Returns: - return code of the PlatformIO - - Raises: - Exception: if the return code of subprocess is not 0 + :return: subprocess return code """ - - self.logger.info("starting PlatformIO project initialization...") - - try: - if len(self.platformio_ini_config.sections()): - self.logger.warning("'platformio.ini' file already exist") - # else: file is empty (PlatformIO should overwrite it) - except FileNotFoundError: - pass # no file - except Exception: - self.logger.warning("'platformio.ini' file is already exist and incorrect") - - command_arr = [self.config.get('app', 'platformio_cmd'), 'project', 'init', - '--project-dir', str(self.path), - '--board', self.config.get('project', 'board'), - '--project-option', 'framework=stm32cube'] - if not self.logger.isEnabledFor(logging.DEBUG): - command_arr.append('--silent') - - completed_process = subprocess.run(command_arr, encoding='utf-8', - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - - error_msg = "PlatformIO project initialization error" - if completed_process.returncode == 0: - # PlatformIO returns 0 even on some errors (e.g. no '--board' argument) - if 'error' in completed_process.stdout.lower(): # guessing - self.logger.error(completed_process.stdout, extra={ 'from_subprocess': True }) - raise Exception(error_msg) - self.logger.debug(completed_process.stdout, extra={ 'from_subprocess': True }) - self.logger.info("successful PlatformIO project initialization") - return completed_process.returncode - else: - self.logger.error(f"Return code is {completed_process.returncode}. Output:\n\n{completed_process.stdout}", - extra={ 'from_subprocess': True }) - raise Exception(error_msg) - - - @property - def platformio_ini_config(self) -> configparser.ConfigParser: - """ - Reads and parses the 'platformio.ini' PlatformIO config file into a newly created configparser.ConfigParser - instance. It doesn't use any interpolation as we aren't interested in the particular values, just a presence and - correctness. Note, that the file may change over the time and subsequent calls may produce different results - because of this. - - Raises: - FileNotFoundError if no 'platformio.ini' file is present. Passes out all other exceptions, most likely - caused by parsing errors (i.e. corrupted .INI format), e.g. - - configparser.MissingSectionHeaderError: File contains no section headers. - """ - - platformio_ini = configparser.ConfigParser(interpolation=None) - platformio_ini.read(self.path.joinpath('platformio.ini').resolve(strict=True)) - return platformio_ini - - - @property - def platformio_ini_is_patched(self) -> bool: - """ - Check whether 'platformio.ini' config file is patched or not. It doesn't check for complete project patching - (e.g. unnecessary folders deletion). 
- - Returns: - boolean indicating a result - - Raises: - throws errors on non-existing file and on incorrect patch/file - """ - - try: - platformio_ini = self.platformio_ini_config # existing .ini file - except FileNotFoundError as e: - raise Exception("Cannot determine is project patched: 'platformio.ini' file not found") from e - except Exception as e: - raise Exception("Cannot determine is project patched: 'platformio.ini' file is incorrect") from e - - patch_config = configparser.ConfigParser(interpolation=None) # our patch has the INI config format, too - try: - patch_config.read_string(self.config.get('project', 'platformio_ini_patch_content')) - except Exception as e: - raise Exception("Cannot determine is project patched: desired patch content is invalid (should satisfy " - "INI-format requirements)") from e - - for patch_section in patch_config.sections(): - if platformio_ini.has_section(patch_section): - for patch_key, patch_value in patch_config.items(patch_section): - platformio_ini_value = platformio_ini.get(patch_section, patch_key, fallback=None) - if platformio_ini_value != patch_value: - self.logger.debug(f"[{patch_section}]{patch_key}: patch value is\n {patch_value}\nbut " - f"platformio.ini contains\n {platformio_ini_value}") - return False - else: - self.logger.debug(f"platformio.ini has no '{patch_section}' section") - return False - return True - + return self.platformio.init(board=self.config.get('project', 'board')) def patch(self) -> None: """ - Patch the 'platformio.ini' config file with a user's patch. By default, it sets the created earlier (by CubeMX - 'Src' and 'Inc') folders as build sources for PlatformIO specifying it in the [platformio] INI section. - configparser doesn't preserve any comments unfortunately so keep in mind that all of them will be lost at this - point. Also, the order may be violated. In the end, removes these old empty folders. + Tweak resources in the way PlatformIO will understand the CubeMX project structure: + - merge platformio.ini with provided patch config + - remove default directories + **Note:** this operation does not preserve comments both from platformio.ini an patch content so make sure + you've saved all meaningful information somewhere else. Also, the order may be left violated. """ - if self.platformio_ini_is_patched: - self.logger.info("'platformio.ini' has been already patched") - else: - self.logger.debug("patching 'platformio.ini' file...") - - platformio_ini_config = self.platformio_ini_config # existing .ini file - - patch_config = configparser.ConfigParser(interpolation=None) # our patch has the INI config format, too - patch_config.read_string(self.config.get('project', 'platformio_ini_patch_content')) - - # Merge 2 configs - for patch_section in patch_config.sections(): - if not platformio_ini_config.has_section(patch_section): - self.logger.debug(f"[{patch_section}] section was added") - platformio_ini_config.add_section(patch_section) - for patch_key, patch_value in patch_config.items(patch_section): - self.logger.debug(f"set [{patch_section}]{patch_key} = {patch_value}") - platformio_ini_config.set(patch_section, patch_key, patch_value) - - # Save, overwriting (mode='w') the original file (deletes all comments!) 
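
For reference, the merge being retired here (now hidden behind `platformio.ini.patch()`) amounts to folding one `configparser` object into another and writing the result back; a minimal standalone sketch of the same idea — the file path and patch text below are illustrative:

```python
import configparser
from pathlib import Path

PATCH = """
[platformio]
include_dir = Inc
src_dir = Src
"""


def merge_ini_patch(ini_path: Path, patch_content: str = PATCH) -> None:
    """Fold an INI-style patch into an existing INI file (configparser drops comments on write)."""
    config = configparser.ConfigParser(interpolation=None)
    config.read(ini_path)  # a missing file is silently treated as empty

    patch = configparser.ConfigParser(interpolation=None)
    patch.read_string(patch_content)

    for section in patch.sections():
        if not config.has_section(section):
            config.add_section(section)
        for key, value in patch.items(section):
            config.set(section, key, value)

    with ini_path.open('w') as ini_file:
        config.write(ini_file)  # full rewrite: comments and the original ordering are lost
```

Since `configparser` re-serializes the whole file, comments and the original key order are not preserved — exactly what the updated `patch()` docstring warns about.
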
- with self.path.joinpath('platformio.ini').open(mode='w') as platformio_ini_file: - platformio_ini_config.write(platformio_ini_file) - self.logger.debug("'platformio.ini' has been patched") - - try: - shutil.rmtree(self.path.joinpath('include')) - self.logger.debug("'include' folder has been removed") - except Exception: - self.logger.info("cannot delete 'include' folder", - exc_info=self.logger.isEnabledFor(stm32pio.core.settings.show_traceback_threshold_level)) - - # Remove 'src' directory too but on case-sensitive file systems 'Src' == 'src' == 'SRC' so we need to check + self.platformio.ini.patch() + + stm32pio.core.util.remove_folder(self.path / 'include', logger=self.logger) + # Remove 'src' directory as well but on case-sensitive file systems 'Src' == 'src' == 'SRC' so we need to check if not self.path.joinpath('SRC').is_dir(): - try: - shutil.rmtree(self.path.joinpath('src')) - self.logger.debug("'src' folder has been removed") - except Exception: - self.logger.info("cannot delete 'src' folder", exc_info= - self.logger.isEnabledFor(stm32pio.core.settings.show_traceback_threshold_level)) + stm32pio.core.util.remove_folder(self.path / 'src', logger=self.logger) self.logger.info("project has been patched") - def build(self) -> int: """ - Initiate a build by the PlatformIO ('platformio run' command) + Initiate PlatformIO build attempt (``platformio run`` command). - Returns: - passes a return code of the PlatformIO + :return: subprocess return code """ - - self.logger.info("starting PlatformIO project build...") - - command_arr = [self.config.get('app', 'platformio_cmd'), 'run', '--project-dir', str(self.path)] - if not self.logger.isEnabledFor(logging.DEBUG): - command_arr.append('--silent') - - # In the non-verbose mode (logging.INFO) there would be a '--silent' option so if the PlatformIO will decide to - # output something then it's really important and we use logging.WARNING as a level - log_level = logging.DEBUG if self.logger.isEnabledFor(logging.DEBUG) else logging.WARNING - with stm32pio.core.logging.LogPipe(self.logger, log_level) as log: - completed_process = subprocess.run(command_arr, stdout=log.pipe, stderr=log.pipe) - - if completed_process.returncode == 0: - self.logger.info("successful PlatformIO build") - else: - self.logger.error("PlatformIO build error") - return completed_process.returncode - + return self.platformio.build() def start_editor(self, editor_command: str) -> int: """ - Start the editor specified by the 'editor_command' with a project directory opened (assuming that - $ [editor] [folder] - syntax just works) + Execute a simple command line instruction to launch the editor. - Args: - editor_command: editor command as you start it in the terminal - - Returns: - passes a return code of the command + :param editor_command: how do you start your editor? Passing options is allowed + :return: subprocess return code """ + return stm32pio.core.util.run_command(editor_command, self.path, self.logger) - sanitized_input = shlex.quote(editor_command) - - self.logger.info(f"starting an editor '{sanitized_input}'...") - try: - with stm32pio.core.logging.LogPipe(self.logger, logging.DEBUG) as log: - # Works unstable on some Windows 7 systems, but correct on Win10... 
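
The editor launch above (in both the old inline version and the new `util.run_command`-based one) expands to a plain `$ [editor] [project-dir]` shell call, so any command you would normally type in a terminal works; a tiny usage sketch — the path and the `code` command are just examples:

```python
from pathlib import Path

import stm32pio.core.project

project = stm32pio.core.project.Stm32pio(Path('some-project'))  # illustrative path, must contain an .ioc file
project.start_editor('code')  # 'code' (VS Code) is only an example; 'subl', 'vim', a full path, etc. work too
```
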
- # result = subprocess.run([editor_command, self.path], check=True) - completed_process = subprocess.run(f'{sanitized_input} "{self.path}"', shell=True, check=True, - stdout=log.pipe, stderr=log.pipe) - self.logger.debug(completed_process.stdout, extra={ 'from_subprocess': True }) - - return completed_process.returncode - except subprocess.CalledProcessError as e: - self.logger.error(f"failed to start the editor '{sanitized_input}': {e.stdout}") - return e.returncode - - - def clean(self, quiet_on_cli: bool = True) -> None: + def clean(self, quiet: bool = True) -> None: """ - Clean-up the project folder. The method uses whether its own algorithm or can delegate the task to the git (`git - clean` command). This behavior is controlled by the project config's `cleanup_use_git` parameter. Note that the - results may not be as you initially expected with `git clean`, refer to its docs for clarification if necessary. - For example, with a fresh new repository you actually need to run `git add --all` first, otherwise nothing will - be removed by the git. - - Args: - quiet_on_cli: should the function ask a user (on CLI, currently) before actually removing any file/folder + Clean-up a project folder. The method uses whether its own algorithm or can delegate the task to git (``git + clean`` command). This behavior is controlled by project config's ``cleanup_use_git`` option. Note that results + may not be as you initially expected with ``git clean``, refer to its docs for clarification. For example, with + a fresh new repository given, you actually need to run ``git add --all`` first, otherwise nothing will be + removed by git. + + :param quiet: should we ask a user (on CLI only, currently) before actually removing any file/folder """ if self.config.getboolean('project', 'cleanup_use_git', fallback=False): self.logger.info("'cleanup_use_git' option is true, git will be used to perform the cleanup...") - # Remove files listed in .gitignore - args = ['git', 'clean', '-d', '--force', '-X'] - if not quiet_on_cli: - args.append('--interactive') - if not self.logger.isEnabledFor(logging.DEBUG): - args.append('--quiet') - with stm32pio.core.logging.LogPipe(self.logger, logging.INFO) as log: - # TODO: str(self.path) - 3.6 compatibility - subprocess.run(args, check=True, cwd=str(self.path), stdout=log.pipe, stderr=log.pipe) - self.logger.info("Done", extra={ 'from_subprocess': True }) + worker = stm32pio.core.clean.GitStrategyI(self.path, self.logger, ask_confirmation=not quiet) else: - removal_list = stm32pio.core.util.get_folder_contents( - self.path, ignore_list=self.config.get_ignore_list('project', 'cleanup_ignore')) - if len(removal_list): - if not quiet_on_cli: - removal_str = '\n'.join(f' {path.relative_to(self.path)}' for path in removal_list) - while True: - reply = input(f"These files/folders will be deleted:\n{removal_str}\nAre you sure? 
(y/n) ") - if reply.lower() in stm32pio.core.settings.yes_options: - break - elif reply.lower() in stm32pio.core.settings.no_options: - return - - for entry in removal_list: - if entry.is_dir(): - shutil.rmtree(entry) # use shutil.rmtree() to delete non-empty directories - self.logger.debug(f'del "{entry.relative_to(self.path)}"/') - elif entry.is_file(): - entry.unlink() - self.logger.debug(f'del "{entry.relative_to(self.path)}"') - self.logger.info("project has been cleaned") - else: - self.logger.info("no files/folders to remove") + worker = stm32pio.core.clean.DefaultStrategyI( + self.path, self.logger, ask_confirmation=not quiet, + ignore_list=self.config.get_ignore_list('project', 'cleanup_ignore')) + + worker.clean() + + def inspect_ioc_config(self) -> None: + """Check the current .ioc configuration and PlatformIO compatibility""" + platformio_mcu = None + env_section = next((section for section in self.platformio.ini.sections() if 'env' in section), None) + if env_section is not None: + platformio_mcu = self.platformio.ini.get(env_section, 'board_build.mcu', fallback=None) + + self.cubemx.ioc.inspect(platformio_board=self.config.get('project', 'board'), platformio_mcu=platformio_mcu) def validate_environment(self) -> stm32pio.core.validate.ToolsValidationResults: - """Verify tools specified in the 'app' section of the current configuration""" - - def java_runner(java_cmd): - with stm32pio.core.logging.LogPipe(self.logger, logging.DEBUG) as log: - completed_process = subprocess.run([java_cmd, '-version'], stdout=log.pipe, stderr=log.pipe) - std_output = log.value - return completed_process, std_output - - def cubemx_runner(_): - return self._cubemx_execute_script('exit\n') # just start and exit - - def platformio_runner(platformio_cmd): - with stm32pio.core.logging.LogPipe(self.logger, logging.DEBUG) as log: - completed_process = subprocess.run([platformio_cmd], stdout=log.pipe, stderr=log.pipe) - std_output = log.value - return completed_process, std_output - - if not self.config.path.exists(): - self.logger.warning("config file not found. Validation will be performed against the runtime configuration") - - return stm32pio.core.validate.ToolsValidationResults( - stm32pio.core.validate.ToolValidator( - param, - self.config.get('app', param), - runner, - required, - self.logger - ).validate() for param, runner, required in [ - ('java_cmd', java_runner, False), - ('cubemx_cmd', cubemx_runner, True), - ('platformio_cmd', platformio_runner, True) - ]) + """ + Verify CLI tools specified in the "app" config section. + + :return: results in the form suitable fpr printing + """ + return stm32pio.core.validate.validate_environment(self.logger, self.config, self.cubemx) diff --git a/stm32pio/core/settings.py b/stm32pio/core/settings.py index 0f0a19d..8d39595 100644 --- a/stm32pio/core/settings.py +++ b/stm32pio/core/settings.py @@ -1,73 +1,81 @@ """ This file provides all kinds of configurable parameters for different application modules. Also, this is a source of the -default project config file stm32pio.ini. None of the variables here should be edited in runtime. +default project config file stm32pio.ini. Bottom part of the file contains some definitions specifically targeting continuous integration environment. They have -no effect on normal or test (local) runs. Probably, they should be removed from the source code entirely and some -another solution need to be prepared. +no effect on normal or test (local) runs. 
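
From calling code, the strategy split in `clean()` above stays transparent — the project config decides which worker is used. A usage sketch (the project path is illustrative, and `cleanup_use_git` can just as well be set in `stm32pio.ini`):

```python
from pathlib import Path

import stm32pio.core.project

project = stm32pio.core.project.Stm32pio(
    Path('some-project'),                                # illustrative path
    parameters={'project': {'cleanup_use_git': 'yes'}})  # or 'no' to use the built-in removal algorithm
project.clean(quiet=False)  # ask for confirmation on the CLI before anything is removed
```
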
""" -import collections import inspect import logging import os -from pathlib import Path import platform +from pathlib import Path my_os = platform.system() config_file_name = 'stm32pio.ini' -config_default = collections.OrderedDict( # guarantees printing to the file in the same order +config_default = dict( + # "app" section is used for listing commands/paths of utilized programs app={ - # How do you start Java from the command line? (edit if Java not in PATH). Can be safely set to 'None' (string) - # if in your setup the CubeMX can be invoked directly - 'java_cmd': 'C:/Program Files/STMicroelectronics/STM32Cube/STM32CubeMX/jre/bin/java.exe' - if my_os == 'Windows' else 'None', - - # How do you start PlatformIO from the command line? (edit if not in PATH, if you use PlatformIO IDE see - # https://docs.platformio.org/en/latest/core/installation.html#piocore-install-shell-commands). + # How do you start the PlatformIO from command line? + # - If you're using PlatformIO IDE see + # https://docs.platformio.org/en/latest/core/installation.html#piocore-install-shell-commands + # - If you're using PlatformIO CLI but it is not available as 'platformio' command, add it to your PATH + # environment variable (refer to OS docs) + # - Or simply specify here a full path to the PlatformIO executable # Note: "python -m platformio" isn't supported yet 'platformio_cmd': 'platformio', - # Trying to guess the STM32CubeMX location. ST actually had changed the installation path several times already. - # It also depends on how do one obtain a distribution archive (logging in on web-site or just downloading by the - # direct link). STM32CubeMX will be invoked as 'java -jar [cubemx_cmd]' + # STM32CubeMX doesn't register itself in PATH so we specify a full path to it. Here are default ones (i.e. when + # you've installed CubeMX on your system) 'cubemx_cmd': # macOS default: 'Applications' folder '/Applications/STMicroelectronics/STM32CubeMX.app/Contents/MacOs/STM32CubeMX' if my_os == 'Darwin' else - # Linux (Ubuntu) default: home directory + # Linux (at least Ubuntu) default: home directory str(Path.home() / 'STM32CubeMX/STM32CubeMX') if my_os == 'Linux' else # Windows default: Program Files - 'C:/Program Files/STMicroelectronics/STM32Cube/STM32CubeMX/STM32CubeMX.exe' if my_os == 'Windows' else None + 'C:/Program Files/STMicroelectronics/STM32Cube/STM32CubeMX/STM32CubeMX.exe' if my_os == 'Windows' else '', + + # If you're on Windows or you have CubeMX version below 6.3.0, the Java command (which CubeMX is written on) + # should be specified. For CubeMX starting from 6.3.0 JRE is bundled alongside, otherwise it must be installed + # by a user yourself separately + 'java_cmd': + 'C:/Program Files/STMicroelectronics/STM32Cube/STM32CubeMX/jre/bin/java.exe' + if my_os == 'Windows' else 'None', }, + + # "project" section focuses on parameters of the concrete stm32pio project project={ - # (default is OK) See CubeMX user manual PDF (UM1718) to get other useful options + # CubeMX can be fed with the script file to read commands from. This template is based on official user manual + # PDF (UM1718) 'cubemx_script_content': inspect.cleandoc(''' config load ${ioc_file_absolute_path} generate code ${project_dir_absolute_path} exit '''), - # Override the defaults to comply with CubeMX project structure. This should meet INI-style requirements. You - # can include existing sections, too (e.g. 
- # - # [env:nucleo_f031k6] - # key = value - # - # will add a 'key' parameter) + # In order for PlatformIO to "understand" a code generated by CubeMX, some tweaks (both in project structure and + # config files) should be applied. One of them is to inject some properties into the platformio.ini file and + # this option is a config-like string that should be merged with it. In other words, it should meet INI-style + # requirements and be a valid platformio.ini config itself 'platformio_ini_patch_content': inspect.cleandoc(''' [platformio] include_dir = Inc src_dir = Src '''), - 'board': '', - 'ioc_file': '', # required, the file name (relative to the project path) + 'board': '', # one of PlatformIO boards identifiers (e.g. "nucleo_f031k6") + + # CubeMX .ioc project config file. Typically, this will be filled in automatically on project initialization + 'ioc_file': '', 'cleanup_ignore': '', - 'cleanup_use_git': False # if True, 'clean' command use git to perform the task + 'cleanup_use_git': False, # controls what method 'clean' command should use + + 'inspect_ioc': True } ) @@ -76,34 +84,39 @@ no_options = ['n', 'no', 'false', '0'] yes_options = ['y', 'yes', 'true', '1'] -# CubeMX 0 return code doesn't necessarily means the correct generation (e.g. migration dialog has appeared and 'Cancel' +# CubeMX 0 return code doesn't necessarily mean a successful operation (e.g. migration dialog has appeared and 'Cancel' # was chosen, or CubeMX_version < ioc_file_version, etc.), we should analyze the actual output (STDOUT) +# noinspection SpellCheckingInspection cubemx_str_indicating_success = 'Code succesfully generated' -cubemx_str_indicating_error = 'Exception in code generation' # final line "KO" is also a good sign of an error +cubemx_str_indicating_error = 'Exception in code generation' # final line "KO" is also a good sign of error # Longest name (not necessarily a method so a little bit tricky...) # log_fieldwidth_function = max([len(member) for member in dir(stm32pio.lib.Stm32pio)]) + 1 -log_fieldwidth_function = 20 +log_fieldwidth_function = 20 # TODO: ugly and not so reliable anymore... -show_traceback_threshold_level: int = logging.DEBUG # when log some error and need to print the traceback +show_traceback_threshold_level = logging.DEBUG # when log some error and need to print a traceback -pio_boards_cache_lifetime: float = 5.0 # in seconds +pio_boards_cache_lifetime = 5.0 # in seconds # # Do not distract end-user with this CI s**t, take out from the main dict definition above # +# TODO: Probably should remove those CI-specific logic from the source code entirely. This problem is related to having +# an [optional] single (global) config # Environment variable indicating we are running on a CI server and should tweak some parameters CI_ENV_VARIABLE = os.environ.get('PIPELINE_WORKSPACE') if CI_ENV_VARIABLE is not None: + # TODO: Python 3.8+: some PyCharm static analyzer bug. 
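
The `cubemx_script_content` default above is a plain `string.Template`: stm32pio substitutes the two placeholders (wrapping both paths in quotation marks since they may contain whitespace) and feeds the result to the CubeMX CLI via a temporary script file. A small illustration with made-up paths:

```python
import inspect
import string

template = string.Template(inspect.cleandoc('''
    config load ${ioc_file_absolute_path}
    generate code ${project_dir_absolute_path}
    exit
'''))
# Quotes guard against whitespace inside the paths
script = template.substitute(ioc_file_absolute_path='"/path/to/project/project.ioc"',
                             project_dir_absolute_path='"/path/to/project"')
print(script)
```
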
Probably can be solved after introduction of TypedDict + # noinspection PyTypedDict config_default['app'] = { - 'java_cmd': 'java', 'platformio_cmd': 'platformio', - 'cubemx_cmd': str(Path(os.getenv('STM32PIO_CUBEMX_CACHE_FOLDER')) / 'STM32CubeMX.exe') + 'cubemx_cmd': str(Path(os.environ.get('STM32PIO_CUBEMX_CACHE_FOLDER')) / 'STM32CubeMX.exe'), + 'java_cmd': 'java' } TEST_FIXTURES_PATH = Path(os.environ.get('STM32PIO_TEST_FIXTURES', - default=Path(__file__).parent.joinpath('../../tests/fixtures'))) + default=Path(__file__).parent / '../../tests/fixtures')) TEST_CASE = os.environ.get('STM32PIO_TEST_CASE') patch_mixin = '' if TEST_FIXTURES_PATH is not None and TEST_CASE is not None: diff --git a/stm32pio/core/state.py b/stm32pio/core/state.py index 938b57b..4c25c5d 100644 --- a/stm32pio/core/state.py +++ b/stm32pio/core/state.py @@ -1,40 +1,33 @@ """ -State of the project in terms of business logic. It defines the sequence of some typical life-cycle stages a project can -sit in. +stm32pio project state in terms of business logic. It defines the sequence of some typical life-cycle stages a project +can sit in and the logic to inspect them. """ import collections +import contextlib import enum -_stages_string_representations = { - 'UNDEFINED': 'The project is messed up', - 'EMPTY': '.ioc file is present', - 'INITIALIZED': 'stm32pio initialized', - 'GENERATED': 'CubeMX code generated', - 'PIO_INITIALIZED': 'PlatformIO project initialized', - 'PATCHED': 'PlatformIO project patched', - 'BUILT': 'PlatformIO project built' -} +import stm32pio.core.project @enum.unique class ProjectStage(enum.IntEnum): """ - Codes indicating a project state at the moment. Should be the sequence of incrementing integers to be suited for - state determining algorithm. Starts from 1. - - Hint: Files/folders to be present on every project state (more or less, just for reference): - UNDEFINED: use this state to indicate none of the states below. Also, when we do not have any .ioc file the - Stm32pio class instance cannot be created (constructor raises an exception) - EMPTY: ['project.ioc'] - INITIALIZED: ['project.ioc', 'stm32pio.ini'] - GENERATED: ['Inc', 'Src', 'project.ioc', 'stm32pio.ini'] - PIO_INITIALIZED (on case-sensitive FS): ['test', 'include', 'Inc', 'platformio.ini', '.gitignore', 'Src', 'lib', - 'project.ioc', '.travis.yml', 'src'] - PATCHED: ['test', 'Inc', 'platformio.ini', '.gitignore', 'Src', 'lib', 'project.ioc', '.travis.yml'] - BUILT: same as above + '.pio' folder with build artifacts (such as .pio/build/nucleo_f031k6/firmware.bin, - .pio/build/nucleo_f031k6/firmware.elf) + Each code represents some attribute of the project. Their combination summarizes a state the project is being in + right now. Enum should be an integer numbers sequence so different comparing algorithms can be applied to it. Rough + list of different files/folders characterizing every stage (follow down to trace the project evolution): + + UNDEFINED: special pseudo-stage. 
It is always fulfilled but when this is a *only* satisfied stage, it means the + project is messed up and some stages were skipped on the way up to the last one + EMPTY: project.ioc + INITIALIZED: project.ioc, stm32pio.ini + GENERATED: Inc/, Src/, project.ioc, stm32pio.ini + PIO_INITIALIZED (for case-sensitive systems): include/, Inc/, lib/, src/, Src/, test/, .gitignore, + platformio.ini, project.ioc, stm32pio.ini + PATCHED: Inc/, lib/, Src/, test/, .gitignore, platformio.ini *(modified)*, project.ioc, stm32pio.ini + BUILT: *same as above* + .pio/ folder carrying build artifacts (such as .pio/build/nucleo_f031k6/firmware.bin) """ + UNDEFINED = enum.auto() # note: starts from 1 EMPTY = enum.auto() INITIALIZED = enum.auto() @@ -44,53 +37,108 @@ class ProjectStage(enum.IntEnum): BUILT = enum.auto() def __str__(self): - return _stages_string_representations[self.name] + return _stages_string_representations[self] + + +_stages_string_representations = { + ProjectStage.UNDEFINED: 'The project is messed up', + ProjectStage.EMPTY: '.ioc file is present', + ProjectStage.INITIALIZED: 'stm32pio initialized', + ProjectStage.GENERATED: 'CubeMX code generated', + ProjectStage.PIO_INITIALIZED: 'PlatformIO project initialized', + ProjectStage.PATCHED: 'PlatformIO project patched', + ProjectStage.BUILT: 'PlatformIO project built' +} +# TODO: 3.6+ CPython, 3.7+ language-wise: dicts are insertion ordered already class ProjectState(collections.OrderedDict): - """ - The ordered dictionary subclass suitable for storing the Stm32pio instances state. For example: - { - ProjectStage.UNDEFINED: True, # doesn't necessarily means that the project is messed up, see below - ProjectStage.EMPTY: True, - ProjectStage.INITIALIZED: True, - ProjectStage.GENERATED: False, - ProjectStage.PIO_INITIALIZED: False, - ProjectStage.PATCHED: False, - ProjectStage.BUILT: False - } - It is also extended with additional properties providing useful information such as obtaining the project current - stage. - - The class has no special constructor so its filling - both stages and their order - is a responsibility of the - external code. It also has no protection nor checks for its internal correctness. Anyway, it is intended to be used - (i.e. creating) only by the internal code of this library so there shouldn't be any worries. - """ - def __str__(self): + def __init__(self, project: 'stm32pio.core.project.Stm32pio'): """ - Pretty human-readable complete representation of the project state (not including the service one UNDEFINED to - not confuse the end-user) + Defines criteria for every ``ProjectStage`` and evaluate all of them for the given ``Stm32pio`` project. + Resulting dictionary will be a state object with ``ProjectStage`` keys and boolean values denoting whether a + particular stage has been fulfilled. Items order is always the same declaring a typical project life-cycle with + ``EMPTY`` stage at the start and ``BUILT`` one in the end. + **Important**: the class doesn't track a project and acts as a snapshot of its current state. Use + ``Stm32pio.state`` to obtain the actual information whenever you need to. + + :param project: stm32pio project to calculate the state for """ - # Need 2 spaces between the icon and the text to look fine - return '\n'.join(f"{'[*]' if stage_value else '[ ]'} {str(stage_name)}" + + super().__init__() + + # + # 1. 
Gather and prepare some data + # + try: # there might be no platformio.ini file yet + pio_is_initialized = project.platformio.ini.is_initialized + except (Exception,): + pio_is_initialized = False + + platformio_ini_is_patched = False + if pio_is_initialized: # there might be no platformio.ini file yet + # The getter below is designed to throw in certain circumstances but we don't care about the details here + with contextlib.suppress(Exception): + platformio_ini_is_patched = project.platformio.ini.is_patched + + inc_dir = project.path / 'Inc' + src_dir = project.path / 'Src' + include_dir = project.path / 'include' + pio_dir = project.path / '.pio' # hidden PlatformIO per-project-based service folder + + # + # 2. For each ProjectStage define the criteria a project should met to be considered fulfilling this particular + # stage + # + self[ProjectStage.UNDEFINED] = [True] # always satisfied, see ProjectStage.UNDEFINED description + self[ProjectStage.EMPTY] = [project.cubemx.ioc.path.is_file()] # IOC file is present + self[ProjectStage.INITIALIZED] = [project.config.path.is_file()] # stm32pio.ini config file has been saved + self[ProjectStage.GENERATED] = [inc_dir.is_dir() and len(list(inc_dir.iterdir())), + src_dir.is_dir() and len(list(src_dir.iterdir()))] # code has been generated + self[ProjectStage.PIO_INITIALIZED] = [pio_is_initialized] # platformio.ini file is present + # Analyze platformio.ini file and look for junk folders + self[ProjectStage.PATCHED] = [platformio_ini_is_patched, not include_dir.exists()] + # Search for a build artifacts + self[ProjectStage.BUILT] = [pio_dir.is_dir() and any(item.is_file() for item in pio_dir.rglob('*firmware*'))] + + # + # 3. Evaluate and fold all conditions above to take the final form + # + for stage, conditions in self.items(): + self[stage] = all(conditions) + + def __str__(self): + """Pretty human-readable representation (doesn't include the UNDEFINED service stage)""" + return '\n'.join(f"{'[*]' if stage_value else '[ ]'} {stage_name}" for stage_name, stage_value in self.items() if stage_name != ProjectStage.UNDEFINED) @property def current_stage(self) -> ProjectStage: - last_consistent_stage = ProjectStage.UNDEFINED + """ + Normally, the project goes through life-cycle phases consequentially fulfilling the stages one by one. The last + satisfied stage in this case we call a *consistent* one. But if there are breaks happening along the regular + trip we consider such state as inconsistent and saying the project is "messed up" (e.g. when some files were + manually moved). This scenario is reflected by the special UNDEFINED stage. + + :return: the last consistent stage or ``ProjectStage.UNDEFINED`` if there is no such + """ + # The algorithm below is probably the most time and memory efficient. It definitely can be shorter with a + # drawback of being slower/more consuming + + last_consistent_stage = ProjectStage.UNDEFINED # this one is always satisfied not_fulfilled_stage_found = False - # Search for a consecutive sequence of True's and find the last of them. For example, if the array is + # Look for a consecutive sequence of True's and find the last of them. For example, if the array is # [1,1,1,0,0,0,0] # ^ # we should consider 2 as the last index - for stage_name, stage_fulfilled in self.items(): - if stage_fulfilled: + for stage_name, stage_is_fulfilled in self.items(): + if stage_is_fulfilled: if not_fulfilled_stage_found: - # Fall back to the UNDEFINED stage if we have breaks in conditions results array. 
E.g., for - # [1,1,1,0,1,0,0] - # we should return UNDEFINED as it doesn't look like a correct set of files actually + # Fallback to the UNDEFINED stage if we have breaks in conditions results array. E.g., for + # [1,1,1,0,0,1,0] + # we should return UNDEFINED as it doesn't look like a correct set last_consistent_stage = ProjectStage.UNDEFINED break else: @@ -102,5 +150,5 @@ def current_stage(self) -> ProjectStage: @property def is_consistent(self) -> bool: - """Whether the state has been went through the stages consequentially or not""" + """See ``current_stage`` for the *"consistency"* definition""" return self.current_stage != ProjectStage.UNDEFINED diff --git a/stm32pio/core/util.py b/stm32pio/core/util.py index c803e4c..becd37c 100644 --- a/stm32pio/core/util.py +++ b/stm32pio/core/util.py @@ -1,17 +1,17 @@ """ -Some auxiliary entities not falling into the other categories +Some service code not falling into more specific categories. """ import collections.abc -import copy -import json -import pathlib +import logging +import shlex +import shutil import subprocess import sys -import time +from pathlib import Path from typing import Any, List, Mapping -from stm32pio.core.settings import pio_boards_cache_lifetime, config_default +import stm32pio.core.log def _get_version_from_scm() -> str: @@ -20,9 +20,11 @@ def _get_version_from_scm() -> str: except ImportError: return "Portable (not-installed). See git tag" else: - # Calculate the version in real-time from the Git repo state + # Calculate the version at runtime retrieving it from the actual Git repo return setuptools_scm.get_version(root='../..', relative_to=__file__) + +# TODO: refactor this after dropping 3.7 def get_version() -> str: """Retrieve the app version as string""" if sys.version_info >= (3, 8): @@ -36,61 +38,28 @@ def get_version() -> str: return _get_version_from_scm() else: try: - # Version is stored in the stm32pio/core/version.py file auto-generated by setuptools_scm tool + # Version is stored at the stm32pio/core/version.py file auto-generated by setuptools_scm tool import stm32pio.core.version except ImportError: # Version file is not available, most likely we are not installed (i.e. running from sources) return _get_version_from_scm() else: + # noinspection PyUnresolvedReferences return stm32pio.core.version.version -_pio_boards_cache: List[str] = [] -_pio_boards_cache_fetched_at: float = 0 - -def get_platformio_boards(platformio_cmd: str = config_default['app']['platformio_cmd']) -> List[str]: - """ - Obtain the PlatformIO boards list (string identifiers only). As we interested only in STM32 ones, cut off all of the others. Additionally, - establish a short-time "cache" to prevent the over-flooding with requests to subprocess. - - IMPORTANT NOTE: PlatformIO can go to the Internet from time to time when it decides that its own cache is out of - date. So it may take a long time to execute. - """ - - global _pio_boards_cache_fetched_at, _pio_boards_cache - cache_is_empty = len(_pio_boards_cache) == 0 - current_time = time.time() - cache_is_outdated = current_time - _pio_boards_cache_fetched_at >= pio_boards_cache_lifetime - - if cache_is_empty or cache_is_outdated: - # Windows 7, as usual, correctly works only with shell=True... 
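
Stepping back to the new `ProjectState` above: since it is just a snapshot dictionary keyed by `ProjectStage`, consuming it is straightforward; a short usage sketch (the project path is illustrative):

```python
from pathlib import Path

import stm32pio.core.project

project = stm32pio.core.project.Stm32pio(Path('some-project'))  # illustrative path (must contain an .ioc file)
state = project.state             # snapshot: ProjectStage -> bool
print(state)                      # pretty "[*] / [ ]" per-stage report
print(state.current_stage)        # the last consistently reached stage
print(state.is_consistent)        # False if some stage was skipped along the way
```
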
- completed_process = subprocess.run( - f"{platformio_cmd} boards --json-output stm32cube", - encoding='utf-8', shell=True, stdout=subprocess.PIPE, check=True) - _pio_boards_cache = [board['id'] for board in json.loads(completed_process.stdout)] - _pio_boards_cache_fetched_at = current_time - - # Caller can mutate the array and damage our cache so we give it a copy (as the values are strings it is equivalent - # to the deep copy of this list) - return copy.copy(_pio_boards_cache) - - def cleanup_mapping(mapping: Mapping[str, Any]) -> dict: """Return a deep copy of the given mapping excluding None and empty string values""" - cleaned = {} - for key, value in mapping.items(): if isinstance(value, collections.abc.Mapping): cleaned[key] = cleanup_mapping(value) elif value is not None and value != '': cleaned[key] = value - return cleaned -def get_folder_contents(path: pathlib.Path, pattern: str = '*', - ignore_list: List[pathlib.Path] = None) -> List[pathlib.Path]: +def get_folder_contents(path: Path, pattern: str = '*', ignore_list: List[Path] = None) -> List[Path]: """ Return all endpoints inside the given directory (recursively). If specified, paths from the ignore_list will be excluded. The resulting array is fully "unfolded" meaning every folder will be expanded, so both it and its children @@ -98,27 +67,19 @@ def get_folder_contents(path: pathlib.Path, pattern: str = '*', both it and its children will be ignored completely. Note: this is a "naive", straightforward and non-efficient solution (probably, both for time and memory - consumption). The algorithm behind can (but not necessarily should) definitely be improved. - - Args: - path: root directory - pattern: optional glob-style pattern string to use. Default one will pass all - ignore_list: optional list of pathlib Paths to ignore (see the full description) + consumption). The algorithm behind it can (but not necessarily *should*) probably be improved. - Returns: - list of pathlib Paths + :param path: root directory + :param pattern: optional glob-style pattern string to use. 
Default one will pass all + :param ignore_list: optional list of paths to ignore (see the full description) + :return: resulting list of paths """ folder_contents = [] - - if ignore_list is not None: - ignore_list = sorted(ignore_list) - else: - ignore_list = [] + ignore_list = sorted(ignore_list) if ignore_list is not None else [] for child in sorted(path.rglob(pattern)): # all files and folders, recursively - - # Check such cases: + # Here we check such cases: # 1) current child: a/b/ # ignore list entry: a/b/c/d.txt # @@ -127,7 +88,67 @@ def get_folder_contents(path: pathlib.Path, pattern: str = '*', is_root_of_another = next( (True for entry in ignore_list if (child in entry.parents) or (entry in child.parents)), False) - if (child not in ignore_list) and (not is_root_of_another): + if child not in ignore_list and not is_root_of_another: folder_contents.append(child) return folder_contents + + +def remove_folder(path: Path, logger: 'stm32pio.core.log.Logger'): + """Remove specified directory (empty or not) logging a result""" + try: + shutil.rmtree(path) + except FileNotFoundError: + logger.debug(f"'{path.name}' folder doesn't exist already") + except Exception as e: + logger.error(f"Cannot remove '{path.name}' folder", exc_info=e) + else: + logger.debug(f"'{path.name}' folder has been removed") + + +def run_command(command: str, path: Path, logger: 'stm32pio.core.log.Logger') -> int: + """ + Launch the command consisting of the given executable and some path as its argument: + $ [editor] [folder] + + :param command: shell command (e.g. some executable in PATH or as a direct path) + :param path: argument to pass to the executable + :param logger: logging.Logger-compatible object + :return: passes a return code of the launched command + """ + + sanitized_input = shlex.quote(command) + executable_name = sanitized_input.split()[0] + + logger.info(f"starting '{executable_name}'...") + try: + with stm32pio.core.log.LogPipe(logger, logging.DEBUG) as log: + # Works unstable on some Windows 7 systems, but correct on Win10... + # result = subprocess.run([command, self.path], check=True) + completed_process = subprocess.run(f'{sanitized_input} "{path}"', shell=True, check=True, + stdout=log.pipe, stderr=log.pipe) + logger.debug(completed_process.stdout, from_subprocess=True) + + return completed_process.returncode + except subprocess.CalledProcessError as e: + logger.error(f"failed to start '{executable_name}': {e.stdout}") + return e.returncode + + +def extract_header_comment(text: str, comment_symbol: str = '#') -> str: + """ + If text has 1 or more of its first consequent lines that starts with ``comment_symbol``, return them. + + :param text: string to analyze + :param comment_symbol: symbol for line to be considered as a comment (e.g. # or //) + :return: header comment + """ + + header_comment = '' + if text.startswith(comment_symbol): + for line in text.splitlines(keepends=True): + if line.startswith(comment_symbol): + header_comment += line + else: + break + return header_comment diff --git a/stm32pio/core/validate.py b/stm32pio/core/validate.py index 064560c..84c7fe9 100644 --- a/stm32pio/core/validate.py +++ b/stm32pio/core/validate.py @@ -1,55 +1,54 @@ """ -Entities helping the main class validate some command line tools. +Helpers for command line tools presence validation. 
""" import logging import subprocess from typing import Optional, Callable, Tuple, List +import stm32pio.core.config +import stm32pio.core.cubemx +import stm32pio.core.log import stm32pio.core.settings -class ToolValidator: +Runner = Callable[[Optional[str]], Tuple[subprocess.CompletedProcess, str]] - # Properties-results of validation. These will be set after run + +class Tool: + """Class representing a tool – some CLI command to execute and validate (i.e. check its presence)""" + + # Properties-results of validation. These will be set after a run. Initially set to None to explicitly indicate + # there were been no validations yet succeed: bool = None - text: str = None # some optional additional description of the tool state - error: Exception = None # optional exception in case some error happened + remarks: str = None # some optional additional description of the tool state + error: Exception = None # exception will be set in case of unsuccessful run - def __init__(self, name: str, command: Optional[str], - runner: Callable[[Optional[str]], Tuple[subprocess.CompletedProcess, str]], - required: bool = True, logger: logging.Logger = None): + def __init__(self, logger: stm32pio.core.log.Logger, name: str, runner: Runner, command: str = None, + required: bool = True): """ - The constructor does nothing to check the tool. Invoke the validate() method to fill the meaningful fields. - - Args: - name: what we're verifying? - command: optional argument to pass to the runner - runner: function to execute to determine the validated thing is correct - required: is this parameter mandatory? If this is true the tool will be considered succeeded - even if it is not set - logger: optional logging.Logger instance to indicate the progress + :param logger: logging.Logger-compatible object + :param name: what we're verifying? + :param runner: function to execute in order to validate the tool + :param command: if given, this will be passed to runner as an argument + :param required: if False, the tool will be considered succeed even if it is not set """ - # TODO: dataclass can be used (https://stackoverflow.com/questions/1389180/automatically-initialize-instance-variables) + self.logger = logger self.name = name - self.command = command self.runner = runner + self.command = command self.required = required - self.logger = logger - def _run(self, command): - """_macro_ function to reduce a code repetition""" + def _run(self, command: str): + """Execute a runner""" completed_process, std_output = self.runner(command) self.succeed = completed_process.returncode == 0 if completed_process.returncode != 0: - self.error = Exception(std_output) - - def validate(self): - """Start the validation using collected information (properties). Return itself for further usage""" - - if self.logger is not None: - self.logger.info(f"checking '{self.name}'...") + self.error = Exception(std_output or 'Unknown error') + def validate(self) -> 'Tool': + """Run the validation. 
Chainable method""" + self.logger.info(f"checking '{self.name}'...") try: if self.required: if self.command: @@ -60,7 +59,7 @@ def validate(self): else: if self.command and (self.command.lower() in stm32pio.core.settings.none_options): self.succeed = True - self.text = f"'{self.name}' is set to None, ignoring" + self.remarks = f"'{self.name}' is set to None, ignoring" elif self.command: self._run(self.command) else: @@ -69,32 +68,75 @@ def validate(self): except Exception as e: self.succeed = False self.error = e - return self -class ToolsValidationResults(List[ToolValidator]): - """Conveniently store the validation results and use some useful additional features""" +class ToolsValidationResults(List[Tool]): + """ + Convenient container of validation results allowing external code to easily interpret them. See + ``validate_environment`` to get an idea. + """ @property def succeed(self) -> bool: return all(tool.succeed for tool in self) def __str__(self): - """Format the results of contained members (basic report and extended if present)""" + """Format basic and extended reports""" basic_report = '' for tool in self: tool_str = f"[{'ok' if tool.succeed else 'error':>5}] {tool.name:<10}" - if tool.text: - tool_str += f" {tool.text}" + if tool.remarks: + tool_str += f" {tool.remarks}" basic_report += f"{tool_str}\n" verbose_report = '' - errored_tools = [tool for tool in self if tool.error is not None] - if len(errored_tools): + faulty_tools = [tool for tool in self if tool.error is not None] + if len(faulty_tools): verbose_report += '\n\nTools output:\n\n' - for tool in errored_tools: + for tool in faulty_tools: verbose_report += f"{tool.name}\n {tool.error}\n\n" return basic_report + verbose_report + + +def validate_environment(logger: stm32pio.core.log.Logger, config: stm32pio.core.config.ProjectConfig, + cubemx: stm32pio.core.cubemx.CubeMX) -> ToolsValidationResults: + """ + Defines minimal runners enough to ensure that a tool works and execute them in the given project context gathering + the results. + + :param logger: project' logger instance + :param config: project config containing tools commands in its "app" section + :param cubemx: project' CubeMX instance + :return: validation results suitable for immediate printing + """ + + def java_runner(java_cmd): + with stm32pio.core.log.LogPipe(logger, logging.DEBUG, accumulate=True) as log: + completed_process = subprocess.run([java_cmd, '-version'], stdout=log.pipe, stderr=log.pipe) + return completed_process, log.value + + def cubemx_runner(_): + return cubemx.execute_script('exit\n') # just start and exit + + def platformio_runner(platformio_cmd): + with stm32pio.core.log.LogPipe(logger, logging.DEBUG, accumulate=True) as log: + completed_process = subprocess.run([platformio_cmd], stdout=log.pipe, stderr=log.pipe) + return completed_process, log.value + + if not config.path.exists(): + logger.warning("config file not found. 
Validation will be performed against the runtime configuration") + + return ToolsValidationResults( + Tool(name=param, + command=config.get('app', param), + runner=runner, + required=required, + logger=logger).validate() + for param, runner, required in [ + ('platformio_cmd', platformio_runner, True), + ('cubemx_cmd', cubemx_runner, True), + ('java_cmd', java_runner, False) + ]) diff --git a/stm32pio/gui/__main__.py b/stm32pio/gui/__main__.py index 8b17661..670006e 100644 --- a/stm32pio/gui/__main__.py +++ b/stm32pio/gui/__main__.py @@ -1,13 +1,6 @@ -import pathlib import sys -MODULE_PATH = pathlib.Path(__file__).parent # module path, e.g. root/stm32pio/gui/ -ROOT_PATH = MODULE_PATH.parent.parent # repo's or the site-package's entry root -try: - import stm32pio.gui.app -except ModuleNotFoundError: - sys.path.append(str(ROOT_PATH)) # hack to run the app as 'python path/to/__main__.py' - import stm32pio.gui.app +import stm32pio.gui.app if __name__ == '__main__': diff --git a/stm32pio/gui/app.py b/stm32pio/gui/app.py index 957e1b2..e952c5b 100644 --- a/stm32pio/gui/app.py +++ b/stm32pio/gui/app.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse @@ -9,6 +9,8 @@ import sys from typing import Optional, List +import stm32pio.core.pio + try: from PySide2.QtCore import Signal, QtInfoMsg, QtWarningMsg, QtCriticalMsg, QtFatalMsg, qInstallMessageHandler, \ QStringListModel, QUrl, QThreadPool, QSettings, QByteArray @@ -31,13 +33,13 @@ ROOT_PATH = MODULE_PATH.parent.parent # repo's or the site-package's entry root try: import stm32pio.core.settings - import stm32pio.core.logging + import stm32pio.core.log import stm32pio.core.util import stm32pio.core.state except ModuleNotFoundError: sys.path.append(str(ROOT_PATH)) # hack to resolve imports if the app was launched as 'python path/to/app.py' import stm32pio.core.settings - import stm32pio.core.logging + import stm32pio.core.log import stm32pio.core.util import stm32pio.core.state @@ -80,6 +82,7 @@ def create_app(sys_argv: List[str] = None) -> QApplicationClass: setup_logging(initial_verbosity=settings.get('verbose')) # Restore projects list + # TODO: Qt pollutes a system leaving its files across several folders, right? We should probably inform a user settings.beginGroup('app') restored_projects_paths: List[str] = [] for index in range(settings.beginReadArray('projects')): @@ -108,7 +111,7 @@ def create_app(sys_argv: List[str] = None) -> QApplicationClass: # TODO: use setContextProperties() (see in Qt6, not present in Qt5...) engine.rootContext().setContextProperty('appVersion', stm32pio.core.util.get_version()) engine.rootContext().setContextProperty('rootPath', root_path) - engine.rootContext().setContextProperty('Logging', stm32pio.core.logging.logging_levels) + engine.rootContext().setContextProperty('Logging', stm32pio.core.log.logging_levels) engine.rootContext().setContextProperty(stm32pio.core.state.ProjectStage.__name__, project_stages) engine.rootContext().setContextProperty('projectsModel', projects_model) engine.rootContext().setContextProperty('boardsModel', boards_model) @@ -131,7 +134,7 @@ def onClose(): # TODO: this uses default platformio command but it might be unavailable. 
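
The `ToolsValidationResults` object returned by `validate_environment()` above is meant to be printed as-is; a usage sketch (the project path is illustrative):

```python
from pathlib import Path

import stm32pio.core.project

project = stm32pio.core.project.Stm32pio(Path('some-project'))  # illustrative path
results = project.validate_environment()
print(results)  # per-tool summary plus the captured output of any tool that failed
if not results.succeed:
    print('some required tool is not available')
```
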
# Also, it unnecessarily slows down the startup def loading(): - boards = ['None'] + stm32pio.core.util.get_platformio_boards() + boards = ['None'] + stm32pio.core.pio.get_boards() boards_model.setStringList(boards) def loaded(action_name: str, success: bool): @@ -143,7 +146,7 @@ def loaded(action_name: str, success: bool): # At the end, append (or jump to) a CLI-provided project, if there is one if args is not None and 'path' in args: list_item_kwargs = { 'from_startup': True } - if args.board: + if args.board: # TODO: test this list_item_kwargs['project_kwargs'] = { 'parameters': { 'project': { 'board': args.board } } } # pizdec konechno... projects_model.addListItem(str(pathlib.Path(args.path)), list_item_kwargs=list_item_kwargs) # Append always happens to the end of list and we want to jump to the last added project (CLI one). The @@ -151,7 +154,7 @@ def loaded(action_name: str, success: bool): projects_model.goToProject.emit((len(restored_projects_paths) + 1) - 1) projects_model.saveInSettings() except: - stm32pio.core.logging.log_current_exception(logging.getLogger('stm32pio.gui.app')) + stm32pio.core.log.log_current_exception(logging.getLogger('stm32pio.gui.app')) success = False main_window.backendLoaded.emit(success) # inform the GUI diff --git a/stm32pio/gui/list.py b/stm32pio/gui/list.py index 3228c8b..9839df6 100644 --- a/stm32pio/gui/list.py +++ b/stm32pio/gui/list.py @@ -4,7 +4,7 @@ from PySide2.QtCore import QAbstractListModel, Signal, Slot, QObject, QThreadPool, QModelIndex, Qt, QUrl -from stm32pio.core.logging import log_current_exception +from stm32pio.core.log import log_current_exception from stm32pio.gui.project import ProjectListItem from stm32pio.gui.util import Worker @@ -73,6 +73,7 @@ def saveInSettings(self) -> None: self.workers_pool.start(Worker(self._saveInSettings, logger=module_logger, parent=self)) + # TODO: simplify? 
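
The boards model above is now filled from `stm32pio.core.pio.get_boards()`; used standalone it looks like this (note that PlatformIO may go online when its own cache is stale, so the call can take a while):

```python
import stm32pio.core.pio

boards = stm32pio.core.pio.get_boards()  # PlatformIO board identifiers, e.g. 'nucleo_f031k6'
print(len(boards), boards[:5])
```
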
def each_project_is_duplicate_of(self, path: str) -> Iterator[bool]: """ Returns generator yielding an answer to the question "Is current project is a duplicate of one represented by a diff --git a/stm32pio/gui/log.py b/stm32pio/gui/log.py index f0683e8..1d6c0a6 100644 --- a/stm32pio/gui/log.py +++ b/stm32pio/gui/log.py @@ -7,8 +7,7 @@ from PySide2.QtCore import QObject, Signal, QThread, QtInfoMsg, QtWarningMsg, QtCriticalMsg, QtFatalMsg, \ qInstallMessageHandler -from stm32pio.core.settings import log_fieldwidth_function -from stm32pio.core.logging import Verbosity, DispatchingFormatter +from stm32pio.core.log import Verbosity, DispatchingFormatter from stm32pio.gui.util import ProjectID @@ -136,9 +135,4 @@ def routine(self) -> None: projects_logger_handler = BuffersDispatchingHandler() # a storage of the buffers for the logging messages of all # current projects (see its docs for more info) -_projects_logger_formatter = DispatchingFormatter( - general={ - Verbosity.NORMAL: logging.Formatter("%(levelname)-8s %(message)s"), - Verbosity.VERBOSE: logging.Formatter( - f"%(levelname)-8s %(funcName)-{log_fieldwidth_function}s %(message)s") - }) +_projects_logger_formatter = DispatchingFormatter() diff --git a/stm32pio/gui/project.py b/stm32pio/gui/project.py index 2b52ec0..7069186 100644 --- a/stm32pio/gui/project.py +++ b/stm32pio/gui/project.py @@ -5,9 +5,10 @@ from PySide2.QtCore import QObject, Signal, QThreadPool, Property, Slot -import stm32pio.core.logging +import stm32pio.core.log import stm32pio.core.project import stm32pio.core.state +import stm32pio.core.settings from stm32pio.gui.log import LoggingWorker, module_logger from stm32pio.gui.util import Worker @@ -48,7 +49,7 @@ def __init__(self, project_args: List[Any] = None, project_kwargs: Mapping[str, self._from_startup = from_startup underlying_logger = logging.getLogger('stm32pio.gui.projects') - self.logger = stm32pio.core.logging.ProjectLoggerAdapter(underlying_logger, { 'project_id': id(self)} ) + self.logger = stm32pio.core.log.ProjectLogger(underlying_logger, project_id=id(self)) self.logging_worker = LoggingWorker(project_id=id(self)) self.logging_worker.sendLog.connect(self.logAdded) @@ -73,10 +74,8 @@ def __init__(self, project_args: List[Any] = None, project_kwargs: Mapping[str, # Register some kind of the deconstruction handler (later, after the project initialization, see init_project) self._finalizer = None - if 'instance_options' not in project_kwargs: - project_kwargs['instance_options'] = { 'logger': self.logger } - elif 'logger' not in project_kwargs['instance_options']: - project_kwargs['instance_options']['logger'] = self.logger + if 'logger' not in project_kwargs: + project_kwargs['logger'] = self.logger # Start the Stm32pio part initialization right after. 
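
As the project.py hunk above shows, a logger is now handed to `Stm32pio` as a direct keyword argument instead of the old `instance_options` dictionary; for example:

```python
import logging
from pathlib import Path

import stm32pio.core.project

logger = logging.getLogger('stm32pio.gui.projects')  # any logging.Logger-compatible object will do
project = stm32pio.core.project.Stm32pio(Path('some-project'), logger=logger)  # illustrative path
```
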
It can take some time so we schedule it in a dedicated # thread @@ -95,9 +94,13 @@ def init_project(self, *args, **kwargs) -> None: try: self.project = stm32pio.core.project.Stm32pio(*args, **kwargs) except: - stm32pio.core.logging.log_current_exception(self.logger) + stm32pio.core.log.log_current_exception(self.logger) self._state = { 'INIT_ERROR': True } # pseudo-stage self._current_stage = 'INIT_ERROR' + else: + if self.project.config.get('project', 'inspect_ioc').lower() in stm32pio.core.settings.yes_options and \ + self.project.state.current_stage > stm32pio.core.state.ProjectStage.EMPTY: + self.project.inspect_ioc_config() finally: # Register some kind of the deconstruction handler self._finalizer = weakref.finalize(self, self.at_exit, self.workers_pool, self.logging_worker, diff --git a/stm32pio/gui/qml/AboutDialog.qml b/stm32pio/gui/qml/AboutDialog.qml index bbb1f9c..7420897 100644 --- a/stm32pio/gui/qml/AboutDialog.qml +++ b/stm32pio/gui/qml/AboutDialog.qml @@ -20,12 +20,11 @@ Dialogs.Dialog { textFormat: TextEdit.RichText horizontalAlignment: TextEdit.AlignHCenter verticalAlignment: TextEdit.AlignVCenter - text: `v.${appVersion}
+ text: `v${appVersion}
2018 - 2021 © ussserrr
GitHub

- Powered by Python, PlatformIO, PySide2, FlatIcons and other awesome - technologies.

+ Powered by Python, Qt, FlatIcons and other awesome technologies.

Please refer to the supplied LICENSE file for more information. This software should be distributed alongside its license`
diff --git a/stm32pio/gui/qml/InitScreen.qml b/stm32pio/gui/qml/InitScreen.qml
index e010815..e04f6b9 100644
--- a/stm32pio/gui/qml/InitScreen.qml
+++ b/stm32pio/gui/qml/InitScreen.qml
@@ -96,6 +96,8 @@ Column {
                 }
             }]);
 
+            project.run('inspect_ioc_config', []);
+
             if (runCheckBox.checked) {
                 for (let i = projectActionsModel.statefulActionsStartIndex + 1; i < projectActionsModel.count; ++i) {
                     project.run(projectActionsModel.get(i).action, []);
diff --git a/stm32pio/gui/util.py b/stm32pio/gui/util.py
index ba41cc4..78e2e13 100644
--- a/stm32pio/gui/util.py
+++ b/stm32pio/gui/util.py
@@ -4,7 +4,7 @@
 
 from PySide2.QtCore import QObject, QRunnable, Signal
 
-import stm32pio.core.logging
+import stm32pio.core.log
 
 
 ProjectID = type(id(object))  # Int
@@ -46,7 +46,7 @@ def run(self):
         except Exception:
             if self.logger is not None:
                 # We cannot pass the project config here to preserve the error because we don't have the reference
-                stm32pio.core.logging.log_current_exception(self.logger)
+                stm32pio.core.log.log_current_exception(self.logger)
             result = -1
 
         if result is None or (type(result) == int and result == 0):
diff --git a/tests/common.py b/tests/common.py
index b84e94c..5887fc8 100755
--- a/tests/common.py
+++ b/tests/common.py
@@ -50,7 +50,7 @@
 
 # Absolute path to the main stm32pio script (make sure what we are testing)
 STM32PIO_MAIN_SCRIPT: str = inspect.getfile(stm32pio.cli.app.main)
-# Absolute path to the Python executable (no need to guess whether it's 'python' or 'python3' and so on)
+# Absolute path to the Python command (no need to guess whether it's 'python' or 'python3' and so on)
 PYTHON_EXEC: str = sys.executable
 
 print(f"Test case: {PROJECT_BOARD}")
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 9d59706..5543e9e 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,6 +1,7 @@
 import configparser
 import contextlib
 import io
+import logging
 import pathlib
 import re
 import subprocess
@@ -9,7 +10,7 @@
 from tests.common import *
 
 import stm32pio.cli.app
-import stm32pio.core.logging
+import stm32pio.core.log
 import stm32pio.core.settings
 import stm32pio.core.util
 import stm32pio.core.project
@@ -89,7 +90,7 @@ def test_verbosity(self):
 
         # execution
         methods = dir(stm32pio.core.project.Stm32pio) + ['main']
 
-        with self.subTest(verbosity_level=stm32pio.core.logging.Verbosity.NORMAL):
+        with self.subTest(verbosity_level=stm32pio.core.log.Verbosity.NORMAL):
             result = subprocess.run([PYTHON_EXEC, STM32PIO_MAIN_SCRIPT, 'generate', '--directory', str(STAGE_PATH)],
                                     stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8')
@@ -106,7 +107,7 @@ def test_verbosity(self):
 
             # The snippet of the actual STM32CubeMX output
             self.assertNotIn('Starting STM32CubeMX', result.stderr, msg="STM32CubeMX has printed its logs")
 
-        with self.subTest(verbosity_level=stm32pio.core.logging.Verbosity.VERBOSE):
+        with self.subTest(verbosity_level=stm32pio.core.log.Verbosity.VERBOSE):
             result = subprocess.run([PYTHON_EXEC, STM32PIO_MAIN_SCRIPT, '-v', 'new',
                                      '--directory', str(STAGE_PATH), '--board', PROJECT_BOARD],
@@ -118,12 +119,13 @@ def test_verbosity(self):
                              msg="Process has printed something directly into STDOUT bypassing logging")
             self.assertIn('DEBUG', result.stderr, msg="Verbose logging output hasn't been enabled on STDERR")
 
+            # TODO: format has changed!
             # Inject all methods' names in the regex. Inject the width of field in a log format string
-            regex = re.compile("^(?=(DEBUG) {0,4})(?=.{8} (?=(" + '|'.join(methods) + ") {0," +
-                               str(stm32pio.core.settings.log_fieldwidth_function) + "})(?=.{" +
-                               str(stm32pio.core.settings.log_fieldwidth_function) + "} [^ ]))", flags=re.MULTILINE)
-            self.assertGreaterEqual(len(re.findall(regex, result.stderr)), 1,
-                                    msg="Logs messages doesn't match the format")
+            # regex = re.compile("^(?=(DEBUG) {0,4})(?=.{8} (?=(" + '|'.join(methods) + ") {0," +
+            #                    str(stm32pio.core.settings.log_fieldwidth_function) + "})(?=.{" +
+            #                    str(stm32pio.core.settings.log_fieldwidth_function) + "} [^ ]))", flags=re.MULTILINE)
+            # self.assertGreaterEqual(len(re.findall(regex, result.stderr)), 1,
+            #                         msg="Logs messages doesn't match the format")
 
             # The snippet of the actual STM32CubeMX output
             self.assertIn("Starting STM32CubeMX", result.stderr, msg="STM32CubeMX has not printed its logs")
@@ -135,7 +137,7 @@ def test_init(self):
 
         result = subprocess.run([PYTHON_EXEC, STM32PIO_MAIN_SCRIPT, 'init', '--directory', str(STAGE_PATH),
                                  '--board', PROJECT_BOARD])
-        self.assertEqual(result.returncode, 0, msg="Non-zero return code")
+        self.assertEqual(result.returncode, 0, msg=f"Non-zero return code")
 
         self.assertTrue(STAGE_PATH.joinpath(stm32pio.core.settings.config_file_name).is_file(),
                         msg=f"{stm32pio.core.settings.config_file_name} file hasn't been created")
@@ -180,11 +182,9 @@ def test_save_last_error_in_config(self):
         """The app should retain the last error occurred and clear it on the next successful run"""
 
         # Create and save an intentionally invalid config...
-        config_with_invalid_tool = stm32pio.core.config.Config(STAGE_PATH, runtime_parameters={
-            'app': {
-                'java_cmd': 'incorrect_java_command'
-            }
-        })
+        runtime_parameters = {'app': {'java_cmd': 'incorrect_java_command'}}
+        config_with_invalid_tool = stm32pio.core.config.ProjectConfig(STAGE_PATH, logging.getLogger('any'),
+                                                                      runtime_parameters=runtime_parameters)
         config_with_invalid_tool.save()
 
         # ...with this config the following command should fail...
diff --git a/tests/test_integration.py b/tests/test_integration.py
index a6965cf..c2ce192 100644
--- a/tests/test_integration.py
+++ b/tests/test_integration.py
@@ -1,7 +1,5 @@
 import configparser
 import gc
-import inspect
-import shutil
 
 # Provides test constants and definitions
 from tests.common import *
@@ -59,7 +57,7 @@ def test_config_priorities(self):
 
         # On project creation we should interpret the CLI-provided values as superseding to the saved ones and
         # saved ones, in turn, as superseding to the default ones (BUT only non-empty values)
-        project = stm32pio.core.project.Stm32pio(STAGE_PATH, instance_options={'save_on_destruction': True}, parameters={
+        project = stm32pio.core.project.Stm32pio(STAGE_PATH, save_on_destruction=True, parameters={
             'app': {
                 'cubemx_cmd': ''
             },
diff --git a/tests/test_unit.py b/tests/test_unit.py
index 8418c54..d9c7e09 100644
--- a/tests/test_unit.py
+++ b/tests/test_unit.py
@@ -1,6 +1,8 @@
 import collections.abc
 import configparser
-import inspect
+import contextlib
+import io
+import logging
 import platform
 import string
 import subprocess
@@ -8,14 +10,15 @@
 import unittest.mock
 from functools import reduce
-from pathlib import Path
 from typing import Mapping, Union
 
 
 # Provides test constants and definitions
+import stm32pio.core.pio
 from tests.common import *
 
 import stm32pio.core.settings
 import stm32pio.core.project
+import stm32pio.core.cubemx
 import stm32pio.core.util
 
 
@@ -62,15 +65,18 @@ def test_patch(self):
         """
         project = stm32pio.core.project.Stm32pio(STAGE_PATH)
 
-        test_content = inspect.cleandoc('''
+        header = inspect.cleandoc('''
             ; This is a test config .ini file
             ; with a comment. It emulates a real
             ; platformio.ini file
-
+        ''') + '\n'
+        test_content = header + inspect.cleandoc('''
             [platformio]
             include_dir = this s;789hould be replaced
+            let's add some tricky content
             ; there should appear a new parameter
             test_key3 = this should be preserved
+            alright?
 
             [test_section]
             test_key1 = test_value1
@@ -91,7 +97,9 @@ def test_patch(self):
         patch_config = configparser.ConfigParser(interpolation=None)
         patch_config.read_string(project.config.get('project', 'platformio_ini_patch_content'))
 
-        self.assertGreater(len(patched_config.read(STAGE_PATH.joinpath('platformio.ini'))), 0)
+        patched_content = STAGE_PATH.joinpath('platformio.ini').read_text()
+        patched_config.read_string(patched_content)
+        self.assertGreater(len(patched_content), 0)
 
         for patch_section in patch_config.sections():
             self.assertTrue(patched_config.has_section(patch_section), msg=f"{patch_section} is missing")
@@ -109,6 +117,8 @@ def test_patch(self):
                 self.assertEqual(patched_config.get(original_section, original_key), original_value,
                                  msg=f"{original_section}: {original_key}={original_value} is corrupted")
 
+        self.assertIn(header, patched_content, msg='Header should be preserved')
+
     def test_build_should_handle_error(self):
         """
         Build an empty project so PlatformIO should return an error
@@ -128,7 +138,7 @@ def test_start_editor(self):
         """
         project = stm32pio.core.project.Stm32pio(STAGE_PATH)
 
-        editors = {  # some edotors to check
+        editors = {  # some editors to check
             'atom': {
                 'Windows': 'atom.exe',
                 'Darwin': 'Atom',
@@ -222,7 +232,7 @@ def test_get_platformio_boards(self):
         """
         PlatformIO identifiers of boards are requested using PlatformIO CLI in JSON format
         """
-        boards = stm32pio.core.util.get_platformio_boards()
+        boards = stm32pio.core.pio.get_boards()
         self.assertIsInstance(boards, collections.abc.MutableSequence)
         self.assertGreater(len(boards), 0, msg="boards list is empty")
 
@@ -238,7 +248,7 @@ def test_ioc_file_provided(self):
         shutil.copy(STAGE_PATH.joinpath(PROJECT_IOC_FILENAME), STAGE_PATH.joinpath('Abracadabra.ioc'))
 
         project = stm32pio.core.project.Stm32pio(STAGE_PATH.joinpath('42.ioc'))  # pick just one
-        self.assertTrue(project.ioc_file.samefile(STAGE_PATH.joinpath('42.ioc')),
+        self.assertTrue(project.cubemx.ioc.path.samefile(STAGE_PATH.joinpath('42.ioc')),
                         msg="Provided .ioc file hasn't been chosen")
         self.assertEqual(project.config.get('project', 'ioc_file'), '42.ioc',
                          msg="Provided .ioc file is not in the config")
@@ -251,7 +261,7 @@ def test_validate_environment(self):
             self.assertTrue(result_should_be_ok.succeed, msg="All the tools are correct but the validation says "
                                                              "otherwise")
 
-        with self.subTest(mag="Invalid config"):
+        with self.subTest(msg="Invalid config"):
             project.config.set('app', 'platformio_cmd', 'this_command_doesnt_exist')
             result_should_fail = project.validate_environment()
             self.assertFalse(result_should_fail.succeed, msg="One tool is incorrect and the results should reflect "
@@ -260,6 +270,56 @@ def test_validate_environment(self):
             self.assertIsNotNone(platformio_result, msg="PlatformIO validation results not found")
             self.assertFalse(platformio_result.succeed, msg="PlatformIO validation results should be False")
 
+    def test_inspect_ioc(self):
+        with self.subTest(msg="Parsing an .ioc file"):
+            config = stm32pio.core.cubemx.IocConfig(STAGE_PATH, PROJECT_IOC_FILENAME, logger=logging.getLogger('any'))
+            self.assertSequenceEqual(config.sections(), [stm32pio.core.cubemx.IocConfig.fake_section_name],
+                                     msg="Incorrect set of config sections", seq_type=list)
+            self.assertGreater(len(config[config.fake_section_name].keys()), 10, msg="There should be a lot of keys")
+
+        with self.subTest(msg="Inspecting a proper config"):
+            config = stm32pio.core.cubemx.IocConfig(STAGE_PATH, PROJECT_IOC_FILENAME, logger=logging.getLogger('any'))
+            with contextlib.redirect_stderr(io.StringIO()) as logs:
+                config.inspect(PROJECT_BOARD)
+            self.assertEqual(logs.getvalue(), '', msg="Correctly set config shouldn't produce any warnings")
+
+        with self.subTest(msg="Inspecting an invalid config"):
+            invalid_content = inspect.cleandoc('''
+                board=SOME-BOARD-123
+                # board is wrong and no other parameters at all
+            ''') + '\n'
+            invalid_ioc = STAGE_PATH / 'invalid.ioc'
+            invalid_ioc.write_text(invalid_content)
+            config = stm32pio.core.cubemx.IocConfig(STAGE_PATH, 'invalid.ioc', logger=logging.getLogger('any'))
+            with self.assertLogs(logger='any', level=logging.WARNING) as logs:
+                config.inspect(PROJECT_BOARD)
+            self.assertEqual(len(logs.records), 4, msg="There should be 4 warning log messages")
+
+        with self.subTest(msg="Custom board with unmatched MCUs"):
+            ioc_content = inspect.cleandoc('''
+                board=custom
+                ProjectManager.DeviceId=some_wrong_mcu
+            ''') + '\n'
+            invalid_ioc = STAGE_PATH / 'invalid.ioc'
+            invalid_ioc.write_text(ioc_content)
+            config = stm32pio.core.cubemx.IocConfig(STAGE_PATH, 'invalid.ioc', logger=logging.getLogger('any'))
+            with self.assertLogs(logger='any', level=logging.WARNING) as logs:
+                config.inspect(PROJECT_BOARD, 'STM32F031K6T6')
+            self.assertTrue(any('MCU' in line for line in logs.output), msg="No mention of mismatched MCUs")
+
+        with self.subTest(msg="Saving the config back"):
+            ioc_file = STAGE_PATH / PROJECT_IOC_FILENAME
+            initial_content = ioc_file.read_text()
+            config = stm32pio.core.cubemx.IocConfig(STAGE_PATH, PROJECT_IOC_FILENAME, logger=logging.getLogger('any'))
+
+            config.save()
+            self.assertEqual(ioc_file.read_text(), initial_content, msg="Configs should be identical")
+
+            changed_board = "INTEL-8086"
+            config[config.fake_section_name]['board'] = changed_board
+            config.save()
+            self.assertIn(f'board={changed_board}', ioc_file.read_text(), msg="Edited parameters weren't preserved")
+
     def test_clean(self):
         def plant_fs_tree(path: Path, tree: Mapping[str, Union[str, Mapping]], exist_ok: bool = True):
             for endpoint, content in tree.items():
@@ -307,30 +367,30 @@ def tree_not_exists_fully(path: Path, tree: Mapping[str, Union[str, Mapping]]):
             project = stm32pio.core.project.Stm32pio(STAGE_PATH)
             project.clean()
             self.assertTrue(tree_not_exists_fully(STAGE_PATH, test_tree), msg="Test tree hasn't been removed")
-            self.assertTrue(project.ioc_file.exists(), msg=".ios file wasn't preserved")
+            self.assertTrue(project.cubemx.ioc.path.exists(), msg=".ioc file wasn't preserved")
 
         self.setUp()  # same actions we perform between test cases (external cleaning)
         plant_fs_tree(STAGE_PATH, test_tree)
         with self.subTest(msg="not quiet, respond yes"):
             project = stm32pio.core.project.Stm32pio(STAGE_PATH)
             with unittest.mock.patch('builtins.input', return_value=stm32pio.core.settings.yes_options[0]):
-                project.clean(quiet_on_cli=False)
+                project.clean(quiet=False)
             input_args, input_kwargs = input.call_args  # input() function was called with these arguments
             input_prompt = input_args[0]
             # Check only for a name as the path separator is different for UNIX/Win
             self.assertTrue(all(endpoint.name in input_prompt for endpoint in test_tree_endpoints),
                             msg="Paths for removal should be reported to the user")
             self.assertTrue(tree_not_exists_fully(STAGE_PATH, test_tree), msg="Test tree hasn't been removed")
-            self.assertTrue(project.ioc_file.exists(), msg=".ios file wasn't preserved")
+            self.assertTrue(project.cubemx.ioc.path.exists(), msg=".ioc file wasn't preserved")
 
         self.setUp()
         plant_fs_tree(STAGE_PATH, test_tree)
         with self.subTest(msg="not quiet, respond no"):
             project = stm32pio.core.project.Stm32pio(STAGE_PATH)
             with unittest.mock.patch('builtins.input', return_value=stm32pio.core.settings.no_options[0]):
-                project.clean(quiet_on_cli=False)
+                project.clean(quiet=False)
             self.assertTrue(tree_exists_fully(STAGE_PATH, test_tree), msg="Test tree wasn't preserved")
-            self.assertTrue(project.ioc_file.exists(), msg=".ios file wasn't preserved")
+            self.assertTrue(project.cubemx.ioc.path.exists(), msg=".ioc file wasn't preserved")
 
         self.setUp()
         plant_fs_tree(STAGE_PATH, test_tree)
@@ -384,7 +444,7 @@ def tree_not_exists_fully(path: Path, tree: Mapping[str, Union[str, Mapping]]):
         plant_fs_tree(STAGE_PATH, test_tree)
         with self.subTest(msg="save current content in ignore list"):
             project = stm32pio.core.project.Stm32pio(STAGE_PATH)
-            project.config.save_content_as_ignore_list()
+            project.config.set_content_as_ignore_list()
             STAGE_PATH.joinpath('this_file_should_be_removed').touch()
             project.clean()
             self.assertTrue(tree_exists_fully(STAGE_PATH, test_tree), msg="Test tree should be preserved")