first commit

stuce-bot 2025-06-30 20:47:33 +02:00
commit 5893b00dd2
1669 changed files with 1982740 additions and 0 deletions


@ -0,0 +1,11 @@
This is the Continuous Integration tool that builds the product in various configurations.
Running
Install the Python `uv` tool:
`pip install uv`
Then run the compile script in this directory:
`uv run ci-compile.py`
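For example, a minimal invocation that compiles a single example for a single board (board and example names come from the defaults in `ci-compile.py`):
`uv run ci-compile.py uno --examples Blink`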


@ -0,0 +1,30 @@
{
"build": {
"core": "esp32",
"f_cpu": "120000000L",
"f_flash": "60000000L",
"flash_mode": "qio",
"mcu": "esp32c2",
"variant": "esp32c2"
},
"connectivity": [
"wifi"
],
"debug": {
"openocd_target": "esp32c2.cfg"
},
"frameworks": [
"arduino",
"espidf"
],
"name": "Espressif ESP32-C2-DevKitM-1",
"upload": {
"flash_size": "4MB",
"maximum_ram_size": 278528,
"maximum_size": 4194304,
"require_upload_port": true,
"speed": 460800
},
"url": "https://docs.espressif.com/projects/espressif-esp-dev-kits/en/latest/esp8684/esp8684-devkitm-1/user_guide.html",
"vendor": "Espressif"
}


@ -0,0 +1,31 @@
{
"build": {
"core": "esp32",
"f_cpu": "160000000L",
"f_flash": "80000000L",
"flash_mode": "qio",
"mcu": "esp32c6",
"variant": "esp32c6"
},
"connectivity": [
"bluetooth",
"wifi"
],
"debug": {
"openocd_target": "esp32c6.cfg"
},
"frameworks": [
"arduino",
"espidf"
],
"name": "Espressif ESP32-C6-DevKitC-1",
"upload": {
"flash_size": "8MB",
"maximum_ram_size": 327680,
"maximum_size": 8388608,
"require_upload_port": true,
"speed": 460800
},
"url": "https://docs.espressif.com/projects/espressif-esp-dev-kits/en/latest/esp32c6/esp32-c6-devkitc-1/index.html",
"vendor": "Espressif"
}


@ -0,0 +1,31 @@
{
"build": {
"core": "esp32",
"f_cpu": "96000000L",
"f_flash": "64000000L",
"f_image": "48000000L",
"flash_mode": "qio",
"mcu": "esp32h2",
"variant": "esp32h2"
},
"connectivity": [
"bluetooth"
],
"debug": {
"openocd_target": "esp32h2.cfg"
},
"frameworks": [
"arduino",
"espidf"
],
"name": "Espressif ESP32-H2-DevKit",
"upload": {
"flash_size": "4MB",
"maximum_ram_size": 327680,
"maximum_size": 4194304,
"require_upload_port": true,
"speed": 460800
},
"url": "https://docs.espressif.com/projects/espressif-esp-dev-kits/en/latest/esp32h2/esp32-h2-devkitm-1/index.html",
"vendor": "Espressif"
}


@ -0,0 +1,61 @@
{
"build": {
"arduino": {
"ldscript": "linker_script.ld",
"flash_layout": "100_0"
},
"extra_flags": "-DARDUINO_GIGA -DARDUINO_GIGA_PINS -DGIGA_PINS -DGIGA -DCM4_BINARY_START=0x60000000 -DCM4_BINARY_END=0x60040000 -DCM4_RAM_END=0x60080000",
"core": "arduino",
"cpu": "cortex-m7",
"f_cpu": "480000000L",
"mcu": "stm32h747xih6",
"variant": "GIGA",
"product_line": "STM32H747xx",
"hwids": [
[
"0x2341",
"0x0266"
],
[
"0x2341",
"0x0366"
],
[
"0x2341",
"0x0466"
]
]
},
"connectivity": [
"bluetooth",
"wifi"
],
"debug": {
"jlink_device": "STM32H747XI_M7",
"openocd_target": "stm32h7x_dual_bank"
},
"frameworks": [
"arduino"
],
"name": "Arduino Giga R1 Wifi",
"upload": {
"_maximum_ram_size": 294248,
"_maximum_size": 1048576,
"maximum_ram_size": 523624,
"maximum_size": 1966080,
"protocol": "dfu",
"protocols": [
"cmsis-dap",
"dfu",
"jlink",
"stlink",
"mbed"
],
"require_upload_port": true,
"use_1200bps_touch": true,
"wait_for_upload_port": true,
"offset_address": "0x08100000"
},
"url": "https://docs.arduino.cc/hardware/giga-r1-wifi",
"vendor": "Arduino"
}


@ -0,0 +1,51 @@
{
"build": {
"arduino":{
"ldscript": "nrf52_xxaa.ld"
},
"core": "nRF5",
"cpu": "cortex-m4",
"extra_flags": "-DARDUINO_NRF52_DK",
"f_cpu": "64000000L",
"mcu": "nrf52840",
"variant": "nRF52DK",
"zephyr": {
"variant": "nrf52840dk_nrf52840"
}
},
"connectivity": [
"bluetooth"
],
"debug": {
"default_tools": [
"jlink"
],
"jlink_device": "nRF52840_xxAA",
"onboard_tools": [
"cmsis-dap",
"jlink"
],
"svd_path": "nrf52840.svd"
},
"frameworks": [
"arduino",
"mbed",
"zephyr"
],
"name": "Nordic nRF52840-DK",
"upload": {
"maximum_ram_size": 262144,
"maximum_size": 1048576,
"protocol": "jlink",
"protocols": [
"jlink",
"nrfjprog",
"stlink",
"blackmagic",
"cmsis-dap",
"mbed"
]
},
"url": "https://os.mbed.com/platforms/Nordic-nRF52840-DK/",
"vendor": "Nordic"
}


@ -0,0 +1,56 @@
{
"build": {
"arduino": {
"earlephilhower": {
"boot2_source": "boot2_w25q080_2_padded_checksum.S",
"usb_vid": "0x2E8A",
"usb_pid": "0x000A"
}
},
"core": "earlephilhower",
"cpu": "cortex-m0plus",
"extra_flags": "-DARDUINO_RASPBERRY_PI_PICO -DARDUINO_ARCH_RP2040 -DUSBD_MAX_POWER_MA=250 ",
"f_cpu": "133000000L",
"hwids": [
[
"0x2E8A",
"0x00C0"
],
[
"0x2E8A",
"0x000A"
]
],
"mcu": "rp2040",
"variant": "rpipico"
},
"debug": {
"jlink_device": "RP2040_M0_0",
"openocd_target": "rp2040.cfg",
"svd_path": "rp2040.svd"
},
"frameworks": [
"arduino"
],
"name": "Pico",
"upload": {
"maximum_ram_size": 262144,
"maximum_size": 2097152,
"require_upload_port": true,
"native_usb": true,
"use_1200bps_touch": true,
"wait_for_upload_port": false,
"protocol": "picotool",
"protocols": [
"blackmagic",
"cmsis-dap",
"jlink",
"raspberrypi-swd",
"picotool",
"picoprobe",
"pico-debug"
]
},
"url": "https://www.raspberrypi.org/products/raspberry-pi-pico/",
"vendor": "Raspberry Pi"
}


@ -0,0 +1,56 @@
{
"build": {
"arduino": {
"earlephilhower": {
"boot2_source": "boot2_generic_03h_2_padded_checksum.S",
"usb_vid": "0x2E8A",
"usb_pid": "0x000B"
}
},
"core": "earlephilhower",
"cpu": "cortex-m33",
"extra_flags": "-D ARDUINO_RASPBERRY_PI_PICO_2 -DARDUINO_ARCH_RP2350 -DUSBD_MAX_POWER_MA=250",
"f_cpu": "133000000L",
"hwids": [
[
"0x2E8A",
"0x00C0"
],
[
"0x2E8A",
"0x000B"
]
],
"mcu": "rp2350",
"variant": "rpipico2"
},
"debug": {
"jlink_device": "RP2350_0",
"openocd_target": "rp2350.cfg",
"svd_path": "rp2350.svd"
},
"frameworks": [
"arduino"
],
"name": "Pico 2",
"upload": {
"maximum_ram_size": 524288,
"maximum_size": 2097152,
"require_upload_port": true,
"native_usb": true,
"use_1200bps_touch": true,
"wait_for_upload_port": false,
"protocol": "picotool",
"protocols": [
"blackmagic",
"cmsis-dap",
"jlink",
"raspberrypi-swd",
"picotool",
"picoprobe",
"pico-debug"
]
},
"url": "https://www.raspberrypi.org/products/raspberry-pi-pico/",
"vendor": "Raspberry Pi"
}


@ -0,0 +1,56 @@
{
"build": {
"arduino": {
"earlephilhower": {
"boot2_source": "boot2_w25q080_2_padded_checksum.S",
"usb_vid": "0x2E8A",
"usb_pid": "0xF00A"
}
},
"core": "earlephilhower",
"cpu": "cortex-m0plus",
"extra_flags": "-DARDUINO_RASPBERRY_PI_PICO_W -DARDUINO_ARCH_RP2040 -DUSBD_MAX_POWER_MA=250 ",
"f_cpu": "133000000L",
"hwids": [
[
"0x2E8A",
"0x00C0"
],
[
"0x2E8A",
"0xF00A"
]
],
"mcu": "rp2040",
"variant": "rpipicow"
},
"debug": {
"jlink_device": "RP2040_M0_0",
"openocd_target": "rp2040.cfg",
"svd_path": "rp2040.svd"
},
"frameworks": [
"arduino"
],
"name": "Pico W",
"upload": {
"maximum_ram_size": 262144,
"maximum_size": 2097152,
"require_upload_port": true,
"native_usb": true,
"use_1200bps_touch": true,
"wait_for_upload_port": false,
"protocol": "picotool",
"protocols": [
"blackmagic",
"cmsis-dap",
"jlink",
"raspberrypi-swd",
"picotool",
"picoprobe",
"pico-debug"
]
},
"url": "https://www.raspberrypi.org/products/raspberry-pi-pico/",
"vendor": "Raspberry Pi"
}


@ -0,0 +1,33 @@
{
"build": {
"core": "silabs",
"f_cpu": "39000000L",
"mcu": "cortex-m33",
"variant": "thingplusmatter"
},
"connectivity": [
"bluetooth",
"thread",
"wifi",
"zigbee"
],
"debug": {
"jlink_device": "EFR32MG24B020F1536IM40",
"onboard_tools": [
"jlink"
],
"svd_path": "EFR32MG24B020F1536IM40.svd"
},
"frameworks": [
"arduino"
],
"name": "Sparkfun Thing Plus Matter",
"upload": {
"flash_size": "1536kB",
"maximum_ram_size": 262144,
"maximum_size": 1572864,
"protocol": "jlink"
},
"url": "https://www.sparkfun.com/products/20270",
"vendor": "Sparkfun"
}


@ -0,0 +1,108 @@
import argparse
import os
import re
import subprocess
import sys
from pathlib import Path
HERE = Path(__file__).resolve().parent
PROJECT_ROOT = HERE.parent
IS_GITHUB = "GITHUB_ACTIONS" in os.environ
def run_command(
cmd_list: list[str], shell: bool = False, check=False, capture_output: bool = False
) -> str | None:
cmd = cmd_list if not shell else subprocess.list2cmdline(cmd_list)
result: subprocess.CompletedProcess = subprocess.run(
cmd, capture_output=capture_output, text=True, shell=shell, check=check
)
if not capture_output:
return None
stdout: str = result.stdout
stdout = stdout.strip()
return stdout
def parse_args():
parser = argparse.ArgumentParser(
description="Check compiled program size for a board"
)
parser.add_argument("board", help="Board name")
parser.add_argument(
"--max-size", type=int, required=False, help="Maximum allowed size"
)
parser.add_argument(
"--no-build",
action="store_true",
help="Skip compilation and check existing build",
)
parser.add_argument(
"--example",
default="Blink",
help="Example to compile (default: Blink)",
)
# Parse known args first
args, unknown = parser.parse_known_args()
# Add remaining arguments as extra_args
args.extra_args = unknown
return args
def main():
os.chdir(str(PROJECT_ROOT))
args = parse_args()
if not args.no_build:
cmd_list = [
"uv",
"run",
"ci/ci-compile.py",
args.board,
"--examples",
args.example,
] + args.extra_args
try:
run_command(cmd_list, shell=True, capture_output=IS_GITHUB, check=True)
except subprocess.CalledProcessError:
run_command(cmd_list, shell=True, capture_output=False, check=True)
output = run_command(
["uv", "run", "ci/compiled_size.py", "--board", args.board],
capture_output=True,
)
size_match = re.search(r": *(\d+)", output) # type: ignore
if not size_match:
print("Error: Unable to extract size from output")
print(f"Output: {output}")
sys.exit(1)
size = int(size_match.group(1))
if args.max_size is not None and args.max_size > 0:
max_size = args.max_size
if size > max_size:
print(f"{args.board} size {size} is greater than max size {max_size}")
print("::error::Compiled size exceeds maximum allowed size")
sys.exit(1)
else:
print(f"{args.board} size {size} is within the limit of {max_size}")
else:
        if args.max_size is None:
print("Warning: No max size specified")
elif args.max_size <= 0:
print("Warning: max size was <= 0 so no check was performed")
print(f"{args.board} size: {size}")
if __name__ == "__main__":
main()
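# Example invocation (a sketch; this file's name is not shown in the diff, so <this-script> is a placeholder):
#   uv run <this-script> uno --max-size 20000 --example Blink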


@ -0,0 +1,5 @@
#!/bin/bash
# cd to the directory of the script
cd "$(dirname "$0")"
uv run ci-compile.py "$@"


@ -0,0 +1,21 @@
#!/usr/bin/env python3
import os
import subprocess
import sys
def main():
# Change to the directory of the script
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Change to the 'native' directory and run 'pio run'
os.chdir("native")
result = subprocess.run(["pio", "run"], check=True)
# Exit with the same status as the pio run command
sys.exit(result.returncode)
if __name__ == "__main__":
main()


@ -0,0 +1,317 @@
"""
Runs the compilation process for all examples on all boards in parallel.
Build artifacts are recycled within a board group so that subsequent ino
files are built faster.
"""
import argparse
import sys
import time
import warnings
from pathlib import Path
from ci.boards import Board, get_board # type: ignore
from ci.concurrent_run import ConcurrentRunArgs, concurrent_run
from ci.locked_print import locked_print
HERE = Path(__file__).parent.resolve()
LIBS = ["src", "ci"]
EXTRA_LIBS = [
"https://github.com/me-no-dev/ESPAsyncWebServer.git",
"ArduinoOTA",
"SD",
"FS",
"ESPmDNS",
"WiFi",
"WebSockets",
]
BUILD_FLAGS = ["-Wl,-Map,firmware.map", "-fopt-info-all=optimization_report.txt"]
# Default boards to compile for. You can use boards not defined here but
# if the board isn't part of the officially supported platformio boards then
# you will need to add the board to the ~/.platformio/platforms directory.
# prior to running this script. This happens automatically as of 2024-08-20
# with the github workflow scripts.
DEFAULT_BOARDS_NAMES = [
"apollo3_red",
"apollo3_thing_explorable",
"web", # work in progress
"uno", # Build is faster if this is first, because it's used for global init.
"esp32dev",
"esp01", # ESP8266
"esp32c3",
"attiny85",
"ATtiny1616",
"esp32c6",
"esp32s3",
"yun",
"digix",
"teensy30",
"teensy41",
"adafruit_feather_nrf52840_sense",
"xiaoblesense_adafruit",
"rpipico",
"rpipico2",
"uno_r4_wifi",
"esp32rmt_51",
"esp32dev_idf44",
"bluepill",
"esp32rmt_51",
"giga_r1",
"sparkfun_xrp_controller",
]
OTHER_BOARDS_NAMES = [
"nano_every",
"esp32-c2-devkitm-1",
]
# Examples to compile.
DEFAULT_EXAMPLES = [
"Apa102",
"Apa102HD",
"Apa102HDOverride",
"Blink",
"Blur",
"Chromancer",
"ColorPalette",
"ColorTemperature",
"Cylon",
"DemoReel100",
"FirstLight",
"Fire2012",
"Multiple/MultipleStripsInOneArray",
"Multiple/ArrayOfLedArrays",
"Noise",
"NoisePlayground",
"NoisePlusPalette",
"LuminescentGrand",
"Pacifica",
"Pride2015",
"RGBCalibrate",
"RGBSetDemo",
"RGBW",
"Overclock",
"RGBWEmulated",
"TwinkleFox",
"XYMatrix",
"FxGfx2Video",
"FxSdCard",
"FxCylon",
"FxDemoReel100",
"FxTwinkleFox",
"FxFire2012",
"FxNoisePlusPalette",
"FxPacifica",
"FxEngine",
"WS2816",
]
EXTRA_EXAMPLES: dict[Board, list[str]] = {
# ESP32DEV: ["EspI2SDemo"],
# ESP32_S3_DEVKITC_1: ["EspS3I2SDemo"],
}
def parse_args():
parser = argparse.ArgumentParser(
description="Compile FastLED examples for various boards."
)
# parser.add_argument(
# "--boards", type=str, help="Comma-separated list of boards to compile for"
# )
# needs to be a positional argument instead
parser.add_argument(
"boards",
type=str,
help="Comma-separated list of boards to compile for",
nargs="?",
)
parser.add_argument(
"--examples", type=str, help="Comma-separated list of examples to compile"
)
parser.add_argument(
"--exclude-examples", type=str, help="Examples that should be excluded"
)
parser.add_argument(
"--skip-init", action="store_true", help="Skip the initialization step"
)
parser.add_argument(
"--defines", type=str, help="Comma-separated list of compiler definitions"
)
parser.add_argument(
"--extra-packages",
type=str,
help="Comma-separated list of extra packages to install",
)
parser.add_argument(
"--add-extra-esp32-libs",
action="store_true",
help="Add extra libraries to the libraries list to check against compiler errors.",
)
parser.add_argument(
"--build-dir", type=str, help="Override the default build directory"
)
parser.add_argument(
"--no-project-options",
action="store_true",
help="Don't use custom project options",
)
parser.add_argument(
"--interactive",
action="store_true",
help="Enable interactive mode to choose a board",
)
# Passed by the github action to disable interactive mode.
parser.add_argument(
"--no-interactive", action="store_true", help="Disable interactive mode"
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Enable verbose output"
)
parser.add_argument(
"--supported-boards",
action="store_true",
help="Print the list of supported boards and exit",
)
args, unknown = parser.parse_known_args()
if unknown:
warnings.warn(f"Unknown arguments: {unknown}")
    # if --interactive and --no-interactive are both passed, --no-interactive takes precedence.
if args.interactive and args.no_interactive:
warnings.warn(
"Both --interactive and --no-interactive were passed, --no-interactive takes precedence."
)
args.interactive = False
return args
def remove_duplicates(items: list[str]) -> list[str]:
seen = set()
out = []
for item in items:
if item not in seen:
seen.add(item)
out.append(item)
return out
def choose_board_interactively(boards: list[str]) -> list[str]:
print("Available boards:")
boards = remove_duplicates(sorted(boards))
for i, board in enumerate(boards):
print(f"[{i}]: {board}")
print("[all]: All boards")
out: list[str] = []
while True:
try:
# choice = int(input("Enter the number of the board(s) you want to compile to: "))
input_str = input(
"Enter the number of the board(s) you want to compile to, or it's name(s): "
)
if "all" in input_str:
return boards
for board in input_str.split(","):
if board == "":
continue
if not board.isdigit():
out.append(board) # Assume it's a board name.
else:
index = int(board) # Find the board from the index.
if 0 <= index < len(boards):
out.append(boards[index])
else:
warnings.warn(f"invalid board index: {index}, skipping")
if not out:
print("Please try again.")
continue
return out
except ValueError:
print("Invalid input. Please enter a number.")
def resolve_example_path(example: str) -> Path:
example_path = HERE.parent / "examples" / example
if not example_path.exists():
raise FileNotFoundError(f"Example '{example}' not found at '{example_path}'")
return example_path
def create_concurrent_run_args(args: argparse.Namespace) -> ConcurrentRunArgs:
skip_init = args.skip_init
if args.interactive:
boards = choose_board_interactively(DEFAULT_BOARDS_NAMES + OTHER_BOARDS_NAMES)
else:
boards = args.boards.split(",") if args.boards else DEFAULT_BOARDS_NAMES
projects: list[Board] = []
for board in boards:
projects.append(get_board(board, no_project_options=args.no_project_options))
extra_examples: dict[Board, list[Path]] = {}
if args.examples is None:
for b, _examples in EXTRA_EXAMPLES.items():
resolved_examples = [resolve_example_path(example) for example in _examples]
extra_examples[b] = resolved_examples
examples = args.examples.split(",") if args.examples else DEFAULT_EXAMPLES
examples_paths = [resolve_example_path(example) for example in examples]
# now process example exclusions.
if args.exclude_examples:
exclude_examples = args.exclude_examples.split(",")
examples_paths = [
example
for example in examples_paths
if example.name not in exclude_examples
]
        for exclude in exclude_examples:
            if exclude in examples:
                examples.remove(exclude)
defines: list[str] = []
if args.defines:
defines.extend(args.defines.split(","))
extra_packages: list[str] = []
if args.extra_packages:
extra_packages.extend(args.extra_packages.split(","))
build_dir = args.build_dir
extra_scripts = "pre:lib/ci/ci-flags.py"
verbose = args.verbose
out: ConcurrentRunArgs = ConcurrentRunArgs(
projects=projects,
examples=examples_paths,
skip_init=skip_init,
defines=defines,
extra_packages=extra_packages,
libs=LIBS,
build_dir=build_dir,
extra_scripts=extra_scripts,
cwd=str(HERE.parent),
board_dir=(HERE / "boards").absolute().as_posix(),
build_flags=BUILD_FLAGS,
verbose=verbose,
extra_examples=extra_examples,
)
return out
def main() -> int:
"""Main function."""
args = parse_args()
if args.supported_boards:
print(",".join(DEFAULT_BOARDS_NAMES))
return 0
if args.add_extra_esp32_libs:
LIBS.extend(EXTRA_LIBS)
# Set the working directory to the script's parent directory.
run_args = create_concurrent_run_args(args)
start_time = time.time()
rtn = concurrent_run(args=run_args)
time_taken = time.strftime("%Mm:%Ss", time.gmtime(time.time() - start_time))
locked_print(f"Compilation finished in {time_taken}.")
return rtn
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
sys.exit(1)


@ -0,0 +1,56 @@
import argparse
import os
import subprocess
import sys
from pathlib import Path
MINIMUM_REPORT_SEVERITY = "medium"
MINIMUM_FAIL_SEVERITY = "high"
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Run cppcheck on the project")
parser.add_argument("board", nargs="?", help="Board to check, optional")
return parser.parse_args()
def main() -> int:
args = parse_args()
here = Path(__file__).parent
project_root = here.parent
build = project_root / ".build"
os.chdir(str(build))
if args.board:
build = build / args.board
if not build.exists():
print(f"Board {args.board} not found")
return 1
os.chdir(str(build))
else:
# Change to the first subdirectory in .build
subdirs = [d for d in os.listdir() if os.path.isdir(d)]
assert (
len(subdirs) == 1
), f"Expected exactly one subdirectory in {build}, instead got {subdirs}"
if subdirs:
os.chdir(subdirs[0])
# Run pio check command
cp = subprocess.run(
[
"pio",
"check",
"--skip-packages",
"--src-filters=+<lib/src/>",
f"--severity={MINIMUM_REPORT_SEVERTIY}",
f"--fail-on-defect={MINIMUM_FAIL_SEVERTIY}",
"--flags",
"--inline-suppr",
],
)
return cp.returncode
if __name__ == "__main__":
sys.exit(main())


@ -0,0 +1,7 @@
# mypy: ignore-errors
# flake8: noqa
# ruff: noqa
Import("env") # type: ignore
env.Append(CXXFLAGS=["-Wno-register"]) # type: ignore


@ -0,0 +1,175 @@
import subprocess
from pathlib import Path
def _run_command(command: list, show_output=False):
"""
Run a command using subprocess and capture the output.
Args:
command (list): Command to run.
show_output (bool): Print command and its output if True.
Returns:
str: Standard output of the command.
Raises:
RuntimeError: If the command fails.
"""
if show_output:
print(f"Running command: {' '.join(command)}")
result = subprocess.run(command, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"Command failed: {' '.join(command)}\n{result.stderr}")
if show_output and result.stdout:
print(f"Command output: {result.stdout}")
return result.stdout
def _generate_linker_script(map_file: Path) -> Path:
"""
Generate a linker script based on map file information.
Args:
map_file (Path): Path to the map file.
Returns:
Path: Path to the generated linker script.
"""
linker_script_content = """
SECTIONS
{
.text 0x00000000 :
{
*(.text)
}
.data :
{
*(.data)
}
.bss :
{
*(.bss)
}
}
"""
linker_script_path = map_file.with_suffix(".ld")
linker_script_path.write_text(linker_script_content)
print(f"Generated linker script at: {linker_script_path}")
return linker_script_path
def _create_dummy_object_file(as_path: Path, dummy_obj_path: Path):
"""
Create a minimal dummy object file using the specified assembler.
Args:
as_path (Path): Path to the assembler executable.
dummy_obj_path (Path): Path to the dummy object file to be created.
"""
assembly_code = """
.section .text
.global _start
_start:
nop
"""
asm_file = dummy_obj_path.with_suffix(".s")
asm_file.write_text(assembly_code)
command = [str(as_path), "-o", str(dummy_obj_path), str(asm_file)]
print(f"Creating dummy object file: {dummy_obj_path}")
_run_command(command, show_output=True)
asm_file.unlink() # Clean up the temporary assembly file
def _create_dummy_elf(
ld_path: Path, linker_script: Path, dummy_obj: Path, output_elf: Path
):
"""
Create a dummy ELF file using the specified linker script and dummy object file.
Args:
ld_path (Path): Path to the ld executable.
linker_script (Path): Path to the linker script.
dummy_obj (Path): Path to the dummy object file.
output_elf (Path): Path to the output ELF file.
"""
command = [
str(ld_path),
str(dummy_obj),
"-T",
str(linker_script),
"-o",
str(output_elf),
]
print(f"Creating dummy ELF file: {output_elf}")
_run_command(command, show_output=True)
def _update_elf_sections(
objcopy_path: Path, bin_file: Path, elf_file: Path, section_name: str
):
"""
Update the ELF file sections with binary data.
Args:
objcopy_path (Path): Path to the objcopy executable.
bin_file (Path): Path to the binary file.
elf_file (Path): Path to the ELF file.
section_name (str): Name of the section to update.
"""
command = [
str(objcopy_path),
"--update-section",
f"{section_name}={bin_file}",
str(elf_file),
]
print(
f"Updating ELF file '{elf_file}' section '{section_name}' with binary file '{bin_file}'"
)
_run_command(command, show_output=True)
def bin_to_elf(
bin_file: Path,
map_file: Path,
as_path: Path,
ld_path: Path,
objcopy_path: Path,
output_elf: Path,
):
"""
Convert a binary file to ELF format.
Args:
bin_file (Path): Path to the input binary file.
map_file (Path): Path to the map file.
as_path (Path): Path to the assembler executable.
ld_path (Path): Path to the linker executable.
objcopy_path (Path): Path to the objcopy executable.
output_elf (Path): Path to the output ELF file.
Returns:
Path: Path to the generated ELF file.
"""
# Generate a linker script based on the map file
linker_script = _generate_linker_script(map_file)
# Create a minimal dummy object file
dummy_obj_path = bin_file.with_name("dummy.o")
_create_dummy_object_file(as_path, dummy_obj_path)
# Create a dummy ELF file using the generated linker script
_create_dummy_elf(ld_path, linker_script, dummy_obj_path, output_elf)
# Update the ELF sections with binary data
_update_elf_sections(objcopy_path, bin_file, output_elf, ".text")
# Clean up dummy object file
if dummy_obj_path.exists():
dummy_obj_path.unlink()
if linker_script.exists():
linker_script.unlink()
return output_elf
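# Example usage (a minimal sketch; the tool and artifact paths below are assumptions):
#   from pathlib import Path
#   elf = bin_to_elf(
#       bin_file=Path("firmware.bin"),
#       map_file=Path("firmware.map"),
#       as_path=Path("/usr/bin/as"),
#       ld_path=Path("/usr/bin/ld"),
#       objcopy_path=Path("/usr/bin/objcopy"),
#       output_elf=Path("firmware.elf"),
#   )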


@ -0,0 +1,318 @@
# dataclasses
import json
from dataclasses import dataclass
# An open source version of the esp-idf 5.1 platform for the ESP32 that
# gives esp32 boards the same build environment as the Arduino 2.3.1+.
# Set to a specific release, we may want to update this in the future.
ESP32_IDF_5_1_PIOARDUINO = "https://github.com/pioarduino/platform-espressif32/releases/download/51.03.04/platform-espressif32.zip"
# TODO: Upgrade toolkit to 5.3
ESP32_IDF_5_3_PIOARDUINO = "https://github.com/pioarduino/platform-espressif32/releases/download/53.03.10/platform-espressif32.zip"
ESP32_IDF_5_1_PIOARDUINO_LATEST = (
"https://github.com/pioarduino/platform-espressif32.git#develop"
)
ESP32_IDF_4_4_LATEST = "platformio/espressif32"
APOLLO3_2_2_0 = "https://github.com/nigelb/platform-apollo3blue"
# Top of trunk.
# ESP32_IDF_5_1_PIOARDUINO = "https://github.com/pioarduino/platform-espressif32"
# Old fork that we were using
# ESP32_IDF_5_1_PIOARDUINO = "https://github.com/zackees/platform-espressif32#Arduino/IDF5"
ALL: list["Board"] = []
@dataclass
class Board:
board_name: str
real_board_name: str | None = None
platform: str | None = None
platform_needs_install: bool = False
use_pio_run: bool = (
False # some platforms like esp32-c2-devkitm-1 will only work with pio run
)
platform_packages: str | None = None
framework: str | None = None
board_build_mcu: str | None = None
board_build_core: str | None = None
board_build_filesystem_size: str | None = None
build_flags: list[str] | None = None # Reserved for future use.
defines: list[str] | None = None
board_partitions: str | None = None # Reserved for future use.
def __post_init__(self) -> None:
ALL.append(self)
def get_real_board_name(self) -> str:
return self.real_board_name if self.real_board_name else self.board_name
def to_dictionary(self) -> dict[str, list[str]]:
out: dict[str, list[str]] = {}
if self.real_board_name:
out[self.board_name] = [f"board={self.real_board_name}"]
options = out.setdefault(self.board_name, [])
if self.platform:
options.append(f"platform={self.platform}")
if self.platform_needs_install:
options.append("platform_needs_install=true")
if self.platform_packages:
options.append(f"platform_packages={self.platform_packages}")
if self.framework:
options.append(f"framework={self.framework}")
if self.board_build_core:
options.append(f"board_build.core={self.board_build_core}")
if self.board_build_mcu:
options.append(f"board_build.mcu={self.board_build_mcu}")
if self.board_build_filesystem_size:
options.append(
f"board_build.filesystem_size={self.board_build_filesystem_size}"
)
if self.defines:
for define in self.defines:
options.append(f"build_flags=-D{define}")
return out
def __repr__(self) -> str:
json_str = json.dumps(self.to_dictionary(), indent=4, sort_keys=True)
return json_str
def __hash__(self) -> int:
data_str = self.__repr__()
return hash(data_str)
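# Example: a board defined as
#   Board(board_name="esp32c3", real_board_name="esp32-c3-devkitm-1", platform=ESP32_IDF_5_3_PIOARDUINO)
# produces, via to_dictionary(), roughly (a sketch):
#   {"esp32c3": ["board=esp32-c3-devkitm-1", "platform=<ESP32_IDF_5_3_PIOARDUINO url>"]}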
# [env:sparkfun_xrp_controller]
# platform = https://github.com/maxgerhardt/platform-raspberrypi
# board = sparkfun_xrp_controller
# framework = arduino
# lib_deps = fastled/FastLED @ ^3.9.16
WEBTARGET = Board(
board_name="web",
)
DUE = Board(
board_name="due",
platform="atmelsam",
)
SPARKFUN_XRP_CONTROLLER_2350B = Board(
board_name="sparkfun_xrp_controller",
platform="https://github.com/maxgerhardt/platform-raspberrypi",
platform_needs_install=True,
)
APOLLO3_RED_BOARD = Board(
board_name="apollo3_red",
real_board_name="SparkFun_RedBoard_Artemis_ATP",
platform=APOLLO3_2_2_0,
platform_packages="framework-arduinoapollo3@https://github.com/sparkfun/Arduino_Apollo3#v2.2.0",
platform_needs_install=True,
)
APOLLO3_SPARKFUN_THING_PLUS_EXPLORERABLE = Board(
board_name="apollo3_thing_explorable",
real_board_name="SparkFun_Thing_Plus_expLoRaBLE",
platform=APOLLO3_2_2_0,
platform_packages="framework-arduinoapollo3@https://github.com/sparkfun/Arduino_Apollo3#v2.2.0",
platform_needs_install=True,
)
ESP32DEV = Board(
board_name="esp32dev",
platform=ESP32_IDF_5_3_PIOARDUINO,
)
ESP32DEV_IDF3_3 = Board(
board_name="esp32dev_idf33",
real_board_name="esp32dev",
platform="espressif32@1.11.2",
)
ESP32DEV_IDF4_4 = Board(
board_name="esp32dev_idf44",
real_board_name="esp32dev",
platform=ESP32_IDF_4_4_LATEST,
)
GIGA_R1 = Board(
board_name="giga_r1",
platform="ststm32",
framework="arduino",
real_board_name="giga_r1_m7",
)
# ESP01 = Board(
# board_name="esp01",
# platform=ESP32_IDF_5_1_PIOARDUINO,
# )
ESP32_C2_DEVKITM_1 = Board(
board_name="esp32c2",
real_board_name="esp32-c2-devkitm-1",
use_pio_run=True,
platform="https://github.com/Jason2866/platform-espressif32.git#Arduino/IDF5",
defines=["CONFIG_IDF_TARGET_ESP32C2=1"],
)
ESP32_C3_DEVKITM_1 = Board(
board_name="esp32c3",
real_board_name="esp32-c3-devkitm-1",
platform=ESP32_IDF_5_3_PIOARDUINO,
)
ESP32_C6_DEVKITC_1 = Board(
board_name="esp32c6",
real_board_name="esp32-c6-devkitc-1",
platform=ESP32_IDF_5_3_PIOARDUINO,
)
ESP32_S3_DEVKITC_1 = Board(
board_name="esp32s3",
real_board_name="seeed_xiao_esp32s3", # Seeed Xiao ESP32-S3 has psram.
platform=ESP32_IDF_5_3_PIOARDUINO,
defines=[
"BOARD_HAS_PSRAM",
],
build_flags=[ # Reserved for future use.
"-mfix-esp32-psram-cache-issue",
"-mfix-esp32-psram-cache-strategy=memw",
],
board_partitions="huge_app.csv", # Reserved for future use.
)
ESP32_S2_DEVKITM_1 = Board(
board_name="esp32s2",
real_board_name="esp32dev",
board_build_mcu="esp32s2",
platform=ESP32_IDF_5_3_PIOARDUINO,
)
ESP32_H2_DEVKITM_1 = Board(
board_name="esp32-h2-devkitm-1",
platform_needs_install=True, # Install platform package to get the boards
platform=ESP32_IDF_5_3_PIOARDUINO,
)
ADA_FEATHER_NRF52840_SENSE = Board(
board_name="adafruit_feather_nrf52840_sense",
platform="nordicnrf52",
)
XIAOBLESENSE_ADAFRUIT_NRF52 = Board(
board_name="xiaoblesense_adafruit",
platform="https://github.com/maxgerhardt/platform-nordicnrf52",
platform_needs_install=True, # Install platform package to get the boards
)
XIAOBLESENSE_NRF52 = Board(
board_name="xiaoblesense",
platform="https://github.com/maxgerhardt/platform-nordicnrf52",
platform_needs_install=True,
)
NRF52840 = Board(
board_name="nrf52840_dk",
real_board_name="xiaoble_adafruit",
platform="https://github.com/maxgerhardt/platform-nordicnrf52",
platform_needs_install=True,
)
RPI_PICO = Board(
board_name="rpipico",
platform="https://github.com/maxgerhardt/platform-raspberrypi.git",
platform_needs_install=True, # Install platform package to get the boards
platform_packages="framework-arduinopico@https://github.com/earlephilhower/arduino-pico.git",
framework="arduino",
board_build_core="earlephilhower",
board_build_filesystem_size="0.5m",
)
RPI_PICO2 = Board(
board_name="rpipico2",
platform="https://github.com/maxgerhardt/platform-raspberrypi.git",
platform_needs_install=True, # Install platform package to get the boards
platform_packages="framework-arduinopico@https://github.com/earlephilhower/arduino-pico.git",
framework="arduino",
board_build_core="earlephilhower",
board_build_filesystem_size="0.5m",
)
BLUEPILL = Board(
board_name="bluepill",
real_board_name="bluepill_f103c8",
platform="ststm32",
)
# maple_mini_b20
MAPLE_MINI = Board(
board_name="maple_mini",
real_board_name="maple_mini_b20",
platform="ststm32",
)
ATTINY88 = Board(
board_name="attiny88",
platform="atmelavr",
)
# ATtiny1616
ATTINY1616 = Board(
board_name="ATtiny1616",
platform="atmelmegaavr",
)
UNO_R4_WIFI = Board(
board_name="uno_r4_wifi",
platform="renesas-ra",
)
NANO_EVERY = Board(
board_name="nano_every",
platform="atmelmegaavr",
)
ESP32DEV_I2S = Board(
board_name="esp32dev_i2s",
real_board_name="esp32dev",
platform=ESP32_IDF_4_4_LATEST,
)
ESP32S3_RMT51 = Board(
board_name="esp32rmt_51",
real_board_name="esp32-s3-devkitc-1",
platform_needs_install=True,
platform=ESP32_IDF_5_3_PIOARDUINO,
defines=[
"FASTLED_RMT5=1",
],
)
def _make_board_map(boards: list[Board]) -> dict[str, Board]:
# make board map, but assert on duplicate board names
board_map: dict[str, Board] = {}
for board in boards:
assert (
board.board_name not in board_map
), f"Duplicate board name: {board.board_name}"
board_map[board.board_name] = board
return board_map
_BOARD_MAP: dict[str, Board] = _make_board_map(ALL)
def get_board(board_name: str, no_project_options: bool = False) -> Board:
if no_project_options:
return Board(board_name=board_name)
if board_name not in _BOARD_MAP:
# empty board without any special overrides, assume platformio will know what to do with it.
return Board(board_name=board_name)
else:
return _BOARD_MAP[board_name]
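# Example usage (a sketch):
#   board = get_board("esp32dev")        # returns the ESP32DEV definition above
#   board = get_board("some_new_board")  # unknown names fall back to a bare Board definition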


@ -0,0 +1,212 @@
import os
import shutil
import subprocess
from pathlib import Path
from threading import Lock
from ci.boards import Board # type: ignore
from ci.locked_print import locked_print
ERROR_HAPPENED = False
IS_GITHUB = "GITHUB_ACTIONS" in os.environ
FIRST_BUILD_LOCK = Lock()
USE_FIRST_BUILD_LOCK = IS_GITHUB
def errors_happened() -> bool:
"""Return whether any errors happened during the build."""
return ERROR_HAPPENED
def _fastled_js_is_parent_directory(p: Path) -> bool:
"""Check if fastled_js is a parent directory of the given path."""
# Check if fastled_js is a parent directory of p
return "fastled_js" in str(p.absolute())
def compile_for_board_and_example(
board: Board,
example: Path,
build_dir: str | None,
verbose_on_failure: bool,
libs: list[str] | None,
) -> tuple[bool, str]:
"""Compile the given example for the given board."""
global ERROR_HAPPENED # pylint: disable=global-statement
if board.board_name == "web":
locked_print(f"Skipping web target for example {example}")
return True, ""
board_name = board.board_name
use_pio_run = board.use_pio_run
real_board_name = board.get_real_board_name()
libs = libs or []
builddir = (
Path(build_dir) / board_name if build_dir else Path(".build") / board_name
)
builddir.mkdir(parents=True, exist_ok=True)
srcdir = builddir / "src"
# Remove the previous *.ino file if it exists, everything else is recycled
# to speed up the next build.
if srcdir.exists():
shutil.rmtree(srcdir, ignore_errors=False)
locked_print(f"*** Building example {example} for board {board_name} ***")
cwd: str | None = None
shell: bool = False
# Copy all files from the example directory to the "src" directory
for src_file in example.rglob("*"):
if src_file.is_file():
if _fastled_js_is_parent_directory(src_file):
# Skip the fastled_js folder, it's not needed for the build.
continue
src_dir = src_file.parent
path = src_dir.relative_to(example)
dst_dir = srcdir / path
os.makedirs(dst_dir, exist_ok=True)
locked_print(f"Copying {src_file} to {dst_dir / src_file.name}")
os.makedirs(srcdir, exist_ok=True)
shutil.copy(src_file, dst_dir / src_file.name)
# libs = ["src", "ci"]
if use_pio_run:
# we have to copy a few folders of pio ci in order to get this to work.
for lib in libs:
project_libdir = Path(lib)
assert project_libdir.exists()
build_lib = builddir / "lib" / lib
shutil.rmtree(build_lib, ignore_errors=True)
shutil.copytree(project_libdir, build_lib)
cwd = str(builddir)
cmd_list = [
"pio",
"run",
]
# in this case we need to manually copy the example to the src directory
# because platformio doesn't support building a single file.
# ino_file = example / f"{example.name}.ino"
else:
cmd_list = [
"pio",
"ci",
"--board",
real_board_name,
*[f"--lib={lib}" for lib in libs],
"--keep-build-dir",
f"--build-dir={builddir.as_posix()}",
]
cmd_list.append(f"{example.as_posix()}/*ino")
cmd_str = subprocess.list2cmdline(cmd_list)
    msg_list = [
"\n\n******************************",
f"* Running command in cwd: {cwd if cwd else os.getcwd()}",
f"* {cmd_str}",
"******************************\n",
]
    msg = "\n".join(msg_list)
locked_print(msg)
result = subprocess.run(
cmd_list,
cwd=cwd,
shell=shell,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
check=False,
)
stdout = result.stdout
# replace all instances of "lib/src" => "src" so intellisense can find the files
# with one click.
stdout = stdout.replace("lib/src", "src").replace("lib\\src", "src")
locked_print(stdout)
if result.returncode != 0:
if not verbose_on_failure:
ERROR_HAPPENED = True
return False, stdout
if ERROR_HAPPENED:
return False, ""
ERROR_HAPPENED = True
locked_print(
f"*** Error compiling example {example} for board {board_name} ***"
)
# re-running command with verbose output to see what the defines are.
cmd_list.append("-v")
cmd_str = subprocess.list2cmdline(cmd_list)
        msg_list = [
"\n\n******************************",
"* Re-running failed command but with verbose output:",
f"* {cmd_str}",
"******************************\n",
]
        msg = "\n".join(msg_list)
locked_print(msg)
result = subprocess.run(
cmd_list,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
check=False,
)
stdout = result.stdout
# replace all instances of "lib/src" => "src" so intellisense can find the files
# with one click.
stdout = stdout.replace("lib/src", "src").replace("lib\\src", "src")
stdout = (
stdout
+ "\n\nThis is a second attempt, but with verbose output, look above for compiler errors.\n"
)
locked_print(stdout)
return False, stdout
locked_print(f"*** Finished building example {example} for board {board_name} ***")
return True, stdout
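# For a board that does not set use_pio_run, the generated command looks roughly like
# (a sketch, with LIBS = ["src", "ci"] passed in from ci-compile.py):
#   pio ci --board uno --lib=src --lib=ci --keep-build-dir --build-dir=.build/uno examples/Blink/*ino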
# Function to process task queues for each board
def compile_examples(
board: Board,
examples: list[Path],
build_dir: str | None,
verbose_on_failure: bool,
libs: list[str] | None,
) -> tuple[bool, str]:
"""Process the task queue for the given board."""
global ERROR_HAPPENED # pylint: disable=global-statement
board_name = board.board_name
is_first = True
for example in examples:
example = example.relative_to(Path(".").resolve())
if ERROR_HAPPENED:
return True, ""
locked_print(f"\n*** Building {example} for board {board_name} ***")
if is_first:
locked_print(
f"*** Building for first example {example} board {board_name} ***"
)
if is_first and USE_FIRST_BUILD_LOCK:
with FIRST_BUILD_LOCK:
# Github runners are memory limited and the first job is the most
# memory intensive since all the artifacts are being generated in parallel.
success, message = compile_for_board_and_example(
board=board,
example=example,
build_dir=build_dir,
verbose_on_failure=verbose_on_failure,
libs=libs,
)
else:
success, message = compile_for_board_and_example(
board=board,
example=example,
build_dir=build_dir,
verbose_on_failure=verbose_on_failure,
libs=libs,
)
is_first = False
if not success:
ERROR_HAPPENED = True
return (
False,
f"Error building {example} for board {board_name}. stdout:\n{message}",
)
return True, ""


@ -0,0 +1,157 @@
import os
import time
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
from dataclasses import dataclass
from pathlib import Path
from ci.boards import Board # type: ignore
from ci.compile_for_board import compile_examples, errors_happened
from ci.cpu_count import cpu_count
from ci.create_build_dir import create_build_dir
from ci.locked_print import locked_print
# Board initialization doesn't take a lot of memory or cpu so it's safe to run in parallel
PARRALLEL_PROJECT_INITIALIZATION = (
os.environ.get("PARRALLEL_PROJECT_INITIALIZATION", "1") == "1"
)
def _banner_print(msg: str) -> None:
"""Print a banner message."""
# will produce
#######
# msg #
#######
lines = msg.splitlines()
for line in lines:
print("#" * (len(line) + 4))
print(f"# {line} #")
print("#" * (len(line) + 4))
@dataclass
class ConcurrentRunArgs:
projects: list[Board]
examples: list[Path]
skip_init: bool
defines: list[str]
extra_packages: list[str]
libs: list[str] | None
build_dir: str | None
extra_scripts: str | None
cwd: str | None
board_dir: str | None
build_flags: list[str] | None
verbose: bool = False
extra_examples: dict[Board, list[Path]] | None = None
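# Example construction (a sketch; see create_concurrent_run_args in ci-compile.py for the real
# wiring, and ci.boards.get_board for obtaining Board objects):
#   args = ConcurrentRunArgs(
#       projects=[get_board("uno")],
#       examples=[Path("examples/Blink")],
#       skip_init=False, defines=[], extra_packages=[], libs=["src", "ci"],
#       build_dir=None, extra_scripts=None, cwd=None, board_dir=None, build_flags=None,
#   )
#   rc = concurrent_run(args)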
def concurrent_run(
args: ConcurrentRunArgs,
) -> int:
projects = args.projects
examples = args.examples
skip_init = args.skip_init
defines = args.defines
extra_packages = args.extra_packages
build_dir = args.build_dir
extra_scripts = args.extra_scripts
cwd = args.cwd
start_time = time.time()
first_project = projects[0]
prev_cwd: str | None = None
board_dir = args.board_dir
libs = args.libs
extra_examples: dict[Board, list[Path]] = args.extra_examples or {}
if cwd:
prev_cwd = os.getcwd()
locked_print(f"Changing to directory {cwd}")
os.chdir(cwd)
start_time = time.time()
create_build_dir(
board=first_project,
defines=defines,
no_install_deps=skip_init,
extra_packages=extra_packages,
build_dir=build_dir,
board_dir=board_dir,
build_flags=args.build_flags,
extra_scripts=extra_scripts,
)
diff = time.time() - start_time
msg = f"Build directory created in {diff:.2f} seconds for board"
locked_print(msg)
verbose = args.verbose
# This is not memory/cpu bound but is instead network bound so we can run one thread
# per board to speed up the process.
parallel_init_workers = 1 if not PARRALLEL_PROJECT_INITIALIZATION else len(projects)
# Initialize the build directories for all boards
with ThreadPoolExecutor(max_workers=parallel_init_workers) as executor:
future_to_board: dict[Future, Board] = {}
for board in projects:
future = executor.submit(
create_build_dir,
board,
defines,
skip_init,
extra_packages,
build_dir,
board_dir,
args.build_flags,
extra_scripts,
)
future_to_board[future] = board
for future in as_completed(future_to_board):
board = future_to_board[future]
success, msg = future.result()
if not success:
locked_print(
f"Error initializing build_dir for board {board.board_name}:\n{msg}"
)
# cancel all other tasks
for f in future_to_board:
f.cancel()
return 1
else:
locked_print(
f"Finished initializing build_dir for board {board.board_name}"
)
init_end_time = time.time()
init_time = (init_end_time - start_time) / 60
locked_print(f"\nAll build directories initialized in {init_time:.2f} minutes.")
errors: list[str] = []
# Run the compilation process
num_cpus = max(1, min(cpu_count(), len(projects)))
with ThreadPoolExecutor(max_workers=num_cpus) as executor:
future_to_board = {
executor.submit(
compile_examples,
board,
examples + extra_examples.get(board, []),
build_dir,
verbose,
libs=libs,
): board
for board in projects
}
for future in as_completed(future_to_board):
board = future_to_board[future]
success, msg = future.result()
if not success:
msg = f"Compilation failed for board {board}: {msg}"
errors.append(msg)
locked_print(f"Compilation failed for board {board}: {msg}.\nStopping.")
for f in future_to_board:
f.cancel()
break
if prev_cwd:
locked_print(f"Changing back to directory {prev_cwd}")
os.chdir(prev_cwd)
if errors_happened():
locked_print("\nDone. Errors happened during compilation.")
locked_print("\n".join(errors))
return 1
return 0


@ -0,0 +1,8 @@
import os
def cpu_count() -> int:
"""Get the number of CPUs."""
if "GITHUB_ACTIONS" in os.environ:
return 4
return os.cpu_count() or 1


@ -0,0 +1,223 @@
import json
import os
import shutil
import subprocess
import warnings
from pathlib import Path
from ci.boards import Board # type: ignore
from ci.locked_print import locked_print
def _install_global_package(package: str) -> None:
# example pio pkg -g -p "https://github.com/maxgerhardt/platform-raspberrypi.git".
locked_print(f"*** Installing {package} ***")
cmd_list = [
"pio",
"pkg",
"install",
"-g",
"-p",
package,
]
cmd_str = subprocess.list2cmdline(cmd_list)
locked_print(f"Running command:\n\n{cmd_str}\n\n")
result = subprocess.run(
cmd_str,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
check=True,
)
locked_print(result.stdout)
locked_print(f"*** Finished installing {package} ***")
def insert_tool_aliases(meta_json: dict[str, dict]) -> None:
for board in meta_json.keys():
aliases: dict[str, str | None] = {}
cc_path = meta_json[board].get("cc_path")
cc_path = Path(cc_path) if cc_path else None
if cc_path:
# get the prefix of the base name of the compiler.
cc_base = cc_path.name
parent = cc_path.parent
prefix = cc_base.split("gcc")[0]
suffix = cc_path.suffix
# create the aliases
for tool in [
"gcc",
"g++",
"ar",
"objcopy",
"objdump",
"size",
"nm",
"ld",
"as",
"ranlib",
"strip",
"c++filt",
"readelf",
"addr2line",
]:
name = f"{prefix}{tool}" + suffix
tool_path = Path(str(parent / name))
if tool_path.exists():
aliases[tool] = str(tool_path)
else:
aliases[tool] = None
meta_json[board]["aliases"] = aliases
def remove_readonly(func, path, _):
"Clear the readonly bit and reattempt the removal"
if os.name == "nt":
os.system(f"attrib -r {path}")
else:
try:
os.chmod(path, 0o777)
except Exception:
print(f"Error removing readonly attribute from {path}")
func(path)
def create_build_dir(
board: Board,
defines: list[str],
no_install_deps: bool,
extra_packages: list[str],
build_dir: str | None,
board_dir: str | None,
build_flags: list[str] | None,
extra_scripts: str | None,
) -> tuple[bool, str]:
"""Create the build directory for the given board."""
# filter out "web" board because it's not a real board.
if board.board_name == "web":
locked_print(f"Skipping web target for board {board.board_name}")
return True, ""
if board.defines:
defines.extend(board.defines)
# remove duplicates
defines = list(set(defines))
board_name = board.board_name
real_board_name = board.get_real_board_name()
locked_print(f"*** Initializing environment for {board_name} ***")
# builddir = Path(build_dir) / board if build_dir else Path(".build") / board
build_dir = build_dir or ".build"
builddir = Path(build_dir) / board_name
builddir.mkdir(parents=True, exist_ok=True)
# if lib directory (where FastLED lives) exists, remove it. This is necessary to run on
# recycled build directories for fastled to update. This is a fast operation.
srcdir = builddir / "lib"
if srcdir.exists():
shutil.rmtree(srcdir, onerror=remove_readonly)
platformio_ini = builddir / "platformio.ini"
if platformio_ini.exists():
try:
platformio_ini.unlink()
except OSError as e:
locked_print(f"Error removing {platformio_ini}: {e}")
if board_dir:
dst_dir = builddir / "boards"
if dst_dir.exists():
shutil.rmtree(dst_dir)
shutil.copytree(str(board_dir), str(builddir / "boards"))
if board.platform_needs_install:
if board.platform:
try:
_install_global_package(board.platform)
except subprocess.CalledProcessError as e:
stdout = e.stdout
return False, stdout
else:
warnings.warn("Platform install was specified but no platform was given.")
cmd_list = [
"pio",
"project",
"init",
"--project-dir",
builddir.as_posix(),
"--board",
real_board_name,
]
if board.platform:
cmd_list.append(f"--project-option=platform={board.platform}")
if board.platform_packages:
cmd_list.append(f"--project-option=platform_packages={board.platform_packages}")
if board.framework:
cmd_list.append(f"--project-option=framework={board.framework}")
if board.board_build_core:
cmd_list.append(f"--project-option=board_build.core={board.board_build_core}")
if board.board_build_filesystem_size:
cmd_list.append(
f"--project-option=board_build.filesystem_size={board.board_build_filesystem_size}"
)
if build_flags is not None:
for build_flag in build_flags:
cmd_list.append(f"--project-option=build_flags={build_flag}")
if defines:
build_flags_str = " ".join(f"-D{define}" for define in defines)
cmd_list.append(f"--project-option=build_flags={build_flags_str}")
if extra_packages:
cmd_list.append(f'--project-option=lib_deps={",".join(extra_packages)}')
if no_install_deps:
cmd_list.append("--no-install-dependencies")
if extra_scripts:
p = Path(extra_scripts)
cmd_list.append(f"--project-option=extra_scripts={p.resolve()}")
cmd_str = subprocess.list2cmdline(cmd_list)
locked_print(f"\n\nRunning command:\n {cmd_str}\n")
result = subprocess.run(
cmd_str,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
check=False,
)
stdout = result.stdout
locked_print(result.stdout)
if result.returncode != 0:
locked_print(f"*** Error setting up board {board_name} ***")
return False, stdout
locked_print(f"*** Finished initializing environment for board {board_name} ***")
    # Dump the project metadata to help with debugging.
    # This is the command: pio project metadata --json-output
cwd = str(builddir.resolve())
cmd_list = [
"pio",
"project",
"metadata",
"--json-output",
]
cmd_str = subprocess.list2cmdline(cmd_list)
stdout = subprocess.run(
cmd_list,
cwd=cwd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
check=False,
).stdout
try:
data = json.loads(stdout)
# now dump the values to the file at the root of the build directory.
        metadata_json = builddir / "build_info.json"
try:
insert_tool_aliases(data)
formatted = json.dumps(data, indent=4, sort_keys=True)
            with open(metadata_json, "w") as f:
f.write(formatted)
except Exception:
            with open(metadata_json, "w") as f:
f.write(stdout)
except json.JSONDecodeError:
msg = f"build_info.json will not be generated because of error because stdout does not look like a json file:\n#### STDOUT ####\n{stdout}\n#### END STDOUT ####\n"
locked_print(msg)
return True, stdout


@ -0,0 +1,205 @@
import subprocess
from dataclasses import dataclass
from pathlib import Path
def run_command(command: list, show_output=False):
"""
Run a command using subprocess and capture the output.
Args:
command (list): Command to run.
show_output (bool): Print command and its output if True.
Returns:
str: Standard output of the command.
Raises:
RuntimeError: If the command fails.
"""
if show_output:
print(f"Running command: {' '.join(command)}")
result = subprocess.run(command, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"Command failed: {' '.join(command)}\n{result.stderr}")
if show_output and result.stdout:
print(f"Command output: {result.stdout}")
return result.stdout
def analyze_elf_file(objdump_path: Path, cppfilt_path: Path, elf_file: Path):
"""
Analyze the ELF file using objdump to display its contents.
Args:
objdump_path (Path): Path to the objdump executable.
cppfilt_path (Path): Path to the c++filt executable.
elf_file (Path): Path to the ELF file.
"""
command = [str(objdump_path), "-h", str(elf_file)] # "-h" option shows headers.
print(f"Analyzing ELF file: {elf_file}")
output = run_command(command, show_output=True)
print("\nELF File Analysis:")
print(output)
list_symbols_and_sizes(objdump_path, cppfilt_path, elf_file)
def cpp_filt(cppfilt_path: Path, input_text: str) -> str:
"""
Demangle C++ symbols using c++filt.
Args:
cppfilt_path (Path): Path to c++filt executable.
input_text (str): Text to demangle.
Returns:
str: Demangled text.
"""
command = [str(cppfilt_path), "-t", "-n"]
print(f"Running c++filt on input text with {cppfilt_path}")
process = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
stdout, stderr = process.communicate(input=input_text)
if process.returncode != 0:
raise RuntimeError(f"Error running c++filt: {stderr}")
return stdout
def dump_symbol_sizes(nm_path: Path, cpp_filt_path: Path, elf_file: Path) -> str:
nm_command = [
str(nm_path),
"-S",
"--size-sort",
str(elf_file),
]
print(f"Listing symbols and sizes in ELF file: {elf_file}")
print("Running command: ", " ".join(nm_command))
nm_result = subprocess.run(
nm_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
if nm_result.returncode != 0:
raise RuntimeError(f"Error running nm command: {nm_result.stderr}")
cpp_filt_command = [str(cpp_filt_path), "--no-strip-underscore"]
print("Running c++filt command: ", " ".join(cpp_filt_command))
cpp_filt_result = subprocess.run(
cpp_filt_command,
input=nm_result.stdout,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
if cpp_filt_result.returncode != 0:
raise RuntimeError(f"Error running c++filt command: {cpp_filt_result.stderr}")
# now reverse sort the lines
lines = cpp_filt_result.stdout.splitlines()
@dataclass
class Entry:
address: str
size: int
everything_else: str
def parse_line(line: str) -> Entry:
address, size, *rest = line.split()
return Entry(address, int(size, 16), " ".join(rest))
data: list[Entry] = [parse_line(line) for line in lines]
data.sort(key=lambda x: x.size, reverse=True)
lines = [f"{d.size:6d} {d.everything_else}" for d in data]
return "\n".join(lines)
def demangle_symbol(cppfilt_path: Path, symbol: str) -> str:
"""
Demangle a C++ symbol using c++filt.
Args:
cppfilt_path (Path): Path to the c++filt executable.
symbol (str): The symbol to demangle.
Returns:
str: The demangled symbol.
"""
command = [str(cppfilt_path), symbol]
return run_command(command, show_output=False).strip()
def list_symbols_and_sizes(objdump_path: Path, cppfilt_path: Path, elf_file: Path):
"""
List all symbols and their sizes from the ELF file using objdump.
Args:
objdump_path (Path): Path to the objdump executable.
cppfilt_path (Path): Path to the c++filt executable.
elf_file (Path): Path to the ELF file.
"""
command = [
str(objdump_path),
"-t",
str(elf_file),
] # "-t" option lists symbols with sizes.
print(f"Listing symbols and sizes in ELF file: {elf_file}")
output = run_command(command, show_output=False)
symbols = []
for line in output.splitlines():
parts = line.split()
# Expected parts length can vary, check if size and section index (parts[2] & parts[4]) are valid
if len(parts) > 5 and parts[2].isdigit() and parts[4].startswith("."):
symbol = {
"name": parts[-1],
"size": int(parts[2], 16), # size is in hex format
"section": parts[4],
"type": parts[3],
}
symbols.append(symbol)
if symbols:
print("\nSymbols and Sizes in ELF File:")
for symbol in symbols:
demangled_name = demangle_symbol(cppfilt_path, symbol["name"])
print(
f"Symbol: {demangled_name}, Size: {symbol['size']} bytes, Type: {symbol['type']}, Section: {symbol['section']}"
)
else:
print("No symbols found or unable to parse symbols correctly.")
def check_elf_format(objdump_path: Path, elf_file: Path):
"""
Check the format of the ELF file using objdump to confirm it's being read correctly.
Args:
objdump_path (Path): Path to the objdump executable.
elf_file (Path): Path to the ELF file.
"""
command = [str(objdump_path), "-f", str(elf_file)]
print(f"Checking ELF file format: {elf_file}")
output = run_command(command, show_output=True)
print("\nELF File Format Information:")
print(output)
def check_section_contents(objdump_path: Path, elf_file: Path):
"""
Dump the contents of all sections in the ELF file using objdump.
Args:
objdump_path (Path): Path to the objdump executable.
elf_file (Path): Path to the ELF file.
"""
command = [str(objdump_path), "-s", str(elf_file)]
print(f"Dumping all sections of ELF file: {elf_file}")
output = run_command(command, show_output=True)
print("\nELF File Sections Content:")
print(output)
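# Example usage (a minimal sketch; the tool and ELF paths below are assumptions):
#   analyze_elf_file(
#       objdump_path=Path("/usr/bin/objdump"),
#       cppfilt_path=Path("/usr/bin/c++filt"),
#       elf_file=Path(".build/uno/.pio/build/uno/firmware.elf"),
#   )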


@ -0,0 +1,11 @@
from threading import Lock
PRINT_LOCK = Lock()
def locked_print(string: str):
"""Print with a lock to prevent garbled output for multiple threads."""
with PRINT_LOCK:
        # print can truncate very long output on some consoles, so print line by line
for line in string.splitlines():
print(line)


@ -0,0 +1,19 @@
import os
from pathlib import Path
def map_dump(map_file: Path) -> None:
# os.system("uv run fpvgcc ci/tests/uno/firmware.map --lmap root")
cmds = [
f"uv run fpvgcc {map_file} --sar",
f"uv run fpvgcc {map_file} --lmap root",
f"uv run fpvgcc {map_file} --uf",
f"uv run fpvgcc {map_file} --uregions",
# --usections
f"uv run fpvgcc {map_file} --usections",
f"uv run fpvgcc {map_file} --la",
]
for cmd in cmds:
print("\nRunning command: ", cmd)
os.system(cmd)


@ -0,0 +1,5 @@
from pathlib import Path
_HERE = Path(__file__).resolve().parent
PROJECT_ROOT = _HERE.parent.parent
BUILD = PROJECT_ROOT / ".build"


@ -0,0 +1,139 @@
import subprocess
import threading
from pathlib import Path
class RunningProcess:
"""
A class to manage and stream output from a running subprocess.
This class provides functionality to execute shell commands, stream their output
in real-time, and control the subprocess execution.
"""
def __init__(
self,
command: str | list[str],
cwd: Path | None = None,
check: bool = False,
auto_run: bool = True,
echo: bool = True,
):
"""
Initialize the RunningProcess instance. Note that stderr is merged into stdout!!
Args:
command (str): The command to execute.
cwd (Path | None): The working directory to execute the command in.
check (bool): If True, raise an exception if the command returns a non-zero exit code.
auto_run (bool): If True, automatically run the command when the instance is created.
echo (bool): If True, print the output of the command to the console in real-time.
"""
if isinstance(command, list):
command = subprocess.list2cmdline(command)
self.command = command
self.cwd = str(cwd) if cwd is not None else None
self.buffer: list[str] = []
self.proc: subprocess.Popen | None = None
self.check = check
self.auto_run = auto_run
self.echo = echo
self.reader_thread: threading.Thread | None = None
self.shutdown: threading.Event = threading.Event()
if auto_run:
self.run()
def run(self) -> None:
"""
Execute the command and stream its output in real-time.
Returns:
str: The full output of the command.
Raises:
subprocess.CalledProcessError: If the command returns a non-zero exit code.
"""
self.proc = subprocess.Popen(
self.command,
shell=True,
cwd=self.cwd,
bufsize=256,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, # Merge stderr into stdout
text=True, # Automatically decode bytes to str
)
def output_reader():
try:
assert self.proc is not None
assert self.proc.stdout is not None
for line in iter(self.proc.stdout.readline, ""):
if self.shutdown.is_set():
break
line = line.rstrip()
if self.echo:
print(line) # Print to console in real time
self.buffer.append(line)
finally:
if self.proc and self.proc.stdout:
self.proc.stdout.close()
# Start output reader thread
self.reader_thread = threading.Thread(target=output_reader, daemon=True)
self.reader_thread.start()
def wait(self) -> int:
"""
Wait for the process to complete.
Raises:
ValueError: If the process hasn't been started.
"""
if self.proc is None:
raise ValueError("Process is not running.")
rtn = self.proc.wait()
assert self.reader_thread is not None
self.reader_thread.join(timeout=1)
return rtn
def kill(self) -> None:
"""
Immediately terminate the process with SIGKILL.
        Does nothing if the process hasn't been started.
"""
if self.proc is None:
return
self.shutdown.set()
self.proc.kill()
def terminate(self) -> None:
"""
Gracefully terminate the process with SIGTERM.
Raises:
ValueError: If the process hasn't been started.
"""
if self.proc is None:
raise ValueError("Process is not running.")
self.shutdown.set()
self.proc.terminate()
@property
def returncode(self) -> int | None:
if self.proc is None:
return None
return self.proc.returncode
@property
def stdout(self) -> str:
"""
Get the complete stdout output of the process.
Returns:
            str: The complete stdout output as a string. Blocks until the process has completed.
"""
self.wait()
return "\n".join(self.buffer)

View file

@ -0,0 +1,205 @@
import json
import os
import sys
from dataclasses import dataclass
from pathlib import Path
from ci.paths import BUILD
@dataclass
class Tools:
as_path: Path
ld_path: Path
objcopy_path: Path
objdump_path: Path
cpp_filt_path: Path
nm_path: Path
def load_tools(build_info_path: Path) -> Tools:
build_info = json.loads(build_info_path.read_text())
board_info = build_info[next(iter(build_info))]
aliases = board_info["aliases"]
as_path = Path(aliases["as"])
ld_path = Path(aliases["ld"])
objcopy_path = Path(aliases["objcopy"])
objdump_path = Path(aliases["objdump"])
cpp_filt_path = Path(aliases["c++filt"])
nm_path = Path(aliases["nm"])
if sys.platform == "win32":
as_path = as_path.with_suffix(".exe")
ld_path = ld_path.with_suffix(".exe")
objcopy_path = objcopy_path.with_suffix(".exe")
objdump_path = objdump_path.with_suffix(".exe")
cpp_filt_path = cpp_filt_path.with_suffix(".exe")
nm_path = nm_path.with_suffix(".exe")
out = Tools(as_path, ld_path, objcopy_path, objdump_path, cpp_filt_path, nm_path)
tools = [as_path, ld_path, objcopy_path, objdump_path, cpp_filt_path, nm_path]
for tool in tools:
if not tool.exists():
raise FileNotFoundError(f"Tool not found: {tool}")
return out
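# Sketch of the build_info.json shape that load_tools() expects (keys inferred
# from the code above; the board name and paths are placeholders, not real values):
#
#   {
#     "uno": {
#       "aliases": {
#         "as": "/path/to/avr-as",
#         "ld": "/path/to/avr-ld",
#         "objcopy": "/path/to/avr-objcopy",
#         "objdump": "/path/to/avr-objdump",
#         "c++filt": "/path/to/avr-c++filt",
#         "nm": "/path/to/avr-nm"
#       }
#     }
#   }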
def _list_builds() -> list[Path]:
str_paths = os.listdir(BUILD)
paths = [BUILD / p for p in str_paths]
dirs = [p for p in paths if p.is_dir()]
return dirs
def _check_build(build: Path) -> bool:
# 1. should contain a build_info.json file
# 2. should contain a .pio/build directory
has_build_info = (build / "build_info.json").exists()
has_pio_build = (build / ".pio" / "build").exists()
return has_build_info and has_pio_build
def _prompt_build() -> Path:
builds = _list_builds()
if not builds:
print("Error: No builds found", file=sys.stderr)
sys.exit(1)
print("Select a build:")
for i, build in enumerate(builds):
print(f" [{i}]: {build}")
while True:
try:
which = int(input("Enter the number of the build to use: "))
if 0 <= which < len(builds):
valid = _check_build(BUILD / builds[which])
if valid:
return BUILD / builds[which]
print("Error: Invalid build", file=sys.stderr)
else:
print("Error: Invalid selection", file=sys.stderr)
continue
except ValueError:
print("Error: Invalid input", file=sys.stderr)
continue
def _prompt_object_file(build: Path) -> Path:
# Look for object files in .pio/build directory
build_dir = build / ".pio" / "build"
object_files = []
# Walk through build directory to find .o files
for root, _, files in os.walk(build_dir):
for file in files:
if file.endswith(".o") and "FrameworkArduino" not in file:
full_path = Path(root) / file
if "FrameworkArduino" not in full_path.parts:
object_files.append(full_path)
if not object_files:
print("Error: No object files found", file=sys.stderr)
sys.exit(1)
print("\nSelect an object file:")
for i, obj_file in enumerate(object_files):
print(f" [{i}]: {obj_file.relative_to(build_dir)}")
while True:
try:
which = int(input("Enter the number of the object file to use: "))
if 0 <= which < len(object_files):
return object_files[which]
print("Error: Invalid selection", file=sys.stderr)
except ValueError:
print("Error: Invalid input", file=sys.stderr)
continue
def cli() -> None:
import argparse
parser = argparse.ArgumentParser(
description="Dump object file information using build tools"
)
parser.add_argument(
"build_path",
type=Path,
nargs="?",
help="Path to build directory containing build info JSON file",
)
parser.add_argument(
"--symbols", action="store_true", help="Dump symbol table using nm"
)
parser.add_argument(
"--disassemble", action="store_true", help="Dump disassembly using objdump"
)
args = parser.parse_args()
build_path = args.build_path
symbols = args.symbols
disassemble = args.disassemble
# Check if object file was provided and exists
if build_path is None:
build_path = _prompt_build()
else:
if not _check_build(build_path):
print("Error: Invalid build directory", file=sys.stderr)
sys.exit(1)
assert build_path is not None
assert build_path
build_info_path = build_path / "build_info.json"
assert build_info_path.exists(), f"File not found: {build_info_path}"
tools = load_tools(build_info_path)
if not symbols and not disassemble:
while True:
print(
"Error: Please specify at least one action to perform", file=sys.stderr
)
action = input(
"Enter 's' to dump symbols, 'd' to disassemble, or 'q' to quit: "
)
if action == "s":
symbols = True
break
elif action == "d":
disassemble = True
break
elif action == "q":
sys.exit(0)
else:
print("Error: Invalid action", file=sys.stderr)
object_file = _prompt_object_file(build_path)
if symbols:
import subprocess
cmd_str = subprocess.list2cmdline(
[str(tools.objdump_path), str(object_file), "--syms"]
)
print(f"Running command: {cmd_str}")
        subprocess.run([str(tools.objdump_path), str(object_file), "--syms"])
if disassemble:
import subprocess
cmd_str = subprocess.list2cmdline(
[str(tools.objdump_path), "-d", str(object_file)]
)
print(f"Running command: {cmd_str}")
subprocess.run([str(tools.objdump_path), "-d", str(object_file)])
if not (symbols or disassemble):
parser.print_help()
if __name__ == "__main__":
try:
cli()
except KeyboardInterrupt:
print("Exiting...")
sys.exit(1)

View file

@ -0,0 +1,58 @@
import argparse
import json
from pathlib import Path
def _get_board_info(path: Path) -> dict:
build_info = json.loads(path.read_text())
assert build_info.keys(), f"No boards found in {build_info}"
assert (
len(build_info.keys()) == 1
), f"Multiple boards found in {build_info}, so correct board should be specified"
return build_info[next(iter(build_info))]
def check_firmware_size(board: str) -> int:
root_build_dir = Path(".build") / board
build_info_json = root_build_dir / "build_info.json"
board_info = _get_board_info(build_info_json)
assert board_info, f"Board {board} not found in {build_info_json}"
prog_path = Path(board_info["prog_path"])
base_path = prog_path.parent
suffixes = [".bin", ".hex", ".uf2"]
firmware: Path
for suffix in suffixes:
firmware = base_path / f"firmware{suffix}"
if firmware.exists():
break
else:
msg = (
", ".join([f"firmware{suffix}" for suffix in suffixes])
+ f" not found in {base_path}"
)
raise FileNotFoundError(msg)
return firmware.stat().st_size
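# Example (illustrative): for a board named "uno" this reads
# .build/uno/build_info.json, resolves prog_path, and returns the size of the
# first of firmware.bin / firmware.hex / firmware.uf2 found next to it.
#
#   size = check_firmware_size("uno")   # -> size in bytes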
def main(board: str):
try:
size = check_firmware_size(board)
print(f"Firmware size for {board}: {size} bytes")
except FileNotFoundError as e:
print(f"Error: {e}")
except json.JSONDecodeError:
print(f"Error: Unable to parse build_info.json for {board}")
except Exception as e:
print(f"An unexpected error occurred: {e}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Check FastLED firmware size for the specified board."
)
parser.add_argument(
"--board", type=str, required=True, help="Board to check firmware size for"
)
args = parser.parse_args()
main(args.board)

View file

@ -0,0 +1,233 @@
import argparse
import csv
import json
import os
import shutil
import subprocess
from pathlib import Path
import dateutil.parser # type: ignore
HERE = Path(__file__).resolve().parent
def run_command(command):
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
)
output, error = process.communicate()
return output.decode("utf-8"), error.decode("utf-8")
def step_back_commits(steps):
step_back_command = f"git reset --hard HEAD~{steps}"
output, error = run_command(step_back_command)
if error:
print(f"Error stepping back {steps} commit(s): {error}")
return False
return True
def check_firmware_size(board: str) -> int:
root_build_dir = Path(".build") / board
build_info_json = root_build_dir / "build_info.json"
build_info = json.loads(build_info_json.read_text())
board_info = build_info.get(board)
assert board_info, f"Board {board} not found in {build_info_json}"
prog_path = Path(board_info["prog_path"])
base_path = prog_path.parent
suffixes = [".bin", ".hex", ".uf2"]
firmware: Path
for suffix in suffixes:
firmware = base_path / f"firmware{suffix}"
if firmware.exists():
break
else:
msg = (
", ".join([f"firmware{suffix}" for suffix in suffixes])
+ f" not found in {base_path}"
)
raise FileNotFoundError(msg)
size_command = f"du -b {firmware}"
output, error = run_command(size_command)
if error:
print(f"Error checking firmware size: {error}")
return -1
size_in_bytes = output.strip().split()[0]
return int(size_in_bytes)
def get_commit_hash():
hash_command = "git rev-parse HEAD"
output, error = run_command(hash_command)
if error:
print(f"Error getting commit hash: {error}")
return None
return output.strip()
def get_commit_date(commit_hash):
date_command = f"git show -s --format=%ci {commit_hash}"
output, error = run_command(date_command)
if error:
print(f"Error getting commit date: {error}")
return None
return dateutil.parser.parse(output.strip()).isoformat()
def main(
board: str,
num_commits: int,
skip_step: int,
start_commit: str | None = None,
end_commit: str | None = None,
):
# change to the script dir
os.chdir(str(HERE))
# Create tmp directory if it doesn't exist
if os.path.exists("tmp"):
shutil.rmtree("tmp")
os.makedirs("tmp", exist_ok=True)
# Change to the tmp directory
os.chdir("tmp")
# 1. Git clone FastLED repository
print("Cloning FastLED repository...")
clone_command = "git clone https://github.com/FastLED/FastLED.git"
output, error = run_command(clone_command)
# if error:
# print(f"Error cloning repository: {error}")
# os.chdir("..")
# return
# Change to the FastLED directory
os.chdir("FastLED")
# Checkout the latest commit
run_command("git checkout master")
# If end_commit is specified, checkout that commit
# if end_commit:
# print(f"Checking out end commit: {end_commit}")
# checkout_command = f"git checkout {end_commit}"
# output, error = run_command(checkout_command)
# #if error:
# # print(f"Error checking out end commit: {error}")
# # return
# Prepare CSV file
csv_filename = "../../firmware_sizes.csv"
with open(csv_filename, "w", newline="") as csvfile:
csvwriter = csv.writer(csvfile)
csvwriter.writerow(["datetime", "commit_hash", "binary_size"])
commits_checked = 0
first_iteration = True
while True:
current_commit = get_commit_hash()
if first_iteration and start_commit:
first_iteration = False
while True:
if current_commit == start_commit:
break
if not step_back_commits(1):
break
current_commit = get_commit_hash()
if num_commits and commits_checked >= num_commits:
print(f"Checked {num_commits} commits")
break
if end_commit and current_commit == end_commit:
print(f"Checked until end commit: {end_commit}")
break
# 2. Run ci-compile.py for current commit
print(f"\nChecking commit {commits_checked + 1}")
# remove .build/esp32dev/pio/build/esp32dev/ directory
board_files = Path(".build") / board / ".pio" / "build" / board
if board_files.exists():
shutil.rmtree(str(board_files), ignore_errors=True)
compile_command = f"python3 ci/ci-compile.py {board} --examples Blink"
output, error = run_command(compile_command)
if error:
print(f"Error running ci-compile.py: {error}")
if not step_back_commits(skip_step):
break
continue
# 3. Check firmware size and get commit hash
print("Checking firmware size...")
try:
size = check_firmware_size(board)
except FileNotFoundError as e:
print(f"Error checking firmware size: {e}")
if not step_back_commits(skip_step):
break
continue
except AssertionError as e:
print(f"Error: {e}")
if not step_back_commits(skip_step):
break
continue
commit_hash = get_commit_hash()
if size and commit_hash:
commit_date = get_commit_date(commit_hash)
print(f"Firmware size: {size} bytes")
# Write to CSV incrementally
with open(csv_filename, "a", newline="") as csvfile:
csvwriter = csv.writer(csvfile)
csvwriter.writerow([commit_date, commit_hash, size])
print(f"Result appended to {csv_filename}")
commits_checked += 1
# 4. Step back one commit
print("Stepping back 1 commit...")
if not step_back_commits(1):
break
# Don't remove the tmp directory
print("\nTemporary directory 'tmp' has been left intact for inspection.")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Check FastLED firmware size for multiple commits."
)
parser.add_argument(
"--num-commits", type=int, default=1, help="Number of commits to check"
)
parser.add_argument(
"--skip-step",
type=int,
default=1,
help="Number of commits to skip between checks",
)
parser.add_argument("--start-commit", type=str, help="Starting commit hash")
parser.add_argument("--end-commit", type=str, help="Ending commit hash")
parser.add_argument(
"--board", type=str, required=True, help="Board to check firmware size for"
)
args = parser.parse_args()
if args.start_commit or args.end_commit:
        # if either commit is specified, both must be specified
        if not (args.start_commit and args.end_commit):
print("Both start commit and end commit must be specified.")
exit(1)
# if start_commit is specified, end_commit must be specified
num_commits = args.num_commits
if args.start_commit and args.end_commit:
if args.start_commit == args.end_commit:
print("Start commit and end commit are the same.")
exit(1)
num_commits = 999999
main(args.board, num_commits, args.skip_step, args.start_commit, args.end_commit)
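# Example invocation (illustrative; the script path and board are placeholders):
#
#   uv run <this script> --board esp32dev --num-commits 10
#
# writes firmware_sizes.csv next to this script with rows such as:
#
#   datetime,commit_hash,binary_size
#   2024-01-01T12:00:00+00:00,<sha>,275123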

View file

@ -0,0 +1,293 @@
import argparse
import json
import os
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Tuple
from ci.paths import PROJECT_ROOT
from ci.running_process import RunningProcess
BUILD_DIR = PROJECT_ROOT / "tests" / ".build"
BUILD_DIR.mkdir(parents=True, exist_ok=True)
def clean_build_directory():
print("Cleaning build directory...")
shutil.rmtree(BUILD_DIR, ignore_errors=True)
BUILD_DIR.mkdir(parents=True, exist_ok=True)
print("Build directory cleaned.")
HERE = Path(__file__).resolve().parent
WASM_BUILD = False
USE_ZIG = False
USE_CLANG = False
def _has_system_clang_compiler() -> bool:
CLANG = shutil.which("clang")
CLANGPP = shutil.which("clang++")
LLVM_AR = shutil.which("llvm-ar")
return CLANG is not None and CLANGPP is not None and LLVM_AR is not None
def use_clang_compiler() -> Tuple[Path, Path, Path]:
assert _has_system_clang_compiler(), "Clang system compiler not found"
CLANG = shutil.which("clang")
CLANGPP = shutil.which("clang++")
LLVM_AR = shutil.which("llvm-ar")
assert CLANG is not None, "clang compiler not found"
assert CLANGPP is not None, "clang++ compiler not found"
assert LLVM_AR is not None, "llvm-ar not found"
# Set environment variables for C and C++ compilers
os.environ["CC"] = CLANG
os.environ["CXX"] = CLANGPP
os.environ["AR"] = LLVM_AR
os.environ["CXXFLAGS"] = os.environ.get("CXXFLAGS", "") + " -ferror-limit=1"
os.environ["CFLAGS"] = os.environ.get("CFLAGS", "") + " -ferror-limit=1"
if WASM_BUILD:
wasm_flags = [
"--target=wasm32",
"-O3",
"-flto",
# "-nostdlib",
# "-Wl,--no-entry",
# "-Wl,--export-all",
# "-Wl,--lto-O3",
# "-Wl,-z,stack-size=8388608", # 8 * 1024 * 1024 (8MiB)
]
os.environ["CFLAGS"] = " ".join(wasm_flags)
os.environ["CXXFLAGS"] = " ".join(wasm_flags)
print(f"CC: {CLANG}")
print(f"CXX: {CLANGPP}")
print(f"AR: {LLVM_AR}")
return Path(CLANG), Path(CLANGPP), Path(LLVM_AR)
def use_zig_compiler() -> Tuple[Path, Path, Path]:
assert 0 == os.system(
"uv run python -m ziglang version"
), "Zig-clang compiler not found"
uv_path_str: str | None = shutil.which("uv")
assert uv_path_str is not None, "uv not found in PATH"
uv_path = Path(uv_path_str).resolve()
zig_command = f'"{uv_path}" run python -m ziglang'
# We are going to build up shell scripts that look like cc, c++, and ar. It will contain the actual build command.
CC_PATH = BUILD_DIR / "cc"
CXX_PATH = BUILD_DIR / "c++"
AR_PATH = BUILD_DIR / "ar"
if sys.platform == "win32":
CC_PATH = CC_PATH.with_suffix(".cmd")
CXX_PATH = CXX_PATH.with_suffix(".cmd")
AR_PATH = AR_PATH.with_suffix(".cmd")
CC_PATH.write_text(f"@echo off\n{zig_command} cc %* 2>&1\n")
CXX_PATH.write_text(f"@echo off\n{zig_command} c++ %* 2>&1\n")
AR_PATH.write_text(f"@echo off\n{zig_command} ar %* 2>&1\n")
else:
cc_cmd = f'#!/bin/bash\n{zig_command} cc "$@"\n'
cxx_cmd = f'#!/bin/bash\n{zig_command} c++ "$@"\n'
ar_cmd = f'#!/bin/bash\n{zig_command} ar "$@"\n'
CC_PATH.write_text(cc_cmd)
CXX_PATH.write_text(cxx_cmd)
AR_PATH.write_text(ar_cmd)
CC_PATH.chmod(0o755)
CXX_PATH.chmod(0o755)
AR_PATH.chmod(0o755)
# if WASM_BUILD:
# wasm_flags = [
# # "--target=wasm32",
# # "-O3",
# # "-flto",
# # "-nostdlib",
# "-Wl,--no-entry",
# # "-Wl,--export-all",
# # "-Wl,--lto-O3",
# "-Wl,-z,stack-size=8388608", # 8 * 1024 * 1024 (8MiB)
# ]
# os.environ["CFLAGS"] = " ".join(wasm_flags)
# os.environ["CXXFLAGS"] = " ".join(wasm_flags)
cc, cxx = CC_PATH, CXX_PATH
# use the system path, so on windows this looks like "C:\Program Files\Zig\zig.exe"
cc_path: Path | str = cc.resolve()
cxx_path: Path | str = cxx.resolve()
if sys.platform == "win32":
cc_path = str(cc_path).replace("/", "\\")
cxx_path = str(cxx_path).replace("/", "\\")
# print out the paths
print(f"CC: {cc_path}")
print(f"CXX: {cxx_path}")
print(f"AR: {AR_PATH}")
# sys.exit(1)
# Set environment variables for C and C++ compilers
os.environ["CC"] = str(cc_path)
os.environ["CXX"] = str(cxx_path)
os.environ["AR"] = str(AR_PATH)
return CC_PATH, CXX_PATH, AR_PATH
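# On POSIX systems the generated wrappers are small shell scripts along these
# lines (a sketch; the uv path is whatever shutil.which("uv") resolved to):
#
#   #!/bin/bash
#   "/path/to/uv" run python -m ziglang cc "$@"
#
# On Windows they are .cmd files that forward %* instead of "$@".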
def run_command(command: str, cwd: Path | None = None) -> None:
process = RunningProcess(command, cwd=cwd)
process.wait()
if process.returncode != 0:
print(f"{Path(__file__).name}: Error executing command: {command}")
sys.exit(1)
def compile_fastled(specific_test: str | None = None) -> None:
if USE_ZIG:
print("USING ZIG COMPILER")
rtn = subprocess.run(
"python -m ziglang version", shell=True, capture_output=True
).returncode
zig_is_installed = rtn == 0
assert (
zig_is_installed
        ), 'Zig compiler not found when using "python -m ziglang version" command'
use_zig_compiler()
elif USE_CLANG:
print("USING CLANG COMPILER")
use_clang_compiler()
cmake_configure_command_list: list[str] = [
"cmake",
"-S",
str(PROJECT_ROOT / "tests"),
"-B",
str(BUILD_DIR),
"-G",
"Ninja",
"-DCMAKE_VERBOSE_MAKEFILE=ON",
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
]
if WASM_BUILD:
cmake_configure_command_list.extend(
[
"-DCMAKE_C_COMPILER_TARGET=wasm32-wasi",
"-DCMAKE_CXX_COMPILER_TARGET=wasm32-wasi",
"-DCMAKE_C_COMPILER_WORKS=TRUE",
"-DCMAKE_CXX_COMPILER_WORKS=TRUE",
"-DCMAKE_SYSTEM_NAME=Generic",
"-DCMAKE_CROSSCOMPILING=TRUE",
"-DCMAKE_EXE_LINKER_FLAGS=-Wl,--no-entry -Wl,--export-all -Wl,--lto-O3 -Wl,-z,stack-size=8388608",
]
)
cmake_configure_command = subprocess.list2cmdline(cmake_configure_command_list)
run_command(cmake_configure_command, cwd=BUILD_DIR)
# Build the project
if specific_test:
cmake_build_command = f"cmake --build {BUILD_DIR} --target test_{specific_test}"
else:
cmake_build_command = f"cmake --build {BUILD_DIR}"
run_command(cmake_build_command)
print("FastLED library compiled successfully.")
def parse_arguments():
parser = argparse.ArgumentParser(
description="Compile FastLED library with different compiler options."
)
parser.add_argument("--use-zig", action="store_true", help="Use Zig compiler")
parser.add_argument("--use-clang", action="store_true", help="Use Clang compiler")
parser.add_argument("--wasm", action="store_true", help="Build for WebAssembly")
parser.add_argument(
"--clean",
action="store_true",
help="Clean the build directory before compiling",
)
parser.add_argument(
"--test",
help="Specific test to compile (without test_ prefix)",
)
return parser.parse_args()
def get_build_info(args: argparse.Namespace) -> dict[str, str | dict[str, str]]:
return {
"USE_ZIG": str(USE_ZIG),
"USE_CLANG": str(USE_CLANG),
"WASM_BUILD": str(WASM_BUILD),
"CC": os.environ.get("CC", ""),
"CXX": os.environ.get("CXX", ""),
"AR": os.environ.get("AR", ""),
"CFLAGS": os.environ.get("CFLAGS", ""),
"CXXFLAGS": os.environ.get("CXXFLAGS", ""),
"ARGS": {
"use_zig": str(args.use_zig),
"use_clang": str(args.use_clang),
"wasm": str(args.wasm),
},
}
def should_clean_build(build_info: dict[str, str | dict[str, str]]) -> bool:
build_info_file = BUILD_DIR / "build_info.json"
if not build_info_file.exists():
return True
with open(build_info_file, "r") as f:
old_build_info = json.load(f)
return old_build_info != build_info
def update_build_info(build_info: dict[str, str | dict[str, str]]):
build_info_file = BUILD_DIR / "build_info.json"
with open(build_info_file, "w") as f:
json.dump(build_info, f, indent=2)
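# Example of the cached tests/.build/build_info.json used to decide whether a
# clean rebuild is needed (values are illustrative):
#
#   {
#     "USE_ZIG": "False",
#     "USE_CLANG": "True",
#     "WASM_BUILD": "False",
#     "CC": "/usr/bin/clang",
#     "CXX": "/usr/bin/clang++",
#     "AR": "/usr/bin/llvm-ar",
#     "CFLAGS": " -ferror-limit=1",
#     "CXXFLAGS": " -ferror-limit=1",
#     "ARGS": {"use_zig": "False", "use_clang": "True", "wasm": "False"}
#   }
#
# should_clean_build() compares this file against the current configuration and
# triggers clean_build_directory() on any mismatch.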
def main() -> None:
global USE_ZIG, USE_CLANG, WASM_BUILD
args = parse_arguments()
USE_ZIG = args.use_zig # use Zig's clang compiler
USE_CLANG = args.use_clang # Use pure Clang for WASM builds
WASM_BUILD = args.wasm
using_gcc = not USE_ZIG and not USE_CLANG and not WASM_BUILD
if using_gcc:
if not shutil.which("g++"):
print(
"gcc compiler not found in PATH, falling back zig's built in clang compiler"
)
USE_ZIG = True
USE_CLANG = False
if USE_CLANG:
if not _has_system_clang_compiler():
print(
"Clang compiler not found in PATH, falling back to Zig-clang compiler"
)
USE_ZIG = True
USE_CLANG = False
os.chdir(str(HERE))
print(f"Current directory: {Path('.').absolute()}")
build_info = get_build_info(args)
if args.clean or should_clean_build(build_info):
clean_build_directory()
compile_fastled(args.test)
update_build_info(build_info)
print("FastLED library compiled successfully.")
if __name__ == "__main__":
main()

View file

@ -0,0 +1,269 @@
import argparse
import os
import re
import subprocess
import sys
import tempfile
from dataclasses import dataclass
from ci.paths import PROJECT_ROOT
@dataclass
class FailedTest:
name: str
return_code: int
stdout: str
def run_command(command, use_gdb=False) -> tuple[int, str]:
captured_lines = []
if use_gdb:
with tempfile.NamedTemporaryFile(mode="w+", delete=False) as gdb_script:
gdb_script.write("set pagination off\n")
gdb_script.write("run\n")
gdb_script.write("bt full\n")
gdb_script.write("info registers\n")
gdb_script.write("x/16i $pc\n")
gdb_script.write("thread apply all bt full\n")
gdb_script.write("quit\n")
gdb_command = (
f"gdb -return-child-result -batch -x {gdb_script.name} --args {command}"
)
process = subprocess.Popen(
gdb_command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, # Merge stderr into stdout
shell=True,
text=True,
bufsize=1, # Line buffered
)
assert process.stdout is not None
# Stream and capture output
while True:
line = process.stdout.readline()
if not line and process.poll() is not None:
break
if line:
captured_lines.append(line.rstrip())
print(line, end="") # Print in real-time
os.unlink(gdb_script.name)
output = "\n".join(captured_lines)
return process.returncode, output
else:
process = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, # Merge stderr into stdout
shell=True,
text=True,
bufsize=1, # Line buffered
)
assert process.stdout is not None
# Stream and capture output
while True:
line = process.stdout.readline()
if not line and process.poll() is not None:
break
if line:
captured_lines.append(line.rstrip())
print(line, end="") # Print in real-time
output = "\n".join(captured_lines)
return process.returncode, output
def compile_tests(clean: bool = False, unknown_args: list[str] = []) -> None:
os.chdir(str(PROJECT_ROOT))
print("Compiling tests...")
command = ["uv", "run", "ci/cpp_test_compile.py"]
if clean:
command.append("--clean")
command.extend(unknown_args)
return_code, _ = run_command(" ".join(command))
if return_code != 0:
print("Compilation failed:")
sys.exit(1)
print("Compilation successful.")
def run_tests(specific_test: str | None = None) -> None:
test_dir = os.path.join("tests", ".build", "bin")
if not os.path.exists(test_dir):
print(f"Test directory not found: {test_dir}")
sys.exit(1)
print("Running tests...")
failed_tests: list[FailedTest] = []
files = os.listdir(test_dir)
# filter out all pdb files (windows) and only keep test_ executables
files = [f for f in files if not f.endswith(".pdb") and f.startswith("test_")]
# If specific test is specified, filter for just that test
if specific_test:
test_name = f"test_{specific_test}"
if sys.platform == "win32":
test_name += ".exe"
files = [f for f in files if f == test_name]
if not files:
print(f"Test {test_name} not found in {test_dir}")
sys.exit(1)
for test_file in files:
test_path = os.path.join(test_dir, test_file)
if os.path.isfile(test_path) and os.access(test_path, os.X_OK):
print(f"Running test: {test_file}")
return_code, stdout = run_command(test_path)
output = stdout
failure_pattern = re.compile(r"Test .+ failed with return code (\d+)")
failure_match = failure_pattern.search(output)
is_crash = failure_match is not None
if is_crash:
print("Test crashed. Re-running with GDB to get stack trace...")
_, gdb_stdout = run_command(test_path, use_gdb=True)
stdout += "\n--- GDB Output ---\n" + gdb_stdout
# Extract crash information
crash_info = extract_crash_info(gdb_stdout)
print(f"Crash occurred at: {crash_info.file}:{crash_info.line}")
print(f"Cause: {crash_info.cause}")
print(f"Stack: {crash_info.stack}")
print("Test output:")
print(stdout)
if return_code == 0:
print("Test passed")
elif is_crash:
if failure_match:
print(f"Test crashed with return code {failure_match.group(1)}")
else:
print(f"Test crashed with return code {return_code}")
else:
print(f"Test failed with return code {return_code}")
print("-" * 40)
if return_code != 0:
failed_tests.append(FailedTest(test_file, return_code, stdout))
if failed_tests:
for failed_test in failed_tests:
print(
f"Test {failed_test.name} failed with return code {failed_test.return_code}\n{failed_test.stdout}"
)
tests_failed = len(failed_tests)
failed_test_names = [test.name for test in failed_tests]
print(
f"{tests_failed} test{'s' if tests_failed != 1 else ''} failed: {', '.join(failed_test_names)}"
)
sys.exit(1)
print("All tests passed.")
@dataclass
class CrashInfo:
cause: str = "Unknown"
stack: str = "Unknown"
file: str = "Unknown"
line: str = "Unknown"
def extract_crash_info(gdb_output: str) -> CrashInfo:
lines = gdb_output.split("\n")
crash_info = CrashInfo()
try:
for i, line in enumerate(lines):
if line.startswith("Program received signal"):
try:
crash_info.cause = line.split(":", 1)[1].strip()
except IndexError:
crash_info.cause = line.strip()
elif line.startswith("#0"):
crash_info.stack = line
for j in range(i, len(lines)):
if "at" in lines[j]:
try:
_, location = lines[j].split("at", 1)
location = location.strip()
if ":" in location:
crash_info.file, crash_info.line = location.rsplit(
":", 1
)
else:
crash_info.file = location
except ValueError:
pass # If split fails, we keep the default values
break
break
except Exception as e:
print(f"Error parsing GDB output: {e}")
return crash_info
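# Illustrative fragment of GDB output that extract_crash_info() can parse
# (addresses, function and file names are placeholders):
#
#   Program received signal SIGSEGV, Segmentation fault.
#   #0  0x0000555555555131 in crash_me () at tests/test_foo.cpp:42
#
# yields file="tests/test_foo.cpp", line="42", and the signal line as the cause.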
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Compile and run C++ tests")
parser.add_argument(
"--compile-only",
action="store_true",
help="Only compile the tests without running them",
)
parser.add_argument(
"--run-only",
action="store_true",
help="Only run the tests without compiling them",
)
parser.add_argument(
"--only-run-failed-test",
action="store_true",
help="Only run the tests that failed in the previous run",
)
parser.add_argument(
"--clean", action="store_true", help="Clean build before compiling"
)
parser.add_argument(
"--test",
help="Specific test to run (without test_ prefix)",
)
parser.add_argument(
"--clang",
help="Use Clang compiler",
action="store_true",
)
args, unknown = parser.parse_known_args()
args.unknown = unknown
return args
def main() -> None:
args = parse_args()
run_only = args.run_only
compile_only = args.compile_only
specific_test = args.test
only_run_failed_test = args.only_run_failed_test
use_clang = args.clang
if not run_only:
passthrough_args = args.unknown
if use_clang:
passthrough_args.append("--use-clang")
compile_tests(clean=args.clean, unknown_args=passthrough_args)
if not compile_only:
if specific_test:
run_tests(specific_test)
else:
cmd = "ctest --test-dir tests/.build --output-on-failure"
if only_run_failed_test:
cmd += " --rerun-failed"
rtn, stdout = run_command(cmd)
if rtn != 0:
print("Failed tests:")
print(stdout)
sys.exit(1)
if __name__ == "__main__":
main()

View file

@ -0,0 +1,186 @@
"""
Work in progress to generate doxygen via a script instead of a GitHub action.
"""
import os
import platform
import shutil
import subprocess
import warnings
from pathlib import Path
from typing import Optional, Tuple
from download import download # type: ignore
# Configs
DOXYGEN_VERSION = (
"1.13.2" # DOXYGEN_AWESOME styler is has certain restrictions with doxygen version
)
DOXYGEN_AWESOME_VERSION = "2.3.4" # deprecating
DOXYFILE_PATH = Path("docs/Doxyfile")
HTML_OUTPUT_DIR = Path("docs/html")
DOXYGEN_CSS_REPO = "https://github.com/jothepro/doxygen-awesome-css" # deprecating
HERE = Path(__file__).parent.resolve()
PROJECT_ROOT = HERE.parent
DOCS_ROOT = PROJECT_ROOT / "docs"
DOCS_TOOL_PATH = PROJECT_ROOT / ".tools_cache"
DOCS_OUTPUT_PATH = DOCS_ROOT / "html"
def run(
cmd: str,
cwd: Optional[str] = None,
shell: bool = True,
check: bool = True,
capture: bool = True,
) -> str:
print(f"Running: {cmd}")
result = subprocess.run(
cmd, shell=shell, cwd=cwd, check=False, capture_output=capture, text=False
)
if capture:
stdout = result.stdout.decode("utf-8") if result.stdout else ""
stderr = result.stderr.decode("utf-8") if result.stderr else ""
else:
stdout = ""
stderr = ""
if result.returncode != 0:
msg = f"Command failed with exit code {result.returncode}:\nstdout:\n{stdout}\n\nstderr:\n{stderr}"
warnings.warn(msg)
if check:
raise subprocess.CalledProcessError(
result.returncode, cmd, output=result.stdout
)
return stdout.strip()
def get_git_info() -> Tuple[str, str]:
release_tag = os.environ.get("RELEASE_TAG", "")
try:
latest_tag = run("git tag | grep -E '^[0-9]' | sort -V | tail -1")
latest_tag = latest_tag if latest_tag else ""
except subprocess.CalledProcessError:
latest_tag = ""
git_sha_short = run("git rev-parse --short HEAD")
full_sha = run("git rev-parse HEAD")
project_number = release_tag or latest_tag or git_sha_short
commit_message = (
f"{project_number} ({full_sha})"
if project_number != git_sha_short
else project_number
)
print(f"Project number: {project_number}")
print(f"Commit message: {commit_message}")
return project_number, commit_message
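# Example (illustrative): with RELEASE_TAG unset and a latest tag of "3.9.0",
# get_git_info() returns project_number="3.9.0" and a commit message like
# "3.9.0 (<full sha>)"; with no tags at all it falls back to the short SHA.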
def install_doxygen_windows() -> Path:
print("Installing Doxygen...")
doxygen_url = (
f"https://www.doxygen.nl/files/doxygen-{DOXYGEN_VERSION}.windows.x64.bin.zip"
)
zip_path = DOCS_TOOL_PATH / f"doxygen-{DOXYGEN_VERSION}.zip"
extract_dir = DOCS_TOOL_PATH / f"doxygen-{DOXYGEN_VERSION}"
# Create tool path if it doesn't exist
DOCS_TOOL_PATH.mkdir(exist_ok=True, parents=True)
download(doxygen_url, zip_path)
shutil.unpack_archive(str(zip_path), extract_dir)
bin_path = next(extract_dir.glob("**/doxygen.exe"), None)
if not bin_path:
raise FileNotFoundError("Doxygen executable not found after extraction.")
print(f"Doxygen installed at: {bin_path}")
return bin_path
def install_doxygen_unix() -> Path:
print("Installing Doxygen...")
archive = f"doxygen-{DOXYGEN_VERSION}.linux.bin.tar.gz"
url = f"https://www.doxygen.nl/files/{archive}"
# Create tool path if it doesn't exist
DOCS_TOOL_PATH.mkdir(exist_ok=True, parents=True)
# Change to tool directory for download and extraction
original_dir = os.getcwd()
os.chdir(str(DOCS_TOOL_PATH))
try:
run(f"wget -q {url}")
run(f"tar -xf {archive}")
bin_dir = DOCS_TOOL_PATH / f"doxygen-{DOXYGEN_VERSION}"
return bin_dir / "bin" / "doxygen"
finally:
os.chdir(original_dir)
def install_theme() -> Path:
print("Installing Doxygen Awesome Theme...")
theme_path = DOCS_ROOT / "doxygen-awesome-css"
if theme_path.exists():
return theme_path
run(
f"git clone --depth 1 -b v{DOXYGEN_AWESOME_VERSION} {DOXYGEN_CSS_REPO}",
cwd=str(DOCS_ROOT),
)
return theme_path
def generate_docs(doxygen_bin: Path) -> None:
print("Generating documentation...")
cmd_str = f'"{doxygen_bin}" {DOXYFILE_PATH.name}'
run(cmd_str, cwd=str(DOCS_ROOT), capture=False)
# def install_graphviz() -> None:
# url: str = get_latest_release_for_platform()
# print(url)
def main() -> None:
is_windows = platform.system() == "Windows"
# is_macos = platform.system() == "Darwin"
_, commit_msg = get_git_info()
if is_windows:
doxygen_bin = install_doxygen_windows()
# add to path C:\Program Files\Graphviz\bin\
os.environ["PATH"] += os.pathsep + r"C:\Program Files\Graphviz\bin"
else:
doxygen_bin = install_doxygen_unix()
# install_theme()
# install_graphviz() # Work in progress
# Verify Graphviz installation
try:
dot_version = run("dot -V", check=False)
print(f"Graphviz detected: {dot_version}")
except Exception:
warnings.warn(
"Graphviz (dot) not found in PATH. Diagrams may not be generated."
)
    # Check if Graphviz is installed
# if linux
if not is_windows:
run("dot -Tsvg -Kneato -Grankdir=LR", check=True)
generate_docs(doxygen_bin=doxygen_bin)
print(f"\n✅ Docs generated in: {HTML_OUTPUT_DIR}")
print(f"📄 Commit message: {commit_msg}")
print("✨ You can now manually deploy to GitHub Pages or automate this step.")
if __name__ == "__main__":
main()

View file

@ -0,0 +1,197 @@
import argparse
import json
import re
import subprocess
import sys
from pathlib import Path
from tempfile import TemporaryDirectory
from ci.bin_2_elf import bin_to_elf
from ci.elf import dump_symbol_sizes
from ci.map_dump import map_dump
def cpp_filt(cpp_filt_path: Path, input_text: str) -> str:
"""
Demangle C++ symbols using c++filt.
Args:
cpp_filt_path (Path): Path to c++filt executable.
input_text (str): Text to demangle.
Returns:
str: Demangled text.
"""
if not cpp_filt_path.exists():
raise FileNotFoundError(f"cppfilt not found at '{cpp_filt_path}'")
command = [str(cpp_filt_path), "-t", "-n"]
process = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
stdout, stderr = process.communicate(input=input_text)
if process.returncode != 0:
raise RuntimeError(f"Error running c++filt: {stderr}")
return stdout
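# Example (illustrative): feeding a mangled name through c++filt demangles it,
# e.g. cpp_filt(cpp_filt_path, "_Z3foov") would return "foo()" plus a newline.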
def demangle_gnu_linkonce_symbols(cpp_filt_path: Path, map_text: str) -> str:
"""
Demangle .gnu.linkonce.t symbols in the map file.
Args:
cpp_filt_path (Path): Path to c++filt executable.
map_text (str): Content of the map file.
Returns:
str: Map file content with demangled symbols.
"""
# Extract all .gnu.linkonce.t symbols
pattern = r"\.gnu\.linkonce\.t\.(.+?)\s"
matches = re.findall(pattern, map_text)
if not matches:
return map_text
# Create a block of text with the extracted symbols
symbols_block = "\n".join(matches)
# Demangle the symbols
demangled_block = cpp_filt(cpp_filt_path, symbols_block)
# Create a dictionary of mangled to demangled symbols
demangled_dict = dict(zip(matches, demangled_block.strip().split("\n")))
# Replace the mangled symbols with demangled ones in the original text
for mangled, demangled in demangled_dict.items():
map_text = map_text.replace(
f".gnu.linkonce.t.{mangled}", f".gnu.linkonce.t.{demangled}"
)
return map_text
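# Example (illustrative): a map-file entry such as
#   .gnu.linkonce.t._Z3foov
# becomes
#   .gnu.linkonce.t.foo()
# after the mangled suffix is run through c++filt above.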
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Convert a binary file to ELF using map file."
)
parser.add_argument("--first", action="store_true", help="Inspect the first board")
parser.add_argument("--cwd", type=Path, help="Custom working directory")
return parser.parse_args()
def load_build_info(build_info_path: Path) -> dict:
"""
Load build information from a JSON file.
Args:
build_info_path (Path): Path to the build_info.json file.
Returns:
dict: Parsed JSON data.
"""
if not build_info_path.exists():
raise FileNotFoundError(f"Build info JSON not found at '{build_info_path}'")
return json.loads(build_info_path.read_text())
def main() -> int:
args = parse_args()
if args.cwd:
root_build_dir = args.cwd / ".build"
else:
root_build_dir = Path(".build")
board_dirs = [d for d in root_build_dir.iterdir() if d.is_dir()]
if not board_dirs:
print(f"No board directories found in {root_build_dir.absolute()}")
return 1
print("Available boards:")
for i, board_dir in enumerate(board_dirs):
print(f"[{i}]: {board_dir.name}")
which = (
0
if args.first
else int(input("Enter the number of the board you want to inspect: "))
)
board_dir = board_dirs[which]
build_info_json = board_dir / "build_info.json"
build_info = load_build_info(build_info_json)
board = board_dir.name
board_info = build_info.get(board) or build_info[next(iter(build_info))]
# Validate paths from build_info.json
elf_path = Path(board_info.get("prog_path", ""))
if not elf_path.exists():
print(
f"Error: ELF path '{elf_path}' does not exist. Check the 'prog_path' in build_info.json."
)
return 1
bin_file = elf_path.with_suffix(".bin")
if not bin_file.exists():
# use .hex or .uf2 if .bin doesn't exist
bin_file = elf_path.with_suffix(".hex")
if not bin_file.exists():
bin_file = elf_path.with_suffix(".uf2")
if not bin_file.exists():
print(f"Error: Binary file not found for '{elf_path}'")
return 1
cpp_filt_path = Path(board_info["aliases"]["c++filt"])
ld_path = Path(board_info["aliases"]["ld"])
as_path = Path(board_info["aliases"]["as"])
nm_path = Path(board_info["aliases"]["nm"])
objcopy_path = Path(board_info["aliases"]["objcopy"])
map_file = board_dir / "firmware.map"
if not map_file.exists():
# Search for the map file
map_file = bin_file.with_suffix(".map")
if not map_file.exists():
possible_map_files = list(board_dir.glob("**/firmware.map"))
if possible_map_files:
map_file = possible_map_files[0]
else:
print("Error: firmware.map file not found")
return 1
try:
with TemporaryDirectory() as temp_dir:
temp_dir_path = Path(temp_dir)
output_elf = bin_to_elf(
bin_file,
map_file,
as_path,
ld_path,
objcopy_path,
temp_dir_path / "output.elf",
)
out = dump_symbol_sizes(nm_path, cpp_filt_path, output_elf)
print(out)
except Exception as e:
print(
f"Error while converting binary to ELF, binary analysis will not work on this build: {e}"
)
map_dump(map_file)
# Demangle .gnu.linkonce.t symbols and print map file
print("\n##################################################")
print("# Map file dump:")
print("##################################################\n")
map_text = map_file.read_text()
demangled_map_text = demangle_gnu_linkonce_symbols(cpp_filt_path, map_text)
print(demangled_map_text)
return 0
if __name__ == "__main__":
sys.exit(main())

View file

@ -0,0 +1,72 @@
import argparse
import json
from pathlib import Path
from ci.elf import dump_symbol_sizes
HERE = Path(__file__).resolve().parent
PROJECT_ROOT = HERE.parent
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Inspect a compiled binary")
parser.add_argument("--first", action="store_true", help="Inspect the first board")
parser.add_argument("--cwd", type=str, help="Custom working directory")
parser.add_argument("--elf", type=str, help="Path to the ELF file to inspect")
return parser.parse_args()
def main() -> int:
args = parse_args()
if args.elf:
firmware_path = Path(args.elf)
if not firmware_path.exists():
print(f"ELF file not found: {firmware_path}")
return 1
if args.cwd:
# os.chdir(args.cwd)
root_build_dir = Path(args.cwd) / ".build"
else:
root_build_dir = Path(".build")
# Find the first board directory
board_dirs = [d for d in root_build_dir.iterdir() if d.is_dir()]
if not board_dirs:
# print("No board directories found in .build")
print(f"No board directories found in {root_build_dir.absolute()}")
return 1
# display all the boards to the user and ask them to select which one they want by number
print("Available boards:")
for i, board_dir in enumerate(board_dirs):
print(f"[{i}]: {board_dir.name}")
if args.first:
which = 0
else:
which = int(input("Enter the number of the board you want to inspect: "))
board_dir = board_dirs[which]
board = board_dir.name
build_info_json = board_dir / "build_info.json"
build_info = json.loads(build_info_json.read_text())
board_info = build_info.get(board) or build_info[next(iter(build_info))]
firmware_path = Path(board_info["prog_path"])
cpp_filt_path = Path(board_info["aliases"]["c++filt"])
print(f"Dumping symbol sizes for {board} firmware: {firmware_path}")
try:
nm_path = Path(board_info["aliases"]["nm"])
symbol_sizes = dump_symbol_sizes(nm_path, cpp_filt_path, firmware_path)
print(symbol_sizes)
except Exception as e:
print(f"Error while dumping symbol sizes: {e}")
return 0
if __name__ == "__main__":
main()

View file

@ -0,0 +1,129 @@
import os
import subprocess
import sys
from pathlib import Path
from ci.paths import BUILD
from ci.tools import load_tools
def _list_builds() -> list[Path]:
str_paths = os.listdir(BUILD)
paths = [BUILD / p for p in str_paths]
dirs = [p for p in paths if p.is_dir()]
return dirs
def _check_build(build: Path) -> bool:
# 1. should contain a build_info.json file
# 2. should contain a .pio/build directory
has_build_info = (build / "build_info.json").exists()
has_pio_build = (build / ".pio" / "build").exists()
return has_build_info and has_pio_build
def _prompt_build() -> Path:
builds = _list_builds()
if not builds:
print("Error: No builds found", file=sys.stderr)
sys.exit(1)
print("Select a build:")
for i, build in enumerate(builds):
print(f" [{i}]: {build}")
while True:
try:
which = int(input("Enter the number of the build to use: "))
if 0 <= which < len(builds):
valid = _check_build(BUILD / builds[which])
if valid:
return BUILD / builds[which]
print("Error: Invalid build", file=sys.stderr)
else:
print("Error: Invalid selection", file=sys.stderr)
continue
except ValueError:
print("Error: Invalid input", file=sys.stderr)
continue
def _prompt_object_file(build: Path) -> Path:
# Look for object files in .pio/build directory
build_dir = build / ".pio" / "build"
object_files = []
# Walk through build directory to find .o files
for root, _, files in os.walk(build_dir):
for file in files:
if file.endswith(".o") and "FrameworkArduino" not in file:
full_path = Path(root) / file
if "FrameworkArduino" not in full_path.parts:
object_files.append(full_path)
if not object_files:
print("Error: No object files found", file=sys.stderr)
sys.exit(1)
print("\nSelect an object file:")
for i, obj_file in enumerate(object_files):
print(f" [{i}]: {obj_file.relative_to(build_dir)}")
while True:
try:
which = int(input("Enter the number of the object file to use: "))
if 0 <= which < len(object_files):
return object_files[which]
print("Error: Invalid selection", file=sys.stderr)
except ValueError:
print("Error: Invalid input", file=sys.stderr)
continue
def cli() -> None:
import argparse
parser = argparse.ArgumentParser(
description="Dump object file information using build tools"
)
parser.add_argument(
"build_path",
type=Path,
nargs="?",
help="Path to build directory containing build info JSON file",
)
args = parser.parse_args()
build_path = args.build_path
# Check if object file was provided and exists
if build_path is None:
build_path = _prompt_build()
else:
if not _check_build(build_path):
print("Error: Invalid build directory", file=sys.stderr)
sys.exit(1)
assert build_path is not None
assert build_path
build_info_path = build_path / "build_info.json"
assert build_info_path.exists(), f"File not found: {build_info_path}"
tools = load_tools(build_info_path)
object_file = _prompt_object_file(build_path)
cmd = [str(tools.objdump_path), "--syms", str(object_file)]
if sys.platform == "win32":
cmd = ["cmd", "/c"] + cmd
cmd_str = subprocess.list2cmdline(cmd)
subprocess.run(cmd, check=True)
print("\nDone. Command used:", cmd_str)
if __name__ == "__main__":
try:
cli()
except KeyboardInterrupt:
print("Exiting...")
sys.exit(1)

View file

@ -0,0 +1,16 @@
#include "FastLED.h"
// Let's include a bunch of stuff and see if it breaks the build.
#include <WiFi.h>
#include <ESPmDNS.h>
#include <NetworkUdp.h>
#include <ArduinoOTA.h>
#include <ArduinoJson.h>
void setup() {
}
void loop() {
}

View file

@ -0,0 +1,17 @@
[platformio]
src_dir = symlink://../../../src
[env:dev]
platform = https://github.com/pioarduino/platform-espressif32/releases/download/51.03.04/platform-espressif32.zip
board = esp32-s3-devkitc-1
framework = arduino
lib_deps =
https://github.com/dvarrel/AsyncTCP
https://github.com/mathieucarbou/ESPAsyncWebServer
ArduinoJson
FS
ArduinoOTA
ESPmDNS
lib_ldf_mode=deep
build_flags=-DFASTLED_STUB_MAIN_INCLUDE_INO="../ci/kitchensink/kitchensink.ino.cpp"

View file

@ -0,0 +1,11 @@
[platformio]
src_dir = symlink://../../../src
[env:dev]
platform = platformio/native
build_flags =
-DFASTLED_STUB_IMPL
-DFASTLED_STUB_MAIN_INCLUDE_INO="../examples/Blink/Blink.ino"
-std=c++17

View file

@ -0,0 +1,42 @@
import argparse
import sys
from pathlib import Path
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Convert a binary file to ELF using map file."
)
parser.add_argument("--first", action="store_true", help="Inspect the first board")
parser.add_argument("--cwd", type=Path, help="Custom working directory")
return parser.parse_args()
def main() -> int:
args = parse_args()
if args.cwd:
root_build_dir = args.cwd / ".build"
else:
root_build_dir = Path(".build")
board_dirs = [d for d in root_build_dir.iterdir() if d.is_dir()]
if not board_dirs:
print(f"No board directories found in {root_build_dir.absolute()}")
return 1
print("Available boards:")
for i, board_dir in enumerate(board_dirs):
print(f"[{i}]: {board_dir.name}")
which = (
0
if args.first
else int(input("Enter the number of the board you want to inspect: "))
)
board_dir = board_dirs[which]
# build_info_json = board_dir / "build_info.json"
optimization_report = board_dir / "optimization_report.txt"
text = optimization_report.read_text(encoding="utf-8")
print(text)
return 0
if __name__ == "__main__":
sys.exit(main())

View file

@ -0,0 +1,419 @@
#!/usr/bin/env python
"""A wrapper script around clang-format, suitable for linting multiple files
and to use for continuous integration.
This is an alternative API for the clang-format command line.
It runs over multiple files and directories in parallel.
A diff output is produced and a sensible exit code is returned.
"""
from __future__ import print_function, unicode_literals
import argparse
import codecs
import difflib
import errno
import fnmatch
import io
import multiprocessing
import os
import signal
import subprocess
import sys
import traceback
from functools import partial
try:
from subprocess import DEVNULL # py3k
except ImportError:
DEVNULL = open(os.devnull, "wb") # type: ignore
DEFAULT_EXTENSIONS = "c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx"
DEFAULT_CLANG_FORMAT_IGNORE = ".clang-format-ignore"
class ExitStatus:
SUCCESS = 0
DIFF = 1
TROUBLE = 2
def excludes_from_file(ignore_file):
excludes = []
try:
with io.open(ignore_file, "r", encoding="utf-8") as f:
for line in f:
if line.startswith("#"):
# ignore comments
continue
pattern = line.rstrip()
if not pattern:
# allow empty lines
continue
excludes.append(pattern)
except EnvironmentError as e:
if e.errno != errno.ENOENT:
raise
return excludes
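# Sketch of a .clang-format-ignore file this function reads (glob-like patterns,
# one per line; '#' lines are comments and blank lines are skipped):
#
#   # third-party code we do not format
#   src/third_party/*
#   *.generated.h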
def list_files(files, recursive=False, extensions=None, exclude=None):
if extensions is None:
extensions = []
if exclude is None:
exclude = []
out = []
for file in files:
if recursive and os.path.isdir(file):
for dirpath, dnames, fnames in os.walk(file):
fpaths = [os.path.join(dirpath, fname) for fname in fnames]
for pattern in exclude:
# os.walk() supports trimming down the dnames list
# by modifying it in-place,
# to avoid unnecessary directory listings.
dnames[:] = [
x
for x in dnames
if not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)
]
fpaths = [x for x in fpaths if not fnmatch.fnmatch(x, pattern)]
for f in fpaths:
ext = os.path.splitext(f)[1][1:]
if ext in extensions:
out.append(f)
else:
out.append(file)
return out
def make_diff(file, original, reformatted):
return list(
difflib.unified_diff(
original,
reformatted,
fromfile="{}\t(original)".format(file),
tofile="{}\t(reformatted)".format(file),
n=3,
)
)
class DiffError(Exception):
def __init__(self, message, errs=None):
super(DiffError, self).__init__(message)
self.errs = errs or []
class UnexpectedError(Exception):
def __init__(self, message, exc=None):
super(UnexpectedError, self).__init__(message)
self.formatted_traceback = traceback.format_exc()
self.exc = exc
def run_clang_format_diff_wrapper(args, file):
try:
ret = run_clang_format_diff(args, file)
return ret
except DiffError:
raise
except Exception as e:
raise UnexpectedError("{}: {}: {}".format(file, e.__class__.__name__, e), e)
def run_clang_format_diff(args, file):
try:
with io.open(file, "r", encoding="utf-8") as f:
original = f.readlines()
except IOError as exc:
raise DiffError(str(exc))
if args.in_place:
invocation = [args.clang_format_executable, "-i", file]
else:
invocation = [args.clang_format_executable, file]
if args.style:
invocation.extend(["--style", args.style])
if args.dry_run:
print(" ".join(invocation))
return [], []
# Use of utf-8 to decode the process output.
#
# Hopefully, this is the correct thing to do.
#
# It's done due to the following assumptions (which may be incorrect):
# - clang-format will returns the bytes read from the files as-is,
# without conversion, and it is already assumed that the files use utf-8.
# - if the diagnostics were internationalized, they would use utf-8:
# > Adding Translations to Clang
# >
# > Not possible yet!
# > Diagnostic strings should be written in UTF-8,
# > the client can translate to the relevant code page if needed.
# > Each translation completely replaces the format string
# > for the diagnostic.
# > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation
#
# It's not pretty, due to Python 2 & 3 compatibility.
encoding_py3 = {}
if sys.version_info[0] >= 3:
encoding_py3["encoding"] = "utf-8"
try:
proc = subprocess.Popen(
invocation,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
**encoding_py3,
)
except OSError as exc:
raise DiffError(
"Command '{}' failed to start: {}".format(
subprocess.list2cmdline(invocation), exc
)
)
proc_stdout = proc.stdout
proc_stderr = proc.stderr
assert proc_stdout is not None
assert proc_stderr is not None
if sys.version_info[0] < 3:
# make the pipes compatible with Python 3,
# reading lines should output unicode
encoding = "utf-8"
proc_stdout = codecs.getreader(encoding)(proc_stdout)
proc_stderr = codecs.getreader(encoding)(proc_stderr)
# hopefully the stderr pipe won't get full and block the process
outs = list(proc_stdout.readlines())
errs = list(proc_stderr.readlines())
proc.wait()
if proc.returncode:
raise DiffError(
"Command '{}' returned non-zero exit status {}".format(
subprocess.list2cmdline(invocation), proc.returncode
),
errs,
)
if args.in_place:
return [], errs
return make_diff(file, original, outs), errs
def bold_red(s):
return "\x1b[1m\x1b[31m" + s + "\x1b[0m"
def colorize(diff_lines):
def bold(s):
return "\x1b[1m" + s + "\x1b[0m"
def cyan(s):
return "\x1b[36m" + s + "\x1b[0m"
def green(s):
return "\x1b[32m" + s + "\x1b[0m"
def red(s):
return "\x1b[31m" + s + "\x1b[0m"
for line in diff_lines:
if line[:4] in ["--- ", "+++ "]:
yield bold(line)
elif line.startswith("@@ "):
yield cyan(line)
elif line.startswith("+"):
yield green(line)
elif line.startswith("-"):
yield red(line)
else:
yield line
def print_diff(diff_lines, use_color):
if use_color:
diff_lines = colorize(diff_lines)
if sys.version_info[0] < 3:
sys.stdout.writelines((line.encode("utf-8") for line in diff_lines))
else:
sys.stdout.writelines(diff_lines)
def print_trouble(prog, message, use_colors):
error_text = "error:"
if use_colors:
error_text = bold_red(error_text)
print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--clang-format-executable",
metavar="EXECUTABLE",
help="path to the clang-format executable",
default="clang-format",
)
parser.add_argument(
"--extensions",
help="comma separated list of file extensions (default: {})".format(
DEFAULT_EXTENSIONS
),
default=DEFAULT_EXTENSIONS,
)
parser.add_argument(
"-r",
"--recursive",
action="store_true",
help="run recursively over directories",
)
parser.add_argument(
"-d", "--dry-run", action="store_true", help="just print the list of files"
)
parser.add_argument(
"-i",
"--in-place",
action="store_true",
help="format file instead of printing differences",
)
parser.add_argument("files", metavar="file", nargs="+")
parser.add_argument(
"-q",
"--quiet",
action="store_true",
help="disable output, useful for the exit code",
)
parser.add_argument(
"-j",
metavar="N",
type=int,
default=0,
help="run N clang-format jobs in parallel" " (default number of cpus + 1)",
)
parser.add_argument(
"--color",
default="auto",
choices=["auto", "always", "never"],
help="show colored diff (default: auto)",
)
parser.add_argument(
"-e",
"--exclude",
metavar="PATTERN",
action="append",
default=[],
help="exclude paths matching the given glob-like pattern(s)"
" from recursive search",
)
parser.add_argument(
"--style",
help="formatting style to apply (LLVM, Google, Chromium, Mozilla, WebKit)",
)
args = parser.parse_args()
# use default signal handling, like diff return SIGINT value on ^C
# https://bugs.python.org/issue14229#msg156446
signal.signal(signal.SIGINT, signal.SIG_DFL)
try:
signal.SIGPIPE # type: ignore
except AttributeError:
# compatibility, SIGPIPE does not exist on Windows
pass
else:
signal.signal(signal.SIGPIPE, signal.SIG_DFL) # type: ignore
colored_stdout = False
colored_stderr = False
if args.color == "always":
colored_stdout = True
colored_stderr = True
elif args.color == "auto":
colored_stdout = sys.stdout.isatty()
colored_stderr = sys.stderr.isatty()
version_invocation = [args.clang_format_executable, str("--version")]
try:
subprocess.check_call(version_invocation, stdout=DEVNULL)
except subprocess.CalledProcessError as e:
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
return ExitStatus.TROUBLE
except OSError as e:
print_trouble(
parser.prog,
"Command '{}' failed to start: {}".format(
subprocess.list2cmdline(version_invocation), e
),
use_colors=colored_stderr,
)
return ExitStatus.TROUBLE
retcode = ExitStatus.SUCCESS
excludes = excludes_from_file(DEFAULT_CLANG_FORMAT_IGNORE)
excludes.extend(args.exclude)
files = list_files(
args.files,
recursive=args.recursive,
exclude=excludes,
extensions=args.extensions.split(","),
)
if not files:
return
njobs = args.j
if njobs == 0:
njobs = multiprocessing.cpu_count() + 1
njobs = min(len(files), njobs)
if njobs == 1:
# execute directly instead of in a pool,
# less overhead, simpler stacktraces
it = (run_clang_format_diff_wrapper(args, file) for file in files)
pool = None
else:
pool = multiprocessing.Pool(njobs)
it = pool.imap_unordered(partial(run_clang_format_diff_wrapper, args), files)
pool.close()
while True:
try:
outs, errs = next(it)
except StopIteration:
break
except DiffError as e:
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
retcode = ExitStatus.TROUBLE
sys.stderr.writelines(e.errs)
except UnexpectedError as e:
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
sys.stderr.write(e.formatted_traceback)
retcode = ExitStatus.TROUBLE
# stop at the first unexpected error,
# something could be very wrong,
# don't process all files unnecessarily
if pool:
pool.terminate()
break
else:
sys.stderr.writelines(errs)
if outs == []:
continue
if not args.quiet:
print_diff(outs, use_color=colored_stdout)
if retcode == ExitStatus.SUCCESS:
retcode = ExitStatus.DIFF
if pool:
pool.join()
return retcode
if __name__ == "__main__":
sys.exit(main())

View file

@ -0,0 +1,29 @@
"""
Unit test file.
"""
import unittest
class ApiTester(unittest.TestCase):
"""Main tester class."""
def test_build_all_examples(self) -> None:
"""Test command line interface (CLI)."""
from fastled import Api, Test # type: ignore
with Api.server(auto_updates=True) as server:
exception_map = Test.test_examples(host=server)
if len(exception_map) > 0:
exception: Exception
msg: str = ""
for example, exception in exception_map.items():
msg += f"Failed to compile example: {example}, error: {exception}\n"
self.fail(msg)
# self.assertEqual(0, len(out), f"Failed tests: {out}")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,66 @@
import subprocess
import unittest
import warnings
from pathlib import Path
from ci.bin_2_elf import bin_to_elf
from ci.elf import dump_symbol_sizes
from ci.paths import PROJECT_ROOT
from ci.tools import Tools, load_tools
HERE = Path(__file__).resolve().parent.absolute()
UNO = HERE / "uno"
OUTPUT = HERE / "output"
BUILD_INFO_PATH = PROJECT_ROOT / ".build" / "uno" / "build_info.json"
DISABLED = True
class TestBinToElf(unittest.TestCase):
@classmethod
def setUpClass(cls):
if DISABLED:
return
uno_build = PROJECT_ROOT / ".build" / "uno"
print(f"Checking for Uno build in: {uno_build}")
if not uno_build.exists():
print("Uno build not found. Running compilation...")
try:
subprocess.run(
"uv run ci/ci-compile.py uno --examples Blink",
shell=True,
check=True,
)
print("Compilation completed successfully.")
except subprocess.CalledProcessError as e:
print(f"Error during compilation: {e}")
raise
@unittest.skip("Skip bin to elf conversion test")
def test_bin_to_elf_conversion(self) -> None:
if DISABLED:
return
tools: Tools = load_tools(BUILD_INFO_PATH)
bin_file = UNO / "firmware.hex"
map_file = UNO / "firmware.map"
output_elf = OUTPUT / "output.elf"
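        # Rebuild an ELF from the raw hex image and linker map, using the assembler, linker and objcopy listed in build_info.json.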
try:
bin_to_elf(
bin_file,
map_file,
tools.as_path,
tools.ld_path,
tools.objcopy_path,
output_elf,
)
stdout = dump_symbol_sizes(tools.nm_path, tools.cpp_filt_path, output_elf)
print(stdout)
except Exception as e:
warnings.warn(f"Error while converting binary to ELF: {e}")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,49 @@
import subprocess
import unittest
from pathlib import Path
from ci.elf import dump_symbol_sizes
from ci.paths import PROJECT_ROOT
from ci.tools import Tools, load_tools
HERE = Path(__file__).resolve().parent.absolute()
UNO = HERE / "uno"
OUTPUT = HERE / "output"
ELF_FILE = UNO / "firmware.elf"
BUILD_INFO_PATH = PROJECT_ROOT / ".build" / "uno" / "build_info.json"
PLATFORMIO_PATH = Path.home() / ".platformio"
PLATFORMIO_PACKAGES_PATH = PLATFORMIO_PATH / "packages"
TOOLCHAIN_AVR = PLATFORMIO_PACKAGES_PATH / "toolchain-atmelavr"
def init() -> None:
uno_build = PROJECT_ROOT / ".build" / "uno"
print(f"Checking for Uno build in: {uno_build}")
if not BUILD_INFO_PATH.exists() or not TOOLCHAIN_AVR.exists():
print("Uno build not found. Running compilation...")
try:
subprocess.run(
"uv run ci/ci-compile.py uno --examples Blink",
shell=True,
check=True,
cwd=str(PROJECT_ROOT),
)
print("Compilation completed successfully.")
except subprocess.CalledProcessError as e:
print(f"Error during compilation: {e}")
raise
class TestBinToElf(unittest.TestCase):
def test_bin_to_elf_conversion(self) -> None:
init()
tools: Tools = load_tools(BUILD_INFO_PATH)
msg = dump_symbol_sizes(tools.nm_path, tools.cpp_filt_path, ELF_FILE)
print(msg)
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,16 @@
import unittest
from pathlib import Path
from ci.map_dump import map_dump
HERE = Path(__file__).resolve().parent.absolute()
UNO = HERE / "uno"
class TestMapParser(unittest.TestCase):
def test_map_parser(self):
map_dump(UNO / "firmware.map")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,119 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from ci.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
NUM_WORKERS = (os.cpu_count() or 1) * 4
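# File checks are I/O-bound, so the thread pool is oversubscribed relative to the CPU count.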
# Files that are allowed to not have #pragma once
EXCLUDED_FILES = [
# Add any exceptions here
]
EXCLUDED_DIRS = [
"third_party",
"platforms",
]
class TestMissingPragmaOnce(unittest.TestCase):
def check_file(self, file_path: str) -> list[str]:
"""Check if a header file has #pragma once directive or if a cpp file incorrectly has it."""
failings: list[str] = []
with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
content = f.read()
if file_path.endswith(".h"):
# For header files, check if #pragma once is missing
if "#pragma once" not in content:
failings.append(f"Missing #pragma once in {file_path}")
elif file_path.endswith(".cpp"):
# For cpp files, check if #pragma once is incorrectly present
if "#pragma once" in content:
failings.append(f"Incorrect #pragma once in cpp file: {file_path}")
return failings
def test_pragma_once_usage(self) -> None:
"""
Searches through files to:
1. Check for missing #pragma once in header files
2. Check for incorrect #pragma once in cpp files
"""
files_to_check = []
current_dir = None
# Collect files to check
for root, dirs, files in os.walk(SRC_ROOT):
# Log when we enter a new directory
rel_path = os.path.relpath(root, SRC_ROOT)
if current_dir != rel_path:
current_dir = rel_path
print(f"Traversing directory: {rel_path}")
if rel_path in EXCLUDED_DIRS:
print(f" Skipping excluded directory: {rel_path}")
dirs[:] = [] # Skip this directory and its subdirectories
continue
            # Check if this directory should be excluded (nested under an excluded path)
            skip_dir = False
            for excluded_dir in EXCLUDED_DIRS:
                npath = os.path.normpath(root)
                npath_excluded = os.path.normpath(os.path.join(SRC_ROOT, excluded_dir))
                if npath.startswith(npath_excluded):
                    print(f"  Skipping excluded directory: {rel_path}")
                    skip_dir = True
                    break
            if skip_dir:
                continue
for file in files:
if file.endswith((".h", ".cpp")): # Check both header and cpp files
file_path = os.path.join(root, file)
# Check if file is excluded
# if any(file_path.endswith(excluded) for excluded in EXCLUDED_FILES):
# print(f" Skipping excluded file: {file}")
# continue
                    for excluded in EXCLUDED_FILES:
                        # print(f"Checking {file_path} against excluded {excluded}")
                        if file_path.endswith(excluded):
                            print(f"  Skipping excluded file: {file}")
                            break
                    else:
                        files_to_check.append(file_path)
print(f"Found {len(files_to_check)} files to check")
# Process files in parallel
all_failings = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
# Report results
if all_failings:
msg = f"Found {len(all_failings)} pragma once issues: \n" + "\n".join(
all_failings
)
for failing in all_failings:
print(failing)
self.fail(msg)
else:
print("All files have proper pragma once usage.")
print(f"Pragma once check completed. Processed {len(files_to_check)} files.")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,132 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from ci.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
PLATFORMS_DIR = os.path.join(SRC_ROOT, "platforms")
PLATFORMS_ESP_DIR = os.path.join(PLATFORMS_DIR, "esp")
NUM_WORKERS = (os.cpu_count() or 1) * 4
ENABLE_PARANOID_GNU_HEADER_INSPECTION = False
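# When enabled, ESP platform sources are also scanned for the Arduino HAL header listed below.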
if ENABLE_PARANOID_GNU_HEADER_INSPECTION:
BANNED_HEADERS_ESP = ["esp32-hal.h"]
else:
BANNED_HEADERS_ESP = []
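# Headers disallowed in core (non-platform) sources; a specific include line can opt out with a trailing "// ok include".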
BANNED_HEADERS_CORE = [
"assert.h",
"iostream",
"stdio.h",
"cstdio",
"cstdlib",
"vector",
"list",
"map",
"set",
"queue",
"deque",
"algorithm",
"memory",
"thread",
"mutex",
"chrono",
"fstream",
"sstream",
"iomanip",
"exception",
"stdexcept",
"typeinfo",
"ctime",
"cmath",
"complex",
"valarray",
"cfloat",
"cassert",
"cerrno",
"cctype",
"cwctype",
"cstring",
"cwchar",
"cuchar",
"cstdint",
"cstddef", # this certainally fails
"type_traits", # this certainally fails
"Arduino.h",
] + BANNED_HEADERS_ESP
EXCLUDED_FILES = [
"stub_main.cpp",
]
class TestNoBannedHeaders(unittest.TestCase):
def check_file(self, file_path: str) -> list[str]:
failings: list[str] = []
        banned_headers_list = BANNED_HEADERS_CORE  # core rules apply outside src/platforms
if file_path.startswith(PLATFORMS_DIR):
# continue # Skip the platforms directory
if file_path.startswith(PLATFORMS_ESP_DIR):
banned_headers_list = BANNED_HEADERS_ESP
else:
return failings
if len(banned_headers_list) == 0:
return failings
with open(file_path, "r", encoding="utf-8") as f:
for line_number, line in enumerate(f, 1):
if line.startswith("//"):
continue
for header in banned_headers_list:
if (
f"#include <{header}>" in line or f'#include "{header}"' in line
) and "// ok include" not in line:
failings.append(
f"Found banned header '{header}' in {file_path}:{line_number}"
)
return failings
def test_no_banned_headers(self) -> None:
"""Searches through the program files to check for banned headers, excluding src/platforms."""
files_to_check = []
for root, _, files in os.walk(SRC_ROOT):
for file in files:
if file.endswith(
(".cpp", ".h", ".hpp")
): # Add or remove file extensions as needed
file_path = os.path.join(root, file)
if not any(
file_path.endswith(excluded) for excluded in EXCLUDED_FILES
):
files_to_check.append(file_path)
all_failings = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
if all_failings:
msg = f"Found {len(all_failings)} banned header(s): \n" + "\n".join(
all_failings
)
for failing in all_failings:
print(failing)
self.fail(
msg + "\n"
"You can add '// ok include' at the end of the line to silence this error for specific inclusions."
)
else:
print("No banned headers found.")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,60 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from ci.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
PLATFORMS_DIR = os.path.join(SRC_ROOT, "platforms")
NUM_WORKERS = (os.cpu_count() or 1) * 4
class NoUsingNamespaceFlInHeaderTester(unittest.TestCase):
def check_file(self, file_path) -> list[str]:
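        # FastLED.h is the umbrella header and is the only file exempt from this check.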
if "FastLED.h" in file_path:
return []
failings: list[str] = []
with open(file_path, "r", encoding="utf-8") as f:
for line_number, line in enumerate(f, 1):
if line.startswith("//"):
continue
if "using namespace fl;" in line:
failings.append(f"{file_path}:{line_number}: {line.strip()}")
return failings
def test_no_using_namespace(self) -> None:
"""Searches through the program files to check for banned headers, excluding src/platforms."""
files_to_check = []
for root, _, files in os.walk(SRC_ROOT):
for file in files:
if file.endswith(
(".h", ".hpp")
): # Add or remove file extensions as needed
file_path = os.path.join(root, file)
files_to_check.append(file_path)
all_failings = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
if all_failings:
msg = (
f'Found {len(all_failings)} header file(s) "using namespace fl": \n'
+ "\n".join(all_failings)
)
for failing in all_failings:
print(failing)
self.fail(msg)
else:
print("No using namespace fl; found in headers.")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,79 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from ci.paths import PROJECT_ROOT
NUM_WORKERS = (os.cpu_count() or 1) * 4
WASM_ROOT = PROJECT_ROOT / "src" / "platforms" / "wasm"
class TestMissingPragmaOnce(unittest.TestCase):
def check_file(self, file_path: str) -> list[str]:
"""Check if a header file has #pragma once directive or if a cpp file incorrectly has it."""
failings: list[str] = []
with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
if file_path.endswith(".h") or file_path.endswith(".cpp"):
content = f.read()
                # Files with inline EM_ASM_ blocks must disable formatting with the exact line "// clang-format off"
if "EM_ASM_" in content and "// clang-format off\n" not in content:
if "clang-format off" not in content:
failings.append(f"Missing clang-format off in {file_path}")
else:
failings.append(f"clang-format off is malformed in {file_path}")
return failings
def test_esm_asm_and_clang_format(self) -> None:
files_to_check = []
current_dir = None
# Collect files to check
for root, _, files in os.walk(WASM_ROOT):
# Log when we enter a new directory
rel_path = os.path.relpath(root, WASM_ROOT)
if current_dir != rel_path:
current_dir = rel_path
print(f"Traversing directory: {rel_path}")
for file in files:
if file.endswith((".h", ".cpp")): # Check both header and cpp files
file_path = os.path.join(root, file)
files_to_check.append(file_path)
print(f"Found {len(files_to_check)} files to check")
# Process files in parallel
all_failings = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
# Report results
if all_failings:
msg = (
f"Found {len(all_failings)} clang format issues in wasm: \n"
+ "\n".join(all_failings)
)
for failing in all_failings:
print(failing)
print(
"Please be aware you need // then one space then clang-format off then a new line exactly"
)
self.fail(msg)
else:
print("All files passed the check.")
print(f"Clange format check completed. Processed {len(files_to_check)} files.")
if __name__ == "__main__":
unittest.main()

View file

@ -0,0 +1,63 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from ci.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
# PLATFORMS_DIR = os.path.join(SRC_ROOT, "platforms")
NUM_WORKERS = (os.cpu_count() or 1) * 4
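# Disallowed preprocessor patterns mapped to the message reported when they are found; presence checks must use #ifdef rather than #if / #if defined(...).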
WRONG_DEFINES: dict[str, str] = {
"#if ESP32": "Use #ifdef ESP32 instead of #if ESP32",
"#if defined(FASTLED_RMT5)": "Use #ifdef FASTLED_RMT5 instead of #if defined(FASTLED_RMT5)",
"#if defined(FASTLED_ESP_HAS_CLOCKLESS_SPI)": "Use #ifdef FASTLED_ESP_HAS_CLOCKLESS_SPI instead of #if defined(FASTLED_ESP_HAS_CLOCKLESS_SPI)",
}
class TestWrongDefines(unittest.TestCase):
def check_file(self, file_path) -> list[str]:
failings = []
with open(file_path, "r", encoding="utf-8") as f:
for line_number, line in enumerate(f, 1):
line = line.strip()
if line.startswith("//"):
continue
for needle, message in WRONG_DEFINES.items():
if needle in line:
failings.append(f"{file_path}:{line_number}: {message}")
return failings
def test_no_bad_defines(self) -> None:
"""Searches through the program files to check for banned headers, excluding src/platforms."""
files_to_check = []
for root, _, files in os.walk(SRC_ROOT):
for file in files:
if file.endswith(
(".cpp", ".h", ".hpp")
): # Add or remove file extensions as needed
file_path = os.path.join(root, file)
files_to_check.append(file_path)
all_failings = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
if all_failings:
msg = f"Found {len(all_failings)} bad defines: \n" + "\n".join(all_failings)
for failing in all_failings:
print(failing)
self.fail("Please fix the defines: \n" + msg + "\n")
else:
print("No bad defines found.")
if __name__ == "__main__":
unittest.main()

Binary file not shown.

View file

@ -0,0 +1,237 @@
:100000000C9435000C945D000C945D000C945D0024
:100010000C945D000C945D000C945D000C945D00EC
:100020000C945D000C945D000C945D000C945D00DC
:100030000C945D000C945D000C945D000C945D00CC
:100040000C948D050C945D000C945D000C945D0087
:100050000C945D000C945D000C945D000C945D00AC
:100060000C945D000C945D00BB0611241FBECFEF05
:10007000D8E0DEBFCDBF11E0A0E0B1E0EEE8FEE0E9
:1000800002C005900D92AE32B107D9F721E0AEE281
:10009000B1E001C01D92A637B207E1F710E0C5E359
:1000A000D0E004C02197FE010E943A07C433D10773
:1000B000C9F70E94D7050C9445070C9400003FB780
:1000C000F8948091550190915601A0915701B091FB
:1000D000580126B5A89B05C02F3F19F00196A11D18
:1000E000B11D3FBFBA2FA92F982F8827BC01CD0182
:1000F000620F711D811D911D42E0660F771F881FE1
:10010000991F4A95D1F708952F923F924F925F928F
:100110006F927F928F929F92AF92BF92CF92DF9217
:10012000EF92FF920F931F93CF93DF93CDB7DEB77C
:10013000C358D1090FB6F894DEBF0FBECDBF782EDD
:100140008091660190916701A0916801B091690169
:10015000892B8A2B8B2BD9F00E945F0000917201B2
:10016000109173012091740130917501601B710B26
:10017000820B930B00916601109167012091680139
:10018000309169016017710782079307C8F20E94D6
:100190005F00609372017093730180937401909378
:1001A0007501E0916E01F0916F01309759F0409127
:1001B0006A0150916B0160916C0170916D01872D06
:1001C0000995782EFE01319680E8DF011D928A950F
:1001D000E9F7E0907001F09071016F015E01B1E804
:1001E000AB0EB11C4F01472C512CE114F10409F462
:1001F0005CC08A149B0409F458C0D701ED91FC91AE
:100200000484F585E02DC7010995F401819391934C
:100210004F0180916401909165018436910518F435
:10022000D7011C961C92D701ED91FC91228033805E
:10023000F501108211821282772019F187010A5F7D
:100240001F4FF0E0E0E0D8012D918D012223A9F0AD
:1002500012966C90662089F030E02F5F3F4FD201FC
:100260000E9405079B01AC01A62DB0E011960E94EB
:100270001B07D501AE0FBF1F8C933196E330F105FC
:1002800011F7F501008111812281D7011D964D9151
:100290005C911E9712966D917C91C701F1010995B1
:1002A000D7011496ED90FC90A0CF009170011091B1
:1002B00071010115110599F0CA14DB0481F0F601F2
:1002C000619171916F01D801ED91FC910684F785E0
:1002D000E02DC8010995F80104811581EACF8091CC
:1002E0006101909162019C012F5F3F4F3093620149
:1002F0002093610149970CF44AC08FB7F89420917C
:10030000590130915A0140915B0150915C018FBFBE
:1003100080915D0190915E01A0915F01B0916001BB
:10032000281B390B4A0B5B0B21F421E030E040E045
:1003300050E0E0916101F091620188EE93E0E89F66
:10034000B001E99F700DF89F700D1124072E000C6D
:10035000880B990B0E94E30630936501209364019A
:1003600010926201109261012FB7F89480915901A7
:1003700090915A01A0915B01B0915C012FBF8093D5
:100380005D0190935E01A0935F01B0936001CD5732
:10039000DF4F0FB6F894DEBF0FBECDBFDF91CF9118
:1003A0001F910F91FF90EF90DF90CF90BF90AF9093
:1003B0009F908F907F906F905F904F903F902F9085
:1003C00008958F929F92AF92BF92CF92DF92EF9259
:1003D000FF920E945F004B015C0184EFC82EDD2478
:1003E000D394E12CF12C0E945F00681979098A09E5
:1003F0009B09683E734081059105A8F321E0C21A6C
:10040000D108E108F10888EE880E83E0981EA11C4F
:10041000B11CC114D104E104F10429F7FF90EF905D
:10042000DF90CF90BF90AF909F908F90089580E91C
:1004300091E008950F931F93CF93DF9320912F01A5
:100440002F5F322F377030932F0120FF2BC020E811
:1004500031FD2064347009F02062205FFC01EC0162
:10046000239600E011E06485662329F070E0C8015E
:100470000E94CF066F5F6187822F869F080E80E003
:10048000811D1124811110C01682662311F0615064
:1004900061873196EC17FD0731F7DF91CF911F91FE
:1004A0000F91089520E0D4CF81508683EECF4F92F4
:1004B0005F927F928F929F92AF92BF92CF92DF9284
:1004C000EF92FF920F931F93CF93DF932C01EB01D9
:1004D0000E945F00F20125893689621B730B6A3026
:1004E0007105B0F3F8948A819B81181619060CF0F7
:1004F000CDC1E881F9816BB1862E689483F83BB158
:10050000377F3BB9DA848F812D2D281B822F2F83D3
:100510004F85042E000C550BAA81BB817D85FC8480
:10052000EE847F5FF394E3949E819884B984AB84D6
:100530001181C12C6C2D0C2D2C2D2181112788941B
:100540002111280F08F42FEF8195889470FD120F68
:100550001795889471FD120F1795889472FD120FEC
:100560001795889473FD120F1795889474FD120FD8
:100570001795889475FD120F1795889476FD120FC4
:100580001795889477FD120F17958894622F711133
:10059000612F8D0D162F002C8BB800C017FF3BB9B3
:1005A00020816627889400C000C0002C3BB921112F
:1005B000290F00C0002C8BB800C016FF3BB908F40F
:1005C0002FEF9195889400C000C0002C3BB9F0FC3F
:1005D000620F00C0002C8BB800C015FF3BB96795B7
:1005E0008894F1FC620F00C000C0002C3BB96795F5
:1005F000889400C0002C8BB800C014FF3BB9F2FCFB
:10060000620F6795889400C000C0002C3BB9F3FCD2
:10061000620F00C0002C8BB800C013FF3BB9679578
:100620008894F4FC620F00C000C0002C3BB96795B1
:10063000889400C0002C8BB800C012FF3BB9F5FCB9
:10064000620F6795889400C000C0002C3BB9F6FC8F
:10065000620F00C0002C8BB800C011FF3BB967953A
:100660008894F7FC620F00C000C0002C3BB967956E
:10067000889400C0002C8BB800C010FF3BB9122F2B
:10068000F110162F9B0D00C000C0002C3BB900C01C
:1006900000C0002C8BB800C017FF3BB92281662731
:1006A000889400C000C0002C3BB92111290D00C066
:1006B000002C8BB800C016FF3BB908F42FEFE40FF5
:1006C000F51F00C000C0002C3BB9E0FC620F00C069
:1006D000002C8BB800C015FF3BB967958894E1FCEE
:1006E000620F00C000C0002C3BB96795889400C021
:1006F000002C8BB800C014FF3BB9E2FC620F679579
:10070000889400C000C0002C3BB9E3FC620F00C01D
:10071000002C8BB800C013FF3BB967958894E4FCAC
:10072000620F00C000C0002C3BB96795889400C0E0
:10073000002C8BB800C012FF3BB9E5FC620F679537
:10074000889400C000C0002C3BB9E6FC620F00C0DA
:10075000002C8BB800C011FF3BB967958894E7FC6B
:10076000620F00C000C0002C3BB96795889400C0A0
:10077000002C8BB800C010FF3BB9122FE110162FD0
:10078000919400C000C0002C3BB99A0C00C000C07E
:100790008BB800C017FF3BB921816627889400C041
:1007A00000C0002C3BB92111280F00C0002C8BB8D1
:1007B00000C016FF3BB908F42FEF8195889400C064
:1007C00000C0002C3BB970FD620F00C0002C8BB83C
:1007D00000C015FF3BB96795889471FD620F00C09A
:1007E00000C0002C3BB96795889400C0002C8BB8E2
:1007F00000C014FF3BB972FD620F6795889400C07A
:1008000000C0002C3BB973FD620F00C0002C8BB8F8
:1008100000C013FF3BB96795889474FD620F00C058
:1008200000C0002C3BB96795889400C0002C8BB8A1
:1008300000C012FF3BB975FD620F6795889400C038
:1008400000C0002C3BB976FD620F00C0002C8BB8B5
:1008500000C011FF3BB96795889477FD620F00C017
:1008600000C0002C3BB96795889400C0002C8BB861
:1008700000C010FF3BB9122F7111162F8D0D00C053
:1008800000C0002C3BB9119709F086CE4A815B81EC
:1008900020EE31E0DA010E941407DC01CB01F4E024
:1008A000B695A79597958795FA95D1F730E020E012
:1008B000B901EAE94E9F040E611D5E9F600D711D36
:1008C0001124650F711D860F971FA11DB11D893E53
:1008D00043E09407A105B10508F434C0885E934055
:1008E000A109B10942E0B695A795979587954A95D4
:1008F000D1F747E0849F080E211D949F200D311DE4
:100900001124290F311D60912E0170E0860F971F71
:10091000820F931F4091590150915A0160915B01E0
:1009200070915C01292F3327420F531F611D711DE8
:100930004093590150935A0160935B0170935C019D
:1009400080932E0178940E945F00F201768B658B74
:10095000DF91CF911F910F91FF90EF90DF90CF909B
:10096000BF90AF909F908F907F905F904F90089531
:1009700081E090E00895539A08956F927F928F924C
:10098000CF92DF92EF92FF920F931F93CF93DF935B
:10099000CDB7DEB762970FB6F894DEBF0FBECDBFFE
:1009A0006C017A013801822EDC011C962C91CA015F
:1009B00057FF04C088279927841B950B7A83698386
:1009C0009C838B839E838D836D867E868F8621306C
:1009D00049F5CE0101960E941A0283E0888B1A8A9B
:1009E000198AF7FE02C08DEF888BD601ED91FC913C
:1009F0000288F389E02DBE016F5F7F4FC601099524
:100A000062960FB6F894DEBF0FBECDBFDF91CF91D7
:100A10001F910F91FF90EF90DF90CF908F907F907C
:100A20006F9008951C861B861A86198618861F8269
:100A3000D4CFEF92FF920F931F93CF93DF93CDB755
:100A4000DEB762970FB6F894DEBF0FBECDBF7C0154
:100A5000DC011C968C917A8369835C834B835E8373
:100A60004D830D871E872F878130F9F4CE010196C3
:100A70000E941A02188A1A8A198AD701ED91FC91EC
:100A80000288F389E02DBE016F5F7F4FC701099592
:100A900062960FB6F894DEBF0FBECDBFDF91CF9147
:100AA0001F910F91FF90EF9008951C861B861A8668
:100AB000198618861F82DECF90E080E00895FC0141
:100AC00064870895FC01848590E00895FC01858584
:100AD000968508950F931F93CF93DF9300D01F92B5
:100AE000CDB7DEB7AB0119821A821B82DC01ED9112
:100AF000FC910190F081E02D00E010E020E0BE01CB
:100B00006F5F7F4F09950F900F900F90DF91CF91FE
:100B10001F910F9108950E9440071F920F920FB6E8
:100B20000F9211242F933F938F939F93AF93BF9373
:100B30008091590190915A01A0915B01B0915C01A3
:100B40003091540123E0230F2D3758F50196A11D54
:100B5000B11D209354018093590190935A01A093A1
:100B60005B01B0935C018091550190915601A09179
:100B70005701B09158010196A11DB11D80935501F7
:100B800090935601A0935701B0935801BF91AF9134
:100B90009F918F913F912F910F900FBE0F901F90BB
:100BA000189526E8230F0296A11DB11DD2CF789487
:100BB00084B5826084BD84B5816084BD85B5826062
:100BC00085BD85B5816085BD80916E008160809313
:100BD0006E0010928100809181008260809381007C
:100BE000809181008160809381008091800081608C
:100BF000809380008091B10084608093B1008091E7
:100C0000B00081608093B00080917A00846080930E
:100C10007A0080917A00826080937A0080917A00D5
:100C2000816080937A0080917A00806880937A0056
:100C30001092C10080914901811155C01092350177
:100C4000109234018FEF80933801809339018093A3
:100C50003A0180933B0180933C0180933D0181E008
:100C600080933E011092400110923F0180E797E18E
:100C7000909342018093410183E090E0909344017E
:100C80008093430110924601109245011092370162
:100C9000109236018091700190917101892B31F48D
:100CA00082E391E09093710180937001E0913001B3
:100CB000F0913101309721F082E391E095838483B4
:100CC00082E391E0909331018093300110924801CA
:100CD000109247018AE191E09093330180933201B1
:100CE00081E080934901539A81E591E09093350129
:100CF0008093340181E090E09093400180933F0124
:100D00008091660190916701A0916801B09169019D
:100D1000843C29E09207A105B10520F484EC99E018
:100D2000A0E0B0E08093660190936701A093680112
:100D3000B0936901CFEF00E010E0C0935101109231
:100D4000520110925301809163010E9484000E941D
:100D5000E1011092510110925201109253018091C1
:100D600063010E9484000E94E1010115110529F32D
:100D70000E940000E2CFE3E6F1E08FEF8083128271
:100D80001182148613868FEF9FEFDC018783908793
:100D9000A187B2871382148215821682089597FB69
:100DA000072E16F4009407D077FD09D00E9426077D
:100DB00007FC05D03EF4909581959F4F089570955E
:100DC00061957F4F0895A1E21A2EAA1BBB1BFD015E
:100DD0000DC0AA1FBB1FEE1FFF1FA217B307E4071A
:100DE000F50720F0A21BB30BE40BF50B661F771F72
:100DF000881F991F1A9469F7609570958095909552
:100E00009B01AC01BD01CF010895A29FB001B39F2A
:100E1000C001A39F700D811D1124911DB29F700D03
:100E2000811D1124911D08950E940507B7FF0895A3
:100E3000821B930B08950E940507A59F900DB49FF8
:100E4000900DA49F800D911D11240895AA1BBB1B1A
:100E500051E107C0AA1FBB1FA617B70710F0A61BBA
:100E6000B70B881F991F5A95A9F780959095BC01DB
:100E7000CD010895EE0FFF1F0590F491E02D099428
:0E0E800081E090E0F8940C944507F894FFCFC1
:100E8E00000000008B058B058B056A056605B8040E
:100E9E0062055F055C05000000001905BD04BB047A
:0E0EAE006A056605B80462055F051702570263
:00000001FF

View file

@ -0,0 +1,789 @@
Archive member included to satisfy reference by file (symbol)
.pio\build\uno\lib6ec\libsrc.a(FastLED.cpp.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (_ZN14CLEDController7m_pHeadE)
.pio\build\uno\lib6ec\libsrc.a(crgb.cpp.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (_ZN4CRGB17computeAdjustmentEhRKS_S1_)
.pio\build\uno\lib6ec\libsrc.a(lib8tion.cpp.o)
FastLED.cpp.o (symbol from plugin) (memset8)
.pio\build\uno\libFrameworkArduino.a(abi.cpp.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (__cxa_pure_virtual)
.pio\build\uno\libFrameworkArduino.a(hooks.c.o)
FastLED.cpp.o (symbol from plugin) (yield)
.pio\build\uno\libFrameworkArduino.a(main.cpp.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o (main)
.pio\build\uno\libFrameworkArduino.a(wiring.c.o)
.pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin) (timer0_millis)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o (exit)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__divmodhi4)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__udivmodsi4)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__do_copy_data)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__do_clear_bss)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__do_global_ctors)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__umulhisi3)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__usmulhisi3)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (__muluhisi3)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o) (__udivmodhi4)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o) (__tablejump2__)
c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o (abort)
Discarded input sections
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.text 0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController4sizeEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController5lanesEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13beginShowLedsEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController11endShowLedsEPv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZNK14CLEDController17getMaxRefreshRateEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE5lanesEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZNK19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE17getMaxRefreshRateEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDControllerC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV14CLEDController
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13getAdjustmentEh
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController9showColorERK4CRGBih
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController9clearLedsEi
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CFastLED4showEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN22WS2812Controller800KhzILh3EL6EOrder66EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV22WS2812Controller800KhzILh3EL6EOrder66EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8NEOPIXELILh3EEC5Ev
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZTV8NEOPIXELILh3EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CFastLED7addLedsI8NEOPIXELLh3EEER14CLEDControllerP4CRGBii
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZGVZN8CFastLED7addLedsI8NEOPIXELLh3EEER14CLEDControllerP4CRGBiiE1c
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZZN8CFastLED7addLedsI8NEOPIXELLh3EEER14CLEDControllerP4CRGBiiE1c
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN7_AVRPINILh3ELh8E18__gen_struct_PORTD17__gen_struct_DDRD17__gen_struct_PINDE9setOutputEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE4initEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CMinWaitILi10EE4waitEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE15showRGBInternalER15PixelControllerILS0_66ELi1ELm4294967295EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CMinWaitILi10EE4markEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19ClocklessControllerILh3ELi4ELi10ELi6EL6EOrder66ELi0ELb0ELi10EE10showPixelsER15PixelControllerILS0_66ELi1ELm4294967295EE
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE11initOffsetsEi
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE21init_binary_ditheringEv
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE21init_binary_ditheringEvE1R
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EE16enable_ditheringEh
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EEC5EPK4CRGBiRS2_h
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE4showEPK4CRGBiS2_
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN15PixelControllerIL6EOrder66ELi1ELm4294967295EEC5ERK4CRGBiRS2_h
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN19CPixelLEDControllerIL6EOrder66ELi1ELm4294967295EE9showColorERK4CRGBiS2_
0x00000000 0x0 .pio\build\uno\src\Blink.ino.cpp.o (symbol from plugin)
.data 0x00000000 0x0 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
.text 0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController12clearLedDataEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13beginShowLedsEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController11endShowLedsEPv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController13getAdjustmentEh
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController8showLedsEh
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN14CLEDController9showColorERK4CRGBh
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZNK14CLEDController17getMaxRefreshRateEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.gnu.linkonce.t._ZN8CFastLED4showEv
0x00000000 0x0 FastLED.cpp.o (symbol from plugin)
.text 0x00000000 0x0 crgb.cpp.o (symbol from plugin)
.text 0x00000000 0x0 lib8tion.cpp.o (symbol from plugin)
.text 0x00000000 0x0 abi.cpp.o (symbol from plugin)
.text 0x00000000 0x0 hooks.c.o (symbol from plugin)
.text 0x00000000 0x0 main.cpp.o (symbol from plugin)
.text 0x00000000 0x0 wiring.c.o (symbol from plugin)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.mul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.div
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.prologue
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.builtins
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.fmul
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text.libgcc.fixed
0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.text 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
.data 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
.bss 0x00000000 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
Memory Configuration
Name Origin Length Attributes
text 0x00000000 0x00020000 xr
data 0x00800060 0x0000ffa0 rw !x
eeprom 0x00810000 0x00010000 rw !x
fuse 0x00820000 0x00000003 rw !x
lock 0x00830000 0x00000400 rw !x
signature 0x00840000 0x00000400 rw !x
user_signatures 0x00850000 0x00000400 rw !x
*default* 0x00000000 0xffffffff
Linker script and memory map
Address of section .data set to 0x800100
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
LOAD .pio\build\uno\src\Blink.ino.cpp.o
LOAD C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
START GROUP
LOAD .pio\build\uno\liba19\libSoftwareSerial.a
LOAD .pio\build\uno\lib8b0\libSPI.a
LOAD .pio\build\uno\lib6ec\libsrc.a
LOAD .pio\build\uno\libFrameworkArduinoVariant.a
LOAD .pio\build\uno\libFrameworkArduino.a
END GROUP
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libm.a
START GROUP
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libm.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a
LOAD c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libatmega328p.a
END GROUP
0x00020000 __TEXT_REGION_LENGTH__ = DEFINED (__TEXT_REGION_LENGTH__)?__TEXT_REGION_LENGTH__:0x20000
0x0000ffa0 __DATA_REGION_LENGTH__ = DEFINED (__DATA_REGION_LENGTH__)?__DATA_REGION_LENGTH__:0xffa0
0x00010000 __EEPROM_REGION_LENGTH__ = DEFINED (__EEPROM_REGION_LENGTH__)?__EEPROM_REGION_LENGTH__:0x10000
[0x00000003] __FUSE_REGION_LENGTH__ = DEFINED (__FUSE_REGION_LENGTH__)?__FUSE_REGION_LENGTH__:0x400
0x00000400 __LOCK_REGION_LENGTH__ = DEFINED (__LOCK_REGION_LENGTH__)?__LOCK_REGION_LENGTH__:0x400
0x00000400 __SIGNATURE_REGION_LENGTH__ = DEFINED (__SIGNATURE_REGION_LENGTH__)?__SIGNATURE_REGION_LENGTH__:0x400
0x00000400 __USER_SIGNATURE_REGION_LENGTH__ = DEFINED (__USER_SIGNATURE_REGION_LENGTH__)?__USER_SIGNATURE_REGION_LENGTH__:0x400
.hash
*(.hash)
.dynsym
*(.dynsym)
.dynstr
*(.dynstr)
.gnu.version
*(.gnu.version)
.gnu.version_d
*(.gnu.version_d)
.gnu.version_r
*(.gnu.version_r)
.rel.init
*(.rel.init)
.rela.init
*(.rela.init)
.rel.text
*(.rel.text)
*(.rel.text.*)
*(.rel.gnu.linkonce.t*)
.rela.text
*(.rela.text)
*(.rela.text.*)
*(.rela.gnu.linkonce.t*)
.rel.fini
*(.rel.fini)
.rela.fini
*(.rela.fini)
.rel.rodata
*(.rel.rodata)
*(.rel.rodata.*)
*(.rel.gnu.linkonce.r*)
.rela.rodata
*(.rela.rodata)
*(.rela.rodata.*)
*(.rela.gnu.linkonce.r*)
.rel.data
*(.rel.data)
*(.rel.data.*)
*(.rel.gnu.linkonce.d*)
.rela.data
*(.rela.data)
*(.rela.data.*)
*(.rela.gnu.linkonce.d*)
.rel.ctors
*(.rel.ctors)
.rela.ctors
*(.rela.ctors)
.rel.dtors
*(.rel.dtors)
.rela.dtors
*(.rela.dtors)
.rel.got
*(.rel.got)
.rela.got
*(.rela.got)
.rel.bss
*(.rel.bss)
.rela.bss
*(.rela.bss)
.rel.plt
*(.rel.plt)
.rela.plt
*(.rela.plt)
.text 0x00000000 0xe8e
*(.vectors)
.vectors 0x00000000 0x68 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
0x00000000 __vector_default
0x00000000 __vectors
*(.vectors)
*(.progmem.gcc*)
0x00000068 . = ALIGN (0x2)
0x00000068 __trampolines_start = .
*(.trampolines)
.trampolines 0x00000068 0x0 linker stubs
*(.trampolines*)
0x00000068 __trampolines_end = .
*libprintf_flt.a:*(.progmem.data)
*libc.a:*(.progmem.data)
*(.progmem*)
0x00000068 . = ALIGN (0x2)
*(.jumptables)
*(.jumptables*)
*(.lowtext)
*(.lowtext*)
0x00000068 __ctors_start = .
*(.ctors)
.ctors 0x00000068 0x2 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x0000006a __ctors_end = .
0x0000006a __dtors_start = .
*(.dtors)
0x0000006a __dtors_end = .
SORT(*)(.ctors)
SORT(*)(.dtors)
*(.init0)
.init0 0x0000006a 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
0x0000006a __init
*(.init0)
*(.init1)
*(.init1)
*(.init2)
.init2 0x0000006a 0xc c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
*(.init2)
*(.init3)
*(.init3)
*(.init4)
.init4 0x00000076 0x16 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
0x00000076 __do_copy_data
.init4 0x0000008c 0x10 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
0x0000008c __do_clear_bss
*(.init4)
*(.init5)
*(.init5)
*(.init6)
.init6 0x0000009c 0x16 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
0x0000009c __do_global_ctors
*(.init6)
*(.init7)
*(.init7)
*(.init8)
*(.init8)
*(.init9)
.init9 0x000000b2 0x8 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
*(.init9)
*(.text)
.text 0x000000ba 0x4 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
0x000000ba __vector_22
0x000000ba __vector_1
0x000000ba __vector_24
0x000000ba __vector_12
0x000000ba __bad_interrupt
0x000000ba __vector_6
0x000000ba __vector_3
0x000000ba __vector_23
0x000000ba __vector_25
0x000000ba __vector_11
0x000000ba __vector_13
0x000000ba __vector_17
0x000000ba __vector_19
0x000000ba __vector_7
0x000000ba __vector_5
0x000000ba __vector_4
0x000000ba __vector_9
0x000000ba __vector_2
0x000000ba __vector_21
0x000000ba __vector_15
0x000000ba __vector_8
0x000000ba __vector_14
0x000000ba __vector_10
0x000000ba __vector_18
0x000000ba __vector_20
.text 0x000000be 0xaf0 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x00000b1a __vector_16
0x00000bae . = ALIGN (0x2)
*(.text.*)
.text.startup 0x00000bae 0x1f0 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x00000bae main
.text.libgcc.div
0x00000d9e 0x28 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
0x00000d9e _div
0x00000d9e __divmodhi4
.text.libgcc.div
0x00000dc6 0x44 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
0x00000dc6 __udivmodsi4
.text.libgcc.mul
0x00000e0a 0x1e c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
0x00000e0a __umulhisi3
.text.libgcc.mul
0x00000e28 0xe c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
0x00000e28 __usmulhisi3
0x00000e2c __usmulhisi3_tail
.text.libgcc.mul
0x00000e36 0x16 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
0x00000e36 __muluhisi3
.text.libgcc.div
0x00000e4c 0x28 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
0x00000e4c __udivmodhi4
.text.libgcc 0x00000e74 0xc c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
0x00000e74 __tablejump2__
.text.avr-libc
0x00000e80 0xa c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5\libc.a(abort.o)
0x00000e80 abort
0x00000e8a . = ALIGN (0x2)
*(.fini9)
.fini9 0x00000e8a 0x0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
0x00000e8a _exit
0x00000e8a exit
*(.fini9)
*(.fini8)
*(.fini8)
*(.fini7)
*(.fini7)
*(.fini6)
*(.fini6)
*(.fini5)
*(.fini5)
*(.fini4)
*(.fini4)
*(.fini3)
*(.fini3)
*(.fini2)
*(.fini2)
*(.fini1)
*(.fini1)
*(.fini0)
.fini0 0x00000e8a 0x4 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
*(.fini0)
0x00000e8e _etext = .
.data 0x00800100 0x2e load address 0x00000e8e
0x00800100 PROVIDE (__data_start, .)
*(.data)
*(.data*)
*(.gnu.linkonce.d*)
*(.rodata)
.rodata 0x00800100 0x2e C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
*(.rodata*)
*(.gnu.linkonce.r*)
0x0080012e . = ALIGN (0x2)
0x0080012e _edata = .
0x0080012e PROVIDE (__data_end, .)
.bss 0x0080012e 0x48
0x0080012e PROVIDE (__bss_start, .)
*(.bss)
.bss 0x0080012e 0x48 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
*(.bss*)
*(COMMON)
0x00800176 PROVIDE (__bss_end, .)
0x00000e8e __data_load_start = LOADADDR (.data)
0x00000ebc __data_load_end = (__data_load_start + SIZEOF (.data))
.noinit 0x00800176 0x0
[!provide] PROVIDE (__noinit_start, .)
*(.noinit*)
[!provide] PROVIDE (__noinit_end, .)
0x00800176 _end = .
[!provide] PROVIDE (__heap_start, .)
.eeprom 0x00810000 0x0
*(.eeprom*)
0x00810000 __eeprom_end = .
.fuse
*(.fuse)
*(.lfuse)
*(.hfuse)
*(.efuse)
.lock
*(.lock*)
.signature
*(.signature*)
.user_signatures
*(.user_signatures*)
.stab
*(.stab)
.stabstr
*(.stabstr)
.stab.excl
*(.stab.excl)
.stab.exclstr
*(.stab.exclstr)
.stab.index
*(.stab.index)
.stab.indexstr
*(.stab.indexstr)
.comment 0x00000000 0x11
*(.comment)
.comment 0x00000000 0x11 C:\Users\niteris\AppData\Local\Temp\ccAA6ajC.ltrans0.ltrans.o
0x12 (size before relaxing)
.note.gnu.avr.deviceinfo
0x00000000 0x40
.note.gnu.avr.deviceinfo
0x00000000 0x40 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.note.gnu.build-id
*(.note.gnu.build-id)
.debug
*(.debug)
.line
*(.line)
.debug_srcinfo
*(.debug_srcinfo)
.debug_sfnames
*(.debug_sfnames)
.debug_aranges 0x00000000 0x160
*(.debug_aranges)
.debug_aranges
0x00000000 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_aranges
0x00000020 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_aranges
0x00000040 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_aranges
0x00000060 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_aranges
0x00000080 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_aranges
0x000000a0 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_aranges
0x000000c0 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_aranges
0x000000e0 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_aranges
0x00000100 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_aranges
0x00000120 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_aranges
0x00000140 0x20 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_pubnames
*(.debug_pubnames)
.debug_info 0x00000000 0xdfd
*(.debug_info .gnu.linkonce.wi.*)
.debug_info 0x00000000 0x5f4 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_info 0x000005f4 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_info 0x000006af 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_info 0x0000076a 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_info 0x00000825 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_info 0x000008e0 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_info 0x0000099b 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_info 0x00000a56 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_info 0x00000b11 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_info 0x00000bcc 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_info 0x00000c87 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_info 0x00000d42 0xbb c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_abbrev 0x00000000 0x67e
*(.debug_abbrev)
.debug_abbrev 0x00000000 0x5a2 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_abbrev 0x000005a2 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_abbrev 0x000005b6 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_abbrev 0x000005ca 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_abbrev 0x000005de 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_abbrev 0x000005f2 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_abbrev 0x00000606 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_abbrev 0x0000061a 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_abbrev 0x0000062e 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_abbrev 0x00000642 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_abbrev 0x00000656 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_abbrev 0x0000066a 0x14 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_line 0x00000000 0x71a
*(.debug_line .debug_line.* .debug_line_end)
.debug_line 0x00000000 0x1a c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_line 0x0000001a 0x62 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_exit.o)
.debug_line 0x0000007c 0xc8 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_divmodhi4.o)
.debug_line 0x00000144 0x122 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodsi4.o)
.debug_line 0x00000266 0x98 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_copy_data.o)
.debug_line 0x000002fe 0x86 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_clear_bss.o)
.debug_line 0x00000384 0x92 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_ctors.o)
.debug_line 0x00000416 0xb0 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_umulhisi3.o)
.debug_line 0x000004c6 0x7a c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_usmulhisi3.o)
.debug_line 0x00000540 0x92 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_muluhisi3.o)
.debug_line 0x000005d2 0xce c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_udivmodhi4.o)
.debug_line 0x000006a0 0x7a c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/avr5\libgcc.a(_tablejump2.o)
.debug_frame
*(.debug_frame)
.debug_str 0x00000000 0x208
*(.debug_str)
.debug_str 0x00000000 0x208 c:/users/niteris/.platformio/packages/toolchain-atmelavr/bin/../lib/gcc/avr/7.3.0/../../../../avr/lib/avr5/crtatmega328p.o
.debug_loc
*(.debug_loc)
.debug_macinfo
*(.debug_macinfo)
.debug_weaknames
*(.debug_weaknames)
.debug_funcnames
*(.debug_funcnames)
.debug_typenames
*(.debug_typenames)
.debug_varnames
*(.debug_varnames)
.debug_pubtypes
*(.debug_pubtypes)
.debug_ranges
*(.debug_ranges)
.debug_macro
*(.debug_macro)
OUTPUT(.pio\build\uno\firmware.elf elf32-avr)
LOAD linker stubs
View file
@ -0,0 +1,46 @@
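"""Compile a FastLED wasm sketch by invoking the `fastled` CLI twice:
first with --build, then with --just-compile (descriptive docstring added; behavior taken from the code below)."""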
import argparse
import subprocess
import sys
from typing import List, Tuple


def parse_args() -> Tuple[argparse.Namespace, list[str]]:
    parser = argparse.ArgumentParser(description="Compile wasm")
    parser.add_argument(
        "sketch_dir",
        nargs="?",
        default="examples/wasm",
        help="The directory of the sketch to compile",
    )
    # return parser.parse_args()
    known_args, unknown_args = parser.parse_known_args()
    return known_args, unknown_args


def run_command(cmd_list: List[str]) -> int:
    """Run a command and return its exit code."""
    cmd_str = subprocess.list2cmdline(cmd_list)
    print(f"Running command: {cmd_str}")
    rtn = subprocess.call(cmd_list)
    if rtn != 0:
        print(f"ERROR: Command {cmd_str} failed with return code {rtn}")
    return rtn


def main() -> int:
    args, unknown_args = parse_args()

    # First run the build command
    build_cmd = ["fastled", args.sketch_dir, "--build"] + unknown_args
    build_result = run_command(build_cmd)

    # Then run the compile command
    compile_cmd = ["fastled", args.sketch_dir, "--just-compile"] + unknown_args
    compile_result = run_command(compile_cmd)

    # Return non-zero if either command failed
    return build_result if build_result != 0 else compile_result


if __name__ == "__main__":
    sys.exit(main())
View file
@ -0,0 +1,108 @@
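"""Browser smoke test for the compiled wasm output: serves examples/wasm/fastled_js
over HTTP and uses Playwright to verify that FastLED_onFrame is called in the page
(descriptive docstring added; behavior taken from the code below)."""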
import asyncio
import os
import sys
import time
from pathlib import Path

from playwright.async_api import async_playwright  # type: ignore

HERE = Path(__file__).parent
PROJECT_ROOT = HERE.parent


# Ensure Playwright browsers are installed
def install_playwright_browsers():
    print("Installing Playwright browsers...")
    try:
        # Simulate the `playwright install` command
        os.system(f"{sys.executable} -m playwright install chromium")
        print("Playwright browsers installed successfully.")
    except Exception as e:
        print(f"Failed to install Playwright browsers: {e}", file=sys.stderr)
        sys.exit(1)


# Start an HTTP server on the dynamic port
def start_http_server(port: int, directory: Path):
    from fastled import Test  # type: ignore

    server_process = Test.spawn_http_server(
        directory=directory, port=port, open_browser=False
    )
    return server_process


async def main() -> None:
    install_playwright_browsers()

    # Find an available port
    port = (
        8080  # Todo, figure out why the http server ignores any port other than 8080.
    )
    print(f"Using port: {port}")

    # Start the HTTP server
    os.chdir(str(PROJECT_ROOT))
    directory = Path("examples/wasm/fastled_js")
    server_process = start_http_server(port=port, directory=directory)

    try:
        # Give the server some time to start
        time.sleep(2)

        # Use Playwright to test the server
        async with async_playwright() as p:
            browser = await p.chromium.launch()
            page = await browser.new_page()
            try:
                await page.goto(f"http://localhost:{port}", timeout=30000)

                # Listen for console messages
                def console_log_handler(msg):
                    if "INVALID_OPERATION" in msg.text:
                        print(
                            "INVALID_OPERATION detected in console log", file=sys.stderr
                        )
                        raise Exception("INVALID_OPERATION detected in console log")

                page.on("console", console_log_handler)

                # Evaluate and monitor window.frameCallCount
                await page.evaluate(
                    """
                    window.frameCallCount = 0;
                    globalThis.FastLED_onFrame = (jsonStr) => {
                        console.log('FastLED_onFrame called with:', jsonStr);
                        window.frameCallCount++;
                    };
                    """
                )

                await page.wait_for_timeout(5000)

                call_count = await page.evaluate("window.frameCallCount")
                if call_count > 0:
                    print(
                        f"Success: FastLED.js was initialized and FastLED_onFrame was called {call_count} times"
                    )
                else:
                    print(
                        "Error: FastLED.js had something go wrong and FastLED_onFrame was not called within 5 seconds",
                        file=sys.stderr,
                    )
                    raise Exception("FastLED.js failed to initialize")
            except Exception as e:
                print(f"An error occurred: {e}", file=sys.stderr)
                raise Exception(f"An error occurred: {e}") from e
            finally:
                await browser.close()
    finally:
        # Terminate the server process
        server_process.terminate()


# Run the main function
if __name__ == "__main__":
    sys.exit(asyncio.run(main()))