updated libraries

This commit is contained in:
stuce-bot 2025-06-30 21:41:49 +02:00
parent d5d5d000b3
commit 3b7e68065a
102 changed files with 3020 additions and 1624 deletions

View file

@ -16,7 +16,7 @@ from ci.locked_print import locked_print
HERE = Path(__file__).parent.resolve()
LIBS = ["src", "ci"]
LIBS = ["src"]
EXTRA_LIBS = [
"https://github.com/me-no-dev/ESPAsyncWebServer.git",
"ArduinoOTA",
@ -45,6 +45,7 @@ DEFAULT_BOARDS_NAMES = [
"ATtiny1616",
"esp32c6",
"esp32s3",
"esp32p4",
"yun",
"digix",
"teensy30",
@ -69,16 +70,22 @@ OTHER_BOARDS_NAMES = [
# Examples to compile.
DEFAULT_EXAMPLES = [
"Animartrix",
"Apa102",
"Apa102HD",
"Apa102HDOverride",
"Audio",
"Blink",
"Blur",
"Chromancer",
"ColorPalette",
"ColorTemperature",
"Corkscrew",
"CompileTest",
"Cylon",
"DemoReel100",
"Downscale",
"FestivalStick",
"FirstLight",
"Fire2012",
"Multiple/MultipleStripsInOneArray",
@ -96,6 +103,8 @@ DEFAULT_EXAMPLES = [
"RGBWEmulated",
"TwinkleFox",
"XYMatrix",
"FireMatrix",
"FireCylinder",
"FxGfx2Video",
"FxSdCard",
"FxCylon",

View file

@ -11,6 +11,7 @@ ESP32_IDF_5_1_PIOARDUINO = "https://github.com/pioarduino/platform-espressif32/r
# TODO: Upgrade toolkit to 5.3
ESP32_IDF_5_3_PIOARDUINO = "https://github.com/pioarduino/platform-espressif32/releases/download/53.03.10/platform-espressif32.zip"
ESP32_IDF_5_4_PIOARDUINO = "https://github.com/pioarduino/platform-espressif32/releases/download/54.03.20/platform-espressif32.zip"
ESP32_IDF_5_1_PIOARDUINO_LATEST = (
"https://github.com/pioarduino/platform-espressif32.git#develop"
)
@ -22,6 +23,8 @@ APOLLO3_2_2_0 = "https://github.com/nigelb/platform-apollo3blue"
# Old fork that we were using
# ESP32_IDF_5_1_PIOARDUINO = "https://github.com/zackees/platform-espressif32#Arduino/IDF5"
# ALL will be auto populated in the Board constructor whenever a
# board is defined.
ALL: list["Board"] = []
@ -156,7 +159,7 @@ ESP32_C2_DEVKITM_1 = Board(
board_name="esp32c2",
real_board_name="esp32-c2-devkitm-1",
use_pio_run=True,
platform="https://github.com/Jason2866/platform-espressif32.git#Arduino/IDF5",
platform="https://github.com/pioarduino/platform-espressif32/releases/download/stable/platform-espressif32.zip",
defines=["CONFIG_IDF_TARGET_ESP32C2=1"],
)
@ -175,14 +178,7 @@ ESP32_C6_DEVKITC_1 = Board(
ESP32_S3_DEVKITC_1 = Board(
board_name="esp32s3",
real_board_name="seeed_xiao_esp32s3", # Seeed Xiao ESP32-S3 has psram.
platform=ESP32_IDF_5_3_PIOARDUINO,
defines=[
"BOARD_HAS_PSRAM",
],
build_flags=[ # Reserved for future use.
"-mfix-esp32-psram-cache-issue",
"-mfix-esp32-psram-cache-strategy=memw",
],
platform=ESP32_IDF_5_4_PIOARDUINO,
board_partitions="huge_app.csv", # Reserved for future use.
)
@ -199,6 +195,13 @@ ESP32_H2_DEVKITM_1 = Board(
platform=ESP32_IDF_5_3_PIOARDUINO,
)
ESP32_P4 = Board(
board_name="esp32p4",
real_board_name="esp32-p4-evboard",
platform_needs_install=True, # Install platform package to get the boards
platform="https://github.com/pioarduino/platform-espressif32/releases/download/stable/platform-espressif32.zip",
)
ADA_FEATHER_NRF52840_SENSE = Board(
board_name="adafruit_feather_nrf52840_sense",
platform="nordicnrf52",

View file

@ -0,0 +1,215 @@
import os
from abc import ABC, abstractmethod
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass, field
from typing import Dict, List, Optional

from ci.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
NUM_WORKERS = (os.cpu_count() or 1) * 4
EXCLUDED_FILES = [
"stub_main.cpp",
]
@dataclass
class FileContent:
    """Container for a file's path plus its content, both raw and split into lines."""

    path: str  # filesystem path the content was read from
    content: str  # full file text
    # Callers may omit this (or pass []); it is derived from `content` below.
    lines: List[str] = field(default_factory=list)

    def __post_init__(self) -> None:
        # Derive the line list lazily so callers only need to supply `content`.
        if not self.lines:
            self.lines = self.content.splitlines()
class FileContentChecker(ABC):
    """Interface for content checkers: choose which files to read, then inspect them."""

    @abstractmethod
    def should_process_file(self, file_path: str) -> bool:
        """Return True when *file_path* should be read and passed to check_file_content.

        Args:
            file_path: Path of the candidate file.

        Returns:
            True to process the file, False to skip it.
        """
        ...

    @abstractmethod
    def check_file_content(self, file_content: FileContent) -> List[str]:
        """Inspect a single file and describe any problems found.

        Args:
            file_content: The file's path, raw content, and split lines.

        Returns:
            Human-readable issue messages; empty list when the file is clean.
        """
        ...
class MultiCheckerFileProcessor:
    """Runs several FileContentChecker instances over a file set, reading each file once."""

    def __init__(self):
        pass

    def process_files_with_checkers(
        self, file_paths: List[str], checkers: List[FileContentChecker]
    ) -> Dict[str, List[str]]:
        """Apply every checker to every file it is interested in.

        Args:
            file_paths: Files to examine.
            checkers: Checker instances to consult for each file.

        Returns:
            Mapping from checker class name to the issues that checker reported.
        """
        # One result bucket per checker, keyed by class name.
        results: Dict[str, List[str]] = {
            chk.__class__.__name__: [] for chk in checkers
        }

        for path in file_paths:
            # Only read the file if at least one checker wants it.
            interested = [chk for chk in checkers if chk.should_process_file(path)]
            if not interested:
                continue
            try:
                with open(path, "r", encoding="utf-8") as handle:
                    text = handle.read()
                payload = FileContent(
                    path=path, content=text, lines=text.splitlines()
                )
                # Single read, fanned out to every interested checker.
                for chk in interested:
                    results[chk.__class__.__name__].extend(
                        chk.check_file_content(payload)
                    )
            except Exception as err:
                # Surface the failure to every checker that asked for this file.
                message = f"Error reading file {path}: {str(err)}"
                for chk in interested:
                    results[chk.__class__.__name__].append(message)

        return results
# Legacy compatibility classes
class FileProcessorCallback(FileContentChecker):
    """Backward-compatible shim exposing the old (path, content) call signature."""

    def check_file_content_legacy(self, file_path: str, content: str) -> List[str]:
        """Adapt the legacy arguments into a FileContent and delegate to check_file_content."""
        wrapped = FileContent(path=file_path, content=content, lines=[])
        return self.check_file_content(wrapped)
class GenericFileSearcher:
    """Walks a directory tree and applies a FileProcessorCallback to accepted files."""

    def __init__(self, max_workers: Optional[int] = None):
        # Fall back to the module-wide worker count when none is given.
        self.max_workers = max_workers or NUM_WORKERS

    def search_directory(
        self, start_dir: str, callback: FileProcessorCallback
    ) -> List[str]:
        """Find files the callback accepts under start_dir and check them in parallel.

        Args:
            start_dir: Root directory of the walk.
            callback: Decides which files to read and performs the check.

        Returns:
            All issues reported across every processed file.
        """
        # Gather every path the callback is willing to process.
        candidates = [
            os.path.join(root, name)
            for root, _, names in os.walk(start_dir)
            for name in names
            if callback.should_process_file(os.path.join(root, name))
        ]

        # Threads are fine here: the work is I/O-bound file reading.
        issues: List[str] = []
        with ThreadPoolExecutor(max_workers=self.max_workers) as pool:
            for found in pool.map(
                lambda path: self._process_single_file(path, callback), candidates
            ):
                issues.extend(found)
        return issues

    def _process_single_file(
        self, file_path: str, callback: FileProcessorCallback
    ) -> List[str]:
        """Read one file and run the callback on it.

        Args:
            file_path: File to read.
            callback: Checker to apply.

        Returns:
            Issues found in this file; read failures become a single error message.
        """
        try:
            with open(file_path, "r", encoding="utf-8") as handle:
                text = handle.read()
            payload = FileContent(path=file_path, content=text, lines=[])
            return callback.check_file_content(payload)
        except Exception as exc:
            return [f"Error processing file {file_path}: {str(exc)}"]
def collect_files_to_check(
    test_directories: List[str], extensions: List[str] | None = None
) -> List[str]:
    """Gather source files from the given directories plus top-level files in SRC_ROOT."""
    # Default to the C/C++ source extensions; tuple form suits str.endswith.
    exts = tuple(extensions) if extensions is not None else (".cpp", ".h", ".hpp")

    found: List[str] = []
    for directory in test_directories:
        if not os.path.exists(directory):
            continue  # silently skip directories that are absent
        for root, _, names in os.walk(directory):
            found.extend(
                os.path.join(root, name) for name in names if name.endswith(exts)
            )

    # Files sitting directly in SRC_ROOT; subdirectories are intentionally skipped.
    for entry in os.listdir(SRC_ROOT):
        candidate = os.path.join(SRC_ROOT, entry)
        if os.path.isfile(candidate) and candidate.endswith(exts):
            found.append(candidate)
    return found

View file

@ -33,7 +33,7 @@ def main() -> int:
board_dir = board_dirs[which]
# build_info_json = board_dir / "build_info.json"
optimization_report = board_dir / "optimization_report.txt"
text = optimization_report.read_text(encoding="utf-8")
text = optimization_report.read_text(encoding="utf-8", errors="replace")
print(text)
return 0

View file

@ -1,14 +1,17 @@
import os
import unittest
from concurrent.futures import ThreadPoolExecutor
from typing import Callable, List
from ci.check_files import (
EXCLUDED_FILES,
FileContent,
FileContentChecker,
MultiCheckerFileProcessor,
collect_files_to_check,
)
from ci.paths import PROJECT_ROOT
SRC_ROOT = PROJECT_ROOT / "src"
PLATFORMS_DIR = os.path.join(SRC_ROOT, "platforms")
PLATFORMS_ESP_DIR = os.path.join(PLATFORMS_DIR, "esp")
NUM_WORKERS = (os.cpu_count() or 1) * 4
ENABLE_PARANOID_GNU_HEADER_INSPECTION = False
@ -17,8 +20,7 @@ if ENABLE_PARANOID_GNU_HEADER_INSPECTION:
else:
BANNED_HEADERS_ESP = []
BANNED_HEADERS_CORE = [
BANNED_HEADERS_COMMON = [
"assert.h",
"iostream",
"stdio.h",
@ -56,76 +58,123 @@ BANNED_HEADERS_CORE = [
"cstdint",
"cstddef", # this certainally fails
"type_traits", # this certainally fails
"Arduino.h",
] + BANNED_HEADERS_ESP
EXCLUDED_FILES = [
"stub_main.cpp",
]
BANNED_HEADERS_CORE = BANNED_HEADERS_COMMON + BANNED_HEADERS_ESP + ["Arduino.h"]
class BannedHeadersChecker(FileContentChecker):
    """Checker that flags #include directives for headers on a ban list."""

    def __init__(self, banned_headers_list: List[str]):
        """Remember the headers to flag."""
        self.banned_headers_list = banned_headers_list

    def should_process_file(self, file_path: str) -> bool:
        """Process C/C++/Arduino source files that are not explicitly excluded."""
        is_source = file_path.endswith((".cpp", ".h", ".hpp", ".ino"))
        is_excluded = any(file_path.endswith(skip) for skip in EXCLUDED_FILES)
        return is_source and not is_excluded

    def check_file_content(self, file_content: FileContent) -> List[str]:
        """Return one message per banned include found.

        Whole-line `//` comments are ignored, as is any line carrying the
        '// ok include' opt-out marker.
        """
        if not self.banned_headers_list:
            return []

        issues: List[str] = []
        for line_number, line in enumerate(file_content.lines, 1):
            if line.strip().startswith("//"):
                continue  # commented-out line: never a live include
            if "// ok include" in line:
                continue  # explicit per-line silencing marker
            for header in self.banned_headers_list:
                if f"#include <{header}>" in line or f'#include "{header}"' in line:
                    issues.append(
                        f"Found banned header '{header}' in {file_content.path}:{line_number}"
                    )
        return issues
def _test_no_banned_headers(
    test_directories: list[str],
    banned_headers_list: List[str],
    on_fail: Callable[[str], None],
) -> None:
    """Scan the given directories for banned headers and report failures via on_fail."""
    targets = collect_files_to_check(test_directories)

    # Run the single banned-headers checker over all collected files.
    processor = MultiCheckerFileProcessor()
    checker = BannedHeadersChecker(banned_headers_list)
    outcome = processor.process_files_with_checkers(targets, [checker])

    failures = outcome.get("BannedHeadersChecker", []) or []
    if not failures:
        print("No banned headers found.")
        return

    msg = f"Found {len(failures)} banned header(s): \n" + "\n".join(failures)
    for failure in failures:
        print(failure)
    on_fail(msg)
class TestNoBannedHeaders(unittest.TestCase):
def check_file(self, file_path: str) -> list[str]:
failings: list[str] = []
banned_headers_list = []
if file_path.startswith(PLATFORMS_DIR):
# continue # Skip the platforms directory
if file_path.startswith(PLATFORMS_ESP_DIR):
banned_headers_list = BANNED_HEADERS_ESP
else:
return failings
if len(banned_headers_list) == 0:
return failings
with open(file_path, "r", encoding="utf-8") as f:
def test_no_banned_headers_src(self) -> None:
"""Searches through the program files to check for banned headers."""
for line_number, line in enumerate(f, 1):
if line.startswith("//"):
continue
for header in banned_headers_list:
if (
f"#include <{header}>" in line or f'#include "{header}"' in line
) and "// ok include" not in line:
failings.append(
f"Found banned header '{header}' in {file_path}:{line_number}"
)
return failings
def test_no_banned_headers(self) -> None:
"""Searches through the program files to check for banned headers, excluding src/platforms."""
files_to_check = []
for root, _, files in os.walk(SRC_ROOT):
for file in files:
if file.endswith(
(".cpp", ".h", ".hpp")
): # Add or remove file extensions as needed
file_path = os.path.join(root, file)
if not any(
file_path.endswith(excluded) for excluded in EXCLUDED_FILES
):
files_to_check.append(file_path)
all_failings = []
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = [
executor.submit(self.check_file, file_path)
for file_path in files_to_check
]
for future in futures:
all_failings.extend(future.result())
if all_failings:
msg = f"Found {len(all_failings)} banned header(s): \n" + "\n".join(
all_failings
)
for failing in all_failings:
print(failing)
def on_fail(msg: str) -> None:
self.fail(
msg + "\n"
"You can add '// ok include' at the end of the line to silence this error for specific inclusions."
)
else:
print("No banned headers found.")
# Test directories as requested
test_directories = [
os.path.join(SRC_ROOT, "fl"),
os.path.join(SRC_ROOT, "fx"),
os.path.join(SRC_ROOT, "sensors"),
]
_test_no_banned_headers(
test_directories=test_directories,
banned_headers_list=BANNED_HEADERS_CORE,
on_fail=on_fail,
)
def test_no_banned_headers_examples(self) -> None:
"""Searches through the program files to check for banned headers."""
def on_fail(msg: str) -> None:
self.fail(
msg + "\n"
"You can add '// ok include' at the end of the line to silence this error for specific inclusions."
)
test_directories = ["examples"]
_test_no_banned_headers(
test_directories=test_directories,
banned_headers_list=BANNED_HEADERS_COMMON,
on_fail=on_fail,
)
if __name__ == "__main__":