Compare commits


27 Commits

Author SHA1 Message Date
J. Nick Koston
3291488a8b compat 2025-10-21 12:24:39 -10:00
J. Nick Koston
75550b39f4 compat 2025-10-21 12:23:16 -10:00
J. Nick Koston
02e1ed2130 multiple networks 2025-10-21 11:57:06 -10:00
J. Nick Koston
2948264917 try to avoid some of the ram 2025-10-21 11:46:30 -10:00
J. Nick Koston
660411ac42 try to avoid some of the ram 2025-10-21 11:44:56 -10:00
J. Nick Koston
88e3f02c9c try to avoid some of the ram 2025-10-21 11:40:48 -10:00
J. Nick Koston
f3f419077b [wifi] Optimize WiFi network storage with FixedVector 2025-10-21 11:29:27 -10:00
Jeff Brown
8e8a2bde95 [light] Decouple AddressableLight and Light transition classes (#11166)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-21 10:37:29 -10:00
Petr Kejval
80265a6bd2 [sensor] Add optimistic option to heartbeat filter (#10993)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-21 09:17:07 -04:00
J. Nick Koston
87e9a7a1bd [climate] Remove unnecessary vector allocations in state save/restore (#11445) 2025-10-21 04:35:18 -05:00
J. Nick Koston
3aedfe8be3 [binary_sensor] Optimize AutorepeatFilter with FixedVector (#11444) 2025-10-21 04:30:13 -05:00
J. Nick Koston
7f2cc47ed6 [binary_sensor] Add compile test for auto repeat (#11443) 2025-10-21 04:25:59 -05:00
J. Nick Koston
a5542e0d2b [sensor] Optimize calibration and Or filters with FixedVector (#11437) 2025-10-20 21:38:05 -10:00
Keith Burzinski
66afe4a9be [climate] Add some integration tests (#11439) 2025-10-21 02:26:18 -05:00
J. Nick Koston
0ae9009e41 [ci] Fix clang-tidy split mode for core file changes (#11434) 2025-10-20 20:39:50 -10:00
J. Nick Koston
0b2f5fcd7e Add additional sensor filter tests (#11438) 2025-10-20 20:39:21 -10:00
J. Nick Koston
7a2887e2ed [analyze-memory] Improve symbol categorization accuracy (#11440) 2025-10-20 20:39:05 -10:00
J. Nick Koston
cd2d3f061d [espnow] Fix compilation error with initializer_list after #11433 (#11436) 2025-10-20 19:58:24 -10:00
J. Nick Koston
73f5d01c2d [core] Optimize automation actions memory usage with std::initializer_list (#11433) 2025-10-21 04:32:58 +00:00
Jesse Hills
0938609f7a [improv] Put next_url behind defines to save flash (#11420)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-21 16:58:26 +13:00
J. Nick Koston
77203f0cb4 [text_sensor] Optimize filters with FixedVector (1.6KB flash savings) (#11423)
Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com>
2025-10-21 03:24:51 +00:00
J. Nick Koston
040130e357 [ci] Fix memory impact workflow for new components (#11421) 2025-10-21 16:02:07 +13:00
J. Nick Koston
85959e3004 [sensor,text_sensor,binary_sensor] Optimize filter parameters with std::initializer_list (#11426) 2025-10-21 15:47:13 +13:00
Jonathan Swoboda
a809a13729 [core] Add support for extern "C" includes (#11422) 2025-10-21 15:46:50 +13:00
J. Nick Koston
3b6ff615e8 [ci] Fix clang-tidy split decision to account for component dependencies (#11430) 2025-10-21 15:39:15 +13:00
J. Nick Koston
05216db5f0 ESP8266: Complete testing mode memory patches with DRAM and Flash (#11427) 2025-10-21 15:26:49 +13:00
J. Nick Koston
9f668b0c4b Add basic text_sensor tests (#11424) 2025-10-21 15:26:41 +13:00
70 changed files with 1616 additions and 661 deletions

View File

@@ -731,13 +731,6 @@ def command_vscode(args: ArgsProtocol) -> int | None:
def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
# Set memory analysis options in config
if args.analyze_memory:
config.setdefault(CONF_ESPHOME, {})["analyze_memory"] = True
if args.memory_report:
config.setdefault(CONF_ESPHOME, {})["memory_report_file"] = args.memory_report
exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
@@ -1199,17 +1192,6 @@ def parse_args(argv):
help="Only generate source code, do not compile.",
action="store_true",
)
parser_compile.add_argument(
"--analyze-memory",
help="Analyze and display memory usage by component after compilation.",
action="store_true",
)
parser_compile.add_argument(
"--memory-report",
help="Save memory analysis report to a file (supports .json or .txt).",
type=str,
metavar="FILE",
)
parser_upload = subparsers.add_parser(
"upload",

View File

@@ -1,7 +1,6 @@
"""CLI interface for memory analysis with report generation."""
from collections import defaultdict
import json
import sys
from . import (
@@ -232,9 +231,22 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
api_component = (name, mem)
break
# Combine all components to analyze: top ESPHome + all external + API if not already included
components_to_analyze = list(top_esphome_components) + list(
top_external_components
# Also include wifi_stack and other important system components if they exist
system_components_to_include = [
# Empty list - we've finished debugging symbol categorization
# Add component names here if you need to debug their symbols
]
system_components = [
(name, mem)
for name, mem in components
if name in system_components_to_include
]
# Combine all components to analyze: top ESPHome + all external + API if not already included + system components
components_to_analyze = (
list(top_esphome_components)
+ list(top_external_components)
+ system_components
)
if api_component and api_component not in components_to_analyze:
components_to_analyze.append(api_component)
@@ -271,28 +283,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
return "\n".join(lines)
def to_json(self) -> str:
"""Export analysis results as JSON."""
data = {
"components": {
name: {
"text": mem.text_size,
"rodata": mem.rodata_size,
"data": mem.data_size,
"bss": mem.bss_size,
"flash_total": mem.flash_total,
"ram_total": mem.ram_total,
"symbol_count": mem.symbol_count,
}
for name, mem in self.components.items()
},
"totals": {
"flash": sum(c.flash_total for c in self.components.values()),
"ram": sum(c.ram_total for c in self.components.values()),
},
}
return json.dumps(data, indent=2)
def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
"""Dump uncategorized symbols for analysis."""
# Sort by size descending

View File

@@ -127,40 +127,39 @@ SYMBOL_PATTERNS = {
"tryget_socket_unconn",
"cs_create_ctrl_sock",
"netbuf_alloc",
"tcp_", # TCP protocol functions
"udp_", # UDP protocol functions
"lwip_", # LwIP stack functions
"eagle_lwip", # ESP-specific LwIP functions
"new_linkoutput", # Link output function
"acd_", # Address Conflict Detection (ACD)
"eth_", # Ethernet functions
"mac_enable_bb", # MAC baseband enable
"reassemble_and_dispatch", # Packet reassembly
],
# dhcp must come before libc to avoid "dhcp_select" matching "select" pattern
"dhcp": ["dhcp", "handle_dhcp"],
"ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"],
"wifi_stack": [
"ieee80211",
"hostap",
"sta_",
"ap_",
"scan_",
"wifi_",
"wpa_",
"wps_",
"esp_wifi",
"cnx_",
"wpa3_",
"sae_",
"wDev_",
"ic_",
"mac_",
"esf_buf",
"gWpaSm",
"sm_WPA",
"eapol_",
"owe_",
"wifiLowLevelInit",
"s_do_mapping",
"gScanStruct",
"ppSearchTxframe",
"ppMapWaitTxq",
"ppFillAMPDUBar",
"ppCheckTxConnTrafficIdle",
"ppCalTkipMic",
# Order matters! More specific categories must come before general ones.
# mdns must come before bluetooth to avoid "_mdns_disable_pcb" matching "ble_" pattern
"mdns_lib": ["mdns"],
# memory_mgmt must come before wifi_stack to catch mmu_hal_* symbols
"memory_mgmt": [
"mem_",
"memory_",
"tlsf_",
"memp_",
"pbuf_",
"pbuf_alloc",
"pbuf_copy_partial_pbuf",
"esp_mmu_map",
"mmu_hal_",
"s_do_mapping", # Memory mapping function, not WiFi
"hash_map_", # Hash map data structure
"umm_assimilate", # UMM malloc assimilation
],
"bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"],
"wifi_bt_coex": ["coex"],
# Bluetooth categories must come BEFORE wifi_stack to avoid misclassification
# Many BLE symbols contain patterns like "ble_" that would otherwise match wifi patterns
"bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"],
"bluedroid_bt": [
"bluedroid",
@@ -207,6 +206,61 @@ SYMBOL_PATTERNS = {
"copy_extra_byte_in_db",
"parse_read_local_supported_commands_response",
],
"bluetooth": [
"bt_",
"_ble_", # More specific than "ble_" to avoid matching "able_", "enable_", "disable_"
"l2c_",
"l2ble_", # L2CAP for BLE
"gatt_",
"gap_",
"hci_",
"btsnd_hcic_", # Bluetooth HCI command send functions
"BT_init",
"BT_tx_", # Bluetooth transmit functions
"esp_ble_", # Catch esp_ble_* functions
],
"bluetooth_ll": [
"llm_", # Link layer manager
"llc_", # Link layer control
"lld_", # Link layer driver
"ld_acl_", # Link layer ACL (Asynchronous Connection-Oriented)
"llcp_", # Link layer control protocol
"lmp_", # Link manager protocol
],
"wifi_bt_coex": ["coex"],
"wifi_stack": [
"ieee80211",
"hostap",
"sta_",
"wifi_ap_", # More specific than "ap_" to avoid matching "cap_", "map_"
"wifi_scan_", # More specific than "scan_" to avoid matching "_scan_" in other contexts
"wifi_",
"wpa_",
"wps_",
"esp_wifi",
"cnx_",
"wpa3_",
"sae_",
"wDev_",
"ic_mac_", # More specific than "mac_" to avoid matching emac_
"esf_buf",
"gWpaSm",
"sm_WPA",
"eapol_",
"owe_",
"wifiLowLevelInit",
# Removed "s_do_mapping" - this is memory management, not WiFi
"gScanStruct",
"ppSearchTxframe",
"ppMapWaitTxq",
"ppFillAMPDUBar",
"ppCheckTxConnTrafficIdle",
"ppCalTkipMic",
"phy_force_wifi",
"phy_unforce_wifi",
"write_wifi_chan",
"wifi_track_pll",
],
"crypto_math": [
"ecp_",
"bignum_",
@@ -231,13 +285,36 @@ SYMBOL_PATTERNS = {
"p_256_init_curve",
"shift_sub_rows",
"rshift",
"rijndaelEncrypt", # AES Rijndael encryption
],
# System and Arduino core functions must come before libc
"esp_system": [
"system_", # ESP system functions
"postmortem_", # Postmortem reporting
],
"arduino_core": [
"pinMode",
"resetPins",
"millis",
"micros",
"delay(", # More specific - Arduino delay function with parenthesis
"delayMicroseconds",
"digitalWrite",
"digitalRead",
],
"sntp": ["sntp_", "sntp_recv"],
"scheduler": [
"run_scheduled_",
"compute_scheduled_",
"event_TaskQueue",
],
"hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"],
"libc": [
"printf",
"scanf",
"malloc",
"free",
"_free", # More specific than "free" to match _free, __free_r, etc. but not arbitrary "free" substring
"umm_free", # UMM malloc free function
"memcpy",
"memset",
"strcpy",
@@ -259,7 +336,7 @@ SYMBOL_PATTERNS = {
"_setenv_r",
"_tzset_unlocked_r",
"__tzcalc_limits",
"select",
"_select", # More specific than "select" to avoid matching "dhcp_select", etc.
"scalbnf",
"strtof",
"strtof_l",
@@ -316,8 +393,24 @@ SYMBOL_PATTERNS = {
"CSWTCH$",
"dst$",
"sulp",
"_strtol_l", # String to long with locale
"__cvt", # Convert
"__utoa", # Unsigned to ASCII
"__global_locale", # Global locale
"_ctype_", # Character type
"impure_data", # Impure data
],
"string_ops": [
"strcmp",
"strncmp",
"strchr",
"strstr",
"strtok",
"strdup",
"strncasecmp_P", # String compare (case insensitive, from program memory)
"strnlen_P", # String length (from program memory)
"strncat_P", # String concatenate (from program memory)
],
"string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"],
"memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"],
"file_io": [
"fread",
@@ -338,10 +431,26 @@ SYMBOL_PATTERNS = {
"vsscanf",
],
"cpp_anonymous": ["_GLOBAL__N_", "n$"],
"cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"],
"exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"],
# Plain C patterns only - C++ symbols will be categorized via DEMANGLED_PATTERNS
"nvs": ["nvs_"], # Plain C NVS functions
"ota": ["ota_", "OTA", "esp_ota", "app_desc"],
# cpp_runtime: Removed _ZN, _ZL to let DEMANGLED_PATTERNS categorize C++ symbols properly
# Only keep patterns that are truly runtime-specific and not categorizable by namespace
"cpp_runtime": ["__cxx", "_ZSt", "__gxx_personality", "_Z16"],
"exception_handling": [
"__cxa_",
"_Unwind_",
"__gcc_personality",
"uw_frame_state",
"search_object", # Search for exception handling object
"get_cie_encoding", # Get CIE encoding
"add_fdes", # Add frame description entries
"fde_unencoded_compare", # Compare FDEs
"fde_mixed_encoding_compare", # Compare mixed encoding FDEs
"frame_downheap", # Frame heap operations
"frame_heapsort", # Frame heap sorting
],
"static_init": ["_GLOBAL__sub_I_"],
"mdns_lib": ["mdns"],
"phy_radio": [
"phy_",
"rf_",
@@ -394,10 +503,47 @@ SYMBOL_PATTERNS = {
"txcal_debuge_mode",
"ant_wifitx_cfg",
"reg_init_begin",
"tx_cap_init", # TX capacitance init
"ram_set_txcap", # RAM TX capacitance setting
"tx_atten_", # TX attenuation
"txiq_", # TX I/Q calibration
"ram_cal_", # RAM calibration
"ram_rxiq_", # RAM RX I/Q
"readvdd33", # Read VDD33
"test_tout", # Test timeout
"tsen_meas", # Temperature sensor measurement
"bbpll_cal", # Baseband PLL calibration
"set_cal_", # Set calibration
"set_rfanagain_", # Set RF analog gain
"set_txdc_", # Set TX DC
"get_vdd33_", # Get VDD33
"gen_rx_gain_table", # Generate RX gain table
"ram_ana_inf_gating_en", # RAM analog interface gating enable
"tx_cont_en", # TX continuous enable
"tx_delay_cfg", # TX delay configuration
"tx_gain_table_set", # TX gain table set
"check_and_reset_hw_deadlock", # Hardware deadlock check
"s_config", # System/hardware config
"chan14_mic_cfg", # Channel 14 MIC config
],
"wifi_phy_pp": [
"pp_",
"ppT",
"ppR",
"ppP",
"ppInstall",
"ppCalTxAMPDULength",
"ppCheckTx", # Packet processor TX check
"ppCal", # Packet processor calibration
"HdlAllBuffedEb", # Handle buffered EB
],
"wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"],
"wifi_lmac": ["lmac"],
"wifi_device": ["wdev", "wDev_"],
"wifi_device": [
"wdev",
"wDev_",
"ic_set_sta", # Set station mode
"ic_set_vif", # Set virtual interface
],
"power_mgmt": [
"pm_",
"sleep",
@@ -406,15 +552,7 @@ SYMBOL_PATTERNS = {
"deep_sleep",
"power_down",
"g_pm",
],
"memory_mgmt": [
"mem_",
"memory_",
"tlsf_",
"memp_",
"pbuf_",
"pbuf_alloc",
"pbuf_copy_partial_pbuf",
"pmc", # Power Management Controller
],
"hal_layer": ["hal_"],
"clock_mgmt": [
@@ -439,7 +577,6 @@ SYMBOL_PATTERNS = {
"error_handling": ["panic", "abort", "assert", "error_", "fault"],
"authentication": ["auth"],
"ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"],
"dhcp": ["dhcp", "handle_dhcp"],
"ethernet_phy": [
"emac_",
"eth_phy_",
@@ -618,7 +755,15 @@ SYMBOL_PATTERNS = {
"ampdu_dispatch_upto",
],
"ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"],
"rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"],
"rate_control": [
"rssi_margin",
"rcGetSched",
"get_rate_fcc_index",
"rcGetRate", # Get rate
"rc_get_", # Rate control getters
"rc_set_", # Rate control setters
"rc_enable_", # Rate control enable functions
],
"nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"],
"channel_mgmt": ["chm_init", "chm_set_current_channel"],
"trace": ["trc_init", "trc_onAmpduOp"],
@@ -799,31 +944,18 @@ SYMBOL_PATTERNS = {
"supports_interlaced_inquiry_scan",
"supports_reading_remote_extended_features",
],
"bluetooth_ll": [
"lld_pdu_",
"ld_acl_",
"lld_stop_ind_handler",
"lld_evt_winsize_change",
"config_lld_evt_funcs_reset",
"config_lld_funcs_reset",
"config_llm_funcs_reset",
"llm_set_long_adv_data",
"lld_retry_tx_prog",
"llc_link_sup_to_ind_handler",
"config_llc_funcs_reset",
"lld_evt_rxwin_compute",
"config_btdm_funcs_reset",
"config_ea_funcs_reset",
"llc_defalut_state_tab_reset",
"config_rwip_funcs_reset",
"ke_lmp_rx_flooding_detect",
],
}
# Demangled patterns: patterns found in demangled C++ names
DEMANGLED_PATTERNS = {
"gpio_driver": ["GPIO"],
"uart_driver": ["UART"],
# mdns_lib must come before network_stack to avoid "udp" matching "_udpReadBuffer" in MDNSResponder
"mdns_lib": [
"MDNSResponder",
"MDNSImplementation",
"MDNS",
],
"network_stack": [
"lwip",
"tcp",
@@ -836,6 +968,24 @@ DEMANGLED_PATTERNS = {
"ethernet",
"ppp",
"slip",
"UdpContext", # UDP context class
"DhcpServer", # DHCP server class
],
"arduino_core": [
"String::", # Arduino String class
"Print::", # Arduino Print class
"HardwareSerial::", # Serial class
"IPAddress::", # IP address class
"EspClass::", # ESP class
"experimental::_SPI", # Experimental SPI
],
"ota": [
"UpdaterClass",
"Updater::",
],
"wifi": [
"ESP8266WiFi",
"WiFi::",
],
"wifi_stack": ["NetworkInterface"],
"nimble_bt": [
@@ -854,7 +1004,6 @@ DEMANGLED_PATTERNS = {
"rtti": ["__type_info", "__class_type_info"],
"web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"],
"async_tcp": ["AsyncClient", "AsyncServer"],
"mdns_lib": ["mdns"],
"json_lib": [
"ArduinoJson",
"JsonDocument",

View File

@@ -1143,7 +1143,7 @@ message ListEntitiesSelectResponse {
reserved 4; // Deprecated: was string unique_id
string icon = 5 [(field_ifdef) = "USE_ENTITY_ICON"];
repeated string options = 6 [(container_pointer) = "std::vector"];
repeated string options = 6 [(container_pointer) = "FixedVector"];
bool disabled_by_default = 7;
EntityCategory entity_category = 8;
uint32 device_id = 9 [(field_ifdef) = "USE_DEVICES"];

View File

@@ -142,11 +142,6 @@ APIError APINoiseFrameHelper::loop() {
* errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
*/
APIError APINoiseFrameHelper::try_read_frame_() {
// Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
if (this->rx_buf_len_ == 0) {
this->rx_buf_.clear();
}
// read header
if (rx_header_buf_len_ < 3) {
// no header information yet

View File

@@ -54,11 +54,6 @@ APIError APIPlaintextFrameHelper::loop() {
* error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
*/
APIError APIPlaintextFrameHelper::try_read_frame_() {
// Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
if (this->rx_buf_len_ == 0) {
this->rx_buf_.clear();
}
// read header
while (!rx_header_parsed_) {
// Now that we know when the socket is ready, we can read up to 3 bytes

View File

@@ -1534,7 +1534,7 @@ class ListEntitiesSelectResponse final : public InfoResponseProtoMessage {
#ifdef HAS_PROTO_MESSAGE_DUMP
const char *message_name() const override { return "list_entities_select_response"; }
#endif
const std::vector<std::string> *options{};
const FixedVector<std::string> *options{};
void encode(ProtoWriteBuffer buffer) const override;
void calculate_size(ProtoSize &size) const override;
#ifdef HAS_PROTO_MESSAGE_DUMP

View File

@@ -264,20 +264,31 @@ async def delayed_off_filter_to_code(config, filter_id):
),
)
async def autorepeat_filter_to_code(config, filter_id):
timings = []
if len(config) > 0:
timings.extend(
(conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
for conf in config
)
else:
timings.append(
(
cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds,
cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds,
cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds,
timings = [
cg.StructInitializer(
cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"),
("delay", conf[CONF_DELAY]),
("time_off", conf[CONF_TIME_OFF]),
("time_on", conf[CONF_TIME_ON]),
)
)
for conf in config
]
else:
timings = [
cg.StructInitializer(
cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"),
("delay", cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds),
(
"time_off",
cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds,
),
(
"time_on",
cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds,
),
)
]
var = cg.new_Pvariable(filter_id, timings)
await cg.register_component(var, {})
return var

View File

@@ -1,7 +1,6 @@
#include "filter.h"
#include "binary_sensor.h"
#include <utility>
namespace esphome {
@@ -68,7 +67,7 @@ float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARD
optional<bool> InvertFilter::new_value(bool value) { return !value; }
AutorepeatFilter::AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings) : timings_(std::move(timings)) {}
AutorepeatFilter::AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings) : timings_(timings) {}
optional<bool> AutorepeatFilter::new_value(bool value) {
if (value) {

View File

@@ -4,8 +4,6 @@
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include <vector>
namespace esphome {
namespace binary_sensor {
@@ -82,11 +80,6 @@ class InvertFilter : public Filter {
};
struct AutorepeatFilterTiming {
AutorepeatFilterTiming(uint32_t delay, uint32_t off, uint32_t on) {
this->delay = delay;
this->time_off = off;
this->time_on = on;
}
uint32_t delay;
uint32_t time_off;
uint32_t time_on;
@@ -94,7 +87,7 @@ struct AutorepeatFilterTiming {
class AutorepeatFilter : public Filter, public Component {
public:
explicit AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings);
explicit AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings);
optional<bool> new_value(bool value) override;
@@ -104,7 +97,7 @@ class AutorepeatFilter : public Filter, public Component {
void next_timing_();
void next_value_(bool val);
std::vector<AutorepeatFilterTiming> timings_;
FixedVector<AutorepeatFilterTiming> timings_;
uint8_t active_timing_{0};
};
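
With the explicit constructor removed, AutorepeatFilterTiming becomes an aggregate, so the codegen above can brace-initialize each timing directly into the initializer_list constructor. A minimal sketch of the shape of the resulting call (values and the variable name are illustrative, not taken from a real build):

auto *autorepeat = new esphome::binary_sensor::AutorepeatFilter({
    esphome::binary_sensor::AutorepeatFilterTiming{.delay = 1000, .time_off = 100, .time_on = 900},
    esphome::binary_sensor::AutorepeatFilterTiming{.delay = 5000, .time_off = 50, .time_on = 50},
});

Because the constructor takes std::initializer_list, the timings land in a FixedVector sized exactly to the list, with no std::vector growth slack.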

View File

@@ -385,12 +385,14 @@ void Climate::save_state_() {
if (!traits.get_supported_custom_fan_modes().empty() && custom_fan_mode.has_value()) {
state.uses_custom_fan_mode = true;
const auto &supported = traits.get_supported_custom_fan_modes();
std::vector<std::string> vec{supported.begin(), supported.end()};
for (size_t i = 0; i < vec.size(); i++) {
if (vec[i] == custom_fan_mode) {
// std::set has consistent order (lexicographic for strings)
size_t i = 0;
for (const auto &mode : supported) {
if (mode == custom_fan_mode) {
state.custom_fan_mode = i;
break;
}
i++;
}
}
if (traits.get_supports_presets() && preset.has_value()) {
@@ -400,12 +402,14 @@ void Climate::save_state_() {
if (!traits.get_supported_custom_presets().empty() && custom_preset.has_value()) {
state.uses_custom_preset = true;
const auto &supported = traits.get_supported_custom_presets();
std::vector<std::string> vec{supported.begin(), supported.end()};
for (size_t i = 0; i < vec.size(); i++) {
if (vec[i] == custom_preset) {
// std::set has consistent order (lexicographic for strings)
size_t i = 0;
for (const auto &preset : supported) {
if (preset == custom_preset) {
state.custom_preset = i;
break;
}
i++;
}
}
if (traits.get_supports_swing_modes()) {
@@ -549,22 +553,34 @@ void ClimateDeviceRestoreState::apply(Climate *climate) {
climate->fan_mode = this->fan_mode;
}
if (!traits.get_supported_custom_fan_modes().empty() && this->uses_custom_fan_mode) {
// std::set has consistent order (lexicographic for strings), so this is ok
// std::set has consistent order (lexicographic for strings)
const auto &modes = traits.get_supported_custom_fan_modes();
std::vector<std::string> modes_vec{modes.begin(), modes.end()};
if (custom_fan_mode < modes_vec.size()) {
climate->custom_fan_mode = modes_vec[this->custom_fan_mode];
if (custom_fan_mode < modes.size()) {
size_t i = 0;
for (const auto &mode : modes) {
if (i == this->custom_fan_mode) {
climate->custom_fan_mode = mode;
break;
}
i++;
}
}
}
if (traits.get_supports_presets() && !this->uses_custom_preset) {
climate->preset = this->preset;
}
if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) {
// std::set has consistent order (lexicographic for strings), so this is ok
// std::set has consistent order (lexicographic for strings)
const auto &presets = traits.get_supported_custom_presets();
std::vector<std::string> presets_vec{presets.begin(), presets.end()};
if (custom_preset < presets_vec.size()) {
climate->custom_preset = presets_vec[this->custom_preset];
if (custom_preset < presets.size()) {
size_t i = 0;
for (const auto &preset : presets) {
if (i == this->custom_preset) {
climate->custom_preset = preset;
break;
}
i++;
}
}
}
if (traits.get_supports_swing_modes()) {
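
The old code materialized the std::set into a temporary std::vector just to get positional indexing; the new code walks the set in place, relying on std::set's stable lexicographic order. The same index could also be computed with the standard algorithms — this is not the PR's code, just an equivalent sketch of the idea (names are illustrative):

// `modes` is the std::set<std::string> from get_supported_custom_fan_modes(),
// `wanted` is the stored custom fan mode string.
auto it = modes.find(wanted);
if (it != modes.end()) {
  // O(n) for set iterators, same cost as the loop above, but still no temporary vector
  size_t index = static_cast<size_t>(std::distance(modes.begin(), it));
}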

View File

@@ -9,7 +9,8 @@ static const char *const TAG = "copy.select";
void CopySelect::setup() {
source_->add_on_state_callback([this](const std::string &value, size_t index) { this->publish_state(value); });
traits.set_options(source_->traits.get_options());
// Copy options from source select
this->traits.copy_options(source_->traits.get_options());
if (source_->has_state())
this->publish_state(source_->state);

View File

@@ -76,10 +76,6 @@ void ESP32BLE::advertising_set_service_data(const std::vector<uint8_t> &data) {
}
void ESP32BLE::advertising_set_manufacturer_data(const std::vector<uint8_t> &data) {
this->advertising_set_manufacturer_data(std::span<const uint8_t>(data));
}
void ESP32BLE::advertising_set_manufacturer_data(std::span<const uint8_t> data) {
this->advertising_init_();
this->advertising_->set_manufacturer_data(data);
this->advertising_start();

View File

@@ -118,7 +118,6 @@ class ESP32BLE : public Component {
void advertising_start();
void advertising_set_service_data(const std::vector<uint8_t> &data);
void advertising_set_manufacturer_data(const std::vector<uint8_t> &data);
void advertising_set_manufacturer_data(std::span<const uint8_t> data);
void advertising_set_appearance(uint16_t appearance) { this->appearance_ = appearance; }
void advertising_set_service_data_and_name(std::span<const uint8_t> data, bool include_name);
void advertising_add_service_uuid(ESPBTUUID uuid);

View File

@@ -59,10 +59,6 @@ void BLEAdvertising::set_service_data(const std::vector<uint8_t> &data) {
}
void BLEAdvertising::set_manufacturer_data(const std::vector<uint8_t> &data) {
this->set_manufacturer_data(std::span<const uint8_t>(data));
}
void BLEAdvertising::set_manufacturer_data(std::span<const uint8_t> data) {
delete[] this->advertising_data_.p_manufacturer_data;
this->advertising_data_.p_manufacturer_data = nullptr;
this->advertising_data_.manufacturer_len = data.size();

View File

@@ -37,7 +37,6 @@ class BLEAdvertising {
void set_scan_response(bool scan_response) { this->scan_response_ = scan_response; }
void set_min_preferred_interval(uint16_t interval) { this->advertising_data_.min_interval = interval; }
void set_manufacturer_data(const std::vector<uint8_t> &data);
void set_manufacturer_data(std::span<const uint8_t> data);
void set_appearance(uint16_t appearance) { this->advertising_data_.appearance = appearance; }
void set_service_data(const std::vector<uint8_t> &data);
void set_service_data(std::span<const uint8_t> data);

View File

@@ -1,6 +1,5 @@
#include "esp32_ble_beacon.h"
#include "esphome/core/log.h"
#include "esphome/core/helpers.h"
#ifdef USE_ESP32

View File

@@ -15,10 +15,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_characteristic_on_w
Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger = // NOLINT(cppcoreguidelines-owning-memory)
new Trigger<std::vector<uint8_t>, uint16_t>();
characteristic->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
// Convert span to vector for trigger - copy is necessary because:
// 1. Trigger stores the data for use in automation actions that execute later
// 2. The span is only valid during this callback (points to temporary BLE stack data)
// 3. User lambdas in automations need persistent data they can access asynchronously
// Convert span to vector for trigger
on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
});
return on_write_trigger;
@@ -30,10 +27,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_descriptor_on_write
Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger = // NOLINT(cppcoreguidelines-owning-memory)
new Trigger<std::vector<uint8_t>, uint16_t>();
descriptor->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
// Convert span to vector for trigger - copy is necessary because:
// 1. Trigger stores the data for use in automation actions that execute later
// 2. The span is only valid during this callback (points to temporary BLE stack data)
// 3. User lambdas in automations need persistent data they can access asynchronously
// Convert span to vector for trigger
on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
});
return on_write_trigger;

View File

@@ -112,7 +112,7 @@ async def to_code(config):
cg.add_define("USE_IMPROV")
await improv_base.setup_improv_core(var, config)
await improv_base.setup_improv_core(var, config, "esp32_improv")
cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION]))
cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION]))

View File

@@ -389,11 +389,13 @@ void ESP32ImprovComponent::check_wifi_connection_() {
std::string url_strings[3];
size_t url_count = 0;
#ifdef USE_ESP32_IMPROV_NEXT_URL
// Add next_url if configured (should be first per Improv BLE spec)
std::string next_url = this->get_formatted_next_url_();
if (!next_url.empty()) {
url_strings[url_count++] = std::move(next_url);
}
#endif
// Add default URLs for backward compatibility
url_strings[url_count++] = ESPHOME_MY_LINK;

View File

@@ -107,7 +107,7 @@ void IDFI2CBus::dump_config() {
if (s.second) {
ESP_LOGCONFIG(TAG, "Found device at address 0x%02X", s.first);
} else {
ESP_LOGCONFIG(TAG, "Unknown error at address 0x%02X", s.first);
ESP_LOGE(TAG, "Unknown error at address 0x%02X", s.first);
}
}
}

View File

@@ -3,6 +3,8 @@ import re
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import __version__
from esphome.cpp_generator import MockObj
from esphome.types import ConfigType
CODEOWNERS = ["@esphome/core"]
@@ -35,7 +37,9 @@ def _process_next_url(url: str):
return url
async def setup_improv_core(var, config):
if CONF_NEXT_URL in config:
cg.add(var.set_next_url(_process_next_url(config[CONF_NEXT_URL])))
async def setup_improv_core(var: MockObj, config: ConfigType, component: str):
if next_url := config.get(CONF_NEXT_URL):
cg.add(var.set_next_url(_process_next_url(next_url)))
cg.add_define(f"USE_{component.upper()}_NEXT_URL")
cg.add_library("improv/Improv", "1.2.4")

View File

@@ -2,10 +2,12 @@
#include "esphome/components/network/util.h"
#include "esphome/core/application.h"
#include "esphome/core/defines.h"
namespace esphome {
namespace improv_base {
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}";
static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1;
static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}";
@@ -43,6 +45,7 @@ std::string ImprovBase::get_formatted_next_url_() {
return formatted_url;
}
#endif
} // namespace improv_base
} // namespace esphome

View File

@@ -1,17 +1,22 @@
#pragma once
#include <string>
#include "esphome/core/defines.h"
namespace esphome {
namespace improv_base {
class ImprovBase {
public:
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
void set_next_url(const std::string &next_url) { this->next_url_ = next_url; }
#endif
protected:
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
std::string get_formatted_next_url_();
std::string next_url_;
#endif
};
} // namespace improv_base

View File

@@ -43,4 +43,4 @@ FINAL_VALIDATE_SCHEMA = validate_logger
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
await improv_base.setup_improv_core(var, config)
await improv_base.setup_improv_core(var, config, "improv_serial")

View File

@@ -146,9 +146,11 @@ void ImprovSerialComponent::loop() {
std::vector<uint8_t> ImprovSerialComponent::build_rpc_settings_response_(improv::Command command) {
std::vector<std::string> urls;
#ifdef USE_IMPROV_SERIAL_NEXT_URL
if (!this->next_url_.empty()) {
urls.push_back(this->get_formatted_next_url_());
}
#endif
#ifdef USE_WEBSERVER
for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
if (ip.is_ip4()) {

View File

@@ -62,7 +62,7 @@ void AddressableLightTransformer::start() {
}
optional<LightColorValues> AddressableLightTransformer::apply() {
float smoothed_progress = LightTransitionTransformer::smoothed_progress(this->get_progress_());
float smoothed_progress = LightTransformer::smoothed_progress(this->get_progress_());
// When running an output-buffer modifying effect, don't try to transition individual LEDs, but instead just fade the
// LightColorValues. write_state() then picks up the change in brightness, and the color change is picked up by the

View File

@@ -8,7 +8,7 @@
#include "esphome/core/defines.h"
#include "light_output.h"
#include "light_state.h"
#include "transformers.h"
#include "light_transformer.h"
#ifdef USE_POWER_SUPPLY
#include "esphome/components/power_supply/power_supply.h"
@@ -103,7 +103,7 @@ class AddressableLight : public LightOutput, public Component {
bool effect_active_{false};
};
class AddressableLightTransformer : public LightTransitionTransformer {
class AddressableLightTransformer : public LightTransformer {
public:
AddressableLightTransformer(AddressableLight &light) : light_(light) {}

View File

@@ -38,6 +38,10 @@ class LightTransformer {
const LightColorValues &get_target_values() const { return this->target_values_; }
protected:
// This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like
// transition from 0 to 1 on x = [0, 1]
static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }
/// The progress of this transition, on a scale of 0 to 1.
float get_progress_() {
uint32_t now = esphome::millis();
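
The Horner form in smoothed_progress is the standard "smootherstep" polynomial; expanding it:

x · x · x · (x · (6x − 15) + 10) = 6x⁵ − 15x⁴ + 10x³

with f(0) = 0, f(1) = 1 and f′(x) = 30x²(x − 1)², so the slope is zero at both endpoints and the transition eases in and out. Hoisting this single static helper onto LightTransformer is what lets both the addressable and the regular transition classes share it after the decoupling above.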

View File

@@ -50,15 +50,11 @@ class LightTransitionTransformer : public LightTransformer {
if (this->changing_color_mode_)
p = p < 0.5f ? p * 2 : (p - 0.5) * 2;
float v = LightTransitionTransformer::smoothed_progress(p);
float v = LightTransformer::smoothed_progress(p);
return LightColorValues::lerp(start, end, v);
}
protected:
// This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like
// transition from 0 to 1 on x = [0, 1]
static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }
LightColorValues end_values_{};
LightColorValues intermediate_values_{};
bool changing_color_mode_{false};

View File

@@ -300,11 +300,11 @@ void LvSelectable::set_selected_text(const std::string &text, lv_anim_enable_t a
}
}
void LvSelectable::set_options(std::vector<std::string> options) {
void LvSelectable::set_options(std::initializer_list<std::string> options) {
auto index = this->get_selected_index();
if (index >= options.size())
index = options.size() - 1;
this->options_ = std::move(options);
this->options_ = options;
this->set_option_string(join_string(this->options_).c_str());
lv_event_send(this->obj, LV_EVENT_REFRESH, nullptr);
this->set_selected_index(index, LV_ANIM_OFF);

View File

@@ -358,12 +358,12 @@ class LvSelectable : public LvCompound {
virtual void set_selected_index(size_t index, lv_anim_enable_t anim) = 0;
void set_selected_text(const std::string &text, lv_anim_enable_t anim);
std::string get_selected_text();
std::vector<std::string> get_options() { return this->options_; }
void set_options(std::vector<std::string> options);
const FixedVector<std::string> &get_options() { return this->options_; }
void set_options(std::initializer_list<std::string> options);
protected:
virtual void set_option_string(const char *options) = 0;
std::vector<std::string> options_{};
FixedVector<std::string> options_{};
};
#ifdef USE_LVGL_DROPDOWN

View File

@@ -53,7 +53,10 @@ class LVGLSelect : public select::Select, public Component {
this->widget_->set_selected_text(value, this->anim_);
this->publish();
}
void set_options_() { this->traits.set_options(this->widget_->get_options()); }
void set_options_() {
// Copy options from lvgl widget to select traits
this->traits.copy_options(this->widget_->get_options());
}
LvSelectable *widget_;
lv_anim_enable_t anim_;

View File

@@ -56,7 +56,7 @@ void MCP23016::pin_mode(uint8_t pin, gpio::Flags flags) {
this->update_reg_(pin, false, iodir);
}
}
float MCP23016::get_setup_priority() const { return setup_priority::IO; }
float MCP23016::get_setup_priority() const { return setup_priority::HARDWARE; }
bool MCP23016::read_reg_(uint8_t reg, uint8_t *value) {
if (this->is_failed())
return false;

View File

@@ -3,9 +3,9 @@
namespace esphome {
namespace select {
void SelectTraits::set_options(std::vector<std::string> options) { this->options_ = std::move(options); }
void SelectTraits::set_options(std::initializer_list<std::string> options) { this->options_ = options; }
const std::vector<std::string> &SelectTraits::get_options() const { return this->options_; }
const FixedVector<std::string> &SelectTraits::get_options() const { return this->options_; }
} // namespace select
} // namespace esphome

View File

@@ -1,18 +1,21 @@
#pragma once
#include <vector>
#include <string>
#include <initializer_list>
#include "esphome/core/helpers.h"
namespace esphome {
namespace select {
class SelectTraits {
public:
void set_options(std::vector<std::string> options);
const std::vector<std::string> &get_options() const;
void set_options(std::initializer_list<std::string> options);
const FixedVector<std::string> &get_options() const;
/// Copy options from another SelectTraits (for copy_select, lvgl)
void copy_options(const FixedVector<std::string> &other) { this->options_.copy_from(other); }
protected:
std::vector<std::string> options_;
FixedVector<std::string> options_;
};
} // namespace select
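
The two entry points match the two kinds of call sites touched by this PR: generated code passes a brace list, while copy_select and lvgl duplicate an existing FixedVector. A rough sketch of both paths (option strings are illustrative):

esphome::select::SelectTraits traits;
traits.set_options({"Low", "Medium", "High"});   // codegen path: initializer_list -> FixedVector

esphome::select::SelectTraits mirrored;
mirrored.copy_options(traits.get_options());     // copy_select / lvgl path: explicit, opt-in deep copy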

View File

@@ -28,6 +28,8 @@ from esphome.const import (
CONF_ON_RAW_VALUE,
CONF_ON_VALUE,
CONF_ON_VALUE_RANGE,
CONF_OPTIMISTIC,
CONF_PERIOD,
CONF_QUANTILE,
CONF_SEND_EVERY,
CONF_SEND_FIRST_AT,
@@ -644,10 +646,29 @@ async def throttle_with_priority_filter_to_code(config, filter_id):
return cg.new_Pvariable(filter_id, config[CONF_TIMEOUT], template_)
HEARTBEAT_SCHEMA = cv.Schema(
{
cv.Required(CONF_PERIOD): cv.positive_time_period_milliseconds,
cv.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
}
)
@FILTER_REGISTRY.register(
"heartbeat", HeartbeatFilter, cv.positive_time_period_milliseconds
"heartbeat",
HeartbeatFilter,
cv.Any(
cv.positive_time_period_milliseconds,
HEARTBEAT_SCHEMA,
),
)
async def heartbeat_filter_to_code(config, filter_id):
if isinstance(config, dict):
var = cg.new_Pvariable(filter_id, config[CONF_PERIOD])
await cg.register_component(var, {})
cg.add(var.set_optimistic(config[CONF_OPTIMISTIC]))
return var
var = cg.new_Pvariable(filter_id, config)
await cg.register_component(var, {})
return var

View File

@@ -313,7 +313,7 @@ optional<float> DeltaFilter::new_value(float value) {
}
// OrFilter
OrFilter::OrFilter(std::vector<Filter *> filters) : filters_(std::move(filters)), phi_(this) {}
OrFilter::OrFilter(std::initializer_list<Filter *> filters) : filters_(filters), phi_(this) {}
OrFilter::PhiNode::PhiNode(OrFilter *or_parent) : or_parent_(or_parent) {}
optional<float> OrFilter::PhiNode::new_value(float value) {
@@ -326,14 +326,14 @@ optional<float> OrFilter::PhiNode::new_value(float value) {
}
optional<float> OrFilter::new_value(float value) {
this->has_value_ = false;
for (Filter *filter : this->filters_)
for (auto *filter : this->filters_)
filter->input(value);
return {};
}
void OrFilter::initialize(Sensor *parent, Filter *next) {
Filter::initialize(parent, next);
for (Filter *filter : this->filters_) {
for (auto *filter : this->filters_) {
filter->initialize(parent, &this->phi_);
}
this->phi_.initialize(parent, nullptr);
@@ -372,8 +372,12 @@ optional<float> HeartbeatFilter::new_value(float value) {
this->last_input_ = value;
this->has_value_ = true;
if (this->optimistic_) {
return value;
}
return {};
}
void HeartbeatFilter::setup() {
this->set_interval("heartbeat", this->time_period_, [this]() {
ESP_LOGVV(TAG, "HeartbeatFilter(%p)::interval(has_value=%s, last_input=%f)", this, YESNO(this->has_value_),
@@ -384,20 +388,27 @@ void HeartbeatFilter::setup() {
this->output(this->last_input_);
});
}
float HeartbeatFilter::get_setup_priority() const { return setup_priority::HARDWARE; }
CalibrateLinearFilter::CalibrateLinearFilter(std::initializer_list<std::array<float, 3>> linear_functions)
: linear_functions_(linear_functions) {}
optional<float> CalibrateLinearFilter::new_value(float value) {
for (std::array<float, 3> f : this->linear_functions_) {
for (const auto &f : this->linear_functions_) {
if (!std::isfinite(f[2]) || value < f[2])
return (value * f[0]) + f[1];
}
return NAN;
}
CalibratePolynomialFilter::CalibratePolynomialFilter(std::initializer_list<float> coefficients)
: coefficients_(coefficients) {}
optional<float> CalibratePolynomialFilter::new_value(float value) {
float res = 0.0f;
float x = 1.0f;
for (float coefficient : this->coefficients_) {
for (const auto &coefficient : this->coefficients_) {
res += x * coefficient;
x *= value;
}

View File

@@ -396,15 +396,16 @@ class HeartbeatFilter : public Filter, public Component {
explicit HeartbeatFilter(uint32_t time_period);
void setup() override;
optional<float> new_value(float value) override;
float get_setup_priority() const override;
void set_optimistic(bool optimistic) { this->optimistic_ = optimistic; }
protected:
uint32_t time_period_;
float last_input_;
bool has_value_{false};
bool optimistic_{false};
};
class DeltaFilter : public Filter {
@@ -422,7 +423,7 @@ class DeltaFilter : public Filter {
class OrFilter : public Filter {
public:
explicit OrFilter(std::vector<Filter *> filters);
explicit OrFilter(std::initializer_list<Filter *> filters);
void initialize(Sensor *parent, Filter *next) override;
@@ -438,28 +439,27 @@ class OrFilter : public Filter {
OrFilter *or_parent_;
};
std::vector<Filter *> filters_;
FixedVector<Filter *> filters_;
PhiNode phi_;
bool has_value_{false};
};
class CalibrateLinearFilter : public Filter {
public:
CalibrateLinearFilter(std::vector<std::array<float, 3>> linear_functions)
: linear_functions_(std::move(linear_functions)) {}
explicit CalibrateLinearFilter(std::initializer_list<std::array<float, 3>> linear_functions);
optional<float> new_value(float value) override;
protected:
std::vector<std::array<float, 3>> linear_functions_;
FixedVector<std::array<float, 3>> linear_functions_;
};
class CalibratePolynomialFilter : public Filter {
public:
CalibratePolynomialFilter(std::vector<float> coefficients) : coefficients_(std::move(coefficients)) {}
explicit CalibratePolynomialFilter(std::initializer_list<float> coefficients);
optional<float> new_value(float value) override;
protected:
std::vector<float> coefficients_;
FixedVector<float> coefficients_;
};
class ClampFilter : public Filter {

View File

@@ -62,7 +62,8 @@ optional<std::string> AppendFilter::new_value(std::string value) { return value
optional<std::string> PrependFilter::new_value(std::string value) { return this->prefix_ + value; }
// Substitute
SubstituteFilter::SubstituteFilter(std::initializer_list<Substitution> substitutions) : substitutions_(substitutions) {}
SubstituteFilter::SubstituteFilter(const std::initializer_list<Substitution> &substitutions)
: substitutions_(substitutions) {}
optional<std::string> SubstituteFilter::new_value(std::string value) {
std::size_t pos;
@@ -74,7 +75,7 @@ optional<std::string> SubstituteFilter::new_value(std::string value) {
}
// Map
MapFilter::MapFilter(std::initializer_list<Substitution> mappings) : mappings_(mappings) {}
MapFilter::MapFilter(const std::initializer_list<Substitution> &mappings) : mappings_(mappings) {}
optional<std::string> MapFilter::new_value(std::string value) {
for (const auto &mapping : this->mappings_) {

View File

@@ -102,7 +102,7 @@ struct Substitution {
/// A simple filter that replaces a substring with another substring
class SubstituteFilter : public Filter {
public:
explicit SubstituteFilter(std::initializer_list<Substitution> substitutions);
explicit SubstituteFilter(const std::initializer_list<Substitution> &substitutions);
optional<std::string> new_value(std::string value) override;
protected:
@@ -135,7 +135,7 @@ class SubstituteFilter : public Filter {
*/
class MapFilter : public Filter {
public:
explicit MapFilter(std::initializer_list<Substitution> mappings);
explicit MapFilter(const std::initializer_list<Substitution> &mappings);
optional<std::string> new_value(std::string value) override;
protected:

View File

@@ -378,14 +378,18 @@ async def to_code(config):
# Track if any network uses Enterprise authentication
has_eap = False
def add_sta(ap, network):
ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP))
cg.add(var.add_sta(wifi_network(network, ap, ip_config)))
# Build all WiFiAP objects
networks = config.get(CONF_NETWORKS, [])
if networks:
wifi_aps = []
for network in networks:
if CONF_EAP in network:
has_eap = True
ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP))
wifi_aps.append(wifi_network(network, WiFiAP(), ip_config))
for network in config.get(CONF_NETWORKS, []):
if CONF_EAP in network:
has_eap = True
cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network)
# Set all WiFi networks at once
cg.add(var.set_stas(wifi_aps))
if CONF_AP in config:
conf = config[CONF_AP]

View File

@@ -330,11 +330,8 @@ float WiFiComponent::get_loop_priority() const {
return 10.0f; // before other loop components
}
void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); }
void WiFiComponent::set_sta(const WiFiAP &ap) {
this->clear_sta();
this->add_sta(ap);
}
void WiFiComponent::set_stas(const std::initializer_list<WiFiAP> &aps) { this->sta_ = aps; }
void WiFiComponent::set_sta(const WiFiAP &ap) { this->set_stas({ap}); }
void WiFiComponent::clear_sta() { this->sta_.clear(); }
void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &password) {
SavedWifiSettings save{}; // zero-initialized - all bytes set to \0, guaranteeing null termination

View File

@@ -219,7 +219,7 @@ class WiFiComponent : public Component {
void set_sta(const WiFiAP &ap);
WiFiAP get_sta() { return this->selected_ap_; }
void add_sta(const WiFiAP &ap);
void set_stas(const std::initializer_list<WiFiAP> &aps);
void clear_sta();
#ifdef USE_WIFI_AP
@@ -393,7 +393,7 @@ class WiFiComponent : public Component {
#endif
std::string use_address_;
std::vector<WiFiAP> sta_;
FixedVector<WiFiAP> sta_;
std::vector<WiFiSTAPriority> sta_priorities_;
wifi_scan_vector_t<WiFiScanResult> scan_result_;
WiFiAP selected_ap_;
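
A hedged sketch of the new API from the C++ side (SSIDs and variable names are illustrative; in a real build the generated main.cpp configures each WiFiAP through its existing setters before handing the whole list over in one call):

esphome::wifi::WiFiAP primary;
primary.set_ssid("iot-main");
esphome::wifi::WiFiAP fallback;
fallback.set_ssid("iot-fallback");
// One assignment into FixedVector<WiFiAP>, replacing the previous per-network add_sta() calls
esphome::wifi::global_wifi_component->set_stas({primary, fallback});

Per the diff above, set_sta(ap) now simply forwards to set_stas({ap}).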

View File

@@ -471,6 +471,7 @@ CONF_IMPORT_REACTIVE_ENERGY = "import_reactive_energy"
CONF_INC_PIN = "inc_pin"
CONF_INCLUDE_INTERNAL = "include_internal"
CONF_INCLUDES = "includes"
CONF_INCLUDES_C = "includes_c"
CONF_INDEX = "index"
CONF_INDOOR = "indoor"
CONF_INFRARED = "infrared"

View File

@@ -709,15 +709,6 @@ class EsphomeCore:
def relative_piolibdeps_path(self, *path: str | Path) -> Path:
return self.relative_build_path(".piolibdeps", *path)
@property
def platformio_cache_dir(self) -> str:
"""Get the PlatformIO cache directory path."""
# Check if running in Docker/HA addon with custom cache dir
if (cache_dir := os.environ.get("PLATFORMIO_CACHE_DIR")) and cache_dir.strip():
return cache_dir
# Default PlatformIO cache location
return os.path.expanduser("~/.platformio/.cache")
@property
def firmware_bin(self) -> Path:
if self.is_libretiny:

View File

@@ -21,6 +21,7 @@ from esphome.const import (
CONF_FRIENDLY_NAME,
CONF_ID,
CONF_INCLUDES,
CONF_INCLUDES_C,
CONF_LIBRARIES,
CONF_MIN_VERSION,
CONF_NAME,
@@ -227,6 +228,7 @@ CONFIG_SCHEMA = cv.All(
}
),
cv.Optional(CONF_INCLUDES, default=[]): cv.ensure_list(valid_include),
cv.Optional(CONF_INCLUDES_C, default=[]): cv.ensure_list(valid_include),
cv.Optional(CONF_LIBRARIES, default=[]): cv.ensure_list(cv.string_strict),
cv.Optional(CONF_NAME_ADD_MAC_SUFFIX, default=False): cv.boolean,
cv.Optional(CONF_DEBUG_SCHEDULER, default=False): cv.boolean,
@@ -302,6 +304,17 @@ def _list_target_platforms():
return target_platforms
def _sort_includes_by_type(includes: list[str]) -> tuple[list[str], list[str]]:
system_includes = []
other_includes = []
for include in includes:
if include.startswith("<") and include.endswith(">"):
system_includes.append(include)
else:
other_includes.append(include)
return system_includes, other_includes
def preload_core_config(config, result) -> str:
with cv.prepend_path(CONF_ESPHOME):
conf = PRELOAD_CONFIG_SCHEMA(config[CONF_ESPHOME])
@@ -339,7 +352,7 @@ def preload_core_config(config, result) -> str:
return target_platforms[0]
def include_file(path: Path, basename: Path):
def include_file(path: Path, basename: Path, is_c_header: bool = False):
parts = basename.parts
dst = CORE.relative_src_path(*parts)
copy_file_if_changed(path, dst)
@@ -347,7 +360,14 @@ def include_file(path: Path, basename: Path):
ext = path.suffix
if ext in [".h", ".hpp", ".tcc"]:
# Header, add include statement
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
if is_c_header:
# Wrap in extern "C" block for C headers
cg.add_global(
cg.RawStatement(f'extern "C" {{\n #include "{basename}"\n}}')
)
else:
# Regular include
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
ARDUINO_GLUE_CODE = """\
@@ -377,7 +397,7 @@ async def add_arduino_global_workaround():
@coroutine_with_priority(CoroPriority.FINAL)
async def add_includes(includes: list[str]) -> None:
async def add_includes(includes: list[str], is_c_header: bool = False) -> None:
# Add includes at the very end, so that the included files can access global variables
for include in includes:
path = CORE.relative_config_path(include)
@@ -385,11 +405,11 @@ async def add_includes(includes: list[str]) -> None:
# Directory, copy tree
for p in walk_files(path):
basename = p.relative_to(path.parent)
include_file(p, basename)
include_file(p, basename, is_c_header)
else:
# Copy file
basename = Path(path.name)
include_file(path, basename)
include_file(path, basename, is_c_header)
@coroutine_with_priority(CoroPriority.FINAL)
@@ -494,19 +514,25 @@ async def to_code(config: ConfigType) -> None:
CORE.add_job(add_arduino_global_workaround)
if config[CONF_INCLUDES]:
# Get the <...> includes
system_includes = []
other_includes = []
for include in config[CONF_INCLUDES]:
if include.startswith("<") and include.endswith(">"):
system_includes.append(include)
else:
other_includes.append(include)
system_includes, other_includes = _sort_includes_by_type(config[CONF_INCLUDES])
# <...> includes should be at the start
for include in system_includes:
cg.add_global(cg.RawStatement(f"#include {include}"), prepend=True)
# Other includes should be at the end
CORE.add_job(add_includes, other_includes)
CORE.add_job(add_includes, other_includes, False)
if config[CONF_INCLUDES_C]:
system_includes, other_includes = _sort_includes_by_type(
config[CONF_INCLUDES_C]
)
# <...> includes should be at the start
for include in system_includes:
cg.add_global(
cg.RawStatement(f'extern "C" {{\n #include {include}\n}}'),
prepend=True,
)
# Other includes should be at the end
CORE.add_job(add_includes, other_includes, True)
if project_conf := config.get(CONF_PROJECT):
cg.add_define("ESPHOME_PROJECT_NAME", project_conf[CONF_NAME])

View File

@@ -44,6 +44,7 @@
#define USE_GRAPHICAL_DISPLAY_MENU
#define USE_HOMEASSISTANT_TIME
#define USE_HTTP_REQUEST_OTA_WATCHDOG_TIMEOUT 8000 // NOLINT
#define USE_IMPROV_SERIAL_NEXT_URL
#define USE_JSON
#define USE_LIGHT
#define USE_LOCK
@@ -186,6 +187,7 @@
#define USE_ESP32_CAMERA_JPEG_ENCODER
#define USE_I2C
#define USE_IMPROV
#define USE_ESP32_IMPROV_NEXT_URL
#define USE_MICROPHONE
#define USE_PSRAM
#define USE_SOCKET_IMPL_BSD_SOCKETS

View File

@@ -414,8 +414,10 @@ int8_t step_to_accuracy_decimals(float step) {
return str.length() - dot_pos - 1;
}
// Store BASE64 characters as array - automatically placed in flash/ROM on embedded platforms
static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
// Use C-style string constant to store in ROM instead of RAM (saves 24 bytes)
static constexpr const char *BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz"
"0123456789+/";
// Helper function to find the index of a base64 character in the lookup table.
// Returns the character's position (0-63) if found, or 0 if not found.
@@ -425,8 +427,8 @@ static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr
// stops processing at the first invalid character due to the is_base64() check in its
// while loop condition, making this edge case harmless in practice.
static inline uint8_t base64_find_char(char c) {
const void *ptr = memchr(BASE64_CHARS, c, sizeof(BASE64_CHARS));
return ptr ? (static_cast<const char *>(ptr) - BASE64_CHARS) : 0;
const char *pos = strchr(BASE64_CHARS, c);
return pos ? (pos - BASE64_CHARS) : 0;
}
static inline bool is_base64(char c) { return (isalnum(c) || (c == '+') || (c == '/')); }
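
Once BASE64_CHARS is a pointer constant rather than an array, sizeof() no longer gives the table length, so the lookup switches from memchr to strchr, which stops at the NUL terminator; the is_base64() guard already filters out '\0', so the terminator can never be matched in practice. A standalone sanity check of the same lookup (hypothetical test program, not part of the PR):

#include <cstdio>
#include <cstring>

static const char *BASE64_CHARS =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    "abcdefghijklmnopqrstuvwxyz"
    "0123456789+/";

static unsigned char base64_find_char(char c) {
  const char *pos = std::strchr(BASE64_CHARS, c);
  return pos ? static_cast<unsigned char>(pos - BASE64_CHARS) : 0;
}

int main() {
  // Expected output: 0 26 63
  std::printf("%u %u %u\n", base64_find_char('A'), base64_find_char('a'), base64_find_char('/'));
}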

View File

@@ -143,9 +143,6 @@ template<typename T, size_t N> class StaticVector {
size_t size() const { return count_; }
bool empty() const { return count_ == 0; }
// Direct access to size counter for efficient in-place construction
size_t &count() { return count_; }
T &operator[](size_t i) { return data_[i]; }
const T &operator[](size_t i) const { return data_[i]; }
@@ -197,12 +194,8 @@ template<typename T> class FixedVector {
size_ = 0;
}
public:
FixedVector() = default;
/// Constructor from initializer list - allocates exact size needed
/// This enables brace initialization: FixedVector<int> v = {1, 2, 3};
FixedVector(std::initializer_list<T> init_list) {
// Helper to assign from initializer list (shared by constructor and assignment operator)
void assign_from_initializer_list_(std::initializer_list<T> init_list) {
init(init_list.size());
size_t idx = 0;
for (const auto &item : init_list) {
@@ -212,9 +205,17 @@ template<typename T> class FixedVector {
size_ = init_list.size();
}
public:
FixedVector() = default;
/// Constructor from initializer list - allocates exact size needed
/// This enables brace initialization: FixedVector<int> v = {1, 2, 3};
FixedVector(std::initializer_list<T> init_list) { assign_from_initializer_list_(init_list); }
~FixedVector() { cleanup_(); }
// Disable copy operations (avoid accidental expensive copies)
// Use copy_from() for explicit copying when needed (e.g., copy_select)
FixedVector(const FixedVector &) = delete;
FixedVector &operator=(const FixedVector &) = delete;
@@ -237,6 +238,28 @@ template<typename T> class FixedVector {
return *this;
}
/// Assignment from initializer list - avoids temporary and move overhead
/// This enables: FixedVector<int> v; v = {1, 2, 3};
FixedVector &operator=(std::initializer_list<T> init_list) {
cleanup_();
reset_();
assign_from_initializer_list_(init_list);
return *this;
}
/// Explicitly copy another FixedVector
/// This method exists instead of operator= to make copying intentional and visible.
/// Copying is expensive on embedded systems, so we require explicit opt-in.
/// Use cases: copy_select (copying source options), lvgl (copying widget options)
void copy_from(const FixedVector &other) {
cleanup_();
reset_();
init(other.size());
for (const auto &item : other) {
push_back(item);
}
}
// Allocate capacity - can be called multiple times to reinit
void init(size_t n) {
cleanup_();
@@ -295,6 +318,11 @@ template<typename T> class FixedVector {
return data_[size_ - 1];
}
/// Access first element (no bounds checking - matches std::vector behavior)
/// Caller must ensure vector is not empty (size() > 0)
T &front() { return data_[0]; }
const T &front() const { return data_[0]; }
/// Access last element (no bounds checking - matches std::vector behavior)
/// Caller must ensure vector is not empty (size() > 0)
T &back() { return data_[size_ - 1]; }
@@ -308,6 +336,12 @@ template<typename T> class FixedVector {
T &operator[](size_t i) { return data_[i]; }
const T &operator[](size_t i) const { return data_[i]; }
/// Access element with bounds checking (matches std::vector behavior)
/// Returns reference to element at index i
/// Behavior for out of bounds access matches std::vector::at() (undefined on embedded)
T &at(size_t i) { return data_[i]; }
const T &at(size_t i) const { return data_[i]; }
// Iterator support for range-based for loops
T *begin() { return data_; }
T *end() { return data_ + size_; }
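
Taken together, the expanded FixedVector surface looks roughly like this (a sketch against the header as patched above; values are illustrative):

esphome::FixedVector<int> v = {1, 2, 3};  // exact-size heap allocation, no growth slack
v = {4, 5};                               // new: assignment from initializer_list reallocates to the new size

esphome::FixedVector<int> w;
w.copy_from(v);                           // copying is explicit and opt-in; copy ctor/assignment stay deleted

int first = w.front();                    // new accessors mirroring std::vector
int last = w.back();
int second = w.at(1);                     // note: no bounds check is actually performed on embedded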

View File

@@ -95,9 +95,10 @@ class Scheduler {
} name_;
uint32_t interval;
// Split time to handle millis() rollover. The scheduler combines the 32-bit millis()
// with a 16-bit rollover counter to create a 48-bit time space (stored as 64-bit
// for compatibility). With 49.7 days per 32-bit rollover, the 16-bit counter
// supports 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
// with a 16-bit rollover counter to create a 48-bit time space (using 32+16 bits).
// This is intentionally limited to 48 bits, not stored as a full 64-bit value.
// With 49.7 days per 32-bit rollover, the 16-bit counter supports
// 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
// even when devices run for months. Split into two fields for better memory
// alignment on 32-bit systems.
uint32_t next_execution_low_; // Lower 32 bits of execution time (millis value)
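
The comment describes a 48-bit timestamp assembled from the 32-bit millis() value plus a 16-bit rollover counter. A hedged sketch of the reconstruction (only next_execution_low_ appears in this hunk; the 16-bit counter's field name below is hypothetical):

// upper 16 bits: rollover counter; lower 32 bits: millis() value at which the item runs
uint64_t next_execution = (static_cast<uint64_t>(next_execution_high_) << 32) | next_execution_low_;

At 2³² ms ≈ 49.7 days per rollover, the extra 16 bits give 49.7 days × 65536 ≈ 8900 years of monotonically increasing schedule time, matching the figure in the comment.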

View File

@@ -145,16 +145,7 @@ def run_compile(config, verbose):
args = []
if CONF_COMPILE_PROCESS_LIMIT in config[CONF_ESPHOME]:
args += [f"-j{config[CONF_ESPHOME][CONF_COMPILE_PROCESS_LIMIT]}"]
result = run_platformio_cli_run(config, verbose, *args)
# Run memory analysis if enabled
if config.get(CONF_ESPHOME, {}).get("analyze_memory", False):
try:
analyze_memory_usage(config)
except Exception as e:
_LOGGER.warning("Failed to analyze memory usage: %s", e)
return result
return run_platformio_cli_run(config, verbose, *args)
def _run_idedata(config):
@@ -403,74 +394,3 @@ class IDEData:
if path.endswith(".exe")
else f"{path[:-3]}readelf"
)
def analyze_memory_usage(config: dict[str, Any]) -> None:
"""Analyze memory usage by component after compilation."""
# Lazy import to avoid overhead when not needed
from esphome.analyze_memory.cli import MemoryAnalyzerCLI
from esphome.analyze_memory.helpers import get_esphome_components
idedata = get_idedata(config)
# Get paths to tools
elf_path = idedata.firmware_elf_path
objdump_path = idedata.objdump_path
readelf_path = idedata.readelf_path
# Debug logging
_LOGGER.debug("ELF path from idedata: %s", elf_path)
# Check if file exists
if not Path(elf_path).exists():
# Try alternate path
alt_path = Path(CORE.relative_build_path(".pioenvs", CORE.name, "firmware.elf"))
if alt_path.exists():
elf_path = str(alt_path)
_LOGGER.debug("Using alternate ELF path: %s", elf_path)
else:
_LOGGER.warning("ELF file not found at %s or %s", elf_path, alt_path)
return
# Extract external components from config
external_components = set()
# Get the list of built-in ESPHome components
builtin_components = get_esphome_components()
# Special non-component keys that appear in configs
NON_COMPONENT_KEYS = {
CONF_ESPHOME,
"substitutions",
"packages",
"globals",
"<<",
}
# Check all top-level keys in config
for key in config:
if key not in builtin_components and key not in NON_COMPONENT_KEYS:
# This is an external component
external_components.add(key)
_LOGGER.debug("Detected external components: %s", external_components)
# Create analyzer and run analysis
analyzer = MemoryAnalyzerCLI(
elf_path, objdump_path, readelf_path, external_components
)
analyzer.analyze()
# Generate and print report
report = analyzer.generate_report()
_LOGGER.info("\n%s", report)
# Optionally save to file
if config.get(CONF_ESPHOME, {}).get("memory_report_file"):
report_file = Path(config[CONF_ESPHOME]["memory_report_file"])
if report_file.suffix == ".json":
report_file.write_text(analyzer.to_json())
_LOGGER.info("Memory report saved to %s", report_file)
else:
report_file.write_text(report)
_LOGGER.info("Memory report saved to %s", report_file)

View File

@@ -43,7 +43,6 @@ from enum import StrEnum
from functools import cache
import json
import os
from pathlib import Path
import subprocess
import sys
from typing import Any
@@ -53,10 +52,14 @@ from helpers import (
CPP_FILE_EXTENSIONS,
PYTHON_FILE_EXTENSIONS,
changed_files,
filter_component_files,
get_all_dependencies,
get_changed_components,
get_component_from_path,
get_component_test_files,
get_components_from_integration_fixtures,
get_components_with_dependencies,
git_ls_files,
parse_test_filename,
root_path,
)
@@ -162,6 +165,26 @@ def should_run_integration_tests(branch: str | None = None) -> bool:
return False
@cache
def _is_clang_tidy_full_scan() -> bool:
"""Check if clang-tidy configuration changed (requires full scan).
Returns:
True if full scan is needed (hash changed), False otherwise.
"""
try:
result = subprocess.run(
[os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
capture_output=True,
check=False,
)
# Exit 0 means hash changed (full scan needed)
return result.returncode == 0
except Exception:
# If hash check fails, run full scan to be safe
return True
def should_run_clang_tidy(branch: str | None = None) -> bool:
"""Determine if clang-tidy should run based on changed files.
@@ -198,17 +221,7 @@ def should_run_clang_tidy(branch: str | None = None) -> bool:
True if clang-tidy should run, False otherwise.
"""
# First check if clang-tidy configuration changed (full scan needed)
try:
result = subprocess.run(
[os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"],
capture_output=True,
check=False,
)
# Exit 0 means hash changed (full scan needed)
if result.returncode == 0:
return True
except Exception:
# If hash check fails, run clang-tidy to be safe
if _is_clang_tidy_full_scan():
return True
# Check if .clang-tidy.hash file itself was changed
@@ -550,16 +563,29 @@ def main() -> None:
run_python_linters = should_run_python_linters(args.branch)
changed_cpp_file_count = count_changed_cpp_files(args.branch)
# Get both directly changed and all changed components (with dependencies) in one call
script_path = Path(__file__).parent / "list-components.py"
cmd = [sys.executable, str(script_path), "--changed-with-deps"]
if args.branch:
cmd.extend(["-b", args.branch])
# Get changed components
# get_changed_components() returns:
# None: Core files changed (need full scan)
# []: No components changed
# [list]: Changed components (already includes dependencies)
changed_components_result = get_changed_components()
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
component_data = json.loads(result.stdout)
directly_changed_components = component_data["directly_changed"]
changed_components = component_data["all_changed"]
if changed_components_result is None:
# Core files changed - will trigger full clang-tidy scan
# No specific components to test
changed_components = []
directly_changed_components = []
is_core_change = True
else:
# Get both directly changed and all changed (with dependencies)
changed = changed_files(args.branch)
component_files = [f for f in changed if filter_component_files(f)]
directly_changed_components = get_components_with_dependencies(
component_files, False
)
changed_components = get_components_with_dependencies(component_files, True)
is_core_change = False
# Filter to only components that have test files
# Components without tests shouldn't generate CI test jobs
@@ -570,11 +596,11 @@ def main() -> None:
# Get directly changed components with tests (for isolated testing)
# These will be tested WITHOUT --testing-mode in CI to enable full validation
# (pin conflicts, etc.) since they contain the actual changes being reviewed
directly_changed_with_tests = [
directly_changed_with_tests = {
component
for component in directly_changed_components
if _component_has_tests(component)
]
}
# Get dependency-only components (for grouped testing)
dependency_only_components = [
@@ -586,13 +612,38 @@ def main() -> None:
# Detect components for memory impact analysis (merged config)
memory_impact = detect_memory_impact_config(args.branch)
# Determine clang-tidy mode based on actual files that will be checked
if run_clang_tidy:
if changed_cpp_file_count < CLANG_TIDY_SPLIT_THRESHOLD:
clang_tidy_mode = "nosplit"
else:
# Full scan needed if: hash changed OR core files changed
is_full_scan = _is_clang_tidy_full_scan() or is_core_change
if is_full_scan:
# Full scan checks all files - always use split mode for efficiency
clang_tidy_mode = "split"
files_to_check_count = -1 # Sentinel value for "all files"
else:
# Targeted scan - calculate actual files that will be checked
# This accounts for component dependencies, not just directly changed files
if changed_components:
# Count C++ files in all changed components (including dependencies)
all_cpp_files = list(git_ls_files(["*.cpp"]).keys())
component_set = set(changed_components)
files_to_check_count = sum(
1
for f in all_cpp_files
if get_component_from_path(f) in component_set
)
else:
# If no components changed, use the simple count of changed C++ files
files_to_check_count = changed_cpp_file_count
if files_to_check_count < CLANG_TIDY_SPLIT_THRESHOLD:
clang_tidy_mode = "nosplit"
else:
clang_tidy_mode = "split"
else:
clang_tidy_mode = "disabled"
files_to_check_count = 0
# Build output
output: dict[str, Any] = {
@@ -603,7 +654,7 @@ def main() -> None:
"python_linters": run_python_linters,
"changed_components": changed_components,
"changed_components_with_tests": changed_components_with_tests,
"directly_changed_components_with_tests": directly_changed_with_tests,
"directly_changed_components_with_tests": list(directly_changed_with_tests),
"dependency_only_components_with_tests": dependency_only_components,
"component_test_count": len(changed_components_with_tests),
"directly_changed_count": len(directly_changed_with_tests),

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
from collections.abc import Callable
from functools import cache
import json
import os
@@ -7,6 +8,7 @@ import os.path
from pathlib import Path
import re
import subprocess
import sys
import time
from typing import Any
@@ -304,7 +306,10 @@ def get_changed_components() -> list[str] | None:
for f in changed
)
if core_cpp_changed:
print("Core C++/header files changed - will run full clang-tidy scan")
print(
"Core C++/header files changed - will run full clang-tidy scan",
file=sys.stderr,
)
return None
# Use list-components.py to get changed components
@@ -318,7 +323,10 @@ def get_changed_components() -> list[str] | None:
return parse_list_components_output(result.stdout)
except subprocess.CalledProcessError:
# If the script fails, fall back to full scan
print("Could not determine changed components - will run full clang-tidy scan")
print(
"Could not determine changed components - will run full clang-tidy scan",
file=sys.stderr,
)
return None
@@ -370,14 +378,14 @@ def _filter_changed_ci(files: list[str]) -> list[str]:
if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH)
]
if not files:
print("No files changed")
print("No files changed", file=sys.stderr)
return files
# Scenario 3: Specific components changed
# Action: Check ALL files in each changed component
# Convert component list to set for O(1) lookups
component_set = set(components)
print(f"Changed components: {', '.join(sorted(components))}")
print(f"Changed components: {', '.join(sorted(components))}", file=sys.stderr)
# The 'files' parameter contains ALL files in the codebase that clang-tidy would check.
# We filter this down to only files in the changed components.
@@ -648,3 +656,220 @@ def get_components_from_integration_fixtures() -> set[str]:
components.add(item["platform"])
return components
def filter_component_files(file_path: str) -> bool:
"""Check if a file path is a component file.
Args:
file_path: Path to check
Returns:
True if the file is in a component directory
"""
return file_path.startswith("esphome/components/") or file_path.startswith(
"tests/components/"
)
def extract_component_names_from_files(files: list[str]) -> list[str]:
"""Extract unique component names from a list of file paths.
Args:
files: List of file paths
Returns:
List of unique component names (preserves order)
"""
return list(
dict.fromkeys(comp for file in files if (comp := get_component_from_path(file)))
)
def add_item_to_components_graph(
components_graph: dict[str, list[str]], parent: str, child: str
) -> None:
"""Add a dependency relationship to the components graph.
Args:
components_graph: Graph mapping parent components to their children
parent: Parent component name
child: Child component name (dependent)
"""
if not parent.startswith("__") and parent != child:
if parent not in components_graph:
components_graph[parent] = []
if child not in components_graph[parent]:
components_graph[parent].append(child)
def resolve_auto_load(
auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]],
config: dict | None = None,
) -> list[str]:
"""Resolve AUTO_LOAD to a list, handling callables with or without config parameter.
Args:
auto_load: The AUTO_LOAD value (list or callable)
config: Optional config to pass to callable AUTO_LOAD functions
Returns:
List of component names to auto-load
"""
if not callable(auto_load):
return auto_load
import inspect
if inspect.signature(auto_load).parameters:
return auto_load(config)
return auto_load()
def create_components_graph() -> dict[str, list[str]]:
"""Create a graph of component dependencies.
Returns:
Dictionary mapping parent components to their children (dependencies)
"""
from pathlib import Path
from esphome import const
from esphome.core import CORE
from esphome.loader import ComponentManifest, get_component, get_platform
# The root directory of the repo
root = Path(__file__).parent.parent
components_dir = root / "esphome" / "components"
# Fake some directory so that get_component works
CORE.config_path = root
# Various configuration to capture different outcomes used by `AUTO_LOAD` function.
KEY_CORE = const.KEY_CORE
KEY_TARGET_FRAMEWORK = const.KEY_TARGET_FRAMEWORK
KEY_TARGET_PLATFORM = const.KEY_TARGET_PLATFORM
PLATFORM_ESP32 = const.PLATFORM_ESP32
PLATFORM_ESP8266 = const.PLATFORM_ESP8266
TARGET_CONFIGURATIONS = [
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266},
]
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
components_graph = {}
platforms = []
components: list[tuple[ComponentManifest, str, Path]] = []
for path in components_dir.iterdir():
if not path.is_dir():
continue
if not (path / "__init__.py").is_file():
continue
name = path.name
comp = get_component(name)
if comp is None:
raise RuntimeError(
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
)
components.append((comp, name, path))
if comp.is_platform_component:
platforms.append(name)
platforms = set(platforms)
for comp, name, path in components:
for dependency in comp.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(comp.auto_load, config=None):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
for platform_path in path.iterdir():
platform_name = platform_path.stem
if platform_name == name or platform_name not in platforms:
continue
platform = get_platform(platform_name, name)
if platform is None:
continue
add_item_to_components_graph(components_graph, platform_name, name)
for dependency in platform.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(platform.auto_load, config={}):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
return components_graph
def find_children_of_component(
components_graph: dict[str, list[str]], component_name: str, depth: int = 0
) -> list[str]:
"""Find all components that depend on the given component (recursively).
Args:
components_graph: Graph mapping parent components to their children
component_name: Component name to find children for
depth: Current recursion depth (max 10)
Returns:
List of all dependent component names (deduplicated before returning)
"""
if component_name not in components_graph:
return []
children = []
for child in components_graph[component_name]:
children.append(child)
if depth < 10:
children.extend(
find_children_of_component(components_graph, child, depth + 1)
)
# Remove duplicate values
return list(set(children))
def get_components_with_dependencies(
files: list[str], get_dependencies: bool = False
) -> list[str]:
"""Get component names from files, optionally including their dependencies.
Args:
files: List of file paths
get_dependencies: If True, include all dependent components
Returns:
Sorted list of component names
"""
components = extract_component_names_from_files(files)
if get_dependencies:
components_graph = create_components_graph()
all_components = components.copy()
for c in components:
all_components.extend(find_children_of_component(components_graph, c))
# Remove duplicate values
all_changed_components = list(set(all_components))
return sorted(all_changed_components)
return sorted(components)

View File

@@ -1,24 +1,12 @@
#!/usr/bin/env python3
import argparse
from collections.abc import Callable
from pathlib import Path
import sys
from helpers import changed_files, get_component_from_path, git_ls_files
from esphome.const import (
KEY_CORE,
KEY_TARGET_FRAMEWORK,
KEY_TARGET_PLATFORM,
PLATFORM_ESP32,
PLATFORM_ESP8266,
from helpers import (
changed_files,
filter_component_files,
get_components_with_dependencies,
git_ls_files,
)
from esphome.core import CORE
from esphome.loader import ComponentManifest, get_component, get_platform
def filter_component_files(str):
return str.startswith("esphome/components/") | str.startswith("tests/components/")
def get_all_component_files() -> list[str]:
@@ -27,156 +15,6 @@ def get_all_component_files() -> list[str]:
return list(filter(filter_component_files, files))
def extract_component_names_array_from_files_array(files):
components = []
for file in files:
component_name = get_component_from_path(file)
if component_name and component_name not in components:
components.append(component_name)
return components
def add_item_to_components_graph(components_graph, parent, child):
if not parent.startswith("__") and parent != child:
if parent not in components_graph:
components_graph[parent] = []
if child not in components_graph[parent]:
components_graph[parent].append(child)
def resolve_auto_load(
auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]],
config: dict | None = None,
) -> list[str]:
"""Resolve AUTO_LOAD to a list, handling callables with or without config parameter.
Args:
auto_load: The AUTO_LOAD value (list or callable)
config: Optional config to pass to callable AUTO_LOAD functions
Returns:
List of component names to auto-load
"""
if not callable(auto_load):
return auto_load
import inspect
if inspect.signature(auto_load).parameters:
return auto_load(config)
return auto_load()
def create_components_graph():
# The root directory of the repo
root = Path(__file__).parent.parent
components_dir = root / "esphome" / "components"
# Fake some directory so that get_component works
CORE.config_path = root
# Various configuration to capture different outcomes used by `AUTO_LOAD` function.
TARGET_CONFIGURATIONS = [
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266},
]
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
components_graph = {}
platforms = []
components: list[tuple[ComponentManifest, str, Path]] = []
for path in components_dir.iterdir():
if not path.is_dir():
continue
if not (path / "__init__.py").is_file():
continue
name = path.name
comp = get_component(name)
if comp is None:
print(
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
)
sys.exit(1)
components.append((comp, name, path))
if comp.is_platform_component:
platforms.append(name)
platforms = set(platforms)
for comp, name, path in components:
for dependency in comp.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(comp.auto_load, config=None):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
for platform_path in path.iterdir():
platform_name = platform_path.stem
if platform_name == name or platform_name not in platforms:
continue
platform = get_platform(platform_name, name)
if platform is None:
continue
add_item_to_components_graph(components_graph, platform_name, name)
for dependency in platform.dependencies:
add_item_to_components_graph(
components_graph, dependency.split(".")[0], name
)
for target_config in TARGET_CONFIGURATIONS:
CORE.data[KEY_CORE] = target_config
for item in resolve_auto_load(platform.auto_load, config={}):
add_item_to_components_graph(components_graph, item, name)
# restore config
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
return components_graph
def find_children_of_component(components_graph, component_name, depth=0):
if component_name not in components_graph:
return []
children = []
for child in components_graph[component_name]:
children.append(child)
if depth < 10:
children.extend(
find_children_of_component(components_graph, child, depth + 1)
)
# Remove duplicate values
return list(set(children))
def get_components(files: list[str], get_dependencies: bool = False):
components = extract_component_names_array_from_files_array(files)
if get_dependencies:
components_graph = create_components_graph()
all_components = components.copy()
for c in components:
all_components.extend(find_children_of_component(components_graph, c))
# Remove duplicate values
all_changed_components = list(set(all_components))
return sorted(all_changed_components)
return sorted(components)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -251,8 +89,8 @@ def main():
# Return JSON with both directly changed and all changed components
import json
directly_changed = get_components(files, False)
all_changed = get_components(files, True)
directly_changed = get_components_with_dependencies(files, False)
all_changed = get_components_with_dependencies(files, True)
output = {
"directly_changed": directly_changed,
"all_changed": all_changed,
@@ -260,11 +98,11 @@ def main():
print(json.dumps(output))
elif args.changed_direct:
# Return only directly changed components (without dependencies)
for c in get_components(files, False):
for c in get_components_with_dependencies(files, False):
print(c)
else:
# Return all changed components (with dependencies) - default behavior
for c in get_components(files, args.changed):
for c in get_components_with_dependencies(files, args.changed):
print(c)

View File

@@ -966,11 +966,33 @@ def test_components(
# Find all component tests
all_tests = {}
for pattern in component_patterns:
# Skip empty patterns (happens when components list is empty string)
if not pattern:
continue
all_tests.update(find_component_tests(tests_dir, pattern, base_only))
# If no components found, build a reference configuration for baseline comparison
# Create a synthetic "empty" component test that will build just the base config
if not all_tests:
print(f"No components found matching: {component_patterns}")
return 1
print(
"Building reference configuration with no components for baseline comparison..."
)
# Create empty test files for each platform (or filtered platform)
reference_tests: list[Path] = []
for platform_name, base_file in platform_bases.items():
if platform_filter and not platform_name.startswith(platform_filter):
continue
# Create an empty test file named to match the platform
empty_test_file = build_dir / f"reference.{platform_name}.yaml"
empty_test_file.write_text(
"# Empty component test for baseline reference\n"
)
reference_tests.append(empty_test_file)
# Add to all_tests dict with component name "reference"
all_tests["reference"] = reference_tests
print(f"Found {len(all_tests)} components to test")

View File

@@ -37,3 +37,36 @@ binary_sensor:
format: "New state is %s"
args: ['x.has_value() ? ONOFF(x) : "Unknown"']
- binary_sensor.invalidate_state: some_binary_sensor
# Test autorepeat with default configuration (no timings)
- platform: template
id: autorepeat_default
name: "Autorepeat Default"
filters:
- autorepeat:
# Test autorepeat with single timing entry
- platform: template
id: autorepeat_single
name: "Autorepeat Single"
filters:
- autorepeat:
- delay: 2s
time_off: 200ms
time_on: 800ms
# Test autorepeat with three timing entries
- platform: template
id: autorepeat_multiple
name: "Autorepeat Multiple"
filters:
- autorepeat:
- delay: 500ms
time_off: 50ms
time_on: 950ms
- delay: 2s
time_off: 100ms
time_on: 900ms
- delay: 10s
time_off: 200ms
time_on: 800ms

View File

@@ -173,3 +173,66 @@ sensor:
timeout: 1000ms
value: [42.0]
- multiply: 2.0
# CalibrateLinearFilter - piecewise linear calibration
- platform: copy
source_id: source_sensor
name: "Calibrate Linear Two Points"
filters:
- calibrate_linear:
- 0.0 -> 0.0
- 100.0 -> 100.0
- platform: copy
source_id: source_sensor
name: "Calibrate Linear Multiple Segments"
filters:
- calibrate_linear:
- 0.0 -> 0.0
- 50.0 -> 55.0
- 100.0 -> 102.5
- platform: copy
source_id: source_sensor
name: "Calibrate Linear Least Squares"
filters:
- calibrate_linear:
method: least_squares
datapoints:
- 0.0 -> 0.0
- 50.0 -> 55.0
- 100.0 -> 102.5
# CalibratePolynomialFilter - polynomial calibration
- platform: copy
source_id: source_sensor
name: "Calibrate Polynomial Degree 2"
filters:
- calibrate_polynomial:
degree: 2
datapoints:
- 0.0 -> 0.0
- 50.0 -> 55.0
- 100.0 -> 102.5
- platform: copy
source_id: source_sensor
name: "Calibrate Polynomial Degree 3"
filters:
- calibrate_polynomial:
degree: 3
datapoints:
- 0.0 -> 0.0
- 25.0 -> 26.0
- 50.0 -> 55.0
- 100.0 -> 102.5
# OrFilter - filter branching
- platform: copy
source_id: source_sensor
name: "Or Filter with Multiple Branches"
filters:
- or:
- multiply: 2.0
- offset: 10.0
- lambda: return x * 3.0;

View File

@@ -101,6 +101,9 @@ sensor:
- filter_out: 10
- filter_out: !lambda return NAN;
- heartbeat: 5s
- heartbeat:
period: 5s
optimistic: true
- lambda: return x * (9.0/5.0) + 32.0;
- max:
window_size: 10

View File

@@ -12,5 +12,8 @@ esphome:
- logger.log: "Failed to connect to WiFi!"
wifi:
ssid: MySSID
password: password1
networks:
- ssid: MySSID
password: password1
- ssid: MySSID2
password: password2

View File

@@ -0,0 +1,112 @@
esphome:
name: host-climate-test
host:
api:
logger:
climate:
- platform: thermostat
id: dual_mode_thermostat
name: Dual-mode Thermostat
sensor: host_thermostat_temperature_sensor
humidity_sensor: host_thermostat_humidity_sensor
humidity_hysteresis: 1.0
min_cooling_off_time: 20s
min_cooling_run_time: 20s
max_cooling_run_time: 30s
supplemental_cooling_delta: 3.0
min_heating_off_time: 20s
min_heating_run_time: 20s
max_heating_run_time: 30s
supplemental_heating_delta: 3.0
min_fanning_off_time: 20s
min_fanning_run_time: 20s
min_idle_time: 10s
visual:
min_humidity: 20%
max_humidity: 70%
min_temperature: 15.0
max_temperature: 32.0
temperature_step: 0.1
default_preset: home
preset:
- name: "away"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
- name: "home"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
auto_mode:
- logger.log: "AUTO mode set"
heat_cool_mode:
- logger.log: "HEAT_COOL mode set"
cool_action:
- switch.turn_on: air_cond
supplemental_cooling_action:
- switch.turn_on: air_cond_2
heat_action:
- switch.turn_on: heater
supplemental_heating_action:
- switch.turn_on: heater_2
dry_action:
- switch.turn_on: air_cond
fan_only_action:
- switch.turn_on: fan_only
idle_action:
- switch.turn_off: air_cond
- switch.turn_off: air_cond_2
- switch.turn_off: heater
- switch.turn_off: heater_2
- switch.turn_off: fan_only
humidity_control_humidify_action:
- switch.turn_on: humidifier
humidity_control_off_action:
- switch.turn_off: humidifier
sensor:
- platform: template
id: host_thermostat_humidity_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 42.0;
update_interval: 0.1s
- platform: template
id: host_thermostat_temperature_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 22.0;
update_interval: 0.1s
switch:
- platform: template
id: air_cond
name: Air Conditioner
optimistic: true
- platform: template
id: air_cond_2
name: Air Conditioner 2
optimistic: true
- platform: template
id: fan_only
name: Fan
optimistic: true
- platform: template
id: heater
name: Heater
optimistic: true
- platform: template
id: heater_2
name: Heater 2
optimistic: true
- platform: template
id: dehumidifier
name: Dehumidifier
optimistic: true
- platform: template
id: humidifier
name: Humidifier
optimistic: true

View File

@@ -0,0 +1,108 @@
esphome:
name: host-climate-test
host:
api:
logger:
climate:
- platform: thermostat
id: dual_mode_thermostat
name: Dual-mode Thermostat
sensor: host_thermostat_temperature_sensor
humidity_sensor: host_thermostat_humidity_sensor
humidity_hysteresis: 1.0
min_cooling_off_time: 20s
min_cooling_run_time: 20s
max_cooling_run_time: 30s
supplemental_cooling_delta: 3.0
min_heating_off_time: 20s
min_heating_run_time: 20s
max_heating_run_time: 30s
supplemental_heating_delta: 3.0
min_fanning_off_time: 20s
min_fanning_run_time: 20s
min_idle_time: 10s
visual:
min_humidity: 20%
max_humidity: 70%
min_temperature: 15.0
max_temperature: 32.0
temperature_step: 0.1
default_preset: home
preset:
- name: "away"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
- name: "home"
default_target_temperature_low: 18.0
default_target_temperature_high: 24.0
auto_mode:
- logger.log: "AUTO mode set"
heat_cool_mode:
- logger.log: "HEAT_COOL mode set"
cool_action:
- switch.turn_on: air_cond
supplemental_cooling_action:
- switch.turn_on: air_cond_2
heat_action:
- switch.turn_on: heater
supplemental_heating_action:
- switch.turn_on: heater_2
dry_action:
- switch.turn_on: air_cond
fan_only_action:
- switch.turn_on: fan_only
idle_action:
- switch.turn_off: air_cond
- switch.turn_off: air_cond_2
- switch.turn_off: heater
- switch.turn_off: heater_2
- switch.turn_off: fan_only
humidity_control_humidify_action:
- switch.turn_on: humidifier
humidity_control_off_action:
- switch.turn_off: humidifier
sensor:
- platform: template
id: host_thermostat_humidity_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 42.0;
update_interval: 0.1s
- platform: template
id: host_thermostat_temperature_sensor
unit_of_measurement: °C
accuracy_decimals: 2
state_class: measurement
force_update: true
lambda: return 22.0;
update_interval: 0.1s
switch:
- platform: template
id: air_cond
name: Air Conditioner
optimistic: true
- platform: template
id: air_cond_2
name: Air Conditioner 2
optimistic: true
- platform: template
id: fan_only
name: Fan
optimistic: true
- platform: template
id: heater
name: Heater
optimistic: true
- platform: template
id: heater_2
name: Heater 2
optimistic: true
- platform: template
id: humidifier
name: Humidifier
optimistic: true

View File

@@ -210,7 +210,15 @@ sensor:
name: "Test Sensor 50"
lambda: return 50.0;
update_interval: 0.1s
# Temperature sensor for the thermostat
# Sensors for the thermostat
- platform: template
name: "Humidity Sensor"
id: humidity_sensor
lambda: return 35.0;
unit_of_measurement: "%"
device_class: humidity
state_class: measurement
update_interval: 5s
- platform: template
name: "Temperature Sensor"
id: temp_sensor
@@ -295,6 +303,11 @@ valve:
- logger.log: "Valve stopping"
output:
- platform: template
id: humidifier_output
type: binary
write_action:
- logger.log: "Humidifier output changed"
- platform: template
id: heater_output
type: binary
@@ -305,18 +318,31 @@ output:
type: binary
write_action:
- logger.log: "Cooler output changed"
- platform: template
id: fan_output
type: binary
write_action:
- logger.log: "Fan output changed"
climate:
- platform: thermostat
name: "Test Thermostat"
sensor: temp_sensor
humidity_sensor: humidity_sensor
default_preset: Home
on_boot_restore_from: default_preset
min_heating_off_time: 1s
min_heating_run_time: 1s
min_cooling_off_time: 1s
min_cooling_run_time: 1s
min_fan_mode_switching_time: 1s
min_idle_time: 1s
visual:
min_humidity: 20%
max_humidity: 70%
min_temperature: 15.0
max_temperature: 32.0
temperature_step: 0.1
heat_action:
- output.turn_on: heater_output
cool_action:
@@ -324,6 +350,14 @@ climate:
idle_action:
- output.turn_off: heater_output
- output.turn_off: cooler_output
humidity_control_humidify_action:
- output.turn_on: humidifier_output
humidity_control_off_action:
- output.turn_off: humidifier_output
fan_mode_auto_action:
- output.turn_off: fan_output
fan_mode_on_action:
- output.turn_on: fan_output
preset:
- name: Home
default_target_temperature_low: 20

View File

@@ -0,0 +1,49 @@
"""Integration test for Host mode with climate."""
from __future__ import annotations
import asyncio
import aioesphomeapi
from aioesphomeapi import ClimateAction, ClimateMode, ClimatePreset, EntityState
import pytest
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_host_mode_climate_basic_state(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test basic climate state reporting."""
loop = asyncio.get_running_loop()
async with run_compiled(yaml_config), api_client_connected() as client:
states: dict[int, EntityState] = {}
climate_future: asyncio.Future[EntityState] = loop.create_future()
def on_state(state: EntityState) -> None:
states[state.key] = state
if (
isinstance(state, aioesphomeapi.ClimateState)
and not climate_future.done()
):
climate_future.set_result(state)
client.subscribe_states(on_state)
try:
climate_state = await asyncio.wait_for(climate_future, timeout=5.0)
except TimeoutError:
pytest.fail("Climate state not received within 5 seconds")
assert isinstance(climate_state, aioesphomeapi.ClimateState)
assert climate_state.mode == ClimateMode.OFF
assert climate_state.action == ClimateAction.OFF
assert climate_state.current_temperature == 22.0
assert climate_state.target_temperature_low == 18.0
assert climate_state.target_temperature_high == 24.0
assert climate_state.preset == ClimatePreset.HOME
assert climate_state.current_humidity == 42.0
assert climate_state.target_humidity == 20.0

View File

@@ -0,0 +1,76 @@
"""Integration test for Host mode with climate."""
from __future__ import annotations
import asyncio
import aioesphomeapi
from aioesphomeapi import ClimateInfo, ClimateMode, EntityState
import pytest
from .state_utils import InitialStateHelper
from .types import APIClientConnectedFactory, RunCompiledFunction
@pytest.mark.asyncio
async def test_host_mode_climate_control(
yaml_config: str,
run_compiled: RunCompiledFunction,
api_client_connected: APIClientConnectedFactory,
) -> None:
"""Test climate mode control."""
loop = asyncio.get_running_loop()
async with run_compiled(yaml_config), api_client_connected() as client:
states: dict[int, EntityState] = {}
climate_future: asyncio.Future[EntityState] = loop.create_future()
def on_state(state: EntityState) -> None:
states[state.key] = state
if (
isinstance(state, aioesphomeapi.ClimateState)
and state.mode == ClimateMode.HEAT
and state.target_temperature_low == 21.5
and state.target_temperature_high == 26.5
and not climate_future.done()
):
climate_future.set_result(state)
# Get entities and set up state synchronization
entities, services = await client.list_entities_services()
initial_state_helper = InitialStateHelper(entities)
climate_infos = [e for e in entities if isinstance(e, ClimateInfo)]
assert len(climate_infos) >= 1, "Expected at least 1 climate entity"
# Subscribe with the wrapper that filters initial states
client.subscribe_states(initial_state_helper.on_state_wrapper(on_state))
# Wait for all initial states to be broadcast
try:
await initial_state_helper.wait_for_initial_states()
except TimeoutError:
pytest.fail("Timeout waiting for initial states")
test_climate = next(
(c for c in climate_infos if c.name == "Dual-mode Thermostat"), None
)
assert test_climate is not None, (
"Dual-mode Thermostat thermostat climate not found"
)
# Adjust setpoints
client.climate_command(
test_climate.key,
mode=ClimateMode.HEAT,
target_temperature_low=21.5,
target_temperature_high=26.5,
)
try:
climate_state = await asyncio.wait_for(climate_future, timeout=5.0)
except TimeoutError:
pytest.fail("Climate state not received within 5 seconds")
assert isinstance(climate_state, aioesphomeapi.ClimateState)
assert climate_state.mode == ClimateMode.HEAT
assert climate_state.target_temperature_low == 21.5
assert climate_state.target_temperature_high == 26.5

View File

@@ -5,7 +5,10 @@ from __future__ import annotations
import asyncio
from aioesphomeapi import (
ClimateFanMode,
ClimateFeature,
ClimateInfo,
ClimateMode,
DateInfo,
DateState,
DateTimeInfo,
@@ -121,6 +124,46 @@ async def test_host_mode_many_entities(
assert len(climate_infos) >= 1, "Expected at least 1 climate entity"
climate_info = climate_infos[0]
# Verify feature flags set as expected
assert climate_info.feature_flags == (
ClimateFeature.SUPPORTS_ACTION
| ClimateFeature.SUPPORTS_CURRENT_HUMIDITY
| ClimateFeature.SUPPORTS_CURRENT_TEMPERATURE
| ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
| ClimateFeature.SUPPORTS_TARGET_HUMIDITY
)
# Verify modes
assert climate_info.supported_modes == [
ClimateMode.OFF,
ClimateMode.COOL,
ClimateMode.HEAT,
], f"Expected modes [OFF, COOL, HEAT], got {climate_info.supported_modes}"
# Verify visual parameters
assert climate_info.visual_min_temperature == 15.0, (
f"Expected min_temperature=15.0, got {climate_info.visual_min_temperature}"
)
assert climate_info.visual_max_temperature == 32.0, (
f"Expected max_temperature=32.0, got {climate_info.visual_max_temperature}"
)
assert climate_info.visual_target_temperature_step == 0.1, (
f"Expected temperature_step=0.1, got {climate_info.visual_target_temperature_step}"
)
assert climate_info.visual_min_humidity == 20.0, (
f"Expected min_humidity=20.0, got {climate_info.visual_min_humidity}"
)
assert climate_info.visual_max_humidity == 70.0, (
f"Expected max_humidity=70.0, got {climate_info.visual_max_humidity}"
)
# Verify fan modes
assert climate_info.supported_fan_modes == [
ClimateFanMode.ON,
ClimateFanMode.AUTO,
], f"Expected fan modes [ON, AUTO], got {climate_info.supported_fan_modes}"
# Verify the thermostat has presets
assert len(climate_info.supported_presets) > 0, (
"Expected climate to have presets"

View File

@@ -71,6 +71,12 @@ def mock_changed_files() -> Generator[Mock, None, None]:
yield mock
@pytest.fixture(autouse=True)
def clear_clang_tidy_cache() -> None:
"""Clear the clang-tidy full scan cache before each test."""
determine_jobs._is_clang_tidy_full_scan.cache_clear()
def test_main_all_tests_should_run(
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
@@ -90,15 +96,35 @@ def test_main_all_tests_should_run(
mock_should_run_clang_format.return_value = True
mock_should_run_python_linters.return_value = True
# Mock list-components.py output (now returns JSON with --changed-with-deps)
mock_result = Mock()
mock_result.stdout = json.dumps(
{"directly_changed": ["wifi", "api"], "all_changed": ["wifi", "api", "sensor"]}
)
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return non-component files (to avoid memory impact)
# Memory impact only runs when component C++ files change
mock_changed_files.return_value = [
"esphome/config.py",
"esphome/helpers.py",
]
# Run main function with mocked argv
with patch("sys.argv", ["determine-jobs.py"]):
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
patch.object(
determine_jobs,
"get_changed_components",
return_value=["wifi", "api", "sensor"],
),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs,
"get_components_with_dependencies",
side_effect=lambda files, deps: ["wifi", "api"]
if not deps
else ["wifi", "api", "sensor"],
),
):
determine_jobs.main()
# Check output
@@ -121,9 +147,9 @@ def test_main_all_tests_should_run(
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be present
# memory_impact should be false (no component C++ files changed)
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false" # No files changed
assert output["memory_impact"]["should_run"] == "false"
def test_main_no_tests_should_run(
@@ -145,13 +171,18 @@ def test_main_no_tests_should_run(
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Mock empty list-components.py output
mock_result = Mock()
mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []})
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return no component files
mock_changed_files.return_value = []
# Run main function with mocked argv
with patch("sys.argv", ["determine-jobs.py"]):
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "get_changed_components", return_value=[]),
patch.object(determine_jobs, "filter_component_files", return_value=False),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=[]
),
):
determine_jobs.main()
# Check output
@@ -217,14 +248,23 @@ def test_main_with_branch_argument(
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = True
# Mock list-components.py output
mock_result = Mock()
mock_result.stdout = json.dumps(
{"directly_changed": ["mqtt"], "all_changed": ["mqtt"]}
)
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return non-component files (to avoid memory impact)
# Memory impact only runs when component C++ files change
mock_changed_files.return_value = ["esphome/config.py"]
with patch("sys.argv", ["script.py", "-b", "main"]):
with (
patch("sys.argv", ["script.py", "-b", "main"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
patch.object(determine_jobs, "get_changed_components", return_value=["mqtt"]),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=["mqtt"]
),
):
determine_jobs.main()
# Check that functions were called with branch
@@ -233,13 +273,6 @@ def test_main_with_branch_argument(
mock_should_run_clang_format.assert_called_once_with("main")
mock_should_run_python_linters.assert_called_once_with("main")
# Check that list-components.py was called with branch
mock_subprocess_run.assert_called_once()
call_args = mock_subprocess_run.call_args[0][0]
assert "--changed-with-deps" in call_args
assert "-b" in call_args
assert "main" in call_args
# Check output
captured = capsys.readouterr()
output = json.loads(captured.out)
@@ -260,7 +293,7 @@ def test_main_with_branch_argument(
# changed_cpp_file_count should be present
assert "changed_cpp_file_count" in output
assert isinstance(output["changed_cpp_file_count"], int)
# memory_impact should be present
# memory_impact should be false (no component C++ files changed)
assert "memory_impact" in output
assert output["memory_impact"]["should_run"] == "false"
@@ -363,16 +396,6 @@ def test_should_run_clang_tidy_hash_check_exception() -> None:
result = determine_jobs.should_run_clang_tidy()
assert result is True # Fail safe - run clang-tidy
# Even with C++ files, exception should trigger clang-tidy
with (
patch.object(
determine_jobs, "changed_files", return_value=["esphome/core.cpp"]
),
patch("subprocess.run", side_effect=Exception("Hash check failed")),
):
result = determine_jobs.should_run_clang_tidy()
assert result is True
def test_should_run_clang_tidy_with_branch() -> None:
"""Test should_run_clang_tidy with branch argument."""
@@ -498,16 +521,11 @@ def test_main_filters_components_without_tests(
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Mock list-components.py output with 3 components
# wifi: has tests, sensor: has tests, airthings_ble: no tests
mock_result = Mock()
mock_result.stdout = json.dumps(
{
"directly_changed": ["wifi", "sensor"],
"all_changed": ["wifi", "sensor", "airthings_ble"],
}
)
mock_subprocess_run.return_value = mock_result
# Mock changed_files to return component files
mock_changed_files.return_value = [
"esphome/components/wifi/wifi.cpp",
"esphome/components/sensor/sensor.h",
]
# Create test directory structure
tests_dir = tmp_path / "tests" / "components"
@@ -531,6 +549,23 @@ def test_main_filters_components_without_tests(
patch.object(determine_jobs, "root_path", str(tmp_path)),
patch.object(helpers, "root_path", str(tmp_path)),
patch("sys.argv", ["determine-jobs.py"]),
patch.object(
determine_jobs,
"get_changed_components",
return_value=["wifi", "sensor", "airthings_ble"],
),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs,
"get_components_with_dependencies",
side_effect=lambda files, deps: ["wifi", "sensor"]
if not deps
else ["wifi", "sensor", "airthings_ble"],
),
):
# Clear the cache since we're mocking root_path
determine_jobs._component_has_tests.cache_clear()
@@ -763,3 +798,130 @@ def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) -
assert result["should_run"] == "true"
assert result["components"] == ["wifi"]
assert "i2c" not in result["components"]
# Tests for clang-tidy split mode logic
def test_clang_tidy_mode_full_scan(
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
mock_should_run_clang_format: Mock,
mock_should_run_python_linters: Mock,
mock_subprocess_run: Mock,
mock_changed_files: Mock,
capsys: pytest.CaptureFixture[str],
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test that full scan (hash changed) always uses split mode."""
monkeypatch.delenv("GITHUB_ACTIONS", raising=False)
mock_should_run_integration_tests.return_value = False
mock_should_run_clang_tidy.return_value = True
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Mock changed_files to return no component files
mock_changed_files.return_value = []
# Mock full scan (hash changed)
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True),
patch.object(determine_jobs, "get_changed_components", return_value=[]),
patch.object(determine_jobs, "filter_component_files", return_value=False),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=[]
),
):
determine_jobs.main()
captured = capsys.readouterr()
output = json.loads(captured.out)
# Full scan should always use split mode
assert output["clang_tidy_mode"] == "split"
@pytest.mark.parametrize(
("component_count", "files_per_component", "expected_mode"),
[
# Small PR: 5 files in 1 component -> nosplit
(1, 5, "nosplit"),
# Medium PR: 30 files in 2 components -> nosplit
(2, 15, "nosplit"),
# Medium PR: 64 files total -> nosplit (just under threshold)
(2, 32, "nosplit"),
# Large PR: 66 files total -> split (at threshold)
(2, 33, "split"), # 2 * 33 = 66 files
# Large PR: 100 files in 10 components -> split
(10, 10, "split"),
],
ids=[
"1_comp_5_files_nosplit",
"2_comp_30_files_nosplit",
"2_comp_64_files_nosplit_under_threshold",
"2_comp_66_files_split_at_threshold",
"10_comp_100_files_split",
],
)
def test_clang_tidy_mode_targeted_scan(
component_count: int,
files_per_component: int,
expected_mode: str,
mock_should_run_integration_tests: Mock,
mock_should_run_clang_tidy: Mock,
mock_should_run_clang_format: Mock,
mock_should_run_python_linters: Mock,
mock_subprocess_run: Mock,
mock_changed_files: Mock,
capsys: pytest.CaptureFixture[str],
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test clang-tidy mode selection based on files_to_check count."""
monkeypatch.delenv("GITHUB_ACTIONS", raising=False)
mock_should_run_integration_tests.return_value = False
mock_should_run_clang_tidy.return_value = True
mock_should_run_clang_format.return_value = False
mock_should_run_python_linters.return_value = False
# Create component names
components = [f"comp{i}" for i in range(component_count)]
# Mock changed_files to return component files
mock_changed_files.return_value = [
f"esphome/components/{comp}/file.cpp" for comp in components
]
# Mock git_ls_files to return files for each component
cpp_files = {
f"esphome/components/{comp}/file{i}.cpp": 0
for comp in components
for i in range(files_per_component)
}
# Create a mock that returns the cpp_files dict for any call
def mock_git_ls_files(patterns=None):
return cpp_files
with (
patch("sys.argv", ["determine-jobs.py"]),
patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files),
patch.object(determine_jobs, "get_changed_components", return_value=components),
patch.object(
determine_jobs,
"filter_component_files",
side_effect=lambda f: f.startswith("esphome/components/"),
),
patch.object(
determine_jobs, "get_components_with_dependencies", return_value=components
),
):
determine_jobs.main()
captured = capsys.readouterr()
output = json.loads(captured.out)
assert output["clang_tidy_mode"] == expected_mode

View File

@@ -517,6 +517,35 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No
mock_cg.add_global.assert_not_called()
def test_include_file_with_c_header(
tmp_path: Path, mock_copy_file_if_changed: Mock
) -> None:
"""Test include_file wraps header in extern C block when is_c_header is True."""
src_file = tmp_path / "c_library.h"
src_file.write_text("// C library header")
CORE.build_path = tmp_path / "build"
with patch("esphome.core.config.cg") as mock_cg:
# Mock RawStatement to capture the text
mock_raw_statement = MagicMock()
mock_raw_statement.text = ""
def raw_statement_side_effect(text):
mock_raw_statement.text = text
return mock_raw_statement
mock_cg.RawStatement.side_effect = raw_statement_side_effect
config.include_file(src_file, Path("c_library.h"), is_c_header=True)
mock_copy_file_if_changed.assert_called_once()
mock_cg.add_global.assert_called_once()
# Check that include statement is wrapped in extern "C" block
assert 'extern "C"' in mock_raw_statement.text
assert '#include "c_library.h"' in mock_raw_statement.text
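For reference, the wrapped include the test above expects looks roughly like this (exact whitespace is an assumption; the assertions only check that both substrings are present):

// Assumed shape of the generated global statement when is_c_header=True
extern "C" {
#include "c_library.h"
}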
def test_get_usable_cpu_count() -> None:
"""Test get_usable_cpu_count returns CPU count."""
count = config.get_usable_cpu_count()

View File

@@ -668,45 +668,3 @@ class TestEsphomeCore:
os.environ.pop("ESPHOME_IS_HA_ADDON", None)
os.environ.pop("ESPHOME_DATA_DIR", None)
assert target.data_dir == Path(expected_default)
def test_platformio_cache_dir_with_env_var(self):
"""Test platformio_cache_dir when PLATFORMIO_CACHE_DIR env var is set."""
target = core.EsphomeCore()
test_cache_dir = "/custom/cache/dir"
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": test_cache_dir}):
assert target.platformio_cache_dir == test_cache_dir
def test_platformio_cache_dir_without_env_var(self):
"""Test platformio_cache_dir defaults to ~/.platformio/.cache."""
target = core.EsphomeCore()
with patch.dict(os.environ, {}, clear=True):
# Ensure env var is not set
os.environ.pop("PLATFORMIO_CACHE_DIR", None)
expected = os.path.expanduser("~/.platformio/.cache")
assert target.platformio_cache_dir == expected
def test_platformio_cache_dir_empty_env_var(self):
"""Test platformio_cache_dir with empty env var falls back to default."""
target = core.EsphomeCore()
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": ""}):
expected = os.path.expanduser("~/.platformio/.cache")
assert target.platformio_cache_dir == expected
def test_platformio_cache_dir_whitespace_env_var(self):
"""Test platformio_cache_dir with whitespace-only env var falls back to default."""
target = core.EsphomeCore()
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": " "}):
expected = os.path.expanduser("~/.platformio/.cache")
assert target.platformio_cache_dir == expected
def test_platformio_cache_dir_docker_addon_path(self):
"""Test platformio_cache_dir in Docker/HA addon environment."""
target = core.EsphomeCore()
addon_cache = "/data/cache/platformio"
with patch.dict(os.environ, {"PLATFORMIO_CACHE_DIR": addon_cache}):
assert target.platformio_cache_dir == addon_cache

View File

@@ -355,7 +355,6 @@ def test_clean_build(
mock_core.relative_pioenvs_path.return_value = pioenvs_dir
mock_core.relative_piolibdeps_path.return_value = piolibdeps_dir
mock_core.relative_build_path.return_value = dependencies_lock
mock_core.platformio_cache_dir = str(platformio_cache_dir)
# Verify all exist before
assert pioenvs_dir.exists()