Compare commits

...

16 Commits

Author | SHA1 | Message | Date
Jonathan Swoboda | 375e53105f | Merge pull request #12444 from esphome/bump-2025.12.0b2 (tag: 2025.12.0b2) | 2025-12-12 12:15:41 -05:00
Jonathan Swoboda | c9506b056d | Bump version to 2025.12.0b2 | 2025-12-12 11:12:58 -05:00
Jonathan Swoboda | 2c77668a05 | [http_request] Skip update check when network not connected (#12418) (Co-authored-by: Claude <noreply@anthropic.com>) | 2025-12-12 11:12:58 -05:00
J. Nick Koston | 5567d96dd9 | [esp8266] Eliminate up to 16ms socket latency (#12397) | 2025-12-12 11:12:58 -05:00
J. Nick Koston | 78b76045ce | [api] Fix potential buffer overflow in noise PSK base64 decode (#12395) | 2025-12-12 11:12:58 -05:00
J. Nick Koston | 1d13d18a16 | [light] Add zero-copy support for API effect commands (#12384) | 2025-12-12 11:12:58 -05:00
Jonathan Swoboda | a3a2a6d965 | Merge pull request #12396 from esphome/bump-2025.12.0b1 (tag: 2025.12.0b1) | 2025-12-09 21:33:58 -05:00
Jonathan Swoboda | 26770e09dc | Bump version to 2025.12.0b1 | 2025-12-09 20:08:35 -05:00
Javier Peletier | 9f2693ead5 | [core] Packages refactor and conditional package inclusion (package refactor part 1) (#11605) (Co-authored-by: J. Nick Koston <nick@koston.org>) | 2025-12-10 00:59:58 +01:00
J. Nick Koston | 3642399460 | [tests] Fix clang-tidy warnings in custom_api_device_component fixture (#12390) | 2025-12-10 00:50:26 +01:00
J. Nick Koston | 3a6edbc2c7 | [micronova] Fix test UART package key to match directory name (#12391) | 2025-12-10 00:49:44 +01:00
J. Nick Koston | 608f834eaa | [ci] Isolate usb_cdc_acm in component tests due to tinyusb/usb_host conflict (#12392) | 2025-12-10 00:49:29 +01:00
J. Nick Koston | 5919355d18 | [ci] Allow memory impact target branch build to fail without blocking CI (#12381) | 2025-12-10 00:26:24 +01:00
dependabot[bot] | 1e23b10eed | Bump aioesphomeapi from 43.1.0 to 43.2.1 (#12385) (Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>) | 2025-12-09 22:02:42 +00:00
Clyde Stubbs | ad0218fd40 | [mipi_rgb] Add Waveshare 3.16 (#12309) | 2025-12-10 08:17:59 +11:00
Clyde Stubbs | 87142efbb4 | [epaper_spi] Set reasonable default update interval (#12331) | 2025-12-10 06:42:11 +11:00
40 changed files with 761 additions and 162 deletions

View File

@@ -959,13 +959,13 @@ jobs:
- memory-impact-comment
if: always()
steps:
- name: Success
if: ${{ !(contains(needs.*.result, 'failure')) }}
run: exit 0
- name: Failure
if: ${{ contains(needs.*.result, 'failure') }}
- name: Check job results
env:
JSON_DOC: ${{ toJSON(needs) }}
NEEDS_JSON: ${{ toJSON(needs) }}
run: |
echo $JSON_DOC | jq
exit 1
# memory-impact-target-branch is allowed to fail without blocking CI.
# This job builds the target branch (dev/beta/release) which may fail because:
# 1. The target branch has a build issue independent of this PR
# 2. This PR fixes a build issue on the target branch
# In either case, we only care that the PR branch builds successfully.
echo "$NEEDS_JSON" | jq -e 'del(.["memory-impact-target-branch"]) | all(.result != "failure")'

View File

@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2025.12.0-dev
PROJECT_NUMBER = 2025.12.0b2
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -579,7 +579,7 @@ message LightCommandRequest {
bool has_flash_length = 16;
uint32 flash_length = 17;
bool has_effect = 18;
string effect = 19;
string effect = 19 [(pointer_to_buffer) = true];
uint32 device_id = 28 [(field_ifdef) = "USE_DEVICES"];
}

View File

@@ -533,7 +533,7 @@ void APIConnection::light_command(const LightCommandRequest &msg) {
if (msg.has_flash_length)
call.set_flash_length(msg.flash_length);
if (msg.has_effect)
call.set_effect(msg.effect);
call.set_effect(reinterpret_cast<const char *>(msg.effect), msg.effect_len);
call.perform();
}
#endif
@@ -1669,7 +1669,7 @@ bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryption
} else {
ESP_LOGW(TAG, "Failed to clear encryption key");
}
} else if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) {
} else if (base64_decode(msg.key, psk.data(), psk.size()) != psk.size()) {
ESP_LOGW(TAG, "Invalid encryption key length");
} else if (!this->parent_->save_noise_psk(psk, true)) {
ESP_LOGW(TAG, "Failed to save encryption key");

View File

@@ -611,9 +611,12 @@ bool LightCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
}
bool LightCommandRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 19:
this->effect = value.as_string();
case 19: {
// Use raw data directly to avoid allocation
this->effect = value.data();
this->effect_len = value.size();
break;
}
default:
return false;
}
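Note: conceptually, the decoder now keeps a pointer and length into the already-received frame instead of copying the effect name into a std::string. A small Python sketch of the same "view, don't copy" idea (the frame bytes here are made up):

frame = bytearray(b"\x9a\x01\x07rainbow\x10\x01")  # pretend protobuf frame, contents made up
effect_view = memoryview(frame)[3:10]              # zero-copy view into the receive buffer
effect_copy = bytes(frame[3:10])                   # this variant allocates and copies
assert effect_view.tobytes() == effect_copy == b"rainbow"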

View File

@@ -840,7 +840,7 @@ class LightStateResponse final : public StateResponseProtoMessage {
class LightCommandRequest final : public CommandProtoMessage {
public:
static constexpr uint8_t MESSAGE_TYPE = 32;
static constexpr uint8_t ESTIMATED_SIZE = 112;
static constexpr uint8_t ESTIMATED_SIZE = 122;
#ifdef HAS_PROTO_MESSAGE_DUMP
const char *message_name() const override { return "light_command_request"; }
#endif
@@ -869,7 +869,8 @@ class LightCommandRequest final : public CommandProtoMessage {
bool has_flash_length{false};
uint32_t flash_length{0};
bool has_effect{false};
std::string effect{};
const uint8_t *effect{nullptr};
uint16_t effect_len{0};
#ifdef HAS_PROTO_MESSAGE_DUMP
void dump_to(std::string &out) const override;
#endif

View File

@@ -999,7 +999,9 @@ void LightCommandRequest::dump_to(std::string &out) const {
dump_field(out, "has_flash_length", this->has_flash_length);
dump_field(out, "flash_length", this->flash_length);
dump_field(out, "has_effect", this->has_effect);
dump_field(out, "effect", this->effect);
out.append(" effect: ");
out.append(format_hex_pretty(this->effect, this->effect_len));
out.append("\n");
#ifdef USE_DEVICES
dump_field(out, "device_id", this->device_id);
#endif

View File

@@ -41,6 +41,7 @@ AUTO_LOAD = ["split_buffer"]
DEPENDENCIES = ["spi"]
CONF_INIT_SEQUENCE_ID = "init_sequence_id"
CONF_MINIMUM_UPDATE_INTERVAL = "minimum_update_interval"
epaper_spi_ns = cg.esphome_ns.namespace("epaper_spi")
EPaperBase = epaper_spi_ns.class_(
@@ -71,6 +72,9 @@ TRANSFORM_OPTIONS = {CONF_MIRROR_X, CONF_MIRROR_Y, CONF_SWAP_XY}
def model_schema(config):
model = MODELS[config[CONF_MODEL]]
class_name = epaper_spi_ns.class_(model.class_name, EPaperBase)
minimum_update_interval = update_interval(
model.get_default(CONF_MINIMUM_UPDATE_INTERVAL, "1s")
)
cv_dimensions = cv.Optional if model.get_default(CONF_WIDTH) else cv.Required
return (
display.FULL_DISPLAY_SCHEMA.extend(
@@ -90,9 +94,9 @@ def model_schema(config):
{
cv.Optional(CONF_ROTATION, default=0): validate_rotation,
cv.Required(CONF_MODEL): cv.one_of(model.name, upper=True),
cv.Optional(
CONF_UPDATE_INTERVAL, default=cv.UNDEFINED
): update_interval,
cv.Optional(CONF_UPDATE_INTERVAL, default=cv.UNDEFINED): cv.All(
update_interval, cv.Range(min=minimum_update_interval)
),
cv.Optional(CONF_TRANSFORM): cv.Schema(
{
cv.Required(CONF_MIRROR_X): cv.boolean,
@@ -153,9 +157,8 @@ def _final_validate(config):
else:
# If no drawing methods are configured, and LVGL is not enabled, show a test card
config[CONF_SHOW_TEST_CARD] = True
config[CONF_UPDATE_INTERVAL] = core.TimePeriod(
seconds=60
).total_milliseconds
elif CONF_UPDATE_INTERVAL not in config:
config[CONF_UPDATE_INTERVAL] = update_interval("1min")
return config
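Note: a toy sketch (not ESPHome's actual time-period validators) of the rule the schema change enforces: a user-supplied update_interval may not be below the model's minimum_update_interval, e.g. 30s for the Spectra E6 models added below.

from datetime import timedelta

def parse_interval(value: str) -> timedelta:
    # toy parser for "30s" / "1min" style strings
    if value.endswith("min"):
        return timedelta(minutes=float(value[:-3]))
    return timedelta(seconds=float(value.rstrip("s")))

def validate_update_interval(value: str, minimum: str = "30s") -> timedelta:
    interval = parse_interval(value)
    if interval < parse_interval(minimum):
        raise ValueError(f"update_interval {value} is below the model minimum {minimum}")
    return interval

validate_update_interval("60s")     # ok
# validate_update_interval("5s")    # raises: below the 30s minimum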

View File

@@ -286,7 +286,7 @@ void EPaperBase::initialise_() {
* @param y
* @return false if the coordinates are out of bounds
*/
bool EPaperBase::rotate_coordinates_(int &x, int &y) const {
bool EPaperBase::rotate_coordinates_(int &x, int &y) {
if (!this->get_clipping().inside(x, y))
return false;
if (this->transform_ & SWAP_XY)
@@ -297,6 +297,10 @@ bool EPaperBase::rotate_coordinates_(int &x, int &y) const {
y = this->height_ - y - 1;
if (x >= this->width_ || y >= this->height_ || x < 0 || y < 0)
return false;
this->x_low_ = clamp_at_most(this->x_low_, x);
this->x_high_ = clamp_at_least(this->x_high_, x + 1);
this->y_low_ = clamp_at_most(this->y_low_, y);
this->y_high_ = clamp_at_least(this->y_high_, y + 1);
return true;
}
@@ -319,10 +323,6 @@ void HOT EPaperBase::draw_pixel_at(int x, int y, Color color) {
} else {
this->buffer_[byte_position] = original | pixel_bit;
}
this->x_low_ = clamp_at_most(this->x_low_, x);
this->x_high_ = clamp_at_least(this->x_high_, x + 1);
this->y_low_ = clamp_at_most(this->y_low_, y);
this->y_high_ = clamp_at_least(this->y_high_, y + 1);
}
void EPaperBase::dump_config() {

View File

@@ -106,7 +106,7 @@ class EPaperBase : public Display,
void initialise_();
void wait_for_idle_(bool should_wait);
bool init_buffer_(size_t buffer_length);
bool rotate_coordinates_(int &x, int &y) const;
bool rotate_coordinates_(int &x, int &y);
/**
* Methods that must be implemented by concrete classes to control the display

View File

@@ -4,8 +4,8 @@ from . import EpaperModel
class SpectraE6(EpaperModel):
def __init__(self, name, class_name="EPaperSpectraE6", **kwargs):
super().__init__(name, class_name, **kwargs)
def __init__(self, name, class_name="EPaperSpectraE6", **defaults):
super().__init__(name, class_name, **defaults)
# fmt: off
def get_init_sequence(self, config: dict):
@@ -30,7 +30,7 @@ class SpectraE6(EpaperModel):
return self.defaults.get(key, fallback)
spectra_e6 = SpectraE6("spectra-e6")
spectra_e6 = SpectraE6("spectra-e6", minimum_update_interval="30s")
spectra_e6_7p3 = spectra_e6.extend(
"7.3in-Spectra-E6",

View File

@@ -36,6 +36,10 @@ void HttpRequestUpdate::setup() {
}
void HttpRequestUpdate::update() {
if (!network::is_connected()) {
ESP_LOGD(TAG, "Network not connected, skipping update check");
return;
}
#ifdef USE_ESP32
xTaskCreate(HttpRequestUpdate::update_task, "update_task", 8192, (void *) this, 1, &this->update_task_handle_);
#else

View File

@@ -504,8 +504,8 @@ color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() {
#undef KEY
}
LightCall &LightCall::set_effect(const std::string &effect) {
if (strcasecmp(effect.c_str(), "none") == 0) {
LightCall &LightCall::set_effect(const char *effect, size_t len) {
if (len == 4 && strncasecmp(effect, "none", 4) == 0) {
this->set_effect(0);
return *this;
}
@@ -513,15 +513,16 @@ LightCall &LightCall::set_effect(const std::string &effect) {
bool found = false;
for (uint32_t i = 0; i < this->parent_->effects_.size(); i++) {
LightEffect *e = this->parent_->effects_[i];
const char *name = e->get_name();
if (strcasecmp(effect.c_str(), e->get_name()) == 0) {
if (strncasecmp(effect, name, len) == 0 && name[len] == '\0') {
this->set_effect(i + 1);
found = true;
break;
}
}
if (!found) {
ESP_LOGW(TAG, "'%s': no such effect '%s'", this->parent_->get_name().c_str(), effect.c_str());
ESP_LOGW(TAG, "'%s': no such effect '%.*s'", this->parent_->get_name().c_str(), (int) len, effect);
}
return *this;
}
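Note: a small Python sketch of the length-bounded, case-insensitive match above. The extra name[len] == '\0' check is what rejects effects whose stored name merely starts with the supplied string, while strncasecmp itself rejects names shorter than the input.

def effect_matches(name: str, effect: bytes, length: int) -> bool:
    candidate = effect[:length].decode()
    return len(name) == len(candidate) and name.lower() == candidate.lower()

assert effect_matches("Rainbow", b"rainbow", 7)     # case-insensitive match
assert not effect_matches("Rainbow", b"rain", 4)    # prefix only; C++ rejects via name[len] != '\0'
assert not effect_matches("Rain", b"rainbow", 7)    # C++ rejects via strncasecmp mismatch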

View File

@@ -129,7 +129,9 @@ class LightCall {
/// Set the effect of the light by its name.
LightCall &set_effect(optional<std::string> effect);
/// Set the effect of the light by its name.
LightCall &set_effect(const std::string &effect);
LightCall &set_effect(const std::string &effect) { return this->set_effect(effect.data(), effect.size()); }
/// Set the effect of the light by its name and length (zero-copy from API).
LightCall &set_effect(const char *effect, size_t len);
/// Set the effect of the light by its internal index number (only for internal use).
LightCall &set_effect(uint32_t effect_number);
LightCall &set_effect(optional<uint32_t> effect_number);

View File

@@ -498,12 +498,12 @@ void LvglComponent::setup() {
buf_bytes /= MIN_BUFFER_FRAC;
buffer = lv_custom_mem_alloc(buf_bytes); // NOLINT
}
this->buffer_frac_ = frac;
if (buffer == nullptr) {
this->status_set_error(LOG_STR("Memory allocation failure"));
this->mark_failed();
return;
}
this->buffer_frac_ = frac;
lv_disp_draw_buf_init(&this->draw_buf_, buffer, nullptr, buffer_pixels);
this->disp_drv_.hor_res = display->get_width();
this->disp_drv_.ver_res = display->get_height();

View File

@@ -24,7 +24,7 @@ from esphome.components.mipi import (
CONF_VSYNC_BACK_PORCH,
CONF_VSYNC_FRONT_PORCH,
CONF_VSYNC_PULSE_WIDTH,
MODE_BGR,
MODE_RGB,
PIXEL_MODE_16BIT,
PIXEL_MODE_18BIT,
DriverChip,
@@ -157,7 +157,7 @@ def model_schema(config):
model.option(CONF_ENABLE_PIN, cv.UNDEFINED): cv.ensure_list(
pins.gpio_output_pin_schema
),
model.option(CONF_COLOR_ORDER, MODE_BGR): cv.enum(COLOR_ORDERS, upper=True),
model.option(CONF_COLOR_ORDER, MODE_RGB): cv.enum(COLOR_ORDERS, upper=True),
model.option(CONF_DRAW_ROUNDING, 2): power_of_two,
model.option(CONF_PIXEL_MODE, PIXEL_MODE_16BIT): cv.one_of(
*pixel_modes, lower=True
@@ -280,14 +280,9 @@ async def to_code(config):
red_pins = config[CONF_DATA_PINS][CONF_RED]
green_pins = config[CONF_DATA_PINS][CONF_GREEN]
blue_pins = config[CONF_DATA_PINS][CONF_BLUE]
if config[CONF_COLOR_ORDER] == "BGR":
dpins.extend(red_pins)
dpins.extend(green_pins)
dpins.extend(blue_pins)
else:
dpins.extend(blue_pins)
dpins.extend(green_pins)
dpins.extend(red_pins)
dpins.extend(blue_pins)
dpins.extend(green_pins)
dpins.extend(red_pins)
# swap bytes to match big-endian format
dpins = dpins[8:16] + dpins[0:8]
else:

View File

@@ -371,17 +371,10 @@ void MipiRgb::dump_config() {
get_pin_name(this->de_pin_).c_str(), get_pin_name(this->pclk_pin_).c_str(),
get_pin_name(this->hsync_pin_).c_str(), get_pin_name(this->vsync_pin_).c_str());
if (this->madctl_ & MADCTL_BGR) {
this->dump_pins_(8, 13, "Blue", 0);
this->dump_pins_(13, 16, "Green", 0);
this->dump_pins_(0, 3, "Green", 3);
this->dump_pins_(3, 8, "Red", 0);
} else {
this->dump_pins_(8, 13, "Red", 0);
this->dump_pins_(13, 16, "Green", 0);
this->dump_pins_(0, 3, "Green", 3);
this->dump_pins_(3, 8, "Blue", 0);
}
this->dump_pins_(8, 13, "Blue", 0);
this->dump_pins_(13, 16, "Green", 0);
this->dump_pins_(0, 3, "Green", 3);
this->dump_pins_(3, 8, "Red", 0);
}
} // namespace mipi_rgb

View File

@@ -7,7 +7,6 @@ ST7701S(
"T-PANEL-S3",
width=480,
height=480,
color_order="BGR",
invert_colors=False,
swap_xy=UNDEFINED,
spi_mode="MODE3",
@@ -56,7 +55,6 @@ t_rgb = ST7701S(
"T-RGB-2.1",
width=480,
height=480,
color_order="BGR",
pixel_mode="18bit",
invert_colors=False,
swap_xy=UNDEFINED,

View File

@@ -82,7 +82,6 @@ st7701s.extend(
"MAKERFABS-4",
width=480,
height=480,
color_order="RGB",
invert_colors=True,
pixel_mode="18bit",
cs_pin=1,

View File

@@ -1,13 +1,13 @@
from esphome.components.mipi import DriverChip
from esphome.components.mipi import DriverChip, delay
from esphome.config_validation import UNDEFINED
from .st7701s import st7701s
# fmt: off
wave_4_3 = DriverChip(
"ESP32-S3-TOUCH-LCD-4.3",
swap_xy=UNDEFINED,
initsequence=(),
color_order="RGB",
width=800,
height=480,
pclk_frequency="16MHz",
@@ -55,10 +55,9 @@ wave_4_3.extend(
)
st7701s.extend(
"WAVESHARE-4-480x480",
"WAVESHARE-4-480X480",
data_rate="2MHz",
spi_mode="MODE3",
color_order="BGR",
pixel_mode="18bit",
width=480,
height=480,
@@ -76,3 +75,72 @@ st7701s.extend(
"blue": [5, 45, 48, 47, 21],
},
)
st7701s.extend(
"WAVESHARE-3.16-320X820",
width=320,
height=820,
de_pin=40,
hsync_pin=38,
vsync_pin=39,
pclk_pin=41,
cs_pin={
"number": 0,
"ignore_strapping_warning": True,
},
pclk_frequency="18MHz",
reset_pin=16,
hsync_back_porch=30,
hsync_front_porch=30,
hsync_pulse_width=6,
vsync_back_porch=20,
vsync_front_porch=20,
vsync_pulse_width=40,
data_pins={
"red": [17, 46, 3, 8, 18],
"green": [14, 13, 12, 11, 10, 9],
"blue": [21, 5, 45, 48, 47],
},
initsequence=(
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x13),
(0xEF, 0x08),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x10),
(0xC0, 0xE5, 0x02),
(0xC1, 0x15, 0x0A),
(0xC2, 0x07, 0x02),
(0xCC, 0x10),
(0xB0, 0x00, 0x08, 0x51, 0x0D, 0xCE, 0x06, 0x00, 0x08, 0x08, 0x24, 0x05, 0xD0, 0x0F, 0x6F, 0x36, 0x1F),
(0xB1, 0x00, 0x10, 0x4F, 0x0C, 0x11, 0x05, 0x00, 0x07, 0x07, 0x18, 0x02, 0xD3, 0x11, 0x6E, 0x34, 0x1F),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x11),
(0xB0, 0x4D),
(0xB1, 0x37),
(0xB2, 0x87),
(0xB3, 0x80),
(0xB5, 0x4A),
(0xB7, 0x85),
(0xB8, 0x21),
(0xB9, 0x00, 0x13),
(0xC0, 0x09),
(0xC1, 0x78),
(0xC2, 0x78),
(0xD0, 0x88),
(0xE0, 0x80, 0x00, 0x02),
(0xE1, 0x0F, 0xA0, 0x00, 0x00, 0x10, 0xA0, 0x00, 0x00, 0x00, 0x60, 0x60),
(0xE2, 0x30, 0x30, 0x60, 0x60, 0x45, 0xA0, 0x00, 0x00, 0x46, 0xA0, 0x00, 0x00, 0x00),
(0xE3, 0x00, 0x00, 0x33, 0x33),
(0xE4, 0x44, 0x44),
(0xE5, 0x0F, 0x4A, 0xA0, 0xA0, 0x11, 0x4A, 0xA0, 0xA0, 0x13, 0x4A, 0xA0, 0xA0, 0x15, 0x4A, 0xA0, 0xA0),
(0xE6, 0x00, 0x00, 0x33, 0x33),
(0xE7, 0x44, 0x44),
(0xE8, 0x10, 0x4A, 0xA0, 0xA0, 0x12, 0x4A, 0xA0, 0xA0, 0x14, 0x4A, 0xA0, 0xA0, 0x16, 0x4A, 0xA0, 0xA0),
(0xEB, 0x02, 0x00, 0x4E, 0x4E, 0xEE, 0x44, 0x00),
(0xED, 0xFF, 0xFF, 0x04, 0x56, 0x72, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x27, 0x65, 0x40, 0xFF, 0xFF),
(0xEF, 0x08, 0x08, 0x08, 0x40, 0x3F, 0x64),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x13),
(0xE8, 0x00, 0x0E),
(0xE8, 0x00, 0x0C),
delay(10),
(0xE8, 0x00, 0x00),
(0xFF, 0x77, 0x01, 0x00, 0x00, 0x00),
)
)

View File

@@ -1,5 +1,9 @@
from collections import UserDict
from collections.abc import Callable
from functools import reduce
import logging
from pathlib import Path
from typing import Any
from esphome import git, yaml_util
from esphome.components.substitutions.jinja import has_jinja
@@ -15,6 +19,7 @@ from esphome.const import (
CONF_PATH,
CONF_REF,
CONF_REFRESH,
CONF_SUBSTITUTIONS,
CONF_URL,
CONF_USERNAME,
CONF_VARS,
@@ -27,32 +32,43 @@ _LOGGER = logging.getLogger(__name__)
DOMAIN = CONF_PACKAGES
def valid_package_contents(package_config: dict):
"""Validates that a package_config that will be merged looks as much as possible to a valid config
to fail early on obvious mistakes."""
if isinstance(package_config, dict):
if CONF_URL in package_config:
# If a URL key is found, then make sure the config conforms to a remote package schema:
return REMOTE_PACKAGE_SCHEMA(package_config)
def validate_has_jinja(value: Any):
if not isinstance(value, str) or not has_jinja(value):
raise cv.Invalid("string does not contain Jinja syntax")
return value
# Validate manually since Voluptuous would regenerate dicts and lose metadata
# such as ESPHomeDataBase
for k, v in package_config.items():
if not isinstance(k, str):
raise cv.Invalid("Package content keys must be strings")
if isinstance(v, (dict, list, Remove)):
continue # e.g. script: [], psram: !remove, logger: {level: debug}
if v is None:
continue # e.g. web_server:
if isinstance(v, str) and has_jinja(v):
# e.g: remote package shorthand:
# package_name: github://esphome/repo/file.yaml@${ branch }
continue
raise cv.Invalid("Invalid component content in package definition")
return package_config
def valid_package_contents(allow_jinja: bool = True) -> Callable[[Any], dict]:
"""Returns a validator that checks if a package_config that will be merged looks as
much as possible to a valid config to fail early on obvious mistakes."""
raise cv.Invalid("Package contents must be a dict")
def validator(package_config: dict) -> dict:
if isinstance(package_config, dict):
if CONF_URL in package_config:
# If a URL key is found, then make sure the config conforms to a remote package schema:
return REMOTE_PACKAGE_SCHEMA(package_config)
# Validate manually since Voluptuous would regenerate dicts and lose metadata
# such as ESPHomeDataBase
for k, v in package_config.items():
if not isinstance(k, str):
raise cv.Invalid("Package content keys must be strings")
if isinstance(v, (dict, list, Remove)):
continue # e.g. script: [], psram: !remove, logger: {level: debug}
if v is None:
continue # e.g. web_server:
if allow_jinja and isinstance(v, str) and has_jinja(v):
# e.g: remote package shorthand:
# package_name: github://esphome/repo/file.yaml@${ branch }, or:
# switch: ${ expression that evals to a switch }
continue
raise cv.Invalid("Invalid component content in package definition")
return package_config
raise cv.Invalid("Package contents must be a dict")
return validator
def expand_file_to_files(config: dict):
@@ -142,7 +158,10 @@ REMOTE_PACKAGE_SCHEMA = cv.All(
PACKAGE_SCHEMA = cv.Any( # A package definition is either:
validate_source_shorthand, # A git URL shorthand string that expands to a remote package schema, or
REMOTE_PACKAGE_SCHEMA, # a valid remote package schema, or
valid_package_contents, # Something that at least looks like an actual package, e.g. {wifi:{ssid: xxx}}
validate_has_jinja, # a Jinja string that may resolve to a package, or
valid_package_contents(
allow_jinja=True
), # Something that at least looks like an actual package, e.g. {wifi:{ssid: xxx}}
# which will have to be fully validated later as per each component's schema.
)
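Note: a usage sketch of the new valid_package_contents() factory above (example configs are made up; the Jinja shorthand string is the one from the code comment):

local_only = valid_package_contents(allow_jinja=False)
local_only({"wifi": {"ssid": "example"}})   # passes: plausible component config
local_only({"web_server": None})            # passes: bare component key
# local_only({"logger": 42})                # raises cv.Invalid: not a plausible component value

jinja_ok = valid_package_contents(allow_jinja=True)
jinja_ok({"my_pkg": "github://esphome/repo/file.yaml@${ branch }"})  # passes: Jinja shorthand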
@@ -235,32 +254,84 @@ def _process_remote_package(config: dict, skip_update: bool = False) -> dict:
return {"packages": packages}
def _process_package(package_config, config, skip_update: bool = False):
recursive_package = package_config
if CONF_URL in package_config:
package_config = _process_remote_package(package_config, skip_update)
if isinstance(package_config, dict):
recursive_package = do_packages_pass(package_config, skip_update)
return merge_config(recursive_package, config)
def do_packages_pass(config: dict, skip_update: bool = False):
def _walk_packages(
config: dict, callback: Callable[[dict], dict], validate_deprecated: bool = True
) -> dict:
if CONF_PACKAGES not in config:
return config
packages = config[CONF_PACKAGES]
with cv.prepend_path(CONF_PACKAGES):
# The following block and `validate_deprecated` parameter can be safely removed
# once single-package deprecation is effective
if validate_deprecated:
packages = CONFIG_SCHEMA(packages)
with cv.prepend_path(CONF_PACKAGES):
if isinstance(packages, dict):
for package_name, package_config in reversed(packages.items()):
with cv.prepend_path(package_name):
config = _process_package(package_config, config, skip_update)
package_config = callback(package_config)
packages[package_name] = _walk_packages(package_config, callback)
elif isinstance(packages, list):
for package_config in reversed(packages):
config = _process_package(package_config, config, skip_update)
for idx in reversed(range(len(packages))):
with cv.prepend_path(idx):
package_config = callback(packages[idx])
packages[idx] = _walk_packages(package_config, callback)
else:
raise cv.Invalid(
f"Packages must be a key to value mapping or list, got {type(packages)} instead"
)
del config[CONF_PACKAGES]
config[CONF_PACKAGES] = packages
return config
def do_packages_pass(config: dict, skip_update: bool = False) -> dict:
"""Processes, downloads and validates all packages in the config.
Also extracts and merges all substitutions found in packages into the main config substitutions.
"""
if CONF_PACKAGES not in config:
return config
substitutions = UserDict(config.pop(CONF_SUBSTITUTIONS, {}))
def process_package_callback(package_config: dict) -> dict:
"""This will be called for each package found in the config."""
package_config = PACKAGE_SCHEMA(package_config)
if isinstance(package_config, str):
return package_config # Jinja string, skip processing
if CONF_URL in package_config:
package_config = _process_remote_package(package_config, skip_update)
# Extract substitutions from the package and merge them into the main substitutions:
substitutions.data = merge_config(
package_config.pop(CONF_SUBSTITUTIONS, {}), substitutions.data
)
return package_config
_walk_packages(config, process_package_callback)
if substitutions:
config[CONF_SUBSTITUTIONS] = substitutions.data
return config
def merge_packages(config: dict) -> dict:
"""Merges all packages into the main config and removes the `packages:` key."""
if CONF_PACKAGES not in config:
return config
# Build flat list of all package configs to merge in priority order:
merge_list: list[dict] = []
validate_package = valid_package_contents(allow_jinja=False)
def process_package_callback(package_config: dict) -> dict:
"""This will be called for each package found in the config."""
merge_list.append(validate_package(package_config))
return package_config
_walk_packages(config, process_package_callback, validate_deprecated=False)
# Merge all packages into the main config:
config = reduce(lambda new, old: merge_config(old, new), merge_list, config)
del config[CONF_PACKAGES]
return config
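Note: a rough end-to-end sketch of the split introduced above, mirroring the new tests: do_packages_pass() resolves packages and hoists their substitutions (top-level values win), while merge_packages() later flattens everything and drops the packages: key.

config = {
    "substitutions": {"a": 1},
    "packages": {
        "pkg": {
            "substitutions": {"a": 10, "b": 2},   # top-level "a" wins, "b" is hoisted
            "logger": {"level": "DEBUG"},
        },
    },
}
config = do_packages_pass(config)
# config["substitutions"] == {"a": 1, "b": 2}; "packages" is still present
config = merge_packages(config)
# "packages" is gone and "logger" now sits at the top level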

View File

@@ -14,13 +14,36 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#ifdef USE_ESP8266
#include <coredecls.h> // For esp_schedule()
#endif
namespace esphome {
namespace socket {
#ifdef USE_ESP8266
// Flag to signal socket activity - checked by socket_delay() to exit early
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
static volatile bool s_socket_woke = false;
void socket_delay(uint32_t ms) {
// Use esp_delay with a callback that checks if socket data arrived.
// This allows the delay to exit early when socket_wake() is called by
// lwip recv_fn/accept_fn callbacks, reducing socket latency.
s_socket_woke = false;
esp_delay(ms, []() { return !s_socket_woke; });
}
void socket_wake() {
s_socket_woke = true;
esp_schedule();
}
#endif
static const char *const TAG = "socket.lwip";
// set to 1 to enable verbose lwip logging
#if 0
#if 0 // NOLINT(readability-avoid-unconditional-preprocessor-if)
#define LWIP_LOG(msg, ...) ESP_LOGVV(TAG, "socket %p: " msg, this, ##__VA_ARGS__)
#else
#define LWIP_LOG(msg, ...)
@@ -323,9 +346,10 @@ class LWIPRawImpl : public Socket {
for (int i = 0; i < iovcnt; i++) {
ssize_t err = read(reinterpret_cast<uint8_t *>(iov[i].iov_base), iov[i].iov_len);
if (err == -1) {
if (ret != 0)
if (ret != 0) {
// if we already read some don't return an error
break;
}
return err;
}
ret += err;
@@ -393,9 +417,10 @@ class LWIPRawImpl : public Socket {
ssize_t written = internal_write(buf, len);
if (written == -1)
return -1;
if (written == 0)
if (written == 0) {
// no need to output if nothing written
return 0;
}
if (nodelay_) {
int err = internal_output();
if (err == -1)
@@ -408,18 +433,20 @@ class LWIPRawImpl : public Socket {
for (int i = 0; i < iovcnt; i++) {
ssize_t err = internal_write(reinterpret_cast<uint8_t *>(iov[i].iov_base), iov[i].iov_len);
if (err == -1) {
if (written != 0)
if (written != 0) {
// if we already wrote some don't return an error
break;
}
return err;
}
written += err;
if ((size_t) err != iov[i].iov_len)
break;
}
if (written == 0)
if (written == 0) {
// no need to output if nothing written
return 0;
}
if (nodelay_) {
int err = internal_output();
if (err == -1)
@@ -473,6 +500,10 @@ class LWIPRawImpl : public Socket {
} else {
pbuf_cat(rx_buf_, pb);
}
#ifdef USE_ESP8266
// Wake the main loop immediately so it can process the received data.
socket_wake();
#endif
return ERR_OK;
}
@@ -612,7 +643,7 @@ class LWIPRawListenImpl : public LWIPRawImpl {
}
private:
err_t accept_fn(struct tcp_pcb *newpcb, err_t err) {
err_t accept_fn_(struct tcp_pcb *newpcb, err_t err) {
LWIP_LOG("accept(newpcb=%p err=%d)", newpcb, err);
if (err != ERR_OK || newpcb == nullptr) {
// "An error code if there has been an error accepting. Only return ERR_ABRT if you have
@@ -633,12 +664,16 @@ class LWIPRawListenImpl : public LWIPRawImpl {
sock->init();
accepted_sockets_[accepted_socket_count_++] = std::move(sock);
LWIP_LOG("Accepted connection, queue size: %d", accepted_socket_count_);
#ifdef USE_ESP8266
// Wake the main loop immediately so it can accept the new connection.
socket_wake();
#endif
return ERR_OK;
}
static err_t s_accept_fn(void *arg, struct tcp_pcb *newpcb, err_t err) {
LWIPRawListenImpl *arg_this = reinterpret_cast<LWIPRawListenImpl *>(arg);
return arg_this->accept_fn(newpcb, err);
return arg_this->accept_fn_(newpcb, err);
}
// Accept queue - holds incoming connections briefly until the event loop calls accept()

View File

@@ -82,6 +82,15 @@ socklen_t set_sockaddr(struct sockaddr *addr, socklen_t addrlen, const std::stri
/// Set a sockaddr to the any address and specified port for the IP version used by socket_ip().
socklen_t set_sockaddr_any(struct sockaddr *addr, socklen_t addrlen, uint16_t port);
#if defined(USE_ESP8266) && defined(USE_SOCKET_IMPL_LWIP_TCP)
/// Delay that can be woken early by socket activity.
/// On ESP8266, lwip callbacks set a flag and call esp_schedule() to wake the delay.
void socket_delay(uint32_t ms);
/// Called by lwip callbacks to signal socket activity and wake delay.
void socket_wake();
#endif
} // namespace socket
} // namespace esphome
#endif

View File

@@ -1012,14 +1012,20 @@ def validate_config(
CORE.raw_config = config
# 1.1. Resolve !extend and !remove and check for REPLACEME
# 1.1. Merge packages
if CONF_PACKAGES in config:
from esphome.components.packages import merge_packages
config = merge_packages(config)
# 1.2. Resolve !extend and !remove and check for REPLACEME
# After this step, there will not be any Extend or Remove values in the config anymore
try:
resolve_extend_remove(config)
except vol.Invalid as err:
result.add_error(err)
# 1.2. Load external_components
# 1.3. Load external_components
if CONF_EXTERNAL_COMPONENTS in config:
from esphome.components.external_components import do_external_components_pass

View File

@@ -4,7 +4,7 @@ from enum import Enum
from esphome.enum import StrEnum
__version__ = "2025.12.0-dev"
__version__ = "2025.12.0b2"
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (

View File

@@ -12,6 +12,10 @@
#include "esphome/components/status_led/status_led.h"
#endif
#if defined(USE_ESP8266) && defined(USE_SOCKET_IMPL_LWIP_TCP)
#include "esphome/components/socket/socket.h"
#endif
#ifdef USE_SOCKET_SELECT_SUPPORT
#include <cerrno>
@@ -627,6 +631,9 @@ void Application::yield_with_select_(uint32_t delay_ms) {
// No sockets registered, use regular delay
delay(delay_ms);
}
#elif defined(USE_ESP8266) && defined(USE_SOCKET_IMPL_LWIP_TCP)
// No select support but can wake on socket activity via esp_schedule()
socket::socket_delay(delay_ms);
#else
// No select support, use regular delay
delay(delay_ms);
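Note: a Python analogy (not the actual implementation) for the socket_delay()/socket_wake() pair used above: a bounded wait that a callback can cut short, much like esp_delay() returning early once esp_schedule() is called.

import threading

_socket_activity = threading.Event()

def socket_delay(ms: float) -> None:
    """Sleep up to ms milliseconds, but return early on socket activity."""
    _socket_activity.clear()
    _socket_activity.wait(timeout=ms / 1000.0)

def socket_wake() -> None:
    """Called from the receive/accept callbacks to wake the main loop."""
    _socket_activity.set()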

View File

@@ -480,22 +480,13 @@ std::string base64_encode(const uint8_t *buf, size_t buf_len) {
}
size_t base64_decode(const std::string &encoded_string, uint8_t *buf, size_t buf_len) {
std::vector<uint8_t> decoded = base64_decode(encoded_string);
if (decoded.size() > buf_len) {
ESP_LOGW(TAG, "Base64 decode: buffer too small, truncating");
decoded.resize(buf_len);
}
memcpy(buf, decoded.data(), decoded.size());
return decoded.size();
}
std::vector<uint8_t> base64_decode(const std::string &encoded_string) {
int in_len = encoded_string.size();
int i = 0;
int j = 0;
int in = 0;
size_t out = 0;
uint8_t char_array_4[4], char_array_3[3];
std::vector<uint8_t> ret;
bool truncated = false;
// SAFETY: The loop condition checks is_base64() before processing each character.
// This ensures base64_find_char() is only called on valid base64 characters,
@@ -511,8 +502,13 @@ std::vector<uint8_t> base64_decode(const std::string &encoded_string) {
char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];
for (i = 0; (i < 3); i++)
ret.push_back(char_array_3[i]);
for (i = 0; i < 3; i++) {
if (out < buf_len) {
buf[out++] = char_array_3[i];
} else {
truncated = true;
}
}
i = 0;
}
}
@@ -528,10 +524,28 @@ std::vector<uint8_t> base64_decode(const std::string &encoded_string) {
char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];
for (j = 0; (j < i - 1); j++)
ret.push_back(char_array_3[j]);
for (j = 0; j < i - 1; j++) {
if (out < buf_len) {
buf[out++] = char_array_3[j];
} else {
truncated = true;
}
}
}
if (truncated) {
ESP_LOGW(TAG, "Base64 decode: buffer too small, truncating");
}
return out;
}
std::vector<uint8_t> base64_decode(const std::string &encoded_string) {
// Calculate maximum decoded size: every 4 base64 chars = 3 bytes
size_t max_len = ((encoded_string.size() + 3) / 4) * 3;
std::vector<uint8_t> ret(max_len);
size_t actual_len = base64_decode(encoded_string, ret.data(), max_len);
ret.resize(actual_len);
return ret;
}
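Note: a quick check of the sizing rule in the comment above; the "+ 3" rounds up so even unpadded input gets enough room.

import base64

for payload in (b"", b"a", b"ab", b"abc", b"abcd", b"0123456789"):
    encoded = base64.b64encode(payload).decode()
    max_len = ((len(encoded) + 3) // 4) * 3        # same bound as the C++ code
    assert len(base64.b64decode(encoded)) <= max_len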

View File

@@ -12,7 +12,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20251013.0
aioesphomeapi==43.1.0
aioesphomeapi==43.2.1
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.16 # dashboard_import

View File

@@ -87,6 +87,7 @@ ISOLATED_COMPONENTS = {
"neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
"packages": "cannot merge packages",
"tinyusb": "Conflicts with usb_host component - cannot be used together",
"usb_cdc_acm": "Depends on tinyusb which conflicts with usb_host",
}

View File

@@ -215,6 +215,20 @@ def prepare_symbol_changes_data(
}
def format_components_str(components: list[str]) -> str:
"""Format a list of components for display.
Args:
components: List of component names
Returns:
Formatted string with backtick-quoted component names
"""
if len(components) == 1:
return f"`{components[0]}`"
return ", ".join(f"`{c}`" for c in sorted(components))
def prepare_component_breakdown_data(
target_analysis: dict | None, pr_analysis: dict | None
) -> list[tuple[str, int, int, int]] | None:
@@ -316,11 +330,10 @@ def create_comment_body(
}
# Format components list
context["components_str"] = format_components_str(components)
if len(components) == 1:
context["components_str"] = f"`{components[0]}`"
context["config_note"] = "a representative test configuration"
else:
context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components))
context["config_note"] = (
f"a merged configuration with {len(components)} components"
)
@@ -502,6 +515,43 @@ def post_or_update_comment(pr_number: str, comment_body: str) -> None:
print("Comment posted/updated successfully", file=sys.stderr)
def create_target_unavailable_comment(
pr_data: dict,
) -> str:
"""Create a comment body when target branch data is unavailable.
This happens when the target branch (dev/beta/release) fails to build.
This can occur because:
1. The target branch has a build issue independent of this PR
2. This PR fixes a build issue on the target branch
In either case, we only care that the PR branch builds successfully.
Args:
pr_data: Dictionary with PR branch analysis results
Returns:
Formatted comment body
"""
components = pr_data.get("components", [])
platform = pr_data.get("platform", "unknown")
pr_ram = pr_data.get("ram_bytes", 0)
pr_flash = pr_data.get("flash_bytes", 0)
env = Environment(
loader=FileSystemLoader(TEMPLATE_DIR),
trim_blocks=True,
lstrip_blocks=True,
)
template = env.get_template("ci_memory_impact_target_unavailable.j2")
return template.render(
comment_marker=COMMENT_MARKER,
components_str=format_components_str(components),
platform=platform,
pr_ram=format_bytes(pr_ram),
pr_flash=format_bytes(pr_flash),
)
def main() -> int:
"""Main entry point."""
parser = argparse.ArgumentParser(
@@ -523,15 +573,25 @@ def main() -> int:
# Load analysis JSON files (all data comes from JSON for security)
target_data: dict | None = load_analysis_json(args.target_json)
if not target_data:
print("Error: Failed to load target analysis JSON", file=sys.stderr)
sys.exit(1)
pr_data: dict | None = load_analysis_json(args.pr_json)
# PR data is required - if the PR branch can't build, that's a real error
if not pr_data:
print("Error: Failed to load PR analysis JSON", file=sys.stderr)
sys.exit(1)
# Target data is optional - target branch (dev) may fail to build because:
# 1. The target branch has a build issue independent of this PR
# 2. This PR fixes a build issue on the target branch
if not target_data:
print(
"Warning: Target branch analysis unavailable, posting limited comment",
file=sys.stderr,
)
comment_body = create_target_unavailable_comment(pr_data)
post_or_update_comment(args.pr_number, comment_body)
return 0
# Extract detailed analysis if available
target_analysis: dict | None = None
pr_analysis: dict | None = None

View File

@@ -0,0 +1,19 @@
{{ comment_marker }}
## Memory Impact Analysis
**Components:** {{ components_str }}
**Platform:** `{{ platform }}`
| Metric | This PR |
|--------|---------|
| **RAM** | {{ pr_ram }} |
| **Flash** | {{ pr_flash }} |
> ⚠️ **Target branch comparison unavailable** - The target branch failed to build.
> This can happen when the target branch has a build issue, or when this PR fixes a build issue on the target branch.
> The PR branch compiled successfully with the memory usage shown above.
---
> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation).
*This analysis runs automatically when components change.*

View File

@@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch
import pytest
from esphome.components.packages import CONFIG_SCHEMA, do_packages_pass
from esphome.components.packages import CONFIG_SCHEMA, do_packages_pass, merge_packages
from esphome.config import resolve_extend_remove
from esphome.config_helpers import Extend, Remove
import esphome.config_validation as cv
@@ -27,6 +27,7 @@ from esphome.const import (
CONF_REFRESH,
CONF_SENSOR,
CONF_SSID,
CONF_SUBSTITUTIONS,
CONF_UPDATE_INTERVAL,
CONF_URL,
CONF_VARS,
@@ -68,11 +69,12 @@ def fixture_basic_esphome():
def packages_pass(config):
"""Wrapper around packages_pass that also resolves Extend and Remove."""
config = do_packages_pass(config)
config = merge_packages(config)
resolve_extend_remove(config)
return config
def test_package_unused(basic_esphome, basic_wifi):
def test_package_unused(basic_esphome, basic_wifi) -> None:
"""
Ensures do_packages_pass does not change a config if packages aren't used.
"""
@@ -82,7 +84,7 @@ def test_package_unused(basic_esphome, basic_wifi):
assert actual == config
def test_package_invalid_dict(basic_esphome, basic_wifi):
def test_package_invalid_dict(basic_esphome, basic_wifi) -> None:
"""
If a url: key is present, it's expected to be well-formed remote package spec. Ensure an error is raised if not.
Any other simple dict passed as a package will be merged as usual but may fail later validation.
@@ -107,7 +109,7 @@ def test_package_invalid_dict(basic_esphome, basic_wifi):
],
],
)
def test_package_shorthand(packages):
def test_package_shorthand(packages) -> None:
CONFIG_SCHEMA(packages)
@@ -133,12 +135,12 @@ def test_package_shorthand(packages):
[3],
],
)
def test_package_invalid(packages):
def test_package_invalid(packages) -> None:
with pytest.raises(cv.Invalid):
CONFIG_SCHEMA(packages)
def test_package_include(basic_wifi, basic_esphome):
def test_package_include(basic_wifi, basic_esphome) -> None:
"""
Tests the simple case where an independent config present in a package is added to the top-level config as is.
@@ -159,7 +161,7 @@ def test_single_package(
basic_esphome,
basic_wifi,
caplog: pytest.LogCaptureFixture,
):
) -> None:
"""
Tests the simple case where a single package is added to the top-level config as is.
In this test, the CONF_WIFI config is expected to be simply added to the top-level config.
@@ -179,7 +181,7 @@ def test_single_package(
assert "This method for including packages will go away in 2026.7.0" in caplog.text
def test_package_append(basic_wifi, basic_esphome):
def test_package_append(basic_wifi, basic_esphome) -> None:
"""
Tests the case where a key is present in both a package and top-level config.
@@ -204,7 +206,7 @@ def test_package_append(basic_wifi, basic_esphome):
assert actual == expected
def test_package_override(basic_wifi, basic_esphome):
def test_package_override(basic_wifi, basic_esphome) -> None:
"""
Ensures that the top-level configuration takes precedence over duplicate keys defined in a package.
@@ -228,7 +230,7 @@ def test_package_override(basic_wifi, basic_esphome):
assert actual == expected
def test_multiple_package_order():
def test_multiple_package_order() -> None:
"""
Ensures that multiple packages are merged in order.
"""
@@ -257,7 +259,7 @@ def test_multiple_package_order():
assert actual == expected
def test_package_list_merge():
def test_package_list_merge() -> None:
"""
Ensures lists defined in both a package and the top-level config are merged correctly
"""
@@ -313,7 +315,7 @@ def test_package_list_merge():
assert actual == expected
def test_package_list_merge_by_id():
def test_package_list_merge_by_id() -> None:
"""
Ensures that components with matching IDs are merged correctly.
@@ -391,7 +393,7 @@ def test_package_list_merge_by_id():
assert actual == expected
def test_package_merge_by_id_with_list():
def test_package_merge_by_id_with_list() -> None:
"""
Ensures that components with matching IDs are merged correctly when their configuration contains lists.
@@ -430,7 +432,7 @@ def test_package_merge_by_id_with_list():
assert actual == expected
def test_package_merge_by_missing_id():
def test_package_merge_by_missing_id() -> None:
"""
Ensures that a validation error is thrown when trying to extend a missing ID.
"""
@@ -466,7 +468,7 @@ def test_package_merge_by_missing_id():
assert error_raised
def test_package_list_remove_by_id():
def test_package_list_remove_by_id() -> None:
"""
Ensures that components with matching IDs are removed correctly.
@@ -517,7 +519,7 @@ def test_package_list_remove_by_id():
assert actual == expected
def test_multiple_package_list_remove_by_id():
def test_multiple_package_list_remove_by_id() -> None:
"""
Ensures that components with matching IDs are removed correctly.
@@ -563,7 +565,7 @@ def test_multiple_package_list_remove_by_id():
assert actual == expected
def test_package_dict_remove_by_id(basic_wifi, basic_esphome):
def test_package_dict_remove_by_id(basic_wifi, basic_esphome) -> None:
"""
Ensures that components with missing IDs are removed from dict.
Ensures that the top-level configuration takes precedence over duplicate keys defined in a package.
@@ -584,7 +586,7 @@ def test_package_dict_remove_by_id(basic_wifi, basic_esphome):
assert actual == expected
def test_package_remove_by_missing_id():
def test_package_remove_by_missing_id() -> None:
"""
Ensures that components with missing IDs are not merged.
"""
@@ -632,7 +634,7 @@ def test_package_remove_by_missing_id():
@patch("esphome.git.clone_or_update")
def test_remote_packages_with_files_list(
mock_clone_or_update, mock_is_file, mock_load_yaml
):
) -> None:
"""
Ensures that packages are loaded as mixed list of dictionary and strings
"""
@@ -704,7 +706,7 @@ def test_remote_packages_with_files_list(
@patch("esphome.git.clone_or_update")
def test_remote_packages_with_files_and_vars(
mock_clone_or_update, mock_is_file, mock_load_yaml
):
) -> None:
"""
Ensures that packages are loaded as mixed list of dictionary and strings with vars
"""
@@ -793,3 +795,199 @@ def test_remote_packages_with_files_and_vars(
actual = packages_pass(config)
assert actual == expected
def test_packages_merge_substitutions() -> None:
"""
Tests that substitutions from packages in a complex package hierarchy
are extracted and merged into the top-level config.
"""
config = {
CONF_SUBSTITUTIONS: {
"a": 1,
"b": 2,
"c": 3,
},
CONF_PACKAGES: {
"package1": {
"logger": {
"level": "DEBUG",
},
CONF_PACKAGES: [
{
CONF_SUBSTITUTIONS: {
"a": 10,
"e": 5,
},
"sensor": [
{"platform": "template", "id": "sensor1"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor2"},
],
},
"package2": {
"logger": {
"level": "VERBOSE",
},
},
"package3": {
CONF_PACKAGES: [
{
CONF_PACKAGES: [
{
CONF_SUBSTITUTIONS: {
"b": 20,
"d": 4,
},
"sensor": [
{"platform": "template", "id": "sensor3"},
],
},
],
CONF_SUBSTITUTIONS: {
"b": 20,
"d": 6,
},
"sensor": [
{"platform": "template", "id": "sensor4"},
],
},
],
},
},
}
expected = {
CONF_SUBSTITUTIONS: {"a": 1, "e": 5, "b": 2, "d": 6, "c": 3},
CONF_PACKAGES: {
"package1": {
"logger": {
"level": "DEBUG",
},
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor1"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor2"},
],
},
"package2": {
"logger": {
"level": "VERBOSE",
},
},
"package3": {
CONF_PACKAGES: [
{
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor3"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor4"},
],
},
],
},
},
}
actual = do_packages_pass(config)
assert actual == expected
def test_package_merge() -> None:
"""
Tests that all packages are merged into the top-level config.
"""
config = {
CONF_SUBSTITUTIONS: {"a": 1, "e": 5, "b": 2, "d": 6, "c": 3},
CONF_PACKAGES: {
"package1": {
"logger": {
"level": "DEBUG",
},
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor1"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor2"},
],
},
"package2": {
"logger": {
"level": "VERBOSE",
},
},
"package3": {
CONF_PACKAGES: [
{
CONF_PACKAGES: [
{
"sensor": [
{"platform": "template", "id": "sensor3"},
],
},
],
"sensor": [
{"platform": "template", "id": "sensor4"},
],
},
],
},
},
}
expected = {
"sensor": [
{"platform": "template", "id": "sensor1"},
{"platform": "template", "id": "sensor2"},
{"platform": "template", "id": "sensor3"},
{"platform": "template", "id": "sensor4"},
],
"logger": {"level": "VERBOSE"},
CONF_SUBSTITUTIONS: {"a": 1, "e": 5, "b": 2, "d": 6, "c": 3},
}
actual = merge_packages(config)
assert actual == expected
@pytest.mark.parametrize(
"invalid_package",
[
6,
"some string",
["some string"],
None,
True,
{"some_component": 8},
{3: 2},
{"some_component": r"${unevaluated expression}"},
],
)
def test_package_merge_invalid(invalid_package) -> None:
"""
Tests that trying to merge an invalid package raises an error.
"""
config = {
CONF_PACKAGES: {
"some_package": invalid_package,
},
}
with pytest.raises(cv.Invalid):
merge_packages(config)

View File

@@ -2,6 +2,6 @@ substitutions:
enable_rx_pin: GPIO13
packages:
uart: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp32-idf.yaml
uart_1200_none_2stopbits: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -2,6 +2,6 @@ substitutions:
enable_rx_pin: GPIO15
packages:
uart: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp8266-ard.yaml
uart_1200_none_2stopbits: !include ../../test_build_components/common/uart_1200_none_2stopbits/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -2,6 +2,6 @@ substitutions:
enable_rx_pin: GPIO3
packages:
uart: !include ../../test_build_components/common/uart_1200_none_2stopbits/rp2040-ard.yaml
uart_1200_none_2stopbits: !include ../../test_build_components/common/uart_1200_none_2stopbits/rp2040-ard.yaml
<<: !include common.yaml

View File

@@ -52,6 +52,7 @@ void CustomAPIDeviceComponent::on_service_with_arrays(std::vector<bool> bool_arr
}
}
// NOLINTNEXTLINE(performance-unnecessary-value-param)
void CustomAPIDeviceComponent::on_ha_state_changed(std::string entity_id, std::string state) {
ESP_LOGI(TAG, "Home Assistant state changed for %s: %s", entity_id.c_str(), state.c_str());
ESP_LOGI(TAG, "This subscription uses std::string API for backward compatibility");

View File

@@ -24,6 +24,7 @@ class CustomAPIDeviceComponent : public Component, public CustomAPIDevice {
std::vector<float> float_array, std::vector<std::string> string_array);
// Test Home Assistant state subscription with std::string API
// NOLINTNEXTLINE(performance-unnecessary-value-param)
void on_ha_state_changed(std::string entity_id, std::string state);
};

View File

@@ -0,0 +1,43 @@
fancy_component: &id001
- id: component9
value: 9
some_component:
- id: component1
value: 1
- id: component2
value: 2
- id: component3
value: 3
- id: component4
value: 4
- id: component5
value: 79
power: 200
- id: component6
value: 6
- id: component7
value: 7
switch: &id002
- platform: gpio
id: switch1
pin: 12
- platform: gpio
id: switch2
pin: 13
display:
- platform: ili9xxx
dimensions:
width: 100
height: 480
substitutions:
extended_component: component5
package_options:
alternative_package:
alternative_component:
- id: component8
value: 8
fancy_package:
fancy_component: *id001
pin: 12
some_switches: *id002
package_selection: fancy_package

View File

@@ -0,0 +1,61 @@
substitutions:
package_options:
alternative_package:
alternative_component:
- id: component8
value: 8
fancy_package:
fancy_component:
- id: component9
value: 9
pin: 12
some_switches:
- platform: gpio
id: switch1
pin: ${pin}
- platform: gpio
id: switch2
pin: ${pin+1}
package_selection: fancy_package
packages:
- ${ package_options[package_selection] }
- some_component:
- id: component1
value: 1
- some_component:
- id: component2
value: 2
- switch: ${ some_switches }
- packages:
package_with_defaults: !include
file: display.yaml
vars:
native_width: 100
high_dpi: false
my_package:
packages:
- packages:
special_package:
substitutions:
extended_component: component5
some_component:
- id: component3
value: 3
some_component:
- id: component4
value: 4
- id: !extend ${ extended_component }
power: 200
value: 79
some_component:
- id: component5
value: 5
some_component:
- id: component6
value: 6
- id: component7
value: 7

View File

@@ -8,7 +8,7 @@ import pytest
from esphome import config as config_module, yaml_util
from esphome.components import substitutions
from esphome.components.packages import do_packages_pass
from esphome.components.packages import do_packages_pass, merge_packages
from esphome.config import resolve_extend_remove
from esphome.config_helpers import merge_config
from esphome.const import CONF_SUBSTITUTIONS
@@ -74,6 +74,8 @@ def verify_database(value: Any, path: str = "") -> str | None:
return None
if isinstance(value, dict):
for k, v in value.items():
if path == "" and k == CONF_SUBSTITUTIONS:
return None # ignore substitutions key at top level since it is merged.
key_result = verify_database(k, f"{path}/{k}")
if key_result is not None:
return key_result
@@ -144,6 +146,8 @@ def test_substitutions_fixtures(
substitutions.do_substitution_pass(config, command_line_substitutions)
config = merge_packages(config)
resolve_extend_remove(config)
verify_database_result = verify_database(config)
if verify_database_result is not None: