Mirror of https://github.com/esphome/esphome.git, synced 2026-02-10 19:47:35 -07:00

Compare commits: optimize_b ... api-dedup- (7 commits)

| SHA1 |
|---|
| 39bde3eb73 |
| c53baf70c7 |
| 041c43fb32 |
| b4741ade0d |
| 2c3a92db97 |
| fc91a4d7a3 |
| 9bf90eff01 |
```diff
@@ -1155,11 +1155,9 @@ enum WaterHeaterCommandHasField {
  WATER_HEATER_COMMAND_HAS_NONE = 0;
  WATER_HEATER_COMMAND_HAS_MODE = 1;
  WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE = 2;
  WATER_HEATER_COMMAND_HAS_STATE = 4 [deprecated=true];
  WATER_HEATER_COMMAND_HAS_STATE = 4;
  WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_LOW = 8;
  WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH = 16;
  WATER_HEATER_COMMAND_HAS_ON_STATE = 32;
  WATER_HEATER_COMMAND_HAS_AWAY_STATE = 64;
}

message WaterHeaterCommandRequest {
```
```diff
@@ -219,35 +219,8 @@ void APIConnection::loop() {
    this->process_batch_();
  }

  switch (this->active_iterator_) {
    case ActiveIterator::LIST_ENTITIES:
      if (this->iterator_storage_.list_entities.completed()) {
        this->destroy_active_iterator_();
        if (this->flags_.state_subscription) {
          this->begin_iterator_(ActiveIterator::INITIAL_STATE);
        }
      } else {
        this->process_iterator_batch_(this->iterator_storage_.list_entities);
      }
      break;
    case ActiveIterator::INITIAL_STATE:
      if (this->iterator_storage_.initial_state.completed()) {
        this->destroy_active_iterator_();
        // Process any remaining batched messages immediately
        if (!this->deferred_batch_.empty()) {
          this->process_batch_();
        }
        // Now that everything is sent, enable immediate sending for future state changes
        this->flags_.should_try_send_immediately = true;
        // Release excess memory from buffers that grew during initial sync
        this->deferred_batch_.release_buffer();
        this->helper_->release_buffers();
      } else {
        this->process_iterator_batch_(this->iterator_storage_.initial_state);
      }
      break;
    case ActiveIterator::NONE:
      break;
  if (this->active_iterator_ != ActiveIterator::NONE) {
    this->process_active_iterator_();
  }

  if (this->flags_.sent_ping) {
@@ -283,6 +256,49 @@ void APIConnection::loop() {
#endif
}

void APIConnection::process_active_iterator_() {
  // Caller ensures active_iterator_ != NONE
  if (this->active_iterator_ == ActiveIterator::LIST_ENTITIES) {
    if (this->iterator_storage_.list_entities.completed()) {
      this->destroy_active_iterator_();
      if (this->flags_.state_subscription) {
        this->begin_iterator_(ActiveIterator::INITIAL_STATE);
      }
    } else {
      this->process_iterator_batch_(this->iterator_storage_.list_entities);
    }
  } else {  // INITIAL_STATE
    if (this->iterator_storage_.initial_state.completed()) {
      this->destroy_active_iterator_();
      // Process any remaining batched messages immediately
      if (!this->deferred_batch_.empty()) {
        this->process_batch_();
      }
      // Now that everything is sent, enable immediate sending for future state changes
      this->flags_.should_try_send_immediately = true;
      // Release excess memory from buffers that grew during initial sync
      this->deferred_batch_.release_buffer();
      this->helper_->release_buffers();
    } else {
      this->process_iterator_batch_(this->iterator_storage_.initial_state);
    }
  }
}

void APIConnection::process_iterator_batch_(ComponentIterator &iterator) {
  size_t initial_size = this->deferred_batch_.size();
  size_t max_batch = this->get_max_batch_size_();
  while (!iterator.completed() && (this->deferred_batch_.size() - initial_size) < max_batch) {
    iterator.advance();
  }

  // If the batch is full, process it immediately
  // Note: iterator.advance() already calls schedule_batch_() via schedule_message_()
  if (this->deferred_batch_.size() >= max_batch) {
    this->process_batch_();
  }
}

bool APIConnection::send_disconnect_response_() {
  // remote initiated disconnect_client
  // don't close yet, we still need to send the disconnect response
```
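The batching helper above bounds how many entities an iterator may enqueue per loop pass and flushes as soon as the cap is reached. A self-contained sketch of that pattern follows; the `Iterator` type, the `int` queue, and `process_iterator_batch` are invented stand-ins, not ESPHome's actual classes.

```cpp
#include <cstdio>
#include <vector>

// Hypothetical iterator: each advance() produces one queued item.
struct Iterator {
  explicit Iterator(int total) : total_(total) {}
  bool completed() const { return next_ >= total_; }
  void advance() { queue_->push_back(next_++); }
  std::vector<int> *queue_{nullptr};
  int next_{0}, total_;
};

// Pull at most `max_batch` items per call; flush immediately if the queue filled up.
void process_iterator_batch(Iterator &it, std::vector<int> &queue, size_t max_batch) {
  size_t initial_size = queue.size();
  while (!it.completed() && (queue.size() - initial_size) < max_batch) {
    it.advance();
  }
  if (queue.size() >= max_batch) {
    std::printf("flushing %zu items\n", queue.size());
    queue.clear();  // stand-in for actually sending the batch
  }
}

int main() {
  std::vector<int> queue;
  Iterator it(10);
  it.queue_ = &queue;
  while (!it.completed()) {
    process_iterator_batch(it, queue, 4);  // bounded work per "loop pass"
  }
  if (!queue.empty()) std::printf("final flush of %zu items\n", queue.size());
}
```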
```diff
@@ -1343,12 +1359,8 @@ void APIConnection::on_water_heater_command_request(const WaterHeaterCommandRequ
    call.set_target_temperature_low(msg.target_temperature_low);
  if (msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH)
    call.set_target_temperature_high(msg.target_temperature_high);
  if ((msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_AWAY_STATE) ||
      (msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_STATE)) {
  if (msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_STATE) {
    call.set_away((msg.state & water_heater::WATER_HEATER_STATE_AWAY) != 0);
  }
  if ((msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_ON_STATE) ||
      (msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_STATE)) {
    call.set_on((msg.state & water_heater::WATER_HEATER_STATE_ON) != 0);
  }
  call.perform();
```
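The command handler above relies on `has_fields` being a bitmask of the power-of-two `WATER_HEATER_COMMAND_HAS_*` values, with a packed `state` word carrying the ON/AWAY bits. A minimal sketch of that decoding style, using invented enums and assumed bit positions rather than the generated protobuf types:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative flag values mirroring the power-of-two layout of the enum above.
enum CommandHasField : uint32_t {
  HAS_NONE = 0,
  HAS_MODE = 1,
  HAS_TARGET_TEMPERATURE = 2,
  HAS_STATE = 4,
};

enum StateFlag : uint32_t {
  STATE_ON = 1 << 0,    // assumed bit positions, for illustration only
  STATE_AWAY = 1 << 1,
};

struct CommandRequest {
  uint32_t has_fields;
  uint32_t state;
  float target_temperature;
};

void handle(const CommandRequest &msg) {
  // A field is only meaningful when its presence bit is set.
  if (msg.has_fields & HAS_TARGET_TEMPERATURE)
    std::printf("set target temperature to %.1f\n", msg.target_temperature);
  // The packed state word carries several boolean flags at once.
  if (msg.has_fields & HAS_STATE) {
    std::printf("away=%d on=%d\n", (msg.state & STATE_AWAY) != 0, (msg.state & STATE_ON) != 0);
  }
}

int main() {
  handle({HAS_TARGET_TEMPERATURE | HAS_STATE, STATE_ON, 55.0f});
}
```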
```diff
@@ -1899,6 +1911,10 @@ bool APIConnection::schedule_batch_() {
}

void APIConnection::process_batch_() {
  // Ensure MessageInfo remains trivially destructible for our placement new approach
  static_assert(std::is_trivially_destructible<MessageInfo>::value,
                "MessageInfo must remain trivially destructible with this placement-new approach");

  if (this->deferred_batch_.empty()) {
    this->flags_.batch_scheduled = false;
    return;
@@ -1923,10 +1939,6 @@ void APIConnection::process_batch_() {
  for (size_t i = 0; i < num_items; i++) {
    total_estimated_size += this->deferred_batch_[i].estimated_size;
  }
  // Clamp to MAX_BATCH_PACKET_SIZE — we won't send more than that per batch
  if (total_estimated_size > MAX_BATCH_PACKET_SIZE) {
    total_estimated_size = MAX_BATCH_PACKET_SIZE;
  }

  this->prepare_first_message_buffer(shared_buf, header_padding, total_estimated_size);

@@ -1950,20 +1962,7 @@ void APIConnection::process_batch_() {
    return;
  }

  // Multi-message path — heavy stack frame isolated in separate noinline function
  this->process_batch_multi_(shared_buf, num_items, header_padding, footer_size);
}

// Separated from process_batch_() so the single-message fast path gets a minimal
// stack frame without the MAX_MESSAGES_PER_BATCH * sizeof(MessageInfo) array.
void APIConnection::process_batch_multi_(std::vector<uint8_t> &shared_buf, size_t num_items, uint8_t header_padding,
                                         uint8_t footer_size) {
  // Ensure MessageInfo remains trivially destructible for our placement new approach
  static_assert(std::is_trivially_destructible<MessageInfo>::value,
                "MessageInfo must remain trivially destructible with this placement-new approach");

  const size_t messages_to_process = std::min(num_items, MAX_MESSAGES_PER_BATCH);
  const uint8_t frame_overhead = header_padding + footer_size;
  size_t messages_to_process = std::min(num_items, MAX_MESSAGES_PER_BATCH);

  // Stack-allocated array for message info
  alignas(MessageInfo) char message_info_storage[MAX_MESSAGES_PER_BATCH * sizeof(MessageInfo)];
@@ -1990,7 +1989,7 @@ void APIConnection::process_batch_multi_(std::vector<uint8_t> &shared_buf, size_

    // Message was encoded successfully
    // payload_size is header_padding + actual payload size + footer_size
    uint16_t proto_payload_size = payload_size - frame_overhead;
    uint16_t proto_payload_size = payload_size - header_padding - footer_size;
    // Use placement new to construct MessageInfo in pre-allocated stack array
    // This avoids default-constructing all MAX_MESSAGES_PER_BATCH elements
    // Explicit destruction is not needed because MessageInfo is trivially destructible,
@@ -2006,38 +2005,42 @@ void APIConnection::process_batch_multi_(std::vector<uint8_t> &shared_buf, size_
    current_offset = shared_buf.size() + footer_size;
  }

  if (items_processed > 0) {
    // Add footer space for the last message (for Noise protocol MAC)
    if (footer_size > 0) {
      shared_buf.resize(shared_buf.size() + footer_size);
    }

    // Send all collected messages
    APIError err = this->helper_->write_protobuf_messages(ProtoWriteBuffer{&shared_buf},
                                                          std::span<const MessageInfo>(message_info, items_processed));
    if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
      this->fatal_error_with_log_(LOG_STR("Batch write failed"), err);
    }

#ifdef HAS_PROTO_MESSAGE_DUMP
    // Log messages after send attempt for VV debugging
    // It's safe to use the buffer for logging at this point regardless of send result
    for (size_t i = 0; i < items_processed; i++) {
      const auto &item = this->deferred_batch_[i];
      this->log_batch_item_(item);
    }
#endif

    // Partial batch — remove processed items and reschedule
    if (items_processed < this->deferred_batch_.size()) {
      this->deferred_batch_.remove_front(items_processed);
      this->schedule_batch_();
      return;
    }
  if (items_processed == 0) {
    this->deferred_batch_.clear();
    return;
  }

  // All items processed (or none could be processed)
  this->clear_batch_();
  // Add footer space for the last message (for Noise protocol MAC)
  if (footer_size > 0) {
    shared_buf.resize(shared_buf.size() + footer_size);
  }

  // Send all collected messages
  APIError err = this->helper_->write_protobuf_messages(ProtoWriteBuffer{&shared_buf},
                                                        std::span<const MessageInfo>(message_info, items_processed));
  if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
    this->fatal_error_with_log_(LOG_STR("Batch write failed"), err);
  }

#ifdef HAS_PROTO_MESSAGE_DUMP
  // Log messages after send attempt for VV debugging
  // It's safe to use the buffer for logging at this point regardless of send result
  for (size_t i = 0; i < items_processed; i++) {
    const auto &item = this->deferred_batch_[i];
    this->log_batch_item_(item);
  }
#endif

  // Handle remaining items more efficiently
  if (items_processed < this->deferred_batch_.size()) {
    // Remove processed items from the beginning
    this->deferred_batch_.remove_front(items_processed);
    // Reschedule for remaining items
    this->schedule_batch_();
  } else {
    // All items processed
    this->clear_batch_();
  }
}

// Dispatch message encoding based on message_type
```
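Two techniques above are worth spelling out in isolation: placement-new of records into an `alignas`-ed stack byte array, guarded by a `static_assert` that the record type stays trivially destructible so skipping destructor calls is legal, and pushing that large array into a separate `noinline` function so callers on the fast path never carry its stack frame. An illustrative sketch with invented names, not the ESPHome types:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <new>
#include <type_traits>

// Illustrative stand-in for a per-message bookkeeping record; must stay trivially destructible.
struct MessageInfo {
  uint16_t offset;
  uint16_t payload_size;
  uint8_t message_type;
};

constexpr size_t MAX_MESSAGES_PER_BATCH = 16;

// noinline (GCC/Clang) keeps the large stack array out of callers that rarely take this path.
__attribute__((noinline)) size_t fill_batch_records(const uint16_t *sizes, size_t num_items) {
  static_assert(std::is_trivially_destructible<MessageInfo>::value,
                "MessageInfo must stay trivially destructible: the records below are never destroyed explicitly");

  // Raw, correctly aligned storage; nothing is default-constructed up front.
  alignas(MessageInfo) char storage[MAX_MESSAGES_PER_BATCH * sizeof(MessageInfo)];

  const size_t n = std::min(num_items, MAX_MESSAGES_PER_BATCH);
  uint16_t offset = 0;
  for (size_t i = 0; i < n; i++) {
    // Construct each record in place only when it is actually needed.
    new (storage + i * sizeof(MessageInfo)) MessageInfo{offset, sizes[i], static_cast<uint8_t>(i)};
    offset += sizes[i];
  }
  return n;  // storage goes out of scope without any destructor calls
}

int main() {
  const uint16_t sizes[] = {10, 20, 30};
  std::printf("prepared %zu batch records\n", fill_batch_records(sizes, 3));
}
```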
```diff
@@ -15,6 +15,10 @@
#include <limits>
#include <vector>

namespace esphome {
class ComponentIterator;
} // namespace esphome

namespace esphome::api {

// Keepalive timeout in milliseconds
@@ -364,20 +368,13 @@ class APIConnection final : public APIServerConnectionBase {
    return this->client_supports_api_version(1, 14) ? MAX_INITIAL_PER_BATCH : MAX_INITIAL_PER_BATCH_LEGACY;
  }

  // Helper method to process multiple entities from an iterator in a batch
  template<typename Iterator> void process_iterator_batch_(Iterator &iterator) {
    size_t initial_size = this->deferred_batch_.size();
    size_t max_batch = this->get_max_batch_size_();
    while (!iterator.completed() && (this->deferred_batch_.size() - initial_size) < max_batch) {
      iterator.advance();
    }
  // Process active iterator (list_entities/initial_state) during connection setup.
  // Extracted from loop() — only runs during initial handshake, NONE in steady state.
  void __attribute__((noinline)) process_active_iterator_();

    // If the batch is full, process it immediately
    // Note: iterator.advance() already calls schedule_batch_() via schedule_message_()
    if (this->deferred_batch_.size() >= max_batch) {
      this->process_batch_();
    }
  }
  // Helper method to process multiple entities from an iterator in a batch.
  // Takes ComponentIterator base class reference to avoid duplicate template instantiations.
  void process_iterator_batch_(ComponentIterator &iterator);

#ifdef USE_BINARY_SENSOR
  static uint16_t try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
```
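Replacing the header-only `template<typename Iterator>` helper with one out-of-line function that takes the `ComponentIterator` base class means the loop body is compiled once instead of once per iterator type. A toy version of that trade-off, with invented iterator classes:

```cpp
#include <cstdio>

// Common interface the helper actually needs.
class Iterator {
 public:
  virtual ~Iterator() = default;
  virtual bool completed() const = 0;
  virtual void advance() = 0;
};

class ListEntitiesIter : public Iterator {
 public:
  bool completed() const override { return n_ >= 3; }
  void advance() override { std::printf("list entity %d\n", n_++); }

 private:
  int n_{0};
};

class InitialStateIter : public Iterator {
 public:
  bool completed() const override { return n_ >= 2; }
  void advance() override { std::printf("initial state %d\n", n_++); }

 private:
  int n_{0};
};

// One compiled body, shared by every iterator type.
// A template<typename It> version would be instantiated (and duplicated) per type.
void drain(Iterator &it) {
  while (!it.completed())
    it.advance();
}

int main() {
  ListEntitiesIter a;
  InitialStateIter b;
  drain(a);
  drain(b);
}
```

The cost is a virtual call per `advance()`, which is typically negligible next to the duplicated-code savings on a flash-constrained target.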
```diff
@@ -549,8 +546,8 @@ class APIConnection final : public APIServerConnectionBase {
      batch_start_time = 0;
    }

    // Remove processed items from the front — noinline to keep memmove out of warm callers
    void remove_front(size_t count) __attribute__((noinline)) { items.erase(items.begin(), items.begin() + count); }
    // Remove processed items from the front
    void remove_front(size_t count) { items.erase(items.begin(), items.begin() + count); }

    bool empty() const { return items.empty(); }
    size_t size() const { return items.size(); }
@@ -622,8 +619,6 @@ class APIConnection final : public APIServerConnectionBase {

  bool schedule_batch_();
  void process_batch_();
  void process_batch_multi_(std::vector<uint8_t> &shared_buf, size_t num_items, uint8_t header_padding,
                            uint8_t footer_size) __attribute__((noinline));
  void clear_batch_() {
    this->deferred_batch_.clear();
    this->flags_.batch_scheduled = false;

@@ -147,8 +147,6 @@ enum WaterHeaterCommandHasField : uint32_t {
  WATER_HEATER_COMMAND_HAS_STATE = 4,
  WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_LOW = 8,
  WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH = 16,
  WATER_HEATER_COMMAND_HAS_ON_STATE = 32,
  WATER_HEATER_COMMAND_HAS_AWAY_STATE = 64,
};
#ifdef USE_NUMBER
enum NumberMode : uint32_t {

@@ -385,10 +385,6 @@ const char *proto_enum_to_string<enums::WaterHeaterCommandHasField>(enums::Water
      return "WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_LOW";
    case enums::WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH:
      return "WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH";
    case enums::WATER_HEATER_COMMAND_HAS_ON_STATE:
      return "WATER_HEATER_COMMAND_HAS_ON_STATE";
    case enums::WATER_HEATER_COMMAND_HAS_AWAY_STATE:
      return "WATER_HEATER_COMMAND_HAS_AWAY_STATE";
    default:
      return "UNKNOWN";
  }

@@ -94,7 +94,6 @@ class ListEntitiesIterator : public ComponentIterator {
  bool on_update(update::UpdateEntity *entity) override;
#endif
  bool on_end() override;
  bool completed() { return this->state_ == IteratorState::NONE; }

 protected:
  APIConnection *client_;

@@ -88,7 +88,6 @@ class InitialStateIterator : public ComponentIterator {
#ifdef USE_UPDATE
  bool on_update(update::UpdateEntity *entity) override;
#endif
  bool completed() { return this->state_ == IteratorState::NONE; }

 protected:
  APIConnection *client_;
```
```diff
@@ -1,8 +1,7 @@
from esphome import automation
import esphome.codegen as cg
from esphome.components import uart
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_ON_DATA, CONF_THROTTLE, CONF_TRIGGER_ID
from esphome.const import CONF_ID, CONF_THROTTLE

AUTO_LOAD = ["ld24xx"]
DEPENDENCIES = ["uart"]
@@ -12,8 +11,6 @@ MULTI_CONF = True
ld2450_ns = cg.esphome_ns.namespace("ld2450")
LD2450Component = ld2450_ns.class_("LD2450Component", cg.Component, uart.UARTDevice)

LD2450DataTrigger = ld2450_ns.class_("LD2450DataTrigger", automation.Trigger.template())

CONF_LD2450_ID = "ld2450_id"

CONFIG_SCHEMA = cv.All(
@@ -23,11 +20,6 @@ CONFIG_SCHEMA = cv.All(
            cv.Optional(CONF_THROTTLE): cv.invalid(
                f"{CONF_THROTTLE} has been removed; use per-sensor filters, instead"
            ),
            cv.Optional(CONF_ON_DATA): automation.validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(LD2450DataTrigger),
                }
            ),
        }
    )
    .extend(uart.UART_DEVICE_SCHEMA)
@@ -53,6 +45,3 @@ async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    await uart.register_uart_device(var, config)
    for conf in config.get(CONF_ON_DATA, []):
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
        await automation.build_automation(trigger, [], conf)

@@ -413,10 +413,6 @@ void LD2450Component::restart_and_read_all_info() {
  this->set_timeout(1500, [this]() { this->read_all_info(); });
}

void LD2450Component::add_on_data_callback(std::function<void()> &&callback) {
  this->data_callback_.add(std::move(callback));
}

// Send command with values to LD2450
void LD2450Component::send_command_(uint8_t command, const uint8_t *command_value, uint8_t command_value_len) {
  ESP_LOGV(TAG, "Sending COMMAND %02X", command);
@@ -617,8 +613,6 @@ void LD2450Component::handle_periodic_data_() {
    this->still_presence_millis_ = App.get_loop_component_start_time();
  }
#endif

  this->data_callback_.call();
}

bool LD2450Component::handle_ack_data_() {

@@ -141,9 +141,6 @@ class LD2450Component : public Component, public uart::UARTDevice {
                        int32_t zone2_x1, int32_t zone2_y1, int32_t zone2_x2, int32_t zone2_y2, int32_t zone3_x1,
                        int32_t zone3_y1, int32_t zone3_x2, int32_t zone3_y2);

  /// Add a callback that will be called after each successfully processed periodic data frame.
  void add_on_data_callback(std::function<void()> &&callback);

 protected:
  void send_command_(uint8_t command_str, const uint8_t *command_value, uint8_t command_value_len);
  void set_config_mode_(bool enable);
@@ -193,15 +190,6 @@ class LD2450Component : public Component, public uart::UARTDevice {
#ifdef USE_TEXT_SENSOR
  std::array<text_sensor::TextSensor *, 3> direction_text_sensors_{};
#endif

  LazyCallbackManager<void()> data_callback_;
};

class LD2450DataTrigger : public Trigger<> {
 public:
  explicit LD2450DataTrigger(LD2450Component *parent) {
    parent->add_on_data_callback([this]() { this->trigger(); });
  }
};

} // namespace esphome::ld2450
```
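The `on_data` plumbing above is the usual callback-to-trigger bridge: the component stores callbacks, invokes them once per successfully parsed periodic frame, and the trigger subscribes a lambda that forwards into the automation engine. A stripped-down sketch of the same wiring, using a plain `std::vector<std::function<void()>>` and invented class names instead of ESPHome's callback manager and `Trigger<>`:

```cpp
#include <cstdio>
#include <functional>
#include <utility>
#include <vector>

// Stand-in for the component: owns a list of data callbacks.
class RadarComponent {
 public:
  void add_on_data_callback(std::function<void()> &&callback) {
    this->data_callbacks_.push_back(std::move(callback));
  }
  // Called once per successfully parsed periodic data frame.
  void handle_periodic_data() {
    // ... parse the frame, publish sensor states ...
    for (auto &cb : this->data_callbacks_)
      cb();
  }

 private:
  std::vector<std::function<void()>> data_callbacks_;
};

// Stand-in for the generated trigger: subscribes itself in the constructor.
class DataTrigger {
 public:
  explicit DataTrigger(RadarComponent *parent) {
    parent->add_on_data_callback([this]() { this->trigger(); });
  }
  void trigger() { std::printf("on_data automation fired\n"); }
};

int main() {
  RadarComponent radar;
  DataTrigger trig(&radar);  // what the generated config wiring effectively does
  radar.handle_periodic_data();
}
```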
```diff
@@ -3,7 +3,6 @@ import esphome.codegen as cg
from esphome.components import water_heater
import esphome.config_validation as cv
from esphome.const import (
    CONF_AWAY,
    CONF_ID,
    CONF_MODE,
    CONF_OPTIMISTIC,
@@ -19,7 +18,6 @@ from esphome.types import ConfigType
from .. import template_ns

CONF_CURRENT_TEMPERATURE = "current_temperature"
CONF_IS_ON = "is_on"

TemplateWaterHeater = template_ns.class_(
    "TemplateWaterHeater", cg.Component, water_heater.WaterHeater
@@ -53,8 +51,6 @@ CONFIG_SCHEMA = (
            cv.Optional(CONF_SUPPORTED_MODES): cv.ensure_list(
                water_heater.validate_water_heater_mode
            ),
            cv.Optional(CONF_AWAY): cv.returning_lambda,
            cv.Optional(CONF_IS_ON): cv.returning_lambda,
        }
    )
    .extend(cv.COMPONENT_SCHEMA)
@@ -102,22 +98,6 @@ async def to_code(config: ConfigType) -> None:
    if CONF_SUPPORTED_MODES in config:
        cg.add(var.set_supported_modes(config[CONF_SUPPORTED_MODES]))

    if CONF_AWAY in config:
        template_ = await cg.process_lambda(
            config[CONF_AWAY],
            [],
            return_type=cg.optional.template(bool),
        )
        cg.add(var.set_away_lambda(template_))

    if CONF_IS_ON in config:
        template_ = await cg.process_lambda(
            config[CONF_IS_ON],
            [],
            return_type=cg.optional.template(bool),
        )
        cg.add(var.set_is_on_lambda(template_))


@automation.register_action(
    "water_heater.template.publish",
@@ -130,8 +110,6 @@ async def to_code(config: ConfigType) -> None:
            cv.Optional(CONF_MODE): cv.templatable(
                water_heater.validate_water_heater_mode
            ),
            cv.Optional(CONF_AWAY): cv.templatable(cv.boolean),
            cv.Optional(CONF_IS_ON): cv.templatable(cv.boolean),
        }
    ),
)
@@ -156,12 +134,4 @@ async def water_heater_template_publish_to_code(
        template_ = await cg.templatable(mode, args, water_heater.WaterHeaterMode)
        cg.add(var.set_mode(template_))

    if CONF_AWAY in config:
        template_ = await cg.templatable(config[CONF_AWAY], args, bool)
        cg.add(var.set_away(template_))

    if CONF_IS_ON in config:
        template_ = await cg.templatable(config[CONF_IS_ON], args, bool)
        cg.add(var.set_is_on(template_))

    return var

@@ -11,15 +11,12 @@ class TemplateWaterHeaterPublishAction : public Action<Ts...>, public Parented<T
  TEMPLATABLE_VALUE(float, current_temperature)
  TEMPLATABLE_VALUE(float, target_temperature)
  TEMPLATABLE_VALUE(water_heater::WaterHeaterMode, mode)
  TEMPLATABLE_VALUE(bool, away)
  TEMPLATABLE_VALUE(bool, is_on)

  void play(const Ts &...x) override {
    if (this->current_temperature_.has_value()) {
      this->parent_->set_current_temperature(this->current_temperature_.value(x...));
    }
    bool needs_call = this->target_temperature_.has_value() || this->mode_.has_value() || this->away_.has_value() ||
                      this->is_on_.has_value();
    bool needs_call = this->target_temperature_.has_value() || this->mode_.has_value();
    if (needs_call) {
      auto call = this->parent_->make_call();
      if (this->target_temperature_.has_value()) {
@@ -28,12 +25,6 @@ class TemplateWaterHeaterPublishAction : public Action<Ts...>, public Parented<T
      if (this->mode_.has_value()) {
        call.set_mode(this->mode_.value(x...));
      }
      if (this->away_.has_value()) {
        call.set_away(this->away_.value(x...));
      }
      if (this->is_on_.has_value()) {
        call.set_on(this->is_on_.value(x...));
      }
      call.perform();
    } else {
      this->parent_->publish_state();
```
```diff
@@ -17,7 +17,7 @@ void TemplateWaterHeater::setup() {
    }
  }
  if (!this->current_temperature_f_.has_value() && !this->target_temperature_f_.has_value() &&
      !this->mode_f_.has_value() && !this->away_f_.has_value() && !this->is_on_f_.has_value())
      !this->mode_f_.has_value())
    this->disable_loop();
}

@@ -32,12 +32,6 @@ water_heater::WaterHeaterTraits TemplateWaterHeater::traits() {
  if (this->target_temperature_f_.has_value()) {
    traits.add_feature_flags(water_heater::WATER_HEATER_SUPPORTS_TARGET_TEMPERATURE);
  }
  if (this->away_f_.has_value()) {
    traits.set_supports_away_mode(true);
  }
  if (this->is_on_f_.has_value()) {
    traits.add_feature_flags(water_heater::WATER_HEATER_SUPPORTS_ON_OFF);
  }
  return traits;
}

@@ -68,22 +62,6 @@ void TemplateWaterHeater::loop() {
    }
  }

  auto away = this->away_f_.call();
  if (away.has_value()) {
    if (*away != this->is_away()) {
      this->set_state_flag_(water_heater::WATER_HEATER_STATE_AWAY, *away);
      changed = true;
    }
  }

  auto is_on = this->is_on_f_.call();
  if (is_on.has_value()) {
    if (*is_on != this->is_on()) {
      this->set_state_flag_(water_heater::WATER_HEATER_STATE_ON, *is_on);
      changed = true;
    }
  }

  if (changed) {
    this->publish_state();
  }
@@ -112,17 +90,6 @@ void TemplateWaterHeater::control(const water_heater::WaterHeaterCall &call) {
    }
  }

  if (call.get_away().has_value()) {
    if (this->optimistic_) {
      this->set_state_flag_(water_heater::WATER_HEATER_STATE_AWAY, *call.get_away());
    }
  }
  if (call.get_on().has_value()) {
    if (this->optimistic_) {
      this->set_state_flag_(water_heater::WATER_HEATER_STATE_ON, *call.get_on());
    }
  }

  this->set_trigger_.trigger();

  if (this->optimistic_) {
```
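The template component's `loop()` polls each configured lambda, treats an empty optional as "no opinion", and publishes only when a returned value actually differs from the current state. A minimal sketch of that poll-and-diff contract, with `std::optional` and `std::function` standing in for ESPHome's optional and TemplateLambda types and an invented single-flag device:

```cpp
#include <cstdio>
#include <functional>
#include <optional>
#include <utility>

class TemplateDeviceSketch {
 public:
  // The user-supplied lambda may be absent, and may itself return "no value".
  void set_is_on_lambda(std::function<std::optional<bool>()> f) { this->is_on_f_ = std::move(f); }

  void loop() {
    bool changed = false;
    if (this->is_on_f_) {
      auto is_on = this->is_on_f_();        // poll the lambda
      if (is_on.has_value() && *is_on != this->is_on_) {
        this->is_on_ = *is_on;              // only track genuine transitions
        changed = true;
      }
    }
    if (changed)
      this->publish_state();                // publish once per batch of changes
  }

 private:
  void publish_state() { std::printf("state: %s\n", this->is_on_ ? "ON" : "OFF"); }
  std::function<std::optional<bool>()> is_on_f_;
  bool is_on_{false};
};

int main() {
  TemplateDeviceSketch dev;
  bool hw = false;
  dev.set_is_on_lambda([&]() -> std::optional<bool> { return hw; });
  dev.loop();  // no change from the default OFF, nothing published
  hw = true;
  dev.loop();  // transition to ON, publishes
  dev.loop();  // unchanged, stays silent
}
```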
```diff
@@ -24,8 +24,6 @@ class TemplateWaterHeater : public Component, public water_heater::WaterHeater {
    this->target_temperature_f_.set(std::forward<F>(f));
  }
  template<typename F> void set_mode_lambda(F &&f) { this->mode_f_.set(std::forward<F>(f)); }
  template<typename F> void set_away_lambda(F &&f) { this->away_f_.set(std::forward<F>(f)); }
  template<typename F> void set_is_on_lambda(F &&f) { this->is_on_f_.set(std::forward<F>(f)); }

  void set_optimistic(bool optimistic) { this->optimistic_ = optimistic; }
  void set_restore_mode(TemplateWaterHeaterRestoreMode restore_mode) { this->restore_mode_ = restore_mode; }
@@ -51,8 +49,6 @@ class TemplateWaterHeater : public Component, public water_heater::WaterHeater {
  TemplateLambda<float> current_temperature_f_;
  TemplateLambda<float> target_temperature_f_;
  TemplateLambda<water_heater::WaterHeaterMode> mode_f_;
  TemplateLambda<bool> away_f_;
  TemplateLambda<bool> is_on_f_;
  TemplateWaterHeaterRestoreMode restore_mode_{WATER_HEATER_NO_RESTORE};
  water_heater::WaterHeaterModeMask supported_modes_;
  bool optimistic_{true};

@@ -26,6 +26,7 @@ class ComponentIterator {
 public:
  void begin(bool include_internal = false);
  void advance();
  bool completed() const { return this->state_ == IteratorState::NONE; }
  virtual bool on_begin();
#ifdef USE_BINARY_SENSOR
  virtual bool on_binary_sensor(binary_sensor::BinarySensor *binary_sensor) = 0;
```
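With `completed()` defined once on `ComponentIterator` (and `const`), the derived `ListEntitiesIterator`/`InitialStateIterator` no longer need their own identical copies, and helpers that hold only a base-class reference can still query it. A toy illustration of hoisting such an accessor, with invented names:

```cpp
#include <cassert>

enum class IteratorState { NONE, RUNNING };

// The base class owns the shared state machine, so the accessor lives here exactly once.
class IteratorBase {
 public:
  bool completed() const { return this->state_ == IteratorState::NONE; }
  void advance() { this->state_ = IteratorState::NONE; }  // toy: finishes after one step

 protected:
  IteratorState state_{IteratorState::RUNNING};
};

// Derived iterators only customize behavior; they no longer redefine completed().
class ListIterator : public IteratorBase {};
class StateIterator : public IteratorBase {};

// Works for any derived iterator through a (possibly const) base reference.
bool drained(const IteratorBase &it) { return it.completed(); }

int main() {
  ListIterator a;
  assert(!drained(a));
  a.advance();
  assert(drained(a));
}
```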
```diff
@@ -11,8 +11,8 @@ pyserial==3.5
platformio==6.1.19
esptool==5.1.0
click==8.1.7
esphome-dashboard==20260210.0
aioesphomeapi==44.0.0
esphome-dashboard==20260110.0
aioesphomeapi==43.14.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.19.1 # dashboard_import

@@ -1,8 +1,5 @@
ld2450:
  - id: ld2450_radar
    on_data:
      then:
        - logger.log: "LD2450 Radar Data Received"

button:
  - platform: ld2450

@@ -13,8 +13,6 @@ esphome:
          id: template_water_heater
          target_temperature: 50.0
          mode: ECO
          away: false
          is_on: true

      # Templated
      - water_heater.template.publish:
@@ -22,8 +20,6 @@ esphome:
          current_temperature: !lambda "return 45.0;"
          target_temperature: !lambda "return 55.0;"
          mode: !lambda "return water_heater::WATER_HEATER_MODE_GAS;"
          away: !lambda "return true;"
          is_on: !lambda "return false;"

    # Test C++ API: set_template() with stateless lambda (no captures)
    # NOTE: set_template() is not intended to be a public API, but we test it to ensure it doesn't break.
@@ -418,8 +414,6 @@ water_heater:
    current_temperature: !lambda "return 42.0f;"
    target_temperature: !lambda "return 60.0f;"
    mode: !lambda "return water_heater::WATER_HEATER_MODE_ECO;"
    away: !lambda "return false;"
    is_on: !lambda "return true;"
    supported_modes:
      - "OFF"
      - ECO

@@ -4,14 +4,6 @@ host:
api:
logger:

globals:
  - id: global_away
    type: bool
    initial_value: "false"
  - id: global_is_on
    type: bool
    initial_value: "true"

water_heater:
  - platform: template
    id: test_boiler
@@ -19,8 +11,6 @@ water_heater:
    optimistic: true
    current_temperature: !lambda "return 45.0f;"
    target_temperature: !lambda "return 60.0f;"
    away: !lambda "return id(global_away);"
    is_on: !lambda "return id(global_is_on);"
    # Note: No mode lambda - we want optimistic mode changes to stick
    # A mode lambda would override mode changes in loop()
    supported_modes:
@@ -32,8 +22,3 @@ water_heater:
    min_temperature: 30.0
    max_temperature: 85.0
    target_temperature_step: 0.5
    set_action:
      - lambda: |-
          // Sync optimistic state back to globals so lambdas reflect the change
          id(global_away) = id(test_boiler).is_away();
          id(global_is_on) = id(test_boiler).is_on();

@@ -5,13 +5,7 @@ from __future__ import annotations
import asyncio

import aioesphomeapi
from aioesphomeapi import (
    WaterHeaterFeature,
    WaterHeaterInfo,
    WaterHeaterMode,
    WaterHeaterState,
    WaterHeaterStateFlag,
)
from aioesphomeapi import WaterHeaterInfo, WaterHeaterMode, WaterHeaterState
import pytest

from .state_utils import InitialStateHelper
@@ -28,25 +22,18 @@ async def test_water_heater_template(
    loop = asyncio.get_running_loop()
    async with run_compiled(yaml_config), api_client_connected() as client:
        states: dict[int, aioesphomeapi.EntityState] = {}
        state_future: asyncio.Future[WaterHeaterState] | None = None
        gas_mode_future: asyncio.Future[WaterHeaterState] = loop.create_future()
        eco_mode_future: asyncio.Future[WaterHeaterState] = loop.create_future()

        def on_state(state: aioesphomeapi.EntityState) -> None:
            states[state.key] = state
            if (
                isinstance(state, WaterHeaterState)
                and state_future is not None
                and not state_future.done()
            ):
                state_future.set_result(state)

        async def wait_for_state(timeout: float = 5.0) -> WaterHeaterState:
            """Wait for next water heater state change."""
            nonlocal state_future
            state_future = loop.create_future()
            try:
                return await asyncio.wait_for(state_future, timeout)
            finally:
                state_future = None
            if isinstance(state, WaterHeaterState):
                # Wait for GAS mode
                if state.mode == WaterHeaterMode.GAS and not gas_mode_future.done():
                    gas_mode_future.set_result(state)
                # Wait for ECO mode (we start at OFF, so test transitioning to ECO)
                elif state.mode == WaterHeaterMode.ECO and not eco_mode_future.done():
                    eco_mode_future.set_result(state)

        # Get entities and set up state synchronization
        entities, services = await client.list_entities_services()
@@ -102,52 +89,24 @@ async def test_water_heater_template(
            f"Expected target temp 60.0, got {initial_state.target_temperature}"
        )

        # Verify supported features: away mode and on/off (fixture has away + is_on lambdas)
        assert (
            test_water_heater.supported_features & WaterHeaterFeature.SUPPORTS_AWAY_MODE
        ) != 0, "Expected SUPPORTS_AWAY_MODE in supported_features"
        assert (
            test_water_heater.supported_features & WaterHeaterFeature.SUPPORTS_ON_OFF
        ) != 0, "Expected SUPPORTS_ON_OFF in supported_features"

        # Verify initial state: on (is_on lambda returns true), not away (away lambda returns false)
        assert (initial_state.state & WaterHeaterStateFlag.ON) != 0, (
            "Expected initial state to include ON flag"
        )
        assert (initial_state.state & WaterHeaterStateFlag.AWAY) == 0, (
            "Expected initial state to not include AWAY flag"
        )

        # Test turning on away mode
        client.water_heater_command(test_water_heater.key, away=True)
        away_on_state = await wait_for_state()
        assert (away_on_state.state & WaterHeaterStateFlag.AWAY) != 0
        # ON flag should still be set (is_on lambda returns true)
        assert (away_on_state.state & WaterHeaterStateFlag.ON) != 0

        # Test turning off away mode
        client.water_heater_command(test_water_heater.key, away=False)
        away_off_state = await wait_for_state()
        assert (away_off_state.state & WaterHeaterStateFlag.AWAY) == 0
        assert (away_off_state.state & WaterHeaterStateFlag.ON) != 0

        # Test turning off (on=False)
        client.water_heater_command(test_water_heater.key, on=False)
        off_state = await wait_for_state()
        assert (off_state.state & WaterHeaterStateFlag.ON) == 0
        assert (off_state.state & WaterHeaterStateFlag.AWAY) == 0

        # Test turning back on (on=True)
        client.water_heater_command(test_water_heater.key, on=True)
        on_state = await wait_for_state()
        assert (on_state.state & WaterHeaterStateFlag.ON) != 0

        # Test changing to GAS mode
        client.water_heater_command(test_water_heater.key, mode=WaterHeaterMode.GAS)
        gas_state = await wait_for_state()

        try:
            gas_state = await asyncio.wait_for(gas_mode_future, timeout=5.0)
        except TimeoutError:
            pytest.fail("GAS mode change not received within 5 seconds")

        assert isinstance(gas_state, WaterHeaterState)
        assert gas_state.mode == WaterHeaterMode.GAS

        # Test changing to ECO mode (from GAS)
        client.water_heater_command(test_water_heater.key, mode=WaterHeaterMode.ECO)
        eco_state = await wait_for_state()

        try:
            eco_state = await asyncio.wait_for(eco_mode_future, timeout=5.0)
        except TimeoutError:
            pytest.fail("ECO mode change not received within 5 seconds")

        assert isinstance(eco_state, WaterHeaterState)
        assert eco_state.mode == WaterHeaterMode.ECO
```