Merge branch 'parition_callbacks' into integration

J. Nick Koston
2025-12-02 22:42:13 -06:00
20 changed files with 175 additions and 37 deletions

View File

@@ -22,7 +22,7 @@ jobs:
if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
steps:
- name: Checkout
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Generate a token
id: generate-token

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0

View File

@@ -43,7 +43,7 @@ jobs:
- "docker"
# - "lint"
steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:

View File

@@ -49,7 +49,7 @@ jobs:
- name: Check out code from base repository
if: steps.pr.outputs.skip != 'true'
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Always check out from the base repository (esphome/esphome), never from forks
# Use the PR's target branch to ensure we run trusted code from the main repo

View File

@@ -36,7 +36,7 @@ jobs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Generate cache-key
id: cache-key
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
@@ -70,7 +70,7 @@ jobs:
if: needs.determine-jobs.outputs.python-linters == 'true'
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -91,7 +91,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -132,7 +132,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
id: restore-python
uses: ./.github/actions/restore-python
@@ -183,7 +183,7 @@ jobs:
component-test-batches: ${{ steps.determine.outputs.component-test-batches }}
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Fetch enough history to find the merge base
fetch-depth: 2
@@ -237,7 +237,7 @@ jobs:
if: needs.determine-jobs.outputs.integration-tests == 'true'
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python 3.13
id: python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -273,7 +273,7 @@ jobs:
if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]')
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
@@ -321,7 +321,7 @@ jobs:
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -400,7 +400,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -489,7 +489,7 @@ jobs:
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -577,7 +577,7 @@ jobs:
version: 1.0
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -662,7 +662,7 @@ jobs:
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
steps:
- name: Check out code from GitHub
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -688,7 +688,7 @@ jobs:
skip: ${{ steps.check-script.outputs.skip }}
steps:
- name: Check out target branch
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.base_ref }}
@@ -840,7 +840,7 @@ jobs:
flash_usage: ${{ steps.extract.outputs.flash_usage }}
steps:
- name: Check out PR branch
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -908,7 +908,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:

View File

@@ -54,7 +54,7 @@ jobs:
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -20,7 +20,7 @@ jobs:
branch_build: ${{ steps.tag.outputs.branch_build }}
deploy_env: ${{ steps.tag.outputs.deploy_env }}
steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Get tag
id: tag
# yamllint disable rule:line-length
@@ -60,7 +60,7 @@ jobs:
contents: read
id-token: write
steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
@@ -92,7 +92,7 @@ jobs:
os: "ubuntu-24.04-arm"
steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
@@ -168,7 +168,7 @@ jobs:
- ghcr
- dockerhub
steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Download digests
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0

View File

@@ -13,10 +13,10 @@ jobs:
if: github.repository == 'esphome/esphome'
steps:
- name: Checkout
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Checkout Home Assistant
-uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
repository: home-assistant/core
path: lib/home-assistant

View File

@@ -12,10 +12,11 @@ void AnalogThresholdBinarySensor::setup() {
// TRUE state is defined to be when sensor is >= threshold
// so when undefined sensor value initialize to FALSE
if (std::isnan(sensor_value)) {
this->raw_state_ = false;
this->publish_initial_state(false);
} else {
-this->publish_initial_state(sensor_value >=
-    (this->lower_threshold_.value() + this->upper_threshold_.value()) / 2.0f);
+this->raw_state_ = sensor_value >= (this->lower_threshold_.value() + this->upper_threshold_.value()) / 2.0f;
+this->publish_initial_state(this->raw_state_);
}
}
@@ -25,8 +26,10 @@ void AnalogThresholdBinarySensor::set_sensor(sensor::Sensor *analog_sensor) {
this->sensor_->add_on_state_callback([this](float sensor_value) {
// if there is an invalid sensor reading, ignore the change and keep the current state
if (!std::isnan(sensor_value)) {
-this->publish_state(sensor_value >=
-    (this->state ? this->lower_threshold_.value() : this->upper_threshold_.value()));
+// Use raw_state_ for hysteresis logic, not this->state which is post-filter
+this->raw_state_ =
+    sensor_value >= (this->raw_state_ ? this->lower_threshold_.value() : this->upper_threshold_.value());
+this->publish_state(this->raw_state_);
}
});
}

View File

@@ -20,6 +20,7 @@ class AnalogThresholdBinarySensor : public Component, public binary_sensor::Bina
sensor::Sensor *sensor_{nullptr};
TemplatableValue<float> upper_threshold_{};
TemplatableValue<float> lower_threshold_{};
bool raw_state_{false}; // Pre-filter state for hysteresis logic
};
} // namespace analog_threshold
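
Note on the hysteresis change above: threshold selection is now keyed off the new pre-filter raw_state_ member instead of this->state, because the published state may have been altered by binary_sensor filters (inversion, delays, and the like) and would then pick the wrong threshold. A minimal sketch of the same decision logic in Python; illustrative only, with hypothetical names, not the ESPHome API:

import math

class ThresholdHysteresis:
    """Toy model of the raw_state_ hysteresis above (not the ESPHome API)."""

    def __init__(self, lower: float, upper: float) -> None:
        self.lower = lower
        self.upper = upper
        self.raw_state = False  # pre-filter state, mirrors raw_state_

    def initial(self, value: float) -> bool:
        # An unknown first reading initializes to OFF; otherwise compare to the midpoint.
        if math.isnan(value):
            self.raw_state = False
        else:
            self.raw_state = value >= (self.lower + self.upper) / 2.0
        return self.raw_state

    def update(self, value: float) -> bool:
        # Invalid readings keep the current state.
        if math.isnan(value):
            return self.raw_state
        # Once ON, only dropping below the lower threshold turns it OFF;
        # once OFF, only reaching the upper threshold turns it ON.
        threshold = self.lower if self.raw_state else self.upper
        self.raw_state = value >= threshold
        return self.raw_state

With lower=2.0 and upper=3.0, a reading of 2.5 leaves the state wherever it is; that dead band is the point of hysteresis, and it only holds if the same raw state that picked the threshold is the one being updated.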

View File

@@ -580,7 +580,7 @@ void WiFiComponent::loop() {
WiFiComponent::WiFiComponent() { global_wifi_component = this; }
bool WiFiComponent::has_ap() const { return this->has_ap_; }
-bool WiFiComponent::is_ap_active() const { return this->state_ == WIFI_COMPONENT_STATE_AP; }
+bool WiFiComponent::is_ap_active() const { return this->ap_started_; }
bool WiFiComponent::has_sta() const { return !this->sta_.empty(); }
#ifdef USE_WIFI_11KV_SUPPORT
void WiFiComponent::set_btm(bool btm) { this->btm_ = btm; }

View File

@@ -616,6 +616,7 @@ class WiFiComponent : public Component {
bool error_from_callback_{false};
bool scan_done_{false};
bool ap_setup_{false};
bool ap_started_{false};
bool passive_scan_{false};
bool has_saved_wifi_settings_{false};
#ifdef USE_WIFI_11KV_SUPPORT

View File

@@ -82,8 +82,11 @@ bool WiFiComponent::wifi_mode_(optional<bool> sta, optional<bool> ap) {
if (!ret) {
ESP_LOGW(TAG, "Set mode failed");
return false;
}
this->ap_started_ = target_ap;
return ret;
}
bool WiFiComponent::wifi_apply_power_save_() {

View File

@@ -53,7 +53,6 @@ static esp_netif_t *s_ap_netif = nullptr; // NOLINT(cppcoreguidelines-avoid-
#endif // USE_WIFI_AP
static bool s_sta_started = false; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
static bool s_sta_connected = false; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
-static bool s_ap_started = false;           // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
static bool s_sta_connect_not_found = false; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
static bool s_sta_connect_error = false; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
static bool s_sta_connecting = false; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
@@ -831,11 +830,11 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
} else if (data->event_base == WIFI_EVENT && data->event_id == WIFI_EVENT_AP_START) {
ESP_LOGV(TAG, "AP start");
-s_ap_started = true;
+this->ap_started_ = true;
} else if (data->event_base == WIFI_EVENT && data->event_id == WIFI_EVENT_AP_STOP) {
ESP_LOGV(TAG, "AP stop");
-s_ap_started = false;
+this->ap_started_ = false;
} else if (data->event_base == WIFI_EVENT && data->event_id == WIFI_EVENT_AP_PROBEREQRECVED) {
const auto &it = data->data.ap_probe_req_rx;

View File

@@ -50,8 +50,11 @@ bool WiFiComponent::wifi_mode_(optional<bool> sta, optional<bool> ap) {
if (!ret) {
ESP_LOGW(TAG, "Setting mode failed");
return false;
}
this->ap_started_ = enable_ap;
return ret;
}
bool WiFiComponent::wifi_apply_output_power_(float output_power) {

View File

@@ -28,11 +28,15 @@ bool WiFiComponent::wifi_mode_(optional<bool> sta, optional<bool> ap) {
cyw43_wifi_set_up(&cyw43_state, CYW43_ITF_STA, true, CYW43_COUNTRY_WORLDWIDE);
}
}
bool ap_state = false;
if (ap.has_value()) {
if (ap.value()) {
cyw43_wifi_set_up(&cyw43_state, CYW43_ITF_AP, true, CYW43_COUNTRY_WORLDWIDE);
ap_state = true;
}
}
this->ap_started_ = ap_state;
return true;
}
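
Across the Wi-Fi hunks above, is_ap_active() now reports an explicit ap_started_ flag that is set where the AP is actually brought up or torn down: on the mode changes in the backends shown here (including the CYW43/Pico W path), and on WIFI_EVENT_AP_START/WIFI_EVENT_AP_STOP in the ESP-IDF backend, which also retires the file-local s_ap_started. A toy sketch of the pattern in Python, with illustrative names rather than the ESPHome code: status is driven by backend events instead of being inferred from a higher-level state machine.

class AccessPointStatus:
    """Toy model of the ap_started_ pattern: only the backend that starts or
    stops the AP updates the flag that readers consume."""

    def __init__(self) -> None:
        self._ap_started = False

    # Called from whichever layer actually starts/stops the AP
    # (a driver event, a mode change, etc.).
    def on_ap_start(self) -> None:
        self._ap_started = True

    def on_ap_stop(self) -> None:
        self._ap_started = False

    def is_ap_active(self) -> bool:
        # Report what the backend said, not what some other state machine implies.
        return self._ap_started

The practical difference is that the old check (state_ == WIFI_COMPONENT_STATE_AP) answers "is the component in AP mode", while the flag answers "is the AP actually up"; the two presumably diverge when the AP runs alongside a station connection.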

View File

@@ -107,9 +107,24 @@ FILTER_PLATFORMIO_LINES = [
r"Warning: DEPRECATED: 'esptool.py' is deprecated. Please use 'esptool' instead. The '.py' suffix will be removed in a future major release.",
r"Warning: esp-idf-size exited with code 2",
r"esp_idf_size: error: unrecognized arguments: --ng",
r"Package configuration completed successfully",
]
class PlatformioLogFilter(logging.Filter):
"""Filter to suppress noisy platformio log messages."""
_PATTERN = re.compile(
r"|".join(r"(?:" + pattern + r")" for pattern in FILTER_PLATFORMIO_LINES)
)
def filter(self, record: logging.LogRecord) -> bool:
# Only filter messages from platformio-related loggers
if "platformio" not in record.name.lower():
return True
return self._PATTERN.match(record.getMessage()) is None
def run_platformio_cli(*args, **kwargs) -> str | int:
os.environ["PLATFORMIO_FORCE_COLOR"] = "true"
os.environ["PLATFORMIO_BUILD_DIR"] = str(CORE.relative_pioenvs_path().absolute())
@@ -130,7 +145,18 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
patch_structhash()
patch_file_downloader()
-return run_external_command(platformio.__main__.main, *cmd, **kwargs)
# Add log filter to suppress noisy platformio messages
log_filter = PlatformioLogFilter() if not CORE.verbose else None
if log_filter:
for handler in logging.getLogger().handlers:
handler.addFilter(log_filter)
try:
return run_external_command(platformio.__main__.main, *cmd, **kwargs)
finally:
if log_filter:
for handler in logging.getLogger().handlers:
handler.removeFilter(log_filter)
def run_platformio_cli_run(config, verbose, *args, **kwargs) -> str | int:
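
The new PlatformioLogFilter folds the noise patterns into one start-anchored regex (re.match) and is attached to every root-logger handler only for the duration of the PlatformIO call, and only when not running verbose, then removed again in the finally block. A self-contained sketch of that attach/filter/detach pattern with the standard logging module; the patterns and messages below are illustrative, not the real FILTER_PLATFORMIO_LINES:

import logging
import re

# Illustrative noise patterns, not the actual FILTER_PLATFORMIO_LINES list.
NOISE_PATTERNS = [
    r"Found \d+ compatible libraries",
    r"Building in \w+ mode",
]

class NoiseFilter(logging.Filter):
    """Drop known-noisy messages, but only from platformio-named loggers."""

    _PATTERN = re.compile("|".join(f"(?:{p})" for p in NOISE_PATTERNS))

    def filter(self, record: logging.LogRecord) -> bool:
        if "platformio" not in record.name.lower():
            return True  # never touch other loggers
        return self._PATTERN.match(record.getMessage()) is None

logging.basicConfig(level=logging.INFO)
noise_filter = NoiseFilter()
root_handlers = logging.getLogger().handlers
for handler in root_handlers:
    handler.addFilter(noise_filter)
try:
    logging.getLogger("platformio.builder").info("Found 5 compatible libraries")  # suppressed
    logging.getLogger("platformio.builder").info("Compiling src/main.cpp")  # shown
    logging.getLogger("esphome.core").info("Found 5 compatible libraries")  # shown
finally:
    for handler in root_handlers:
        handler.removeFilter(noise_filter)

Attaching at the handler level matters: filters set on the root logger itself are not consulted for records that propagate up from child loggers, whereas handler filters see every record the handler is asked to emit, hence the extra record.name check so non-PlatformIO loggers pass through untouched.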

View File

@@ -1,4 +1,4 @@
-pylint==4.0.3
+pylint==4.0.4
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.7 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.2 # also change in .pre-commit-config.yaml when updating

View File

@@ -1,6 +1,7 @@
"""Tests for platformio_api.py path functions."""
import json
import logging
import os
from pathlib import Path
import shutil
@@ -670,3 +671,100 @@ def test_process_stacktrace_bad_alloc(
assert "Memory allocation of 512 bytes failed at 40201234" in caplog.text
mock_decode_pc.assert_called_once_with(config, "40201234")
assert state is False
def test_platformio_log_filter_allows_non_platformio_messages() -> None:
"""Test that non-platformio logger messages are allowed through."""
log_filter = platformio_api.PlatformioLogFilter()
record = logging.LogRecord(
name="esphome.core",
level=logging.INFO,
pathname="",
lineno=0,
msg="Some esphome message",
args=(),
exc_info=None,
)
assert log_filter.filter(record) is True
@pytest.mark.parametrize(
"msg",
[
"Verbose mode can be enabled via `-v, --verbose` option",
"Found 5 compatible libraries",
"Found 123 compatible libraries",
"Building in release mode",
"Building in debug mode",
"Merged 2 ELF section",
"esptool.py v4.7.0",
"esptool v4.8.1",
"PLATFORM: espressif32 @ 6.4.0",
"Using cache: /path/to/cache",
"Package configuration completed successfully",
"Scanning dependencies...",
"Installing dependencies",
"Library Manager: Already installed, built-in library",
"Memory Usage -> https://bit.ly/pio-memory-usage",
],
)
def test_platformio_log_filter_blocks_noisy_messages(msg: str) -> None:
"""Test that noisy platformio messages are filtered out."""
log_filter = platformio_api.PlatformioLogFilter()
record = logging.LogRecord(
name="platformio.builder",
level=logging.INFO,
pathname="",
lineno=0,
msg=msg,
args=(),
exc_info=None,
)
assert log_filter.filter(record) is False
@pytest.mark.parametrize(
"msg",
[
"Compiling .pio/build/test/src/main.cpp.o",
"Linking .pio/build/test/firmware.elf",
"Error: something went wrong",
"warning: unused variable",
],
)
def test_platformio_log_filter_allows_other_platformio_messages(msg: str) -> None:
"""Test that non-noisy platformio messages are allowed through."""
log_filter = platformio_api.PlatformioLogFilter()
record = logging.LogRecord(
name="platformio.builder",
level=logging.INFO,
pathname="",
lineno=0,
msg=msg,
args=(),
exc_info=None,
)
assert log_filter.filter(record) is True
@pytest.mark.parametrize(
"logger_name",
[
"PLATFORMIO.builder",
"PlatformIO.core",
"platformio.run",
],
)
def test_platformio_log_filter_case_insensitive_logger_name(logger_name: str) -> None:
"""Test that platformio logger name matching is case insensitive."""
log_filter = platformio_api.PlatformioLogFilter()
record = logging.LogRecord(
name=logger_name,
level=logging.INFO,
pathname="",
lineno=0,
msg="Found 5 compatible libraries",
args=(),
exc_info=None,
)
assert log_filter.filter(record) is False