Compare commits

...

77 Commits

Author SHA1 Message Date
J. Nick Koston
840ad30880 Merge branch 'dev' into json_web_server_stack 2026-01-29 14:48:48 -10:00
J. Nick Koston
cfe121b38b private implementation details 2026-01-29 18:45:18 -06:00
J. Nick Koston
5fbd9d5b14 avoid misuse 2026-01-29 18:23:25 -06:00
J. Nick Koston
2b1783ce61 tweak 2026-01-29 18:19:29 -06:00
J. Nick Koston
904072ce79 make sure NRVO works 2026-01-29 18:17:34 -06:00
J. Nick Koston
0a4b98d74a fix double templates 2026-01-29 17:43:26 -06:00
David Woodhouse
823b5ac1ab [ch423] Add CH423 I/O expander component (#13079)
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2026-01-29 18:16:15 -05:00
J. Nick Koston
b8017de724 avoid regressing performance of mqtt 2026-01-29 16:55:20 -06:00
J. Nick Koston
ca96604582 change all the cases 2026-01-29 16:49:28 -06:00
J. Nick Koston
d18d378f06 missed a few 2026-01-29 16:27:28 -06:00
J. Nick Koston
83e3752544 missed a few 2026-01-29 16:26:50 -06:00
J. Nick Koston
0490b2d450 no dummy 2026-01-29 16:24:30 -06:00
J. Nick Koston
55ff740e4e no dummy 2026-01-29 16:23:41 -06:00
J. Nick Koston
aba8a83cba ard as well 2026-01-29 16:02:32 -06:00
J. Nick Koston
a23809d5db fixes 2026-01-29 15:41:29 -06:00
J. Nick Koston
32fc3ea6f5 config stack 2026-01-29 15:33:24 -06:00
J. Nick Koston
deb8ffd348 json webserver stack 2026-01-29 15:26:37 -06:00
dependabot[bot]
6de2049076 Bump actions/cache from 5.0.2 to 5.0.3 in /.github/actions/restore-python (#13622)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-29 14:35:52 -06:00
dependabot[bot]
cd43f8474e Bump actions/cache from 5.0.2 to 5.0.3 (#13621)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-29 14:35:32 -06:00
J. Nick Koston
ecc0b366b3 [esp32] Reduce compile time by excluding unused IDF components (#13610) 2026-01-29 13:21:12 -06:00
tomaszduda23
6a17db8857 [nrf52,zigbee] Support for number component (#13581) 2026-01-29 11:52:46 -05:00
Keith Burzinski
0843ec6ae8 [const] Move CONF_AUDIO_DAC (#13614) 2026-01-29 04:39:40 +00:00
J. Nick Koston
74c84c8747 [esp32] Add advanced sdkconfig options to reduce build time and binary size (#13611) 2026-01-28 18:20:39 -10:00
rwrozelle
3e9a6c582e [mdns] Do not broadcast registration when using openthread component (#13592) 2026-01-28 18:16:59 -10:00
Keith Burzinski
084113926c [es8156] Add bits_per_sample validation, comment code (#13612) 2026-01-28 22:03:50 -06:00
J. Nick Koston
a5f60750c2 [tx20] Eliminate heap allocations in wind sensor (#13298) 2026-01-29 16:07:41 +13:00
Clyde Stubbs
a382383d83 [workflows] Add deprecation check (#13584) 2026-01-29 12:08:45 +13:00
Clyde Stubbs
03cfd87b16 [waveshare_epaper] Add deprecation message (#13583) 2026-01-29 09:44:21 +13:00
Clyde Stubbs
6d8294c2d3 [workflows] Refactor auto-label-pr script into modular JS (#13582) 2026-01-29 09:42:55 +13:00
J. Nick Koston
6a3205f4db [globals] Convert restoring globals to PollingComponent to reduce CPU usage (#13345) 2026-01-28 20:35:26 +00:00
dependabot[bot]
6f22509883 Bump docker/login-action from 3.6.0 to 3.7.0 in the docker-actions group (#13606)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-28 09:42:05 -10:00
J. Nick Koston
455ade0dca [http_request] Fix empty body for chunked transfer encoding responses (#13599) 2026-01-28 09:41:42 -10:00
J. Nick Koston
87fcfc9d76 [wifi] Fix ESP8266 yield panic when WiFi scan fails (#13603) 2026-01-28 09:40:00 -10:00
tomaszduda23
d86048cc2d [nrf52,zigbee] Address change (#13580) 2026-01-28 11:41:04 -05:00
J. Nick Koston
e1355de4cb [runtime_stats] Eliminate heap churn by using stack-allocated buffer for sorting (#13586) 2026-01-28 16:06:33 +00:00
Cody Cutrer
7385c4cf3d [ld2450] preserve precision of angle (#13600) 2026-01-28 11:04:43 -05:00
tomaszduda23
3bd6ec4ec7 [nrf52,zigbee] Time synchronization (#12236)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
2026-01-28 15:51:17 +00:00
J. Nick Koston
051604f284 [wifi] Filter scan results to only store matching networks (#13409)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-28 05:37:05 -10:00
Dan Schafer
10dfd95ff2 [esp32] Add pin definitions for adafruit_feather_esp32s3_reversetft (#13273) 2026-01-28 09:50:19 -05:00
Hypothalamus
22e0a8ce2e [hub75] Add Huidu HD-WF1 board configuration (#13341) 2026-01-27 20:10:49 -10:00
J. Nick Koston
b4f63fd992 [core] Add LOG_ENTITY_ICON/DEVICE_CLASS/UNIT_OF_MEASUREMENT macros (#13578) 2026-01-28 05:11:30 +00:00
tomaszduda23
ded835ab63 [nrf52] Move toolchain to platform (#13498)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
2026-01-28 04:51:18 +00:00
J. Nick Koston
73a249c075 [esp32] Default to CMN certificate bundle, saving ~51KB flash (#13574) 2026-01-28 04:02:01 +00:00
J. Nick Koston
fe6f27c526 [text_sensor] Use in-place mutation for filters to reduce heap allocations (#13475) 2026-01-27 17:33:46 -10:00
J. Nick Koston
f73c539ea7 [web_server] Add RP2040 platform support (#13576) 2026-01-27 17:18:31 -10:00
Edward Firmo
f87aa384d0 [nextion] Fix alternative code path for dump_device_info (#13566)
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2026-01-27 16:31:00 -10:00
J. Nick Koston
f9687a2a31 [web_server_idf] Replace heap-allocated url() with stack-based url_to() (#13407) 2026-01-28 14:02:19 +13:00
Stuart Parmenter
f084d320fc [hub75] Update esp-hub75 to 0.3.2 (#13572)
Co-authored-by: Claude <noreply@anthropic.com>
2026-01-27 09:24:13 -10:00
esphomebot
f93382445e Update webserver local assets to 20260127-190637 (#13573)
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2026-01-27 19:21:26 +00:00
J. Nick Koston
463363a08d [web_server] Add name_id to SSE for entity ID format migration (#13535) 2026-01-27 09:08:46 -10:00
J. Nick Koston
a0790f926e [libretiny] Regenerate boards for v1.11.0 (#13539) 2026-01-28 07:59:01 +13:00
J. Nick Koston
ca59ab8f37 [esp32] Eliminate dead exception class code via linker wraps (#13564) 2026-01-27 07:47:34 -10:00
J. Nick Koston
b2474c6de9 [nfc] Use StaticVector for NFC UID storage to eliminate heap allocation (#13507) 2026-01-26 19:43:52 -10:00
J. Nick Koston
3aaf10b6a8 [web_server_base] Update ESPAsyncWebServer to 3.9.5 (#13467) 2026-01-27 04:18:57 +00:00
J. Nick Koston
33f545a8e3 [factory_reset] Store reset reason comparison strings in flash on ESP8266 (#13547)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
2026-01-27 03:50:49 +00:00
J. Nick Koston
d056e1040b [mqtt] Store command comparison strings in flash on ESP8266 (#13546)
Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com>
2026-01-27 03:48:06 +00:00
J. Nick Koston
75a78b2bf3 [core] Encapsulate entity preference creation to prepare for hash migration (#13505) 2026-01-26 17:35:45 -10:00
J. Nick Koston
cd6314dc96 [socket] ESP8266: call delay(0) instead of esp_delay(0, cb) for zero timeout (#13530) 2026-01-26 17:34:55 -10:00
J. Nick Koston
f91bffff9a [wifi] Avoid heap allocation when building AP SSID (#13474) 2026-01-26 17:32:58 -10:00
J. Nick Koston
5cbe9af485 [rp2040] Use SmallBufferWithHeapFallback for preferences (#13501) 2026-01-26 17:32:03 -10:00
J. Nick Koston
a7fbecb25c [ci] Soft-deprecate str_sprintf/str_snprintf to prevent hidden heap allocations (#13227) 2026-01-26 17:28:07 -10:00
J. Nick Koston
bf92d94863 [mqtt] Use stack buffers for publish_state() topic building (#13434)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com>
2026-01-26 17:25:02 -10:00
J. Nick Koston
9c3817f544 [sml] Use constexpr std::array for START_SEQ constant (#13506) 2026-01-26 17:21:17 -10:00
J. Nick Koston
ee9e3315b6 [tm1638] Use member array instead of heap allocation for display buffer (#13504) 2026-01-26 17:21:05 -10:00
J. Nick Koston
67dea1e538 [light] Use member array instead of heap allocation in AddressableLightWrapper (#13503) 2026-01-26 17:20:49 -10:00
J. Nick Koston
003b9c6c3f [uln2003] Refactor step mode logging to use LogString (#13543) 2026-01-26 17:20:33 -10:00
J. Nick Koston
2f1a345905 [mhz19] Refactor detection range logging to use LogString (#13541) 2026-01-26 17:20:21 -10:00
J. Nick Koston
7ef933abec [libretiny] Bump to 1.11.0 (#13512) 2026-01-26 17:20:08 -10:00
J. Nick Koston
4ddd40bcfb [core] Add PROGMEM string comparison helpers and use in cover/valve/helpers (#13545) 2026-01-26 17:19:50 -10:00
J. Nick Koston
8ae901b3f1 [http_request] Use stack allocation for MD5 buffer in OTA (#13550) 2026-01-26 17:19:30 -10:00
J. Nick Koston
bc49174920 Add additional text_sensor filter tests (#13479) 2026-01-26 17:18:36 -10:00
J. Nick Koston
123ee02d39 [ota] Improve error message when device closes connection without responding (#13562) 2026-01-26 17:13:18 -10:00
Jonathan Swoboda
0cc8055757 [http_request] Add custom CA certificate support for ESP32 (#13552)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-26 22:07:27 -05:00
dependabot[bot]
27a212c14d Bump aioesphomeapi from 43.13.0 to 43.14.0 (#13557)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-26 15:43:40 -10:00
dependabot[bot]
65dc182526 Bump setuptools from 80.10.1 to 80.10.2 (#13558)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-26 15:43:27 -10:00
dependabot[bot]
dd91039ff1 Bump github/codeql-action from 4.31.11 to 4.32.0 (#13559)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-26 15:43:16 -10:00
sebcaps
1c9a9c7536 [mhz19] Fix Uninitialized var warning message (#13526) 2026-01-25 20:07:29 -10:00
216 changed files with 12845 additions and 10065 deletions

View File

@@ -1 +1 @@
15dc295268b2dcf75942f42759b3ddec64eba89f75525698eb39c95a7f4b14ce
cf3d341206b4184ec8b7fe85141aef4fe4696aa720c3f8a06d4e57930574bdab

View File

@@ -22,7 +22,7 @@ runs:
python-version: ${{ inputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: venv
# yamllint disable-line rule:line-length

.github/scripts/auto-label-pr/constants.js

View File

@@ -0,0 +1,38 @@
// Constants and markers for PR auto-labeling
module.exports = {
BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
CODEOWNERS_MARKER: '<!-- codeowners-request -->',
TOO_BIG_MARKER: '<!-- too-big-request -->',
DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',
MANAGED_LABELS: [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'chained-pr',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck',
'bugfix',
'new-feature',
'breaking-change',
'developer-breaking-change',
'code-quality',
'deprecated-component'
],
DOCS_PR_PATTERNS: [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
]
};
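For orientation, a minimal usage sketch of these constants (the isManagedLabel helper is hypothetical; its logic mirrors the managed-label filter in index.js further down):

// Hypothetical usage sketch, not part of the diff.
const { MANAGED_LABELS, DOCS_PR_PATTERNS } = require('./constants');

// A label is bot-managed if it is a per-component label or listed in MANAGED_LABELS.
function isManagedLabel(label) {
  return label.startsWith('component: ') || MANAGED_LABELS.includes(label);
}

// DOCS_PR_PATTERNS match esphome-docs pull request links in a PR body.
const prBody = 'Docs: https://github.com/esphome/esphome-docs/pull/1234';
console.log(isManagedLabel('component: ch423'));            // true
console.log(DOCS_PR_PATTERNS.some((p) => p.test(prBody)));  // true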

.github/scripts/auto-label-pr/detectors.js

View File

@@ -0,0 +1,373 @@
const fs = require('fs');
const { DOCS_PR_PATTERNS } = require('./constants');
// Strategy: Merge branch detection
async function detectMergeBranch(context) {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
} else if (baseRef !== 'dev') {
labels.add('chained-pr');
}
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(changedFiles, apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents(prFiles) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(prFiles, apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges(changedFiles) {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD) {
const labels = new Set();
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges(changedFiles) {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges(changedFiles) {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner(github, context, changedFiles) {
const labels = new Set();
const { owner, repo } = context.repo;
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests(changedFiles) {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes(context) {
const labels = new Set();
const prBody = context.payload.pull_request.body || '';
console.log('Checking PR template checkboxes...');
// Check for checked checkboxes in the "Types of changes" section
const checkboxPatterns = [
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
];
for (const { pattern, label } of checkboxPatterns) {
if (pattern.test(prBody)) {
console.log(`Found checked checkbox for: ${label}`);
labels.add(label);
}
}
return labels;
}
// Strategy: Deprecated component detection
async function detectDeprecatedComponents(github, context, changedFiles) {
const labels = new Set();
const deprecatedInfo = [];
const { owner, repo } = context.repo;
// Compile regex once for better performance
const componentFileRegex = /^esphome\/components\/([^\/]+)\//;
// Get files that are modified or added in components directory
const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));
if (componentFiles.length === 0) {
return { labels, deprecatedInfo };
}
// Extract unique component names using the same regex
const components = new Set();
for (const file of componentFiles) {
const match = file.match(componentFileRegex);
if (match) {
components.add(match[1]);
}
}
// Get PR head to fetch files from the PR branch
const prNumber = context.payload.pull_request.number;
// Check each component's __init__.py for DEPRECATED_COMPONENT constant
for (const component of components) {
const initFile = `esphome/components/${component}/__init__.py`;
try {
// Fetch file content from PR head using GitHub API
const { data: fileData } = await github.rest.repos.getContent({
owner,
repo,
path: initFile,
ref: `refs/pull/${prNumber}/head`
});
// Decode base64 content
const content = Buffer.from(fileData.content, 'base64').toString('utf8');
// Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
// Support single quotes, double quotes, and triple quotes (for multiline)
const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;
if (deprecatedMatch) {
labels.add('deprecated-component');
deprecatedInfo.push({
component: component,
message: deprecatedMatch[1].trim()
});
console.log(`Found deprecated component: ${component}`);
}
} catch (error) {
// Only log if it's not a simple "file not found" error (404)
if (error.status !== 404) {
console.log(`Error reading ${initFile}:`, error.message);
}
}
}
return { labels, deprecatedInfo };
}
// Strategy: Requirements detection
async function detectRequirements(allLabels, prFiles, context) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
module.exports = {
detectMergeBranch,
detectComponentPlatforms,
detectNewComponents,
detectNewPlatforms,
detectCoreChanges,
detectPRSize,
detectDashboardChanges,
detectGitHubActionsChanges,
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
};
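Since every detector is now an exported function, it can be exercised in isolation. A small sketch using the size detector with invented file data; the thresholds and file names are assumptions, not values from the workflow:

// Hypothetical standalone check of the size detector; all values are invented.
const { detectPRSize } = require('./detectors');

const prFiles = [
  { filename: 'esphome/components/foo/sensor.py', additions: 40, deletions: 5 },
  { filename: 'tests/components/foo/test.yaml', additions: 20, deletions: 0 },
];
const totalAdditions = 60;
const totalDeletions = 5;
const totalChanges = totalAdditions + totalDeletions; // 65

detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges,
  false /* isMegaPR */, 30 /* SMALL_PR_THRESHOLD */, 1000 /* TOO_BIG_THRESHOLD */)
  .then((labels) => console.log([...labels])); // [] (above the small-pr threshold, below too-big)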

.github/scripts/auto-label-pr/index.js
View File

@@ -0,0 +1,187 @@
const { MANAGED_LABELS } = require('./constants');
const {
detectMergeBranch,
detectComponentPlatforms,
detectNewComponents,
detectNewPlatforms,
detectCoreChanges,
detectPRSize,
detectDashboardChanges,
detectGitHubActionsChanges,
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
} = require('./detectors');
const { handleReviews } = require('./reviews');
const { applyLabels, removeOldLabels } = require('./labels');
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
module.exports = async ({ github, context }) => {
// Environment variables
const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD);
const MAX_LABELS = parseInt(process.env.MAX_LABELS);
const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);
const COMPONENT_LABEL_THRESHOLD = parseInt(process.env.COMPONENT_LABEL_THRESHOLD);
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}
// Fetch API data
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
const branchLabels = await detectMergeBranch(context);
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Apply labels
await applyLabels(github, context, finalLabels);
// Remove old managed labels
await removeOldLabels(github, context, managedLabels, finalLabels);
return;
}
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels,
checkboxLabels,
deprecatedResult
] = await Promise.all([
detectMergeBranch(context),
detectComponentPlatforms(changedFiles, apiData),
detectNewComponents(prFiles),
detectNewPlatforms(prFiles, apiData),
detectCoreChanges(changedFiles),
detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD),
detectDashboardChanges(changedFiles),
detectGitHubActionsChanges(changedFiles),
detectCodeOwner(github, context, changedFiles),
detectTests(changedFiles),
detectPRTemplateCheckboxes(context),
detectDeprecatedComponents(github, context, changedFiles)
]);
// Extract deprecated component info
const deprecatedLabels = deprecatedResult.labels;
const deprecatedInfo = deprecatedResult.deprecatedInfo;
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels,
...checkboxLabels,
...deprecatedLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels, prFiles, context);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}
// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);
// Apply labels
await applyLabels(github, context, finalLabels);
// Remove old managed labels
await removeOldLabels(github, context, managedLabels, finalLabels);
};
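One behaviour of the entry point worth noting: when the computed label set exceeds MAX_LABELS on a non-mega PR, it is collapsed to a single too-big label rather than applied verbatim. A tiny illustration with an assumed threshold:

// Illustration only; MAX_LABELS comes from the workflow environment in practice.
const MAX_LABELS = 15; // assumed value
const isMegaPR = false;
let finalLabels = Array.from({ length: 18 }, (_, i) => `component: c${i}`);
if (finalLabels.length > MAX_LABELS && !isMegaPR && !finalLabels.includes('too-big')) {
  finalLabels = ['too-big'];
}
console.log(finalLabels); // ['too-big']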

.github/scripts/auto-label-pr/labels.js
View File

@@ -0,0 +1,41 @@
// Apply labels to PR
async function applyLabels(github, context, finalLabels) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
}
// Remove old managed labels
async function removeOldLabels(github, context, managedLabels, finalLabels) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
}
module.exports = {
applyLabels,
removeOldLabels
};

.github/scripts/auto-label-pr/reviews.js
View File

@@ -0,0 +1,141 @@
const {
BOT_COMMENT_MARKER,
CODEOWNERS_MARKER,
TOO_BIG_MARKER,
DEPRECATED_COMPONENT_MARKER
} = require('./constants');
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
const messages = [];
// Deprecated component message
if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
message += `Hey there @${prAuthor},\n`;
message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;
for (const info of deprecatedInfo) {
message += `#### Component: \`${info.component}\`\n`;
message += `${info.message}\n\n`;
}
message += `Consider migrating to the recommended alternative if applicable.`;
messages.push(message);
}
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
const tooManyLabels = originalLabelCount > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${originalLabelCount} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const prAuthor = context.payload.pull_request.user.login;
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
module.exports = {
handleReviews
};
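The review body that handleReviews posts is the bot marker followed by the individual messages joined with a horizontal rule, which is also how later runs find and update the existing review. A minimal sketch of that assembly, with the message texts abbreviated:

// Sketch of the review body layout; message contents abbreviated.
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const messages = [
  '<!-- too-big-request -->\n### Pull Request Size\n\n...',
  '<!-- codeowners-request -->\n### Code Ownership\n\n...',
];
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${messages.join('\n\n---\n\n')}`;
// Later runs look for this marker to decide whether to update or dismiss the review.
console.log(reviewBody.startsWith(BOT_COMMENT_MARKER)); // true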

View File

@@ -36,633 +36,5 @@ jobs:
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const fs = require('fs');
// Constants
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
const TOO_BIG_MARKER = '<!-- too-big-request -->';
const MANAGED_LABELS = [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'chained-pr',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck',
'bugfix',
'new-feature',
'breaking-change',
'developer-breaking-change',
'code-quality'
];
const DOCS_PR_PATTERNS = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
// Strategy: Merge branch detection
async function detectMergeBranch() {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
} else if (baseRef !== 'dev') {
labels.add('chained-pr');
}
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents() {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges() {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges() {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges() {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner() {
const labels = new Set();
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests() {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes() {
const labels = new Set();
const prBody = context.payload.pull_request.body || '';
console.log('Checking PR template checkboxes...');
// Check for checked checkboxes in the "Types of changes" section
const checkboxPatterns = [
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
];
for (const { pattern, label } of checkboxPatterns) {
if (pattern.test(prBody)) {
console.log(`Found checked checkbox for: ${label}`);
labels.add(label);
}
}
return labels;
}
// Strategy: Requirements detection
async function detectRequirements(allLabels) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount) {
const messages = [];
const prAuthor = context.payload.pull_request.user.login;
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
const tooManyLabels = originalLabelCount > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${originalLabelCount} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(finalLabels, originalLabelCount) {
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
const branchLabels = await detectMergeBranch();
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Apply labels
if (finalLabels.length > 0) {
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
return;
}
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels,
checkboxLabels
] = await Promise.all([
detectMergeBranch(),
detectComponentPlatforms(apiData),
detectNewComponents(),
detectNewPlatforms(apiData),
detectCoreChanges(),
detectPRSize(),
detectDashboardChanges(),
detectGitHubActionsChanges(),
detectCodeOwner(),
detectTests(),
detectPRTemplateCheckboxes()
]);
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels,
...checkboxLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}
// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(finalLabels, originalLabelCount);
// Apply labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
const script = require('./.github/scripts/auto-label-pr/index.js');
await script({ github, context });
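With the inline script gone, the workflow step reduces to the two lines above. The thresholds that were previously inlined via ${{ env.* }} expressions are now read by index.js from process.env, so the step is assumed to export them in its env block (not shown in this hunk). A minimal sketch of that hand-off, with example values:

// In index.js (shown earlier); the step's env block is assumed to supply these.
const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD); // e.g. 30
const MAX_LABELS = parseInt(process.env.MAX_LABELS);                 // e.g. 15
const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);   // e.g. 5000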

View File

@@ -47,7 +47,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: venv
# yamllint disable-line rule:line-length
@@ -157,7 +157,7 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Save Python virtual environment cache
if: github.ref == 'refs/heads/dev'
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: venv
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -193,7 +193,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Restore components graph cache
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: .temp/components_graph.json
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -223,7 +223,7 @@ jobs:
echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
- name: Save components graph cache
if: github.ref == 'refs/heads/dev'
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: .temp/components_graph.json
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -245,7 +245,7 @@ jobs:
python-version: "3.13"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: venv
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -334,14 +334,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -413,14 +413,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -502,14 +502,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -735,7 +735,7 @@ jobs:
- name: Restore cached memory analysis
id: cache-memory-analysis
if: steps.check-script.outputs.skip != 'true'
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -759,7 +759,7 @@ jobs:
- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -800,7 +800,7 @@ jobs:
- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -847,7 +847,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}

View File

@@ -58,7 +58,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@19b2f06db2b6f5108140aeb04014ef02b648f789 # v4.31.11
uses: github/codeql-action/init@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@19b2f06db2b6f5108140aeb04014ef02b648f789 # v4.31.11
uses: github/codeql-action/analyze@b20883b0cd1f46c72ae0ba6d1090936928f9fa30 # v4.32.0
with:
category: "/language:${{matrix.language}}"

View File

@@ -102,12 +102,12 @@ jobs:
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Log in to docker hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -182,13 +182,13 @@ jobs:
- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.actor }}

View File

@@ -104,6 +104,7 @@ esphome/components/cc1101/* @gabest11 @lygris
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/ch422g/* @clydebarrow @jesterret
esphome/components/ch423/* @dwmw2
esphome/components/chsc6x/* @kkosik20
esphome/components/climate/* @esphome/core
esphome/components/climate_ir/* @glmnet

View File

@@ -2,7 +2,7 @@ import logging
import esphome.codegen as cg
from esphome.components import sensor, voltage_sampler
from esphome.components.esp32 import get_esp32_variant
from esphome.components.esp32 import get_esp32_variant, include_builtin_idf_component
from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
from esphome.components.zephyr import (
zephyr_add_overlay,
@@ -118,6 +118,9 @@ async def to_code(config):
cg.add(var.set_sampling_mode(config[CONF_SAMPLING_MODE]))
if CORE.is_esp32:
# Re-enable ESP-IDF's ADC driver (excluded by default to save compile time)
include_builtin_idf_component("esp_adc")
if attenuation := config.get(CONF_ATTENUATION):
if attenuation == "auto":
cg.add(var.set_autorange(cg.global_ns.true))

View File

@@ -264,9 +264,9 @@ template<typename... Ts> class APIRespondAction : public Action<Ts...> {
// Build and send JSON response
json::JsonBuilder builder;
this->json_builder_(x..., builder.root());
std::string json_str = builder.serialize();
auto json_buf = builder.serialize();
this->parent_->send_action_response(call_id, success, StringRef(error_message),
reinterpret_cast<const uint8_t *>(json_str.data()), json_str.size());
reinterpret_cast<const uint8_t *>(json_buf.data()), json_buf.size());
return;
}
#endif

View File

@@ -38,8 +38,10 @@ async def to_code(config):
# https://github.com/ESP32Async/ESPAsyncTCP
cg.add_library("ESP32Async/ESPAsyncTCP", "2.0.0")
elif CORE.is_rp2040:
# https://github.com/khoih-prog/AsyncTCP_RP2040W
cg.add_library("khoih-prog/AsyncTCP_RP2040W", "1.2.0")
# https://github.com/ayushsharma82/RPAsyncTCP
# RPAsyncTCP is a drop-in replacement for AsyncTCP_RP2040W with better
# ESPAsyncWebServer compatibility
cg.add_library("ayushsharma82/RPAsyncTCP", "1.3.2")
# Other platforms (host, etc) use socket-based implementation

View File

@@ -8,8 +8,8 @@
// Use ESPAsyncTCP library for ESP8266 (always Arduino)
#include <ESPAsyncTCP.h>
#elif defined(USE_RP2040)
// Use AsyncTCP_RP2040W library for RP2040
#include <AsyncTCP_RP2040W.h>
// Use RPAsyncTCP library for RP2040
#include <RPAsyncTCP.h>
#else
// Use socket-based implementation for other platforms
#include "async_tcp_socket.h"

View File

@@ -1,5 +1,5 @@
import esphome.codegen as cg
from esphome.components.esp32 import add_idf_component
from esphome.components.esp32 import add_idf_component, include_builtin_idf_component
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_NUM_CHANNELS, CONF_SAMPLE_RATE
import esphome.final_validate as fv
@@ -166,6 +166,9 @@ def final_validate_audio_schema(
async def to_code(config):
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
include_builtin_idf_component("esp_http_client")
add_idf_component(
name="esphome/esp-audio-libs",
ref="2.0.3",

View File

@@ -14,10 +14,7 @@ void log_binary_sensor(const char *tag, const char *prefix, const char *type, Bi
}
ESP_LOGCONFIG(tag, "%s%s '%s'", prefix, type, obj->get_name().c_str());
if (!obj->get_device_class_ref().empty()) {
ESP_LOGCONFIG(tag, "%s Device Class: '%s'", prefix, obj->get_device_class_ref().c_str());
}
LOG_ENTITY_DEVICE_CLASS(tag, prefix, *obj);
}
void BinarySensor::publish_state(bool new_state) {

View File

@@ -9,7 +9,7 @@ static const char *const TAG = "bl0940.number";
void CalibrationNumber::setup() {
float value = 0.0f;
if (this->restore_value_) {
this->pref_ = global_preferences->make_preference<float>(this->get_preference_hash());
this->pref_ = this->make_entity_preference<float>();
if (!this->pref_.load(&value)) {
value = 0.0f;
}

View File

@@ -12,10 +12,7 @@ void log_button(const char *tag, const char *prefix, const char *type, Button *o
}
ESP_LOGCONFIG(tag, "%s%s '%s'", prefix, type, obj->get_name().c_str());
if (!obj->get_icon_ref().empty()) {
ESP_LOGCONFIG(tag, "%s Icon: '%s'", prefix, obj->get_icon_ref().c_str());
}
LOG_ENTITY_ICON(tag, prefix, *obj);
}
void Button::press() {

View File

@@ -96,10 +96,16 @@ void CaptivePortal::start() {
}
void CaptivePortal::handleRequest(AsyncWebServerRequest *req) {
if (req->url() == ESPHOME_F("/config.json")) {
#ifdef USE_ESP32
char url_buf[AsyncWebServerRequest::URL_BUF_SIZE];
StringRef url = req->url_to(url_buf);
#else
const auto &url = req->url();
#endif
if (url == ESPHOME_F("/config.json")) {
this->handle_config(req);
return;
} else if (req->url() == ESPHOME_F("/wifisave")) {
} else if (url == ESPHOME_F("/wifisave")) {
this->handle_wifisave(req);
return;
}

View File

@@ -0,0 +1,103 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import i2c
from esphome.components.i2c import I2CBus
import esphome.config_validation as cv
from esphome.const import (
CONF_I2C_ID,
CONF_ID,
CONF_INPUT,
CONF_INVERTED,
CONF_MODE,
CONF_NUMBER,
CONF_OPEN_DRAIN,
CONF_OUTPUT,
)
from esphome.core import CORE
CODEOWNERS = ["@dwmw2"]
DEPENDENCIES = ["i2c"]
MULTI_CONF = True
ch423_ns = cg.esphome_ns.namespace("ch423")
CH423Component = ch423_ns.class_("CH423Component", cg.Component, i2c.I2CDevice)
CH423GPIOPin = ch423_ns.class_(
"CH423GPIOPin", cg.GPIOPin, cg.Parented.template(CH423Component)
)
CONF_CH423 = "ch423"
# Note that no address is configurable - each register in the CH423 has a dedicated i2c address
CONFIG_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_ID): cv.declare_id(CH423Component),
cv.GenerateID(CONF_I2C_ID): cv.use_id(I2CBus),
}
).extend(cv.COMPONENT_SCHEMA)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
# Can't use register_i2c_device because there is no CONF_ADDRESS
parent = await cg.get_variable(config[CONF_I2C_ID])
cg.add(var.set_i2c_bus(parent))
# This is used as a final validation step so that modes have been fully transformed.
def pin_mode_check(pin_config, _):
if pin_config[CONF_MODE][CONF_INPUT] and pin_config[CONF_NUMBER] >= 8:
raise cv.Invalid("CH423 only supports input on pins 0-7")
if pin_config[CONF_MODE][CONF_OPEN_DRAIN] and pin_config[CONF_NUMBER] < 8:
raise cv.Invalid("CH423 only supports open drain output on pins 8-23")
ch423_id = pin_config[CONF_CH423]
pin_num = pin_config[CONF_NUMBER]
is_output = pin_config[CONF_MODE][CONF_OUTPUT]
is_open_drain = pin_config[CONF_MODE][CONF_OPEN_DRAIN]
# Track pin modes per CH423 instance in CORE.data
ch423_modes = CORE.data.setdefault(CONF_CH423, {})
if ch423_id not in ch423_modes:
ch423_modes[ch423_id] = {"gpio_output": None, "gpo_open_drain": None}
if pin_num < 8:
# GPIO pins (0-7): all must have same direction
if ch423_modes[ch423_id]["gpio_output"] is None:
ch423_modes[ch423_id]["gpio_output"] = is_output
elif ch423_modes[ch423_id]["gpio_output"] != is_output:
raise cv.Invalid(
"CH423 GPIO pins (0-7) must all be configured as input or all as output"
)
# GPO pins (8-23): all must have same open-drain setting
elif ch423_modes[ch423_id]["gpo_open_drain"] is None:
ch423_modes[ch423_id]["gpo_open_drain"] = is_open_drain
elif ch423_modes[ch423_id]["gpo_open_drain"] != is_open_drain:
raise cv.Invalid(
"CH423 GPO pins (8-23) must all be configured as push-pull or all as open-drain"
)
CH423_PIN_SCHEMA = pins.gpio_base_schema(
CH423GPIOPin,
cv.int_range(min=0, max=23),
modes=[CONF_INPUT, CONF_OUTPUT, CONF_OPEN_DRAIN],
).extend(
{
cv.Required(CONF_CH423): cv.use_id(CH423Component),
}
)
@pins.PIN_SCHEMA_REGISTRY.register(CONF_CH423, CH423_PIN_SCHEMA, pin_mode_check)
async def ch423_pin_to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
parent = await cg.get_variable(config[CONF_CH423])
cg.add(var.set_parent(parent))
num = config[CONF_NUMBER]
cg.add(var.set_pin(num))
cg.add(var.set_inverted(config[CONF_INVERTED]))
cg.add(var.set_flags(pins.gpio_flags_expr(config[CONF_MODE])))
return var
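A minimal standalone sketch (illustrative only, not part of the component) of the pin rules that pin_mode_check() enforces above: input is valid only on IO pins 0-7, open-drain output only on OC pins 8-23.

# Standalone mirror of the constraints checked by pin_mode_check(); no esphome imports.
def check_ch423_pin(number: int, is_input: bool, open_drain: bool) -> None:
    if is_input and number >= 8:
        raise ValueError("CH423 only supports input on pins 0-7")
    if open_drain and number < 8:
        raise ValueError("CH423 only supports open drain output on pins 8-23")

check_ch423_pin(3, is_input=True, open_drain=False)   # OK: IO3 as input
check_ch423_pin(10, is_input=False, open_drain=True)  # OK: OC pin as open-drain output
# check_ch423_pin(12, is_input=True, open_drain=False) would raise: inputs exist only on 0-7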

View File

@@ -0,0 +1,148 @@
#include "ch423.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
namespace esphome::ch423 {
static constexpr uint8_t CH423_REG_SYS = 0x24; // Set system parameters (0x48 >> 1)
static constexpr uint8_t CH423_SYS_IO_OE = 0x01; // IO output enable
static constexpr uint8_t CH423_SYS_OD_EN = 0x04; // Open drain enable for OC pins
static constexpr uint8_t CH423_REG_IO = 0x30; // Write/read IO7-IO0 (0x60 >> 1)
static constexpr uint8_t CH423_REG_IO_RD = 0x26; // Read IO7-IO0 (0x4D >> 1, rounded down)
static constexpr uint8_t CH423_REG_OCL = 0x22; // Write OC7-OC0 (0x44 >> 1)
static constexpr uint8_t CH423_REG_OCH = 0x23; // Write OC15-OC8 (0x46 >> 1)
static const char *const TAG = "ch423";
void CH423Component::setup() {
// set outputs before mode
this->write_outputs_();
// Set system parameters and check for errors
bool success = this->write_reg_(CH423_REG_SYS, this->sys_params_);
// Only read inputs if pins are configured for input (IO_OE not set)
if (success && !(this->sys_params_ & CH423_SYS_IO_OE)) {
success = this->read_inputs_();
}
if (!success) {
ESP_LOGE(TAG, "CH423 not detected");
this->mark_failed();
return;
}
ESP_LOGCONFIG(TAG, "Initialization complete. Warning: %d, Error: %d", this->status_has_warning(),
this->status_has_error());
}
void CH423Component::loop() {
// Clear all the previously read flags.
this->pin_read_flags_ = 0x00;
}
void CH423Component::dump_config() {
ESP_LOGCONFIG(TAG, "CH423:");
if (this->is_failed()) {
ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
}
}
void CH423Component::pin_mode(uint8_t pin, gpio::Flags flags) {
if (pin < 8) {
if (flags & gpio::FLAG_OUTPUT) {
this->sys_params_ |= CH423_SYS_IO_OE;
}
} else if (pin >= 8 && pin < 24) {
if (flags & gpio::FLAG_OPEN_DRAIN) {
this->sys_params_ |= CH423_SYS_OD_EN;
}
}
}
bool CH423Component::digital_read(uint8_t pin) {
if (this->pin_read_flags_ == 0 || this->pin_read_flags_ & (1 << pin)) {
// Read values on first access this loop, or re-read the hardware if this pin was already read during this loop
this->read_inputs_();
}
this->pin_read_flags_ |= (1 << pin);
return (this->input_bits_ & (1 << pin)) != 0;
}
void CH423Component::digital_write(uint8_t pin, bool value) {
if (value) {
this->output_bits_ |= (1 << pin);
} else {
this->output_bits_ &= ~(1 << pin);
}
this->write_outputs_();
}
bool CH423Component::read_inputs_() {
if (this->is_failed()) {
return false;
}
// reading inputs requires IO_OE to be 0
if (this->sys_params_ & CH423_SYS_IO_OE) {
return false;
}
uint8_t result = this->read_reg_(CH423_REG_IO_RD);
this->input_bits_ = result;
this->status_clear_warning();
return true;
}
// Write a register. Can't use the standard write_byte() method because there is no single pre-configured i2c address.
bool CH423Component::write_reg_(uint8_t reg, uint8_t value) {
auto err = this->bus_->write_readv(reg, &value, 1, nullptr, 0);
if (err != i2c::ERROR_OK) {
char buf[64];
ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("write failed for register 0x%X, error %d"), reg, err);
this->status_set_warning(buf);
return false;
}
this->status_clear_warning();
return true;
}
uint8_t CH423Component::read_reg_(uint8_t reg) {
uint8_t value;
auto err = this->bus_->write_readv(reg, nullptr, 0, &value, 1);
if (err != i2c::ERROR_OK) {
char buf[64];
ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("read failed for register 0x%X, error %d"), reg, err);
this->status_set_warning(buf);
return 0;
}
this->status_clear_warning();
return value;
}
bool CH423Component::write_outputs_() {
bool success = true;
// Write IO7-IO0
success &= this->write_reg_(CH423_REG_IO, static_cast<uint8_t>(this->output_bits_));
// Write OC7-OC0
success &= this->write_reg_(CH423_REG_OCL, static_cast<uint8_t>(this->output_bits_ >> 8));
// Write OC15-OC8
success &= this->write_reg_(CH423_REG_OCH, static_cast<uint8_t>(this->output_bits_ >> 16));
return success;
}
float CH423Component::get_setup_priority() const { return setup_priority::IO; }
// Run our loop() method very early in the loop, so that we cache read values
// before other components call our digital_read() method.
float CH423Component::get_loop_priority() const { return 9.0f; } // Just after WIFI
void CH423GPIOPin::pin_mode(gpio::Flags flags) { this->parent_->pin_mode(this->pin_, flags); }
bool CH423GPIOPin::digital_read() { return this->parent_->digital_read(this->pin_) ^ this->inverted_; }
void CH423GPIOPin::digital_write(bool value) { this->parent_->digital_write(this->pin_, value ^ this->inverted_); }
size_t CH423GPIOPin::dump_summary(char *buffer, size_t len) const {
return snprintf(buffer, len, "EXIO%u via CH423", this->pin_);
}
void CH423GPIOPin::set_flags(gpio::Flags flags) {
flags_ = flags;
this->parent_->pin_mode(this->pin_, flags);
}
} // namespace esphome::ch423
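As a quick illustration of the register split performed by write_outputs_() (a throwaway sketch, not component code): bits 0-7 of output_bits_ go to the IO register, bits 8-15 to OC7-OC0, and bits 16-23 to OC15-OC8.

# Python sketch of how the 24 output bits map onto the three CH423 write registers.
output_bits = 0b000000010000001000000011  # OC15-OC8 | OC7-OC0 | IO7-IO0
io_byte = output_bits & 0xFF              # -> CH423_REG_IO  (0x03)
ocl_byte = (output_bits >> 8) & 0xFF      # -> CH423_REG_OCL (0x02)
och_byte = (output_bits >> 16) & 0xFF     # -> CH423_REG_OCH (0x01)
print(hex(io_byte), hex(ocl_byte), hex(och_byte))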

View File

@@ -0,0 +1,67 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/components/i2c/i2c.h"
namespace esphome::ch423 {
class CH423Component : public Component, public i2c::I2CDevice {
public:
CH423Component() = default;
/// Check i2c availability and setup masks
void setup() override;
/// Poll for input changes periodically
void loop() override;
/// Helper function to read the value of a pin.
bool digital_read(uint8_t pin);
/// Helper function to write the value of a pin.
void digital_write(uint8_t pin, bool value);
/// Helper function to set the pin mode of a pin.
void pin_mode(uint8_t pin, gpio::Flags flags);
float get_setup_priority() const override;
float get_loop_priority() const override;
void dump_config() override;
protected:
bool write_reg_(uint8_t reg, uint8_t value);
uint8_t read_reg_(uint8_t reg);
bool read_inputs_();
bool write_outputs_();
/// The mask to write as output state - 1 means HIGH, 0 means LOW
uint32_t output_bits_{0x00};
/// Bitmask of input pins already read during the current loop
uint8_t pin_read_flags_{0x00};
/// Copy of last read values
uint8_t input_bits_{0x00};
/// System parameters
uint8_t sys_params_{0x00};
};
/// Helper class to expose a CH423 pin as a GPIO pin.
class CH423GPIOPin : public GPIOPin {
public:
void setup() override {}
void pin_mode(gpio::Flags flags) override;
bool digital_read() override;
void digital_write(bool value) override;
size_t dump_summary(char *buffer, size_t len) const override;
void set_parent(CH423Component *parent) { parent_ = parent; }
void set_pin(uint8_t pin) { pin_ = pin; }
void set_inverted(bool inverted) { inverted_ = inverted; }
void set_flags(gpio::Flags flags);
gpio::Flags get_flags() const override { return this->flags_; }
protected:
CH423Component *parent_{};
uint8_t pin_{};
bool inverted_{};
gpio::Flags flags_{};
};
} // namespace esphome::ch423

View File

@@ -360,8 +360,7 @@ void Climate::add_on_control_callback(std::function<void(ClimateCall &)> &&callb
static const uint32_t RESTORE_STATE_VERSION = 0x848EA6ADUL;
optional<ClimateDeviceRestoreState> Climate::restore_state_() {
this->rtc_ = global_preferences->make_preference<ClimateDeviceRestoreState>(this->get_preference_hash() ^
RESTORE_STATE_VERSION);
this->rtc_ = this->make_entity_preference<ClimateDeviceRestoreState>(RESTORE_STATE_VERSION);
ClimateDeviceRestoreState recovered{};
if (!this->rtc_.load(&recovered))
return {};

View File

@@ -1,11 +1,11 @@
#include "cover.h"
#include "esphome/core/defines.h"
#include "esphome/core/controller_registry.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
#include <strings.h>
#include "esphome/core/log.h"
namespace esphome::cover {
static const char *const TAG = "cover";
@@ -39,13 +39,13 @@ Cover::Cover() : position{COVER_OPEN} {}
CoverCall::CoverCall(Cover *parent) : parent_(parent) {}
CoverCall &CoverCall::set_command(const char *command) {
if (strcasecmp(command, "OPEN") == 0) {
if (ESPHOME_strcasecmp_P(command, ESPHOME_PSTR("OPEN")) == 0) {
this->set_command_open();
} else if (strcasecmp(command, "CLOSE") == 0) {
} else if (ESPHOME_strcasecmp_P(command, ESPHOME_PSTR("CLOSE")) == 0) {
this->set_command_close();
} else if (strcasecmp(command, "STOP") == 0) {
} else if (ESPHOME_strcasecmp_P(command, ESPHOME_PSTR("STOP")) == 0) {
this->set_command_stop();
} else if (strcasecmp(command, "TOGGLE") == 0) {
} else if (ESPHOME_strcasecmp_P(command, ESPHOME_PSTR("TOGGLE")) == 0) {
this->set_command_toggle();
} else {
ESP_LOGW(TAG, "'%s' - Unrecognized command %s", this->parent_->get_name().c_str(), command);
@@ -187,7 +187,7 @@ void Cover::publish_state(bool save) {
}
}
optional<CoverRestoreState> Cover::restore_state_() {
this->rtc_ = global_preferences->make_preference<CoverRestoreState>(this->get_preference_hash());
this->rtc_ = this->make_entity_preference<CoverRestoreState>();
CoverRestoreState recovered{};
if (!this->rtc_.load(&recovered))
return {};

View File

@@ -20,9 +20,7 @@ const extern float COVER_CLOSED;
if (traits_.get_is_assumed_state()) { \
ESP_LOGCONFIG(TAG, "%s Assumed State: YES", prefix); \
} \
if (!(obj)->get_device_class_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Device Class: '%s'", prefix, (obj)->get_device_class_ref().c_str()); \
} \
LOG_ENTITY_DEVICE_CLASS(TAG, prefix, *(obj)); \
}
class Cover;

View File

@@ -15,9 +15,7 @@ namespace esphome::datetime {
#define LOG_DATETIME_DATE(prefix, type, obj) \
if ((obj) != nullptr) { \
ESP_LOGCONFIG(TAG, "%s%s '%s'", prefix, LOG_STR_LITERAL(type), (obj)->get_name().c_str()); \
if (!(obj)->get_icon_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Icon: '%s'", prefix, (obj)->get_icon_ref().c_str()); \
} \
LOG_ENTITY_ICON(TAG, prefix, *(obj)); \
}
class DateCall;

View File

@@ -15,9 +15,7 @@ namespace esphome::datetime {
#define LOG_DATETIME_DATETIME(prefix, type, obj) \
if ((obj) != nullptr) { \
ESP_LOGCONFIG(TAG, "%s%s '%s'", prefix, LOG_STR_LITERAL(type), (obj)->get_name().c_str()); \
if (!(obj)->get_icon_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Icon: '%s'", prefix, (obj)->get_icon_ref().c_str()); \
} \
LOG_ENTITY_ICON(TAG, prefix, *(obj)); \
}
class DateTimeCall;

View File

@@ -15,9 +15,7 @@ namespace esphome::datetime {
#define LOG_DATETIME_TIME(prefix, type, obj) \
if ((obj) != nullptr) { \
ESP_LOGCONFIG(TAG, "%s%s '%s'", prefix, LOG_STR_LITERAL(type), (obj)->get_name().c_str()); \
if (!(obj)->get_icon_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Icon: '%s'", prefix, (obj)->get_icon_ref().c_str()); \
} \
LOG_ENTITY_ICON(TAG, prefix, *(obj)); \
}
class TimeCall;

View File

@@ -15,7 +15,7 @@ from esphome.const import (
CONF_UPDATE_INTERVAL,
SCHEDULER_DONT_RUN,
)
from esphome.core import CoroPriority, coroutine_with_priority
from esphome.core import CORE, CoroPriority, coroutine_with_priority
IS_PLATFORM_COMPONENT = True
@@ -222,3 +222,8 @@ async def display_is_displaying_page_to_code(config, condition_id, template_arg,
async def to_code(config):
cg.add_global(display_ns.using)
cg.add_define("USE_DISPLAY")
if CORE.is_esp32:
# Re-enable ESP-IDF's LCD driver (excluded by default to save compile time)
from esphome.components.esp32 import include_builtin_idf_component
include_builtin_idf_component("esp_lcd")

View File

@@ -41,7 +41,7 @@ void DutyTimeSensor::setup() {
uint32_t seconds = 0;
if (this->restore_) {
this->pref_ = global_preferences->make_preference<uint32_t>(this->get_preference_hash());
this->pref_ = this->make_entity_preference<uint32_t>();
this->pref_.load(&seconds);
}

View File

@@ -2,7 +2,8 @@ import esphome.codegen as cg
from esphome.components import i2c
from esphome.components.audio_dac import AudioDac
import esphome.config_validation as cv
from esphome.const import CONF_ID
from esphome.const import CONF_AUDIO_DAC, CONF_BITS_PER_SAMPLE, CONF_ID
import esphome.final_validate as fv
CODEOWNERS = ["@kbx81"]
DEPENDENCIES = ["i2c"]
@@ -21,6 +22,29 @@ CONFIG_SCHEMA = (
)
def _final_validate(config):
full_config = fv.full_config.get()
# Check all speaker configurations for ones that reference this es8156
speaker_configs = full_config.get("speaker", [])
for speaker_config in speaker_configs:
audio_dac_id = speaker_config.get(CONF_AUDIO_DAC)
if (
audio_dac_id is not None
and audio_dac_id == config[CONF_ID]
and (bits_per_sample := speaker_config.get(CONF_BITS_PER_SAMPLE))
is not None
and bits_per_sample > 24
):
raise cv.Invalid(
f"ES8156 does not support more than 24 bits per sample. "
f"The speaker referencing this audio_dac has bits_per_sample set to {bits_per_sample}."
)
FINAL_VALIDATE_SCHEMA = _final_validate
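To make the rule above concrete with hypothetical data (plain dicts standing in for the validated speaker configs, "dac1" for this es8156's ID), only entries that reference this DAC and request more than 24 bits per sample are rejected:

# Standalone illustration of the check in _final_validate(); values are hypothetical.
es8156_id = "dac1"
speakers = [
    {"audio_dac": "dac1", "bits_per_sample": 32},  # rejected: ES8156 caps out at 24 bits
    {"audio_dac": "dac1", "bits_per_sample": 16},  # accepted
    {"audio_dac": "dac2", "bits_per_sample": 32},  # ignored: references a different DAC
]
for spk in speakers:
    if spk.get("audio_dac") == es8156_id and spk.get("bits_per_sample", 0) > 24:
        print("invalid:", spk)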
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)

View File

@@ -17,24 +17,61 @@ static const char *const TAG = "es8156";
}
void ES8156::setup() {
// REG02 MODE CONFIG 1: Enable software mode for I2C control of volume/mute
// Bit 2: SOFT_MODE_SEL=1 (software mode enabled)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG02_SCLK_MODE, 0x04));
// Analog system configuration (active-low power down bits, active-high enables)
// REG20 ANALOG SYSTEM: Configure analog signal path
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG20_ANALOG_SYS1, 0x2A));
// REG21 ANALOG SYSTEM: VSEL=0x1C (bias level ~120%), normal VREF ramp speed
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG21_ANALOG_SYS2, 0x3C));
// REG22 ANALOG SYSTEM: Line out mode (HPSW=0), OUT_MUTE=0 (not muted)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG22_ANALOG_SYS3, 0x00));
// REG24 ANALOG SYSTEM: Low power mode for VREFBUF, HPCOM, DACVRP; DAC normal power
// Bits 2:0 = 0x07: LPVREFBUF=1, LPHPCOM=1, LPDACVRP=1, LPDAC=0
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG24_ANALOG_LP, 0x07));
// REG23 ANALOG SYSTEM: Lowest bias (IBIAS_SW=0), VMIDLVL=VDDA/2, normal impedance
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG23_ANALOG_SYS4, 0x00));
// Timing and interface configuration
// REG0A/0B TIME CONTROL: Fast state machine transitions
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0A_TIME_CONTROL1, 0x01));
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0B_TIME_CONTROL2, 0x01));
// REG11 SDP INTERFACE CONFIG: Default I2S format (24-bit, I2S mode)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG11_DAC_SDP, 0x00));
// REG19 EQ CONTROL 1: EQ disabled (EQ_ON=0), EQ_BAND_NUM=2
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG19_EQ_CONTROL1, 0x20));
// REG0D P2S CONTROL: Parallel-to-serial converter settings
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0D_P2S_CONTROL, 0x14));
// REG09 MISC CONTROL 2: Default settings
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG09_MISC_CONTROL2, 0x00));
// REG18 MISC CONTROL 3: Stereo channel routing, no inversion
// Bits 5:4 CHN_CROSS: 0=L→L/R→R, 1=L to both, 2=R to both, 3=swap L/R
// Bits 3:2: LCH_INV/RCH_INV channel inversion
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG18_MISC_CONTROL3, 0x00));
// REG08 CLOCK OFF: Enable all internal clocks (0x3F = all clock gates open)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG08_CLOCK_ON_OFF, 0x3F));
// REG00 RESET CONTROL: Reset sequence
// First: RST_DIG=1 (assert digital reset)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x02));
// Then: CSM_ON=1 (enable chip state machine), RST_DIG=1
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x03));
// REG25 ANALOG SYSTEM: Power up analog blocks
// VMIDSEL=2 (normal VMID operation), PDN_ANA=0, ENREFR=0, ENHPCOM=0
// PDN_DACVREFGEN=0, PDN_VREFBUF=0, PDN_DAC=0 (all enabled)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG25_ANALOG_SYS5, 0x20));
}

View File

@@ -53,8 +53,10 @@ from .const import ( # noqa
KEY_BOARD,
KEY_COMPONENTS,
KEY_ESP32,
KEY_EXCLUDE_COMPONENTS,
KEY_EXTRA_BUILD_FILES,
KEY_FLASH_SIZE,
KEY_FULL_CERT_BUNDLE,
KEY_PATH,
KEY_REF,
KEY_REPO,
@@ -85,6 +87,7 @@ IS_TARGET_PLATFORM = True
CONF_ASSERTION_LEVEL = "assertion_level"
CONF_COMPILER_OPTIMIZATION = "compiler_optimization"
CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES = "enable_idf_experimental_features"
CONF_INCLUDE_BUILTIN_IDF_COMPONENTS = "include_builtin_idf_components"
CONF_ENABLE_LWIP_ASSERT = "enable_lwip_assert"
CONF_ENABLE_OTA_ROLLBACK = "enable_ota_rollback"
CONF_EXECUTE_FROM_PSRAM = "execute_from_psram"
@@ -113,6 +116,36 @@ COMPILER_OPTIMIZATIONS = {
"SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}
# ESP-IDF components excluded by default to reduce compile time.
# Components can be re-enabled by calling include_builtin_idf_component() in to_code().
#
# Cannot be excluded (dependencies of required components):
# - "console": espressif/mdns unconditionally depends on it
# - "sdmmc": driver -> esp_driver_sdmmc -> sdmmc dependency chain
DEFAULT_EXCLUDED_IDF_COMPONENTS = (
"cmock", # Unit testing mock framework - ESPHome doesn't use IDF's testing
"esp_adc", # ADC driver - only needed by adc component
"esp_driver_i2s", # I2S driver - only needed by i2s_audio component
"esp_driver_rmt", # RMT driver - only needed by remote_transmitter/receiver, neopixelbus
"esp_driver_touch_sens", # Touch sensor driver - only needed by esp32_touch
"esp_eth", # Ethernet driver - only needed by ethernet component
"esp_hid", # HID host/device support - ESPHome doesn't implement HID functionality
"esp_http_client", # HTTP client - only needed by http_request component
"esp_https_ota", # ESP-IDF HTTPS OTA - ESPHome has its own OTA implementation
"esp_https_server", # HTTPS server - ESPHome has its own web server
"esp_lcd", # LCD controller drivers - only needed by display component
"esp_local_ctrl", # Local control over HTTPS/BLE - ESPHome has native API
"espcoredump", # Core dump support - ESPHome has its own debug component
"fatfs", # FAT filesystem - ESPHome doesn't use filesystem storage
"mqtt", # ESP-IDF MQTT library - ESPHome has its own MQTT implementation
"perfmon", # Xtensa performance monitor - ESPHome has its own debug component
"protocomm", # Protocol communication for provisioning - unused by ESPHome
"spiffs", # SPIFFS filesystem - ESPHome doesn't use filesystem storage (IDF only)
"unity", # Unit testing framework - ESPHome doesn't use IDF's testing
"wear_levelling", # Flash wear levelling for fatfs - unused since fatfs unused
"wifi_provisioning", # WiFi provisioning - ESPHome uses its own improv implementation
)
# ESP32 (original) chip revision options
# Setting minimum revision to 3.0 or higher:
# - Reduces flash size by excluding workaround code for older chip bugs
@@ -202,6 +235,9 @@ def set_core_data(config):
)
CORE.data[KEY_ESP32][KEY_SDKCONFIG_OPTIONS] = {}
CORE.data[KEY_ESP32][KEY_COMPONENTS] = {}
# Initialize with default exclusions - components can call include_builtin_idf_component()
# to re-enable any they need
CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS] = set(DEFAULT_EXCLUDED_IDF_COMPONENTS)
CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] = cv.Version.parse(
config[CONF_FRAMEWORK][CONF_VERSION]
)
@@ -327,6 +363,28 @@ def add_idf_component(
}
def exclude_builtin_idf_component(name: str) -> None:
"""Exclude an ESP-IDF component from the build.
This reduces compile time by skipping components that are not needed.
The component will be passed to ESP-IDF's EXCLUDE_COMPONENTS cmake variable.
Note: Components that are dependencies of other required components
cannot be excluded - ESP-IDF will still build them.
"""
CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].add(name)
def include_builtin_idf_component(name: str) -> None:
"""Remove an ESP-IDF component from the exclusion list.
Call this from components that need an ESP-IDF component that is
excluded by default in DEFAULT_EXCLUDED_IDF_COMPONENTS. This ensures the
component will be built when needed.
"""
CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].discard(name)
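As a sketch of the intended call pattern (the component below is hypothetical), any platform that needs one of the drivers listed in DEFAULT_EXCLUDED_IDF_COMPONENTS re-includes it from its to_code(), exactly as the adc, display and ethernet changes in this set do:

# Hypothetical component to_code() re-enabling a default-excluded ESP-IDF component.
from esphome.components.esp32 import include_builtin_idf_component
from esphome.core import CORE

async def to_code(config):
    if CORE.is_esp32:
        # Drop "esp_driver_i2s" from the EXCLUDE_COMPONENTS set so ESP-IDF builds it
        include_builtin_idf_component("esp_driver_i2s")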
def add_extra_script(stage: str, filename: str, path: Path):
"""Add an extra script to the project."""
key = f"{stage}:{filename}"
@@ -670,11 +728,26 @@ CONF_FREERTOS_IN_IRAM = "freertos_in_iram"
CONF_RINGBUF_IN_IRAM = "ringbuf_in_iram"
CONF_HEAP_IN_IRAM = "heap_in_iram"
CONF_LOOP_TASK_STACK_SIZE = "loop_task_stack_size"
CONF_USE_FULL_CERTIFICATE_BUNDLE = "use_full_certificate_bundle"
CONF_DISABLE_DEBUG_STUBS = "disable_debug_stubs"
CONF_DISABLE_OCD_AWARE = "disable_ocd_aware"
CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY = "disable_usb_serial_jtag_secondary"
CONF_DISABLE_DEV_NULL_VFS = "disable_dev_null_vfs"
CONF_DISABLE_MBEDTLS_PEER_CERT = "disable_mbedtls_peer_cert"
CONF_DISABLE_MBEDTLS_PKCS7 = "disable_mbedtls_pkcs7"
CONF_DISABLE_REGI2C_IN_IRAM = "disable_regi2c_in_iram"
CONF_DISABLE_FATFS = "disable_fatfs"
# VFS requirement tracking
# Components that need VFS features can call require_vfs_select() or require_vfs_dir()
KEY_VFS_SELECT_REQUIRED = "vfs_select_required"
KEY_VFS_DIR_REQUIRED = "vfs_dir_required"
# Feature requirement tracking - components can call require_* functions to re-enable
# These are stored in CORE.data[KEY_ESP32] dict
KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED = "usb_serial_jtag_secondary_required"
KEY_MBEDTLS_PEER_CERT_REQUIRED = "mbedtls_peer_cert_required"
KEY_MBEDTLS_PKCS7_REQUIRED = "mbedtls_pkcs7_required"
KEY_FATFS_REQUIRED = "fatfs_required"
def require_vfs_select() -> None:
@@ -695,6 +768,55 @@ def require_vfs_dir() -> None:
CORE.data[KEY_VFS_DIR_REQUIRED] = True
def require_full_certificate_bundle() -> None:
"""Request the full certificate bundle instead of the common-CAs-only bundle.
By default, ESPHome uses CONFIG_MBEDTLS_CERTIFICATE_BUNDLE_DEFAULT_CMN which
includes only CAs with >1% market share (~51 KB smaller than full bundle).
This covers ~99% of websites including Let's Encrypt, DigiCert, Google, Amazon.
Call this from components that need to connect to services using uncommon CAs.
"""
CORE.data[KEY_ESP32][KEY_FULL_CERT_BUNDLE] = True
def require_usb_serial_jtag_secondary() -> None:
"""Mark that USB Serial/JTAG secondary console is required by a component.
Call this from components (e.g., logger) that need USB Serial/JTAG console output.
This prevents CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG from being disabled.
"""
CORE.data[KEY_ESP32][KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED] = True
def require_mbedtls_peer_cert() -> None:
"""Mark that mbedTLS peer certificate retention is required by a component.
Call this from components that need access to the peer certificate after
the TLS handshake is complete. This prevents CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE
from being disabled.
"""
CORE.data[KEY_ESP32][KEY_MBEDTLS_PEER_CERT_REQUIRED] = True
def require_mbedtls_pkcs7() -> None:
"""Mark that mbedTLS PKCS#7 support is required by a component.
Call this from components that need PKCS#7 certificate validation.
This prevents CONFIG_MBEDTLS_PKCS7_C from being disabled.
"""
CORE.data[KEY_ESP32][KEY_MBEDTLS_PKCS7_REQUIRED] = True
def require_fatfs() -> None:
"""Mark that FATFS support is required by a component.
Call this from components that use FATFS (e.g., SD card, storage components).
This prevents FATFS from being disabled when disable_fatfs is set.
"""
CORE.data[KEY_ESP32][KEY_FATFS_REQUIRED] = True
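The require_* helpers above all follow the same shape: a component sets a flag in CORE.data during code generation, and the esp32 to_code() later consults that flag before disabling the matching sdkconfig option. A sketch with a hypothetical component:

# Hypothetical component that inspects peer certificates after the TLS handshake
# and talks to a service signed by an uncommon CA.
from esphome.components import esp32
from esphome.core import CORE

async def to_code(config):
    if CORE.is_esp32:
        esp32.require_mbedtls_peer_cert()        # keep CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE
        esp32.require_full_certificate_bundle()  # opt back into the full CA bundle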
def _parse_idf_component(value: str) -> ConfigType:
"""Parse IDF component shorthand syntax like 'owner/component^version'"""
# Match operator followed by version-like string (digit or *)
@@ -776,6 +898,22 @@ FRAMEWORK_SCHEMA = cv.Schema(
min=8192, max=32768
),
cv.Optional(CONF_ENABLE_OTA_ROLLBACK, default=True): cv.boolean,
cv.Optional(
CONF_USE_FULL_CERTIFICATE_BUNDLE, default=False
): cv.boolean,
cv.Optional(
CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, default=[]
): cv.ensure_list(cv.string_strict),
cv.Optional(CONF_DISABLE_DEBUG_STUBS, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_OCD_AWARE, default=True): cv.boolean,
cv.Optional(
CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY, default=True
): cv.boolean,
cv.Optional(CONF_DISABLE_DEV_NULL_VFS, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_MBEDTLS_PEER_CERT, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_MBEDTLS_PKCS7, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_REGI2C_IN_IRAM, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_FATFS, default=True): cv.boolean,
}
),
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
@@ -965,6 +1103,19 @@ def _configure_lwip_max_sockets(conf: dict) -> None:
add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)
@coroutine_with_priority(CoroPriority.FINAL)
async def _write_exclude_components() -> None:
"""Write EXCLUDE_COMPONENTS cmake arg after all components have registered exclusions."""
if KEY_ESP32 not in CORE.data:
return
excluded = CORE.data[KEY_ESP32].get(KEY_EXCLUDE_COMPONENTS)
if excluded:
exclude_list = ";".join(sorted(excluded))
cg.add_platformio_option(
"board_build.cmake_extra_args", f"-DEXCLUDE_COMPONENTS={exclude_list}"
)
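For reference, the option written above is a single semicolon-joined cmake argument; a small sketch of the resulting string (using a trimmed, hypothetical exclusion set):

# Sketch of the EXCLUDE_COMPONENTS value produced above (subset for brevity).
excluded = {"fatfs", "esp_adc", "mqtt"}
print(f"-DEXCLUDE_COMPONENTS={';'.join(sorted(excluded))}")
# -> -DEXCLUDE_COMPONENTS=esp_adc;fatfs;mqtt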
@coroutine_with_priority(CoroPriority.FINAL)
async def _add_yaml_idf_components(components: list[ConfigType]):
"""Add IDF components from YAML config with final priority to override code-added components."""
@@ -1048,6 +1199,19 @@ async def to_code(config):
cg.add_build_flag("-DUSE_ESP32_FRAMEWORK_ESP_IDF")
if use_platformio:
cg.add_platformio_option("framework", "espidf")
# Wrap std::__throw_* functions to abort immediately, eliminating ~3KB of
# exception class overhead. See throw_stubs.cpp for implementation.
# ESP-IDF already compiles with -fno-exceptions, so this code was dead anyway.
for mangled in [
"_ZSt20__throw_length_errorPKc",
"_ZSt19__throw_logic_errorPKc",
"_ZSt20__throw_out_of_rangePKc",
"_ZSt24__throw_out_of_range_fmtPKcz",
"_ZSt17__throw_bad_allocv",
"_ZSt25__throw_bad_function_callv",
]:
cg.add_build_flag(f"-Wl,--wrap={mangled}")
else:
cg.add_build_flag("-DUSE_ARDUINO")
cg.add_build_flag("-DUSE_ESP32_FRAMEWORK_ARDUINO")
@@ -1080,6 +1244,18 @@ async def to_code(config):
cg.add_build_flag("-Wno-nonnull-compare")
# Use CMN (common CAs) bundle by default to save ~51KB flash
# CMN covers CAs with >1% market share (~99% of websites)
# Components needing uncommon CAs can call require_full_certificate_bundle()
use_full_bundle = conf[CONF_ADVANCED].get(
CONF_USE_FULL_CERTIFICATE_BUNDLE, False
) or CORE.data[KEY_ESP32].get(KEY_FULL_CERT_BUNDLE, False)
add_idf_sdkconfig_option(
"CONFIG_MBEDTLS_CERTIFICATE_BUNDLE_DEFAULT_FULL", use_full_bundle
)
if not use_full_bundle:
add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE_DEFAULT_CMN", True)
add_idf_sdkconfig_option(f"CONFIG_IDF_TARGET_{variant}", True)
add_idf_sdkconfig_option(
f"CONFIG_ESPTOOLPY_FLASHSIZE_{config[CONF_FLASH_SIZE]}", True
@@ -1153,6 +1329,11 @@ async def to_code(config):
# Apply LWIP optimization settings
advanced = conf[CONF_ADVANCED]
# Re-include any IDF components the user explicitly requested
for component_name in advanced.get(CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, []):
include_builtin_idf_component(component_name)
# DHCP server: only disable if explicitly set to false
# WiFi component handles its own optimization when AP mode is not used
# When using Arduino with Ethernet, DHCP server functions must be available
@@ -1274,6 +1455,61 @@ async def to_code(config):
add_idf_sdkconfig_option(f"CONFIG_LOG_DEFAULT_LEVEL_{conf[CONF_LOG_LEVEL]}", True)
# Disable OpenOCD debug stubs to save code size
# These are used for on-chip debugging with OpenOCD/JTAG, rarely needed for ESPHome
if advanced[CONF_DISABLE_DEBUG_STUBS]:
add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_STUBS_ENABLE", False)
# Disable OCD-aware exception handlers
# When enabled, the panic handler detects JTAG debugger and halts instead of resetting
# Most ESPHome users don't use JTAG debugging
if advanced[CONF_DISABLE_OCD_AWARE]:
add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_OCDAWARE", False)
# Disable USB Serial/JTAG secondary console
# Components like logger can call require_usb_serial_jtag_secondary() to re-enable
if CORE.data[KEY_ESP32].get(KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED, False):
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG", True)
elif advanced[CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY]:
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_NONE", True)
# Disable /dev/null VFS initialization
# ESPHome doesn't typically need /dev/null
if advanced[CONF_DISABLE_DEV_NULL_VFS]:
add_idf_sdkconfig_option("CONFIG_VFS_INITIALIZE_DEV_NULL", False)
# Disable keeping peer certificate after TLS handshake
# Saves ~4KB heap per connection, but prevents certificate inspection after handshake
# Components that need it can call require_mbedtls_peer_cert()
if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PEER_CERT_REQUIRED, False):
add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", True)
elif advanced[CONF_DISABLE_MBEDTLS_PEER_CERT]:
add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", False)
# Disable PKCS#7 support in mbedTLS
# Only needed for specific certificate validation scenarios
# Components that need it can call require_mbedtls_pkcs7()
if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PKCS7_REQUIRED, False):
# Component called require_mbedtls_pkcs7() - enable regardless of user setting
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", True)
elif advanced[CONF_DISABLE_MBEDTLS_PKCS7]:
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", False)
# Disable regi2c control functions in IRAM
# Only needed if using analog peripherals (ADC, DAC, etc.) from ISRs while cache is disabled
if advanced[CONF_DISABLE_REGI2C_IN_IRAM]:
add_idf_sdkconfig_option("CONFIG_ESP_REGI2C_CTRL_FUNC_IN_IRAM", False)
# Disable FATFS support
# Components that need FATFS (SD card, etc.) can call require_fatfs()
if CORE.data[KEY_ESP32].get(KEY_FATFS_REQUIRED, False):
# Component called require_fatfs() - enable regardless of user setting
add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", False)
add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 2)
elif advanced[CONF_DISABLE_FATFS]:
add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", True)
add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 0)
for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
add_idf_sdkconfig_option(name, RawSdkconfigValue(value))
@@ -1282,6 +1518,11 @@ async def to_code(config):
if conf[CONF_COMPONENTS]:
CORE.add_job(_add_yaml_idf_components, conf[CONF_COMPONENTS])
# Write EXCLUDE_COMPONENTS at FINAL priority after all components have had
# a chance to call include_builtin_idf_component() to re-enable components they need.
# Default exclusions are added in set_core_data() during config validation.
CORE.add_job(_write_exclude_components)
APP_PARTITION_SIZES = {
"2MB": 0x0C0000, # 768 KB

View File

@@ -175,6 +175,32 @@ ESP32_BOARD_PINS = {
"LED": 13,
"LED_BUILTIN": 13,
},
"adafruit_feather_esp32s3_reversetft": {
"BUTTON": 0,
"A0": 18,
"A1": 17,
"A2": 16,
"A3": 15,
"A4": 14,
"A5": 8,
"SCK": 36,
"MOSI": 35,
"MISO": 37,
"RX": 38,
"TX": 39,
"SCL": 4,
"SDA": 3,
"NEOPIXEL": 33,
"PIN_NEOPIXEL": 33,
"NEOPIXEL_POWER": 21,
"TFT_I2C_POWER": 7,
"TFT_CS": 42,
"TFT_DC": 40,
"TFT_RESET": 41,
"TFT_BACKLIGHT": 45,
"LED": 13,
"LED_BUILTIN": 13,
},
"adafruit_feather_esp32s3_tft": {
"BUTTON": 0,
"A0": 18,

View File

@@ -6,12 +6,14 @@ KEY_FLASH_SIZE = "flash_size"
KEY_VARIANT = "variant"
KEY_SDKCONFIG_OPTIONS = "sdkconfig_options"
KEY_COMPONENTS = "components"
KEY_EXCLUDE_COMPONENTS = "exclude_components"
KEY_REPO = "repo"
KEY_REF = "ref"
KEY_REFRESH = "refresh"
KEY_PATH = "path"
KEY_SUBMODULES = "submodules"
KEY_EXTRA_BUILD_FILES = "extra_build_files"
KEY_FULL_CERT_BUNDLE = "full_cert_bundle"
VARIANT_ESP32 = "ESP32"
VARIANT_ESP32C2 = "ESP32C2"

View File

@@ -0,0 +1,57 @@
/*
* Linker wrap stubs for std::__throw_* functions.
*
* ESP-IDF compiles with -fno-exceptions, so C++ exceptions always abort.
* However, ESP-IDF only wraps low-level functions (__cxa_throw, etc.),
* not the std::__throw_* functions that construct exception objects first.
* This pulls in ~3KB of dead exception class code that can never run.
*
* ESP8266 Arduino already solved this: their toolchain rebuilds libstdc++
* with throw functions that just call abort(). We achieve the same result
* using linker --wrap without requiring toolchain changes.
*
* These stubs abort immediately with a descriptive message, allowing
* the linker to dead-code eliminate the exception class infrastructure.
*
* Wrapped functions and their callers:
* - std::__throw_length_error: std::string::reserve, std::vector::reserve
* - std::__throw_logic_error: std::promise, std::packaged_task
* - std::__throw_out_of_range: std::string::at, std::vector::at
* - std::__throw_out_of_range_fmt: std::bitset::to_ulong
* - std::__throw_bad_alloc: operator new
* - std::__throw_bad_function_call: std::function::operator()
*/
#ifdef USE_ESP_IDF
#include "esp_system.h"
namespace esphome::esp32 {}
// Linker wraps for std::__throw_* - must be extern "C" at global scope.
// Names must be __wrap_ + mangled name for the linker's --wrap option.
// NOLINTBEGIN(bugprone-reserved-identifier,cert-dcl37-c,cert-dcl51-cpp,readability-identifier-naming)
extern "C" {
// std::__throw_length_error(char const*) - called when container size exceeds max_size()
void __wrap__ZSt20__throw_length_errorPKc(const char *) { esp_system_abort("std::length_error"); }
// std::__throw_logic_error(char const*) - called for logic errors (e.g., promise already satisfied)
void __wrap__ZSt19__throw_logic_errorPKc(const char *) { esp_system_abort("std::logic_error"); }
// std::__throw_out_of_range(char const*) - called by at() when index is out of bounds
void __wrap__ZSt20__throw_out_of_rangePKc(const char *) { esp_system_abort("std::out_of_range"); }
// std::__throw_out_of_range_fmt(char const*, ...) - called by bitset::to_ulong when value doesn't fit
void __wrap__ZSt24__throw_out_of_range_fmtPKcz(const char *, ...) { esp_system_abort("std::out_of_range"); }
// std::__throw_bad_alloc() - called when operator new fails
void __wrap__ZSt17__throw_bad_allocv() { esp_system_abort("std::bad_alloc"); }
// std::__throw_bad_function_call() - called when invoking empty std::function
void __wrap__ZSt25__throw_bad_function_callv() { esp_system_abort("std::bad_function_call"); }
} // extern "C"
// NOLINTEND(bugprone-reserved-identifier,cert-dcl37-c,cert-dcl51-cpp,readability-identifier-naming)
#endif // USE_ESP_IDF

View File

@@ -5,6 +5,7 @@ from esphome import pins
import esphome.codegen as cg
from esphome.components import esp32, light
from esphome.components.const import CONF_USE_PSRAM
from esphome.components.esp32 import include_builtin_idf_component
import esphome.config_validation as cv
from esphome.const import (
CONF_CHIPSET,
@@ -129,6 +130,9 @@ CONFIG_SCHEMA = cv.All(
async def to_code(config):
# Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
include_builtin_idf_component("esp_driver_rmt")
var = cg.new_Pvariable(config[CONF_OUTPUT_ID])
await light.register_light(var, config)
await cg.register_component(var, config)

View File

@@ -6,6 +6,7 @@ from esphome.components.esp32 import (
VARIANT_ESP32S3,
get_esp32_variant,
gpio,
include_builtin_idf_component,
)
import esphome.config_validation as cv
from esphome.const import (
@@ -266,6 +267,9 @@ CONFIG_SCHEMA = cv.All(
async def to_code(config):
# Re-enable ESP-IDF's touch sensor driver (excluded by default to save compile time)
include_builtin_idf_component("esp_driver_touch_sens")
touch = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(touch, config)

View File

@@ -14,6 +14,7 @@ from esphome.components.esp32 import (
add_idf_component,
add_idf_sdkconfig_option,
get_esp32_variant,
include_builtin_idf_component,
)
from esphome.components.network import ip_address_literal
from esphome.components.spi import CONF_INTERFACE_INDEX, get_spi_interface
@@ -419,6 +420,9 @@ async def to_code(config):
# Also disable WiFi/BT coexistence since WiFi is disabled
add_idf_sdkconfig_option("CONFIG_SW_COEXIST_ENABLE", False)
# Re-enable ESP-IDF's Ethernet driver (excluded by default to save compile time)
include_builtin_idf_component("esp_eth")
if config[CONF_TYPE] == "LAN8670":
# Add LAN867x 10BASE-T1S PHY support component
add_idf_component(name="espressif/lan867x", ref="2.0.0")

View File

@@ -16,12 +16,8 @@ namespace event {
#define LOG_EVENT(prefix, type, obj) \
if ((obj) != nullptr) { \
ESP_LOGCONFIG(TAG, "%s%s '%s'", prefix, LOG_STR_LITERAL(type), (obj)->get_name().c_str()); \
if (!(obj)->get_icon_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Icon: '%s'", prefix, (obj)->get_icon_ref().c_str()); \
} \
if (!(obj)->get_device_class_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Device Class: '%s'", prefix, (obj)->get_device_class_ref().c_str()); \
} \
LOG_ENTITY_ICON(TAG, prefix, *(obj)); \
LOG_ENTITY_DEVICE_CLASS(TAG, prefix, *(obj)); \
}
class Event : public EntityBase, public EntityBase_DeviceClass {

View File

@@ -3,6 +3,7 @@
#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
#include <cinttypes>
@@ -19,7 +20,8 @@ static bool was_power_cycled() {
#endif
#ifdef USE_ESP8266
auto reset_reason = EspClass::getResetReason();
return strcasecmp(reset_reason.c_str(), "power On") == 0 || strcasecmp(reset_reason.c_str(), "external system") == 0;
return ESPHOME_strcasecmp_P(reset_reason.c_str(), ESPHOME_PSTR("power On")) == 0 ||
ESPHOME_strcasecmp_P(reset_reason.c_str(), ESPHOME_PSTR("external system")) == 0;
#endif
#ifdef USE_LIBRETINY
auto reason = lt_get_reboot_reason();

View File

@@ -227,8 +227,7 @@ void Fan::publish_state() {
constexpr uint32_t RESTORE_STATE_VERSION = 0x71700ABA;
optional<FanRestoreState> Fan::restore_state_() {
FanRestoreState recovered{};
this->rtc_ =
global_preferences->make_preference<FanRestoreState>(this->get_preference_hash() ^ RESTORE_STATE_VERSION);
this->rtc_ = this->make_entity_preference<FanRestoreState>(RESTORE_STATE_VERSION);
bool restored = this->rtc_.load(&recovered);
switch (this->restore_mode_) {

View File

@@ -9,30 +9,56 @@ from esphome.const import (
CONF_VALUE,
)
from esphome.core import CoroPriority, coroutine_with_priority
from esphome.types import ConfigType
CODEOWNERS = ["@esphome/core"]
globals_ns = cg.esphome_ns.namespace("globals")
GlobalsComponent = globals_ns.class_("GlobalsComponent", cg.Component)
RestoringGlobalsComponent = globals_ns.class_("RestoringGlobalsComponent", cg.Component)
RestoringGlobalsComponent = globals_ns.class_(
"RestoringGlobalsComponent", cg.PollingComponent
)
RestoringGlobalStringComponent = globals_ns.class_(
"RestoringGlobalStringComponent", cg.Component
"RestoringGlobalStringComponent", cg.PollingComponent
)
GlobalVarSetAction = globals_ns.class_("GlobalVarSetAction", automation.Action)
CONF_MAX_RESTORE_DATA_LENGTH = "max_restore_data_length"
# Base schema fields shared by both variants
_BASE_SCHEMA = {
cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
cv.Required(CONF_TYPE): cv.string_strict,
cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
}
MULTI_CONF = True
CONFIG_SCHEMA = cv.Schema(
# Non-restoring globals: regular Component (no polling needed)
_NON_RESTORING_SCHEMA = cv.Schema(
{
cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
cv.Required(CONF_TYPE): cv.string_strict,
cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
**_BASE_SCHEMA,
cv.Optional(CONF_RESTORE_VALUE, default=False): cv.boolean,
cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
}
).extend(cv.COMPONENT_SCHEMA)
# Restoring globals: PollingComponent with configurable update_interval
_RESTORING_SCHEMA = cv.Schema(
{
**_BASE_SCHEMA,
cv.Optional(CONF_RESTORE_VALUE, default=True): cv.boolean,
}
).extend(cv.polling_component_schema("1s"))
def _globals_schema(config: ConfigType) -> ConfigType:
"""Select schema based on restore_value setting."""
if config.get(CONF_RESTORE_VALUE, False):
return _RESTORING_SCHEMA(config)
return _NON_RESTORING_SCHEMA(config)
MULTI_CONF = True
CONFIG_SCHEMA = _globals_schema
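A short standalone sketch of the dispatch above (real validation is done by the cv schemas; this only shows which branch a given entry takes): restore_value decides whether the polling schema, and therefore update_interval, applies.

# Mirror of the branch taken by _globals_schema(); dicts are illustrative only.
def pick_schema(config: dict) -> str:
    if config.get("restore_value", False):
        return "restoring: PollingComponent schema (update_interval, default 1s)"
    return "non-restoring: plain Component schema"

print(pick_schema({"id": "my_global", "type": "int"}))
print(pick_schema({"id": "my_global", "type": "int", "restore_value": True}))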
# Run with low priority so that namespaces are registered first
@coroutine_with_priority(CoroPriority.LATE)

View File

@@ -5,8 +5,7 @@
#include "esphome/core/helpers.h"
#include <cstring>
namespace esphome {
namespace globals {
namespace esphome::globals {
template<typename T> class GlobalsComponent : public Component {
public:
@@ -24,13 +23,14 @@ template<typename T> class GlobalsComponent : public Component {
T value_{};
};
template<typename T> class RestoringGlobalsComponent : public Component {
template<typename T> class RestoringGlobalsComponent : public PollingComponent {
public:
using value_type = T;
explicit RestoringGlobalsComponent() = default;
explicit RestoringGlobalsComponent(T initial_value) : value_(initial_value) {}
explicit RestoringGlobalsComponent() : PollingComponent(1000) {}
explicit RestoringGlobalsComponent(T initial_value) : PollingComponent(1000), value_(initial_value) {}
explicit RestoringGlobalsComponent(
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
: PollingComponent(1000) {
memcpy(this->value_, initial_value.data(), sizeof(T));
}
@@ -44,7 +44,7 @@ template<typename T> class RestoringGlobalsComponent : public Component {
float get_setup_priority() const override { return setup_priority::HARDWARE; }
void loop() override { store_value_(); }
void update() override { store_value_(); }
void on_shutdown() override { store_value_(); }
@@ -66,13 +66,14 @@ template<typename T> class RestoringGlobalsComponent : public Component {
};
// Use with string or subclasses of strings
template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public Component {
template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public PollingComponent {
public:
using value_type = T;
explicit RestoringGlobalStringComponent() = default;
explicit RestoringGlobalStringComponent(T initial_value) { this->value_ = initial_value; }
explicit RestoringGlobalStringComponent() : PollingComponent(1000) {}
explicit RestoringGlobalStringComponent(T initial_value) : PollingComponent(1000) { this->value_ = initial_value; }
explicit RestoringGlobalStringComponent(
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
: PollingComponent(1000) {
memcpy(this->value_, initial_value.data(), sizeof(T));
}
@@ -90,7 +91,7 @@ template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public C
float get_setup_priority() const override { return setup_priority::HARDWARE; }
void loop() override { store_value_(); }
void update() override { store_value_(); }
void on_shutdown() override { store_value_(); }
@@ -144,5 +145,4 @@ template<typename T> T &id(GlobalsComponent<T> *value) { return value->value();
template<typename T> T &id(RestoringGlobalsComponent<T> *value) { return value->value(); }
template<typename T, uint8_t SZ> T &id(RestoringGlobalStringComponent<T, SZ> *value) { return value->value(); }
} // namespace globals
} // namespace esphome
} // namespace esphome::globals

View File

@@ -350,8 +350,7 @@ ClimateTraits HaierClimateBase::traits() { return traits_; }
void HaierClimateBase::initialization() {
constexpr uint32_t restore_settings_version = 0xA77D21EF;
this->base_rtc_ =
global_preferences->make_preference<HaierBaseSettings>(this->get_preference_hash() ^ restore_settings_version);
this->base_rtc_ = this->make_entity_preference<HaierBaseSettings>(restore_settings_version);
HaierBaseSettings recovered;
if (!this->base_rtc_.load(&recovered)) {
recovered = {false, true};

View File

@@ -515,8 +515,7 @@ haier_protocol::HaierMessage HonClimate::get_power_message(bool state) {
void HonClimate::initialization() {
HaierClimateBase::initialization();
constexpr uint32_t restore_settings_version = 0x57EB59DDUL;
this->hon_rtc_ =
global_preferences->make_preference<HonSettings>(this->get_preference_hash() ^ restore_settings_version);
this->hon_rtc_ = this->make_entity_preference<HonSettings>(restore_settings_version);
HonSettings recovered;
if (this->hon_rtc_.load(&recovered)) {
this->settings_ = recovered;

View File

@@ -126,7 +126,7 @@ CONFIG_SCHEMA = cv.All(
),
cv.Optional(CONF_CA_CERTIFICATE_PATH): cv.All(
cv.file_,
cv.only_on(PLATFORM_HOST),
cv.Any(cv.only_on(PLATFORM_HOST), cv.only_on_esp32),
),
}
).extend(cv.COMPONENT_SCHEMA),
@@ -155,12 +155,32 @@ async def to_code(config):
cg.add(var.set_watchdog_timeout(timeout_ms))
if CORE.is_esp32:
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
esp32.include_builtin_idf_component("esp_http_client")
cg.add(var.set_buffer_size_rx(config[CONF_BUFFER_SIZE_RX]))
cg.add(var.set_buffer_size_tx(config[CONF_BUFFER_SIZE_TX]))
cg.add(var.set_verify_ssl(config[CONF_VERIFY_SSL]))
if config.get(CONF_VERIFY_SSL):
esp32.add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
if ca_cert_path := config.get(CONF_CA_CERTIFICATE_PATH):
with open(ca_cert_path, encoding="utf-8") as f:
ca_cert_content = f.read()
cg.add(var.set_ca_certificate(ca_cert_content))
else:
# Uses the certificate bundle configured in esp32 component.
# By default, ESPHome uses the CMN (common CAs) bundle which covers
# ~99% of websites including GitHub, Let's Encrypt, DigiCert, etc.
# If connecting to services with uncommon CAs, components can call:
# esp32.require_full_certificate_bundle()
# Or users can set in their config:
# esp32:
# framework:
# advanced:
# use_full_certificate_bundle: true
esp32.add_idf_sdkconfig_option(
"CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True
)
esp32.add_idf_sdkconfig_option(
"CONFIG_ESP_TLS_INSECURE",

View File

@@ -131,6 +131,10 @@ std::shared_ptr<HttpContainer> HttpRequestArduino::perform(const std::string &ur
}
}
// HTTPClient::getSize() returns -1 for chunked transfer encoding (no Content-Length).
// When cast to size_t, -1 becomes SIZE_MAX (4294967295 on 32-bit).
// The read() method handles this: bytes_read_ can never reach SIZE_MAX, so the
// early return check (bytes_read_ >= content_length) will never trigger.
int content_length = container->client_.getSize();
ESP_LOGD(TAG, "Content-Length: %d", content_length);
container->content_length = (size_t) content_length;
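To make the cast described above concrete (a throwaway illustration, not part of the component), reinterpreting -1 as a 32-bit unsigned size yields SIZE_MAX:

# (size_t)-1 on a 32-bit target, computed via two's-complement reinterpretation.
content_length = -1                       # HTTPClient::getSize() for chunked responses
as_size_t = content_length & 0xFFFFFFFF
print(as_size_t)                          # 4294967295 == SIZE_MAX on 32-bit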
@@ -167,17 +171,23 @@ int HttpContainerArduino::read(uint8_t *buf, size_t max_len) {
}
int available_data = stream_ptr->available();
int bufsize = std::min(max_len, std::min(this->content_length - this->bytes_read_, (size_t) available_data));
// For chunked transfer encoding, HTTPClient::getSize() returns -1, which becomes SIZE_MAX when
// cast to size_t. SIZE_MAX - bytes_read_ is still huge, so it won't limit the read.
size_t remaining = (this->content_length > 0) ? (this->content_length - this->bytes_read_) : max_len;
int bufsize = std::min(max_len, std::min(remaining, (size_t) available_data));
if (bufsize == 0) {
this->duration_ms += (millis() - start);
// Check if we've read all expected content
if (this->bytes_read_ >= this->content_length) {
// Check if we've read all expected content (only valid when content_length is known and not SIZE_MAX)
// For chunked encoding (content_length == SIZE_MAX), we can't use this check
if (this->content_length > 0 && this->bytes_read_ >= this->content_length) {
return 0; // All content read successfully
}
// No data available - check if connection is still open
// For chunked encoding, !connected() after reading means EOF (all chunks received)
// For known content_length with bytes_read_ < content_length, it means connection dropped
if (!stream_ptr->connected()) {
return HTTP_ERROR_CONNECTION_CLOSED; // Connection closed prematurely
return HTTP_ERROR_CONNECTION_CLOSED; // Connection closed or EOF for chunked
}
return 0; // No data yet, caller should retry
}

View File

@@ -27,8 +27,9 @@ void HttpRequestIDF::dump_config() {
HttpRequestComponent::dump_config();
ESP_LOGCONFIG(TAG,
" Buffer Size RX: %u\n"
" Buffer Size TX: %u",
this->buffer_size_rx_, this->buffer_size_tx_);
" Buffer Size TX: %u\n"
" Custom CA Certificate: %s",
this->buffer_size_rx_, this->buffer_size_tx_, YESNO(this->ca_certificate_ != nullptr));
}
esp_err_t HttpRequestIDF::http_event_handler(esp_http_client_event_t *evt) {
@@ -88,11 +89,15 @@ std::shared_ptr<HttpContainer> HttpRequestIDF::perform(const std::string &url, c
config.disable_auto_redirect = !this->follow_redirects_;
config.max_redirection_count = this->redirect_limit_;
config.auth_type = HTTP_AUTH_TYPE_BASIC;
#if CONFIG_MBEDTLS_CERTIFICATE_BUNDLE
if (secure && this->verify_ssl_) {
config.crt_bundle_attach = esp_crt_bundle_attach;
}
if (this->ca_certificate_ != nullptr) {
config.cert_pem = this->ca_certificate_;
#if CONFIG_MBEDTLS_CERTIFICATE_BUNDLE
} else {
config.crt_bundle_attach = esp_crt_bundle_attach;
#endif
}
}
if (this->useragent_ != nullptr) {
config.user_agent = this->useragent_;
@@ -152,6 +157,8 @@ std::shared_ptr<HttpContainer> HttpRequestIDF::perform(const std::string &url, c
}
container->feed_wdt();
// esp_http_client_fetch_headers() returns 0 for chunked transfer encoding (no Content-Length header).
// The read() method handles content_length == 0 specially to support chunked responses.
container->content_length = esp_http_client_fetch_headers(client);
container->feed_wdt();
container->status_code = esp_http_client_get_status_code(client);
@@ -220,14 +227,22 @@ std::shared_ptr<HttpContainer> HttpRequestIDF::perform(const std::string &url, c
//
// We normalize to HttpContainer::read() contract:
// > 0: bytes read
// 0: no data yet / all content read (caller should check bytes_read vs content_length)
// 0: all content read (only returned when content_length is known and fully read)
// < 0: error/connection closed
//
// Note on chunked transfer encoding:
// esp_http_client_fetch_headers() returns 0 for chunked responses (no Content-Length header).
// We handle this by skipping the content_length check when content_length is 0,
// allowing esp_http_client_read() to handle chunked decoding internally and signal EOF
// by returning 0.
int HttpContainerIDF::read(uint8_t *buf, size_t max_len) {
const uint32_t start = millis();
watchdog::WatchdogManager wdm(this->parent_->get_watchdog_timeout());
// Check if we've already read all expected content
if (this->bytes_read_ >= this->content_length) {
// Skip this check when content_length is 0 (chunked transfer encoding or unknown length)
// For chunked responses, esp_http_client_read() will return 0 when all data is received
if (this->content_length > 0 && this->bytes_read_ >= this->content_length) {
return 0; // All content read successfully
}
@@ -242,7 +257,13 @@ int HttpContainerIDF::read(uint8_t *buf, size_t max_len) {
return read_len_or_error;
}
// Connection closed by server before all content received
// esp_http_client_read() returns 0 in two cases:
// 1. Known content_length: connection closed before all data received (error)
// 2. Chunked encoding (content_length == 0): end of stream reached (EOF)
// For case 1, returning HTTP_ERROR_CONNECTION_CLOSED is correct.
// For case 2, 0 indicates that all chunked data has already been delivered
// in previous successful read() calls, so treating this as a closed
// connection does not cause any loss of response data.
if (read_len_or_error == 0) {
return HTTP_ERROR_CONNECTION_CLOSED;
}

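A self-contained sketch of a consumer written against the normalized contract described above; the fake container below only emulates the return-value convention (> 0 data, 0 done, < 0 error/EOF) and is not ESPHome code, and the -1 sentinel is assumed for illustration.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

static constexpr int HTTP_ERROR_CONNECTION_CLOSED = -1;  // assumed sentinel, sketch only

// Minimal stand-in that delivers a fixed payload, then signals EOF the chunked way:
// a negative value once every byte has already been handed out by earlier calls.
struct FakeContainer {
  const char *payload = "hello world";
  size_t offset = 0;
  int read(uint8_t *buf, size_t max_len) {
    size_t remaining = std::strlen(payload) - offset;
    if (remaining == 0)
      return HTTP_ERROR_CONNECTION_CLOSED;  // all chunked data was delivered previously
    size_t n = remaining < max_len ? remaining : max_len;
    std::memcpy(buf, payload + offset, n);
    offset += n;
    return (int) n;
  }
};

int main() {
  FakeContainer container;
  uint8_t buf[4];
  while (true) {
    int n = container.read(buf, sizeof(buf));
    if (n <= 0)
      break;  // 0: all known content read, < 0: error or chunked EOF
    std::fwrite(buf, 1, (size_t) n, stdout);
  }
  std::putchar('\n');
  return 0;
}
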
View File

@@ -35,6 +35,7 @@ class HttpRequestIDF : public HttpRequestComponent {
void set_buffer_size_rx(uint16_t buffer_size_rx) { this->buffer_size_rx_ = buffer_size_rx; }
void set_buffer_size_tx(uint16_t buffer_size_tx) { this->buffer_size_tx_ = buffer_size_tx; }
void set_verify_ssl(bool verify_ssl) { this->verify_ssl_ = verify_ssl; }
void set_ca_certificate(const char *ca_certificate) { this->ca_certificate_ = ca_certificate; }
protected:
std::shared_ptr<HttpContainer> perform(const std::string &url, const std::string &method, const std::string &body,
@@ -44,6 +45,7 @@ class HttpRequestIDF : public HttpRequestComponent {
uint16_t buffer_size_rx_{};
uint16_t buffer_size_tx_{};
bool verify_ssl_{true};
const char *ca_certificate_{nullptr};
/// @brief Monitors the http client events to gather response headers
static esp_err_t http_event_handler(esp_http_client_event_t *evt);

View File

@@ -82,7 +82,7 @@ uint8_t OtaHttpRequestComponent::do_ota_() {
uint32_t last_progress = 0;
uint32_t update_start_time = millis();
md5::MD5Digest md5_receive;
std::unique_ptr<char[]> md5_receive_str(new char[33]);
char md5_receive_str[33];
if (this->md5_expected_.empty() && !this->http_get_md5_()) {
return OTA_MD5_INVALID;
@@ -176,14 +176,14 @@ uint8_t OtaHttpRequestComponent::do_ota_() {
// verify MD5 is as expected and act accordingly
md5_receive.calculate();
md5_receive.get_hex(md5_receive_str.get());
this->md5_computed_ = md5_receive_str.get();
md5_receive.get_hex(md5_receive_str);
this->md5_computed_ = md5_receive_str;
if (strncmp(this->md5_computed_.c_str(), this->md5_expected_.c_str(), MD5_SIZE) != 0) {
ESP_LOGE(TAG, "MD5 computed: %s - Aborting due to MD5 mismatch", this->md5_computed_.c_str());
this->cleanup_(std::move(backend), container);
return ota::OTA_RESPONSE_ERROR_MD5_MISMATCH;
} else {
backend->set_update_md5(md5_receive_str.get());
backend->set_update_md5(md5_receive_str);
}
container->end();

View File

@@ -2,6 +2,25 @@
from . import BoardConfig
# Huidu HD-WF1
BoardConfig(
"huidu-hd-wf1",
r1_pin=2,
g1_pin=6,
b1_pin=3,
r2_pin=4,
g2_pin=8,
b2_pin=5,
a_pin=39,
b_pin=38,
c_pin=37,
d_pin=36,
e_pin=12,
lat_pin=33,
oe_pin=35,
clk_pin=34,
)
# Huidu HD-WF2
BoardConfig(
"huidu-hd-wf2",

View File

@@ -587,7 +587,7 @@ def _build_config_struct(
async def to_code(config: ConfigType) -> None:
add_idf_component(
name="esphome/esp-hub75",
ref="0.3.0",
ref="0.3.2",
)
# Set compile-time configuration via build flags (so external library sees them)

View File

@@ -1,6 +1,11 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components.esp32 import (
add_idf_sdkconfig_option,
get_esp32_variant,
include_builtin_idf_component,
)
from esphome.components.esp32.const import (
VARIANT_ESP32,
VARIANT_ESP32C3,
VARIANT_ESP32C5,
@@ -10,8 +15,6 @@ from esphome.components.esp32 import (
VARIANT_ESP32P4,
VARIANT_ESP32S2,
VARIANT_ESP32S3,
add_idf_sdkconfig_option,
get_esp32_variant,
)
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_CHANNEL, CONF_ID, CONF_SAMPLE_RATE
@@ -272,6 +275,10 @@ FINAL_VALIDATE_SCHEMA = _final_validate
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
# Re-enable ESP-IDF's I2S driver (excluded by default to save compile time)
include_builtin_idf_component("esp_driver_i2s")
if use_legacy():
cg.add_define("USE_I2S_LEGACY")

View File

@@ -10,7 +10,7 @@ static const char *const TAG = "integration";
void IntegrationSensor::setup() {
if (this->restore_) {
this->pref_ = global_preferences->make_preference<float>(this->get_preference_hash());
this->pref_ = this->make_entity_preference<float>();
float preference_value = 0;
this->pref_.load(&preference_value);
this->result_ = preference_value;

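The same entity-scoped preference change recurs below in LD2450, the LVGL number/select wrappers, and LightState. A hedged sketch of the presumed usage in a hypothetical component, based only on what this diff shows (make_entity_preference<T>() replacing global_preferences->make_preference<T>(get_preference_hash())); the component class itself is made up.

// Hypothetical component illustrating the entity-scoped preference pattern from this diff.
// Assumes entities expose make_entity_preference<T>() and that ESPPreferenceObject keeps
// its existing load()/save() interface.
class MyRestoringSensor : public sensor::Sensor, public Component {
 public:
  void setup() override {
    this->pref_ = this->make_entity_preference<float>();
    float restored = 0.0f;
    if (this->pref_.load(&restored)) {
      this->publish_state(restored);  // start from the persisted value
    }
  }
  void save_value(float value) { this->pref_.save(&value); }

 protected:
  ESPPreferenceObject pref_;
};
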
View File

@@ -15,7 +15,7 @@ static const char *const TAG = "json";
static SpiRamAllocator global_json_allocator;
#endif
std::string build_json(const json_build_t &f) {
SerializationBuffer<> build_json(const json_build_t &f) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
JsonBuilder builder;
JsonObject root = builder.root();
@@ -61,14 +61,62 @@ JsonDocument parse_json(const uint8_t *data, size_t len) {
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}
std::string JsonBuilder::serialize() {
SerializationBuffer<> JsonBuilder::serialize() {
// ===========================================================================================
// CRITICAL: NRVO (Named Return Value Optimization) - DO NOT REFACTOR WITHOUT UNDERSTANDING
// ===========================================================================================
//
// This function is carefully structured to enable NRVO. The compiler constructs `result`
// directly in the caller's stack frame, eliminating the move constructor call entirely.
//
// WITHOUT NRVO: Each return would trigger SerializationBuffer's move constructor, which
// must memcpy up to 768 bytes of stack buffer content. This happens on EVERY JSON
// serialization (sensor updates, web server responses, MQTT publishes, etc.).
//
// WITH NRVO: Zero memcpy, zero move constructor overhead. The buffer lives directly
// where the caller needs it.
//
// Requirements for NRVO to work:
// 1. Single named variable (`result`) returned from ALL paths
// 2. All paths must return the SAME variable (not different variables)
// 3. No std::move() on the return statement
//
// If you must modify this function:
// - Keep a single `result` variable declared at the top
// - All code paths must return `result` (not a different variable)
// - Verify NRVO still works by checking the disassembly for move constructor calls
// - Test: objdump -d -C firmware.elf | grep "SerializationBuffer.*SerializationBuffer"
// Should show only destructor, NOT move constructor
//
// Why we avoid measureJson(): It instantiates DummyWriter templates adding ~1KB flash.
// Instead, try stack buffer first. 768 bytes covers 99.9% of JSON payloads (sensors ~200B,
// lights ~170B, climate ~700B). Only entities with 40+ options exceed this.
//
// ===========================================================================================
constexpr size_t buf_size = SerializationBuffer<>::BUFFER_SIZE;
SerializationBuffer<> result(buf_size - 1); // Max content size (reserve 1 for null)
if (doc_.overflowed()) {
ESP_LOGE(TAG, "JSON document overflow");
return "{}";
auto *buf = result.data_writable_();
buf[0] = '{';
buf[1] = '}';
buf[2] = '\0';
result.set_size_(2);
return result;
}
std::string output;
serializeJson(doc_, output);
return output;
size_t size = serializeJson(doc_, result.data_writable_(), buf_size);
if (size < buf_size) {
// Fits in stack buffer - update size to actual length
result.set_size_(size);
return result;
}
// Needs heap allocation - reallocate and serialize again with exact size
result.reallocate_heap_(size);
serializeJson(doc_, result.data_writable_(), size + 1);
return result;
}
} // namespace json

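A hedged caller-side view of what the NRVO guarantee above buys: with the single-named-result structure, the buffer returned by build_json() is constructed directly in the caller's frame, so publishing it costs no extra copy of the stack buffer. The call shape mirrors MQTTClientComponent::publish_json later in this diff; mqtt_client, topic, qos and retain are placeholders.

// Placeholder call site; only build_json()/c_str()/size() come from this diff.
auto message = json::build_json([](JsonObject root) { root["value"] = 42; });
mqtt_client->publish(topic, message.c_str(), message.size(), qos, retain);
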
View File

@@ -1,5 +1,7 @@
#pragma once
#include <cstring>
#include <string>
#include <vector>
#include "esphome/core/defines.h"
@@ -14,6 +16,108 @@
namespace esphome {
namespace json {
/// Buffer for JSON serialization that uses stack allocation for small payloads.
/// Template parameter STACK_SIZE specifies the stack buffer size (default 768 bytes).
/// Supports move semantics for efficient return-by-value.
template<size_t STACK_SIZE = 768> class SerializationBuffer {
public:
static constexpr size_t BUFFER_SIZE = STACK_SIZE; ///< Stack buffer size for this instantiation
/// Construct with known size (typically from measureJson)
explicit SerializationBuffer(size_t size) : size_(size) {
if (size + 1 <= STACK_SIZE) {
buffer_ = stack_buffer_;
} else {
heap_buffer_ = new char[size + 1];
buffer_ = heap_buffer_;
}
buffer_[0] = '\0';
}
~SerializationBuffer() { delete[] heap_buffer_; }
// Move constructor - works with same template instantiation
SerializationBuffer(SerializationBuffer &&other) noexcept : heap_buffer_(other.heap_buffer_), size_(other.size_) {
if (other.buffer_ == other.stack_buffer_) {
// Stack buffer - must copy content
std::memcpy(stack_buffer_, other.stack_buffer_, size_ + 1);
buffer_ = stack_buffer_;
} else {
// Heap buffer - steal ownership
buffer_ = heap_buffer_;
other.heap_buffer_ = nullptr;
}
// Leave moved-from object in valid empty state
other.stack_buffer_[0] = '\0';
other.buffer_ = other.stack_buffer_;
other.size_ = 0;
}
// Move assignment
SerializationBuffer &operator=(SerializationBuffer &&other) noexcept {
if (this != &other) {
delete[] heap_buffer_;
heap_buffer_ = other.heap_buffer_;
size_ = other.size_;
if (other.buffer_ == other.stack_buffer_) {
std::memcpy(stack_buffer_, other.stack_buffer_, size_ + 1);
buffer_ = stack_buffer_;
} else {
buffer_ = heap_buffer_;
other.heap_buffer_ = nullptr;
}
// Leave moved-from object in valid empty state
other.stack_buffer_[0] = '\0';
other.buffer_ = other.stack_buffer_;
other.size_ = 0;
}
return *this;
}
// Delete copy operations
SerializationBuffer(const SerializationBuffer &) = delete;
SerializationBuffer &operator=(const SerializationBuffer &) = delete;
/// Get null-terminated C string
const char *c_str() const { return buffer_; }
/// Get data pointer
const char *data() const { return buffer_; }
/// Get string length (excluding null terminator)
size_t size() const { return size_; }
/// Implicit conversion to std::string for backward compatibility
/// WARNING: This allocates a new std::string on the heap. Prefer using
/// c_str() or data()/size() directly when possible to avoid allocation.
operator std::string() const { return std::string(buffer_, size_); } // NOLINT(google-explicit-constructor)
private:
friend class JsonBuilder; ///< Allows JsonBuilder::serialize() to call private methods
/// Get writable buffer (for serialization)
char *data_writable_() { return buffer_; }
/// Set actual size after serialization (must not exceed allocated size)
/// Also ensures null termination for c_str() safety
void set_size_(size_t size) {
size_ = size;
buffer_[size] = '\0';
}
/// Reallocate to heap buffer with new size (for when stack buffer is too small)
/// This invalidates any previous buffer content. Used by JsonBuilder::serialize().
void reallocate_heap_(size_t size) {
delete[] heap_buffer_;
heap_buffer_ = new char[size + 1];
buffer_ = heap_buffer_;
size_ = size;
buffer_[0] = '\0';
}
char stack_buffer_[STACK_SIZE];
char *heap_buffer_{nullptr};
char *buffer_;
size_t size_;
};
#ifdef USE_PSRAM
// Build an allocator for the JSON Library using the RAMAllocator class
// This is only compiled when PSRAM is enabled
@@ -46,7 +150,8 @@ using json_parse_t = std::function<bool(JsonObject)>;
using json_build_t = std::function<void(JsonObject)>;
/// Build a JSON string with the provided json build function.
std::string build_json(const json_build_t &f);
/// Returns SerializationBuffer for stack-first allocation; implicitly converts to std::string.
SerializationBuffer<> build_json(const json_build_t &f);
/// Parse a JSON string and run the provided json parse function if it's valid.
bool parse_json(const std::string &data, const json_parse_t &f);
@@ -69,7 +174,9 @@ class JsonBuilder {
return root_;
}
std::string serialize();
/// Serialize the JSON document to a SerializationBuffer (stack-first allocation)
/// Uses 768-byte stack buffer by default, falls back to heap for larger JSON
SerializationBuffer<> serialize();
private:
#ifdef USE_PSRAM

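A short consumer-side sketch of the trade-off called out in the conversion warning above: the zero-copy accessors cost nothing, while the implicit std::string conversion allocates. fill_json and send_bytes are placeholders.

const auto buf = json::build_json(fill_json);  // stack-first SerializationBuffer<>
send_bytes(buf.data(), buf.size());            // zero-copy path: preferred
std::string copy = buf;                        // implicit conversion: allocates a heap string
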
View File

@@ -184,7 +184,7 @@ static inline bool validate_header_footer(const uint8_t *header_footer, const ui
void LD2450Component::setup() {
#ifdef USE_NUMBER
if (this->presence_timeout_number_ != nullptr) {
this->pref_ = global_preferences->make_preference<float>(this->presence_timeout_number_->get_preference_hash());
this->pref_ = this->presence_timeout_number_->make_entity_preference<float>();
this->set_presence_timeout();
}
#endif
@@ -451,7 +451,7 @@ void LD2450Component::handle_periodic_data_() {
int16_t ty = 0;
int16_t td = 0;
int16_t ts = 0;
int16_t angle = 0;
float angle = 0;
uint8_t index = 0;
Direction direction{DIRECTION_UNDEFINED};
bool is_moving = false;

View File

@@ -143,6 +143,7 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
],
icon=ICON_FORMAT_TEXT_ROTATION_ANGLE_UP,
unit_of_measurement=UNIT_DEGREES,
accuracy_decimals=1,
),
cv.Optional(CONF_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,

View File

@@ -191,10 +191,17 @@ def _notify_old_style(config):
# The dev and latest branches will be at *least* this version, which is what matters.
# Use GitHub releases directly to avoid PlatformIO moderation delays.
ARDUINO_VERSIONS = {
"dev": (cv.Version(1, 9, 2), "https://github.com/libretiny-eu/libretiny.git"),
"latest": (cv.Version(1, 9, 2), "libretiny"),
"recommended": (cv.Version(1, 9, 2), None),
"dev": (cv.Version(1, 11, 0), "https://github.com/libretiny-eu/libretiny.git"),
"latest": (
cv.Version(1, 11, 0),
"https://github.com/libretiny-eu/libretiny.git#v1.11.0",
),
"recommended": (
cv.Version(1, 11, 0),
"https://github.com/libretiny-eu/libretiny.git#v1.11.0",
),
}

View File

@@ -7,9 +7,7 @@ namespace esphome::light {
class AddressableLightWrapper : public light::AddressableLight {
public:
explicit AddressableLightWrapper(light::LightState *light_state) : light_state_(light_state) {
this->wrapper_state_ = new uint8_t[5]; // NOLINT(cppcoreguidelines-owning-memory)
}
explicit AddressableLightWrapper(light::LightState *light_state) : light_state_(light_state) {}
int32_t size() const override { return 1; }
@@ -118,7 +116,7 @@ class AddressableLightWrapper : public light::AddressableLight {
}
light::LightState *light_state_;
uint8_t *wrapper_state_;
mutable uint8_t wrapper_state_[5]{};
ColorMode color_mode_{ColorMode::UNKNOWN};
};

View File

@@ -44,7 +44,7 @@ void LightState::setup() {
case LIGHT_RESTORE_DEFAULT_ON:
case LIGHT_RESTORE_INVERTED_DEFAULT_OFF:
case LIGHT_RESTORE_INVERTED_DEFAULT_ON:
this->rtc_ = global_preferences->make_preference<LightStateRTCState>(this->get_preference_hash());
this->rtc_ = this->make_entity_preference<LightStateRTCState>();
// Attempt to load from preferences, else fall back to default values
if (!this->rtc_.load(&recovered)) {
recovered.state = (this->restore_mode_ == LIGHT_RESTORE_DEFAULT_ON ||
@@ -57,7 +57,7 @@ void LightState::setup() {
break;
case LIGHT_RESTORE_AND_OFF:
case LIGHT_RESTORE_AND_ON:
this->rtc_ = global_preferences->make_preference<LightStateRTCState>(this->get_preference_hash());
this->rtc_ = this->make_entity_preference<LightStateRTCState>();
this->rtc_.load(&recovered);
recovered.state = (this->restore_mode_ == LIGHT_RESTORE_AND_ON);
break;

View File

@@ -14,9 +14,7 @@ class Lock;
#define LOG_LOCK(prefix, type, obj) \
if ((obj) != nullptr) { \
ESP_LOGCONFIG(TAG, "%s%s '%s'", prefix, LOG_STR_LITERAL(type), (obj)->get_name().c_str()); \
if (!(obj)->get_icon_ref().empty()) { \
ESP_LOGCONFIG(TAG, "%s Icon: '%s'", prefix, (obj)->get_icon_ref().c_str()); \
} \
LOG_ENTITY_ICON(TAG, prefix, *(obj)); \
if ((obj)->traits.get_assumed_state()) { \
ESP_LOGCONFIG(TAG, "%s Assumed State: YES", prefix); \
} \

View File

@@ -21,7 +21,7 @@ class LVGLNumber : public number::Number, public Component {
void setup() override {
float value = this->value_lambda_();
if (this->restore_) {
this->pref_ = global_preferences->make_preference<float>(this->get_preference_hash());
this->pref_ = this->make_entity_preference<float>();
if (this->pref_.load(&value)) {
this->control_lambda_(value);
}

View File

@@ -20,7 +20,7 @@ class LVGLSelect : public select::Select, public Component {
this->set_options_();
if (this->restore_) {
size_t index;
this->pref_ = global_preferences->make_preference<size_t>(this->get_preference_hash());
this->pref_ = this->make_entity_preference<size_t>();
if (this->pref_.load(&index))
this->widget_->set_selected_index(index, LV_ANIM_OFF);
}

View File

@@ -12,6 +12,10 @@ namespace esphome::mdns {
static const char *const TAG = "mdns";
static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_SERVICE_COUNT> &services) {
#ifdef USE_OPENTHREAD
// OpenThread handles service registration via SRP client
// Services are compiled by MDNSComponent::compile_records_() and consumed by OpenThreadSrpComponent
#else
esp_err_t err = mdns_init();
if (err != ESP_OK) {
ESP_LOGW(TAG, "Init failed: %s", esp_err_to_name(err));
@@ -41,13 +45,16 @@ static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_S
ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
}
}
#endif
}
void MDNSComponent::setup() { this->setup_buffers_and_register_(register_esp32); }
void MDNSComponent::on_shutdown() {
#ifndef USE_OPENTHREAD
mdns_free();
delay(40); // Allow the mdns packets announcing service removal to be sent
#endif
}
} // namespace esphome::mdns

View File

@@ -3,8 +3,7 @@
#include <cinttypes>
namespace esphome {
namespace mhz19 {
namespace esphome::mhz19 {
static const char *const TAG = "mhz19";
static const uint8_t MHZ19_REQUEST_LENGTH = 8;
@@ -17,6 +16,19 @@ static const uint8_t MHZ19_COMMAND_DETECTION_RANGE_0_2000PPM[] = {0xFF, 0x01, 0x
static const uint8_t MHZ19_COMMAND_DETECTION_RANGE_0_5000PPM[] = {0xFF, 0x01, 0x99, 0x00, 0x00, 0x00, 0x13, 0x88};
static const uint8_t MHZ19_COMMAND_DETECTION_RANGE_0_10000PPM[] = {0xFF, 0x01, 0x99, 0x00, 0x00, 0x00, 0x27, 0x10};
static const LogString *detection_range_to_log_string(MHZ19DetectionRange range) {
switch (range) {
case MHZ19_DETECTION_RANGE_0_2000PPM:
return LOG_STR("0-2000 ppm");
case MHZ19_DETECTION_RANGE_0_5000PPM:
return LOG_STR("0-5000 ppm");
case MHZ19_DETECTION_RANGE_0_10000PPM:
return LOG_STR("0-10000 ppm");
default:
return LOG_STR("default");
}
}
uint8_t mhz19_checksum(const uint8_t *command) {
uint8_t sum = 0;
for (uint8_t i = 1; i < MHZ19_REQUEST_LENGTH; i++) {
@@ -91,24 +103,24 @@ void MHZ19Component::abc_disable() {
this->mhz19_write_command_(MHZ19_COMMAND_ABC_DISABLE, nullptr);
}
void MHZ19Component::range_set(MHZ19DetectionRange detection_ppm) {
switch (detection_ppm) {
case MHZ19_DETECTION_RANGE_DEFAULT:
ESP_LOGV(TAG, "Using previously set detection range (no change)");
break;
void MHZ19Component::range_set(MHZ19DetectionRange detection_range) {
const uint8_t *command;
switch (detection_range) {
case MHZ19_DETECTION_RANGE_0_2000PPM:
ESP_LOGD(TAG, "Setting detection range to 0 to 2000ppm");
this->mhz19_write_command_(MHZ19_COMMAND_DETECTION_RANGE_0_2000PPM, nullptr);
command = MHZ19_COMMAND_DETECTION_RANGE_0_2000PPM;
break;
case MHZ19_DETECTION_RANGE_0_5000PPM:
ESP_LOGD(TAG, "Setting detection range to 0 to 5000ppm");
this->mhz19_write_command_(MHZ19_COMMAND_DETECTION_RANGE_0_5000PPM, nullptr);
command = MHZ19_COMMAND_DETECTION_RANGE_0_5000PPM;
break;
case MHZ19_DETECTION_RANGE_0_10000PPM:
ESP_LOGD(TAG, "Setting detection range to 0 to 10000ppm");
this->mhz19_write_command_(MHZ19_COMMAND_DETECTION_RANGE_0_10000PPM, nullptr);
command = MHZ19_COMMAND_DETECTION_RANGE_0_10000PPM;
break;
default:
ESP_LOGV(TAG, "Using previously set detection range (no change)");
return;
}
ESP_LOGD(TAG, "Setting detection range to %s", LOG_STR_ARG(detection_range_to_log_string(detection_range)));
this->mhz19_write_command_(command, nullptr);
}
bool MHZ19Component::mhz19_write_command_(const uint8_t *command, uint8_t *response) {
@@ -140,24 +152,7 @@ void MHZ19Component::dump_config() {
}
ESP_LOGCONFIG(TAG, " Warmup time: %" PRIu32 " s", this->warmup_seconds_);
const char *range_str;
switch (this->detection_range_) {
case MHZ19_DETECTION_RANGE_DEFAULT:
range_str = "default";
break;
case MHZ19_DETECTION_RANGE_0_2000PPM:
range_str = "0 to 2000ppm";
break;
case MHZ19_DETECTION_RANGE_0_5000PPM:
range_str = "0 to 5000ppm";
break;
case MHZ19_DETECTION_RANGE_0_10000PPM:
range_str = "0 to 10000ppm";
break;
}
ESP_LOGCONFIG(TAG, " Detection range: %s", range_str);
ESP_LOGCONFIG(TAG, " Detection range: %s", LOG_STR_ARG(detection_range_to_log_string(this->detection_range_)));
}
} // namespace mhz19
} // namespace esphome
} // namespace esphome::mhz19

View File

@@ -5,8 +5,7 @@
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"
namespace esphome {
namespace mhz19 {
namespace esphome::mhz19 {
enum MHZ19ABCLogic {
MHZ19_ABC_NONE = 0,
@@ -32,7 +31,7 @@ class MHZ19Component : public PollingComponent, public uart::UARTDevice {
void calibrate_zero();
void abc_enable();
void abc_disable();
void range_set(MHZ19DetectionRange detection_ppm);
void range_set(MHZ19DetectionRange detection_range);
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
void set_co2_sensor(sensor::Sensor *co2_sensor) { co2_sensor_ = co2_sensor; }
@@ -74,5 +73,4 @@ template<typename... Ts> class MHZ19DetectionRangeSetAction : public Action<Ts..
void play(const Ts &...x) override { this->parent_->range_set(this->detection_range_.value(x...)); }
};
} // namespace mhz19
} // namespace esphome
} // namespace esphome::mhz19

View File

@@ -4,7 +4,10 @@ from esphome import automation
from esphome.automation import Condition
import esphome.codegen as cg
from esphome.components import logger, socket
from esphome.components.esp32 import add_idf_sdkconfig_option
from esphome.components.esp32 import (
add_idf_sdkconfig_option,
include_builtin_idf_component,
)
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
@@ -360,6 +363,8 @@ async def to_code(config):
# This enables low-latency MQTT event processing instead of waiting for select() timeout
if CORE.is_esp32:
socket.require_wake_loop_threadsafe()
# Re-enable ESP-IDF's mqtt component (excluded by default to save compile time)
include_builtin_idf_component("mqtt")
cg.add_define("USE_MQTT")
cg.add_global(mqtt_ns.using)

View File

@@ -1,5 +1,6 @@
#include "mqtt_alarm_control_panel.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
#include "mqtt_const.h"
@@ -18,21 +19,21 @@ void MQTTAlarmControlPanelComponent::setup() {
this->alarm_control_panel_->add_on_state_callback([this]() { this->publish_state(); });
this->subscribe(this->get_command_topic_(), [this](const std::string &topic, const std::string &payload) {
auto call = this->alarm_control_panel_->make_call();
if (strcasecmp(payload.c_str(), "ARM_AWAY") == 0) {
if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("ARM_AWAY")) == 0) {
call.arm_away();
} else if (strcasecmp(payload.c_str(), "ARM_HOME") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("ARM_HOME")) == 0) {
call.arm_home();
} else if (strcasecmp(payload.c_str(), "ARM_NIGHT") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("ARM_NIGHT")) == 0) {
call.arm_night();
} else if (strcasecmp(payload.c_str(), "ARM_VACATION") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("ARM_VACATION")) == 0) {
call.arm_vacation();
} else if (strcasecmp(payload.c_str(), "ARM_CUSTOM_BYPASS") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("ARM_CUSTOM_BYPASS")) == 0) {
call.arm_custom_bypass();
} else if (strcasecmp(payload.c_str(), "DISARM") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("DISARM")) == 0) {
call.disarm();
} else if (strcasecmp(payload.c_str(), "PENDING") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("PENDING")) == 0) {
call.pending();
} else if (strcasecmp(payload.c_str(), "TRIGGERED") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("TRIGGERED")) == 0) {
call.triggered();
} else {
ESP_LOGW(TAG, "'%s': Received unknown command payload %s", this->friendly_name_().c_str(), payload.c_str());
@@ -119,7 +120,8 @@ bool MQTTAlarmControlPanelComponent::publish_state() {
default:
state_s = "unknown";
}
return this->publish(this->get_state_topic_(), state_s);
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish(this->get_state_topic_to_(topic_buf), state_s);
}
} // namespace esphome::mqtt

View File

@@ -52,8 +52,9 @@ bool MQTTBinarySensorComponent::publish_state(bool state) {
if (this->binary_sensor_->is_status_binary_sensor())
return true;
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
const char *state_s = state ? "ON" : "OFF";
return this->publish(this->get_state_topic_(), state_s);
return this->publish(this->get_state_topic_to_(topic_buf), state_s);
}
} // namespace esphome::mqtt

View File

@@ -564,8 +564,8 @@ bool MQTTClientComponent::publish(const char *topic, const char *payload, size_t
}
bool MQTTClientComponent::publish_json(const char *topic, const json::json_build_t &f, uint8_t qos, bool retain) {
std::string message = json::build_json(f);
return this->publish(topic, message.c_str(), message.length(), qos, retain);
auto message = json::build_json(f);
return this->publish(topic, message.c_str(), message.size(), qos, retain);
}
void MQTTClientComponent::enable() {

View File

@@ -132,17 +132,29 @@ std::string MQTTComponent::get_command_topic_() const {
}
bool MQTTComponent::publish(const std::string &topic, const std::string &payload) {
return this->publish(topic, payload.data(), payload.size());
return this->publish(topic.c_str(), payload.data(), payload.size());
}
bool MQTTComponent::publish(const std::string &topic, const char *payload, size_t payload_length) {
if (topic.empty())
return this->publish(topic.c_str(), payload, payload_length);
}
bool MQTTComponent::publish(const char *topic, const char *payload, size_t payload_length) {
if (topic[0] == '\0')
return false;
return global_mqtt_client->publish(topic, payload, payload_length, this->qos_, this->retain_);
}
bool MQTTComponent::publish(const char *topic, const char *payload) {
return this->publish(topic, payload, strlen(payload));
}
bool MQTTComponent::publish_json(const std::string &topic, const json::json_build_t &f) {
if (topic.empty())
return this->publish_json(topic.c_str(), f);
}
bool MQTTComponent::publish_json(const char *topic, const json::json_build_t &f) {
if (topic[0] == '\0')
return false;
return global_mqtt_client->publish_json(topic, f, this->qos_, this->retain_);
}

View File

@@ -157,6 +157,38 @@ class MQTTComponent : public Component {
*/
bool publish(const std::string &topic, const char *payload, size_t payload_length);
/** Send a MQTT message (no heap allocation for topic).
*
* @param topic The topic as C string.
* @param payload The payload buffer.
* @param payload_length The length of the payload.
*/
bool publish(const char *topic, const char *payload, size_t payload_length);
/** Send a MQTT message (no heap allocation for topic).
*
* @param topic The topic as StringRef (for use with get_state_topic_to_()).
* @param payload The payload buffer.
* @param payload_length The length of the payload.
*/
bool publish(StringRef topic, const char *payload, size_t payload_length) {
return this->publish(topic.c_str(), payload, payload_length);
}
/** Send a MQTT message (no heap allocation for topic).
*
* @param topic The topic as C string.
* @param payload The null-terminated payload.
*/
bool publish(const char *topic, const char *payload);
/** Send a MQTT message (no heap allocation for topic).
*
* @param topic The topic as StringRef (for use with get_state_topic_to_()).
* @param payload The null-terminated payload.
*/
bool publish(StringRef topic, const char *payload) { return this->publish(topic.c_str(), payload); }
/** Construct and send a JSON MQTT message.
*
* @param topic The topic.
@@ -164,6 +196,20 @@ class MQTTComponent : public Component {
*/
bool publish_json(const std::string &topic, const json::json_build_t &f);
/** Construct and send a JSON MQTT message (no heap allocation for topic).
*
* @param topic The topic as C string.
* @param f The Json Message builder.
*/
bool publish_json(const char *topic, const json::json_build_t &f);
/** Construct and send a JSON MQTT message (no heap allocation for topic).
*
* @param topic The topic as StringRef (for use with get_state_topic_to_()).
* @param f The Json Message builder.
*/
bool publish_json(StringRef topic, const json::json_build_t &f) { return this->publish_json(topic.c_str(), f); }
/** Subscribe to a MQTT topic.
*
* @param topic The topic. Wildcards are currently not supported.

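The publish_state() implementations in the components below all adopt the same stack-topic pattern. A hedged sketch of a typical call site, assuming get_state_topic_to_() fills the caller-provided buffer (sized MQTT_DEFAULT_TOPIC_MAX_LEN) and returns a StringRef into it, consistent with the overloads declared above; the surrounding method and class are hypothetical.

// Hypothetical publish_state() using the new stack-allocated topic path.
bool MyMQTTComponent::publish_state(bool state) {
  char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];  // topic is built here, no std::string allocation
  const char *state_s = state ? "ON" : "OFF";
  return this->publish(this->get_state_topic_to_(topic_buf), state_s);
}
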
View File

@@ -115,7 +115,8 @@ bool MQTTCoverComponent::publish_state() {
: this->cover_->position == COVER_OPEN ? "open"
: traits.get_supports_position() ? "open"
: "unknown";
if (!this->publish(this->get_state_topic_(), state_s))
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
if (!this->publish(this->get_state_topic_to_(topic_buf), state_s))
success = false;
return success;
}

View File

@@ -53,7 +53,8 @@ bool MQTTDateComponent::send_initial_state() {
}
}
bool MQTTDateComponent::publish_state(uint16_t year, uint8_t month, uint8_t day) {
return this->publish_json(this->get_state_topic_(), [year, month, day](JsonObject root) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish_json(this->get_state_topic_to_(topic_buf), [year, month, day](JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
root[ESPHOME_F("year")] = year;
root[ESPHOME_F("month")] = month;

View File

@@ -66,15 +66,17 @@ bool MQTTDateTimeComponent::send_initial_state() {
}
bool MQTTDateTimeComponent::publish_state(uint16_t year, uint8_t month, uint8_t day, uint8_t hour, uint8_t minute,
uint8_t second) {
return this->publish_json(this->get_state_topic_(), [year, month, day, hour, minute, second](JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
root[ESPHOME_F("year")] = year;
root[ESPHOME_F("month")] = month;
root[ESPHOME_F("day")] = day;
root[ESPHOME_F("hour")] = hour;
root[ESPHOME_F("minute")] = minute;
root[ESPHOME_F("second")] = second;
});
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish_json(this->get_state_topic_to_(topic_buf),
[year, month, day, hour, minute, second](JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
root[ESPHOME_F("year")] = year;
root[ESPHOME_F("month")] = month;
root[ESPHOME_F("day")] = day;
root[ESPHOME_F("hour")] = hour;
root[ESPHOME_F("minute")] = minute;
root[ESPHOME_F("second")] = second;
});
}
} // namespace esphome::mqtt

View File

@@ -44,7 +44,8 @@ void MQTTEventComponent::dump_config() {
}
bool MQTTEventComponent::publish_event_(const std::string &event_type) {
return this->publish_json(this->get_state_topic_(), [event_type](JsonObject root) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish_json(this->get_state_topic_to_(topic_buf), [event_type](JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
root[MQTT_EVENT_TYPE] = event_type;
});

View File

@@ -158,9 +158,10 @@ void MQTTFanComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig
}
}
bool MQTTFanComponent::publish_state() {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
const char *state_s = this->state_->state ? "ON" : "OFF";
ESP_LOGD(TAG, "'%s' Sending state %s.", this->state_->get_name().c_str(), state_s);
this->publish(this->get_state_topic_(), state_s);
this->publish(this->get_state_topic_to_(topic_buf), state_s);
bool failed = false;
if (this->state_->get_traits().supports_direction()) {
bool success = this->publish(this->get_direction_state_topic(),

View File

@@ -34,7 +34,8 @@ void MQTTJSONLightComponent::on_light_remote_values_update() {
MQTTJSONLightComponent::MQTTJSONLightComponent(LightState *state) : state_(state) {}
bool MQTTJSONLightComponent::publish_state_() {
return this->publish_json(this->get_state_topic_(), [this](JsonObject root) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish_json(this->get_state_topic_to_(topic_buf), [this](JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
LightJSONSchema::dump_json(*this->state_, root);
});

View File

@@ -1,5 +1,6 @@
#include "mqtt_lock.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
#include "mqtt_const.h"
@@ -16,11 +17,11 @@ MQTTLockComponent::MQTTLockComponent(lock::Lock *a_lock) : lock_(a_lock) {}
void MQTTLockComponent::setup() {
this->subscribe(this->get_command_topic_(), [this](const std::string &topic, const std::string &payload) {
if (strcasecmp(payload.c_str(), "LOCK") == 0) {
if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("LOCK")) == 0) {
this->lock_->lock();
} else if (strcasecmp(payload.c_str(), "UNLOCK") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("UNLOCK")) == 0) {
this->lock_->unlock();
} else if (strcasecmp(payload.c_str(), "OPEN") == 0) {
} else if (ESPHOME_strcasecmp_P(payload.c_str(), ESPHOME_PSTR("OPEN")) == 0) {
this->lock_->open();
} else {
ESP_LOGW(TAG, "'%s': Received unknown status payload: %s", this->friendly_name_().c_str(), payload.c_str());
@@ -47,13 +48,14 @@ void MQTTLockComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfi
bool MQTTLockComponent::send_initial_state() { return this->publish_state(); }
bool MQTTLockComponent::publish_state() {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
#ifdef USE_STORE_LOG_STR_IN_FLASH
char buf[LOCK_STATE_STR_SIZE];
strncpy_P(buf, (PGM_P) lock_state_to_string(this->lock_->state), sizeof(buf) - 1);
buf[sizeof(buf) - 1] = '\0';
return this->publish(this->get_state_topic_(), buf);
return this->publish(this->get_state_topic_to_(topic_buf), buf);
#else
return this->publish(this->get_state_topic_(), LOG_STR_ARG(lock_state_to_string(this->lock_->state)));
return this->publish(this->get_state_topic_to_(topic_buf), LOG_STR_ARG(lock_state_to_string(this->lock_->state)));
#endif
}

View File

@@ -74,9 +74,10 @@ bool MQTTNumberComponent::send_initial_state() {
}
}
bool MQTTNumberComponent::publish_state(float value) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
char buffer[64];
buf_append_printf(buffer, sizeof(buffer), 0, "%f", value);
return this->publish(this->get_state_topic_(), buffer);
size_t len = buf_append_printf(buffer, sizeof(buffer), 0, "%f", value);
return this->publish(this->get_state_topic_to_(topic_buf), buffer, len);
}
} // namespace esphome::mqtt

View File

@@ -50,7 +50,8 @@ bool MQTTSelectComponent::send_initial_state() {
}
}
bool MQTTSelectComponent::publish_state(const std::string &value) {
return this->publish(this->get_state_topic_(), value);
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish(this->get_state_topic_to_(topic_buf), value.data(), value.size());
}
} // namespace esphome::mqtt

View File

@@ -79,12 +79,13 @@ bool MQTTSensorComponent::send_initial_state() {
}
}
bool MQTTSensorComponent::publish_state(float value) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
if (mqtt::global_mqtt_client->is_publish_nan_as_none() && std::isnan(value))
return this->publish(this->get_state_topic_(), "None", 4);
return this->publish(this->get_state_topic_to_(topic_buf), "None", 4);
int8_t accuracy = this->sensor_->get_accuracy_decimals();
char buf[VALUE_ACCURACY_MAX_LEN];
size_t len = value_accuracy_to_buf(buf, value, accuracy);
return this->publish(this->get_state_topic_(), buf, len);
return this->publish(this->get_state_topic_to_(topic_buf), buf, len);
}
} // namespace esphome::mqtt

View File

@@ -52,8 +52,9 @@ void MQTTSwitchComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryCon
bool MQTTSwitchComponent::send_initial_state() { return this->publish_state(this->switch_->state); }
bool MQTTSwitchComponent::publish_state(bool state) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
const char *state_s = state ? "ON" : "OFF";
return this->publish(this->get_state_topic_(), state_s);
return this->publish(this->get_state_topic_to_(topic_buf), state_s);
}
} // namespace esphome::mqtt

View File

@@ -53,7 +53,8 @@ bool MQTTTextComponent::send_initial_state() {
}
}
bool MQTTTextComponent::publish_state(const std::string &value) {
return this->publish(this->get_state_topic_(), value);
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish(this->get_state_topic_to_(topic_buf), value.data(), value.size());
}
} // namespace esphome::mqtt

View File

@@ -31,7 +31,10 @@ void MQTTTextSensor::dump_config() {
LOG_MQTT_COMPONENT(true, false);
}
bool MQTTTextSensor::publish_state(const std::string &value) { return this->publish(this->get_state_topic_(), value); }
bool MQTTTextSensor::publish_state(const std::string &value) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish(this->get_state_topic_to_(topic_buf), value.data(), value.size());
}
bool MQTTTextSensor::send_initial_state() {
if (this->sensor_->has_state()) {
return this->publish_state(this->sensor_->state);

View File

@@ -53,7 +53,8 @@ bool MQTTTimeComponent::send_initial_state() {
}
}
bool MQTTTimeComponent::publish_state(uint8_t hour, uint8_t minute, uint8_t second) {
return this->publish_json(this->get_state_topic_(), [hour, minute, second](JsonObject root) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish_json(this->get_state_topic_to_(topic_buf), [hour, minute, second](JsonObject root) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
root[ESPHOME_F("hour")] = hour;
root[ESPHOME_F("minute")] = minute;

View File

@@ -28,7 +28,8 @@ void MQTTUpdateComponent::setup() {
}
bool MQTTUpdateComponent::publish_state() {
return this->publish_json(this->get_state_topic_(), [this](JsonObject root) {
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
return this->publish_json(this->get_state_topic_to_(topic_buf), [this](JsonObject root) {
root[ESPHOME_F("installed_version")] = this->update_->update_info.current_version;
root[ESPHOME_F("latest_version")] = this->update_->update_info.latest_version;
root[ESPHOME_F("title")] = this->update_->update_info.title;

View File

@@ -84,7 +84,8 @@ bool MQTTValveComponent::publish_state() {
: this->valve_->position == VALVE_OPEN ? "open"
: traits.get_supports_position() ? "open"
: "unknown";
if (!this->publish(this->get_state_topic_(), state_s))
char topic_buf[MQTT_DEFAULT_TOPIC_MAX_LEN];
if (!this->publish(this->get_state_topic_to_(topic_buf), state_s))
success = false;
return success;
}

View File

@@ -1,7 +1,12 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import light
from esphome.components.esp32 import VARIANT_ESP32C3, VARIANT_ESP32S3, get_esp32_variant
from esphome.components.esp32 import (
VARIANT_ESP32C3,
VARIANT_ESP32S3,
get_esp32_variant,
include_builtin_idf_component,
)
import esphome.config_validation as cv
from esphome.const import (
CONF_CHANNEL,
@@ -205,6 +210,10 @@ async def to_code(config):
has_white = "W" in config[CONF_TYPE]
method = config[CONF_METHOD]
# Re-enable ESP-IDF's RMT driver if using RMT method (excluded by default)
if CORE.is_esp32 and method[CONF_TYPE] == METHOD_ESP32_RMT:
include_builtin_idf_component("esp_driver_rmt")
method_template = METHODS[method[CONF_TYPE]].to_code(
method, config[CONF_VARIANT], config[CONF_INVERT]
)

View File

@@ -177,6 +177,8 @@ async def to_code(config):
cg.add_define("USE_NEXTION_TFT_UPLOAD")
cg.add(var.set_tft_url(config[CONF_TFT_URL]))
if CORE.is_esp32:
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
esp32.include_builtin_idf_component("esp_http_client")
esp32.add_idf_sdkconfig_option("CONFIG_ESP_TLS_INSECURE", True)
esp32.add_idf_sdkconfig_option(
"CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY", True

View File

@@ -150,27 +150,24 @@ void Nextion::dump_config() {
#ifdef USE_NEXTION_CONFIG_SKIP_CONNECTION_HANDSHAKE
ESP_LOGCONFIG(TAG, " Skip handshake: YES");
#else // USE_NEXTION_CONFIG_SKIP_CONNECTION_HANDSHAKE
ESP_LOGCONFIG(TAG,
#ifdef USE_NEXTION_CONFIG_DUMP_DEVICE_INFO
ESP_LOGCONFIG(TAG,
" Device Model: %s\n"
" FW Version: %s\n"
" Serial Number: %s\n"
" Flash Size: %s\n"
" Max queue age: %u ms\n"
" Startup override: %u ms\n",
this->device_model_.c_str(), this->firmware_version_.c_str(), this->serial_number_.c_str(),
this->flash_size_.c_str(), this->max_q_age_ms_, this->startup_override_ms_);
#endif // USE_NEXTION_CONFIG_DUMP_DEVICE_INFO
#ifdef USE_NEXTION_CONFIG_EXIT_REPARSE_ON_START
" Exit reparse: YES\n"
ESP_LOGCONFIG(TAG, " Exit reparse: YES\n");
#endif // USE_NEXTION_CONFIG_EXIT_REPARSE_ON_START
ESP_LOGCONFIG(TAG,
" Wake On Touch: %s\n"
" Touch Timeout: %" PRIu16,
#ifdef USE_NEXTION_CONFIG_DUMP_DEVICE_INFO
this->device_model_.c_str(), this->firmware_version_.c_str(), this->serial_number_.c_str(),
this->flash_size_.c_str(), this->max_q_age_ms_,
this->startup_override_ms_
#endif // USE_NEXTION_CONFIG_DUMP_DEVICE_INFO
YESNO(this->connection_state_.auto_wake_on_touch_),
this->touch_sleep_timeout_);
YESNO(this->connection_state_.auto_wake_on_touch_), this->touch_sleep_timeout_);
#endif // USE_NEXTION_CONFIG_SKIP_CONNECTION_HANDSHAKE
#ifdef USE_NEXTION_MAX_COMMANDS_PER_LOOP

View File

@@ -40,7 +40,7 @@ void NfcTagBinarySensor::set_tag_name(const std::string &str) {
this->match_tag_name_ = true;
}
void NfcTagBinarySensor::set_uid(const std::vector<uint8_t> &uid) { this->uid_ = uid; }
void NfcTagBinarySensor::set_uid(const NfcTagUid &uid) { this->uid_ = uid; }
bool NfcTagBinarySensor::tag_match_ndef_string(const std::shared_ptr<NdefMessage> &msg) {
for (const auto &record : msg->get_records()) {
@@ -63,7 +63,7 @@ bool NfcTagBinarySensor::tag_match_tag_name(const std::shared_ptr<NdefMessage> &
return false;
}
bool NfcTagBinarySensor::tag_match_uid(const std::vector<uint8_t> &data) {
bool NfcTagBinarySensor::tag_match_uid(const NfcTagUid &data) {
if (data.size() != this->uid_.size()) {
return false;
}

View File

@@ -19,11 +19,11 @@ class NfcTagBinarySensor : public binary_sensor::BinarySensor,
void set_ndef_match_string(const std::string &str);
void set_tag_name(const std::string &str);
void set_uid(const std::vector<uint8_t> &uid);
void set_uid(const NfcTagUid &uid);
bool tag_match_ndef_string(const std::shared_ptr<NdefMessage> &msg);
bool tag_match_tag_name(const std::shared_ptr<NdefMessage> &msg);
bool tag_match_uid(const std::vector<uint8_t> &data);
bool tag_match_uid(const NfcTagUid &data);
void tag_off(NfcTag &tag) override;
void tag_on(NfcTag &tag) override;
@@ -31,7 +31,7 @@ class NfcTagBinarySensor : public binary_sensor::BinarySensor,
protected:
bool match_tag_name_{false};
std::string match_string_;
std::vector<uint8_t> uid_;
NfcTagUid uid_;
};
} // namespace nfc

View File

@@ -8,19 +8,23 @@ namespace nfc {
static const char *const TAG = "nfc";
char *format_uid_to(char *buffer, const std::vector<uint8_t> &uid) {
char *format_uid_to(char *buffer, std::span<const uint8_t> uid) {
return format_hex_pretty_to(buffer, FORMAT_UID_BUFFER_SIZE, uid.data(), uid.size(), '-');
}
char *format_bytes_to(char *buffer, const std::vector<uint8_t> &bytes) {
char *format_bytes_to(char *buffer, std::span<const uint8_t> bytes) {
return format_hex_pretty_to(buffer, FORMAT_BYTES_BUFFER_SIZE, bytes.data(), bytes.size(), ' ');
}
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
// Deprecated wrappers intentionally use heap-allocating version for backward compatibility
std::string format_uid(const std::vector<uint8_t> &uid) { return format_hex_pretty(uid, '-', false); } // NOLINT
std::string format_bytes(const std::vector<uint8_t> &bytes) { return format_hex_pretty(bytes, ' ', false); } // NOLINT
std::string format_uid(std::span<const uint8_t> uid) {
return format_hex_pretty(uid.data(), uid.size(), '-', false); // NOLINT
}
std::string format_bytes(std::span<const uint8_t> bytes) {
return format_hex_pretty(bytes.data(), bytes.size(), ' ', false); // NOLINT
}
#pragma GCC diagnostic pop
uint8_t guess_tag_type(uint8_t uid_length) {

View File

@@ -6,6 +6,7 @@
#include "ndef_record.h"
#include "nfc_tag.h"
#include <span>
#include <vector>
namespace esphome {
@@ -56,19 +57,19 @@ static const uint8_t MAD_KEY[6] = {0xA0, 0xA1, 0xA2, 0xA3, 0xA4, 0xA5};
/// Max UID size is 10 bytes, formatted as "XX-XX-XX-XX-XX-XX-XX-XX-XX-XX\0" = 30 chars
static constexpr size_t FORMAT_UID_BUFFER_SIZE = 30;
/// Format UID to buffer with '-' separator (e.g., "04-11-22-33"). Returns buffer for inline use.
char *format_uid_to(char *buffer, const std::vector<uint8_t> &uid);
char *format_uid_to(char *buffer, std::span<const uint8_t> uid);
/// Buffer size for format_bytes_to (64 bytes max = 192 chars with space separator)
static constexpr size_t FORMAT_BYTES_BUFFER_SIZE = 192;
/// Format bytes to buffer with ' ' separator (e.g., "04 11 22 33"). Returns buffer for inline use.
char *format_bytes_to(char *buffer, const std::vector<uint8_t> &bytes);
char *format_bytes_to(char *buffer, std::span<const uint8_t> bytes);
// Remove before 2026.6.0
ESPDEPRECATED("Use format_uid_to() with stack buffer instead. Removed in 2026.6.0", "2025.12.0")
std::string format_uid(const std::vector<uint8_t> &uid);
std::string format_uid(std::span<const uint8_t> uid);
// Remove before 2026.6.0
ESPDEPRECATED("Use format_bytes_to() with stack buffer instead. Removed in 2026.6.0", "2025.12.0")
std::string format_bytes(const std::vector<uint8_t> &bytes);
std::string format_bytes(std::span<const uint8_t> bytes);
uint8_t guess_tag_type(uint8_t uid_length);
uint8_t get_mifare_classic_ndef_start_index(std::vector<uint8_t> &data);

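A hedged call-site sketch of the stack-buffer formatter declared above, as it might appear inside nfc-aware code; the UID bytes are made up, and std::span is built from a plain array to keep the example independent of the container type.

uint8_t uid_bytes[7] = {0x04, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66};  // example 7-byte UID
char buf[nfc::FORMAT_UID_BUFFER_SIZE];                              // 30 bytes covers the 10-byte worst case
ESP_LOGD(TAG, "Tag UID: %s", nfc::format_uid_to(buf, std::span<const uint8_t>(uid_bytes)));
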
View File

@@ -10,26 +10,27 @@
namespace esphome {
namespace nfc {
// NFC UIDs are 4, 7, or 10 bytes depending on tag type
static constexpr size_t NFC_UID_MAX_LENGTH = 10;
using NfcTagUid = StaticVector<uint8_t, NFC_UID_MAX_LENGTH>;
class NfcTag {
public:
NfcTag() {
this->uid_ = {};
this->tag_type_ = "Unknown";
};
NfcTag(std::vector<uint8_t> &uid) {
NfcTag() { this->tag_type_ = "Unknown"; };
NfcTag(const NfcTagUid &uid) {
this->uid_ = uid;
this->tag_type_ = "Unknown";
};
NfcTag(std::vector<uint8_t> &uid, const std::string &tag_type) {
NfcTag(const NfcTagUid &uid, const std::string &tag_type) {
this->uid_ = uid;
this->tag_type_ = tag_type;
};
NfcTag(std::vector<uint8_t> &uid, const std::string &tag_type, std::unique_ptr<nfc::NdefMessage> ndef_message) {
NfcTag(const NfcTagUid &uid, const std::string &tag_type, std::unique_ptr<nfc::NdefMessage> ndef_message) {
this->uid_ = uid;
this->tag_type_ = tag_type;
this->ndef_message_ = std::move(ndef_message);
};
NfcTag(std::vector<uint8_t> &uid, const std::string &tag_type, std::vector<uint8_t> &ndef_data) {
NfcTag(const NfcTagUid &uid, const std::string &tag_type, std::vector<uint8_t> &ndef_data) {
this->uid_ = uid;
this->tag_type_ = tag_type;
this->ndef_message_ = make_unique<NdefMessage>(ndef_data);
@@ -41,14 +42,14 @@ class NfcTag {
ndef_message_ = make_unique<NdefMessage>(*rhs.ndef_message_);
}
std::vector<uint8_t> &get_uid() { return this->uid_; };
NfcTagUid &get_uid() { return this->uid_; };
const std::string &get_tag_type() { return this->tag_type_; };
bool has_ndef_message() { return this->ndef_message_ != nullptr; };
const std::shared_ptr<NdefMessage> &get_ndef_message() { return this->ndef_message_; };
void set_ndef_message(std::unique_ptr<NdefMessage> ndef_message) { this->ndef_message_ = std::move(ndef_message); };
protected:
std::vector<uint8_t> uid_;
NfcTagUid uid_;
std::string tag_type_;
std::shared_ptr<NdefMessage> ndef_message_;
};

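NfcTagUid bounds the UID to the 10-byte worst case, so the bytes live inline in NfcTag rather than in a heap-allocated std::vector. A rough conceptual stand-in (explicitly not esphome's StaticVector implementation) to show why no allocation is involved:

#include <cstddef>
#include <cstdint>

// Conceptual fixed-capacity vector, illustrative only -- NOT esphome's StaticVector.
template<typename T, size_t N> class FixedCapacityVectorSketch {
 public:
  void push_back(const T &value) {
    if (this->count_ < N)
      this->data_[this->count_++] = value;  // this sketch silently drops overflow
  }
  size_t size() const { return this->count_; }
  const T *data() const { return this->data_; }

 private:
  T data_[N]{};      // storage is inline: no new[]/delete[] for a 4-, 7-, or 10-byte UID
  size_t count_{0};
};
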
Some files were not shown because too many files have changed in this diff.