Compare commits


29 Commits

Author  SHA1  Message  Date
J. Nick Koston  a3c2248b44  Merge upstream/dev into compact_string_wifi  2026-01-28 05:51:28 -10:00
J. Nick Koston  d75254309f  Merge branch 'filter_wifi_scan_results' into compact_string_wifi  2026-01-25 20:08:49 -10:00
J. Nick Koston  5d1acb0cb8  Merge branch 'dev' into filter_wifi_scan_results  2026-01-25 20:08:41 -10:00
J. Nick Koston  cafc7651c2  Merge branch 'filter_wifi_scan_results' into compact_string_wifi  2026-01-25 17:24:41 -10:00
J. Nick Koston  4099e944d6  tweak  2026-01-25 17:22:00 -10:00
J. Nick Koston  5ad989a13a  Merge remote-tracking branch 'upstream/dev' into filter_wifi_scan_results  2026-01-25 17:17:27 -10:00
                # Conflicts:
                #   esphome/components/wifi/wifi_component_esp_idf.cpp
J. Nick Koston  7336985753  reduce some more  2026-01-22 17:53:50 -10:00
J. Nick Koston  73d076c278  reduce some more  2026-01-22 17:35:00 -10:00
J. Nick Koston  3a2c66171b  use placement new to avoid duplicate code  2026-01-22 17:29:21 -10:00
J. Nick Koston  fca867e18d  [wifi] Add CompactString to reduce WiFi scan heap fragmentation  2026-01-22 17:18:13 -10:00
J. Nick Koston  0ae90512cf  [wifi] Add CompactString to reduce WiFi scan heap fragmentation  2026-01-22 17:16:35 -10:00
J. Nick Koston  165f81dc97  Merge branch 'dev' into filter_wifi_scan_results  2026-01-22 15:05:38 -10:00
J. Nick Koston  dc971b4ed0  tidy  2026-01-20 22:54:52 -10:00
J. Nick Koston  a4fe9852aa  tidy  2026-01-20 22:54:36 -10:00
J. Nick Koston  f6ec5e9c28  tweak  2026-01-20 22:41:45 -10:00
J. Nick Koston  0051196e86  fix  2026-01-20 21:41:43 -10:00
J. Nick Koston  9f83b24913  tweak  2026-01-20 21:19:30 -10:00
J. Nick Koston  5c0747cfe0  Update esphome/components/wifi/wifi_component.cpp  2026-01-20 21:10:51 -10:00
                Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
J. Nick Koston  f0c7306ad5  log scan complete  2026-01-20 21:04:52 -10:00
J. Nick Koston  09b42b778b  log scan complete  2026-01-20 20:57:39 -10:00
J. Nick Koston  d610c3ae91  fix bssid only  2026-01-20 20:54:30 -10:00
J. Nick Koston  687f9a762d  fixes for libretiny  2026-01-20 20:44:28 -10:00
J. Nick Koston  acb22ed286  tweaks  2026-01-20 20:39:30 -10:00
J. Nick Koston  692167341e  tweaks  2026-01-20 20:37:16 -10:00
J. Nick Koston  d5d6936845  tweaks  2026-01-20 20:35:32 -10:00
J. Nick Koston  bffe4a2e05  tweaks  2026-01-20 20:34:53 -10:00
J. Nick Koston  d7c3947ccc  tweak loggig  2026-01-20 20:31:38 -10:00
J. Nick Koston  6f3a49e509  tweak loggig  2026-01-20 20:30:55 -10:00
J. Nick Koston  7aef173e65  [wifi] Filter scan results to only store matching networks  2026-01-20 20:19:35 -10:00
88 changed files with 1125 additions and 2720 deletions
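
The commit titles above name a CompactString helper added to cut WiFi scan heap fragmentation; the wifi component hunks themselves are not among the diffs shown below. As a rough illustration only (the name CompactString and the fragmentation rationale come from the commit messages; the class body here is assumed, not taken from the patch), the usual shape of such a helper is one exactly-sized heap block per SSID instead of a std::string with capacity slack:

    // Hypothetical sketch, not the actual ESPHome implementation.
    #include <cstddef>
    #include <cstring>
    #include <memory>

    class CompactString {
     public:
      CompactString() = default;
      explicit CompactString(const char *s) { this->assign(s, strlen(s)); }

      void assign(const char *s, size_t len) {
        // Single allocation sized exactly to payload + NUL terminator -- no
        // growth slack left behind when transient scan results are discarded.
        buf_ = std::make_unique<char[]>(len + 1);
        memcpy(buf_.get(), s, len);
        buf_[len] = '\0';
      }

      const char *c_str() const { return buf_ ? buf_.get() : ""; }
      bool empty() const { return buf_ == nullptr || buf_[0] == '\0'; }

     private:
      std::unique_ptr<char[]> buf_;
    };

The "use placement new to avoid duplicate code" commit presumably constructs such entries in place within preallocated storage; that detail is likewise not visible in the hunks below.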

View File

@@ -22,7 +22,7 @@ runs:
python-version: ${{ inputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: venv
# yamllint disable-line rule:line-length

View File

@@ -1,38 +0,0 @@
// Constants and markers for PR auto-labeling
module.exports = {
BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
CODEOWNERS_MARKER: '<!-- codeowners-request -->',
TOO_BIG_MARKER: '<!-- too-big-request -->',
DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',
MANAGED_LABELS: [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'chained-pr',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck',
'bugfix',
'new-feature',
'breaking-change',
'developer-breaking-change',
'code-quality',
'deprecated-component'
],
DOCS_PR_PATTERNS: [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
]
};

View File

@@ -1,373 +0,0 @@
const fs = require('fs');
const { DOCS_PR_PATTERNS } = require('./constants');
// Strategy: Merge branch detection
async function detectMergeBranch(context) {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
} else if (baseRef !== 'dev') {
labels.add('chained-pr');
}
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(changedFiles, apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents(prFiles) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(prFiles, apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges(changedFiles) {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD) {
const labels = new Set();
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges(changedFiles) {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges(changedFiles) {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner(github, context, changedFiles) {
const labels = new Set();
const { owner, repo } = context.repo;
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests(changedFiles) {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes(context) {
const labels = new Set();
const prBody = context.payload.pull_request.body || '';
console.log('Checking PR template checkboxes...');
// Check for checked checkboxes in the "Types of changes" section
const checkboxPatterns = [
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
];
for (const { pattern, label } of checkboxPatterns) {
if (pattern.test(prBody)) {
console.log(`Found checked checkbox for: ${label}`);
labels.add(label);
}
}
return labels;
}
// Strategy: Deprecated component detection
async function detectDeprecatedComponents(github, context, changedFiles) {
const labels = new Set();
const deprecatedInfo = [];
const { owner, repo } = context.repo;
// Compile regex once for better performance
const componentFileRegex = /^esphome\/components\/([^\/]+)\//;
// Get files that are modified or added in components directory
const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));
if (componentFiles.length === 0) {
return { labels, deprecatedInfo };
}
// Extract unique component names using the same regex
const components = new Set();
for (const file of componentFiles) {
const match = file.match(componentFileRegex);
if (match) {
components.add(match[1]);
}
}
// Get PR head to fetch files from the PR branch
const prNumber = context.payload.pull_request.number;
// Check each component's __init__.py for DEPRECATED_COMPONENT constant
for (const component of components) {
const initFile = `esphome/components/${component}/__init__.py`;
try {
// Fetch file content from PR head using GitHub API
const { data: fileData } = await github.rest.repos.getContent({
owner,
repo,
path: initFile,
ref: `refs/pull/${prNumber}/head`
});
// Decode base64 content
const content = Buffer.from(fileData.content, 'base64').toString('utf8');
// Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
// Support single quotes, double quotes, and triple quotes (for multiline)
const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;
if (deprecatedMatch) {
labels.add('deprecated-component');
deprecatedInfo.push({
component: component,
message: deprecatedMatch[1].trim()
});
console.log(`Found deprecated component: ${component}`);
}
} catch (error) {
// Only log if it's not a simple "file not found" error (404)
if (error.status !== 404) {
console.log(`Error reading ${initFile}:`, error.message);
}
}
}
return { labels, deprecatedInfo };
}
// Strategy: Requirements detection
async function detectRequirements(allLabels, prFiles, context) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
module.exports = {
detectMergeBranch,
detectComponentPlatforms,
detectNewComponents,
detectNewPlatforms,
detectCoreChanges,
detectPRSize,
detectDashboardChanges,
detectGitHubActionsChanges,
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
};

View File

@@ -1,187 +0,0 @@
const { MANAGED_LABELS } = require('./constants');
const {
detectMergeBranch,
detectComponentPlatforms,
detectNewComponents,
detectNewPlatforms,
detectCoreChanges,
detectPRSize,
detectDashboardChanges,
detectGitHubActionsChanges,
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
} = require('./detectors');
const { handleReviews } = require('./reviews');
const { applyLabels, removeOldLabels } = require('./labels');
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
module.exports = async ({ github, context }) => {
// Environment variables
const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD);
const MAX_LABELS = parseInt(process.env.MAX_LABELS);
const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);
const COMPONENT_LABEL_THRESHOLD = parseInt(process.env.COMPONENT_LABEL_THRESHOLD);
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}
// Fetch API data
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
const branchLabels = await detectMergeBranch(context);
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Apply labels
await applyLabels(github, context, finalLabels);
// Remove old managed labels
await removeOldLabels(github, context, managedLabels, finalLabels);
return;
}
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels,
checkboxLabels,
deprecatedResult
] = await Promise.all([
detectMergeBranch(context),
detectComponentPlatforms(changedFiles, apiData),
detectNewComponents(prFiles),
detectNewPlatforms(prFiles, apiData),
detectCoreChanges(changedFiles),
detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD),
detectDashboardChanges(changedFiles),
detectGitHubActionsChanges(changedFiles),
detectCodeOwner(github, context, changedFiles),
detectTests(changedFiles),
detectPRTemplateCheckboxes(context),
detectDeprecatedComponents(github, context, changedFiles)
]);
// Extract deprecated component info
const deprecatedLabels = deprecatedResult.labels;
const deprecatedInfo = deprecatedResult.deprecatedInfo;
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels,
...checkboxLabels,
...deprecatedLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels, prFiles, context);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}
// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);
// Apply labels
await applyLabels(github, context, finalLabels);
// Remove old managed labels
await removeOldLabels(github, context, managedLabels, finalLabels);
};

View File

@@ -1,41 +0,0 @@
// Apply labels to PR
async function applyLabels(github, context, finalLabels) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
}
// Remove old managed labels
async function removeOldLabels(github, context, managedLabels, finalLabels) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
}
module.exports = {
applyLabels,
removeOldLabels
};

View File

@@ -1,141 +0,0 @@
const {
BOT_COMMENT_MARKER,
CODEOWNERS_MARKER,
TOO_BIG_MARKER,
DEPRECATED_COMPONENT_MARKER
} = require('./constants');
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
const messages = [];
// Deprecated component message
if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
message += `Hey there @${prAuthor},\n`;
message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;
for (const info of deprecatedInfo) {
message += `#### Component: \`${info.component}\`\n`;
message += `${info.message}\n\n`;
}
message += `Consider migrating to the recommended alternative if applicable.`;
messages.push(message);
}
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
const tooManyLabels = originalLabelCount > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${originalLabelCount} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const prAuthor = context.payload.pull_request.user.login;
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
module.exports = {
handleReviews
};

View File

@@ -36,5 +36,633 @@ jobs:
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
-const script = require('./.github/scripts/auto-label-pr/index.js');
-await script({ github, context });
const fs = require('fs');
// Constants
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
const TOO_BIG_MARKER = '<!-- too-big-request -->';
const MANAGED_LABELS = [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'chained-pr',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck',
'bugfix',
'new-feature',
'breaking-change',
'developer-breaking-change',
'code-quality'
];
const DOCS_PR_PATTERNS = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
// Strategy: Merge branch detection
async function detectMergeBranch() {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
} else if (baseRef !== 'dev') {
labels.add('chained-pr');
}
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents() {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges() {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges() {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges() {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner() {
const labels = new Set();
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests() {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes() {
const labels = new Set();
const prBody = context.payload.pull_request.body || '';
console.log('Checking PR template checkboxes...');
// Check for checked checkboxes in the "Types of changes" section
const checkboxPatterns = [
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
];
for (const { pattern, label } of checkboxPatterns) {
if (pattern.test(prBody)) {
console.log(`Found checked checkbox for: ${label}`);
labels.add(label);
}
}
return labels;
}
// Strategy: Requirements detection
async function detectRequirements(allLabels) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount) {
const messages = [];
const prAuthor = context.payload.pull_request.user.login;
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
const tooManyLabels = originalLabelCount > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${originalLabelCount} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(finalLabels, originalLabelCount) {
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
const branchLabels = await detectMergeBranch();
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Apply labels
if (finalLabels.length > 0) {
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
return;
}
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels,
checkboxLabels
] = await Promise.all([
detectMergeBranch(),
detectComponentPlatforms(apiData),
detectNewComponents(),
detectNewPlatforms(apiData),
detectCoreChanges(),
detectPRSize(),
detectDashboardChanges(),
detectGitHubActionsChanges(),
detectCodeOwner(),
detectTests(),
detectPRTemplateCheckboxes()
]);
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels,
...checkboxLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}
// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(finalLabels, originalLabelCount);
// Apply labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}

View File

@@ -47,7 +47,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
-uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: venv
# yamllint disable-line rule:line-length
@@ -157,7 +157,7 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Save Python virtual environment cache
if: github.ref == 'refs/heads/dev'
-uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: venv
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -193,7 +193,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Restore components graph cache
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: .temp/components_graph.json
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -223,7 +223,7 @@ jobs:
echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
- name: Save components graph cache
if: github.ref == 'refs/heads/dev'
-uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: .temp/components_graph.json
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -245,7 +245,7 @@ jobs:
python-version: "3.13"
- name: Restore Python virtual environment
id: cache-venv
-uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: venv
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -334,14 +334,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
-uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -413,14 +413,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
-uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -502,14 +502,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
-uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -735,7 +735,7 @@ jobs:
- name: Restore cached memory analysis
id: cache-memory-analysis
if: steps.check-script.outputs.skip != 'true'
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -759,7 +759,7 @@ jobs:
- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -800,7 +800,7 @@ jobs:
- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
-uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -847,7 +847,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
-uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
+uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}

View File

@@ -102,12 +102,12 @@ jobs:
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Log in to docker hub
-uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
+uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
-uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
+uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -182,13 +182,13 @@ jobs:
- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
-uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
+uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
-uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
+uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}

View File

@@ -104,7 +104,6 @@ esphome/components/cc1101/* @gabest11 @lygris
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/ch422g/* @clydebarrow @jesterret
-esphome/components/ch423/* @dwmw2
esphome/components/chsc6x/* @kkosik20
esphome/components/climate/* @esphome/core
esphome/components/climate_ir/* @glmnet

View File

@@ -2,7 +2,7 @@ import logging
import esphome.codegen as cg
from esphome.components import sensor, voltage_sampler
-from esphome.components.esp32 import get_esp32_variant, include_builtin_idf_component
+from esphome.components.esp32 import get_esp32_variant
from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
from esphome.components.zephyr import (
zephyr_add_overlay,
@@ -118,9 +118,6 @@ async def to_code(config):
cg.add(var.set_sampling_mode(config[CONF_SAMPLING_MODE]))
if CORE.is_esp32:
-# Re-enable ESP-IDF's ADC driver (excluded by default to save compile time)
-include_builtin_idf_component("esp_adc")
if attenuation := config.get(CONF_ATTENUATION):
if attenuation == "auto":
cg.add(var.set_autorange(cg.global_ns.true))

View File

@@ -264,9 +264,9 @@ template<typename... Ts> class APIRespondAction : public Action<Ts...> {
// Build and send JSON response
json::JsonBuilder builder;
this->json_builder_(x..., builder.root());
-auto json_buf = builder.serialize();
+std::string json_str = builder.serialize();
this->parent_->send_action_response(call_id, success, StringRef(error_message),
-reinterpret_cast<const uint8_t *>(json_buf.data()), json_buf.size());
+reinterpret_cast<const uint8_t *>(json_str.data()), json_str.size());
return;
}
#endif

View File

@@ -1,5 +1,5 @@
import esphome.codegen as cg
-from esphome.components.esp32 import add_idf_component, include_builtin_idf_component
+from esphome.components.esp32 import add_idf_component
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_NUM_CHANNELS, CONF_SAMPLE_RATE
import esphome.final_validate as fv
@@ -166,9 +166,6 @@ def final_validate_audio_schema(
async def to_code(config):
-# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
-include_builtin_idf_component("esp_http_client")
add_idf_component(
name="esphome/esp-audio-libs",
ref="2.0.3",

View File

@@ -1,103 +0,0 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import i2c
from esphome.components.i2c import I2CBus
import esphome.config_validation as cv
from esphome.const import (
CONF_I2C_ID,
CONF_ID,
CONF_INPUT,
CONF_INVERTED,
CONF_MODE,
CONF_NUMBER,
CONF_OPEN_DRAIN,
CONF_OUTPUT,
)
from esphome.core import CORE
CODEOWNERS = ["@dwmw2"]
DEPENDENCIES = ["i2c"]
MULTI_CONF = True
ch423_ns = cg.esphome_ns.namespace("ch423")
CH423Component = ch423_ns.class_("CH423Component", cg.Component, i2c.I2CDevice)
CH423GPIOPin = ch423_ns.class_(
"CH423GPIOPin", cg.GPIOPin, cg.Parented.template(CH423Component)
)
CONF_CH423 = "ch423"
# Note that no address is configurable - each register in the CH423 has a dedicated i2c address
CONFIG_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_ID): cv.declare_id(CH423Component),
cv.GenerateID(CONF_I2C_ID): cv.use_id(I2CBus),
}
).extend(cv.COMPONENT_SCHEMA)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
# Can't use register_i2c_device because there is no CONF_ADDRESS
parent = await cg.get_variable(config[CONF_I2C_ID])
cg.add(var.set_i2c_bus(parent))
# This is used as a final validation step so that modes have been fully transformed.
def pin_mode_check(pin_config, _):
if pin_config[CONF_MODE][CONF_INPUT] and pin_config[CONF_NUMBER] >= 8:
raise cv.Invalid("CH423 only supports input on pins 0-7")
if pin_config[CONF_MODE][CONF_OPEN_DRAIN] and pin_config[CONF_NUMBER] < 8:
raise cv.Invalid("CH423 only supports open drain output on pins 8-23")
ch423_id = pin_config[CONF_CH423]
pin_num = pin_config[CONF_NUMBER]
is_output = pin_config[CONF_MODE][CONF_OUTPUT]
is_open_drain = pin_config[CONF_MODE][CONF_OPEN_DRAIN]
# Track pin modes per CH423 instance in CORE.data
ch423_modes = CORE.data.setdefault(CONF_CH423, {})
if ch423_id not in ch423_modes:
ch423_modes[ch423_id] = {"gpio_output": None, "gpo_open_drain": None}
if pin_num < 8:
# GPIO pins (0-7): all must have same direction
if ch423_modes[ch423_id]["gpio_output"] is None:
ch423_modes[ch423_id]["gpio_output"] = is_output
elif ch423_modes[ch423_id]["gpio_output"] != is_output:
raise cv.Invalid(
"CH423 GPIO pins (0-7) must all be configured as input or all as output"
)
# GPO pins (8-23): all must have same open-drain setting
elif ch423_modes[ch423_id]["gpo_open_drain"] is None:
ch423_modes[ch423_id]["gpo_open_drain"] = is_open_drain
elif ch423_modes[ch423_id]["gpo_open_drain"] != is_open_drain:
raise cv.Invalid(
"CH423 GPO pins (8-23) must all be configured as push-pull or all as open-drain"
)
CH423_PIN_SCHEMA = pins.gpio_base_schema(
CH423GPIOPin,
cv.int_range(min=0, max=23),
modes=[CONF_INPUT, CONF_OUTPUT, CONF_OPEN_DRAIN],
).extend(
{
cv.Required(CONF_CH423): cv.use_id(CH423Component),
}
)
@pins.PIN_SCHEMA_REGISTRY.register(CONF_CH423, CH423_PIN_SCHEMA, pin_mode_check)
async def ch423_pin_to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
parent = await cg.get_variable(config[CONF_CH423])
cg.add(var.set_parent(parent))
num = config[CONF_NUMBER]
cg.add(var.set_pin(num))
cg.add(var.set_inverted(config[CONF_INVERTED]))
cg.add(var.set_flags(pins.gpio_flags_expr(config[CONF_MODE])))
return var

View File

@@ -1,148 +0,0 @@
#include "ch423.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
namespace esphome::ch423 {
static constexpr uint8_t CH423_REG_SYS = 0x24; // Set system parameters (0x48 >> 1)
static constexpr uint8_t CH423_SYS_IO_OE = 0x01; // IO output enable
static constexpr uint8_t CH423_SYS_OD_EN = 0x04; // Open drain enable for OC pins
static constexpr uint8_t CH423_REG_IO = 0x30; // Write/read IO7-IO0 (0x60 >> 1)
static constexpr uint8_t CH423_REG_IO_RD = 0x26; // Read IO7-IO0 (0x4D >> 1, rounded down)
static constexpr uint8_t CH423_REG_OCL = 0x22; // Write OC7-OC0 (0x44 >> 1)
static constexpr uint8_t CH423_REG_OCH = 0x23; // Write OC15-OC8 (0x46 >> 1)
static const char *const TAG = "ch423";
void CH423Component::setup() {
// set outputs before mode
this->write_outputs_();
// Set system parameters and check for errors
bool success = this->write_reg_(CH423_REG_SYS, this->sys_params_);
// Only read inputs if pins are configured for input (IO_OE not set)
if (success && !(this->sys_params_ & CH423_SYS_IO_OE)) {
success = this->read_inputs_();
}
if (!success) {
ESP_LOGE(TAG, "CH423 not detected");
this->mark_failed();
return;
}
ESP_LOGCONFIG(TAG, "Initialization complete. Warning: %d, Error: %d", this->status_has_warning(),
this->status_has_error());
}
void CH423Component::loop() {
// Clear all the previously read flags.
this->pin_read_flags_ = 0x00;
}
void CH423Component::dump_config() {
ESP_LOGCONFIG(TAG, "CH423:");
if (this->is_failed()) {
ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
}
}
void CH423Component::pin_mode(uint8_t pin, gpio::Flags flags) {
if (pin < 8) {
if (flags & gpio::FLAG_OUTPUT) {
this->sys_params_ |= CH423_SYS_IO_OE;
}
} else if (pin >= 8 && pin < 24) {
if (flags & gpio::FLAG_OPEN_DRAIN) {
this->sys_params_ |= CH423_SYS_OD_EN;
}
}
}
bool CH423Component::digital_read(uint8_t pin) {
if (this->pin_read_flags_ == 0 || this->pin_read_flags_ & (1 << pin)) {
// Read values on first access or in case it's being read again in the same loop
this->read_inputs_();
}
this->pin_read_flags_ |= (1 << pin);
return (this->input_bits_ & (1 << pin)) != 0;
}
void CH423Component::digital_write(uint8_t pin, bool value) {
if (value) {
this->output_bits_ |= (1 << pin);
} else {
this->output_bits_ &= ~(1 << pin);
}
this->write_outputs_();
}
bool CH423Component::read_inputs_() {
if (this->is_failed()) {
return false;
}
// reading inputs requires IO_OE to be 0
if (this->sys_params_ & CH423_SYS_IO_OE) {
return false;
}
uint8_t result = this->read_reg_(CH423_REG_IO_RD);
this->input_bits_ = result;
this->status_clear_warning();
return true;
}
// Write a register. Can't use the standard write_byte() method because there is no single pre-configured i2c address.
bool CH423Component::write_reg_(uint8_t reg, uint8_t value) {
auto err = this->bus_->write_readv(reg, &value, 1, nullptr, 0);
if (err != i2c::ERROR_OK) {
char buf[64];
ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("write failed for register 0x%X, error %d"), reg, err);
this->status_set_warning(buf);
return false;
}
this->status_clear_warning();
return true;
}
uint8_t CH423Component::read_reg_(uint8_t reg) {
uint8_t value;
auto err = this->bus_->write_readv(reg, nullptr, 0, &value, 1);
if (err != i2c::ERROR_OK) {
char buf[64];
ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("read failed for register 0x%X, error %d"), reg, err);
this->status_set_warning(buf);
return 0;
}
this->status_clear_warning();
return value;
}
bool CH423Component::write_outputs_() {
bool success = true;
// Write IO7-IO0
success &= this->write_reg_(CH423_REG_IO, static_cast<uint8_t>(this->output_bits_));
// Write OC7-OC0
success &= this->write_reg_(CH423_REG_OCL, static_cast<uint8_t>(this->output_bits_ >> 8));
// Write OC15-OC8
success &= this->write_reg_(CH423_REG_OCH, static_cast<uint8_t>(this->output_bits_ >> 16));
return success;
}
float CH423Component::get_setup_priority() const { return setup_priority::IO; }
// Run our loop() method very early in the loop, so that we cache read values
// before other components call our digital_read() method.
float CH423Component::get_loop_priority() const { return 9.0f; } // Just after WIFI
void CH423GPIOPin::pin_mode(gpio::Flags flags) { this->parent_->pin_mode(this->pin_, flags); }
bool CH423GPIOPin::digital_read() { return this->parent_->digital_read(this->pin_) ^ this->inverted_; }
void CH423GPIOPin::digital_write(bool value) { this->parent_->digital_write(this->pin_, value ^ this->inverted_); }
size_t CH423GPIOPin::dump_summary(char *buffer, size_t len) const {
return snprintf(buffer, len, "EXIO%u via CH423", this->pin_);
}
void CH423GPIOPin::set_flags(gpio::Flags flags) {
flags_ = flags;
this->parent_->pin_mode(this->pin_, flags);
}
} // namespace esphome::ch423
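For reference, the write_outputs_() split above packs one 24-bit state word into the three output registers. A minimal stand-alone sketch with a hypothetical pin state (the value is illustrative, not taken from the driver):

#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical 24-bit output state: bits 0-7 = IO0-IO7, 8-15 = OC0-OC7, 16-23 = OC8-OC15
  uint32_t output_bits = 0xAB12CD;
  uint8_t io = static_cast<uint8_t>(output_bits);         // -> CH423_REG_IO  (0xCD)
  uint8_t ocl = static_cast<uint8_t>(output_bits >> 8);   // -> CH423_REG_OCL (0x12)
  uint8_t och = static_cast<uint8_t>(output_bits >> 16);  // -> CH423_REG_OCH (0xAB)
  printf("IO=%02X OCL=%02X OCH=%02X\n", io, ocl, och);
  return 0;
}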

View File

@@ -1,67 +0,0 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/components/i2c/i2c.h"
namespace esphome::ch423 {
class CH423Component : public Component, public i2c::I2CDevice {
public:
CH423Component() = default;
/// Check i2c availability and setup masks
void setup() override;
/// Poll for input changes periodically
void loop() override;
/// Helper function to read the value of a pin.
bool digital_read(uint8_t pin);
/// Helper function to write the value of a pin.
void digital_write(uint8_t pin, bool value);
/// Helper function to set the pin mode of a pin.
void pin_mode(uint8_t pin, gpio::Flags flags);
float get_setup_priority() const override;
float get_loop_priority() const override;
void dump_config() override;
protected:
bool write_reg_(uint8_t reg, uint8_t value);
uint8_t read_reg_(uint8_t reg);
bool read_inputs_();
bool write_outputs_();
/// The mask to write as output state - 1 means HIGH, 0 means LOW
uint32_t output_bits_{0x00};
/// Flags to check if read previously during this loop
uint8_t pin_read_flags_{0x00};
/// Copy of last read values
uint8_t input_bits_{0x00};
/// System parameters
uint8_t sys_params_{0x00};
};
/// Helper class to expose a CH423 pin as a GPIO pin.
class CH423GPIOPin : public GPIOPin {
public:
void setup() override {}
void pin_mode(gpio::Flags flags) override;
bool digital_read() override;
void digital_write(bool value) override;
size_t dump_summary(char *buffer, size_t len) const override;
void set_parent(CH423Component *parent) { parent_ = parent; }
void set_pin(uint8_t pin) { pin_ = pin; }
void set_inverted(bool inverted) { inverted_ = inverted; }
void set_flags(gpio::Flags flags);
gpio::Flags get_flags() const override { return this->flags_; }
protected:
CH423Component *parent_{};
uint8_t pin_{};
bool inverted_{};
gpio::Flags flags_{};
};
} // namespace esphome::ch423
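The pin_read_flags_/input_bits_ pair documented above implements a per-loop input cache: one I2C transaction serves every digital_read() within the same loop iteration. A minimal sketch of the idea, with a stubbed bus read standing in for the real I2C transfer:

#include <cstdint>

struct InputCache {
  uint8_t read_flags = 0;              // pins already served this loop
  uint8_t inputs = 0;                  // last snapshot of IO7-IO0
  uint8_t read_bus() { return 0x5A; }  // hypothetical hardware read
  void new_loop() { read_flags = 0; }  // loop() clears the flags each iteration
  bool read(uint8_t pin) {
    // Refresh on the first access of a loop, or when the same pin is read twice
    if (read_flags == 0 || (read_flags & (1 << pin)))
      inputs = read_bus();
    read_flags |= (1 << pin);
    return (inputs & (1 << pin)) != 0;
  }
};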

View File

@@ -15,7 +15,7 @@ from esphome.const import (
CONF_UPDATE_INTERVAL,
SCHEDULER_DONT_RUN,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CoroPriority, coroutine_with_priority
IS_PLATFORM_COMPONENT = True
@@ -222,8 +222,3 @@ async def display_is_displaying_page_to_code(config, condition_id, template_arg,
async def to_code(config):
cg.add_global(display_ns.using)
cg.add_define("USE_DISPLAY")
if CORE.is_esp32:
# Re-enable ESP-IDF's LCD driver (excluded by default to save compile time)
from esphome.components.esp32 import include_builtin_idf_component
include_builtin_idf_component("esp_lcd")

View File

@@ -2,8 +2,7 @@ import esphome.codegen as cg
from esphome.components import i2c
from esphome.components.audio_dac import AudioDac
import esphome.config_validation as cv
from esphome.const import CONF_AUDIO_DAC, CONF_BITS_PER_SAMPLE, CONF_ID
import esphome.final_validate as fv
from esphome.const import CONF_ID
CODEOWNERS = ["@kbx81"]
DEPENDENCIES = ["i2c"]
@@ -22,29 +21,6 @@ CONFIG_SCHEMA = (
)
def _final_validate(config):
full_config = fv.full_config.get()
# Check all speaker configurations for ones that reference this es8156
speaker_configs = full_config.get("speaker", [])
for speaker_config in speaker_configs:
audio_dac_id = speaker_config.get(CONF_AUDIO_DAC)
if (
audio_dac_id is not None
and audio_dac_id == config[CONF_ID]
and (bits_per_sample := speaker_config.get(CONF_BITS_PER_SAMPLE))
is not None
and bits_per_sample > 24
):
raise cv.Invalid(
f"ES8156 does not support more than 24 bits per sample. "
f"The speaker referencing this audio_dac has bits_per_sample set to {bits_per_sample}."
)
FINAL_VALIDATE_SCHEMA = _final_validate
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)

View File

@@ -17,61 +17,24 @@ static const char *const TAG = "es8156";
}
void ES8156::setup() {
// REG02 MODE CONFIG 1: Enable software mode for I2C control of volume/mute
// Bit 2: SOFT_MODE_SEL=1 (software mode enabled)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG02_SCLK_MODE, 0x04));
// Analog system configuration (active-low power down bits, active-high enables)
// REG20 ANALOG SYSTEM: Configure analog signal path
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG20_ANALOG_SYS1, 0x2A));
// REG21 ANALOG SYSTEM: VSEL=0x1C (bias level ~120%), normal VREF ramp speed
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG21_ANALOG_SYS2, 0x3C));
// REG22 ANALOG SYSTEM: Line out mode (HPSW=0), OUT_MUTE=0 (not muted)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG22_ANALOG_SYS3, 0x00));
// REG24 ANALOG SYSTEM: Low power mode for VREFBUF, HPCOM, DACVRP; DAC normal power
// Bits 2:0 = 0x07: LPVREFBUF=1, LPHPCOM=1, LPDACVRP=1, LPDAC=0
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG24_ANALOG_LP, 0x07));
// REG23 ANALOG SYSTEM: Lowest bias (IBIAS_SW=0), VMIDLVL=VDDA/2, normal impedance
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG23_ANALOG_SYS4, 0x00));
// Timing and interface configuration
// REG0A/0B TIME CONTROL: Fast state machine transitions
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0A_TIME_CONTROL1, 0x01));
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0B_TIME_CONTROL2, 0x01));
// REG11 SDP INTERFACE CONFIG: Default I2S format (24-bit, I2S mode)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG11_DAC_SDP, 0x00));
// REG19 EQ CONTROL 1: EQ disabled (EQ_ON=0), EQ_BAND_NUM=2
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG19_EQ_CONTROL1, 0x20));
// REG0D P2S CONTROL: Parallel-to-serial converter settings
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0D_P2S_CONTROL, 0x14));
// REG09 MISC CONTROL 2: Default settings
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG09_MISC_CONTROL2, 0x00));
// REG18 MISC CONTROL 3: Stereo channel routing, no inversion
// Bits 5:4 CHN_CROSS: 0=L→L/R→R, 1=L to both, 2=R to both, 3=swap L/R
// Bits 3:2: LCH_INV/RCH_INV channel inversion
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG18_MISC_CONTROL3, 0x00));
// REG08 CLOCK OFF: Enable all internal clocks (0x3F = all clock gates open)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG08_CLOCK_ON_OFF, 0x3F));
// REG00 RESET CONTROL: Reset sequence
// First: RST_DIG=1 (assert digital reset)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x02));
// Then: CSM_ON=1 (enable chip state machine), RST_DIG=1
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x03));
// REG25 ANALOG SYSTEM: Power up analog blocks
// VMIDSEL=2 (normal VMID operation), PDN_ANA=0, ENREFR=0, ENHPCOM=0
// PDN_DACVREFGEN=0, PDN_VREFBUF=0, PDN_DAC=0 (all enabled)
ES8156_ERROR_FAILED(this->write_byte(ES8156_REG25_ANALOG_SYS5, 0x20));
}

View File

@@ -53,7 +53,6 @@ from .const import ( # noqa
KEY_BOARD,
KEY_COMPONENTS,
KEY_ESP32,
KEY_EXCLUDE_COMPONENTS,
KEY_EXTRA_BUILD_FILES,
KEY_FLASH_SIZE,
KEY_FULL_CERT_BUNDLE,
@@ -87,7 +86,6 @@ IS_TARGET_PLATFORM = True
CONF_ASSERTION_LEVEL = "assertion_level"
CONF_COMPILER_OPTIMIZATION = "compiler_optimization"
CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES = "enable_idf_experimental_features"
CONF_INCLUDE_BUILTIN_IDF_COMPONENTS = "include_builtin_idf_components"
CONF_ENABLE_LWIP_ASSERT = "enable_lwip_assert"
CONF_ENABLE_OTA_ROLLBACK = "enable_ota_rollback"
CONF_EXECUTE_FROM_PSRAM = "execute_from_psram"
@@ -116,36 +114,6 @@ COMPILER_OPTIMIZATIONS = {
"SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}
# ESP-IDF components excluded by default to reduce compile time.
# Components can be re-enabled by calling include_builtin_idf_component() in to_code().
#
# Cannot be excluded (dependencies of required components):
# - "console": espressif/mdns unconditionally depends on it
# - "sdmmc": driver -> esp_driver_sdmmc -> sdmmc dependency chain
DEFAULT_EXCLUDED_IDF_COMPONENTS = (
"cmock", # Unit testing mock framework - ESPHome doesn't use IDF's testing
"esp_adc", # ADC driver - only needed by adc component
"esp_driver_i2s", # I2S driver - only needed by i2s_audio component
"esp_driver_rmt", # RMT driver - only needed by remote_transmitter/receiver, neopixelbus
"esp_driver_touch_sens", # Touch sensor driver - only needed by esp32_touch
"esp_eth", # Ethernet driver - only needed by ethernet component
"esp_hid", # HID host/device support - ESPHome doesn't implement HID functionality
"esp_http_client", # HTTP client - only needed by http_request component
"esp_https_ota", # ESP-IDF HTTPS OTA - ESPHome has its own OTA implementation
"esp_https_server", # HTTPS server - ESPHome has its own web server
"esp_lcd", # LCD controller drivers - only needed by display component
"esp_local_ctrl", # Local control over HTTPS/BLE - ESPHome has native API
"espcoredump", # Core dump support - ESPHome has its own debug component
"fatfs", # FAT filesystem - ESPHome doesn't use filesystem storage
"mqtt", # ESP-IDF MQTT library - ESPHome has its own MQTT implementation
"perfmon", # Xtensa performance monitor - ESPHome has its own debug component
"protocomm", # Protocol communication for provisioning - unused by ESPHome
"spiffs", # SPIFFS filesystem - ESPHome doesn't use filesystem storage (IDF only)
"unity", # Unit testing framework - ESPHome doesn't use IDF's testing
"wear_levelling", # Flash wear levelling for fatfs - unused since fatfs unused
"wifi_provisioning", # WiFi provisioning - ESPHome uses its own improv implementation
)
# ESP32 (original) chip revision options
# Setting minimum revision to 3.0 or higher:
# - Reduces flash size by excluding workaround code for older chip bugs
@@ -235,9 +203,6 @@ def set_core_data(config):
)
CORE.data[KEY_ESP32][KEY_SDKCONFIG_OPTIONS] = {}
CORE.data[KEY_ESP32][KEY_COMPONENTS] = {}
# Initialize with default exclusions - components can call include_builtin_idf_component()
# to re-enable any they need
CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS] = set(DEFAULT_EXCLUDED_IDF_COMPONENTS)
CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] = cv.Version.parse(
config[CONF_FRAMEWORK][CONF_VERSION]
)
@@ -363,28 +328,6 @@ def add_idf_component(
}
def exclude_builtin_idf_component(name: str) -> None:
"""Exclude an ESP-IDF component from the build.
This reduces compile time by skipping components that are not needed.
The component will be passed to ESP-IDF's EXCLUDE_COMPONENTS cmake variable.
Note: Components that are dependencies of other required components
cannot be excluded - ESP-IDF will still build them.
"""
CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].add(name)
def include_builtin_idf_component(name: str) -> None:
"""Remove an ESP-IDF component from the exclusion list.
Call this from components that need an ESP-IDF component that is
excluded by default in DEFAULT_EXCLUDED_IDF_COMPONENTS. This ensures the
component will be built when needed.
"""
CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].discard(name)
def add_extra_script(stage: str, filename: str, path: Path):
"""Add an extra script to the project."""
key = f"{stage}:{filename}"
@@ -729,25 +672,11 @@ CONF_RINGBUF_IN_IRAM = "ringbuf_in_iram"
CONF_HEAP_IN_IRAM = "heap_in_iram"
CONF_LOOP_TASK_STACK_SIZE = "loop_task_stack_size"
CONF_USE_FULL_CERTIFICATE_BUNDLE = "use_full_certificate_bundle"
CONF_DISABLE_DEBUG_STUBS = "disable_debug_stubs"
CONF_DISABLE_OCD_AWARE = "disable_ocd_aware"
CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY = "disable_usb_serial_jtag_secondary"
CONF_DISABLE_DEV_NULL_VFS = "disable_dev_null_vfs"
CONF_DISABLE_MBEDTLS_PEER_CERT = "disable_mbedtls_peer_cert"
CONF_DISABLE_MBEDTLS_PKCS7 = "disable_mbedtls_pkcs7"
CONF_DISABLE_REGI2C_IN_IRAM = "disable_regi2c_in_iram"
CONF_DISABLE_FATFS = "disable_fatfs"
# VFS requirement tracking
# Components that need VFS features can call require_vfs_select() or require_vfs_dir()
KEY_VFS_SELECT_REQUIRED = "vfs_select_required"
KEY_VFS_DIR_REQUIRED = "vfs_dir_required"
# Feature requirement tracking - components can call require_* functions to re-enable
# These are stored in CORE.data[KEY_ESP32] dict
KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED = "usb_serial_jtag_secondary_required"
KEY_MBEDTLS_PEER_CERT_REQUIRED = "mbedtls_peer_cert_required"
KEY_MBEDTLS_PKCS7_REQUIRED = "mbedtls_pkcs7_required"
KEY_FATFS_REQUIRED = "fatfs_required"
def require_vfs_select() -> None:
@@ -780,43 +709,6 @@ def require_full_certificate_bundle() -> None:
CORE.data[KEY_ESP32][KEY_FULL_CERT_BUNDLE] = True
def require_usb_serial_jtag_secondary() -> None:
"""Mark that USB Serial/JTAG secondary console is required by a component.
Call this from components (e.g., logger) that need USB Serial/JTAG console output.
This prevents CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG from being disabled.
"""
CORE.data[KEY_ESP32][KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED] = True
def require_mbedtls_peer_cert() -> None:
"""Mark that mbedTLS peer certificate retention is required by a component.
Call this from components that need access to the peer certificate after
the TLS handshake is complete. This prevents CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE
from being disabled.
"""
CORE.data[KEY_ESP32][KEY_MBEDTLS_PEER_CERT_REQUIRED] = True
def require_mbedtls_pkcs7() -> None:
"""Mark that mbedTLS PKCS#7 support is required by a component.
Call this from components that need PKCS#7 certificate validation.
This prevents CONFIG_MBEDTLS_PKCS7_C from being disabled.
"""
CORE.data[KEY_ESP32][KEY_MBEDTLS_PKCS7_REQUIRED] = True
def require_fatfs() -> None:
"""Mark that FATFS support is required by a component.
Call this from components that use FATFS (e.g., SD card, storage components).
This prevents FATFS from being disabled when disable_fatfs is set.
"""
CORE.data[KEY_ESP32][KEY_FATFS_REQUIRED] = True
def _parse_idf_component(value: str) -> ConfigType:
"""Parse IDF component shorthand syntax like 'owner/component^version'"""
# Match operator followed by version-like string (digit or *)
@@ -901,19 +793,6 @@ FRAMEWORK_SCHEMA = cv.Schema(
cv.Optional(
CONF_USE_FULL_CERTIFICATE_BUNDLE, default=False
): cv.boolean,
cv.Optional(
CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, default=[]
): cv.ensure_list(cv.string_strict),
cv.Optional(CONF_DISABLE_DEBUG_STUBS, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_OCD_AWARE, default=True): cv.boolean,
cv.Optional(
CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY, default=True
): cv.boolean,
cv.Optional(CONF_DISABLE_DEV_NULL_VFS, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_MBEDTLS_PEER_CERT, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_MBEDTLS_PKCS7, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_REGI2C_IN_IRAM, default=True): cv.boolean,
cv.Optional(CONF_DISABLE_FATFS, default=True): cv.boolean,
}
),
cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
@@ -1103,19 +982,6 @@ def _configure_lwip_max_sockets(conf: dict) -> None:
add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)
@coroutine_with_priority(CoroPriority.FINAL)
async def _write_exclude_components() -> None:
"""Write EXCLUDE_COMPONENTS cmake arg after all components have registered exclusions."""
if KEY_ESP32 not in CORE.data:
return
excluded = CORE.data[KEY_ESP32].get(KEY_EXCLUDE_COMPONENTS)
if excluded:
exclude_list = ";".join(sorted(excluded))
cg.add_platformio_option(
"board_build.cmake_extra_args", f"-DEXCLUDE_COMPONENTS={exclude_list}"
)
@coroutine_with_priority(CoroPriority.FINAL)
async def _add_yaml_idf_components(components: list[ConfigType]):
"""Add IDF components from YAML config with final priority to override code-added components."""
@@ -1329,11 +1195,6 @@ async def to_code(config):
# Apply LWIP optimization settings
advanced = conf[CONF_ADVANCED]
# Re-include any IDF components the user explicitly requested
for component_name in advanced.get(CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, []):
include_builtin_idf_component(component_name)
# DHCP server: only disable if explicitly set to false
# WiFi component handles its own optimization when AP mode is not used
# When using Arduino with Ethernet, DHCP server functions must be available
@@ -1455,61 +1316,6 @@ async def to_code(config):
add_idf_sdkconfig_option(f"CONFIG_LOG_DEFAULT_LEVEL_{conf[CONF_LOG_LEVEL]}", True)
# Disable OpenOCD debug stubs to save code size
# These are used for on-chip debugging with OpenOCD/JTAG, rarely needed for ESPHome
if advanced[CONF_DISABLE_DEBUG_STUBS]:
add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_STUBS_ENABLE", False)
# Disable OCD-aware exception handlers
# When enabled, the panic handler detects JTAG debugger and halts instead of resetting
# Most ESPHome users don't use JTAG debugging
if advanced[CONF_DISABLE_OCD_AWARE]:
add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_OCDAWARE", False)
# Disable USB Serial/JTAG secondary console
# Components like logger can call require_usb_serial_jtag_secondary() to re-enable
if CORE.data[KEY_ESP32].get(KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED, False):
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG", True)
elif advanced[CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY]:
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_NONE", True)
# Disable /dev/null VFS initialization
# ESPHome doesn't typically need /dev/null
if advanced[CONF_DISABLE_DEV_NULL_VFS]:
add_idf_sdkconfig_option("CONFIG_VFS_INITIALIZE_DEV_NULL", False)
# Disable keeping peer certificate after TLS handshake
# Saves ~4KB heap per connection, but prevents certificate inspection after handshake
# Components that need it can call require_mbedtls_peer_cert()
if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PEER_CERT_REQUIRED, False):
add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", True)
elif advanced[CONF_DISABLE_MBEDTLS_PEER_CERT]:
add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", False)
# Disable PKCS#7 support in mbedTLS
# Only needed for specific certificate validation scenarios
# Components that need it can call require_mbedtls_pkcs7()
if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PKCS7_REQUIRED, False):
# Component called require_mbedtls_pkcs7() - enable regardless of user setting
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", True)
elif advanced[CONF_DISABLE_MBEDTLS_PKCS7]:
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", False)
# Disable regi2c control functions in IRAM
# Only needed if using analog peripherals (ADC, DAC, etc.) from ISRs while cache is disabled
if advanced[CONF_DISABLE_REGI2C_IN_IRAM]:
add_idf_sdkconfig_option("CONFIG_ESP_REGI2C_CTRL_FUNC_IN_IRAM", False)
# Disable FATFS support
# Components that need FATFS (SD card, etc.) can call require_fatfs()
if CORE.data[KEY_ESP32].get(KEY_FATFS_REQUIRED, False):
# Component called require_fatfs() - enable regardless of user setting
add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", False)
add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 2)
elif advanced[CONF_DISABLE_FATFS]:
add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", True)
add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 0)
for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
add_idf_sdkconfig_option(name, RawSdkconfigValue(value))
@@ -1518,11 +1324,6 @@ async def to_code(config):
if conf[CONF_COMPONENTS]:
CORE.add_job(_add_yaml_idf_components, conf[CONF_COMPONENTS])
# Write EXCLUDE_COMPONENTS at FINAL priority after all components have had
# a chance to call include_builtin_idf_component() to re-enable components they need.
# Default exclusions are added in set_core_data() during config validation.
CORE.add_job(_write_exclude_components)
APP_PARTITION_SIZES = {
"2MB": 0x0C0000, # 768 KB

View File

@@ -6,7 +6,6 @@ KEY_FLASH_SIZE = "flash_size"
KEY_VARIANT = "variant"
KEY_SDKCONFIG_OPTIONS = "sdkconfig_options"
KEY_COMPONENTS = "components"
KEY_EXCLUDE_COMPONENTS = "exclude_components"
KEY_REPO = "repo"
KEY_REF = "ref"
KEY_REFRESH = "refresh"

View File

@@ -5,7 +5,6 @@ from esphome import pins
import esphome.codegen as cg
from esphome.components import esp32, light
from esphome.components.const import CONF_USE_PSRAM
from esphome.components.esp32 import include_builtin_idf_component
import esphome.config_validation as cv
from esphome.const import (
CONF_CHIPSET,
@@ -130,9 +129,6 @@ CONFIG_SCHEMA = cv.All(
async def to_code(config):
# Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
include_builtin_idf_component("esp_driver_rmt")
var = cg.new_Pvariable(config[CONF_OUTPUT_ID])
await light.register_light(var, config)
await cg.register_component(var, config)

View File

@@ -6,7 +6,6 @@ from esphome.components.esp32 import (
VARIANT_ESP32S3,
get_esp32_variant,
gpio,
include_builtin_idf_component,
)
import esphome.config_validation as cv
from esphome.const import (
@@ -267,9 +266,6 @@ CONFIG_SCHEMA = cv.All(
async def to_code(config):
# Re-enable ESP-IDF's touch sensor driver (excluded by default to save compile time)
include_builtin_idf_component("esp_driver_touch_sens")
touch = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(touch, config)

View File

@@ -14,7 +14,6 @@ from esphome.components.esp32 import (
add_idf_component,
add_idf_sdkconfig_option,
get_esp32_variant,
include_builtin_idf_component,
)
from esphome.components.network import ip_address_literal
from esphome.components.spi import CONF_INTERFACE_INDEX, get_spi_interface
@@ -420,9 +419,6 @@ async def to_code(config):
# Also disable WiFi/BT coexistence since WiFi is disabled
add_idf_sdkconfig_option("CONFIG_SW_COEXIST_ENABLE", False)
# Re-enable ESP-IDF's Ethernet driver (excluded by default to save compile time)
include_builtin_idf_component("esp_eth")
if config[CONF_TYPE] == "LAN8670":
# Add LAN867x 10BASE-T1S PHY support component
add_idf_component(name="espressif/lan867x", ref="2.0.0")

View File

@@ -9,55 +9,29 @@ from esphome.const import (
CONF_VALUE,
)
from esphome.core import CoroPriority, coroutine_with_priority
from esphome.types import ConfigType
CODEOWNERS = ["@esphome/core"]
globals_ns = cg.esphome_ns.namespace("globals")
GlobalsComponent = globals_ns.class_("GlobalsComponent", cg.Component)
RestoringGlobalsComponent = globals_ns.class_(
"RestoringGlobalsComponent", cg.PollingComponent
)
RestoringGlobalsComponent = globals_ns.class_("RestoringGlobalsComponent", cg.Component)
RestoringGlobalStringComponent = globals_ns.class_(
"RestoringGlobalStringComponent", cg.PollingComponent
"RestoringGlobalStringComponent", cg.Component
)
GlobalVarSetAction = globals_ns.class_("GlobalVarSetAction", automation.Action)
CONF_MAX_RESTORE_DATA_LENGTH = "max_restore_data_length"
# Base schema fields shared by both variants
_BASE_SCHEMA = {
cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
cv.Required(CONF_TYPE): cv.string_strict,
cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
}
# Non-restoring globals: regular Component (no polling needed)
_NON_RESTORING_SCHEMA = cv.Schema(
{
**_BASE_SCHEMA,
cv.Optional(CONF_RESTORE_VALUE, default=False): cv.boolean,
}
).extend(cv.COMPONENT_SCHEMA)
# Restoring globals: PollingComponent with configurable update_interval
_RESTORING_SCHEMA = cv.Schema(
{
**_BASE_SCHEMA,
cv.Optional(CONF_RESTORE_VALUE, default=True): cv.boolean,
}
).extend(cv.polling_component_schema("1s"))
def _globals_schema(config: ConfigType) -> ConfigType:
"""Select schema based on restore_value setting."""
if config.get(CONF_RESTORE_VALUE, False):
return _RESTORING_SCHEMA(config)
return _NON_RESTORING_SCHEMA(config)
MULTI_CONF = True
CONFIG_SCHEMA = _globals_schema
CONFIG_SCHEMA = cv.Schema(
{
cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
cv.Required(CONF_TYPE): cv.string_strict,
cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
cv.Optional(CONF_RESTORE_VALUE, default=False): cv.boolean,
cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
}
).extend(cv.COMPONENT_SCHEMA)
# Run with low priority so that namespaces are registered first

View File

@@ -5,7 +5,8 @@
#include "esphome/core/helpers.h"
#include <cstring>
namespace esphome::globals {
namespace esphome {
namespace globals {
template<typename T> class GlobalsComponent : public Component {
public:
@@ -23,14 +24,13 @@ template<typename T> class GlobalsComponent : public Component {
T value_{};
};
template<typename T> class RestoringGlobalsComponent : public PollingComponent {
template<typename T> class RestoringGlobalsComponent : public Component {
public:
using value_type = T;
explicit RestoringGlobalsComponent() : PollingComponent(1000) {}
explicit RestoringGlobalsComponent(T initial_value) : PollingComponent(1000), value_(initial_value) {}
explicit RestoringGlobalsComponent() = default;
explicit RestoringGlobalsComponent(T initial_value) : value_(initial_value) {}
explicit RestoringGlobalsComponent(
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
: PollingComponent(1000) {
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
memcpy(this->value_, initial_value.data(), sizeof(T));
}
@@ -44,7 +44,7 @@ template<typename T> class RestoringGlobalsComponent : public PollingComponent {
float get_setup_priority() const override { return setup_priority::HARDWARE; }
void update() override { store_value_(); }
void loop() override { store_value_(); }
void on_shutdown() override { store_value_(); }
@@ -66,14 +66,13 @@ template<typename T> class RestoringGlobalsComponent : public PollingComponent {
};
// Use with string or subclasses of strings
template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public PollingComponent {
template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public Component {
public:
using value_type = T;
explicit RestoringGlobalStringComponent() : PollingComponent(1000) {}
explicit RestoringGlobalStringComponent(T initial_value) : PollingComponent(1000) { this->value_ = initial_value; }
explicit RestoringGlobalStringComponent() = default;
explicit RestoringGlobalStringComponent(T initial_value) { this->value_ = initial_value; }
explicit RestoringGlobalStringComponent(
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
: PollingComponent(1000) {
std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
memcpy(this->value_, initial_value.data(), sizeof(T));
}
@@ -91,7 +90,7 @@ template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public P
float get_setup_priority() const override { return setup_priority::HARDWARE; }
void update() override { store_value_(); }
void loop() override { store_value_(); }
void on_shutdown() override { store_value_(); }
@@ -145,4 +144,5 @@ template<typename T> T &id(GlobalsComponent<T> *value) { return value->value();
template<typename T> T &id(RestoringGlobalsComponent<T> *value) { return value->value(); }
template<typename T, uint8_t SZ> T &id(RestoringGlobalStringComponent<T, SZ> *value) { return value->value(); }
} // namespace esphome::globals
} // namespace globals
} // namespace esphome

View File

@@ -155,9 +155,6 @@ async def to_code(config):
cg.add(var.set_watchdog_timeout(timeout_ms))
if CORE.is_esp32:
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
esp32.include_builtin_idf_component("esp_http_client")
cg.add(var.set_buffer_size_rx(config[CONF_BUFFER_SIZE_RX]))
cg.add(var.set_buffer_size_tx(config[CONF_BUFFER_SIZE_TX]))
cg.add(var.set_verify_ssl(config[CONF_VERIFY_SSL]))

View File

@@ -131,10 +131,6 @@ std::shared_ptr<HttpContainer> HttpRequestArduino::perform(const std::string &ur
}
}
// HTTPClient::getSize() returns -1 for chunked transfer encoding (no Content-Length).
// When cast to size_t, -1 becomes SIZE_MAX (4294967295 on 32-bit).
// The read() method handles this: bytes_read_ can never reach SIZE_MAX, so the
// early return check (bytes_read_ >= content_length) will never trigger.
int content_length = container->client_.getSize();
ESP_LOGD(TAG, "Content-Length: %d", content_length);
container->content_length = (size_t) content_length;
@@ -171,23 +167,17 @@ int HttpContainerArduino::read(uint8_t *buf, size_t max_len) {
}
int available_data = stream_ptr->available();
// For chunked transfer encoding, HTTPClient::getSize() returns -1, which becomes SIZE_MAX when
// cast to size_t. SIZE_MAX - bytes_read_ is still huge, so it won't limit the read.
size_t remaining = (this->content_length > 0) ? (this->content_length - this->bytes_read_) : max_len;
int bufsize = std::min(max_len, std::min(remaining, (size_t) available_data));
int bufsize = std::min(max_len, std::min(this->content_length - this->bytes_read_, (size_t) available_data));
if (bufsize == 0) {
this->duration_ms += (millis() - start);
// Check if we've read all expected content (only valid when content_length is known and not SIZE_MAX)
// For chunked encoding (content_length == SIZE_MAX), we can't use this check
if (this->content_length > 0 && this->bytes_read_ >= this->content_length) {
// Check if we've read all expected content
if (this->bytes_read_ >= this->content_length) {
return 0; // All content read successfully
}
// No data available - check if connection is still open
// For chunked encoding, !connected() after reading means EOF (all chunks received)
// For known content_length with bytes_read_ < content_length, it means connection dropped
if (!stream_ptr->connected()) {
return HTTP_ERROR_CONNECTION_CLOSED; // Connection closed or EOF for chunked
return HTTP_ERROR_CONNECTION_CLOSED; // Connection closed prematurely
}
return 0; // No data yet, caller should retry
}
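The -1 to SIZE_MAX conversion that the chunked-encoding comments rely on is easy to demonstrate in isolation (stand-alone sketch, values illustrative):

#include <cstdint>
#include <cstdio>

int main() {
  int content_length = -1;                   // what HTTPClient::getSize() reports for chunked bodies
  size_t as_size = (size_t) content_length;  // wraps to SIZE_MAX (4294967295 on 32-bit targets)
  size_t bytes_read = 4096;
  size_t remaining = as_size - bytes_read;   // still enormous, so it never caps a read
  printf("as_size=%zu remaining=%zu\n", as_size, remaining);
  return 0;
}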

View File

@@ -157,8 +157,6 @@ std::shared_ptr<HttpContainer> HttpRequestIDF::perform(const std::string &url, c
}
container->feed_wdt();
// esp_http_client_fetch_headers() returns 0 for chunked transfer encoding (no Content-Length header).
// The read() method handles content_length == 0 specially to support chunked responses.
container->content_length = esp_http_client_fetch_headers(client);
container->feed_wdt();
container->status_code = esp_http_client_get_status_code(client);
@@ -227,22 +225,14 @@ std::shared_ptr<HttpContainer> HttpRequestIDF::perform(const std::string &url, c
//
// We normalize to HttpContainer::read() contract:
// > 0: bytes read
// 0: all content read (only returned when content_length is known and fully read)
// 0: no data yet / all content read (caller should check bytes_read vs content_length)
// < 0: error/connection closed
//
// Note on chunked transfer encoding:
// esp_http_client_fetch_headers() returns 0 for chunked responses (no Content-Length header).
// We handle this by skipping the content_length check when content_length is 0,
// allowing esp_http_client_read() to handle chunked decoding internally and signal EOF
// by returning 0.
int HttpContainerIDF::read(uint8_t *buf, size_t max_len) {
const uint32_t start = millis();
watchdog::WatchdogManager wdm(this->parent_->get_watchdog_timeout());
// Check if we've already read all expected content
// Skip this check when content_length is 0 (chunked transfer encoding or unknown length)
// For chunked responses, esp_http_client_read() will return 0 when all data is received
if (this->content_length > 0 && this->bytes_read_ >= this->content_length) {
if (this->bytes_read_ >= this->content_length) {
return 0; // All content read successfully
}
@@ -257,13 +247,7 @@ int HttpContainerIDF::read(uint8_t *buf, size_t max_len) {
return read_len_or_error;
}
// esp_http_client_read() returns 0 in two cases:
// 1. Known content_length: connection closed before all data received (error)
// 2. Chunked encoding (content_length == 0): end of stream reached (EOF)
// For case 1, returning HTTP_ERROR_CONNECTION_CLOSED is correct.
// For case 2, 0 indicates that all chunked data has already been delivered
// in previous successful read() calls, so treating this as a closed
// connection does not cause any loss of response data.
// Connection closed by server before all content received
if (read_len_or_error == 0) {
return HTTP_ERROR_CONNECTION_CLOSED;
}
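Condensed, the chunked-aware normalization the comments describe fits in a few lines. A sketch under stated assumptions: content_length == 0 means chunked/unknown length, -1 stands in for HTTP_ERROR_CONNECTION_CLOSED, and raw is what esp_http_client_read() returned. This is not the component's actual method.

#include <cstddef>

int normalize_read(size_t content_length, size_t bytes_read, int raw) {
  if (content_length > 0 && bytes_read >= content_length)
    return 0;   // all known content already delivered
  if (raw == 0)
    return -1;  // closed early for known lengths, or chunked EOF (no data lost)
  return raw;   // bytes read, or a negative transport error
}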

View File

@@ -1,11 +1,6 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components.esp32 import (
add_idf_sdkconfig_option,
get_esp32_variant,
include_builtin_idf_component,
)
from esphome.components.esp32.const import (
VARIANT_ESP32,
VARIANT_ESP32C3,
VARIANT_ESP32C5,
@@ -15,6 +10,8 @@ from esphome.components.esp32.const import (
VARIANT_ESP32P4,
VARIANT_ESP32S2,
VARIANT_ESP32S3,
add_idf_sdkconfig_option,
get_esp32_variant,
)
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_CHANNEL, CONF_ID, CONF_SAMPLE_RATE
@@ -275,10 +272,6 @@ FINAL_VALIDATE_SCHEMA = _final_validate
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
# Re-enable ESP-IDF's I2S driver (excluded by default to save compile time)
include_builtin_idf_component("esp_driver_i2s")
if use_legacy():
cg.add_define("USE_I2S_LEGACY")

View File

@@ -267,16 +267,26 @@ bool ImprovSerialComponent::parse_improv_payload_(improv::ImprovCommand &command
for (auto &scan : results) {
if (scan.get_is_hidden())
continue;
const std::string &ssid = scan.get_ssid();
if (std::find(networks.begin(), networks.end(), ssid) != networks.end())
const char *ssid_cstr = scan.get_ssid().c_str();
// Check if we've already sent this SSID
bool duplicate = false;
for (const auto &seen : networks) {
if (strcmp(seen.c_str(), ssid_cstr) == 0) {
duplicate = true;
break;
}
}
if (duplicate)
continue;
// Only allocate std::string after confirming it's not a duplicate
std::string ssid(ssid_cstr);
// Send each SSID separately to avoid overflowing the buffer
char rssi_buf[5]; // int8_t: -128 to 127, max 4 chars + null
*int8_to_str(rssi_buf, scan.get_rssi()) = '\0';
std::vector<uint8_t> data =
improv::build_rpc_response(improv::GET_WIFI_NETWORKS, {ssid, rssi_buf, YESNO(scan.get_with_auth())}, false);
this->send_response_(data);
networks.push_back(ssid);
networks.push_back(std::move(ssid));
}
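The allocate-late check above generalizes: compare against the already-stored names first, so duplicates cost no heap allocation at all. A minimal sketch:

#include <cstring>
#include <string>
#include <vector>

bool already_sent(const std::vector<std::string> &networks, const char *ssid) {
  for (const auto &seen : networks) {
    if (strcmp(seen.c_str(), ssid) == 0)
      return true;  // duplicate: skip without constructing a std::string
  }
  return false;
}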
// Send empty response to signify the end of the list.
std::vector<uint8_t> data =

View File

@@ -15,7 +15,7 @@ static const char *const TAG = "json";
static SpiRamAllocator global_json_allocator;
#endif
SerializationBuffer<> build_json(const json_build_t &f) {
std::string build_json(const json_build_t &f) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
JsonBuilder builder;
JsonObject root = builder.root();
@@ -61,62 +61,14 @@ JsonDocument parse_json(const uint8_t *data, size_t len) {
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
}
SerializationBuffer<> JsonBuilder::serialize() {
// ===========================================================================================
// CRITICAL: NRVO (Named Return Value Optimization) - DO NOT REFACTOR WITHOUT UNDERSTANDING
// ===========================================================================================
//
// This function is carefully structured to enable NRVO. The compiler constructs `result`
// directly in the caller's stack frame, eliminating the move constructor call entirely.
//
// WITHOUT NRVO: Each return would trigger SerializationBuffer's move constructor, which
// must memcpy up to 768 bytes of stack buffer content. This happens on EVERY JSON
// serialization (sensor updates, web server responses, MQTT publishes, etc.).
//
// WITH NRVO: Zero memcpy, zero move constructor overhead. The buffer lives directly
// where the caller needs it.
//
// Requirements for NRVO to work:
// 1. Single named variable (`result`) returned from ALL paths
// 2. All paths must return the SAME variable (not different variables)
// 3. No std::move() on the return statement
//
// If you must modify this function:
// - Keep a single `result` variable declared at the top
// - All code paths must return `result` (not a different variable)
// - Verify NRVO still works by checking the disassembly for move constructor calls
// - Test: objdump -d -C firmware.elf | grep "SerializationBuffer.*SerializationBuffer"
// Should show only destructor, NOT move constructor
//
// Why we avoid measureJson(): It instantiates DummyWriter templates adding ~1KB flash.
// Instead, try stack buffer first. 768 bytes covers 99.9% of JSON payloads (sensors ~200B,
// lights ~170B, climate ~700B). Only entities with 40+ options exceed this.
//
// ===========================================================================================
constexpr size_t buf_size = SerializationBuffer<>::BUFFER_SIZE;
SerializationBuffer<> result(buf_size - 1); // Max content size (reserve 1 for null)
std::string JsonBuilder::serialize() {
if (doc_.overflowed()) {
ESP_LOGE(TAG, "JSON document overflow");
auto *buf = result.data_writable_();
buf[0] = '{';
buf[1] = '}';
buf[2] = '\0';
result.set_size_(2);
return result;
return "{}";
}
size_t size = serializeJson(doc_, result.data_writable_(), buf_size);
if (size < buf_size) {
// Fits in stack buffer - update size to actual length
result.set_size_(size);
return result;
}
// Needs heap allocation - reallocate and serialize again with exact size
result.reallocate_heap_(size);
serializeJson(doc_, result.data_writable_(), size + 1);
return result;
std::string output;
serializeJson(doc_, output);
return output;
}
} // namespace json
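The NRVO property that the comment block above insists on can be verified with a toy type whose move constructor announces itself; if "moved" ever prints, the optimization was lost. (NRVO is a permitted and near-universal optimization here, not one the standard guarantees.) Stand-alone sketch:

#include <cstdio>

struct Buffer {
  char data[768];
  Buffer() = default;
  Buffer(Buffer &&) noexcept { puts("moved"); }  // must never run if NRVO applies
};

Buffer make() {
  Buffer result;  // single named variable, returned from every path...
  return result;  // ...with no std::move(), so it can be built in the caller's frame
}

int main() {
  Buffer b = make();  // prints nothing when NRVO kicks in
  return 0;
}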

View File

@@ -1,7 +1,5 @@
#pragma once
#include <cstring>
#include <string>
#include <vector>
#include "esphome/core/defines.h"
@@ -16,108 +14,6 @@
namespace esphome {
namespace json {
/// Buffer for JSON serialization that uses stack allocation for small payloads.
/// Template parameter STACK_SIZE specifies the stack buffer size (default 768 bytes).
/// Supports move semantics for efficient return-by-value.
template<size_t STACK_SIZE = 768> class SerializationBuffer {
public:
static constexpr size_t BUFFER_SIZE = STACK_SIZE; ///< Stack buffer size for this instantiation
/// Construct with known size (typically from measureJson)
explicit SerializationBuffer(size_t size) : size_(size) {
if (size + 1 <= STACK_SIZE) {
buffer_ = stack_buffer_;
} else {
heap_buffer_ = new char[size + 1];
buffer_ = heap_buffer_;
}
buffer_[0] = '\0';
}
~SerializationBuffer() { delete[] heap_buffer_; }
// Move constructor - works with same template instantiation
SerializationBuffer(SerializationBuffer &&other) noexcept : heap_buffer_(other.heap_buffer_), size_(other.size_) {
if (other.buffer_ == other.stack_buffer_) {
// Stack buffer - must copy content
std::memcpy(stack_buffer_, other.stack_buffer_, size_ + 1);
buffer_ = stack_buffer_;
} else {
// Heap buffer - steal ownership
buffer_ = heap_buffer_;
other.heap_buffer_ = nullptr;
}
// Leave moved-from object in valid empty state
other.stack_buffer_[0] = '\0';
other.buffer_ = other.stack_buffer_;
other.size_ = 0;
}
// Move assignment
SerializationBuffer &operator=(SerializationBuffer &&other) noexcept {
if (this != &other) {
delete[] heap_buffer_;
heap_buffer_ = other.heap_buffer_;
size_ = other.size_;
if (other.buffer_ == other.stack_buffer_) {
std::memcpy(stack_buffer_, other.stack_buffer_, size_ + 1);
buffer_ = stack_buffer_;
} else {
buffer_ = heap_buffer_;
other.heap_buffer_ = nullptr;
}
// Leave moved-from object in valid empty state
other.stack_buffer_[0] = '\0';
other.buffer_ = other.stack_buffer_;
other.size_ = 0;
}
return *this;
}
// Delete copy operations
SerializationBuffer(const SerializationBuffer &) = delete;
SerializationBuffer &operator=(const SerializationBuffer &) = delete;
/// Get null-terminated C string
const char *c_str() const { return buffer_; }
/// Get data pointer
const char *data() const { return buffer_; }
/// Get string length (excluding null terminator)
size_t size() const { return size_; }
/// Implicit conversion to std::string for backward compatibility
/// WARNING: This allocates a new std::string on the heap. Prefer using
/// c_str() or data()/size() directly when possible to avoid allocation.
operator std::string() const { return std::string(buffer_, size_); } // NOLINT(google-explicit-constructor)
private:
friend class JsonBuilder; ///< Allows JsonBuilder::serialize() to call private methods
/// Get writable buffer (for serialization)
char *data_writable_() { return buffer_; }
/// Set actual size after serialization (must not exceed allocated size)
/// Also ensures null termination for c_str() safety
void set_size_(size_t size) {
size_ = size;
buffer_[size] = '\0';
}
/// Reallocate to heap buffer with new size (for when stack buffer is too small)
/// This invalidates any previous buffer content. Used by JsonBuilder::serialize().
void reallocate_heap_(size_t size) {
delete[] heap_buffer_;
heap_buffer_ = new char[size + 1];
buffer_ = heap_buffer_;
size_ = size;
buffer_[0] = '\0';
}
char stack_buffer_[STACK_SIZE];
char *heap_buffer_{nullptr};
char *buffer_;
size_t size_;
};
#ifdef USE_PSRAM
// Build an allocator for the JSON Library using the RAMAllocator class
// This is only compiled when PSRAM is enabled
@@ -150,8 +46,7 @@ using json_parse_t = std::function<bool(JsonObject)>;
using json_build_t = std::function<void(JsonObject)>;
/// Build a JSON string with the provided json build function.
/// Returns SerializationBuffer for stack-first allocation; implicitly converts to std::string.
SerializationBuffer<> build_json(const json_build_t &f);
std::string build_json(const json_build_t &f);
/// Parse a JSON string and run the provided json parse function if it's valid.
bool parse_json(const std::string &data, const json_parse_t &f);
@@ -174,9 +69,7 @@ class JsonBuilder {
return root_;
}
/// Serialize the JSON document to a SerializationBuffer (stack-first allocation)
/// Uses 768-byte stack buffer by default, falls back to heap for larger JSON
SerializationBuffer<> serialize();
std::string serialize();
private:
#ifdef USE_PSRAM

View File

@@ -451,7 +451,7 @@ void LD2450Component::handle_periodic_data_() {
int16_t ty = 0;
int16_t td = 0;
int16_t ts = 0;
float angle = 0;
int16_t angle = 0;
uint8_t index = 0;
Direction direction{DIRECTION_UNDEFINED};
bool is_moving = false;

View File

@@ -143,7 +143,6 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
],
icon=ICON_FORMAT_TEXT_ROTATION_ANGLE_UP,
unit_of_measurement=UNIT_DEGREES,
accuracy_decimals=1,
),
cv.Optional(CONF_DISTANCE): sensor.sensor_schema(
device_class=DEVICE_CLASS_DISTANCE,

View File

@@ -12,10 +12,6 @@ namespace esphome::mdns {
static const char *const TAG = "mdns";
static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_SERVICE_COUNT> &services) {
#ifdef USE_OPENTHREAD
// OpenThread handles service registration via SRP client
// Services are compiled by MDNSComponent::compile_records_() and consumed by OpenThreadSrpComponent
#else
esp_err_t err = mdns_init();
if (err != ESP_OK) {
ESP_LOGW(TAG, "Init failed: %s", esp_err_to_name(err));
@@ -45,16 +41,13 @@ static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_S
ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
}
}
#endif
}
void MDNSComponent::setup() { this->setup_buffers_and_register_(register_esp32); }
void MDNSComponent::on_shutdown() {
#ifndef USE_OPENTHREAD
mdns_free();
delay(40); // Allow the mdns packets announcing service removal to be sent
#endif
}
} // namespace esphome::mdns

View File

@@ -4,10 +4,7 @@ from esphome import automation
from esphome.automation import Condition
import esphome.codegen as cg
from esphome.components import logger, socket
from esphome.components.esp32 import (
add_idf_sdkconfig_option,
include_builtin_idf_component,
)
from esphome.components.esp32 import add_idf_sdkconfig_option
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
@@ -363,8 +360,6 @@ async def to_code(config):
# This enables low-latency MQTT event processing instead of waiting for select() timeout
if CORE.is_esp32:
socket.require_wake_loop_threadsafe()
# Re-enable ESP-IDF's mqtt component (excluded by default to save compile time)
include_builtin_idf_component("mqtt")
cg.add_define("USE_MQTT")
cg.add_global(mqtt_ns.using)

View File

@@ -564,8 +564,8 @@ bool MQTTClientComponent::publish(const char *topic, const char *payload, size_t
}
bool MQTTClientComponent::publish_json(const char *topic, const json::json_build_t &f, uint8_t qos, bool retain) {
auto message = json::build_json(f);
return this->publish(topic, message.c_str(), message.size(), qos, retain);
std::string message = json::build_json(f);
return this->publish(topic, message.c_str(), message.length(), qos, retain);
}
void MQTTClientComponent::enable() {

View File

@@ -1,12 +1,7 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import light
from esphome.components.esp32 import (
VARIANT_ESP32C3,
VARIANT_ESP32S3,
get_esp32_variant,
include_builtin_idf_component,
)
from esphome.components.esp32 import VARIANT_ESP32C3, VARIANT_ESP32S3, get_esp32_variant
import esphome.config_validation as cv
from esphome.const import (
CONF_CHANNEL,
@@ -210,10 +205,6 @@ async def to_code(config):
has_white = "W" in config[CONF_TYPE]
method = config[CONF_METHOD]
# Re-enable ESP-IDF's RMT driver if using RMT method (excluded by default)
if CORE.is_esp32 and method[CONF_TYPE] == METHOD_ESP32_RMT:
include_builtin_idf_component("esp_driver_rmt")
method_template = METHODS[method[CONF_TYPE]].to_code(
method, config[CONF_VARIANT], config[CONF_INVERT]
)

View File

@@ -177,8 +177,6 @@ async def to_code(config):
cg.add_define("USE_NEXTION_TFT_UPLOAD")
cg.add(var.set_tft_url(config[CONF_TFT_URL]))
if CORE.is_esp32:
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
esp32.include_builtin_idf_component("esp_http_client")
esp32.add_idf_sdkconfig_option("CONFIG_ESP_TLS_INSECURE", True)
esp32.add_idf_sdkconfig_option(
"CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY", True

View File

@@ -1,6 +1,6 @@
from esphome import automation
import esphome.codegen as cg
from esphome.components import mqtt, web_server, zigbee
from esphome.components import mqtt, web_server
import esphome.config_validation as cv
from esphome.const import (
CONF_ABOVE,
@@ -189,7 +189,6 @@ validate_unit_of_measurement = cv.string_strict
_NUMBER_SCHEMA = (
cv.ENTITY_BASE_SCHEMA.extend(web_server.WEBSERVER_SORTING_SCHEMA)
.extend(cv.MQTT_COMMAND_COMPONENT_SCHEMA)
.extend(zigbee.NUMBER_SCHEMA)
.extend(
{
cv.OnlyWith(CONF_MQTT_ID, "mqtt"): cv.declare_id(mqtt.MQTTNumberComponent),
@@ -215,7 +214,6 @@ _NUMBER_SCHEMA = (
_NUMBER_SCHEMA.add_extra(entity_duplicate_validator("number"))
_NUMBER_SCHEMA.add_extra(zigbee.validate_number)
def number_schema(
@@ -279,8 +277,6 @@ async def setup_number_core_(
if web_server_config := config.get(CONF_WEB_SERVER):
await web_server.add_entity_config(var, web_server_config)
await zigbee.setup_number(var, config, min_value, max_value, step)
async def register_number(
var, config, *, min_value: float, max_value: float, step: float

View File

@@ -170,9 +170,6 @@ CONFIG_SCHEMA = remote_base.validate_triggers(
async def to_code(config):
pin = await cg.gpio_pin_expression(config[CONF_PIN])
if CORE.is_esp32:
# Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
esp32.include_builtin_idf_component("esp_driver_rmt")
var = cg.new_Pvariable(config[CONF_ID], pin)
cg.add(var.set_rmt_symbols(config[CONF_RMT_SYMBOLS]))
cg.add(var.set_receive_symbols(config[CONF_RECEIVE_SYMBOLS]))

View File

@@ -112,9 +112,6 @@ async def digital_write_action_to_code(config, action_id, template_arg, args):
async def to_code(config):
pin = await cg.gpio_pin_expression(config[CONF_PIN])
if CORE.is_esp32:
# Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
esp32.include_builtin_idf_component("esp_driver_rmt")
var = cg.new_Pvariable(config[CONF_ID], pin)
cg.add(var.set_rmt_symbols(config[CONF_RMT_SYMBOLS]))
cg.add(var.set_non_blocking(config[CONF_NON_BLOCKING]))

View File

@@ -27,61 +27,46 @@ void RuntimeStatsCollector::record_component_time(Component *component, uint32_t
}
void RuntimeStatsCollector::log_stats_() {
// First pass: count active components
size_t count = 0;
for (const auto &it : this->component_stats_) {
if (it.second.get_period_count() > 0) {
count++;
}
}
ESP_LOGI(TAG,
"Component Runtime Statistics\n"
" Period stats (last %" PRIu32 "ms): %zu active components",
this->log_interval_, count);
" Period stats (last %" PRIu32 "ms):",
this->log_interval_);
if (count == 0) {
return;
}
// First collect stats we want to display
std::vector<ComponentStatPair> stats_to_display;
// Stack buffer sized to actual active count (up to 256 components), heap fallback for larger
SmallBufferWithHeapFallback<256, Component *> buffer(count);
Component **sorted = buffer.get();
// Second pass: fill buffer with active components
size_t idx = 0;
for (const auto &it : this->component_stats_) {
if (it.second.get_period_count() > 0) {
sorted[idx++] = it.first;
Component *component = it.first;
const ComponentRuntimeStats &stats = it.second;
if (stats.get_period_count() > 0) {
ComponentStatPair pair = {component, &stats};
stats_to_display.push_back(pair);
}
}
// Sort by period runtime (descending)
std::sort(sorted, sorted + count, [this](Component *a, Component *b) {
return this->component_stats_[a].get_period_time_ms() > this->component_stats_[b].get_period_time_ms();
});
std::sort(stats_to_display.begin(), stats_to_display.end(), std::greater<ComponentStatPair>());
// Log top components by period runtime
for (size_t i = 0; i < count; i++) {
const auto &stats = this->component_stats_[sorted[i]];
for (const auto &it : stats_to_display) {
ESP_LOGI(TAG, " %s: count=%" PRIu32 ", avg=%.2fms, max=%" PRIu32 "ms, total=%" PRIu32 "ms",
LOG_STR_ARG(sorted[i]->get_component_log_str()), stats.get_period_count(), stats.get_period_avg_time_ms(),
stats.get_period_max_time_ms(), stats.get_period_time_ms());
LOG_STR_ARG(it.component->get_component_log_str()), it.stats->get_period_count(),
it.stats->get_period_avg_time_ms(), it.stats->get_period_max_time_ms(), it.stats->get_period_time_ms());
}
// Log total stats since boot (only for active components - idle ones haven't changed)
ESP_LOGI(TAG, " Total stats (since boot): %zu active components", count);
// Log total stats since boot
ESP_LOGI(TAG, " Total stats (since boot):");
// Re-sort by total runtime for all-time stats
std::sort(sorted, sorted + count, [this](Component *a, Component *b) {
return this->component_stats_[a].get_total_time_ms() > this->component_stats_[b].get_total_time_ms();
});
std::sort(stats_to_display.begin(), stats_to_display.end(),
[](const ComponentStatPair &a, const ComponentStatPair &b) {
return a.stats->get_total_time_ms() > b.stats->get_total_time_ms();
});
for (size_t i = 0; i < count; i++) {
const auto &stats = this->component_stats_[sorted[i]];
for (const auto &it : stats_to_display) {
ESP_LOGI(TAG, " %s: count=%" PRIu32 ", avg=%.2fms, max=%" PRIu32 "ms, total=%" PRIu32 "ms",
LOG_STR_ARG(sorted[i]->get_component_log_str()), stats.get_total_count(), stats.get_total_avg_time_ms(),
stats.get_total_max_time_ms(), stats.get_total_time_ms());
LOG_STR_ARG(it.component->get_component_log_str()), it.stats->get_total_count(),
it.stats->get_total_avg_time_ms(), it.stats->get_total_max_time_ms(), it.stats->get_total_time_ms());
}
}
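The two sorts above lean on ComponentStatPair's operator> (via std::greater) for the period ranking, then a lambda for the total ranking; the same pattern in miniature:

#include <algorithm>
#include <cstdio>
#include <functional>
#include <vector>

struct Pair {
  int period_ms, total_ms;
  bool operator>(const Pair &o) const { return period_ms > o.period_ms; }
};

int main() {
  std::vector<Pair> v{{5, 900}, {12, 100}, {7, 400}};
  std::sort(v.begin(), v.end(), std::greater<Pair>());  // descending by period, via operator>
  std::sort(v.begin(), v.end(),
            [](const Pair &a, const Pair &b) { return a.total_ms > b.total_ms; });
  printf("top total=%d\n", v.front().total_ms);  // 900
  return 0;
}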

View File

@@ -5,6 +5,7 @@
#ifdef USE_RUNTIME_STATS
#include <map>
#include <vector>
#include <cstdint>
#include <cstring>
#include "esphome/core/helpers.h"
@@ -76,6 +77,17 @@ class ComponentRuntimeStats {
uint32_t total_max_time_ms_;
};
// For sorting components by run time
struct ComponentStatPair {
Component *component;
const ComponentRuntimeStats *stats;
bool operator>(const ComponentStatPair &other) const {
// Sort by period time as that's what we're displaying in the logs
return stats->get_period_time_ms() > other.stats->get_period_time_ms();
}
};
class RuntimeStatsCollector {
public:
RuntimeStatsCollector();

View File

@@ -2,7 +2,7 @@ from esphome import automation
import esphome.codegen as cg
from esphome.components import audio, audio_dac
import esphome.config_validation as cv
from esphome.const import CONF_AUDIO_DAC, CONF_DATA, CONF_ID, CONF_VOLUME
from esphome.const import CONF_DATA, CONF_ID, CONF_VOLUME
from esphome.core import CORE, ID
from esphome.coroutine import CoroPriority, coroutine_with_priority
@@ -11,6 +11,8 @@ CODEOWNERS = ["@jesserockz", "@kahrendt"]
IS_PLATFORM_COMPONENT = True
CONF_AUDIO_DAC = "audio_dac"
speaker_ns = cg.esphome_ns.namespace("speaker")
Speaker = speaker_ns.class_("Speaker")

View File

@@ -27,9 +27,6 @@ void RealTimeClock::dump_config() {
#ifdef USE_TIME_TIMEZONE
ESP_LOGCONFIG(TAG, "Timezone: '%s'", this->timezone_.c_str());
#endif
auto time = this->now();
ESP_LOGCONFIG(TAG, "Current time: %04d-%02d-%02d %02d:%02d:%02d", time.year, time.month, time.day_of_month, time.hour,
time.minute, time.second);
}
void RealTimeClock::synchronize_epoch_(uint32_t epoch) {

View File

@@ -2,7 +2,7 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include <array>
#include <vector>
namespace esphome {
namespace tx20 {
@@ -45,25 +45,25 @@ std::string Tx20Component::get_wind_cardinal_direction() const { return this->wi
void Tx20Component::decode_and_publish_() {
ESP_LOGVV(TAG, "Decode Tx20");
std::array<bool, MAX_BUFFER_SIZE> bit_buffer{};
size_t bit_pos = 0;
std::string string_buffer;
std::string string_buffer_2;
std::vector<bool> bit_buffer;
bool current_bit = true;
// Cap at MAX_BUFFER_SIZE - 1 to prevent out-of-bounds access (buffer_index can exceed MAX_BUFFER_SIZE in ISR)
const int max_buffer_index =
std::min(static_cast<int>(this->store_.buffer_index), static_cast<int>(MAX_BUFFER_SIZE - 1));
for (int i = 1; i <= max_buffer_index; i++) {
for (int i = 1; i <= this->store_.buffer_index; i++) {
string_buffer_2 += to_string(this->store_.buffer[i]) + ", ";
uint8_t repeat = this->store_.buffer[i] / TX20_BIT_TIME;
// ignore segments at the end that were too short
for (uint8_t j = 0; j < repeat && bit_pos < MAX_BUFFER_SIZE; j++) {
bit_buffer[bit_pos++] = current_bit;
}
string_buffer.append(repeat, current_bit ? '1' : '0');
bit_buffer.insert(bit_buffer.end(), repeat, current_bit);
current_bit = !current_bit;
}
current_bit = !current_bit;
size_t bits_before_padding = bit_pos;
while (bit_pos < MAX_BUFFER_SIZE) {
bit_buffer[bit_pos++] = current_bit;
if (string_buffer.length() < MAX_BUFFER_SIZE) {
uint8_t remain = MAX_BUFFER_SIZE - string_buffer.length();
string_buffer_2 += to_string(remain) + ", ";
string_buffer.append(remain, current_bit ? '1' : '0');
bit_buffer.insert(bit_buffer.end(), remain, current_bit);
}
uint8_t tx20_sa = 0;
@@ -108,24 +108,8 @@ void Tx20Component::decode_and_publish_() {
// 2. Check received checksum matches calculated checksum
// 3. Check that Wind Direction matches Wind Direction (Inverted)
// 4. Check that Wind Speed matches Wind Speed (Inverted)
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
// Build debug strings from completed data
char debug_buf[320]; // buffer values: max 40 entries * 7 chars each
size_t debug_pos = 0;
for (int i = 1; i <= max_buffer_index; i++) {
debug_pos = buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%u, ", this->store_.buffer[i]);
}
if (bits_before_padding < MAX_BUFFER_SIZE) {
buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%zu, ", MAX_BUFFER_SIZE - bits_before_padding);
}
char bits_buf[MAX_BUFFER_SIZE + 1];
for (size_t i = 0; i < MAX_BUFFER_SIZE; i++) {
bits_buf[i] = bit_buffer[i] ? '1' : '0';
}
bits_buf[MAX_BUFFER_SIZE] = '\0';
ESP_LOGVV(TAG, "BUFFER %s", debug_buf);
ESP_LOGVV(TAG, "Decoded bits %s", bits_buf);
#endif
ESP_LOGVV(TAG, "BUFFER %s", string_buffer_2.c_str());
ESP_LOGVV(TAG, "Decoded bits %s", string_buffer.c_str());
if (tx20_sa == 4) {
if (chk == tx20_sd) {
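The replacement above swaps std::vector<bool> for a fixed std::array plus a bounds-checked write cursor, removing heap allocation from the decode path. A simplified sketch of that run-length expansion, with illustrative names (expand_runs is made up, and MAX_BITS = 41 is only an example value):
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
constexpr size_t MAX_BITS = 41;
// Expand run-length segments into a fixed, stack-allocated bit buffer and
// return how many bits were written. The cursor check means a corrupted
// segment count can never write past the array.
size_t expand_runs(const uint16_t *segments, size_t n, uint16_t bit_time,
                   std::array<bool, MAX_BITS> &bits) {
  size_t bit_pos = 0;
  bool current_bit = true;
  for (size_t i = 0; i < n; i++) {
    uint16_t repeat = segments[i] / bit_time;  // bits spanned by this segment
    for (uint16_t j = 0; j < repeat && bit_pos < MAX_BITS; j++)
      bits[bit_pos++] = current_bit;
    current_bit = !current_bit;
  }
  return bit_pos;  // bits decoded before any padding is applied
}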

View File

@@ -1,6 +1 @@
CODEOWNERS = ["@clydebarrow"]
DEPRECATED_COMPONENT = """
The 'waveshare_epaper' component is deprecated and no new models will be added to it.
New model PRs should target the newer and more performant 'epaper_spi' component.
"""

View File

@@ -209,7 +209,7 @@ void DeferredUpdateEventSource::deq_push_back_with_dedup_(void *source, message_
void DeferredUpdateEventSource::process_deferred_queue_() {
while (!deferred_queue_.empty()) {
DeferredEvent &de = deferred_queue_.front();
auto message = de.message_generator_(web_server_, de.source_);
std::string message = de.message_generator_(web_server_, de.source_);
if (this->send(message.c_str(), "state") != DISCARDED) {
// O(n), but memory efficiency matters more than speed here, which is why std::vector was chosen
deferred_queue_.erase(deferred_queue_.begin());
@@ -266,7 +266,7 @@ void DeferredUpdateEventSource::deferrable_send_state(void *source, const char *
// deferred queue still not empty, which means the downstream event queue is full; no point trying to send first
deq_push_back_with_dedup_(source, message_generator);
} else {
auto message = message_generator(web_server_, source);
std::string message = message_generator(web_server_, source);
if (this->send(message.c_str(), "state") == DISCARDED) {
deq_push_back_with_dedup_(source, message_generator);
} else {
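Both branches above funnel into the same deferral rule: send immediately when possible, otherwise remember the generator, never the message. A condensed sketch with simplified stand-in types (Sender, Deferred, and generator_t are illustrative, not the ESPHome classes):
#include <string>
#include <vector>
struct Sender {
  // Stub: a real sender returns false when the downstream event queue is full.
  bool try_send(const std::string &msg) { (void) msg; return true; }
};
using generator_t = std::string (*)(void *source);
struct Deferred {
  void *source;
  generator_t gen;
};
void send_or_defer(Sender &sender, std::vector<Deferred> &queue, void *source, generator_t gen) {
  // If the queue is already backed up, sending now would reorder events.
  if (queue.empty() && sender.try_send(gen(source)))
    return;  // delivered immediately, nothing to remember
  // Dedup: at most one pending entry per (source, generator); the generator
  // re-reads the entity's state at send time, so the latest state wins.
  for (auto &d : queue) {
    if (d.source == source && d.gen == gen)
      return;
  }
  queue.push_back({source, gen});
}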
@@ -320,7 +320,7 @@ void DeferredUpdateEventSourceList::on_client_connect_(DeferredUpdateEventSource
ws->defer([ws, source]() {
// Configure reconnect timeout and send config
// this should always go through since the AsyncEventSourceClient event queue is empty on connect
auto message = ws->get_config_json();
std::string message = ws->get_config_json();
source->try_send_nodefer(message.c_str(), "ping", millis(), 30000);
#ifdef USE_WEBSERVER_SORTING
@@ -329,10 +329,10 @@ void DeferredUpdateEventSourceList::on_client_connect_(DeferredUpdateEventSource
JsonObject root = builder.root();
root[ESPHOME_F("name")] = group.second.name;
root[ESPHOME_F("sorting_weight")] = group.second.weight;
auto group_msg = builder.serialize();
message = builder.serialize();
// up to 31 groups should be able to be queued initially without defer
source->try_send_nodefer(group_msg.c_str(), "sorting_group");
source->try_send_nodefer(message.c_str(), "sorting_group");
}
#endif
@@ -365,7 +365,7 @@ void WebServer::set_css_include(const char *css_include) { this->css_include_ =
void WebServer::set_js_include(const char *js_include) { this->js_include_ = js_include; }
#endif
json::SerializationBuffer<> WebServer::get_config_json() {
std::string WebServer::get_config_json() {
json::JsonBuilder builder;
JsonObject root = builder.root();
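Every serializer method in this file follows the same build-then-serialize shape shown here. A sketch of that flow written directly against ArduinoJson (assuming the v7 JsonDocument API; the internals of the ESPHome JsonBuilder wrapper are not shown in this diff):
#include <ArduinoJson.h>
#include <cstdint>
#include <string>
std::string build_config_json(const char *title, uint16_t port) {
  JsonDocument doc;
  JsonObject root = doc.to<JsonObject>();
  root["title"] = title;
  root["port"] = port;
  std::string out;
  serializeJson(doc, out);  // serialize once, after the object is complete
  return out;
}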
@@ -597,20 +597,20 @@ void WebServer::handle_sensor_request(AsyncWebServerRequest *request, const UrlM
// Note: request->method() is always HTTP_GET here (canHandle ensures this)
if (entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->sensor_json_(obj, obj->state, detail);
std::string data = this->sensor_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
}
request->send(404);
}
json::SerializationBuffer<> WebServer::sensor_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::sensor_state_json_generator(WebServer *web_server, void *source) {
return web_server->sensor_json_((sensor::Sensor *) (source), ((sensor::Sensor *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::sensor_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::sensor_all_json_generator(WebServer *web_server, void *source) {
return web_server->sensor_json_((sensor::Sensor *) (source), ((sensor::Sensor *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config) {
std::string WebServer::sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -644,23 +644,23 @@ void WebServer::handle_text_sensor_request(AsyncWebServerRequest *request, const
// Note: request->method() is always HTTP_GET here (canHandle ensures this)
if (entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->text_sensor_json_(obj, obj->state, detail);
std::string data = this->text_sensor_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
}
request->send(404);
}
json::SerializationBuffer<> WebServer::text_sensor_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::text_sensor_state_json_generator(WebServer *web_server, void *source) {
return web_server->text_sensor_json_((text_sensor::TextSensor *) (source),
((text_sensor::TextSensor *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::text_sensor_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::text_sensor_all_json_generator(WebServer *web_server, void *source) {
return web_server->text_sensor_json_((text_sensor::TextSensor *) (source),
((text_sensor::TextSensor *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value,
JsonDetail start_config) {
std::string WebServer::text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value,
JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -705,7 +705,7 @@ void WebServer::handle_switch_request(AsyncWebServerRequest *request, const UrlM
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->switch_json_(obj, obj->state, detail);
std::string data = this->switch_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -734,13 +734,13 @@ void WebServer::handle_switch_request(AsyncWebServerRequest *request, const UrlM
}
request->send(404);
}
json::SerializationBuffer<> WebServer::switch_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::switch_state_json_generator(WebServer *web_server, void *source) {
return web_server->switch_json_((switch_::Switch *) (source), ((switch_::Switch *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::switch_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::switch_all_json_generator(WebServer *web_server, void *source) {
return web_server->switch_json_((switch_::Switch *) (source), ((switch_::Switch *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config) {
std::string WebServer::switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -762,7 +762,7 @@ void WebServer::handle_button_request(AsyncWebServerRequest *request, const UrlM
continue;
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->button_json_(obj, detail);
std::string data = this->button_json_(obj, detail);
request->send(200, "application/json", data.c_str());
} else if (match.method_equals(ESPHOME_F("press"))) {
DEFER_ACTION(obj, obj->press());
@@ -775,10 +775,10 @@ void WebServer::handle_button_request(AsyncWebServerRequest *request, const UrlM
}
request->send(404);
}
json::SerializationBuffer<> WebServer::button_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::button_all_json_generator(WebServer *web_server, void *source) {
return web_server->button_json_((button::Button *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::button_json_(button::Button *obj, JsonDetail start_config) {
std::string WebServer::button_json_(button::Button *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -805,23 +805,22 @@ void WebServer::handle_binary_sensor_request(AsyncWebServerRequest *request, con
// Note: request->method() is always HTTP_GET here (canHandle ensures this)
if (entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->binary_sensor_json_(obj, obj->state, detail);
std::string data = this->binary_sensor_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
}
request->send(404);
}
json::SerializationBuffer<> WebServer::binary_sensor_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::binary_sensor_state_json_generator(WebServer *web_server, void *source) {
return web_server->binary_sensor_json_((binary_sensor::BinarySensor *) (source),
((binary_sensor::BinarySensor *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::binary_sensor_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::binary_sensor_all_json_generator(WebServer *web_server, void *source) {
return web_server->binary_sensor_json_((binary_sensor::BinarySensor *) (source),
((binary_sensor::BinarySensor *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value,
JsonDetail start_config) {
std::string WebServer::binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -848,7 +847,7 @@ void WebServer::handle_fan_request(AsyncWebServerRequest *request, const UrlMatc
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->fan_json_(obj, detail);
std::string data = this->fan_json_(obj, detail);
request->send(200, "application/json", data.c_str());
} else if (match.method_equals(ESPHOME_F("toggle"))) {
DEFER_ACTION(obj, obj->toggle().perform());
@@ -889,13 +888,13 @@ void WebServer::handle_fan_request(AsyncWebServerRequest *request, const UrlMatc
}
request->send(404);
}
json::SerializationBuffer<> WebServer::fan_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::fan_state_json_generator(WebServer *web_server, void *source) {
return web_server->fan_json_((fan::Fan *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::fan_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::fan_all_json_generator(WebServer *web_server, void *source) {
return web_server->fan_json_((fan::Fan *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::fan_json_(fan::Fan *obj, JsonDetail start_config) {
std::string WebServer::fan_json_(fan::Fan *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -929,7 +928,7 @@ void WebServer::handle_light_request(AsyncWebServerRequest *request, const UrlMa
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->light_json_(obj, detail);
std::string data = this->light_json_(obj, detail);
request->send(200, "application/json", data.c_str());
} else if (match.method_equals(ESPHOME_F("toggle"))) {
DEFER_ACTION(obj, obj->toggle().perform());
@@ -968,13 +967,13 @@ void WebServer::handle_light_request(AsyncWebServerRequest *request, const UrlMa
}
request->send(404);
}
json::SerializationBuffer<> WebServer::light_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::light_state_json_generator(WebServer *web_server, void *source) {
return web_server->light_json_((light::LightState *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::light_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::light_all_json_generator(WebServer *web_server, void *source) {
return web_server->light_json_((light::LightState *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::light_json_(light::LightState *obj, JsonDetail start_config) {
std::string WebServer::light_json_(light::LightState *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1008,7 +1007,7 @@ void WebServer::handle_cover_request(AsyncWebServerRequest *request, const UrlMa
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->cover_json_(obj, detail);
std::string data = this->cover_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1056,13 +1055,13 @@ void WebServer::handle_cover_request(AsyncWebServerRequest *request, const UrlMa
}
request->send(404);
}
json::SerializationBuffer<> WebServer::cover_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::cover_state_json_generator(WebServer *web_server, void *source) {
return web_server->cover_json_((cover::Cover *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::cover_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::cover_all_json_generator(WebServer *web_server, void *source) {
return web_server->cover_json_((cover::Cover *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::cover_json_(cover::Cover *obj, JsonDetail start_config) {
std::string WebServer::cover_json_(cover::Cover *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1097,7 +1096,7 @@ void WebServer::handle_number_request(AsyncWebServerRequest *request, const UrlM
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->number_json_(obj, obj->state, detail);
std::string data = this->number_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1116,13 +1115,13 @@ void WebServer::handle_number_request(AsyncWebServerRequest *request, const UrlM
request->send(404);
}
json::SerializationBuffer<> WebServer::number_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::number_state_json_generator(WebServer *web_server, void *source) {
return web_server->number_json_((number::Number *) (source), ((number::Number *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::number_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::number_all_json_generator(WebServer *web_server, void *source) {
return web_server->number_json_((number::Number *) (source), ((number::Number *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::number_json_(number::Number *obj, float value, JsonDetail start_config) {
std::string WebServer::number_json_(number::Number *obj, float value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1164,7 +1163,7 @@ void WebServer::handle_date_request(AsyncWebServerRequest *request, const UrlMat
continue;
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->date_json_(obj, detail);
std::string data = this->date_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1189,13 +1188,13 @@ void WebServer::handle_date_request(AsyncWebServerRequest *request, const UrlMat
request->send(404);
}
json::SerializationBuffer<> WebServer::date_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::date_state_json_generator(WebServer *web_server, void *source) {
return web_server->date_json_((datetime::DateEntity *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::date_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::date_all_json_generator(WebServer *web_server, void *source) {
return web_server->date_json_((datetime::DateEntity *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::date_json_(datetime::DateEntity *obj, JsonDetail start_config) {
std::string WebServer::date_json_(datetime::DateEntity *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1224,7 +1223,7 @@ void WebServer::handle_time_request(AsyncWebServerRequest *request, const UrlMat
continue;
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->time_json_(obj, detail);
std::string data = this->time_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1248,13 +1247,13 @@ void WebServer::handle_time_request(AsyncWebServerRequest *request, const UrlMat
}
request->send(404);
}
json::SerializationBuffer<> WebServer::time_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::time_state_json_generator(WebServer *web_server, void *source) {
return web_server->time_json_((datetime::TimeEntity *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::time_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::time_all_json_generator(WebServer *web_server, void *source) {
return web_server->time_json_((datetime::TimeEntity *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::time_json_(datetime::TimeEntity *obj, JsonDetail start_config) {
std::string WebServer::time_json_(datetime::TimeEntity *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1283,7 +1282,7 @@ void WebServer::handle_datetime_request(AsyncWebServerRequest *request, const Ur
continue;
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->datetime_json_(obj, detail);
std::string data = this->datetime_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1307,13 +1306,13 @@ void WebServer::handle_datetime_request(AsyncWebServerRequest *request, const Ur
}
request->send(404);
}
json::SerializationBuffer<> WebServer::datetime_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::datetime_state_json_generator(WebServer *web_server, void *source) {
return web_server->datetime_json_((datetime::DateTimeEntity *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::datetime_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::datetime_all_json_generator(WebServer *web_server, void *source) {
return web_server->datetime_json_((datetime::DateTimeEntity *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config) {
std::string WebServer::datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1344,7 +1343,7 @@ void WebServer::handle_text_request(AsyncWebServerRequest *request, const UrlMat
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->text_json_(obj, obj->state, detail);
std::string data = this->text_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1363,13 +1362,13 @@ void WebServer::handle_text_request(AsyncWebServerRequest *request, const UrlMat
request->send(404);
}
json::SerializationBuffer<> WebServer::text_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::text_state_json_generator(WebServer *web_server, void *source) {
return web_server->text_json_((text::Text *) (source), ((text::Text *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::text_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::text_all_json_generator(WebServer *web_server, void *source) {
return web_server->text_json_((text::Text *) (source), ((text::Text *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::text_json_(text::Text *obj, const std::string &value, JsonDetail start_config) {
std::string WebServer::text_json_(text::Text *obj, const std::string &value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1401,7 +1400,7 @@ void WebServer::handle_select_request(AsyncWebServerRequest *request, const UrlM
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), detail);
std::string data = this->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1420,15 +1419,15 @@ void WebServer::handle_select_request(AsyncWebServerRequest *request, const UrlM
}
request->send(404);
}
json::SerializationBuffer<> WebServer::select_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::select_state_json_generator(WebServer *web_server, void *source) {
auto *obj = (select::Select *) (source);
return web_server->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::select_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::select_all_json_generator(WebServer *web_server, void *source) {
auto *obj = (select::Select *) (source);
return web_server->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::select_json_(select::Select *obj, StringRef value, JsonDetail start_config) {
std::string WebServer::select_json_(select::Select *obj, StringRef value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1460,7 +1459,7 @@ void WebServer::handle_climate_request(AsyncWebServerRequest *request, const Url
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->climate_json_(obj, detail);
std::string data = this->climate_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1489,15 +1488,15 @@ void WebServer::handle_climate_request(AsyncWebServerRequest *request, const Url
}
request->send(404);
}
json::SerializationBuffer<> WebServer::climate_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::climate_state_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->climate_json_((climate::Climate *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::climate_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::climate_all_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->climate_json_((climate::Climate *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::climate_json_(climate::Climate *obj, JsonDetail start_config) {
std::string WebServer::climate_json_(climate::Climate *obj, JsonDetail start_config) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1630,7 +1629,7 @@ void WebServer::handle_lock_request(AsyncWebServerRequest *request, const UrlMat
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->lock_json_(obj, obj->state, detail);
std::string data = this->lock_json_(obj, obj->state, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1659,13 +1658,13 @@ void WebServer::handle_lock_request(AsyncWebServerRequest *request, const UrlMat
}
request->send(404);
}
json::SerializationBuffer<> WebServer::lock_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::lock_state_json_generator(WebServer *web_server, void *source) {
return web_server->lock_json_((lock::Lock *) (source), ((lock::Lock *) (source))->state, DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::lock_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::lock_all_json_generator(WebServer *web_server, void *source) {
return web_server->lock_json_((lock::Lock *) (source), ((lock::Lock *) (source))->state, DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config) {
std::string WebServer::lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1693,7 +1692,7 @@ void WebServer::handle_valve_request(AsyncWebServerRequest *request, const UrlMa
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->valve_json_(obj, detail);
std::string data = this->valve_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1739,13 +1738,13 @@ void WebServer::handle_valve_request(AsyncWebServerRequest *request, const UrlMa
}
request->send(404);
}
json::SerializationBuffer<> WebServer::valve_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::valve_state_json_generator(WebServer *web_server, void *source) {
return web_server->valve_json_((valve::Valve *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::valve_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::valve_all_json_generator(WebServer *web_server, void *source) {
return web_server->valve_json_((valve::Valve *) (source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::valve_json_(valve::Valve *obj, JsonDetail start_config) {
std::string WebServer::valve_json_(valve::Valve *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1778,7 +1777,7 @@ void WebServer::handle_alarm_control_panel_request(AsyncWebServerRequest *reques
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->alarm_control_panel_json_(obj, obj->get_state(), detail);
std::string data = this->alarm_control_panel_json_(obj, obj->get_state(), detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1818,19 +1817,19 @@ void WebServer::handle_alarm_control_panel_request(AsyncWebServerRequest *reques
}
request->send(404);
}
json::SerializationBuffer<> WebServer::alarm_control_panel_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::alarm_control_panel_state_json_generator(WebServer *web_server, void *source) {
return web_server->alarm_control_panel_json_((alarm_control_panel::AlarmControlPanel *) (source),
((alarm_control_panel::AlarmControlPanel *) (source))->get_state(),
DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::alarm_control_panel_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::alarm_control_panel_all_json_generator(WebServer *web_server, void *source) {
return web_server->alarm_control_panel_json_((alarm_control_panel::AlarmControlPanel *) (source),
((alarm_control_panel::AlarmControlPanel *) (source))->get_state(),
DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
alarm_control_panel::AlarmControlPanelState value,
JsonDetail start_config) {
std::string WebServer::alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
alarm_control_panel::AlarmControlPanelState value,
JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -1859,7 +1858,7 @@ void WebServer::handle_water_heater_request(AsyncWebServerRequest *request, cons
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->water_heater_json_(obj, detail);
std::string data = this->water_heater_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -1895,14 +1894,14 @@ void WebServer::handle_water_heater_request(AsyncWebServerRequest *request, cons
request->send(404);
}
json::SerializationBuffer<> WebServer::water_heater_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::water_heater_state_json_generator(WebServer *web_server, void *source) {
return web_server->water_heater_json_(static_cast<water_heater::WaterHeater *>(source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::water_heater_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::water_heater_all_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->water_heater_json_(static_cast<water_heater::WaterHeater *>(source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config) {
std::string WebServer::water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
char buf[PSTR_LOCAL_SIZE];
@@ -1965,7 +1964,7 @@ void WebServer::handle_infrared_request(AsyncWebServerRequest *request, const Ur
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->infrared_json_(obj, detail);
std::string data = this->infrared_json_(obj, detail);
request->send(200, ESPHOME_F("application/json"), data.c_str());
return;
}
@@ -2036,12 +2035,12 @@ void WebServer::handle_infrared_request(AsyncWebServerRequest *request, const Ur
request->send(404);
}
json::SerializationBuffer<> WebServer::infrared_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::infrared_all_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->infrared_json_(static_cast<infrared::Infrared *>(source), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::infrared_json_(infrared::Infrared *obj, JsonDetail start_config) {
std::string WebServer::infrared_json_(infrared::Infrared *obj, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -2076,7 +2075,7 @@ void WebServer::handle_event_request(AsyncWebServerRequest *request, const UrlMa
// Note: request->method() is always HTTP_GET here (canHandle ensures this)
if (entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->event_json_(obj, StringRef(), detail);
std::string data = this->event_json_(obj, StringRef(), detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -2086,16 +2085,16 @@ void WebServer::handle_event_request(AsyncWebServerRequest *request, const UrlMa
static StringRef get_event_type(event::Event *event) { return event ? event->get_last_event_type() : StringRef(); }
json::SerializationBuffer<> WebServer::event_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::event_state_json_generator(WebServer *web_server, void *source) {
auto *event = static_cast<event::Event *>(source);
return web_server->event_json_(event, get_event_type(event), DETAIL_STATE);
}
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
json::SerializationBuffer<> WebServer::event_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::event_all_json_generator(WebServer *web_server, void *source) {
auto *event = static_cast<event::Event *>(source);
return web_server->event_json_(event, get_event_type(event), DETAIL_ALL);
}
json::SerializationBuffer<> WebServer::event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config) {
std::string WebServer::event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config) {
json::JsonBuilder builder;
JsonObject root = builder.root();
@@ -2142,7 +2141,7 @@ void WebServer::handle_update_request(AsyncWebServerRequest *request, const UrlM
if (request->method() == HTTP_GET && entity_match.action_is_empty) {
auto detail = get_request_detail(request);
auto data = this->update_json_(obj, detail);
std::string data = this->update_json_(obj, detail);
request->send(200, "application/json", data.c_str());
return;
}
@@ -2158,15 +2157,15 @@ void WebServer::handle_update_request(AsyncWebServerRequest *request, const UrlM
}
request->send(404);
}
json::SerializationBuffer<> WebServer::update_state_json_generator(WebServer *web_server, void *source) {
std::string WebServer::update_state_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->update_json_((update::UpdateEntity *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::update_all_json_generator(WebServer *web_server, void *source) {
std::string WebServer::update_all_json_generator(WebServer *web_server, void *source) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return web_server->update_json_((update::UpdateEntity *) (source), DETAIL_STATE);
}
json::SerializationBuffer<> WebServer::update_json_(update::UpdateEntity *obj, JsonDetail start_config) {
std::string WebServer::update_json_(update::UpdateEntity *obj, JsonDetail start_config) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
json::JsonBuilder builder;
JsonObject root = builder.root();

View File

@@ -2,7 +2,6 @@
#include "list_entities.h"
#include "esphome/components/json/json_util.h"
#include "esphome/components/web_server_base/web_server_base.h"
#ifdef USE_WEBSERVER
#include "esphome/core/component.h"
@@ -105,7 +104,7 @@ enum JsonDetail { DETAIL_ALL, DETAIL_STATE };
can be forgotten.
*/
#if !defined(USE_ESP32) && defined(USE_ARDUINO)
using message_generator_t = json::SerializationBuffer<>(WebServer *, void *);
using message_generator_t = std::string(WebServer *, void *);
class DeferredUpdateEventSourceList;
class DeferredUpdateEventSource : public AsyncEventSource {
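Note that message_generator_t is a function type, not a pointer type, so the static per-entity generators declared below can be stored and invoked through plain pointers. A minimal sketch of that pattern, with WebServer as an opaque stand-in and a hypothetical demo generator:
#include <string>
struct WebServer;  // opaque here; only a pointer is passed through
using message_generator_t = std::string(WebServer *, void *);
std::string demo_state_generator(WebServer *, void *source) {
  (void) source;  // a real generator casts source back to its entity type
  return "{\"state\":\"42\"}";
}
std::string invoke(message_generator_t *gen, WebServer *ws, void *src) {
  return gen(ws, src);  // calling through the function pointer
}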
@@ -264,7 +263,7 @@ class WebServer : public Controller,
void handle_index_request(AsyncWebServerRequest *request);
/// Return the webserver configuration as JSON.
json::SerializationBuffer<> get_config_json();
std::string get_config_json();
#ifdef USE_WEBSERVER_CSS_INCLUDE
/// Handle included css request under '/0.css'.
@@ -286,8 +285,8 @@ class WebServer : public Controller,
/// Handle a sensor request under '/sensor/<id>'.
void handle_sensor_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> sensor_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> sensor_all_json_generator(WebServer *web_server, void *source);
static std::string sensor_state_json_generator(WebServer *web_server, void *source);
static std::string sensor_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_SWITCH
@@ -296,8 +295,8 @@ class WebServer : public Controller,
/// Handle a switch request under '/switch/<id>/</turn_on/turn_off/toggle>'.
void handle_switch_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> switch_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> switch_all_json_generator(WebServer *web_server, void *source);
static std::string switch_state_json_generator(WebServer *web_server, void *source);
static std::string switch_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_BUTTON
@@ -305,7 +304,7 @@ class WebServer : public Controller,
void handle_button_request(AsyncWebServerRequest *request, const UrlMatch &match);
// Buttons are stateless, so there is no button_state_json_generator
static json::SerializationBuffer<> button_all_json_generator(WebServer *web_server, void *source);
static std::string button_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_BINARY_SENSOR
@@ -314,8 +313,8 @@ class WebServer : public Controller,
/// Handle a binary sensor request under '/binary_sensor/<id>'.
void handle_binary_sensor_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> binary_sensor_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> binary_sensor_all_json_generator(WebServer *web_server, void *source);
static std::string binary_sensor_state_json_generator(WebServer *web_server, void *source);
static std::string binary_sensor_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_FAN
@@ -324,8 +323,8 @@ class WebServer : public Controller,
/// Handle a fan request under '/fan/<id>/</turn_on/turn_off/toggle>'.
void handle_fan_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> fan_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> fan_all_json_generator(WebServer *web_server, void *source);
static std::string fan_state_json_generator(WebServer *web_server, void *source);
static std::string fan_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_LIGHT
@@ -334,8 +333,8 @@ class WebServer : public Controller,
/// Handle a light request under '/light/<id>/</turn_on/turn_off/toggle>'.
void handle_light_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> light_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> light_all_json_generator(WebServer *web_server, void *source);
static std::string light_state_json_generator(WebServer *web_server, void *source);
static std::string light_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_TEXT_SENSOR
@@ -344,8 +343,8 @@ class WebServer : public Controller,
/// Handle a text sensor request under '/text_sensor/<id>'.
void handle_text_sensor_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> text_sensor_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> text_sensor_all_json_generator(WebServer *web_server, void *source);
static std::string text_sensor_state_json_generator(WebServer *web_server, void *source);
static std::string text_sensor_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_COVER
@@ -354,8 +353,8 @@ class WebServer : public Controller,
/// Handle a cover request under '/cover/<id>/<open/close/stop/set>'.
void handle_cover_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> cover_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> cover_all_json_generator(WebServer *web_server, void *source);
static std::string cover_state_json_generator(WebServer *web_server, void *source);
static std::string cover_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_NUMBER
@@ -363,8 +362,8 @@ class WebServer : public Controller,
/// Handle a number request under '/number/<id>'.
void handle_number_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> number_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> number_all_json_generator(WebServer *web_server, void *source);
static std::string number_state_json_generator(WebServer *web_server, void *source);
static std::string number_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_DATETIME_DATE
@@ -372,8 +371,8 @@ class WebServer : public Controller,
/// Handle a date request under '/date/<id>'.
void handle_date_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> date_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> date_all_json_generator(WebServer *web_server, void *source);
static std::string date_state_json_generator(WebServer *web_server, void *source);
static std::string date_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_DATETIME_TIME
@@ -381,8 +380,8 @@ class WebServer : public Controller,
/// Handle a time request under '/time/<id>'.
void handle_time_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> time_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> time_all_json_generator(WebServer *web_server, void *source);
static std::string time_state_json_generator(WebServer *web_server, void *source);
static std::string time_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_DATETIME_DATETIME
@@ -390,8 +389,8 @@ class WebServer : public Controller,
/// Handle a datetime request under '/datetime/<id>'.
void handle_datetime_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> datetime_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> datetime_all_json_generator(WebServer *web_server, void *source);
static std::string datetime_state_json_generator(WebServer *web_server, void *source);
static std::string datetime_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_TEXT
@@ -399,8 +398,8 @@ class WebServer : public Controller,
/// Handle a text input request under '/text/<id>'.
void handle_text_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> text_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> text_all_json_generator(WebServer *web_server, void *source);
static std::string text_state_json_generator(WebServer *web_server, void *source);
static std::string text_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_SELECT
@@ -408,8 +407,8 @@ class WebServer : public Controller,
/// Handle a select request under '/select/<id>'.
void handle_select_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> select_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> select_all_json_generator(WebServer *web_server, void *source);
static std::string select_state_json_generator(WebServer *web_server, void *source);
static std::string select_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_CLIMATE
@@ -417,8 +416,8 @@ class WebServer : public Controller,
/// Handle a climate request under '/climate/<id>'.
void handle_climate_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> climate_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> climate_all_json_generator(WebServer *web_server, void *source);
static std::string climate_state_json_generator(WebServer *web_server, void *source);
static std::string climate_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_LOCK
@@ -427,8 +426,8 @@ class WebServer : public Controller,
/// Handle a lock request under '/lock/<id>/</lock/unlock/open>'.
void handle_lock_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> lock_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> lock_all_json_generator(WebServer *web_server, void *source);
static std::string lock_state_json_generator(WebServer *web_server, void *source);
static std::string lock_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_VALVE
@@ -437,8 +436,8 @@ class WebServer : public Controller,
/// Handle a valve request under '/valve/<id>/<open/close/stop/set>'.
void handle_valve_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> valve_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> valve_all_json_generator(WebServer *web_server, void *source);
static std::string valve_state_json_generator(WebServer *web_server, void *source);
static std::string valve_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_ALARM_CONTROL_PANEL
@@ -447,8 +446,8 @@ class WebServer : public Controller,
/// Handle an alarm_control_panel request under '/alarm_control_panel/<id>'.
void handle_alarm_control_panel_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> alarm_control_panel_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> alarm_control_panel_all_json_generator(WebServer *web_server, void *source);
static std::string alarm_control_panel_state_json_generator(WebServer *web_server, void *source);
static std::string alarm_control_panel_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_WATER_HEATER
@@ -457,22 +456,22 @@ class WebServer : public Controller,
/// Handle a water_heater request under '/water_heater/<id>/<mode/set>'.
void handle_water_heater_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> water_heater_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> water_heater_all_json_generator(WebServer *web_server, void *source);
static std::string water_heater_state_json_generator(WebServer *web_server, void *source);
static std::string water_heater_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_INFRARED
/// Handle an infrared request under '/infrared/<id>/transmit'.
void handle_infrared_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> infrared_all_json_generator(WebServer *web_server, void *source);
static std::string infrared_all_json_generator(WebServer *web_server, void *source);
#endif
#ifdef USE_EVENT
void on_event(event::Event *obj) override;
static json::SerializationBuffer<> event_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> event_all_json_generator(WebServer *web_server, void *source);
static std::string event_state_json_generator(WebServer *web_server, void *source);
static std::string event_all_json_generator(WebServer *web_server, void *source);
/// Handle an event request under '/event/<id>'.
void handle_event_request(AsyncWebServerRequest *request, const UrlMatch &match);
@@ -484,8 +483,8 @@ class WebServer : public Controller,
/// Handle a update request under '/update/<id>'.
void handle_update_request(AsyncWebServerRequest *request, const UrlMatch &match);
static json::SerializationBuffer<> update_state_json_generator(WebServer *web_server, void *source);
static json::SerializationBuffer<> update_all_json_generator(WebServer *web_server, void *source);
static std::string update_state_json_generator(WebServer *web_server, void *source);
static std::string update_all_json_generator(WebServer *web_server, void *source);
#endif
/// Override the web handler's canHandle method.
@@ -610,74 +609,71 @@ class WebServer : public Controller,
private:
#ifdef USE_SENSOR
json::SerializationBuffer<> sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config);
std::string sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config);
#endif
#ifdef USE_SWITCH
json::SerializationBuffer<> switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config);
std::string switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config);
#endif
#ifdef USE_BUTTON
json::SerializationBuffer<> button_json_(button::Button *obj, JsonDetail start_config);
std::string button_json_(button::Button *obj, JsonDetail start_config);
#endif
#ifdef USE_BINARY_SENSOR
json::SerializationBuffer<> binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value,
JsonDetail start_config);
std::string binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value, JsonDetail start_config);
#endif
#ifdef USE_FAN
json::SerializationBuffer<> fan_json_(fan::Fan *obj, JsonDetail start_config);
std::string fan_json_(fan::Fan *obj, JsonDetail start_config);
#endif
#ifdef USE_LIGHT
json::SerializationBuffer<> light_json_(light::LightState *obj, JsonDetail start_config);
std::string light_json_(light::LightState *obj, JsonDetail start_config);
#endif
#ifdef USE_TEXT_SENSOR
json::SerializationBuffer<> text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value,
JsonDetail start_config);
std::string text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value, JsonDetail start_config);
#endif
#ifdef USE_COVER
json::SerializationBuffer<> cover_json_(cover::Cover *obj, JsonDetail start_config);
std::string cover_json_(cover::Cover *obj, JsonDetail start_config);
#endif
#ifdef USE_NUMBER
json::SerializationBuffer<> number_json_(number::Number *obj, float value, JsonDetail start_config);
std::string number_json_(number::Number *obj, float value, JsonDetail start_config);
#endif
#ifdef USE_DATETIME_DATE
json::SerializationBuffer<> date_json_(datetime::DateEntity *obj, JsonDetail start_config);
std::string date_json_(datetime::DateEntity *obj, JsonDetail start_config);
#endif
#ifdef USE_DATETIME_TIME
json::SerializationBuffer<> time_json_(datetime::TimeEntity *obj, JsonDetail start_config);
std::string time_json_(datetime::TimeEntity *obj, JsonDetail start_config);
#endif
#ifdef USE_DATETIME_DATETIME
json::SerializationBuffer<> datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config);
std::string datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config);
#endif
#ifdef USE_TEXT
json::SerializationBuffer<> text_json_(text::Text *obj, const std::string &value, JsonDetail start_config);
std::string text_json_(text::Text *obj, const std::string &value, JsonDetail start_config);
#endif
#ifdef USE_SELECT
json::SerializationBuffer<> select_json_(select::Select *obj, StringRef value, JsonDetail start_config);
std::string select_json_(select::Select *obj, StringRef value, JsonDetail start_config);
#endif
#ifdef USE_CLIMATE
json::SerializationBuffer<> climate_json_(climate::Climate *obj, JsonDetail start_config);
std::string climate_json_(climate::Climate *obj, JsonDetail start_config);
#endif
#ifdef USE_LOCK
json::SerializationBuffer<> lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config);
std::string lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config);
#endif
#ifdef USE_VALVE
json::SerializationBuffer<> valve_json_(valve::Valve *obj, JsonDetail start_config);
std::string valve_json_(valve::Valve *obj, JsonDetail start_config);
#endif
#ifdef USE_ALARM_CONTROL_PANEL
json::SerializationBuffer<> alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
alarm_control_panel::AlarmControlPanelState value,
JsonDetail start_config);
std::string alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
alarm_control_panel::AlarmControlPanelState value, JsonDetail start_config);
#endif
#ifdef USE_EVENT
json::SerializationBuffer<> event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config);
std::string event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config);
#endif
#ifdef USE_WATER_HEATER
json::SerializationBuffer<> water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config);
std::string water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config);
#endif
#ifdef USE_INFRARED
json::SerializationBuffer<> infrared_json_(infrared::Infrared *obj, JsonDetail start_config);
std::string infrared_json_(infrared::Infrared *obj, JsonDetail start_config);
#endif
#ifdef USE_UPDATE
json::SerializationBuffer<> update_json_(update::UpdateEntity *obj, JsonDetail start_config);
std::string update_json_(update::UpdateEntity *obj, JsonDetail start_config);
#endif
};

View File

@@ -507,7 +507,7 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *
// Configure reconnect timeout and send config
// this should always go through since the TCP send buffer is empty on connect
auto message = ws->get_config_json();
std::string message = ws->get_config_json();
this->try_send_nodefer(message.c_str(), "ping", millis(), 30000);
#ifdef USE_WEBSERVER_SORTING
@@ -561,7 +561,7 @@ void AsyncEventSourceResponse::deq_push_back_with_dedup_(void *source, message_g
void AsyncEventSourceResponse::process_deferred_queue_() {
while (!deferred_queue_.empty()) {
DeferredEvent &de = deferred_queue_.front();
auto message = de.message_generator_(web_server_, de.source_);
std::string message = de.message_generator_(web_server_, de.source_);
if (this->try_send_nodefer(message.c_str(), "state")) {
// O(n), but memory efficiency matters more than speed here, which is why std::vector was chosen
deferred_queue_.erase(deferred_queue_.begin());
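The comment spells out the trade-off; a sketch of the same drain loop in isolation (drain and try_send are illustrative names):
#include <string>
#include <vector>
template<typename SendFn>
void drain(std::vector<std::string> &queue, SendFn try_send) {
  while (!queue.empty()) {
    if (!try_send(queue.front()))
      break;  // downstream full again; leave the rest for the next pass
    // erase(begin()) is O(n), accepted because std::vector keeps the
    // per-element footprint smaller than std::deque on these targets
    queue.erase(queue.begin());
  }
}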
@@ -798,7 +798,7 @@ void AsyncEventSourceResponse::deferrable_send_state(void *source, const char *e
// trying to send first
deq_push_back_with_dedup_(source, message_generator);
} else {
auto message = message_generator(web_server_, source);
std::string message = message_generator(web_server_, source);
if (!this->try_send_nodefer(message.c_str(), "state")) {
deq_push_back_with_dedup_(source, message_generator);
}

View File

@@ -16,7 +16,6 @@
#include <vector>
#ifdef USE_WEBSERVER
#include "esphome/components/json/json_util.h"
#include "esphome/components/web_server/list_entities.h"
#endif
@@ -251,7 +250,7 @@ class AsyncWebHandler {
class AsyncEventSource;
class AsyncEventSourceResponse;
using message_generator_t = json::SerializationBuffer<>(esphome::web_server::WebServer *, void *);
using message_generator_t = std::string(esphome::web_server::WebServer *, void *);
/*
This class holds a pointer to the source component that wants to publish a state event, and a pointer to a function

View File

@@ -351,7 +351,7 @@ bool WiFiComponent::needs_scan_results_() const {
return this->scan_result_.empty() || !this->scan_result_[0].get_matches();
}
bool WiFiComponent::ssid_was_seen_in_scan_(const std::string &ssid) const {
bool WiFiComponent::ssid_was_seen_in_scan_(const CompactString &ssid) const {
// Check if this SSID is configured as hidden
// If explicitly marked hidden, we should always try hidden mode regardless of scan results
for (const auto &conf : this->sta_) {
@@ -955,9 +955,12 @@ WiFiAP WiFiComponent::get_sta() const {
return config ? *config : WiFiAP{};
}
void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &password) {
this->save_wifi_sta(ssid.c_str(), password.c_str());
}
void WiFiComponent::save_wifi_sta(const char *ssid, const char *password) {
SavedWifiSettings save{}; // zero-initialized - all bytes set to \0, guaranteeing null termination
strncpy(save.ssid, ssid.c_str(), sizeof(save.ssid) - 1); // max 32 chars, byte 32 remains \0
strncpy(save.password, password.c_str(), sizeof(save.password) - 1); // max 64 chars, byte 64 remains \0
strncpy(save.ssid, ssid, sizeof(save.ssid) - 1); // max 32 chars, byte 32 remains \0
strncpy(save.password, password, sizeof(save.password) - 1); // max 64 chars, byte 64 remains \0
this->pref_.save(&save);
// ensure it's written immediately
global_preferences->sync();
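The zero-init-plus-strncpy pattern above needs no explicit terminator write: value-initialization clears the whole struct first, and each strncpy is capped one byte short of its buffer. In isolation (buffer sizes inferred from the "max 32 chars" / "max 64 chars" comments above, not from the header itself):

#include <cstring>

// Sketch only: field sizes assumed from the comments in the diff
// (33- and 65-byte buffers including the terminator).
struct SavedWifiSettingsSketch {
  char ssid[33];
  char password[65];
};

void save_sketch(const char *ssid, const char *password) {
  SavedWifiSettingsSketch save{};  // value-init: every byte starts as '\0'
  strncpy(save.ssid, ssid, sizeof(save.ssid) - 1);              // byte 32 is never touched
  strncpy(save.password, password, sizeof(save.password) - 1);  // byte 64 is never touched
  // `save` is now null-terminated no matter how long the inputs were
}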
@@ -1802,11 +1805,11 @@ void WiFiComponent::log_and_adjust_priority_for_failed_connect_() {
}
// Get SSID for logging (use pointer to avoid copy)
const std::string *ssid = nullptr;
const char *ssid = nullptr;
if (this->retry_phase_ == WiFiRetryPhase::SCAN_CONNECTING && !this->scan_result_.empty()) {
ssid = &this->scan_result_[0].get_ssid();
ssid = this->scan_result_[0].get_ssid().c_str();
} else if (const WiFiAP *config = this->get_selected_sta_()) {
ssid = &config->get_ssid();
ssid = config->get_ssid().c_str();
}
// Only decrease priority on the last attempt for this phase
@@ -1826,8 +1829,8 @@ void WiFiComponent::log_and_adjust_priority_for_failed_connect_() {
}
char bssid_s[18];
format_mac_addr_upper(failed_bssid.value().data(), bssid_s);
ESP_LOGD(TAG, "Failed " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") ", priority %d → %d",
ssid != nullptr ? ssid->c_str() : "", bssid_s, old_priority, new_priority);
ESP_LOGD(TAG, "Failed " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") ", priority %d → %d", ssid != nullptr ? ssid : "",
bssid_s, old_priority, new_priority);
// After adjusting priority, check if all priorities are now at minimum
// If so, clear the vector to save memory and reset for fresh start
@@ -2075,10 +2078,14 @@ void WiFiComponent::save_fast_connect_settings_() {
}
#endif
void WiFiAP::set_ssid(const std::string &ssid) { this->ssid_ = ssid; }
void WiFiAP::set_ssid(const std::string &ssid) { this->ssid_ = CompactString(ssid.c_str(), ssid.size()); }
void WiFiAP::set_ssid(const char *ssid) { this->ssid_ = CompactString(ssid, strlen(ssid)); }
void WiFiAP::set_bssid(const bssid_t &bssid) { this->bssid_ = bssid; }
void WiFiAP::clear_bssid() { this->bssid_ = {}; }
void WiFiAP::set_password(const std::string &password) { this->password_ = password; }
void WiFiAP::set_password(const std::string &password) {
this->password_ = CompactString(password.c_str(), password.size());
}
void WiFiAP::set_password(const char *password) { this->password_ = CompactString(password, strlen(password)); }
#ifdef USE_WIFI_WPA2_EAP
void WiFiAP::set_eap(optional<EAPAuth> eap_auth) { this->eap_ = std::move(eap_auth); }
#endif
@@ -2088,10 +2095,8 @@ void WiFiAP::clear_channel() { this->channel_ = 0; }
void WiFiAP::set_manual_ip(optional<ManualIP> manual_ip) { this->manual_ip_ = manual_ip; }
#endif
void WiFiAP::set_hidden(bool hidden) { this->hidden_ = hidden; }
const std::string &WiFiAP::get_ssid() const { return this->ssid_; }
const bssid_t &WiFiAP::get_bssid() const { return this->bssid_; }
bool WiFiAP::has_bssid() const { return this->bssid_ != bssid_t{}; }
const std::string &WiFiAP::get_password() const { return this->password_; }
#ifdef USE_WIFI_WPA2_EAP
const optional<EAPAuth> &WiFiAP::get_eap() const { return this->eap_; }
#endif
@@ -2102,12 +2107,12 @@ const optional<ManualIP> &WiFiAP::get_manual_ip() const { return this->manual_ip
#endif
bool WiFiAP::get_hidden() const { return this->hidden_; }
WiFiScanResult::WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t channel, int8_t rssi, bool with_auth,
bool is_hidden)
WiFiScanResult::WiFiScanResult(const bssid_t &bssid, const char *ssid, size_t ssid_len, uint8_t channel, int8_t rssi,
bool with_auth, bool is_hidden)
: bssid_(bssid),
channel_(channel),
rssi_(rssi),
ssid_(std::move(ssid)),
ssid_(ssid, ssid_len),
with_auth_(with_auth),
is_hidden_(is_hidden) {}
bool WiFiScanResult::matches(const WiFiAP &config) const {
@@ -2150,7 +2155,6 @@ bool WiFiScanResult::matches(const WiFiAP &config) const {
bool WiFiScanResult::get_matches() const { return this->matches_; }
void WiFiScanResult::set_matches(bool matches) { this->matches_ = matches; }
const bssid_t &WiFiScanResult::get_bssid() const { return this->bssid_; }
const std::string &WiFiScanResult::get_ssid() const { return this->ssid_; }
uint8_t WiFiScanResult::get_channel() const { return this->channel_; }
int8_t WiFiScanResult::get_rssi() const { return this->rssi_; }
bool WiFiScanResult::get_with_auth() const { return this->with_auth_; }
@@ -2223,7 +2227,7 @@ void WiFiComponent::process_roaming_scan_() {
for (const auto &result : this->scan_result_) {
// Must be same SSID, different BSSID
if (current_ssid != result.get_ssid() || result.get_bssid() == current_bssid)
if (result.get_ssid() != current_ssid.c_str() || result.get_bssid() == current_bssid)
continue;
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE

View File

@@ -175,9 +175,13 @@ template<typename T> using wifi_scan_vector_t = FixedVector<T>;
class WiFiAP {
public:
void set_ssid(const std::string &ssid);
void set_ssid(const char *ssid);
void set_ssid(const CompactString &ssid) { this->ssid_ = ssid; }
void set_bssid(const bssid_t &bssid);
void clear_bssid();
void set_password(const std::string &password);
void set_password(const char *password);
void set_password(const CompactString &password) { this->password_ = password; }
#ifdef USE_WIFI_WPA2_EAP
void set_eap(optional<EAPAuth> eap_auth);
#endif // USE_WIFI_WPA2_EAP
@@ -188,10 +192,10 @@ class WiFiAP {
void set_manual_ip(optional<ManualIP> manual_ip);
#endif
void set_hidden(bool hidden);
const std::string &get_ssid() const;
const CompactString &get_ssid() const { return this->ssid_; }
const CompactString &get_password() const { return this->password_; }
const bssid_t &get_bssid() const;
bool has_bssid() const;
const std::string &get_password() const;
#ifdef USE_WIFI_WPA2_EAP
const optional<EAPAuth> &get_eap() const;
#endif // USE_WIFI_WPA2_EAP
@@ -204,8 +208,8 @@ class WiFiAP {
bool get_hidden() const;
protected:
std::string ssid_;
std::string password_;
CompactString ssid_;
CompactString password_;
#ifdef USE_WIFI_WPA2_EAP
optional<EAPAuth> eap_;
#endif // USE_WIFI_WPA2_EAP
@@ -221,14 +225,15 @@ class WiFiAP {
class WiFiScanResult {
public:
WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t channel, int8_t rssi, bool with_auth, bool is_hidden);
WiFiScanResult(const bssid_t &bssid, const char *ssid, size_t ssid_len, uint8_t channel, int8_t rssi, bool with_auth,
bool is_hidden);
bool matches(const WiFiAP &config) const;
bool get_matches() const;
void set_matches(bool matches);
const bssid_t &get_bssid() const;
const std::string &get_ssid() const;
const CompactString &get_ssid() const { return this->ssid_; }
uint8_t get_channel() const;
int8_t get_rssi() const;
bool get_with_auth() const;
@@ -242,7 +247,7 @@ class WiFiScanResult {
bssid_t bssid_;
uint8_t channel_;
int8_t rssi_;
std::string ssid_;
CompactString ssid_;
int8_t priority_{0};
bool matches_{false};
bool with_auth_;
@@ -381,6 +386,10 @@ class WiFiComponent : public Component {
void set_passive_scan(bool passive);
void save_wifi_sta(const std::string &ssid, const std::string &password);
void save_wifi_sta(const char *ssid, const char *password);
void save_wifi_sta(const CompactString &ssid, const CompactString &password) {
this->save_wifi_sta(ssid.c_str(), password.c_str());
}
// ========== INTERNAL METHODS ==========
// (In most use cases you won't need these)
@@ -541,7 +550,7 @@ class WiFiComponent : public Component {
int8_t find_first_non_hidden_index_() const;
/// Check if an SSID was seen in the most recent scan results
/// Used to skip hidden mode for SSIDs we know are visible
bool ssid_was_seen_in_scan_(const std::string &ssid) const;
bool ssid_was_seen_in_scan_(const CompactString &ssid) const;
/// Check if full scan results are needed (captive portal active, improv, listeners)
bool needs_full_scan_results_() const;
/// Check if network matches any configured network (for scan result filtering)

View File

@@ -756,10 +756,7 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {
if (status != OK) {
ESP_LOGV(TAG, "Scan failed: %d", status);
// Don't call retry_connect() here - this callback runs in SDK system context
// where yield() cannot be called. Instead, just set scan_done_ and let
// check_scanning_finished() handle the empty scan_result_ from loop context.
this->scan_done_ = true;
this->retry_connect();
return;
}
@@ -784,8 +781,8 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {
const char *ssid_cstr = reinterpret_cast<const char *>(it->ssid);
if (needs_full || this->matches_configured_network_(ssid_cstr, it->bssid)) {
this->scan_result_.emplace_back(
bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
std::string(ssid_cstr, it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN, it->is_hidden != 0);
bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]}, ssid_cstr,
it->ssid_len, it->channel, it->rssi, it->authmode != AUTH_OPEN, it->is_hidden != 0);
} else {
this->log_discarded_scan_result_(ssid_cstr, it->bssid, it->rssi, it->channel);
}

View File

@@ -870,8 +870,7 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
if (needs_full || this->matches_configured_network_(ssid_cstr, record.bssid)) {
bssid_t bssid;
std::copy(record.bssid, record.bssid + 6, bssid.begin());
std::string ssid(ssid_cstr);
this->scan_result_.emplace_back(bssid, std::move(ssid), record.primary, record.rssi,
this->scan_result_.emplace_back(bssid, ssid_cstr, strlen(ssid_cstr), record.primary, record.rssi,
record.authmode != WIFI_AUTH_OPEN, ssid_cstr[0] == '\0');
} else {
this->log_discarded_scan_result_(ssid_cstr, record.bssid, record.rssi, record.primary);

View File

@@ -694,7 +694,7 @@ void WiFiComponent::wifi_scan_done_callback_() {
auto &ap = scan->ap[i];
this->scan_result_.emplace_back(bssid_t{ap.bssid.addr[0], ap.bssid.addr[1], ap.bssid.addr[2], ap.bssid.addr[3],
ap.bssid.addr[4], ap.bssid.addr[5]},
std::string(ssid_cstr), ap.channel, ap.rssi, ap.auth != WIFI_AUTH_OPEN,
ssid_cstr, strlen(ssid_cstr), ap.channel, ap.rssi, ap.auth != WIFI_AUTH_OPEN,
ssid_cstr[0] == '\0');
} else {
auto &ap = scan->ap[i];

View File

@@ -149,9 +149,8 @@ void WiFiComponent::wifi_scan_result(void *env, const cyw43_ev_scan_result_t *re
bssid_t bssid;
std::copy(result->bssid, result->bssid + 6, bssid.begin());
std::string ssid(ssid_cstr);
WiFiScanResult res(bssid, std::move(ssid), result->channel, result->rssi, result->auth_mode != CYW43_AUTH_OPEN,
ssid_cstr[0] == '\0');
WiFiScanResult res(bssid, ssid_cstr, strlen(ssid_cstr), result->channel, result->rssi,
result->auth_mode != CYW43_AUTH_OPEN, ssid_cstr[0] == '\0');
if (std::find(this->scan_result_.begin(), this->scan_result_.end(), res) == this->scan_result_.end()) {
this->scan_result_.push_back(res);
}

View File

@@ -89,7 +89,7 @@ void ScanResultsWiFiInfo::on_wifi_scan_results(const wifi::wifi_scan_vector_t<wi
for (const auto &scan : results) {
if (scan.get_is_hidden())
continue;
const std::string &ssid = scan.get_ssid();
const auto &ssid = scan.get_ssid();
// Max space: ssid + ": " (2) + "-128" (4) + "dB\n" (3) = ssid + 9
if (ptr + ssid.size() + 9 > end)
break;

View File

@@ -12,7 +12,6 @@ from esphome.core import CORE
from esphome.types import ConfigType
from .const_zephyr import (
CONF_IEEE802154_VENDOR_OUI,
CONF_MAX_EP_NUMBER,
CONF_ON_JOIN,
CONF_POWER_SOURCE,
@@ -24,12 +23,7 @@ from .const_zephyr import (
ZigbeeComponent,
zigbee_ns,
)
from .zigbee_zephyr import (
zephyr_binary_sensor,
zephyr_number,
zephyr_sensor,
zephyr_switch,
)
from .zigbee_zephyr import zephyr_binary_sensor, zephyr_sensor, zephyr_switch
_LOGGER = logging.getLogger(__name__)
@@ -48,7 +42,6 @@ def zigbee_set_core_data(config: ConfigType) -> ConfigType:
BINARY_SENSOR_SCHEMA = cv.Schema({}).extend(zephyr_binary_sensor)
SENSOR_SCHEMA = cv.Schema({}).extend(zephyr_sensor)
SWITCH_SCHEMA = cv.Schema({}).extend(zephyr_switch)
NUMBER_SCHEMA = cv.Schema({}).extend(zephyr_number)
CONFIG_SCHEMA = cv.All(
cv.Schema(
@@ -65,13 +58,6 @@ CONFIG_SCHEMA = cv.All(
cv.Optional(CONF_POWER_SOURCE, default="DC_SOURCE"): cv.enum(
POWER_SOURCE, upper=True
),
cv.Optional(CONF_IEEE802154_VENDOR_OUI): cv.All(
cv.Any(
cv.int_range(min=0x000000, max=0xFFFFFF),
cv.one_of(*["random"], lower=True),
),
cv.requires_component("nrf52"),
),
}
).extend(cv.COMPONENT_SCHEMA),
zigbee_set_core_data,
@@ -131,25 +117,10 @@ async def setup_switch(entity: cg.MockObj, config: ConfigType) -> None:
await zephyr_setup_switch(entity, config)
async def setup_number(
entity: cg.MockObj,
config: ConfigType,
min_value: float,
max_value: float,
step: float,
) -> None:
if not config.get(CONF_ZIGBEE_ID) or config.get(CONF_INTERNAL):
return
if CORE.using_zephyr:
from .zigbee_zephyr import zephyr_setup_number
await zephyr_setup_number(entity, config, min_value, max_value, step)
def consume_endpoint(config: ConfigType) -> ConfigType:
if not config.get(CONF_ZIGBEE_ID) or config.get(CONF_INTERNAL):
return config
if CONF_NAME in config and " " in config[CONF_NAME]:
if " " in config[CONF_NAME]:
_LOGGER.warning(
"Spaces in '%s' work with ZHA but not Zigbee2MQTT. For Zigbee2MQTT use '%s'",
config[CONF_NAME],
@@ -173,10 +144,6 @@ def validate_switch(config: ConfigType) -> ConfigType:
return consume_endpoint(config)
def validate_number(config: ConfigType) -> ConfigType:
return consume_endpoint(config)
ZIGBEE_ACTION_SCHEMA = automation.maybe_simple_id(
cv.Schema(
{

View File

@@ -4,7 +4,6 @@ zigbee_ns = cg.esphome_ns.namespace("zigbee")
ZigbeeComponent = zigbee_ns.class_("ZigbeeComponent", cg.Component)
BinaryAttrs = zigbee_ns.struct("BinaryAttrs")
AnalogAttrs = zigbee_ns.struct("AnalogAttrs")
AnalogAttrsOutput = zigbee_ns.struct("AnalogAttrsOutput")
CONF_MAX_EP_NUMBER = 8
CONF_ZIGBEE_ID = "zigbee_id"
@@ -13,7 +12,6 @@ CONF_WIPE_ON_BOOT = "wipe_on_boot"
CONF_ZIGBEE_BINARY_SENSOR = "zigbee_binary_sensor"
CONF_ZIGBEE_SENSOR = "zigbee_sensor"
CONF_ZIGBEE_SWITCH = "zigbee_switch"
CONF_ZIGBEE_NUMBER = "zigbee_number"
CONF_POWER_SOURCE = "power_source"
POWER_SOURCE = {
"UNKNOWN": "ZB_ZCL_BASIC_POWER_SOURCE_UNKNOWN",
@@ -24,7 +22,6 @@ POWER_SOURCE = {
"EMERGENCY_MAINS_CONST": "ZB_ZCL_BASIC_POWER_SOURCE_EMERGENCY_MAINS_CONST",
"EMERGENCY_MAINS_TRANSF": "ZB_ZCL_BASIC_POWER_SOURCE_EMERGENCY_MAINS_TRANSF",
}
CONF_IEEE802154_VENDOR_OUI = "ieee802154_vendor_oui"
# Keys for CORE.data storage
KEY_ZIGBEE = "zigbee"
@@ -40,4 +37,3 @@ ZB_ZCL_CLUSTER_ID_IDENTIFY = "ZB_ZCL_CLUSTER_ID_IDENTIFY"
ZB_ZCL_CLUSTER_ID_BINARY_INPUT = "ZB_ZCL_CLUSTER_ID_BINARY_INPUT"
ZB_ZCL_CLUSTER_ID_ANALOG_INPUT = "ZB_ZCL_CLUSTER_ID_ANALOG_INPUT"
ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT = "ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT"
ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT = "ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT"

View File

@@ -1,86 +0,0 @@
import esphome.codegen as cg
from esphome.components import time as time_
import esphome.config_validation as cv
from esphome.const import CONF_ID
from esphome.core import CORE
from esphome.types import ConfigType
from .. import consume_endpoint
from ..const_zephyr import CONF_ZIGBEE_ID, zigbee_ns
from ..zigbee_zephyr import (
ZigbeeClusterDesc,
ZigbeeComponent,
get_slot_index,
zigbee_new_attr_list,
zigbee_new_cluster_list,
zigbee_new_variable,
zigbee_register_ep,
)
DEPENDENCIES = ["zigbee"]
ZigbeeTime = zigbee_ns.class_("ZigbeeTime", time_.RealTimeClock)
CONFIG_SCHEMA = cv.All(
time_.TIME_SCHEMA.extend(
{
cv.GenerateID(): cv.declare_id(ZigbeeTime),
cv.OnlyWith(CONF_ZIGBEE_ID, ["nrf52", "zigbee"]): cv.use_id(
ZigbeeComponent
),
}
)
.extend(cv.COMPONENT_SCHEMA)
.extend(cv.polling_component_schema("1s")),
consume_endpoint,
)
async def to_code(config: ConfigType) -> None:
CORE.add_job(_add_time, config)
async def _add_time(config: ConfigType) -> None:
slot_index = get_slot_index()
# Create unique names for this sensor's variables based on slot index
prefix = f"zigbee_ep{slot_index + 1}"
attrs_name = f"{prefix}_time_attrs"
attr_list_name = f"{prefix}_time_attrib_list"
cluster_list_name = f"{prefix}_cluster_list"
ep_name = f"{prefix}_ep"
# Create the binary attributes structure
time_attrs = zigbee_new_variable(attrs_name, "zb_zcl_time_attrs_t")
attr_list = zigbee_new_attr_list(
attr_list_name,
"ZB_ZCL_DECLARE_TIME_ATTR_LIST",
str(time_attrs),
)
# Create cluster list and register endpoint
cluster_list_name, clusters = zigbee_new_cluster_list(
cluster_list_name,
[
ZigbeeClusterDesc("ZB_ZCL_CLUSTER_ID_TIME", attr_list),
ZigbeeClusterDesc("ZB_ZCL_CLUSTER_ID_TIME"),
],
)
zigbee_register_ep(
ep_name,
cluster_list_name,
0,
clusters,
slot_index,
"ZB_HA_CUSTOM_ATTR_DEVICE_ID",
)
# Create the ZigbeeTime component
var = cg.new_Pvariable(config[CONF_ID])
await time_.register_time(var, config)
await cg.register_component(var, config)
cg.add(var.set_endpoint(slot_index + 1))
cg.add(var.set_cluster_attributes(time_attrs))
hub = await cg.get_variable(config[CONF_ZIGBEE_ID])
cg.add(var.set_parent(hub))

View File

@@ -1,87 +0,0 @@
#include "zigbee_time_zephyr.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_TIME)
#include "esphome/core/log.h"
namespace esphome::zigbee {
static const char *const TAG = "zigbee.time";
// This time standard is the number of
// seconds since 0 hrs 0 mins 0 sec on 1st January 2000 UTC (Universal Coordinated Time).
constexpr time_t EPOCH_2000 = 946684800;
ZigbeeTime *global_time = nullptr; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
void ZigbeeTime::sync_time(zb_ret_t status, zb_uint32_t auth_level, zb_uint16_t short_addr, zb_uint8_t endpoint,
zb_uint32_t nw_time) {
if (status == RET_OK && auth_level >= ZB_ZCL_TIME_HAS_SYNCHRONIZED_BIT) {
global_time->set_epoch_time(nw_time + EPOCH_2000);
} else if (status != RET_TIMEOUT || !global_time->has_time_) {
ESP_LOGE(TAG, "Status: %d, auth_level: %u, short_addr: %d, endpoint: %d, nw_time: %u", status, auth_level,
short_addr, endpoint, nw_time);
}
}
void ZigbeeTime::setup() {
global_time = this;
this->parent_->add_callback(this->endpoint_, [this](zb_bufid_t bufid) { this->zcl_device_cb_(bufid); });
synchronize_epoch_(EPOCH_2000);
this->parent_->add_join_callback([this]() { zb_zcl_time_server_synchronize(this->endpoint_, sync_time); });
}
void ZigbeeTime::dump_config() {
ESP_LOGCONFIG(TAG,
"Zigbee Time\n"
" Endpoint: %d",
this->endpoint_);
RealTimeClock::dump_config();
}
void ZigbeeTime::update() {
time_t time = timestamp_now();
this->cluster_attributes_->time = time - EPOCH_2000;
}
void ZigbeeTime::set_epoch_time(uint32_t epoch) {
this->defer([this, epoch]() {
this->synchronize_epoch_(epoch);
this->has_time_ = true;
});
}
void ZigbeeTime::zcl_device_cb_(zb_bufid_t bufid) {
zb_zcl_device_callback_param_t *p_device_cb_param = ZB_BUF_GET_PARAM(bufid, zb_zcl_device_callback_param_t);
zb_zcl_device_callback_id_t device_cb_id = p_device_cb_param->device_cb_id;
zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
switch (device_cb_id) {
/* ZCL set attribute value */
case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
if (cluster_id == ZB_ZCL_CLUSTER_ID_TIME) {
if (attr_id == ZB_ZCL_ATTR_TIME_TIME_ID) {
zb_uint32_t value = p_device_cb_param->cb_param.set_attr_value_param.values.data32;
ESP_LOGI(TAG, "Synchronize time to %u", value);
this->defer([this, value]() { synchronize_epoch_(value + EPOCH_2000); });
} else if (attr_id == ZB_ZCL_ATTR_TIME_TIME_STATUS_ID) {
zb_uint8_t value = p_device_cb_param->cb_param.set_attr_value_param.values.data8;
ESP_LOGI(TAG, "Time status %hd", value);
this->defer([this, value]() { this->has_time_ = ZB_ZCL_TIME_TIME_STATUS_SYNCHRONIZED_BIT_IS_SET(value); });
}
} else {
/* other clusters attribute handled here */
ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
}
break;
default:
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
break;
}
ESP_LOGD(TAG, "Zcl_device_cb_ status: %hd", p_device_cb_param->status);
}
} // namespace esphome::zigbee
#endif

View File

@@ -1,38 +0,0 @@
#pragma once
#include "esphome/core/defines.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_TIME)
#include "esphome/core/component.h"
#include "esphome/components/time/real_time_clock.h"
#include "esphome/components/zigbee/zigbee_zephyr.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
}
namespace esphome::zigbee {
class ZigbeeTime : public time::RealTimeClock, public ZigbeeEntity {
public:
void setup() override;
void dump_config() override;
void update() override;
void set_cluster_attributes(zb_zcl_time_attrs_t &cluster_attributes) {
this->cluster_attributes_ = &cluster_attributes;
}
void set_epoch_time(uint32_t epoch);
protected:
static void sync_time(zb_ret_t status, zb_uint32_t auth_level, zb_uint16_t short_addr, zb_uint8_t endpoint,
zb_uint32_t nw_time);
void zcl_device_cb_(zb_bufid_t bufid);
zb_zcl_time_attrs_t *cluster_attributes_{nullptr};
bool has_time_{false};
};
} // namespace esphome::zigbee
#endif

View File

@@ -22,7 +22,7 @@ void ZigbeeBinarySensor::setup() {
ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_BINARY_INPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
ZB_ZCL_ATTR_BINARY_INPUT_PRESENT_VALUE_ID, &this->cluster_attributes_->present_value,
ZB_FALSE);
this->parent_->force_report();
this->parent_->flush();
});
}

View File

@@ -1,111 +0,0 @@
#include "zigbee_number_zephyr.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_NUMBER)
#include "esphome/core/log.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
#include <zb_nrf_platform.h>
#include <zigbee/zigbee_app_utils.h>
#include <zb_error_to_string.h>
}
namespace esphome::zigbee {
static const char *const TAG = "zigbee.number";
void ZigbeeNumber::setup() {
this->parent_->add_callback(this->endpoint_, [this](zb_bufid_t bufid) { this->zcl_device_cb_(bufid); });
this->number_->add_on_state_callback([this](float state) {
this->cluster_attributes_->present_value = state;
ESP_LOGD(TAG, "Set attribute endpoint: %d, present_value %f", this->endpoint_,
this->cluster_attributes_->present_value);
ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, (zb_uint8_t *) &cluster_attributes_->present_value,
ZB_FALSE);
this->parent_->force_report();
});
}
void ZigbeeNumber::dump_config() {
ESP_LOGCONFIG(TAG,
"Zigbee Number\n"
" Endpoint: %d, present_value %f",
this->endpoint_, this->cluster_attributes_->present_value);
}
void ZigbeeNumber::zcl_device_cb_(zb_bufid_t bufid) {
zb_zcl_device_callback_param_t *p_device_cb_param = ZB_BUF_GET_PARAM(bufid, zb_zcl_device_callback_param_t);
zb_zcl_device_callback_id_t device_cb_id = p_device_cb_param->device_cb_id;
zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
switch (device_cb_id) {
/* ZCL set attribute value */
case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
if (cluster_id == ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT) {
ESP_LOGI(TAG, "Analog output attribute setting");
if (attr_id == ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID) {
float value =
*reinterpret_cast<const float *>(&p_device_cb_param->cb_param.set_attr_value_param.values.data32);
this->defer([this, value]() {
this->cluster_attributes_->present_value = value;
auto call = this->number_->make_call();
call.set_value(value);
call.perform();
});
}
} else {
/* other clusters attribute handled here */
ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
}
break;
default:
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
break;
}
ESP_LOGD(TAG, "%s status: %hd", __func__, p_device_cb_param->status);
}
const zb_uint8_t ZB_ZCL_ANALOG_OUTPUT_STATUS_FLAG_MAX_VALUE = 0x0F;
static zb_ret_t check_value_analog_server(zb_uint16_t attr_id, zb_uint8_t endpoint,
zb_uint8_t *value) { // NOLINT(readability-non-const-parameter)
zb_ret_t ret = RET_OK;
ZVUNUSED(endpoint);
switch (attr_id) {
case ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID:
ret = ZB_ZCL_CHECK_BOOL_VALUE(*value) ? RET_OK : RET_ERROR;
break;
case ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID:
break;
case ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID:
if (*value > ZB_ZCL_ANALOG_OUTPUT_STATUS_FLAG_MAX_VALUE) {
ret = RET_ERROR;
}
break;
default:
break;
}
return ret;
}
} // namespace esphome::zigbee
void zb_zcl_analog_output_init_server() {
zb_zcl_add_cluster_handlers(ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
esphome::zigbee::check_value_analog_server, (zb_zcl_cluster_write_attr_hook_t) NULL,
(zb_zcl_cluster_handler_t) NULL);
}
void zb_zcl_analog_output_init_client() {
zb_zcl_add_cluster_handlers(ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_CLIENT_ROLE,
(zb_zcl_cluster_check_value_t) NULL, (zb_zcl_cluster_write_attr_hook_t) NULL,
(zb_zcl_cluster_handler_t) NULL);
}
#endif

View File

@@ -1,118 +0,0 @@
#pragma once
#include "esphome/core/defines.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_NUMBER)
#include "esphome/components/zigbee/zigbee_zephyr.h"
#include "esphome/core/component.h"
#include "esphome/components/number/number.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
}
enum {
ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID = 0x001C,
ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID = 0x0041,
ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID = 0x0045,
ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID = 0x0051,
ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID = 0x0055,
ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID = 0x006A,
ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID = 0x006F,
ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID = 0x0075,
};
#define ZB_ZCL_ANALOG_OUTPUT_CLUSTER_REVISION_DEFAULT ((zb_uint16_t) 0x0001u)
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID, ZB_ZCL_ATTR_TYPE_CHAR_STRING, ZB_ZCL_ATTR_ACCESS_READ_ONLY, \
(ZB_ZCL_NON_MANUFACTURER_SPECIFIC), (void *) (data_ptr) \
}
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID, ZB_ZCL_ATTR_TYPE_BOOL, \
ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
(void *) (data_ptr) \
}
// PresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
ZB_ZCL_ATTR_ACCESS_READ_WRITE | ZB_ZCL_ATTR_ACCESS_REPORTING, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
(void *) (data_ptr) \
}
// MaxPresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
(void *) (data_ptr) \
}
// MinPresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
(void *) (data_ptr) \
}
// Resolution
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
(void *) (data_ptr) \
}
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID, ZB_ZCL_ATTR_TYPE_8BITMAP, \
ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_REPORTING, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
(void *) (data_ptr) \
}
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID(data_ptr) \
{ \
ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID, ZB_ZCL_ATTR_TYPE_16BIT_ENUM, ZB_ZCL_ATTR_ACCESS_READ_ONLY, \
(ZB_ZCL_NON_MANUFACTURER_SPECIFIC), (void *) (data_ptr) \
}
#define ESPHOME_ZB_ZCL_DECLARE_ANALOG_OUTPUT_ATTRIB_LIST(attr_list, out_of_service, present_value, status_flag, \
max_present_value, min_present_value, resolution, \
engineering_units, description) \
ZB_ZCL_START_DECLARE_ATTRIB_LIST_CLUSTER_REVISION(attr_list, ZB_ZCL_ANALOG_OUTPUT) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID, (out_of_service)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, (present_value)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID, (status_flag)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID, (max_present_value)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID, (min_present_value)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID, (resolution)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID, (engineering_units)) \
ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID, (description)) \
ZB_ZCL_FINISH_DECLARE_ATTRIB_LIST
void zb_zcl_analog_output_init_server();
void zb_zcl_analog_output_init_client();
#define ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT_SERVER_ROLE_INIT zb_zcl_analog_output_init_server
#define ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT_CLIENT_ROLE_INIT zb_zcl_analog_output_init_client
namespace esphome::zigbee {
class ZigbeeNumber : public ZigbeeEntity, public Component {
public:
ZigbeeNumber(number::Number *n) : number_(n) {}
void set_cluster_attributes(AnalogAttrsOutput &cluster_attributes) {
this->cluster_attributes_ = &cluster_attributes;
}
void setup() override;
void dump_config() override;
protected:
number::Number *number_;
AnalogAttrsOutput *cluster_attributes_{nullptr};
void zcl_device_cb_(zb_bufid_t bufid);
};
} // namespace esphome::zigbee
#endif

View File

@@ -21,7 +21,7 @@ void ZigbeeSensor::setup() {
ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_ANALOG_INPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
ZB_ZCL_ATTR_ANALOG_INPUT_PRESENT_VALUE_ID,
(zb_uint8_t *) &this->cluster_attributes_->present_value, ZB_FALSE);
this->parent_->force_report();
this->parent_->flush();
});
}

View File

@@ -31,7 +31,7 @@ void ZigbeeSwitch::setup() {
ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
ZB_ZCL_ATTR_BINARY_OUTPUT_PRESENT_VALUE_ID, &this->cluster_attributes_->present_value,
ZB_FALSE);
this->parent_->force_report();
this->parent_->flush();
});
}
@@ -41,6 +41,8 @@ void ZigbeeSwitch::zcl_device_cb_(zb_bufid_t bufid) {
zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
p_device_cb_param->status = RET_OK;
switch (device_cb_id) {
/* ZCL set attribute value */
case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
@@ -50,17 +52,16 @@ void ZigbeeSwitch::zcl_device_cb_(zb_bufid_t bufid) {
if (attr_id == ZB_ZCL_ATTR_BINARY_OUTPUT_PRESENT_VALUE_ID) {
this->defer([this, value]() {
this->cluster_attributes_->present_value = value ? ZB_TRUE : ZB_FALSE;
this->switch_->control(value);
this->switch_->publish_state(value);
});
}
} else {
/* other clusters attribute handled here */
ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
}
break;
default:
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
p_device_cb_param->status = RET_ERROR;
break;
}

View File

@@ -101,8 +101,8 @@ void ZigbeeComponent::zcl_device_cb(zb_bufid_t bufid) {
zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
auto endpoint = p_device_cb_param->endpoint;
ESP_LOGI(TAG, "%s id %hd, cluster_id %d, attr_id %d, endpoint: %d", __func__, device_cb_id, cluster_id, attr_id,
endpoint);
ESP_LOGI(TAG, "Zcl_device_cb %s id %hd, cluster_id %d, attr_id %d, endpoint: %d", __func__, device_cb_id, cluster_id,
attr_id, endpoint);
/* Set default response value. */
p_device_cb_param->status = RET_OK;
@@ -112,10 +112,10 @@ void ZigbeeComponent::zcl_device_cb(zb_bufid_t bufid) {
const auto &cb = global_zigbee->callbacks_[endpoint - 1];
if (cb) {
cb(bufid);
return;
}
return;
}
p_device_cb_param->status = RET_NOT_IMPLEMENTED;
p_device_cb_param->status = RET_ERROR;
}
void ZigbeeComponent::on_join_() {
@@ -230,11 +230,11 @@ static void send_attribute_report(zb_bufid_t bufid, zb_uint16_t cmd_id) {
zb_buf_free(bufid);
}
void ZigbeeComponent::force_report() { this->force_report_ = true; }
void ZigbeeComponent::flush() { this->need_flush_ = true; }
void ZigbeeComponent::loop() {
if (this->force_report_) {
this->force_report_ = false;
if (this->need_flush_) {
this->need_flush_ = false;
zb_buf_get_out_delayed_ext(send_attribute_report, 0, 0);
}
}
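The force_report() → flush() rename matches the mechanism: callers only raise a flag, and loop() coalesces any number of flush() calls into one report per iteration. The pattern in miniature:

// Deferred-flush sketch: cheap to call from any callback context, with the
// real work (one batched attribute report) done on the next loop() pass.
class FlushSketch {
 public:
  void flush() { this->need_flush_ = true; }
  void loop() {
    if (this->need_flush_) {
      this->need_flush_ = false;
      // issue the batched report here (zb_buf_get_out_delayed_ext above)
    }
  }

 protected:
  bool need_flush_{false};
};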

View File

@@ -60,12 +60,6 @@ struct AnalogAttrs {
zb_uchar_t description[ZB_ZCL_MAX_STRING_SIZE];
};
struct AnalogAttrsOutput : AnalogAttrs {
float max_present_value;
float min_present_value;
float resolution;
};
class ZigbeeComponent : public Component {
public:
void setup() override;
@@ -78,7 +72,7 @@ class ZigbeeComponent : public Component {
void zboss_signal_handler_esphome(zb_bufid_t bufid);
void factory_reset();
Trigger<> *get_join_trigger() { return &this->join_trigger_; };
void force_report();
void flush();
void loop() override;
protected:
@@ -90,7 +84,7 @@ class ZigbeeComponent : public Component {
std::array<std::function<void(zb_bufid_t bufid)>, ZIGBEE_ENDPOINTS_COUNT> callbacks_{};
CallbackManager<void()> join_cb_;
Trigger<> join_trigger_;
bool force_report_{false};
bool need_flush_{false};
};
class ZigbeeEntity {

View File

@@ -49,13 +49,11 @@ from esphome.cpp_generator import (
from esphome.types import ConfigType
from .const_zephyr import (
CONF_IEEE802154_VENDOR_OUI,
CONF_ON_JOIN,
CONF_POWER_SOURCE,
CONF_WIPE_ON_BOOT,
CONF_ZIGBEE_BINARY_SENSOR,
CONF_ZIGBEE_ID,
CONF_ZIGBEE_NUMBER,
CONF_ZIGBEE_SENSOR,
CONF_ZIGBEE_SWITCH,
KEY_EP_NUMBER,
@@ -63,14 +61,12 @@ from .const_zephyr import (
POWER_SOURCE,
ZB_ZCL_BASIC_ATTRS_EXT_T,
ZB_ZCL_CLUSTER_ID_ANALOG_INPUT,
ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT,
ZB_ZCL_CLUSTER_ID_BASIC,
ZB_ZCL_CLUSTER_ID_BINARY_INPUT,
ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT,
ZB_ZCL_CLUSTER_ID_IDENTIFY,
ZB_ZCL_IDENTIFY_ATTRS_T,
AnalogAttrs,
AnalogAttrsOutput,
BinaryAttrs,
ZigbeeComponent,
zigbee_ns,
@@ -79,7 +75,6 @@ from .const_zephyr import (
ZigbeeBinarySensor = zigbee_ns.class_("ZigbeeBinarySensor", cg.Component)
ZigbeeSensor = zigbee_ns.class_("ZigbeeSensor", cg.Component)
ZigbeeSwitch = zigbee_ns.class_("ZigbeeSwitch", cg.Component)
ZigbeeNumber = zigbee_ns.class_("ZigbeeNumber", cg.Component)
# BACnet engineering units mapping (ZCL uses BACnet unit codes)
# See: https://github.com/zigpy/zha/blob/dev/zha/application/platforms/number/bacnet.py
@@ -143,15 +138,6 @@ zephyr_switch = cv.Schema(
}
)
zephyr_number = cv.Schema(
{
cv.OnlyWith(CONF_ZIGBEE_ID, ["nrf52", "zigbee"]): cv.use_id(ZigbeeComponent),
cv.OnlyWith(CONF_ZIGBEE_NUMBER, ["nrf52", "zigbee"]): cv.declare_id(
ZigbeeNumber
),
}
)
async def zephyr_to_code(config: ConfigType) -> None:
zephyr_add_prj_conf("ZIGBEE", True)
@@ -166,13 +152,6 @@ async def zephyr_to_code(config: ConfigType) -> None:
zephyr_add_prj_conf("NET_IP_ADDR_CHECK", False)
zephyr_add_prj_conf("NET_UDP", False)
if CONF_IEEE802154_VENDOR_OUI in config:
zephyr_add_prj_conf("IEEE802154_VENDOR_OUI_ENABLE", True)
random_number = config[CONF_IEEE802154_VENDOR_OUI]
if random_number == "random":
random_number = random.randint(0x000000, 0xFFFFFF)
zephyr_add_prj_conf("IEEE802154_VENDOR_OUI", random_number)
if config[CONF_WIPE_ON_BOOT]:
if config[CONF_WIPE_ON_BOOT] == "once":
cg.add_define(
@@ -357,24 +336,14 @@ async def zephyr_setup_switch(entity: cg.MockObj, config: ConfigType) -> None:
CORE.add_job(_add_switch, entity, config)
async def zephyr_setup_number(
entity: cg.MockObj,
config: ConfigType,
min_value: float,
max_value: float,
step: float,
) -> None:
CORE.add_job(_add_number, entity, config, min_value, max_value, step)
def get_slot_index() -> int:
"""Find the next available endpoint slot."""
def _slot_index() -> int:
"""Find the next available endpoint slot"""
slot = next(
(i for i, v in enumerate(CORE.data[KEY_ZIGBEE][KEY_EP_NUMBER]) if v == ""), None
)
if slot is None:
raise cv.Invalid(
f"No available Zigbee endpoint slots ({len(CORE.data[KEY_ZIGBEE][KEY_EP_NUMBER])} in use)"
f"Not found empty slot, size ({len(CORE.data[KEY_ZIGBEE][KEY_EP_NUMBER])})"
)
return slot
@@ -389,7 +358,7 @@ async def _add_zigbee_ep(
app_device_id: str,
extra_field_values: dict[str, int] | None = None,
) -> None:
slot_index = get_slot_index()
slot_index = _slot_index()
prefix = f"zigbee_ep{slot_index + 1}"
attrs_name = f"{prefix}_attrs"
@@ -474,31 +443,3 @@ async def _add_switch(entity: cg.MockObj, config: ConfigType) -> None:
ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT,
"ZB_HA_CUSTOM_ATTR_DEVICE_ID",
)
async def _add_number(
entity: cg.MockObj,
config: ConfigType,
min_value: float,
max_value: float,
step: float,
) -> None:
# Get BACnet engineering unit from unit_of_measurement
unit = config.get(CONF_UNIT_OF_MEASUREMENT, "")
bacnet_unit = BACNET_UNITS.get(unit, BACNET_UNIT_NO_UNITS)
await _add_zigbee_ep(
entity,
config,
CONF_ZIGBEE_NUMBER,
AnalogAttrsOutput,
"ESPHOME_ZB_ZCL_DECLARE_ANALOG_OUTPUT_ATTRIB_LIST",
ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT,
"ZB_HA_CUSTOM_ATTR_DEVICE_ID",
extra_field_values={
"max_present_value": max_value,
"min_present_value": min_value,
"resolution": step,
"engineering_units": bacnet_unit,
},
)

View File

@@ -149,7 +149,6 @@ CONF_ASSUMED_STATE = "assumed_state"
CONF_AT = "at"
CONF_ATTENUATION = "attenuation"
CONF_ATTRIBUTE = "attribute"
CONF_AUDIO_DAC = "audio_dac"
CONF_AUTH = "auth"
CONF_AUTO_CLEAR_ENABLED = "auto_clear_enabled"
CONF_AUTO_MODE = "auto_mode"

View File

@@ -13,6 +13,7 @@
#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <new>
#ifdef USE_ESP32
#include "rom/crc.h"
@@ -858,4 +859,60 @@ void IRAM_ATTR HOT delay_microseconds_safe(uint32_t us) {
;
}
// CompactString implementation
CompactString::CompactString(const char *str, size_t len) {
if (len > MAX_LENGTH) {
len = MAX_LENGTH; // Clamp to max valid length
}
this->length_ = len;
if (len <= INLINE_CAPACITY) {
// Store inline with null terminator
this->is_heap_ = 0;
if (len > 0) {
std::memcpy(this->storage_, str, len);
}
this->storage_[len] = '\0';
} else {
// Heap allocate with null terminator
this->is_heap_ = 1;
char *heap_data = new char[len + 1]; // NOLINT(cppcoreguidelines-owning-memory)
std::memcpy(heap_data, str, len);
heap_data[len] = '\0';
this->set_heap_ptr_(heap_data);
}
}
CompactString::CompactString(const CompactString &other) : CompactString(other.data(), other.size()) {}
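// Note: the assignment operators below reuse the constructors via
// destroy-then-placement-new (see the "use placement new to avoid duplicate
// code" commit). This assumes reconstruction cannot throw midway, which holds
// under the usual exceptions-disabled embedded build flags.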
CompactString &CompactString::operator=(const CompactString &other) {
if (this != &other) {
this->~CompactString();
new (this) CompactString(other);
}
return *this;
}
CompactString::CompactString(CompactString &&other) noexcept : length_(other.length_), is_heap_(other.is_heap_) {
// Copy full storage (includes null terminator for inline, or pointer for heap)
std::memcpy(this->storage_, other.storage_, INLINE_CAPACITY + 1);
other.length_ = 0;
other.is_heap_ = 0;
other.storage_[0] = '\0';
}
CompactString &CompactString::operator=(CompactString &&other) noexcept {
if (this != &other) {
this->~CompactString();
new (this) CompactString(std::move(other));
}
return *this;
}
CompactString::~CompactString() {
if (this->is_heap_) {
delete[] this->get_heap_ptr_(); // NOLINT(cppcoreguidelines-owning-memory)
}
}
} // namespace esphome

View File

@@ -16,6 +16,7 @@
#include <type_traits>
#include <vector>
#include <concepts>
#include <cstring>
#include "esphome/core/optional.h"
@@ -1784,4 +1785,58 @@ template<typename T, enable_if_t<std::is_pointer<T *>::value, int> = 0> T &id(T
///@}
/// 20-byte string: 18 chars inline + null, heap for longer. Always null-terminated.
class CompactString {
public:
static constexpr uint8_t MAX_LENGTH = 127;
static constexpr uint8_t INLINE_CAPACITY = 18; // 18 chars + null terminator fits in 19 bytes
static constexpr uint8_t BUFFER_SIZE = MAX_LENGTH + 1; // For external buffer (128 bytes)
CompactString() : length_(0), is_heap_(0) { this->storage_[0] = '\0'; }
CompactString(const char *str, size_t len);
CompactString(const CompactString &other);
CompactString(CompactString &&other) noexcept;
CompactString &operator=(const CompactString &other);
CompactString &operator=(CompactString &&other) noexcept;
~CompactString();
const char *data() const { return this->is_heap_ ? this->get_heap_ptr_() : this->storage_; }
const char *c_str() const { return this->data(); } // Always null-terminated
size_t size() const { return this->length_; }
bool empty() const { return this->length_ == 0; }
// Implicit conversion to std::string for backwards compatibility
operator std::string() const { return std::string(this->data(), this->size()); }
bool operator==(const CompactString &other) const {
return this->size() == other.size() && std::memcmp(this->data(), other.data(), this->size()) == 0;
}
bool operator==(const std::string &other) const {
return this->size() == other.size() && std::memcmp(this->data(), other.data(), this->size()) == 0;
}
bool operator==(const char *other) const {
return this->size() == std::strlen(other) && std::memcmp(this->data(), other, this->size()) == 0;
}
bool operator!=(const CompactString &other) const { return !(*this == other); }
bool operator!=(const std::string &other) const { return !(*this == other); }
bool operator!=(const char *other) const { return !(*this == other); }
protected:
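// Note: the heap pointer is stored byte-wise inside storage_[], which is not
// suitably aligned for a char* object; memcpy keeps these accesses
// well-defined where a reinterpret_cast could hit misaligned loads and
// strict-aliasing issues.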
char *get_heap_ptr_() const {
char *ptr;
std::memcpy(&ptr, this->storage_, sizeof(ptr));
return ptr;
}
void set_heap_ptr_(char *ptr) { std::memcpy(this->storage_, &ptr, sizeof(ptr)); }
// Storage for string data. When is_heap_=0, contains the string directly (null-terminated).
// When is_heap_=1, first sizeof(char*) bytes contain pointer to heap allocation.
char storage_[INLINE_CAPACITY + 1]; // 19 bytes: 18 chars + null terminator
uint8_t length_ : 7; // String length (0-127)
uint8_t is_heap_ : 1; // 1 if using heap pointer, 0 if using inline storage
// Total size: 20 bytes (19 bytes storage + 1 byte bitfields)
};
static_assert(sizeof(CompactString) == 20, "CompactString must be exactly 20 bytes");
} // namespace esphome
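A short usage sketch of the class above (values hypothetical, not part of the diff):

#include <cstdio>

void compact_string_demo() {
  esphome::CompactString short_ssid("MyWiFi", 6);                      // <= 18 chars: stored inline, no heap
  esphome::CompactString long_ssid("A rather long network name", 26);  // > 18 chars: one heap allocation
  // Always null-terminated, so c_str() is safe for printf-style logging:
  std::printf("%s (%zu chars)\n", short_ssid.c_str(), short_ssid.size());
  // Comparisons work against CompactString, std::string and const char*:
  bool same = (short_ssid == "MyWiFi");  // true
  (void) same;
  (void) long_ssid;
}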

View File

@@ -1,36 +0,0 @@
ch423:
- id: ch423_hub
i2c_id: i2c_bus
binary_sensor:
- platform: gpio
id: ch423_input
name: CH423 Binary Sensor
pin:
ch423: ch423_hub
number: 1
mode: INPUT
inverted: true
- platform: gpio
id: ch423_input_2
name: CH423 Binary Sensor 2
pin:
ch423: ch423_hub
number: 0
mode: INPUT
inverted: false
output:
- platform: gpio
id: ch423_out_11
pin:
ch423: ch423_hub
number: 11
mode: OUTPUT_OPEN_DRAIN
inverted: true
- platform: gpio
id: ch423_out_23
pin:
ch423: ch423_hub
number: 23
mode: OUTPUT_OPEN_DRAIN
inverted: false

View File

@@ -1,4 +0,0 @@
packages:
i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml
<<: !include common.yaml

View File

@@ -1,4 +0,0 @@
packages:
i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml
<<: !include common.yaml

View File

@@ -1,4 +0,0 @@
packages:
i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml
<<: !include common.yaml

View File

@@ -8,16 +8,6 @@ esp32:
enable_lwip_bridge_interface: true
disable_libc_locks_in_iram: false # Test explicit opt-out of RAM optimization
use_full_certificate_bundle: false # Test CMN bundle (default)
include_builtin_idf_components:
- freertos # Test escape hatch (freertos is always included anyway)
disable_debug_stubs: true
disable_ocd_aware: true
disable_usb_serial_jtag_secondary: true
disable_dev_null_vfs: true
disable_mbedtls_peer_cert: true
disable_mbedtls_pkcs7: true
disable_regi2c_in_iram: true
disable_fatfs: true
wifi:
ssid: MySSID

View File

@@ -10,14 +10,6 @@ esp32:
ref: 2.7.0
advanced:
enable_idf_experimental_features: yes
disable_debug_stubs: true
disable_ocd_aware: true
disable_usb_serial_jtag_secondary: true
disable_dev_null_vfs: true
disable_mbedtls_peer_cert: true
disable_mbedtls_pkcs7: true
disable_regi2c_in_iram: true
disable_fatfs: true
ota:
platform: esphome

View File

@@ -5,14 +5,6 @@ esp32:
advanced:
execute_from_psram: true
disable_libc_locks_in_iram: true # Test default RAM optimization enabled
disable_debug_stubs: true
disable_ocd_aware: true
disable_usb_serial_jtag_secondary: true
disable_dev_null_vfs: true
disable_mbedtls_peer_cert: true
disable_mbedtls_pkcs7: true
disable_regi2c_in_iram: true
disable_fatfs: true
psram:
mode: octal

View File

@@ -10,7 +10,6 @@ globals:
type: int
restore_value: true
initial_value: "0"
update_interval: 5s
- id: glob_float
type: float
restore_value: true

View File

@@ -4,16 +4,15 @@ interval:
- interval: 60s
then:
- lambda: |-
// Test build_json - returns SerializationBuffer, use auto to avoid heap allocation
auto json_buf = esphome::json::build_json([](JsonObject root) {
// Test build_json
std::string json_str = esphome::json::build_json([](JsonObject root) {
root["sensor"] = "temperature";
root["value"] = 23.5;
root["unit"] = "°C";
});
ESP_LOGD("test", "Built JSON: %s", json_buf.c_str());
ESP_LOGD("test", "Built JSON: %s", json_str.c_str());
// Test parse_json - implicit conversion to std::string for backward compatibility
std::string json_str = json_buf;
// Test parse_json
bool parse_ok = esphome::json::parse_json(json_str, [](JsonObject root) {
if (root["sensor"].is<const char*>() && root["value"].is<float>()) {
const char* sensor = root["sensor"];
@@ -27,10 +26,10 @@ interval:
});
ESP_LOGD("test", "Parse result (JSON syntax only): %s", parse_ok ? "success" : "failed");
// Test JsonBuilder class - returns SerializationBuffer
// Test JsonBuilder class
esphome::json::JsonBuilder builder;
JsonObject obj = builder.root();
obj["test"] = "direct_builder";
obj["count"] = 42;
auto result = builder.serialize();
std::string result = builder.serialize();
ESP_LOGD("test", "JsonBuilder result: %s", result.c_str());

View File

@@ -6,6 +6,10 @@ binary_sensor:
name: "Garage Door Open 2"
- platform: template
name: "Garage Door Open 3"
- platform: template
name: "Garage Door Open 4"
- platform: template
name: "Garage Door Open 5"
- platform: template
name: "Garage Door Internal"
internal: True
@@ -17,10 +21,6 @@ sensor:
- platform: template
name: "Analog 2"
lambda: return 11.0;
- platform: template
name: "Analog 3"
lambda: return 12.0;
internal: True
zigbee:
wipe_on_boot: true
@@ -35,18 +35,7 @@ output:
write_action:
- zigbee.factory_reset
time:
- platform: zigbee
switch:
- platform: template
name: "Template Switch"
optimistic: true
number:
- platform: template
name: "Template number"
optimistic: true
min_value: 2
max_value: 100
step: 1

View File

@@ -3,4 +3,3 @@
zigbee:
wipe_on_boot: once
power_source: battery
ieee802154_vendor_oui: 0x231

View File

@@ -1,58 +0,0 @@
"""Tests for ch423 component validation."""
from unittest.mock import patch
from esphome import config, yaml_util
from esphome.core import CORE
def test_ch423_mixed_gpio_modes_fails(tmp_path, capsys):
"""Test that mixing input/output on GPIO pins 0-7 fails validation."""
test_file = tmp_path / "test.yaml"
test_file.write_text("""
esphome:
name: test
esp8266:
board: esp01_1m
i2c:
sda: GPIO4
scl: GPIO5
ch423:
- id: ch423_hub
binary_sensor:
- platform: gpio
name: "CH423 Input 0"
pin:
ch423: ch423_hub
number: 0
mode: input
switch:
- platform: gpio
name: "CH423 Output 1"
pin:
ch423: ch423_hub
number: 1
mode: output
""")
parsed_yaml = yaml_util.load_yaml(test_file)
with (
patch.object(yaml_util, "load_yaml", return_value=parsed_yaml),
patch.object(CORE, "config_path", test_file),
):
result = config.read_config({})
assert result is None, "Expected validation to fail with mixed GPIO modes"
# Check that the error message mentions the GPIO pin restriction
captured = capsys.readouterr()
assert (
"GPIO pins (0-7) must all be configured as input or all as output"
in captured.out
)