Mirror of https://github.com/esphome/esphome.git
Synced 2026-01-30 00:12:08 -07:00

Compare commits: chunked_fi...json_web_s (39 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 840ad30880 | |
| | cfe121b38b | |
| | 5fbd9d5b14 | |
| | 2b1783ce61 | |
| | 904072ce79 | |
| | 0a4b98d74a | |
| | 823b5ac1ab | |
| | b8017de724 | |
| | ca96604582 | |
| | d18d378f06 | |
| | 83e3752544 | |
| | 0490b2d450 | |
| | 55ff740e4e | |
| | aba8a83cba | |
| | a23809d5db | |
| | 32fc3ea6f5 | |
| | deb8ffd348 | |
| | 6de2049076 | |
| | cd43f8474e | |
| | ecc0b366b3 | |
| | 6a17db8857 | |
| | 0843ec6ae8 | |
| | 74c84c8747 | |
| | 3e9a6c582e | |
| | 084113926c | |
| | a5f60750c2 | |
| | a382383d83 | |
| | 03cfd87b16 | |
| | 6d8294c2d3 | |
| | 6a3205f4db | |
| | 6f22509883 | |
| | 455ade0dca | |
| | 87fcfc9d76 | |
| | d86048cc2d | |
| | e1355de4cb | |
| | 7385c4cf3d | |
| | 3bd6ec4ec7 | |
| | 051604f284 | |
| | 10dfd95ff2 | |
.github/actions/restore-python/action.yml (2 changes)

@@ -22,7 +22,7 @@ runs:
         python-version: ${{ inputs.python-version }}
     - name: Restore Python virtual environment
       id: cache-venv
-      uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+      uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: venv
         # yamllint disable-line rule:line-length
.github/scripts/auto-label-pr/constants.js (new file, 38 lines)

@@ -0,0 +1,38 @@
// Constants and markers for PR auto-labeling
module.exports = {
  BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
  CODEOWNERS_MARKER: '<!-- codeowners-request -->',
  TOO_BIG_MARKER: '<!-- too-big-request -->',
  DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',

  MANAGED_LABELS: [
    'new-component',
    'new-platform',
    'new-target-platform',
    'merging-to-release',
    'merging-to-beta',
    'chained-pr',
    'core',
    'small-pr',
    'dashboard',
    'github-actions',
    'by-code-owner',
    'has-tests',
    'needs-tests',
    'needs-docs',
    'needs-codeowners',
    'too-big',
    'labeller-recheck',
    'bugfix',
    'new-feature',
    'breaking-change',
    'developer-breaking-change',
    'code-quality',
    'deprecated-component'
  ],

  DOCS_PR_PATTERNS: [
    /https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
    /esphome\/esphome-docs#\d+/
  ]
};
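For context, a minimal sketch of how these exports get consumed, runnable with Node from the script directory. The label list and PR body below are invented for illustration; only `MANAGED_LABELS` and `DOCS_PR_PATTERNS` come from the module above:

```js
// Hypothetical smoke test for the constants module; all sample data is made up.
const { MANAGED_LABELS, DOCS_PR_PATTERNS } = require('./constants');

const currentLabels = ['component: wifi', 'mega-pr', 'needs-docs'];

// Mirror of the filter index.js applies: the bot only ever adds or removes
// "component: *" labels plus the explicitly managed set.
const managed = currentLabels.filter(
  label => label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
console.log(managed); // ['component: wifi', 'needs-docs'] - 'mega-pr' is left alone

// DOCS_PR_PATTERNS matches either full esphome-docs PR URLs or shorthand refs.
const prBody = 'Docs: esphome/esphome-docs#1234';
console.log(DOCS_PR_PATTERNS.some(p => p.test(prBody))); // true
```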
.github/scripts/auto-label-pr/detectors.js (new file, 373 lines)

@@ -0,0 +1,373 @@
const fs = require('fs');
const { DOCS_PR_PATTERNS } = require('./constants');

// Strategy: Merge branch detection
async function detectMergeBranch(context) {
  const labels = new Set();
  const baseRef = context.payload.pull_request.base.ref;

  if (baseRef === 'release') {
    labels.add('merging-to-release');
  } else if (baseRef === 'beta') {
    labels.add('merging-to-beta');
  } else if (baseRef !== 'dev') {
    labels.add('chained-pr');
  }

  return labels;
}

// Strategy: Component and platform labeling
async function detectComponentPlatforms(changedFiles, apiData) {
  const labels = new Set();
  const componentRegex = /^esphome\/components\/([^\/]+)\//;
  const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);

  for (const file of changedFiles) {
    const componentMatch = file.match(componentRegex);
    if (componentMatch) {
      labels.add(`component: ${componentMatch[1]}`);
    }

    const platformMatch = file.match(targetPlatformRegex);
    if (platformMatch) {
      labels.add(`platform: ${platformMatch[1]}`);
    }
  }

  return labels;
}

// Strategy: New component detection
async function detectNewComponents(prFiles) {
  const labels = new Set();
  const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);

  for (const file of addedFiles) {
    const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
    if (componentMatch) {
      try {
        const content = fs.readFileSync(file, 'utf8');
        if (content.includes('IS_TARGET_PLATFORM = True')) {
          labels.add('new-target-platform');
        }
      } catch (error) {
        console.log(`Failed to read content of ${file}:`, error.message);
      }
      labels.add('new-component');
    }
  }

  return labels;
}

// Strategy: New platform detection
async function detectNewPlatforms(prFiles, apiData) {
  const labels = new Set();
  const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);

  for (const file of addedFiles) {
    const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
    if (platformFileMatch) {
      const [, component, platform] = platformFileMatch;
      if (apiData.platformComponents.includes(platform)) {
        labels.add('new-platform');
      }
    }

    const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
    if (platformDirMatch) {
      const [, component, platform] = platformDirMatch;
      if (apiData.platformComponents.includes(platform)) {
        labels.add('new-platform');
      }
    }
  }

  return labels;
}

// Strategy: Core files detection
async function detectCoreChanges(changedFiles) {
  const labels = new Set();
  const coreFiles = changedFiles.filter(file =>
    file.startsWith('esphome/core/') ||
    (file.startsWith('esphome/') && file.split('/').length === 2)
  );

  if (coreFiles.length > 0) {
    labels.add('core');
  }

  return labels;
}

// Strategy: PR size detection
async function detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD) {
  const labels = new Set();

  if (totalChanges <= SMALL_PR_THRESHOLD) {
    labels.add('small-pr');
    return labels;
  }

  const testAdditions = prFiles
    .filter(file => file.filename.startsWith('tests/'))
    .reduce((sum, file) => sum + (file.additions || 0), 0);
  const testDeletions = prFiles
    .filter(file => file.filename.startsWith('tests/'))
    .reduce((sum, file) => sum + (file.deletions || 0), 0);

  const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);

  // Don't add too-big if mega-pr label is already present
  if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
    labels.add('too-big');
  }

  return labels;
}

// Strategy: Dashboard changes
async function detectDashboardChanges(changedFiles) {
  const labels = new Set();
  const dashboardFiles = changedFiles.filter(file =>
    file.startsWith('esphome/dashboard/') ||
    file.startsWith('esphome/components/dashboard_import/')
  );

  if (dashboardFiles.length > 0) {
    labels.add('dashboard');
  }

  return labels;
}

// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges(changedFiles) {
  const labels = new Set();
  const githubActionsFiles = changedFiles.filter(file =>
    file.startsWith('.github/workflows/')
  );

  if (githubActionsFiles.length > 0) {
    labels.add('github-actions');
  }

  return labels;
}

// Strategy: Code owner detection
async function detectCodeOwner(github, context, changedFiles) {
  const labels = new Set();
  const { owner, repo } = context.repo;

  try {
    const { data: codeownersFile } = await github.rest.repos.getContent({
      owner,
      repo,
      path: 'CODEOWNERS',
    });

    const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
    const prAuthor = context.payload.pull_request.user.login;

    const codeownersLines = codeownersContent.split('\n')
      .map(line => line.trim())
      .filter(line => line && !line.startsWith('#'));

    const codeownersRegexes = codeownersLines.map(line => {
      const parts = line.split(/\s+/);
      const pattern = parts[0];
      const owners = parts.slice(1);

      let regex;
      if (pattern.endsWith('*')) {
        const dir = pattern.slice(0, -1);
        regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
      } else if (pattern.includes('*')) {
        // First escape all regex special chars except *, then replace * with .*
        const regexPattern = pattern
          .replace(/[.+?^${}()|[\]\\]/g, '\\$&')
          .replace(/\*/g, '.*');
        regex = new RegExp(`^${regexPattern}$`);
      } else {
        regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
      }

      return { regex, owners };
    });

    for (const file of changedFiles) {
      for (const { regex, owners } of codeownersRegexes) {
        if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
          labels.add('by-code-owner');
          return labels;
        }
      }
    }
  } catch (error) {
    console.log('Failed to read or parse CODEOWNERS file:', error.message);
  }

  return labels;
}

// Strategy: Test detection
async function detectTests(changedFiles) {
  const labels = new Set();
  const testFiles = changedFiles.filter(file => file.startsWith('tests/'));

  if (testFiles.length > 0) {
    labels.add('has-tests');
  }

  return labels;
}

// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes(context) {
  const labels = new Set();
  const prBody = context.payload.pull_request.body || '';

  console.log('Checking PR template checkboxes...');

  // Check for checked checkboxes in the "Types of changes" section
  const checkboxPatterns = [
    { pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
    { pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
    { pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
    { pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
    { pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
  ];

  for (const { pattern, label } of checkboxPatterns) {
    if (pattern.test(prBody)) {
      console.log(`Found checked checkbox for: ${label}`);
      labels.add(label);
    }
  }

  return labels;
}

// Strategy: Deprecated component detection
async function detectDeprecatedComponents(github, context, changedFiles) {
  const labels = new Set();
  const deprecatedInfo = [];
  const { owner, repo } = context.repo;

  // Compile regex once for better performance
  const componentFileRegex = /^esphome\/components\/([^\/]+)\//;

  // Get files that are modified or added in components directory
  const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));

  if (componentFiles.length === 0) {
    return { labels, deprecatedInfo };
  }

  // Extract unique component names using the same regex
  const components = new Set();
  for (const file of componentFiles) {
    const match = file.match(componentFileRegex);
    if (match) {
      components.add(match[1]);
    }
  }

  // Get PR head to fetch files from the PR branch
  const prNumber = context.payload.pull_request.number;

  // Check each component's __init__.py for DEPRECATED_COMPONENT constant
  for (const component of components) {
    const initFile = `esphome/components/${component}/__init__.py`;
    try {
      // Fetch file content from PR head using GitHub API
      const { data: fileData } = await github.rest.repos.getContent({
        owner,
        repo,
        path: initFile,
        ref: `refs/pull/${prNumber}/head`
      });

      // Decode base64 content
      const content = Buffer.from(fileData.content, 'base64').toString('utf8');

      // Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
      // Support single quotes, double quotes, and triple quotes (for multiline)
      const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
                               content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
      const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
                               content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
      const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;

      if (deprecatedMatch) {
        labels.add('deprecated-component');
        deprecatedInfo.push({
          component: component,
          message: deprecatedMatch[1].trim()
        });
        console.log(`Found deprecated component: ${component}`);
      }
    } catch (error) {
      // Only log if it's not a simple "file not found" error (404)
      if (error.status !== 404) {
        console.log(`Error reading ${initFile}:`, error.message);
      }
    }
  }

  return { labels, deprecatedInfo };
}

// Strategy: Requirements detection
async function detectRequirements(allLabels, prFiles, context) {
  const labels = new Set();

  // Check for missing tests
  if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
    labels.add('needs-tests');
  }

  // Check for missing docs
  if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
    const prBody = context.payload.pull_request.body || '';
    const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));

    if (!hasDocsLink) {
      labels.add('needs-docs');
    }
  }

  // Check for missing CODEOWNERS
  if (allLabels.has('new-component')) {
    const codeownersModified = prFiles.some(file =>
      file.filename === 'CODEOWNERS' &&
      (file.status === 'modified' || file.status === 'added') &&
      (file.additions || 0) > 0
    );

    if (!codeownersModified) {
      labels.add('needs-codeowners');
    }
  }

  return labels;
}

module.exports = {
  detectMergeBranch,
  detectComponentPlatforms,
  detectNewComponents,
  detectNewPlatforms,
  detectCoreChanges,
  detectPRSize,
  detectDashboardChanges,
  detectGitHubActionsChanges,
  detectCodeOwner,
  detectTests,
  detectPRTemplateCheckboxes,
  detectDeprecatedComponents,
  detectRequirements
};
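Most of these detectors are pure functions of the changed-file list, so they can be exercised outside Actions. A minimal sketch, assuming Node 18+; the PR data is fabricated and the thresholds are placeholders, not the workflow's real env values:

```js
// Hypothetical local run of two of the pure detectors; all inputs are invented.
const { detectComponentPlatforms, detectPRSize } = require('./detectors');

(async () => {
  // Stand-in for the data normally fetched from data.esphome.io.
  const apiData = { targetPlatforms: ['esp32'], platformComponents: ['sensor'] };

  const changedFiles = [
    'esphome/components/esp32/gpio.py',
    'tests/components/esp32/common.yaml',
  ];
  console.log(await detectComponentPlatforms(changedFiles, apiData));
  // Set(2) { 'component: esp32', 'platform: esp32' }

  const prFiles = [
    { filename: 'esphome/components/esp32/gpio.py', additions: 40, deletions: 5 },
    { filename: 'tests/components/esp32/common.yaml', additions: 10, deletions: 0 },
  ];
  // Signature: (prFiles, totalAdditions, totalDeletions, totalChanges,
  //             isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD)
  console.log(await detectPRSize(prFiles, 50, 5, 55, false, 30, 1000));
  // Set(0) {} - 55 total changes is above the small-PR cutoff, and the
  // 35 non-test changes stay below the too-big threshold
})();
```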
.github/scripts/auto-label-pr/index.js (new file, 187 lines)

@@ -0,0 +1,187 @@
const { MANAGED_LABELS } = require('./constants');
const {
  detectMergeBranch,
  detectComponentPlatforms,
  detectNewComponents,
  detectNewPlatforms,
  detectCoreChanges,
  detectPRSize,
  detectDashboardChanges,
  detectGitHubActionsChanges,
  detectCodeOwner,
  detectTests,
  detectPRTemplateCheckboxes,
  detectDeprecatedComponents,
  detectRequirements
} = require('./detectors');
const { handleReviews } = require('./reviews');
const { applyLabels, removeOldLabels } = require('./labels');

// Fetch API data
async function fetchApiData() {
  try {
    const response = await fetch('https://data.esphome.io/components.json');
    const componentsData = await response.json();
    return {
      targetPlatforms: componentsData.target_platforms || [],
      platformComponents: componentsData.platform_components || []
    };
  } catch (error) {
    console.log('Failed to fetch components data from API:', error.message);
    return { targetPlatforms: [], platformComponents: [] };
  }
}

module.exports = async ({ github, context }) => {
  // Environment variables
  const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD);
  const MAX_LABELS = parseInt(process.env.MAX_LABELS);
  const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);
  const COMPONENT_LABEL_THRESHOLD = parseInt(process.env.COMPONENT_LABEL_THRESHOLD);

  // Global state
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;

  // Get current labels and PR data
  const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
    owner,
    repo,
    issue_number: pr_number
  });
  const currentLabels = currentLabelsData.map(label => label.name);
  const managedLabels = currentLabels.filter(label =>
    label.startsWith('component: ') || MANAGED_LABELS.includes(label)
  );

  // Check for mega-PR early - if present, skip most automatic labeling
  const isMegaPR = currentLabels.includes('mega-pr');

  // Get all PR files with automatic pagination
  const prFiles = await github.paginate(
    github.rest.pulls.listFiles,
    {
      owner,
      repo,
      pull_number: pr_number
    }
  );

  // Calculate data from PR files
  const changedFiles = prFiles.map(file => file.filename);
  const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
  const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
  const totalChanges = totalAdditions + totalDeletions;

  console.log('Current labels:', currentLabels.join(', '));
  console.log('Changed files:', changedFiles.length);
  console.log('Total changes:', totalChanges);
  if (isMegaPR) {
    console.log('Mega-PR detected - applying limited labeling logic');
  }

  // Fetch API data
  const apiData = await fetchApiData();
  const baseRef = context.payload.pull_request.base.ref;

  // Early exit for release and beta branches only
  if (baseRef === 'release' || baseRef === 'beta') {
    const branchLabels = await detectMergeBranch(context);
    const finalLabels = Array.from(branchLabels);

    console.log('Computed labels (merge branch only):', finalLabels.join(', '));

    // Apply labels
    await applyLabels(github, context, finalLabels);

    // Remove old managed labels
    await removeOldLabels(github, context, managedLabels, finalLabels);

    return;
  }

  // Run all strategies
  const [
    branchLabels,
    componentLabels,
    newComponentLabels,
    newPlatformLabels,
    coreLabels,
    sizeLabels,
    dashboardLabels,
    actionsLabels,
    codeOwnerLabels,
    testLabels,
    checkboxLabels,
    deprecatedResult
  ] = await Promise.all([
    detectMergeBranch(context),
    detectComponentPlatforms(changedFiles, apiData),
    detectNewComponents(prFiles),
    detectNewPlatforms(prFiles, apiData),
    detectCoreChanges(changedFiles),
    detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD),
    detectDashboardChanges(changedFiles),
    detectGitHubActionsChanges(changedFiles),
    detectCodeOwner(github, context, changedFiles),
    detectTests(changedFiles),
    detectPRTemplateCheckboxes(context),
    detectDeprecatedComponents(github, context, changedFiles)
  ]);

  // Extract deprecated component info
  const deprecatedLabels = deprecatedResult.labels;
  const deprecatedInfo = deprecatedResult.deprecatedInfo;

  // Combine all labels
  const allLabels = new Set([
    ...branchLabels,
    ...componentLabels,
    ...newComponentLabels,
    ...newPlatformLabels,
    ...coreLabels,
    ...sizeLabels,
    ...dashboardLabels,
    ...actionsLabels,
    ...codeOwnerLabels,
    ...testLabels,
    ...checkboxLabels,
    ...deprecatedLabels
  ]);

  // Detect requirements based on all other labels
  const requirementLabels = await detectRequirements(allLabels, prFiles, context);
  for (const label of requirementLabels) {
    allLabels.add(label);
  }

  let finalLabels = Array.from(allLabels);

  // For mega-PRs, exclude component labels if there are too many
  if (isMegaPR) {
    const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
    if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
      finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
      console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
    }
  }

  // Handle too many labels (only for non-mega PRs)
  const tooManyLabels = finalLabels.length > MAX_LABELS;
  const originalLabelCount = finalLabels.length;

  if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
    finalLabels = ['too-big'];
  }

  console.log('Computed labels:', finalLabels.join(', '));

  // Handle reviews
  await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);

  // Apply labels
  await applyLabels(github, context, finalLabels);

  // Remove old managed labels
  await removeOldLabels(github, context, managedLabels, finalLabels);
};
.github/scripts/auto-label-pr/labels.js (new file, 41 lines)

@@ -0,0 +1,41 @@
// Apply labels to PR
async function applyLabels(github, context, finalLabels) {
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;

  if (finalLabels.length > 0) {
    console.log(`Adding labels: ${finalLabels.join(', ')}`);
    await github.rest.issues.addLabels({
      owner,
      repo,
      issue_number: pr_number,
      labels: finalLabels
    });
  }
}

// Remove old managed labels
async function removeOldLabels(github, context, managedLabels, finalLabels) {
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;

  const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
  for (const label of labelsToRemove) {
    console.log(`Removing label: ${label}`);
    try {
      await github.rest.issues.removeLabel({
        owner,
        repo,
        issue_number: pr_number,
        name: label
      });
    } catch (error) {
      console.log(`Failed to remove label ${label}:`, error.message);
    }
  }
}

module.exports = {
  applyLabels,
  removeOldLabels
};
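Because both helpers take the Octokit client as a parameter, they can be smoke-tested with a stub. A sketch under that assumption; the stub fakes only the two REST calls the module makes, and the repo/PR numbers are invented:

```js
const { applyLabels, removeOldLabels } = require('./labels');

// Minimal fake of the github-script client surface used by labels.js.
const github = {
  rest: {
    issues: {
      addLabels: async ({ labels }) => console.log('addLabels:', labels),
      removeLabel: async ({ name }) => console.log('removeLabel:', name),
    },
  },
};
const context = {
  repo: { owner: 'esphome', repo: 'esphome' },
  issue: { number: 12345 }, // invented PR number
};

(async () => {
  await applyLabels(github, context, ['small-pr', 'has-tests']);
  // 'needs-tests' is managed but no longer wanted, so it gets removed;
  // 'small-pr' survives because it appears in the final set.
  await removeOldLabels(github, context, ['needs-tests', 'small-pr'], ['small-pr', 'has-tests']);
})();
```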
.github/scripts/auto-label-pr/reviews.js (new file, 141 lines)

@@ -0,0 +1,141 @@
const {
  BOT_COMMENT_MARKER,
  CODEOWNERS_MARKER,
  TOO_BIG_MARKER,
  DEPRECATED_COMPONENT_MARKER
} = require('./constants');

// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
  const messages = [];

  // Deprecated component message
  if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
    let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
    message += `Hey there @${prAuthor},\n`;
    message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;

    for (const info of deprecatedInfo) {
      message += `#### Component: \`${info.component}\`\n`;
      message += `${info.message}\n\n`;
    }

    message += `Consider migrating to the recommended alternative if applicable.`;

    messages.push(message);
  }

  // Too big message
  if (finalLabels.includes('too-big')) {
    const testAdditions = prFiles
      .filter(file => file.filename.startsWith('tests/'))
      .reduce((sum, file) => sum + (file.additions || 0), 0);
    const testDeletions = prFiles
      .filter(file => file.filename.startsWith('tests/'))
      .reduce((sum, file) => sum + (file.deletions || 0), 0);
    const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);

    const tooManyLabels = originalLabelCount > MAX_LABELS;
    const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;

    let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;

    if (tooManyLabels && tooManyChanges) {
      message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
    } else if (tooManyLabels) {
      message += `This PR affects ${originalLabelCount} different components/areas.`;
    } else {
      message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
    }

    message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
    message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;

    messages.push(message);
  }

  // CODEOWNERS message
  if (finalLabels.includes('needs-codeowners')) {
    const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
      `Hey there @${prAuthor},\n` +
      `Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
      `This way we can notify you if a bug report for this integration is reported.\n\n` +
      `In \`__init__.py\` of the integration, please add:\n\n` +
      `\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
      `And run \`script/build_codeowners.py\``;

    messages.push(message);
  }

  return messages;
}

// Handle reviews
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;
  const prAuthor = context.payload.pull_request.user.login;

  const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
  const hasReviewableLabels = finalLabels.some(label =>
    ['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
  );

  const { data: reviews } = await github.rest.pulls.listReviews({
    owner,
    repo,
    pull_number: pr_number
  });

  const botReviews = reviews.filter(review =>
    review.user.type === 'Bot' &&
    review.state === 'CHANGES_REQUESTED' &&
    review.body && review.body.includes(BOT_COMMENT_MARKER)
  );

  if (hasReviewableLabels) {
    const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;

    if (botReviews.length > 0) {
      // Update existing review
      await github.rest.pulls.updateReview({
        owner,
        repo,
        pull_number: pr_number,
        review_id: botReviews[0].id,
        body: reviewBody
      });
      console.log('Updated existing bot review');
    } else {
      // Create new review
      await github.rest.pulls.createReview({
        owner,
        repo,
        pull_number: pr_number,
        body: reviewBody,
        event: 'REQUEST_CHANGES'
      });
      console.log('Created new bot review');
    }
  } else if (botReviews.length > 0) {
    // Dismiss existing reviews
    for (const review of botReviews) {
      try {
        await github.rest.pulls.dismissReview({
          owner,
          repo,
          pull_number: pr_number,
          review_id: review.id,
          message: 'Review dismissed: All requirements have been met'
        });
        console.log(`Dismissed bot review ${review.id}`);
      } catch (error) {
        console.log(`Failed to dismiss review ${review.id}:`, error.message);
      }
    }
  }
}

module.exports = {
  handleReviews
};
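handleReviews is the only export, and it too is stub-friendly. A sketch with the review endpoints faked and all labels, counts, and numbers invented; the argument order follows the signature above:

```js
const { handleReviews } = require('./reviews');

// Fake only the pulls endpoints the module can hit.
const github = {
  rest: {
    pulls: {
      listReviews: async () => ({ data: [] }), // no prior bot review
      createReview: async ({ event, body }) => console.log(event, '\n' + body),
      updateReview: async () => {},
      dismissReview: async () => {},
    },
  },
};
const context = {
  repo: { owner: 'esphome', repo: 'esphome' },
  issue: { number: 12345 },
  payload: { pull_request: { user: { login: 'octocat' } } },
};

(async () => {
  // 'needs-codeowners' is one of the reviewable labels, so with no existing
  // bot review this path creates a new REQUEST_CHANGES review containing the
  // CODEOWNERS_MARKER message addressed to @octocat.
  await handleReviews(
    github, context,
    ['new-component', 'needs-codeowners'], // finalLabels
    2,       // originalLabelCount
    [],      // deprecatedInfo
    [],      // prFiles
    0, 0,    // totalAdditions, totalDeletions
    15, 1000 // MAX_LABELS, TOO_BIG_THRESHOLD
  );
})();
```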
.github/workflows/auto-label-pr.yml (632 changes)

@@ -36,633 +36,5 @@ jobs:
       with:
         github-token: ${{ steps.generate-token.outputs.token }}
         script: |
-          const fs = require('fs');
-
-          // Constants
-          const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
-          const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
-          const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
-          const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
-          const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
-          const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
-          const TOO_BIG_MARKER = '<!-- too-big-request -->';
-
-          const MANAGED_LABELS = [
-            'new-component',
-            'new-platform',
-            'new-target-platform',
-            'merging-to-release',
-            'merging-to-beta',
-            'chained-pr',
-            'core',
-            'small-pr',
-            'dashboard',
-            'github-actions',
-            'by-code-owner',
-            'has-tests',
-            'needs-tests',
-            'needs-docs',
-            'needs-codeowners',
-            'too-big',
-            'labeller-recheck',
-            'bugfix',
-            'new-feature',
-            'breaking-change',
-            'developer-breaking-change',
-            'code-quality'
-          ];
-
-          const DOCS_PR_PATTERNS = [
-            /https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
-            /esphome\/esphome-docs#\d+/
-          ];
-
-          // Global state
-          const { owner, repo } = context.repo;
-          const pr_number = context.issue.number;
-
-          // Get current labels and PR data
-          const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
-            owner,
-            repo,
-            issue_number: pr_number
-          });
-          const currentLabels = currentLabelsData.map(label => label.name);
-          const managedLabels = currentLabels.filter(label =>
-            label.startsWith('component: ') || MANAGED_LABELS.includes(label)
-          );
-
-          // Check for mega-PR early - if present, skip most automatic labeling
-          const isMegaPR = currentLabels.includes('mega-pr');
-
-          // Get all PR files with automatic pagination
-          const prFiles = await github.paginate(
-            github.rest.pulls.listFiles,
-            {
-              owner,
-              repo,
-              pull_number: pr_number
-            }
-          );
-
-          // Calculate data from PR files
-          const changedFiles = prFiles.map(file => file.filename);
-          const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
-          const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
-          const totalChanges = totalAdditions + totalDeletions;
-
-          console.log('Current labels:', currentLabels.join(', '));
-          console.log('Changed files:', changedFiles.length);
-          console.log('Total changes:', totalChanges);
-          if (isMegaPR) {
-            console.log('Mega-PR detected - applying limited labeling logic');
-          }
-
-          // Fetch API data
-          async function fetchApiData() {
-            try {
-              const response = await fetch('https://data.esphome.io/components.json');
-              const componentsData = await response.json();
-              return {
-                targetPlatforms: componentsData.target_platforms || [],
-                platformComponents: componentsData.platform_components || []
-              };
-            } catch (error) {
-              console.log('Failed to fetch components data from API:', error.message);
-              return { targetPlatforms: [], platformComponents: [] };
-            }
-          }
-
-          // Strategy: Merge branch detection
-          async function detectMergeBranch() {
-            const labels = new Set();
-            const baseRef = context.payload.pull_request.base.ref;
-
-            if (baseRef === 'release') {
-              labels.add('merging-to-release');
-            } else if (baseRef === 'beta') {
-              labels.add('merging-to-beta');
-            } else if (baseRef !== 'dev') {
-              labels.add('chained-pr');
-            }
-
-            return labels;
-          }
-
-          // Strategy: Component and platform labeling
-          async function detectComponentPlatforms(apiData) {
-            const labels = new Set();
-            const componentRegex = /^esphome\/components\/([^\/]+)\//;
-            const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
-
-            for (const file of changedFiles) {
-              const componentMatch = file.match(componentRegex);
-              if (componentMatch) {
-                labels.add(`component: ${componentMatch[1]}`);
-              }
-
-              const platformMatch = file.match(targetPlatformRegex);
-              if (platformMatch) {
-                labels.add(`platform: ${platformMatch[1]}`);
-              }
-            }
-
-            return labels;
-          }
-
-          // Strategy: New component detection
-          async function detectNewComponents() {
-            const labels = new Set();
-            const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
-
-            for (const file of addedFiles) {
-              const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
-              if (componentMatch) {
-                try {
-                  const content = fs.readFileSync(file, 'utf8');
-                  if (content.includes('IS_TARGET_PLATFORM = True')) {
-                    labels.add('new-target-platform');
-                  }
-                } catch (error) {
-                  console.log(`Failed to read content of ${file}:`, error.message);
-                }
-                labels.add('new-component');
-              }
-            }
-
-            return labels;
-          }
-
-          // Strategy: New platform detection
-          async function detectNewPlatforms(apiData) {
-            const labels = new Set();
-            const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
-
-            for (const file of addedFiles) {
-              const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
-              if (platformFileMatch) {
-                const [, component, platform] = platformFileMatch;
-                if (apiData.platformComponents.includes(platform)) {
-                  labels.add('new-platform');
-                }
-              }
-
-              const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
-              if (platformDirMatch) {
-                const [, component, platform] = platformDirMatch;
-                if (apiData.platformComponents.includes(platform)) {
-                  labels.add('new-platform');
-                }
-              }
-            }
-
-            return labels;
-          }
-
-          // Strategy: Core files detection
-          async function detectCoreChanges() {
-            const labels = new Set();
-            const coreFiles = changedFiles.filter(file =>
-              file.startsWith('esphome/core/') ||
-              (file.startsWith('esphome/') && file.split('/').length === 2)
-            );
-
-            if (coreFiles.length > 0) {
-              labels.add('core');
-            }
-
-            return labels;
-          }
-
-          // Strategy: PR size detection
-          async function detectPRSize() {
-            const labels = new Set();
-
-            if (totalChanges <= SMALL_PR_THRESHOLD) {
-              labels.add('small-pr');
-              return labels;
-            }
-
-            const testAdditions = prFiles
-              .filter(file => file.filename.startsWith('tests/'))
-              .reduce((sum, file) => sum + (file.additions || 0), 0);
-            const testDeletions = prFiles
-              .filter(file => file.filename.startsWith('tests/'))
-              .reduce((sum, file) => sum + (file.deletions || 0), 0);
-
-            const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
-
-            // Don't add too-big if mega-pr label is already present
-            if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
-              labels.add('too-big');
-            }
-
-            return labels;
-          }
-
-          // Strategy: Dashboard changes
-          async function detectDashboardChanges() {
-            const labels = new Set();
-            const dashboardFiles = changedFiles.filter(file =>
-              file.startsWith('esphome/dashboard/') ||
-              file.startsWith('esphome/components/dashboard_import/')
-            );
-
-            if (dashboardFiles.length > 0) {
-              labels.add('dashboard');
-            }
-
-            return labels;
-          }
-
-          // Strategy: GitHub Actions changes
-          async function detectGitHubActionsChanges() {
-            const labels = new Set();
-            const githubActionsFiles = changedFiles.filter(file =>
-              file.startsWith('.github/workflows/')
-            );
-
-            if (githubActionsFiles.length > 0) {
-              labels.add('github-actions');
-            }
-
-            return labels;
-          }
-
-          // Strategy: Code owner detection
-          async function detectCodeOwner() {
-            const labels = new Set();
-
-            try {
-              const { data: codeownersFile } = await github.rest.repos.getContent({
-                owner,
-                repo,
-                path: 'CODEOWNERS',
-              });
-
-              const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
-              const prAuthor = context.payload.pull_request.user.login;
-
-              const codeownersLines = codeownersContent.split('\n')
-                .map(line => line.trim())
-                .filter(line => line && !line.startsWith('#'));
-
-              const codeownersRegexes = codeownersLines.map(line => {
-                const parts = line.split(/\s+/);
-                const pattern = parts[0];
-                const owners = parts.slice(1);
-
-                let regex;
-                if (pattern.endsWith('*')) {
-                  const dir = pattern.slice(0, -1);
-                  regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
-                } else if (pattern.includes('*')) {
-                  // First escape all regex special chars except *, then replace * with .*
-                  const regexPattern = pattern
-                    .replace(/[.+?^${}()|[\]\\]/g, '\\$&')
-                    .replace(/\*/g, '.*');
-                  regex = new RegExp(`^${regexPattern}$`);
-                } else {
-                  regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
-                }
-
-                return { regex, owners };
-              });
-
-              for (const file of changedFiles) {
-                for (const { regex, owners } of codeownersRegexes) {
-                  if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
-                    labels.add('by-code-owner');
-                    return labels;
-                  }
-                }
-              }
-            } catch (error) {
-              console.log('Failed to read or parse CODEOWNERS file:', error.message);
-            }
-
-            return labels;
-          }
-
-          // Strategy: Test detection
-          async function detectTests() {
-            const labels = new Set();
-            const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
-
-            if (testFiles.length > 0) {
-              labels.add('has-tests');
-            }
-
-            return labels;
-          }
-
-          // Strategy: PR Template Checkbox detection
-          async function detectPRTemplateCheckboxes() {
-            const labels = new Set();
-            const prBody = context.payload.pull_request.body || '';
-
-            console.log('Checking PR template checkboxes...');
-
-            // Check for checked checkboxes in the "Types of changes" section
-            const checkboxPatterns = [
-              { pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
-              { pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
-              { pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
-              { pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
-              { pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
-            ];
-
-            for (const { pattern, label } of checkboxPatterns) {
-              if (pattern.test(prBody)) {
-                console.log(`Found checked checkbox for: ${label}`);
-                labels.add(label);
-              }
-            }
-
-            return labels;
-          }
-
-          // Strategy: Requirements detection
-          async function detectRequirements(allLabels) {
-            const labels = new Set();
-
-            // Check for missing tests
-            if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
-              labels.add('needs-tests');
-            }
-
-            // Check for missing docs
-            if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
-              const prBody = context.payload.pull_request.body || '';
-              const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
-
-              if (!hasDocsLink) {
-                labels.add('needs-docs');
-              }
-            }
-
-            // Check for missing CODEOWNERS
-            if (allLabels.has('new-component')) {
-              const codeownersModified = prFiles.some(file =>
-                file.filename === 'CODEOWNERS' &&
-                (file.status === 'modified' || file.status === 'added') &&
-                (file.additions || 0) > 0
-              );
-
-              if (!codeownersModified) {
-                labels.add('needs-codeowners');
-              }
-            }
-
-            return labels;
-          }
-
-          // Generate review messages
-          function generateReviewMessages(finalLabels, originalLabelCount) {
-            const messages = [];
-            const prAuthor = context.payload.pull_request.user.login;
-
-            // Too big message
-            if (finalLabels.includes('too-big')) {
-              const testAdditions = prFiles
-                .filter(file => file.filename.startsWith('tests/'))
-                .reduce((sum, file) => sum + (file.additions || 0), 0);
-              const testDeletions = prFiles
-                .filter(file => file.filename.startsWith('tests/'))
-                .reduce((sum, file) => sum + (file.deletions || 0), 0);
-              const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
-
-              const tooManyLabels = originalLabelCount > MAX_LABELS;
-              const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
-
-              let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
-
-              if (tooManyLabels && tooManyChanges) {
-                message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
-              } else if (tooManyLabels) {
-                message += `This PR affects ${originalLabelCount} different components/areas.`;
-              } else {
-                message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
-              }
-
-              message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
-              message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
-
-              messages.push(message);
-            }
-
-            // CODEOWNERS message
-            if (finalLabels.includes('needs-codeowners')) {
-              const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
-                `Hey there @${prAuthor},\n` +
-                `Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
-                `This way we can notify you if a bug report for this integration is reported.\n\n` +
-                `In \`__init__.py\` of the integration, please add:\n\n` +
-                `\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
-                `And run \`script/build_codeowners.py\``;
-
-              messages.push(message);
-            }
-
-            return messages;
-          }
-
-          // Handle reviews
-          async function handleReviews(finalLabels, originalLabelCount) {
-            const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
-            const hasReviewableLabels = finalLabels.some(label =>
-              ['too-big', 'needs-codeowners'].includes(label)
-            );
-
-            const { data: reviews } = await github.rest.pulls.listReviews({
-              owner,
-              repo,
-              pull_number: pr_number
-            });
-
-            const botReviews = reviews.filter(review =>
-              review.user.type === 'Bot' &&
-              review.state === 'CHANGES_REQUESTED' &&
-              review.body && review.body.includes(BOT_COMMENT_MARKER)
-            );
-
-            if (hasReviewableLabels) {
-              const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
-
-              if (botReviews.length > 0) {
-                // Update existing review
-                await github.rest.pulls.updateReview({
-                  owner,
-                  repo,
-                  pull_number: pr_number,
-                  review_id: botReviews[0].id,
-                  body: reviewBody
-                });
-                console.log('Updated existing bot review');
-              } else {
-                // Create new review
-                await github.rest.pulls.createReview({
-                  owner,
-                  repo,
-                  pull_number: pr_number,
-                  body: reviewBody,
-                  event: 'REQUEST_CHANGES'
-                });
-                console.log('Created new bot review');
-              }
-            } else if (botReviews.length > 0) {
-              // Dismiss existing reviews
-              for (const review of botReviews) {
-                try {
-                  await github.rest.pulls.dismissReview({
-                    owner,
-                    repo,
-                    pull_number: pr_number,
-                    review_id: review.id,
-                    message: 'Review dismissed: All requirements have been met'
-                  });
-                  console.log(`Dismissed bot review ${review.id}`);
-                } catch (error) {
-                  console.log(`Failed to dismiss review ${review.id}:`, error.message);
-                }
-              }
-            }
-          }
-
-          // Main execution
-          const apiData = await fetchApiData();
-          const baseRef = context.payload.pull_request.base.ref;
-
-          // Early exit for release and beta branches only
-          if (baseRef === 'release' || baseRef === 'beta') {
-            const branchLabels = await detectMergeBranch();
-            const finalLabels = Array.from(branchLabels);
-
-            console.log('Computed labels (merge branch only):', finalLabels.join(', '));
-
-            // Apply labels
-            if (finalLabels.length > 0) {
-              await github.rest.issues.addLabels({
-                owner,
-                repo,
-                issue_number: pr_number,
-                labels: finalLabels
-              });
-            }
-
-            // Remove old managed labels
-            const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
-            for (const label of labelsToRemove) {
-              try {
-                await github.rest.issues.removeLabel({
-                  owner,
-                  repo,
-                  issue_number: pr_number,
-                  name: label
-                });
-              } catch (error) {
-                console.log(`Failed to remove label ${label}:`, error.message);
-              }
-            }
-
-            return;
-          }
-
-          // Run all strategies
-          const [
-            branchLabels,
-            componentLabels,
-            newComponentLabels,
-            newPlatformLabels,
-            coreLabels,
-            sizeLabels,
-            dashboardLabels,
-            actionsLabels,
-            codeOwnerLabels,
-            testLabels,
-            checkboxLabels
-          ] = await Promise.all([
-            detectMergeBranch(),
-            detectComponentPlatforms(apiData),
-            detectNewComponents(),
-            detectNewPlatforms(apiData),
-            detectCoreChanges(),
-            detectPRSize(),
-            detectDashboardChanges(),
-            detectGitHubActionsChanges(),
-            detectCodeOwner(),
-            detectTests(),
-            detectPRTemplateCheckboxes()
-          ]);
-
-          // Combine all labels
-          const allLabels = new Set([
-            ...branchLabels,
-            ...componentLabels,
-            ...newComponentLabels,
-            ...newPlatformLabels,
-            ...coreLabels,
-            ...sizeLabels,
-            ...dashboardLabels,
-            ...actionsLabels,
-            ...codeOwnerLabels,
-            ...testLabels,
-            ...checkboxLabels
-          ]);
-
-          // Detect requirements based on all other labels
-          const requirementLabels = await detectRequirements(allLabels);
-          for (const label of requirementLabels) {
-            allLabels.add(label);
-          }
-
-          let finalLabels = Array.from(allLabels);
-
-          // For mega-PRs, exclude component labels if there are too many
-          if (isMegaPR) {
-            const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
-            if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
-              finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
-              console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
-            }
-          }
-
-          // Handle too many labels (only for non-mega PRs)
-          const tooManyLabels = finalLabels.length > MAX_LABELS;
-          const originalLabelCount = finalLabels.length;
-
-          if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
-            finalLabels = ['too-big'];
-          }
-
-          console.log('Computed labels:', finalLabels.join(', '));
-
-          // Handle reviews
-          await handleReviews(finalLabels, originalLabelCount);
-
-          // Apply labels
-          if (finalLabels.length > 0) {
-            console.log(`Adding labels: ${finalLabels.join(', ')}`);
-            await github.rest.issues.addLabels({
-              owner,
-              repo,
-              issue_number: pr_number,
-              labels: finalLabels
-            });
-          }
-
-          // Remove old managed labels
-          const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
-          for (const label of labelsToRemove) {
-            console.log(`Removing label: ${label}`);
-            try {
-              await github.rest.issues.removeLabel({
-                owner,
-                repo,
-                issue_number: pr_number,
-                name: label
-              });
-            } catch (error) {
-              console.log(`Failed to remove label ${label}:`, error.message);
-            }
-          }
+          const script = require('./.github/scripts/auto-label-pr/index.js');
+          await script({ github, context });
.github/workflows/ci.yml (30 changes)

@@ -47,7 +47,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: venv
           # yamllint disable-line rule:line-length
@@ -157,7 +157,7 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
       - name: Save Python virtual environment cache
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: venv
           key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -193,7 +193,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
       - name: Restore components graph cache
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: .temp/components_graph.json
           key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -223,7 +223,7 @@ jobs:
          echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
       - name: Save components graph cache
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: .temp/components_graph.json
           key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -245,7 +245,7 @@ jobs:
           python-version: "3.13"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: venv
           key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -334,14 +334,14 @@ jobs:

       - name: Cache platformio
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}

       - name: Cache platformio
         if: github.ref != 'refs/heads/dev'
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -413,14 +413,14 @@ jobs:

       - name: Cache platformio
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

       - name: Cache platformio
         if: github.ref != 'refs/heads/dev'
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -502,14 +502,14 @@ jobs:

       - name: Cache platformio
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

       - name: Cache platformio
         if: github.ref != 'refs/heads/dev'
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -735,7 +735,7 @@ jobs:
       - name: Restore cached memory analysis
         id: cache-memory-analysis
         if: steps.check-script.outputs.skip != 'true'
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: memory-analysis-target.json
           key: ${{ steps.cache-key.outputs.cache-key }}
@@ -759,7 +759,7 @@ jobs:

       - name: Cache platformio
         if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -800,7 +800,7 @@ jobs:

       - name: Save memory analysis to cache
         if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
-        uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: memory-analysis-target.json
           key: ${{ steps.cache-key.outputs.cache-key }}
@@ -847,7 +847,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
       - name: Cache platformio
-        uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
.github/workflows/release.yml (vendored, 8 changes)
@@ -102,12 +102,12 @@ jobs:
        uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

      - name: Log in to docker hub
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -182,13 +182,13 @@ jobs:

      - name: Log in to docker hub
        if: matrix.registry == 'dockerhub'
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        if: matrix.registry == 'ghcr'
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -104,6 +104,7 @@ esphome/components/cc1101/* @gabest11 @lygris
 esphome/components/ccs811/* @habbie
 esphome/components/cd74hc4067/* @asoehlke
 esphome/components/ch422g/* @clydebarrow @jesterret
+esphome/components/ch423/* @dwmw2
 esphome/components/chsc6x/* @kkosik20
 esphome/components/climate/* @esphome/core
 esphome/components/climate_ir/* @glmnet
@@ -2,7 +2,7 @@ import logging

 import esphome.codegen as cg
 from esphome.components import sensor, voltage_sampler
-from esphome.components.esp32 import get_esp32_variant
+from esphome.components.esp32 import get_esp32_variant, include_builtin_idf_component
 from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
 from esphome.components.zephyr import (
     zephyr_add_overlay,
@@ -118,6 +118,9 @@ async def to_code(config):
     cg.add(var.set_sampling_mode(config[CONF_SAMPLING_MODE]))

+    if CORE.is_esp32:
+        # Re-enable ESP-IDF's ADC driver (excluded by default to save compile time)
+        include_builtin_idf_component("esp_adc")
+
     if attenuation := config.get(CONF_ATTENUATION):
         if attenuation == "auto":
             cg.add(var.set_autorange(cg.global_ns.true))
@@ -264,9 +264,9 @@ template<typename... Ts> class APIRespondAction : public Action<Ts...> {
       // Build and send JSON response
       json::JsonBuilder builder;
       this->json_builder_(x..., builder.root());
-      std::string json_str = builder.serialize();
+      auto json_buf = builder.serialize();
       this->parent_->send_action_response(call_id, success, StringRef(error_message),
-                                          reinterpret_cast<const uint8_t *>(json_str.data()), json_str.size());
+                                          reinterpret_cast<const uint8_t *>(json_buf.data()), json_buf.size());
       return;
     }
 #endif
@@ -1,5 +1,5 @@
 import esphome.codegen as cg
-from esphome.components.esp32 import add_idf_component
+from esphome.components.esp32 import add_idf_component, include_builtin_idf_component
 import esphome.config_validation as cv
 from esphome.const import CONF_BITS_PER_SAMPLE, CONF_NUM_CHANNELS, CONF_SAMPLE_RATE
 import esphome.final_validate as fv
@@ -166,6 +166,9 @@ def final_validate_audio_schema(


 async def to_code(config):
+    # Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
+    include_builtin_idf_component("esp_http_client")
+
     add_idf_component(
         name="esphome/esp-audio-libs",
         ref="2.0.3",
esphome/components/ch423/__init__.py (new file, 103 lines)
@@ -0,0 +1,103 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import i2c
from esphome.components.i2c import I2CBus
import esphome.config_validation as cv
from esphome.const import (
    CONF_I2C_ID,
    CONF_ID,
    CONF_INPUT,
    CONF_INVERTED,
    CONF_MODE,
    CONF_NUMBER,
    CONF_OPEN_DRAIN,
    CONF_OUTPUT,
)
from esphome.core import CORE

CODEOWNERS = ["@dwmw2"]
DEPENDENCIES = ["i2c"]
MULTI_CONF = True
ch423_ns = cg.esphome_ns.namespace("ch423")

CH423Component = ch423_ns.class_("CH423Component", cg.Component, i2c.I2CDevice)
CH423GPIOPin = ch423_ns.class_(
    "CH423GPIOPin", cg.GPIOPin, cg.Parented.template(CH423Component)
)

CONF_CH423 = "ch423"

# Note that no address is configurable - each register in the CH423 has a dedicated i2c address
CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(CONF_ID): cv.declare_id(CH423Component),
        cv.GenerateID(CONF_I2C_ID): cv.use_id(I2CBus),
    }
).extend(cv.COMPONENT_SCHEMA)


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    # Can't use register_i2c_device because there is no CONF_ADDRESS
    parent = await cg.get_variable(config[CONF_I2C_ID])
    cg.add(var.set_i2c_bus(parent))


# This is used as a final validation step so that modes have been fully transformed.
def pin_mode_check(pin_config, _):
    if pin_config[CONF_MODE][CONF_INPUT] and pin_config[CONF_NUMBER] >= 8:
        raise cv.Invalid("CH423 only supports input on pins 0-7")
    if pin_config[CONF_MODE][CONF_OPEN_DRAIN] and pin_config[CONF_NUMBER] < 8:
        raise cv.Invalid("CH423 only supports open drain output on pins 8-23")

    ch423_id = pin_config[CONF_CH423]
    pin_num = pin_config[CONF_NUMBER]
    is_output = pin_config[CONF_MODE][CONF_OUTPUT]
    is_open_drain = pin_config[CONF_MODE][CONF_OPEN_DRAIN]

    # Track pin modes per CH423 instance in CORE.data
    ch423_modes = CORE.data.setdefault(CONF_CH423, {})
    if ch423_id not in ch423_modes:
        ch423_modes[ch423_id] = {"gpio_output": None, "gpo_open_drain": None}

    if pin_num < 8:
        # GPIO pins (0-7): all must have same direction
        if ch423_modes[ch423_id]["gpio_output"] is None:
            ch423_modes[ch423_id]["gpio_output"] = is_output
        elif ch423_modes[ch423_id]["gpio_output"] != is_output:
            raise cv.Invalid(
                "CH423 GPIO pins (0-7) must all be configured as input or all as output"
            )
    # GPO pins (8-23): all must have same open-drain setting
    elif ch423_modes[ch423_id]["gpo_open_drain"] is None:
        ch423_modes[ch423_id]["gpo_open_drain"] = is_open_drain
    elif ch423_modes[ch423_id]["gpo_open_drain"] != is_open_drain:
        raise cv.Invalid(
            "CH423 GPO pins (8-23) must all be configured as push-pull or all as open-drain"
        )


CH423_PIN_SCHEMA = pins.gpio_base_schema(
    CH423GPIOPin,
    cv.int_range(min=0, max=23),
    modes=[CONF_INPUT, CONF_OUTPUT, CONF_OPEN_DRAIN],
).extend(
    {
        cv.Required(CONF_CH423): cv.use_id(CH423Component),
    }
)


@pins.PIN_SCHEMA_REGISTRY.register(CONF_CH423, CH423_PIN_SCHEMA, pin_mode_check)
async def ch423_pin_to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    parent = await cg.get_variable(config[CONF_CH423])

    cg.add(var.set_parent(parent))

    num = config[CONF_NUMBER]
    cg.add(var.set_pin(num))
    cg.add(var.set_inverted(config[CONF_INVERTED]))
    cg.add(var.set_flags(pins.gpio_flags_expr(config[CONF_MODE])))
    return var
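The interesting part of pin_mode_check is that it validates a constraint spanning multiple pin entries: the CH423's IO bank (0-7) shares one direction bit and its OC bank (8-23) shares one drive-mode bit, so the first pin seen fixes the mode for its whole bank. A standalone sketch of that bookkeeping, using plain dicts and ValueError in place of ESPHome's CORE.data and cv.Invalid (names here are hypothetical):

modes_by_expander = {}

def check_pin(expander_id, number, is_output, is_open_drain):
    state = modes_by_expander.setdefault(
        expander_id, {"gpio_output": None, "gpo_open_drain": None}
    )
    if number < 8:
        # GPIO bank: first pin seen fixes the direction for the whole bank
        if state["gpio_output"] is None:
            state["gpio_output"] = is_output
        elif state["gpio_output"] != is_output:
            raise ValueError("pins 0-7 must all be input or all output")
    else:
        # GPO bank: first pin seen fixes push-pull vs open-drain
        if state["gpo_open_drain"] is None:
            state["gpo_open_drain"] = is_open_drain
        elif state["gpo_open_drain"] != is_open_drain:
            raise ValueError("pins 8-23 must share one drive mode")

check_pin("exp1", 0, True, False)
check_pin("exp1", 3, True, False)   # OK: same direction as pin 0
# check_pin("exp1", 5, False, False)  # would raise: mixed directions in bank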
esphome/components/ch423/ch423.cpp (new file, 148 lines)
@@ -0,0 +1,148 @@
#include "ch423.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"

namespace esphome::ch423 {

static constexpr uint8_t CH423_REG_SYS = 0x24;    // Set system parameters (0x48 >> 1)
static constexpr uint8_t CH423_SYS_IO_OE = 0x01;  // IO output enable
static constexpr uint8_t CH423_SYS_OD_EN = 0x04;  // Open drain enable for OC pins
static constexpr uint8_t CH423_REG_IO = 0x30;     // Write/read IO7-IO0 (0x60 >> 1)
static constexpr uint8_t CH423_REG_IO_RD = 0x26;  // Read IO7-IO0 (0x4D >> 1, rounded down)
static constexpr uint8_t CH423_REG_OCL = 0x22;    // Write OC7-OC0 (0x44 >> 1)
static constexpr uint8_t CH423_REG_OCH = 0x23;    // Write OC15-OC8 (0x46 >> 1)

static const char *const TAG = "ch423";

void CH423Component::setup() {
  // set outputs before mode
  this->write_outputs_();
  // Set system parameters and check for errors
  bool success = this->write_reg_(CH423_REG_SYS, this->sys_params_);
  // Only read inputs if pins are configured for input (IO_OE not set)
  if (success && !(this->sys_params_ & CH423_SYS_IO_OE)) {
    success = this->read_inputs_();
  }
  if (!success) {
    ESP_LOGE(TAG, "CH423 not detected");
    this->mark_failed();
    return;
  }

  ESP_LOGCONFIG(TAG, "Initialization complete. Warning: %d, Error: %d", this->status_has_warning(),
                this->status_has_error());
}

void CH423Component::loop() {
  // Clear all the previously read flags.
  this->pin_read_flags_ = 0x00;
}

void CH423Component::dump_config() {
  ESP_LOGCONFIG(TAG, "CH423:");
  if (this->is_failed()) {
    ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
  }
}

void CH423Component::pin_mode(uint8_t pin, gpio::Flags flags) {
  if (pin < 8) {
    if (flags & gpio::FLAG_OUTPUT) {
      this->sys_params_ |= CH423_SYS_IO_OE;
    }
  } else if (pin >= 8 && pin < 24) {
    if (flags & gpio::FLAG_OPEN_DRAIN) {
      this->sys_params_ |= CH423_SYS_OD_EN;
    }
  }
}

bool CH423Component::digital_read(uint8_t pin) {
  if (this->pin_read_flags_ == 0 || this->pin_read_flags_ & (1 << pin)) {
    // Read values on first access or in case it's being read again in the same loop
    this->read_inputs_();
  }

  this->pin_read_flags_ |= (1 << pin);
  return (this->input_bits_ & (1 << pin)) != 0;
}

void CH423Component::digital_write(uint8_t pin, bool value) {
  if (value) {
    this->output_bits_ |= (1 << pin);
  } else {
    this->output_bits_ &= ~(1 << pin);
  }
  this->write_outputs_();
}

bool CH423Component::read_inputs_() {
  if (this->is_failed()) {
    return false;
  }
  // reading inputs requires IO_OE to be 0
  if (this->sys_params_ & CH423_SYS_IO_OE) {
    return false;
  }
  uint8_t result = this->read_reg_(CH423_REG_IO_RD);
  this->input_bits_ = result;
  this->status_clear_warning();
  return true;
}

// Write a register. Can't use the standard write_byte() method because there is no single pre-configured i2c address.
bool CH423Component::write_reg_(uint8_t reg, uint8_t value) {
  auto err = this->bus_->write_readv(reg, &value, 1, nullptr, 0);
  if (err != i2c::ERROR_OK) {
    char buf[64];
    ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("write failed for register 0x%X, error %d"), reg, err);
    this->status_set_warning(buf);
    return false;
  }
  this->status_clear_warning();
  return true;
}

uint8_t CH423Component::read_reg_(uint8_t reg) {
  uint8_t value;
  auto err = this->bus_->write_readv(reg, nullptr, 0, &value, 1);
  if (err != i2c::ERROR_OK) {
    char buf[64];
    ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("read failed for register 0x%X, error %d"), reg, err);
    this->status_set_warning(buf);
    return 0;
  }
  this->status_clear_warning();
  return value;
}

bool CH423Component::write_outputs_() {
  bool success = true;
  // Write IO7-IO0
  success &= this->write_reg_(CH423_REG_IO, static_cast<uint8_t>(this->output_bits_));
  // Write OC7-OC0
  success &= this->write_reg_(CH423_REG_OCL, static_cast<uint8_t>(this->output_bits_ >> 8));
  // Write OC15-OC8
  success &= this->write_reg_(CH423_REG_OCH, static_cast<uint8_t>(this->output_bits_ >> 16));
  return success;
}

float CH423Component::get_setup_priority() const { return setup_priority::IO; }

// Run our loop() method very early in the loop, so that we cache read values
// before other components call our digital_read() method.
float CH423Component::get_loop_priority() const { return 9.0f; }  // Just after WIFI

void CH423GPIOPin::pin_mode(gpio::Flags flags) { this->parent_->pin_mode(this->pin_, flags); }
bool CH423GPIOPin::digital_read() { return this->parent_->digital_read(this->pin_) ^ this->inverted_; }

void CH423GPIOPin::digital_write(bool value) { this->parent_->digital_write(this->pin_, value ^ this->inverted_); }
size_t CH423GPIOPin::dump_summary(char *buffer, size_t len) const {
  return snprintf(buffer, len, "EXIO%u via CH423", this->pin_);
}
void CH423GPIOPin::set_flags(gpio::Flags flags) {
  flags_ = flags;
  this->parent_->pin_mode(this->pin_, flags);
}

}  // namespace esphome::ch423
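The register constants above follow from the CH423's unusual addressing: the chip has no single device address; instead each register is its own i2c target, and the datasheet quotes 8-bit write addresses while ESPHome's i2c layer uses 7-bit addresses, hence the ">> 1" in the comments. A tiny Python check of that arithmetic, using only the datasheet byte values already quoted in the comments:

# Reproduce the 8-bit -> 7-bit address conversion from the comments above.
datasheet_write_addresses = {
    "SYS": 0x48,
    "OCL": 0x44,
    "OCH": 0x46,
    "IO": 0x60,
    "IO_RD": 0x4D,  # odd read address; >> 1 rounds down to 0x26
}
for name, addr8 in datasheet_write_addresses.items():
    print(f"{name}: 8-bit 0x{addr8:02X} -> 7-bit 0x{addr8 >> 1:02X}")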
esphome/components/ch423/ch423.h (new file, 67 lines)
@@ -0,0 +1,67 @@
#pragma once

#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/components/i2c/i2c.h"

namespace esphome::ch423 {

class CH423Component : public Component, public i2c::I2CDevice {
 public:
  CH423Component() = default;

  /// Check i2c availability and setup masks
  void setup() override;
  /// Poll for input changes periodically
  void loop() override;
  /// Helper function to read the value of a pin.
  bool digital_read(uint8_t pin);
  /// Helper function to write the value of a pin.
  void digital_write(uint8_t pin, bool value);
  /// Helper function to set the pin mode of a pin.
  void pin_mode(uint8_t pin, gpio::Flags flags);

  float get_setup_priority() const override;
  float get_loop_priority() const override;
  void dump_config() override;

 protected:
  bool write_reg_(uint8_t reg, uint8_t value);
  uint8_t read_reg_(uint8_t reg);
  bool read_inputs_();
  bool write_outputs_();

  /// The mask to write as output state - 1 means HIGH, 0 means LOW
  uint32_t output_bits_{0x00};
  /// Flags to check if read previously during this loop
  uint8_t pin_read_flags_{0x00};
  /// Copy of last read values
  uint8_t input_bits_{0x00};
  /// System parameters
  uint8_t sys_params_{0x00};
};

/// Helper class to expose a CH423 pin as a GPIO pin.
class CH423GPIOPin : public GPIOPin {
 public:
  void setup() override {}
  void pin_mode(gpio::Flags flags) override;
  bool digital_read() override;
  void digital_write(bool value) override;
  size_t dump_summary(char *buffer, size_t len) const override;

  void set_parent(CH423Component *parent) { parent_ = parent; }
  void set_pin(uint8_t pin) { pin_ = pin; }
  void set_inverted(bool inverted) { inverted_ = inverted; }
  void set_flags(gpio::Flags flags);

  gpio::Flags get_flags() const override { return this->flags_; }

 protected:
  CH423Component *parent_{};
  uint8_t pin_{};
  bool inverted_{};
  gpio::Flags flags_{};
};

}  // namespace esphome::ch423
@@ -15,7 +15,7 @@ from esphome.const import (
     CONF_UPDATE_INTERVAL,
     SCHEDULER_DONT_RUN,
 )
-from esphome.core import CoroPriority, coroutine_with_priority
+from esphome.core import CORE, CoroPriority, coroutine_with_priority

 IS_PLATFORM_COMPONENT = True

@@ -222,3 +222,8 @@ async def display_is_displaying_page_to_code(config, condition_id, template_arg,
 async def to_code(config):
     cg.add_global(display_ns.using)
     cg.add_define("USE_DISPLAY")
+    if CORE.is_esp32:
+        # Re-enable ESP-IDF's LCD driver (excluded by default to save compile time)
+        from esphome.components.esp32 import include_builtin_idf_component
+
+        include_builtin_idf_component("esp_lcd")
@@ -2,7 +2,8 @@ import esphome.codegen as cg
 from esphome.components import i2c
 from esphome.components.audio_dac import AudioDac
 import esphome.config_validation as cv
-from esphome.const import CONF_ID
+from esphome.const import CONF_AUDIO_DAC, CONF_BITS_PER_SAMPLE, CONF_ID
+import esphome.final_validate as fv

 CODEOWNERS = ["@kbx81"]
 DEPENDENCIES = ["i2c"]
@@ -21,6 +22,29 @@ CONFIG_SCHEMA = (
 )


+def _final_validate(config):
+    full_config = fv.full_config.get()
+
+    # Check all speaker configurations for ones that reference this es8156
+    speaker_configs = full_config.get("speaker", [])
+    for speaker_config in speaker_configs:
+        audio_dac_id = speaker_config.get(CONF_AUDIO_DAC)
+        if (
+            audio_dac_id is not None
+            and audio_dac_id == config[CONF_ID]
+            and (bits_per_sample := speaker_config.get(CONF_BITS_PER_SAMPLE)) is not None
+            and bits_per_sample > 24
+        ):
+            raise cv.Invalid(
+                f"ES8156 does not support more than 24 bits per sample. "
+                f"The speaker referencing this audio_dac has bits_per_sample set to {bits_per_sample}."
+            )
+
+
+FINAL_VALIDATE_SCHEMA = _final_validate
+
+
 async def to_code(config):
     var = cg.new_Pvariable(config[CONF_ID])
     await cg.register_component(var, config)
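The pattern in _final_validate is worth noting: final validation runs after the whole YAML tree is validated, so a component can inspect other components' configs (here, every speaker that references this DAC). A minimal sketch of the same cross-config check with plain dicts in place of ESPHome's validated config (the data here is hypothetical):

full_config = {
    "speaker": [
        {"audio_dac": "dac1", "bits_per_sample": 32},
        {"audio_dac": "other_dac", "bits_per_sample": 16},
    ]
}

def validate_dac(dac_id, max_bits=24):
    # Reject any speaker that points at this DAC with an unsupported depth
    for speaker in full_config.get("speaker", []):
        bits = speaker.get("bits_per_sample")
        if speaker.get("audio_dac") == dac_id and bits is not None and bits > max_bits:
            raise ValueError(f"{dac_id} supports at most {max_bits} bits, got {bits}")

validate_dac("other_dac")  # passes: its speaker uses 16 bits
# validate_dac("dac1")     # would raise: 32 > 24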
@@ -17,24 +17,61 @@ static const char *const TAG = "es8156";
 }

 void ES8156::setup() {
   // REG02 MODE CONFIG 1: Enable software mode for I2C control of volume/mute
   // Bit 2: SOFT_MODE_SEL=1 (software mode enabled)
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG02_SCLK_MODE, 0x04));

   // Analog system configuration (active-low power down bits, active-high enables)
   // REG20 ANALOG SYSTEM: Configure analog signal path
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG20_ANALOG_SYS1, 0x2A));

   // REG21 ANALOG SYSTEM: VSEL=0x1C (bias level ~120%), normal VREF ramp speed
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG21_ANALOG_SYS2, 0x3C));

   // REG22 ANALOG SYSTEM: Line out mode (HPSW=0), OUT_MUTE=0 (not muted)
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG22_ANALOG_SYS3, 0x00));

   // REG24 ANALOG SYSTEM: Low power mode for VREFBUF, HPCOM, DACVRP; DAC normal power
   // Bits 2:0 = 0x07: LPVREFBUF=1, LPHPCOM=1, LPDACVRP=1, LPDAC=0
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG24_ANALOG_LP, 0x07));

   // REG23 ANALOG SYSTEM: Lowest bias (IBIAS_SW=0), VMIDLVL=VDDA/2, normal impedance
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG23_ANALOG_SYS4, 0x00));

   // Timing and interface configuration
   // REG0A/0B TIME CONTROL: Fast state machine transitions
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0A_TIME_CONTROL1, 0x01));
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0B_TIME_CONTROL2, 0x01));

   // REG11 SDP INTERFACE CONFIG: Default I2S format (24-bit, I2S mode)
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG11_DAC_SDP, 0x00));

   // REG19 EQ CONTROL 1: EQ disabled (EQ_ON=0), EQ_BAND_NUM=2
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG19_EQ_CONTROL1, 0x20));

   // REG0D P2S CONTROL: Parallel-to-serial converter settings
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0D_P2S_CONTROL, 0x14));

   // REG09 MISC CONTROL 2: Default settings
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG09_MISC_CONTROL2, 0x00));

   // REG18 MISC CONTROL 3: Stereo channel routing, no inversion
   // Bits 5:4 CHN_CROSS: 0=L→L/R→R, 1=L to both, 2=R to both, 3=swap L/R
   // Bits 3:2: LCH_INV/RCH_INV channel inversion
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG18_MISC_CONTROL3, 0x00));

   // REG08 CLOCK OFF: Enable all internal clocks (0x3F = all clock gates open)
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG08_CLOCK_ON_OFF, 0x3F));

   // REG00 RESET CONTROL: Reset sequence
   // First: RST_DIG=1 (assert digital reset)
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x02));
   // Then: CSM_ON=1 (enable chip state machine), RST_DIG=1
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x03));

   // REG25 ANALOG SYSTEM: Power up analog blocks
   // VMIDSEL=2 (normal VMID operation), PDN_ANA=0, ENREFR=0, ENHPCOM=0
   // PDN_DACVREFGEN=0, PDN_VREFBUF=0, PDN_DAC=0 (all enabled)
   ES8156_ERROR_FAILED(this->write_byte(ES8156_REG25_ANALOG_SYS5, 0x20));
 }
@@ -53,6 +53,7 @@ from .const import (  # noqa
     KEY_BOARD,
     KEY_COMPONENTS,
     KEY_ESP32,
+    KEY_EXCLUDE_COMPONENTS,
     KEY_EXTRA_BUILD_FILES,
     KEY_FLASH_SIZE,
     KEY_FULL_CERT_BUNDLE,
@@ -86,6 +87,7 @@ IS_TARGET_PLATFORM = True
 CONF_ASSERTION_LEVEL = "assertion_level"
 CONF_COMPILER_OPTIMIZATION = "compiler_optimization"
 CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES = "enable_idf_experimental_features"
+CONF_INCLUDE_BUILTIN_IDF_COMPONENTS = "include_builtin_idf_components"
 CONF_ENABLE_LWIP_ASSERT = "enable_lwip_assert"
 CONF_ENABLE_OTA_ROLLBACK = "enable_ota_rollback"
 CONF_EXECUTE_FROM_PSRAM = "execute_from_psram"
@@ -114,6 +116,36 @@ COMPILER_OPTIMIZATIONS = {
     "SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
 }

+# ESP-IDF components excluded by default to reduce compile time.
+# Components can be re-enabled by calling include_builtin_idf_component() in to_code().
+#
+# Cannot be excluded (dependencies of required components):
+# - "console": espressif/mdns unconditionally depends on it
+# - "sdmmc": driver -> esp_driver_sdmmc -> sdmmc dependency chain
+DEFAULT_EXCLUDED_IDF_COMPONENTS = (
+    "cmock",  # Unit testing mock framework - ESPHome doesn't use IDF's testing
+    "esp_adc",  # ADC driver - only needed by adc component
+    "esp_driver_i2s",  # I2S driver - only needed by i2s_audio component
+    "esp_driver_rmt",  # RMT driver - only needed by remote_transmitter/receiver, neopixelbus
+    "esp_driver_touch_sens",  # Touch sensor driver - only needed by esp32_touch
+    "esp_eth",  # Ethernet driver - only needed by ethernet component
+    "esp_hid",  # HID host/device support - ESPHome doesn't implement HID functionality
+    "esp_http_client",  # HTTP client - only needed by http_request component
+    "esp_https_ota",  # ESP-IDF HTTPS OTA - ESPHome has its own OTA implementation
+    "esp_https_server",  # HTTPS server - ESPHome has its own web server
+    "esp_lcd",  # LCD controller drivers - only needed by display component
+    "esp_local_ctrl",  # Local control over HTTPS/BLE - ESPHome has native API
+    "espcoredump",  # Core dump support - ESPHome has its own debug component
+    "fatfs",  # FAT filesystem - ESPHome doesn't use filesystem storage
+    "mqtt",  # ESP-IDF MQTT library - ESPHome has its own MQTT implementation
+    "perfmon",  # Xtensa performance monitor - ESPHome has its own debug component
+    "protocomm",  # Protocol communication for provisioning - unused by ESPHome
+    "spiffs",  # SPIFFS filesystem - ESPHome doesn't use filesystem storage (IDF only)
+    "unity",  # Unit testing framework - ESPHome doesn't use IDF's testing
+    "wear_levelling",  # Flash wear levelling for fatfs - unused since fatfs unused
+    "wifi_provisioning",  # WiFi provisioning - ESPHome uses its own improv implementation
+)
+
 # ESP32 (original) chip revision options
 # Setting minimum revision to 3.0 or higher:
 # - Reduces flash size by excluding workaround code for older chip bugs
@@ -203,6 +235,9 @@ def set_core_data(config):
     )
     CORE.data[KEY_ESP32][KEY_SDKCONFIG_OPTIONS] = {}
     CORE.data[KEY_ESP32][KEY_COMPONENTS] = {}
+    # Initialize with default exclusions - components can call include_builtin_idf_component()
+    # to re-enable any they need
+    CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS] = set(DEFAULT_EXCLUDED_IDF_COMPONENTS)
     CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] = cv.Version.parse(
         config[CONF_FRAMEWORK][CONF_VERSION]
     )
@@ -328,6 +363,28 @@ def add_idf_component(
     }


+def exclude_builtin_idf_component(name: str) -> None:
+    """Exclude an ESP-IDF component from the build.
+
+    This reduces compile time by skipping components that are not needed.
+    The component will be passed to ESP-IDF's EXCLUDE_COMPONENTS cmake variable.
+
+    Note: Components that are dependencies of other required components
+    cannot be excluded - ESP-IDF will still build them.
+    """
+    CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].add(name)
+
+
+def include_builtin_idf_component(name: str) -> None:
+    """Remove an ESP-IDF component from the exclusion list.
+
+    Call this from components that need an ESP-IDF component that is
+    excluded by default in DEFAULT_EXCLUDED_IDF_COMPONENTS. This ensures the
+    component will be built when needed.
+    """
+    CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].discard(name)
+
+
 def add_extra_script(stage: str, filename: str, path: Path):
     """Add an extra script to the project."""
     key = f"{stage}:{filename}"
@@ -672,11 +729,25 @@ CONF_RINGBUF_IN_IRAM = "ringbuf_in_iram"
 CONF_HEAP_IN_IRAM = "heap_in_iram"
 CONF_LOOP_TASK_STACK_SIZE = "loop_task_stack_size"
 CONF_USE_FULL_CERTIFICATE_BUNDLE = "use_full_certificate_bundle"
+CONF_DISABLE_DEBUG_STUBS = "disable_debug_stubs"
+CONF_DISABLE_OCD_AWARE = "disable_ocd_aware"
+CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY = "disable_usb_serial_jtag_secondary"
+CONF_DISABLE_DEV_NULL_VFS = "disable_dev_null_vfs"
+CONF_DISABLE_MBEDTLS_PEER_CERT = "disable_mbedtls_peer_cert"
+CONF_DISABLE_MBEDTLS_PKCS7 = "disable_mbedtls_pkcs7"
+CONF_DISABLE_REGI2C_IN_IRAM = "disable_regi2c_in_iram"
+CONF_DISABLE_FATFS = "disable_fatfs"

 # VFS requirement tracking
 # Components that need VFS features can call require_vfs_select() or require_vfs_dir()
 KEY_VFS_SELECT_REQUIRED = "vfs_select_required"
 KEY_VFS_DIR_REQUIRED = "vfs_dir_required"
+# Feature requirement tracking - components can call require_* functions to re-enable
+# These are stored in CORE.data[KEY_ESP32] dict
+KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED = "usb_serial_jtag_secondary_required"
+KEY_MBEDTLS_PEER_CERT_REQUIRED = "mbedtls_peer_cert_required"
+KEY_MBEDTLS_PKCS7_REQUIRED = "mbedtls_pkcs7_required"
+KEY_FATFS_REQUIRED = "fatfs_required"


 def require_vfs_select() -> None:
@@ -709,6 +780,43 @@ def require_full_certificate_bundle() -> None:
     CORE.data[KEY_ESP32][KEY_FULL_CERT_BUNDLE] = True


+def require_usb_serial_jtag_secondary() -> None:
+    """Mark that USB Serial/JTAG secondary console is required by a component.
+
+    Call this from components (e.g., logger) that need USB Serial/JTAG console output.
+    This prevents CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG from being disabled.
+    """
+    CORE.data[KEY_ESP32][KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED] = True
+
+
+def require_mbedtls_peer_cert() -> None:
+    """Mark that mbedTLS peer certificate retention is required by a component.
+
+    Call this from components that need access to the peer certificate after
+    the TLS handshake is complete. This prevents CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE
+    from being disabled.
+    """
+    CORE.data[KEY_ESP32][KEY_MBEDTLS_PEER_CERT_REQUIRED] = True
+
+
+def require_mbedtls_pkcs7() -> None:
+    """Mark that mbedTLS PKCS#7 support is required by a component.
+
+    Call this from components that need PKCS#7 certificate validation.
+    This prevents CONFIG_MBEDTLS_PKCS7_C from being disabled.
+    """
+    CORE.data[KEY_ESP32][KEY_MBEDTLS_PKCS7_REQUIRED] = True
+
+
+def require_fatfs() -> None:
+    """Mark that FATFS support is required by a component.
+
+    Call this from components that use FATFS (e.g., SD card, storage components).
+    This prevents FATFS from being disabled when disable_fatfs is set.
+    """
+    CORE.data[KEY_ESP32][KEY_FATFS_REQUIRED] = True
+
+
 def _parse_idf_component(value: str) -> ConfigType:
     """Parse IDF component shorthand syntax like 'owner/component^version'"""
     # Match operator followed by version-like string (digit or *)
@@ -793,6 +901,19 @@ FRAMEWORK_SCHEMA = cv.Schema(
                     cv.Optional(
                         CONF_USE_FULL_CERTIFICATE_BUNDLE, default=False
                     ): cv.boolean,
+                    cv.Optional(
+                        CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, default=[]
+                    ): cv.ensure_list(cv.string_strict),
+                    cv.Optional(CONF_DISABLE_DEBUG_STUBS, default=True): cv.boolean,
+                    cv.Optional(CONF_DISABLE_OCD_AWARE, default=True): cv.boolean,
+                    cv.Optional(
+                        CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY, default=True
+                    ): cv.boolean,
+                    cv.Optional(CONF_DISABLE_DEV_NULL_VFS, default=True): cv.boolean,
+                    cv.Optional(CONF_DISABLE_MBEDTLS_PEER_CERT, default=True): cv.boolean,
+                    cv.Optional(CONF_DISABLE_MBEDTLS_PKCS7, default=True): cv.boolean,
+                    cv.Optional(CONF_DISABLE_REGI2C_IN_IRAM, default=True): cv.boolean,
+                    cv.Optional(CONF_DISABLE_FATFS, default=True): cv.boolean,
                 }
             ),
             cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
@@ -982,6 +1103,19 @@ def _configure_lwip_max_sockets(conf: dict) -> None:
     add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)


+@coroutine_with_priority(CoroPriority.FINAL)
+async def _write_exclude_components() -> None:
+    """Write EXCLUDE_COMPONENTS cmake arg after all components have registered exclusions."""
+    if KEY_ESP32 not in CORE.data:
+        return
+    excluded = CORE.data[KEY_ESP32].get(KEY_EXCLUDE_COMPONENTS)
+    if excluded:
+        exclude_list = ";".join(sorted(excluded))
+        cg.add_platformio_option(
+            "board_build.cmake_extra_args", f"-DEXCLUDE_COMPONENTS={exclude_list}"
+        )
+
+
 @coroutine_with_priority(CoroPriority.FINAL)
 async def _add_yaml_idf_components(components: list[ConfigType]):
     """Add IDF components from YAML config with final priority to override code-added components."""
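Taken together, the exclusion flow is: start from the default exclusion set, let every component's to_code() discard the entries it needs, then join what remains into a single cmake argument at FINAL priority. A standalone sketch of that flow, with a plain set and print() standing in for CORE.data and cg.add_platformio_option:

# Start from a (shortened, illustrative) default exclusion set.
excluded = {"esp_adc", "esp_eth", "esp_http_client", "mqtt"}

def include_component(name):
    excluded.discard(name)  # no-op if the component was never excluded

include_component("esp_adc")         # adc component re-enables its driver
include_component("esp_driver_i2s")  # safe even though it wasn't in the set

# At FINAL priority, everything still excluded becomes one cmake argument.
cmake_arg = "-DEXCLUDE_COMPONENTS=" + ";".join(sorted(excluded))
print(cmake_arg)  # -DEXCLUDE_COMPONENTS=esp_eth;esp_http_client;mqtt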
@@ -1195,6 +1329,11 @@ async def to_code(config):

     # Apply LWIP optimization settings
     advanced = conf[CONF_ADVANCED]

+    # Re-include any IDF components the user explicitly requested
+    for component_name in advanced.get(CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, []):
+        include_builtin_idf_component(component_name)
+
     # DHCP server: only disable if explicitly set to false
     # WiFi component handles its own optimization when AP mode is not used
     # When using Arduino with Ethernet, DHCP server functions must be available
|
||||
|
||||
add_idf_sdkconfig_option(f"CONFIG_LOG_DEFAULT_LEVEL_{conf[CONF_LOG_LEVEL]}", True)
|
||||
|
||||
# Disable OpenOCD debug stubs to save code size
|
||||
# These are used for on-chip debugging with OpenOCD/JTAG, rarely needed for ESPHome
|
||||
if advanced[CONF_DISABLE_DEBUG_STUBS]:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_STUBS_ENABLE", False)
|
||||
|
||||
# Disable OCD-aware exception handlers
|
||||
# When enabled, the panic handler detects JTAG debugger and halts instead of resetting
|
||||
# Most ESPHome users don't use JTAG debugging
|
||||
if advanced[CONF_DISABLE_OCD_AWARE]:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_OCDAWARE", False)
|
||||
|
||||
# Disable USB Serial/JTAG secondary console
|
||||
# Components like logger can call require_usb_serial_jtag_secondary() to re-enable
|
||||
if CORE.data[KEY_ESP32].get(KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED, False):
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG", True)
|
||||
elif advanced[CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY]:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_NONE", True)
|
||||
|
||||
# Disable /dev/null VFS initialization
|
||||
# ESPHome doesn't typically need /dev/null
|
||||
if advanced[CONF_DISABLE_DEV_NULL_VFS]:
|
||||
add_idf_sdkconfig_option("CONFIG_VFS_INITIALIZE_DEV_NULL", False)
|
||||
|
||||
# Disable keeping peer certificate after TLS handshake
|
||||
# Saves ~4KB heap per connection, but prevents certificate inspection after handshake
|
||||
# Components that need it can call require_mbedtls_peer_cert()
|
||||
if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PEER_CERT_REQUIRED, False):
|
||||
add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", True)
|
||||
elif advanced[CONF_DISABLE_MBEDTLS_PEER_CERT]:
|
||||
add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", False)
|
||||
|
||||
# Disable PKCS#7 support in mbedTLS
|
||||
# Only needed for specific certificate validation scenarios
|
||||
# Components that need it can call require_mbedtls_pkcs7()
|
||||
if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PKCS7_REQUIRED, False):
|
||||
# Component called require_mbedtls_pkcs7() - enable regardless of user setting
|
||||
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", True)
|
||||
elif advanced[CONF_DISABLE_MBEDTLS_PKCS7]:
|
||||
add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", False)
|
||||
|
||||
# Disable regi2c control functions in IRAM
|
||||
# Only needed if using analog peripherals (ADC, DAC, etc.) from ISRs while cache is disabled
|
||||
if advanced[CONF_DISABLE_REGI2C_IN_IRAM]:
|
||||
add_idf_sdkconfig_option("CONFIG_ESP_REGI2C_CTRL_FUNC_IN_IRAM", False)
|
||||
|
||||
# Disable FATFS support
|
||||
# Components that need FATFS (SD card, etc.) can call require_fatfs()
|
||||
if CORE.data[KEY_ESP32].get(KEY_FATFS_REQUIRED, False):
|
||||
# Component called require_fatfs() - enable regardless of user setting
|
||||
add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", False)
|
||||
add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 2)
|
||||
elif advanced[CONF_DISABLE_FATFS]:
|
||||
add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", True)
|
||||
add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 0)
|
||||
|
||||
for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
|
||||
add_idf_sdkconfig_option(name, RawSdkconfigValue(value))
|
||||
|
||||
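Each of these blocks follows the same precedence rule: a component's require_* flag wins over the user's disable_* setting, and if neither is set the IDF default is left alone. A compact sketch of just that decision (hypothetical function, not ESPHome API):

def resolve(option, required, user_disabled):
    # Component-declared requirement always wins: force-enable.
    if required:
        return {option: True}
    # Nothing needs it and the user opted out: disable.
    if user_disabled:
        return {option: False}
    # Otherwise leave the IDF default untouched.
    return {}

print(resolve("CONFIG_MBEDTLS_PKCS7_C", required=True, user_disabled=True))
print(resolve("CONFIG_MBEDTLS_PKCS7_C", required=False, user_disabled=True))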
@@ -1324,6 +1518,11 @@ async def to_code(config):
     if conf[CONF_COMPONENTS]:
         CORE.add_job(_add_yaml_idf_components, conf[CONF_COMPONENTS])

+    # Write EXCLUDE_COMPONENTS at FINAL priority after all components have had
+    # a chance to call include_builtin_idf_component() to re-enable components they need.
+    # Default exclusions are added in set_core_data() during config validation.
+    CORE.add_job(_write_exclude_components)
+

 APP_PARTITION_SIZES = {
     "2MB": 0x0C0000,  # 768 KB
@@ -175,6 +175,32 @@ ESP32_BOARD_PINS = {
         "LED": 13,
         "LED_BUILTIN": 13,
     },
+    "adafruit_feather_esp32s3_reversetft": {
+        "BUTTON": 0,
+        "A0": 18,
+        "A1": 17,
+        "A2": 16,
+        "A3": 15,
+        "A4": 14,
+        "A5": 8,
+        "SCK": 36,
+        "MOSI": 35,
+        "MISO": 37,
+        "RX": 38,
+        "TX": 39,
+        "SCL": 4,
+        "SDA": 3,
+        "NEOPIXEL": 33,
+        "PIN_NEOPIXEL": 33,
+        "NEOPIXEL_POWER": 21,
+        "TFT_I2C_POWER": 7,
+        "TFT_CS": 42,
+        "TFT_DC": 40,
+        "TFT_RESET": 41,
+        "TFT_BACKLIGHT": 45,
+        "LED": 13,
+        "LED_BUILTIN": 13,
+    },
     "adafruit_feather_esp32s3_tft": {
         "BUTTON": 0,
         "A0": 18,
@@ -6,6 +6,7 @@ KEY_FLASH_SIZE = "flash_size"
 KEY_VARIANT = "variant"
 KEY_SDKCONFIG_OPTIONS = "sdkconfig_options"
 KEY_COMPONENTS = "components"
+KEY_EXCLUDE_COMPONENTS = "exclude_components"
 KEY_REPO = "repo"
 KEY_REF = "ref"
 KEY_REFRESH = "refresh"
@@ -5,6 +5,7 @@ from esphome import pins
 import esphome.codegen as cg
 from esphome.components import esp32, light
 from esphome.components.const import CONF_USE_PSRAM
+from esphome.components.esp32 import include_builtin_idf_component
 import esphome.config_validation as cv
 from esphome.const import (
     CONF_CHIPSET,
@@ -129,6 +130,9 @@ CONFIG_SCHEMA = cv.All(


 async def to_code(config):
+    # Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
+    include_builtin_idf_component("esp_driver_rmt")
+
     var = cg.new_Pvariable(config[CONF_OUTPUT_ID])
     await light.register_light(var, config)
     await cg.register_component(var, config)
@@ -6,6 +6,7 @@ from esphome.components.esp32 import (
     VARIANT_ESP32S3,
     get_esp32_variant,
     gpio,
+    include_builtin_idf_component,
 )
 import esphome.config_validation as cv
 from esphome.const import (
@@ -266,6 +267,9 @@ CONFIG_SCHEMA = cv.All(


 async def to_code(config):
+    # Re-enable ESP-IDF's touch sensor driver (excluded by default to save compile time)
+    include_builtin_idf_component("esp_driver_touch_sens")
+
     touch = cg.new_Pvariable(config[CONF_ID])
     await cg.register_component(touch, config)
@@ -14,6 +14,7 @@ from esphome.components.esp32 import (
     add_idf_component,
     add_idf_sdkconfig_option,
     get_esp32_variant,
+    include_builtin_idf_component,
 )
 from esphome.components.network import ip_address_literal
 from esphome.components.spi import CONF_INTERFACE_INDEX, get_spi_interface
@@ -419,6 +420,9 @@ async def to_code(config):
         # Also disable WiFi/BT coexistence since WiFi is disabled
         add_idf_sdkconfig_option("CONFIG_SW_COEXIST_ENABLE", False)

+    # Re-enable ESP-IDF's Ethernet driver (excluded by default to save compile time)
+    include_builtin_idf_component("esp_eth")
+
     if config[CONF_TYPE] == "LAN8670":
         # Add LAN867x 10BASE-T1S PHY support component
         add_idf_component(name="espressif/lan867x", ref="2.0.0")
@@ -9,30 +9,56 @@ from esphome.const import (
     CONF_VALUE,
 )
 from esphome.core import CoroPriority, coroutine_with_priority
+from esphome.types import ConfigType

 CODEOWNERS = ["@esphome/core"]
 globals_ns = cg.esphome_ns.namespace("globals")
 GlobalsComponent = globals_ns.class_("GlobalsComponent", cg.Component)
-RestoringGlobalsComponent = globals_ns.class_("RestoringGlobalsComponent", cg.Component)
+RestoringGlobalsComponent = globals_ns.class_(
+    "RestoringGlobalsComponent", cg.PollingComponent
+)
 RestoringGlobalStringComponent = globals_ns.class_(
-    "RestoringGlobalStringComponent", cg.Component
+    "RestoringGlobalStringComponent", cg.PollingComponent
 )
 GlobalVarSetAction = globals_ns.class_("GlobalVarSetAction", automation.Action)

 CONF_MAX_RESTORE_DATA_LENGTH = "max_restore_data_length"

+# Base schema fields shared by both variants
+_BASE_SCHEMA = {
+    cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
+    cv.Required(CONF_TYPE): cv.string_strict,
+    cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
+    cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
+}

-MULTI_CONF = True
-CONFIG_SCHEMA = cv.Schema(
+# Non-restoring globals: regular Component (no polling needed)
+_NON_RESTORING_SCHEMA = cv.Schema(
     {
-        cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
-        cv.Required(CONF_TYPE): cv.string_strict,
-        cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
+        **_BASE_SCHEMA,
         cv.Optional(CONF_RESTORE_VALUE, default=False): cv.boolean,
-        cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
     }
 ).extend(cv.COMPONENT_SCHEMA)

+# Restoring globals: PollingComponent with configurable update_interval
+_RESTORING_SCHEMA = cv.Schema(
+    {
+        **_BASE_SCHEMA,
+        cv.Optional(CONF_RESTORE_VALUE, default=True): cv.boolean,
+    }
+).extend(cv.polling_component_schema("1s"))
+
+
+def _globals_schema(config: ConfigType) -> ConfigType:
+    """Select schema based on restore_value setting."""
+    if config.get(CONF_RESTORE_VALUE, False):
+        return _RESTORING_SCHEMA(config)
+    return _NON_RESTORING_SCHEMA(config)
+
+
+MULTI_CONF = True
+CONFIG_SCHEMA = _globals_schema
+

 # Run with low priority so that namespaces are registered first
 @coroutine_with_priority(CoroPriority.LATE)
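The _globals_schema trick is that CONFIG_SCHEMA can be any callable, so a single config key can route validation to one of two distinct schemas. A minimal sketch of the dispatch pattern, with plain functions standing in for cv.Schema objects (hypothetical behavior, not the real validators):

def restoring_schema(config):
    # Polling variant: an update interval is meaningful, default it
    config.setdefault("update_interval", "1s")
    return config

def non_restoring_schema(config):
    # Plain component: no polling, so no update interval
    config.pop("update_interval", None)
    return config

def select_schema(config):
    if config.get("restore_value", False):
        return restoring_schema(config)
    return non_restoring_schema(config)

print(select_schema({"id": "counter", "type": "int", "restore_value": True}))
print(select_schema({"id": "flag", "type": "bool"}))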
@@ -5,8 +5,7 @@
 #include "esphome/core/helpers.h"
 #include <cstring>

-namespace esphome {
-namespace globals {
+namespace esphome::globals {

 template<typename T> class GlobalsComponent : public Component {
  public:
@@ -24,13 +23,14 @@ template<typename T> class GlobalsComponent : public Component {
   T value_{};
 };

-template<typename T> class RestoringGlobalsComponent : public Component {
+template<typename T> class RestoringGlobalsComponent : public PollingComponent {
  public:
   using value_type = T;
-  explicit RestoringGlobalsComponent() = default;
-  explicit RestoringGlobalsComponent(T initial_value) : value_(initial_value) {}
+  explicit RestoringGlobalsComponent() : PollingComponent(1000) {}
+  explicit RestoringGlobalsComponent(T initial_value) : PollingComponent(1000), value_(initial_value) {}
   explicit RestoringGlobalsComponent(
-      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
+      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
+      : PollingComponent(1000) {
     memcpy(this->value_, initial_value.data(), sizeof(T));
   }

@@ -44,7 +44,7 @@ template<typename T> class RestoringGlobalsComponent : public Component {

   float get_setup_priority() const override { return setup_priority::HARDWARE; }

-  void loop() override { store_value_(); }
+  void update() override { store_value_(); }

   void on_shutdown() override { store_value_(); }

@@ -66,13 +66,14 @@ template<typename T> class RestoringGlobalsComponent : public Component {
 };

 // Use with string or subclasses of strings
-template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public Component {
+template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public PollingComponent {
  public:
   using value_type = T;
-  explicit RestoringGlobalStringComponent() = default;
-  explicit RestoringGlobalStringComponent(T initial_value) { this->value_ = initial_value; }
+  explicit RestoringGlobalStringComponent() : PollingComponent(1000) {}
+  explicit RestoringGlobalStringComponent(T initial_value) : PollingComponent(1000) { this->value_ = initial_value; }
   explicit RestoringGlobalStringComponent(
-      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
+      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
+      : PollingComponent(1000) {
     memcpy(this->value_, initial_value.data(), sizeof(T));
   }

@@ -90,7 +91,7 @@ template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public Component {

   float get_setup_priority() const override { return setup_priority::HARDWARE; }

-  void loop() override { store_value_(); }
+  void update() override { store_value_(); }

   void on_shutdown() override { store_value_(); }

@@ -144,5 +145,4 @@ template<typename T> T &id(GlobalsComponent<T> *value) { return value->value(); }
 template<typename T> T &id(RestoringGlobalsComponent<T> *value) { return value->value(); }
 template<typename T, uint8_t SZ> T &id(RestoringGlobalStringComponent<T, SZ> *value) { return value->value(); }

-}  // namespace globals
-}  // namespace esphome
+}  // namespace esphome::globals
@@ -155,6 +155,9 @@ async def to_code(config):
         cg.add(var.set_watchdog_timeout(timeout_ms))

+    if CORE.is_esp32:
+        # Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
+        esp32.include_builtin_idf_component("esp_http_client")
+
     cg.add(var.set_buffer_size_rx(config[CONF_BUFFER_SIZE_RX]))
     cg.add(var.set_buffer_size_tx(config[CONF_BUFFER_SIZE_TX]))
     cg.add(var.set_verify_ssl(config[CONF_VERIFY_SSL]))
@@ -1,6 +1,11 @@
 from esphome import pins
 import esphome.codegen as cg
+from esphome.components.esp32 import (
+    add_idf_sdkconfig_option,
+    get_esp32_variant,
+    include_builtin_idf_component,
+)
 from esphome.components.esp32.const import (
     VARIANT_ESP32,
     VARIANT_ESP32C3,
     VARIANT_ESP32C5,
@@ -10,8 +15,6 @@ from esphome.components.esp32 import (
     VARIANT_ESP32P4,
     VARIANT_ESP32S2,
     VARIANT_ESP32S3,
-    add_idf_sdkconfig_option,
-    get_esp32_variant,
 )
 import esphome.config_validation as cv
 from esphome.const import CONF_BITS_PER_SAMPLE, CONF_CHANNEL, CONF_ID, CONF_SAMPLE_RATE
@@ -272,6 +275,10 @@ FINAL_VALIDATE_SCHEMA = _final_validate
 async def to_code(config):
     var = cg.new_Pvariable(config[CONF_ID])
     await cg.register_component(var, config)

+    # Re-enable ESP-IDF's I2S driver (excluded by default to save compile time)
+    include_builtin_idf_component("esp_driver_i2s")
+
     if use_legacy():
         cg.add_define("USE_I2S_LEGACY")
@@ -15,7 +15,7 @@ static const char *const TAG = "json";
|
||||
static SpiRamAllocator global_json_allocator;
|
||||
#endif
|
||||
|
||||
std::string build_json(const json_build_t &f) {
|
||||
SerializationBuffer<> build_json(const json_build_t &f) {
|
||||
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
|
||||
JsonBuilder builder;
|
||||
JsonObject root = builder.root();
|
||||
@@ -61,14 +61,62 @@ JsonDocument parse_json(const uint8_t *data, size_t len) {
|
||||
// NOLINTEND(clang-analyzer-cplusplus.NewDeleteLeaks)
|
||||
}
|
||||
|
||||
std::string JsonBuilder::serialize() {
|
||||
SerializationBuffer<> JsonBuilder::serialize() {
|
||||
// ===========================================================================================
|
||||
// CRITICAL: NRVO (Named Return Value Optimization) - DO NOT REFACTOR WITHOUT UNDERSTANDING
|
||||
// ===========================================================================================
|
||||
//
|
||||
// This function is carefully structured to enable NRVO. The compiler constructs `result`
|
||||
// directly in the caller's stack frame, eliminating the move constructor call entirely.
|
||||
//
|
||||
// WITHOUT NRVO: Each return would trigger SerializationBuffer's move constructor, which
|
||||
// must memcpy up to 768 bytes of stack buffer content. This happens on EVERY JSON
|
||||
// serialization (sensor updates, web server responses, MQTT publishes, etc.).
|
||||
//
|
||||
// WITH NRVO: Zero memcpy, zero move constructor overhead. The buffer lives directly
|
||||
// where the caller needs it.
|
||||
//
|
||||
// Requirements for NRVO to work:
|
||||
// 1. Single named variable (`result`) returned from ALL paths
|
||||
// 2. All paths must return the SAME variable (not different variables)
|
||||
// 3. No std::move() on the return statement
|
||||
//
|
||||
// If you must modify this function:
|
||||
// - Keep a single `result` variable declared at the top
|
||||
// - All code paths must return `result` (not a different variable)
|
||||
// - Verify NRVO still works by checking the disassembly for move constructor calls
|
||||
// - Test: objdump -d -C firmware.elf | grep "SerializationBuffer.*SerializationBuffer"
|
||||
// Should show only destructor, NOT move constructor
|
||||
//
|
||||
// Why we avoid measureJson(): It instantiates DummyWriter templates adding ~1KB flash.
|
||||
// Instead, try stack buffer first. 768 bytes covers 99.9% of JSON payloads (sensors ~200B,
|
||||
// lights ~170B, climate ~700B). Only entities with 40+ options exceed this.
|
||||
//
|
||||
// ===========================================================================================
|
||||
constexpr size_t buf_size = SerializationBuffer<>::BUFFER_SIZE;
|
||||
SerializationBuffer<> result(buf_size - 1); // Max content size (reserve 1 for null)
|
||||
|
||||
if (doc_.overflowed()) {
|
||||
ESP_LOGE(TAG, "JSON document overflow");
|
||||
return "{}";
|
||||
auto *buf = result.data_writable_();
|
||||
buf[0] = '{';
|
||||
buf[1] = '}';
|
||||
buf[2] = '\0';
|
||||
result.set_size_(2);
|
||||
return result;
|
||||
}
|
||||
std::string output;
|
||||
serializeJson(doc_, output);
|
||||
return output;
|
||||
|
||||
size_t size = serializeJson(doc_, result.data_writable_(), buf_size);
|
||||
if (size < buf_size) {
|
||||
// Fits in stack buffer - update size to actual length
|
||||
result.set_size_(size);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Needs heap allocation - reallocate and serialize again with exact size
|
||||
result.reallocate_heap_(size);
|
||||
serializeJson(doc_, result.data_writable_(), size + 1);
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace json
|
||||
|
||||
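Setting the C++-specific NRVO concern aside, the core strategy of serialize() is two-pass: serialize into a fixed-size buffer first, and only when the payload does not fit, allocate exactly the needed size and serialize again. A language-agnostic sketch of that sizing logic in Python (it models only the stack-vs-heap decision, not move elision, which Python has no equivalent of):

import json

STACK_SIZE = 768  # mirrors SerializationBuffer's default stack buffer

def serialize(doc):
    payload = json.dumps(doc, separators=(",", ":"))
    if len(payload) < STACK_SIZE:
        return payload, "stack"  # fits the preallocated buffer
    return payload, "heap"       # oversized: exact-size allocation path

body, location = serialize({"state": "ON", "brightness": 255})
print(location, len(body))  # -> stack 31 (typical entity payloads are small)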
@@ -1,5 +1,7 @@
#pragma once

+#include <cstring>
#include <string>
#include <vector>

#include "esphome/core/defines.h"
@@ -14,6 +16,108 @@
namespace esphome {
namespace json {

+/// Buffer for JSON serialization that uses stack allocation for small payloads.
+/// Template parameter STACK_SIZE specifies the stack buffer size (default 768 bytes).
+/// Supports move semantics for efficient return-by-value.
+template<size_t STACK_SIZE = 768> class SerializationBuffer {
+ public:
+  static constexpr size_t BUFFER_SIZE = STACK_SIZE;  ///< Stack buffer size for this instantiation
+
+  /// Construct with known size (typically from measureJson)
+  explicit SerializationBuffer(size_t size) : size_(size) {
+    if (size + 1 <= STACK_SIZE) {
+      buffer_ = stack_buffer_;
+    } else {
+      heap_buffer_ = new char[size + 1];
+      buffer_ = heap_buffer_;
+    }
+    buffer_[0] = '\0';
+  }
+
+  ~SerializationBuffer() { delete[] heap_buffer_; }
+
+  // Move constructor - works with same template instantiation
+  SerializationBuffer(SerializationBuffer &&other) noexcept : heap_buffer_(other.heap_buffer_), size_(other.size_) {
+    if (other.buffer_ == other.stack_buffer_) {
+      // Stack buffer - must copy content
+      std::memcpy(stack_buffer_, other.stack_buffer_, size_ + 1);
+      buffer_ = stack_buffer_;
+    } else {
+      // Heap buffer - steal ownership
+      buffer_ = heap_buffer_;
+      other.heap_buffer_ = nullptr;
+    }
+    // Leave moved-from object in valid empty state
+    other.stack_buffer_[0] = '\0';
+    other.buffer_ = other.stack_buffer_;
+    other.size_ = 0;
+  }
+
+  // Move assignment
+  SerializationBuffer &operator=(SerializationBuffer &&other) noexcept {
+    if (this != &other) {
+      delete[] heap_buffer_;
+      heap_buffer_ = other.heap_buffer_;
+      size_ = other.size_;
+      if (other.buffer_ == other.stack_buffer_) {
+        std::memcpy(stack_buffer_, other.stack_buffer_, size_ + 1);
+        buffer_ = stack_buffer_;
+      } else {
+        buffer_ = heap_buffer_;
+        other.heap_buffer_ = nullptr;
+      }
+      // Leave moved-from object in valid empty state
+      other.stack_buffer_[0] = '\0';
+      other.buffer_ = other.stack_buffer_;
+      other.size_ = 0;
+    }
+    return *this;
+  }
+
+  // Delete copy operations
+  SerializationBuffer(const SerializationBuffer &) = delete;
+  SerializationBuffer &operator=(const SerializationBuffer &) = delete;
+
+  /// Get null-terminated C string
+  const char *c_str() const { return buffer_; }
+  /// Get data pointer
+  const char *data() const { return buffer_; }
+  /// Get string length (excluding null terminator)
+  size_t size() const { return size_; }
+
+  /// Implicit conversion to std::string for backward compatibility
+  /// WARNING: This allocates a new std::string on the heap. Prefer using
+  /// c_str() or data()/size() directly when possible to avoid allocation.
+  operator std::string() const { return std::string(buffer_, size_); }  // NOLINT(google-explicit-constructor)
+
+ private:
+  friend class JsonBuilder;  ///< Allows JsonBuilder::serialize() to call private methods
+
+  /// Get writable buffer (for serialization)
+  char *data_writable_() { return buffer_; }
+  /// Set actual size after serialization (must not exceed allocated size)
+  /// Also ensures null termination for c_str() safety
+  void set_size_(size_t size) {
+    size_ = size;
+    buffer_[size] = '\0';
+  }
+
+  /// Reallocate to heap buffer with new size (for when stack buffer is too small)
+  /// This invalidates any previous buffer content. Used by JsonBuilder::serialize().
+  void reallocate_heap_(size_t size) {
+    delete[] heap_buffer_;
+    heap_buffer_ = new char[size + 1];
+    buffer_ = heap_buffer_;
+    size_ = size;
+    buffer_[0] = '\0';
+  }
+
+  char stack_buffer_[STACK_SIZE];
+  char *heap_buffer_{nullptr};
+  char *buffer_;
+  size_t size_;
+};
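For orientation, the constructor's allocation rule restated standalone (a sketch, not part of the diff): a measured JSON size n needs n + 1 bytes for the terminating NUL, so only payloads up to STACK_SIZE - 1 bytes stay on the stack.

// Sketch only: the same test the constructor above performs, checkable at compile time.
#include <cstddef>

constexpr bool uses_stack(std::size_t measured_size, std::size_t stack_size = 768) {
  return measured_size + 1 <= stack_size;  // identical condition to the constructor
}

static_assert(uses_stack(767), "767-byte payload + NUL fits the 768-byte stack buffer");
static_assert(!uses_stack(768), "768-byte payload spills to a single heap allocation");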
#ifdef USE_PSRAM
// Build an allocator for the JSON Library using the RAMAllocator class
// This is only compiled when PSRAM is enabled
@@ -46,7 +150,8 @@ using json_parse_t = std::function<bool(JsonObject)>;
using json_build_t = std::function<void(JsonObject)>;

/// Build a JSON string with the provided json build function.
-std::string build_json(const json_build_t &f);
+/// Returns SerializationBuffer for stack-first allocation; implicitly converts to std::string.
+SerializationBuffer<> build_json(const json_build_t &f);

/// Parse a JSON string and run the provided json parse function if it's valid.
bool parse_json(const std::string &data, const json_parse_t &f);
@@ -69,7 +174,9 @@ class JsonBuilder {
    return root_;
  }

-  std::string serialize();
+  /// Serialize the JSON document to a SerializationBuffer (stack-first allocation)
+  /// Uses 768-byte stack buffer by default, falls back to heap for larger JSON
+  SerializationBuffer<> serialize();

 private:
#ifdef USE_PSRAM
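A consumer-side sketch of the declarations above; mqtt_publish is a hypothetical sink taking a C string and a length, used here only to contrast the two consumption styles (requires the esphome/ArduinoJson headers in a real build):

// Sketch only - assumes the json_util.h declarations above.
void mqtt_publish(const char *payload, size_t len);  // hypothetical sink

void example() {
  auto msg = esphome::json::build_json([](JsonObject root) { root["state"] = "ON"; });
  mqtt_publish(msg.c_str(), msg.size());  // preferred: no extra heap allocation
  std::string legacy = msg;               // backward compatible: implicit conversion copies once
}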
@@ -451,7 +451,7 @@ void LD2450Component::handle_periodic_data_() {
  int16_t ty = 0;
  int16_t td = 0;
  int16_t ts = 0;
-  int16_t angle = 0;
+  float angle = 0;
  uint8_t index = 0;
  Direction direction{DIRECTION_UNDEFINED};
  bool is_moving = false;

@@ -143,6 +143,7 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
        ],
        icon=ICON_FORMAT_TEXT_ROTATION_ANGLE_UP,
        unit_of_measurement=UNIT_DEGREES,
+        accuracy_decimals=1,
    ),
    cv.Optional(CONF_DISTANCE): sensor.sensor_schema(
        device_class=DEVICE_CLASS_DISTANCE,

@@ -12,6 +12,10 @@ namespace esphome::mdns {
static const char *const TAG = "mdns";

static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_SERVICE_COUNT> &services) {
+#ifdef USE_OPENTHREAD
+  // OpenThread handles service registration via SRP client
+  // Services are compiled by MDNSComponent::compile_records_() and consumed by OpenThreadSrpComponent
+#else
  esp_err_t err = mdns_init();
  if (err != ESP_OK) {
    ESP_LOGW(TAG, "Init failed: %s", esp_err_to_name(err));
@@ -41,13 +45,16 @@ static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_S
      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
    }
  }
+#endif
}

void MDNSComponent::setup() { this->setup_buffers_and_register_(register_esp32); }

void MDNSComponent::on_shutdown() {
+#ifndef USE_OPENTHREAD
  mdns_free();
  delay(40);  // Allow the mdns packets announcing service removal to be sent
+#endif
}

}  // namespace esphome::mdns

@@ -4,7 +4,10 @@ from esphome import automation
from esphome.automation import Condition
import esphome.codegen as cg
from esphome.components import logger, socket
-from esphome.components.esp32 import add_idf_sdkconfig_option
+from esphome.components.esp32 import (
+    add_idf_sdkconfig_option,
+    include_builtin_idf_component,
+)
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
@@ -360,6 +363,8 @@ async def to_code(config):
    # This enables low-latency MQTT event processing instead of waiting for select() timeout
    if CORE.is_esp32:
        socket.require_wake_loop_threadsafe()
+        # Re-enable ESP-IDF's mqtt component (excluded by default to save compile time)
+        include_builtin_idf_component("mqtt")

    cg.add_define("USE_MQTT")
    cg.add_global(mqtt_ns.using)

@@ -564,8 +564,8 @@ bool MQTTClientComponent::publish(const char *topic, const char *payload, size_t
}

bool MQTTClientComponent::publish_json(const char *topic, const json::json_build_t &f, uint8_t qos, bool retain) {
-  std::string message = json::build_json(f);
-  return this->publish(topic, message.c_str(), message.length(), qos, retain);
+  auto message = json::build_json(f);
+  return this->publish(topic, message.c_str(), message.size(), qos, retain);
}

void MQTTClientComponent::enable() {
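One subtlety worth keeping in mind now that auto binds a SerializationBuffer instead of a std::string: the pointer from c_str() is only valid while the buffer object is alive, so the named local above is load-bearing. A hedged illustration:

// Fine: `message` outlives the publish call that reads its c_str().
auto message = json::build_json(f);
this->publish(topic, message.c_str(), message.size(), qos, retain);

// Risky pattern to avoid: the temporary is destroyed at the end of the full
// expression, so a stored pointer would dangle afterwards.
// const char *p = json::build_json(f).c_str();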
@@ -1,7 +1,12 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components import light
-from esphome.components.esp32 import VARIANT_ESP32C3, VARIANT_ESP32S3, get_esp32_variant
+from esphome.components.esp32 import (
+    VARIANT_ESP32C3,
+    VARIANT_ESP32S3,
+    get_esp32_variant,
+    include_builtin_idf_component,
+)
import esphome.config_validation as cv
from esphome.const import (
    CONF_CHANNEL,
@@ -205,6 +210,10 @@ async def to_code(config):
    has_white = "W" in config[CONF_TYPE]
    method = config[CONF_METHOD]

+    # Re-enable ESP-IDF's RMT driver if using RMT method (excluded by default)
+    if CORE.is_esp32 and method[CONF_TYPE] == METHOD_ESP32_RMT:
+        include_builtin_idf_component("esp_driver_rmt")
+
    method_template = METHODS[method[CONF_TYPE]].to_code(
        method, config[CONF_VARIANT], config[CONF_INVERT]
    )

@@ -177,6 +177,8 @@ async def to_code(config):
    cg.add_define("USE_NEXTION_TFT_UPLOAD")
    cg.add(var.set_tft_url(config[CONF_TFT_URL]))
    if CORE.is_esp32:
+        # Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
+        esp32.include_builtin_idf_component("esp_http_client")
        esp32.add_idf_sdkconfig_option("CONFIG_ESP_TLS_INSECURE", True)
        esp32.add_idf_sdkconfig_option(
            "CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY", True

@@ -1,6 +1,6 @@
from esphome import automation
import esphome.codegen as cg
-from esphome.components import mqtt, web_server
+from esphome.components import mqtt, web_server, zigbee
import esphome.config_validation as cv
from esphome.const import (
    CONF_ABOVE,
@@ -189,6 +189,7 @@ validate_unit_of_measurement = cv.string_strict
_NUMBER_SCHEMA = (
    cv.ENTITY_BASE_SCHEMA.extend(web_server.WEBSERVER_SORTING_SCHEMA)
    .extend(cv.MQTT_COMMAND_COMPONENT_SCHEMA)
+    .extend(zigbee.NUMBER_SCHEMA)
    .extend(
        {
            cv.OnlyWith(CONF_MQTT_ID, "mqtt"): cv.declare_id(mqtt.MQTTNumberComponent),
@@ -214,6 +215,7 @@ _NUMBER_SCHEMA = (


_NUMBER_SCHEMA.add_extra(entity_duplicate_validator("number"))
+_NUMBER_SCHEMA.add_extra(zigbee.validate_number)


def number_schema(
@@ -277,6 +279,8 @@ async def setup_number_core_(
    if web_server_config := config.get(CONF_WEB_SERVER):
        await web_server.add_entity_config(var, web_server_config)

+    await zigbee.setup_number(var, config, min_value, max_value, step)
+

async def register_number(
    var, config, *, min_value: float, max_value: float, step: float

@@ -170,6 +170,9 @@ CONFIG_SCHEMA = remote_base.validate_triggers(
async def to_code(config):
    pin = await cg.gpio_pin_expression(config[CONF_PIN])
+    if CORE.is_esp32:
+        # Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
+        esp32.include_builtin_idf_component("esp_driver_rmt")

    var = cg.new_Pvariable(config[CONF_ID], pin)
    cg.add(var.set_rmt_symbols(config[CONF_RMT_SYMBOLS]))
    cg.add(var.set_receive_symbols(config[CONF_RECEIVE_SYMBOLS]))

@@ -112,6 +112,9 @@ async def digital_write_action_to_code(config, action_id, template_arg, args):
async def to_code(config):
    pin = await cg.gpio_pin_expression(config[CONF_PIN])
+    if CORE.is_esp32:
+        # Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
+        esp32.include_builtin_idf_component("esp_driver_rmt")

    var = cg.new_Pvariable(config[CONF_ID], pin)
    cg.add(var.set_rmt_symbols(config[CONF_RMT_SYMBOLS]))
    cg.add(var.set_non_blocking(config[CONF_NON_BLOCKING]))

@@ -27,46 +27,61 @@ void RuntimeStatsCollector::record_component_time(Component *component, uint32_t
}

void RuntimeStatsCollector::log_stats_() {
+  // First pass: count active components
+  size_t count = 0;
+  for (const auto &it : this->component_stats_) {
+    if (it.second.get_period_count() > 0) {
+      count++;
+    }
+  }
+
  ESP_LOGI(TAG,
           "Component Runtime Statistics\n"
-           " Period stats (last %" PRIu32 "ms):",
-           this->log_interval_);
+           " Period stats (last %" PRIu32 "ms): %zu active components",
+           this->log_interval_, count);

-  // First collect stats we want to display
-  std::vector<ComponentStatPair> stats_to_display;
+  if (count == 0) {
+    return;
+  }
+
+  // Stack buffer sized to actual active count (up to 256 components), heap fallback for larger
+  SmallBufferWithHeapFallback<256, Component *> buffer(count);
+  Component **sorted = buffer.get();
+
+  // Second pass: fill buffer with active components
+  size_t idx = 0;
  for (const auto &it : this->component_stats_) {
-    Component *component = it.first;
-    const ComponentRuntimeStats &stats = it.second;
-    if (stats.get_period_count() > 0) {
-      ComponentStatPair pair = {component, &stats};
-      stats_to_display.push_back(pair);
+    if (it.second.get_period_count() > 0) {
+      sorted[idx++] = it.first;
    }
  }

  // Sort by period runtime (descending)
-  std::sort(stats_to_display.begin(), stats_to_display.end(), std::greater<ComponentStatPair>());
+  std::sort(sorted, sorted + count, [this](Component *a, Component *b) {
+    return this->component_stats_[a].get_period_time_ms() > this->component_stats_[b].get_period_time_ms();
+  });

  // Log top components by period runtime
-  for (const auto &it : stats_to_display) {
+  for (size_t i = 0; i < count; i++) {
+    const auto &stats = this->component_stats_[sorted[i]];
    ESP_LOGI(TAG, " %s: count=%" PRIu32 ", avg=%.2fms, max=%" PRIu32 "ms, total=%" PRIu32 "ms",
-             LOG_STR_ARG(it.component->get_component_log_str()), it.stats->get_period_count(),
-             it.stats->get_period_avg_time_ms(), it.stats->get_period_max_time_ms(), it.stats->get_period_time_ms());
+             LOG_STR_ARG(sorted[i]->get_component_log_str()), stats.get_period_count(), stats.get_period_avg_time_ms(),
+             stats.get_period_max_time_ms(), stats.get_period_time_ms());
  }

-  // Log total stats since boot
-  ESP_LOGI(TAG, " Total stats (since boot):");
+  // Log total stats since boot (only for active components - idle ones haven't changed)
+  ESP_LOGI(TAG, " Total stats (since boot): %zu active components", count);

  // Re-sort by total runtime for all-time stats
-  std::sort(stats_to_display.begin(), stats_to_display.end(),
-            [](const ComponentStatPair &a, const ComponentStatPair &b) {
-              return a.stats->get_total_time_ms() > b.stats->get_total_time_ms();
-            });
+  std::sort(sorted, sorted + count, [this](Component *a, Component *b) {
+    return this->component_stats_[a].get_total_time_ms() > this->component_stats_[b].get_total_time_ms();
+  });

-  for (const auto &it : stats_to_display) {
+  for (size_t i = 0; i < count; i++) {
+    const auto &stats = this->component_stats_[sorted[i]];
    ESP_LOGI(TAG, " %s: count=%" PRIu32 ", avg=%.2fms, max=%" PRIu32 "ms, total=%" PRIu32 "ms",
-             LOG_STR_ARG(it.component->get_component_log_str()), it.stats->get_total_count(),
-             it.stats->get_total_avg_time_ms(), it.stats->get_total_max_time_ms(), it.stats->get_total_time_ms());
+             LOG_STR_ARG(sorted[i]->get_component_log_str()), stats.get_total_count(), stats.get_total_avg_time_ms(),
+             stats.get_total_max_time_ms(), stats.get_total_time_ms());
  }
}
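The rewrite above follows a count-then-allocate pattern: one pass counts active components, a scratch buffer is sized to exactly that count, a second pass fills it, and the sorts run in place over pointers. A condensed sketch of the shape — SmallBufferWithHeapFallback is the helper used above (presumably from esphome/core/helpers.h, its template taken from the call site), while stats_map and cmp are stand-ins for the member map and comparison lambdas:

// Sketch of the two-pass idiom; stats_map and cmp are hypothetical stand-ins.
size_t count = 0;
for (const auto &it : stats_map) {  // pass 1: count qualifying entries
  if (it.second.get_period_count() > 0)
    count++;
}
SmallBufferWithHeapFallback<256, Component *> buffer(count);  // stack for <= 256 slots
Component **sorted = buffer.get();
size_t idx = 0;
for (const auto &it : stats_map) {  // pass 2: fill exactly `count` slots
  if (it.second.get_period_count() > 0)
    sorted[idx++] = it.first;
}
std::sort(sorted, sorted + count, cmp);  // sort pointers, not whole stat structs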
@@ -5,7 +5,6 @@
#ifdef USE_RUNTIME_STATS

#include <map>
-#include <vector>
#include <cstdint>
#include <cstring>
#include "esphome/core/helpers.h"
@@ -77,17 +76,6 @@ class ComponentRuntimeStats {
  uint32_t total_max_time_ms_;
};

-// For sorting components by run time
-struct ComponentStatPair {
-  Component *component;
-  const ComponentRuntimeStats *stats;
-
-  bool operator>(const ComponentStatPair &other) const {
-    // Sort by period time as that's what we're displaying in the logs
-    return stats->get_period_time_ms() > other.stats->get_period_time_ms();
-  }
-};
-
class RuntimeStatsCollector {
 public:
  RuntimeStatsCollector();

@@ -2,7 +2,7 @@ from esphome import automation
import esphome.codegen as cg
from esphome.components import audio, audio_dac
import esphome.config_validation as cv
-from esphome.const import CONF_DATA, CONF_ID, CONF_VOLUME
+from esphome.const import CONF_AUDIO_DAC, CONF_DATA, CONF_ID, CONF_VOLUME
from esphome.core import CORE, ID
from esphome.coroutine import CoroPriority, coroutine_with_priority
@@ -11,8 +11,6 @@ CODEOWNERS = ["@jesserockz", "@kahrendt"]

IS_PLATFORM_COMPONENT = True

-CONF_AUDIO_DAC = "audio_dac"
-
speaker_ns = cg.esphome_ns.namespace("speaker")

Speaker = speaker_ns.class_("Speaker")

@@ -27,6 +27,9 @@ void RealTimeClock::dump_config() {
#ifdef USE_TIME_TIMEZONE
  ESP_LOGCONFIG(TAG, "Timezone: '%s'", this->timezone_.c_str());
#endif
+  auto time = this->now();
+  ESP_LOGCONFIG(TAG, "Current time: %04d-%02d-%02d %02d:%02d:%02d", time.year, time.month, time.day_of_month, time.hour,
+                time.minute, time.second);
}

void RealTimeClock::synchronize_epoch_(uint32_t epoch) {

@@ -2,7 +2,7 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

-#include <vector>
+#include <array>

namespace esphome {
namespace tx20 {
@@ -45,25 +45,25 @@ std::string Tx20Component::get_wind_cardinal_direction() const { return this->wi
void Tx20Component::decode_and_publish_() {
  ESP_LOGVV(TAG, "Decode Tx20");

-  std::string string_buffer;
-  std::string string_buffer_2;
-  std::vector<bool> bit_buffer;
+  std::array<bool, MAX_BUFFER_SIZE> bit_buffer{};
+  size_t bit_pos = 0;
  bool current_bit = true;
+  // Cap at MAX_BUFFER_SIZE - 1 to prevent out-of-bounds access (buffer_index can exceed MAX_BUFFER_SIZE in ISR)
+  const int max_buffer_index =
+      std::min(static_cast<int>(this->store_.buffer_index), static_cast<int>(MAX_BUFFER_SIZE - 1));

-  for (int i = 1; i <= this->store_.buffer_index; i++) {
-    string_buffer_2 += to_string(this->store_.buffer[i]) + ", ";
+  for (int i = 1; i <= max_buffer_index; i++) {
    uint8_t repeat = this->store_.buffer[i] / TX20_BIT_TIME;
    // ignore segments at the end that were too short
-    string_buffer.append(repeat, current_bit ? '1' : '0');
-    bit_buffer.insert(bit_buffer.end(), repeat, current_bit);
+    for (uint8_t j = 0; j < repeat && bit_pos < MAX_BUFFER_SIZE; j++) {
+      bit_buffer[bit_pos++] = current_bit;
+    }
    current_bit = !current_bit;
  }
  current_bit = !current_bit;
-  if (string_buffer.length() < MAX_BUFFER_SIZE) {
-    uint8_t remain = MAX_BUFFER_SIZE - string_buffer.length();
-    string_buffer_2 += to_string(remain) + ", ";
-    string_buffer.append(remain, current_bit ? '1' : '0');
-    bit_buffer.insert(bit_buffer.end(), remain, current_bit);
+  size_t bits_before_padding = bit_pos;
+  while (bit_pos < MAX_BUFFER_SIZE) {
+    bit_buffer[bit_pos++] = current_bit;
  }

  uint8_t tx20_sa = 0;
@@ -108,8 +108,24 @@ void Tx20Component::decode_and_publish_() {
  // 2. Check received checksum matches calculated checksum
  // 3. Check that Wind Direction matches Wind Direction (Inverted)
  // 4. Check that Wind Speed matches Wind Speed (Inverted)
-  ESP_LOGVV(TAG, "BUFFER %s", string_buffer_2.c_str());
-  ESP_LOGVV(TAG, "Decoded bits %s", string_buffer.c_str());
+#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
+  // Build debug strings from completed data
+  char debug_buf[320];  // buffer values: max 40 entries * 7 chars each
+  size_t debug_pos = 0;
+  for (int i = 1; i <= max_buffer_index; i++) {
+    debug_pos = buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%u, ", this->store_.buffer[i]);
+  }
+  if (bits_before_padding < MAX_BUFFER_SIZE) {
+    buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%zu, ", MAX_BUFFER_SIZE - bits_before_padding);
+  }
+  char bits_buf[MAX_BUFFER_SIZE + 1];
+  for (size_t i = 0; i < MAX_BUFFER_SIZE; i++) {
+    bits_buf[i] = bit_buffer[i] ? '1' : '0';
+  }
+  bits_buf[MAX_BUFFER_SIZE] = '\0';
+  ESP_LOGVV(TAG, "BUFFER %s", debug_buf);
+  ESP_LOGVV(TAG, "Decoded bits %s", bits_buf);
+#endif

  if (tx20_sa == 4) {
    if (chk == tx20_sd) {
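The debug path leans on buf_append_printf from esphome/core/helpers.h; from the call sites above its contract appears to be: format into the buffer at offset pos, never write past size, and return the new offset. A usage sketch of that inferred contract (the reading is taken from the diff, not a verified signature):

// Inferred usage pattern (sketch):
char buf[64];
size_t pos = 0;
pos = buf_append_printf(buf, sizeof(buf), pos, "%u, ", 42u);  // buf: "42, "
pos = buf_append_printf(buf, sizeof(buf), pos, "%u, ", 7u);   // buf: "42, 7, ", pos == 7
// Appends are bounded by sizeof(buf), so the debug path cannot overflow the stack array.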
@@ -1 +1,6 @@
CODEOWNERS = ["@clydebarrow"]
+
+DEPRECATED_COMPONENT = """
+The 'waveshare_epaper' component is deprecated and no new models will be added to it.
+New model PRs should target the newer and more performant 'epaper_spi' component.
+"""

@@ -209,7 +209,7 @@ void DeferredUpdateEventSource::deq_push_back_with_dedup_(void *source, message_
void DeferredUpdateEventSource::process_deferred_queue_() {
  while (!deferred_queue_.empty()) {
    DeferredEvent &de = deferred_queue_.front();
-    std::string message = de.message_generator_(web_server_, de.source_);
+    auto message = de.message_generator_(web_server_, de.source_);
    if (this->send(message.c_str(), "state") != DISCARDED) {
      // O(n) but memory efficiency is more important than speed here which is why std::vector was chosen
      deferred_queue_.erase(deferred_queue_.begin());
@@ -266,7 +266,7 @@ void DeferredUpdateEventSource::deferrable_send_state(void *source, const char *
    // deferred queue still not empty which means downstream event queue full, no point trying to send first
    deq_push_back_with_dedup_(source, message_generator);
  } else {
-    std::string message = message_generator(web_server_, source);
+    auto message = message_generator(web_server_, source);
    if (this->send(message.c_str(), "state") == DISCARDED) {
      deq_push_back_with_dedup_(source, message_generator);
    } else {
@@ -320,7 +320,7 @@ void DeferredUpdateEventSourceList::on_client_connect_(DeferredUpdateEventSource
  ws->defer([ws, source]() {
    // Configure reconnect timeout and send config
    // this should always go through since the AsyncEventSourceClient event queue is empty on connect
-    std::string message = ws->get_config_json();
+    auto message = ws->get_config_json();
    source->try_send_nodefer(message.c_str(), "ping", millis(), 30000);

#ifdef USE_WEBSERVER_SORTING
@@ -329,10 +329,10 @@ void DeferredUpdateEventSourceList::on_client_connect_(DeferredUpdateEventSource
      JsonObject root = builder.root();
      root[ESPHOME_F("name")] = group.second.name;
      root[ESPHOME_F("sorting_weight")] = group.second.weight;
-      message = builder.serialize();
+      auto group_msg = builder.serialize();

      // up to 31 groups should be able to be queued initially without defer
-      source->try_send_nodefer(message.c_str(), "sorting_group");
+      source->try_send_nodefer(group_msg.c_str(), "sorting_group");
    }
#endif
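Note the scoping in the sorting-group loop above: each iteration constructs a fresh buffer that lives exactly as long as the send that consumes it, so any heap spill is freed before the next group is serialized. Schematically (a sketch of the loop shown above; sorting_groups is a stand-in for the real container):

for (auto &group : sorting_groups) {  // as in on_client_connect_() above
  // ... populate builder.root() for this group ...
  auto group_msg = builder.serialize();  // fresh SerializationBuffer<> each pass
  source->try_send_nodefer(group_msg.c_str(), "sorting_group");
}  // group_msg destroyed here, before the next iteration allocates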
@@ -365,7 +365,7 @@ void WebServer::set_css_include(const char *css_include) { this->css_include_ =
void WebServer::set_js_include(const char *js_include) { this->js_include_ = js_include; }
#endif

-std::string WebServer::get_config_json() {
+json::SerializationBuffer<> WebServer::get_config_json() {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -597,20 +597,20 @@ void WebServer::handle_sensor_request(AsyncWebServerRequest *request, const UrlM
    // Note: request->method() is always HTTP_GET here (canHandle ensures this)
    if (entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->sensor_json_(obj, obj->state, detail);
+      auto data = this->sensor_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
  }
  request->send(404);
}
-std::string WebServer::sensor_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::sensor_state_json_generator(WebServer *web_server, void *source) {
  return web_server->sensor_json_((sensor::Sensor *) (source), ((sensor::Sensor *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::sensor_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::sensor_all_json_generator(WebServer *web_server, void *source) {
  return web_server->sensor_json_((sensor::Sensor *) (source), ((sensor::Sensor *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -644,23 +644,23 @@ void WebServer::handle_text_sensor_request(AsyncWebServerRequest *request, const
    // Note: request->method() is always HTTP_GET here (canHandle ensures this)
    if (entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->text_sensor_json_(obj, obj->state, detail);
+      auto data = this->text_sensor_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
  }
  request->send(404);
}
-std::string WebServer::text_sensor_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::text_sensor_state_json_generator(WebServer *web_server, void *source) {
  return web_server->text_sensor_json_((text_sensor::TextSensor *) (source),
                                       ((text_sensor::TextSensor *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::text_sensor_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::text_sensor_all_json_generator(WebServer *web_server, void *source) {
  return web_server->text_sensor_json_((text_sensor::TextSensor *) (source),
                                       ((text_sensor::TextSensor *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value,
-                                         JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value,
+                                                         JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -705,7 +705,7 @@ void WebServer::handle_switch_request(AsyncWebServerRequest *request, const UrlM

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->switch_json_(obj, obj->state, detail);
+      auto data = this->switch_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -734,13 +734,13 @@ void WebServer::handle_switch_request(AsyncWebServerRequest *request, const UrlM
  }
  request->send(404);
}
-std::string WebServer::switch_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::switch_state_json_generator(WebServer *web_server, void *source) {
  return web_server->switch_json_((switch_::Switch *) (source), ((switch_::Switch *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::switch_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::switch_all_json_generator(WebServer *web_server, void *source) {
  return web_server->switch_json_((switch_::Switch *) (source), ((switch_::Switch *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -762,7 +762,7 @@ void WebServer::handle_button_request(AsyncWebServerRequest *request, const UrlM
      continue;
    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->button_json_(obj, detail);
+      auto data = this->button_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
    } else if (match.method_equals(ESPHOME_F("press"))) {
      DEFER_ACTION(obj, obj->press());
@@ -775,10 +775,10 @@ void WebServer::handle_button_request(AsyncWebServerRequest *request, const UrlM
  }
  request->send(404);
}
-std::string WebServer::button_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::button_all_json_generator(WebServer *web_server, void *source) {
  return web_server->button_json_((button::Button *) (source), DETAIL_ALL);
}
-std::string WebServer::button_json_(button::Button *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::button_json_(button::Button *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -805,22 +805,23 @@ void WebServer::handle_binary_sensor_request(AsyncWebServerRequest *request, con
    // Note: request->method() is always HTTP_GET here (canHandle ensures this)
    if (entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->binary_sensor_json_(obj, obj->state, detail);
+      auto data = this->binary_sensor_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
  }
  request->send(404);
}
-std::string WebServer::binary_sensor_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::binary_sensor_state_json_generator(WebServer *web_server, void *source) {
  return web_server->binary_sensor_json_((binary_sensor::BinarySensor *) (source),
                                         ((binary_sensor::BinarySensor *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::binary_sensor_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::binary_sensor_all_json_generator(WebServer *web_server, void *source) {
  return web_server->binary_sensor_json_((binary_sensor::BinarySensor *) (source),
                                         ((binary_sensor::BinarySensor *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value,
+                                                           JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -847,7 +848,7 @@ void WebServer::handle_fan_request(AsyncWebServerRequest *request, const UrlMatc

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->fan_json_(obj, detail);
+      auto data = this->fan_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
    } else if (match.method_equals(ESPHOME_F("toggle"))) {
      DEFER_ACTION(obj, obj->toggle().perform());
@@ -888,13 +889,13 @@ void WebServer::handle_fan_request(AsyncWebServerRequest *request, const UrlMatc
  }
  request->send(404);
}
-std::string WebServer::fan_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::fan_state_json_generator(WebServer *web_server, void *source) {
  return web_server->fan_json_((fan::Fan *) (source), DETAIL_STATE);
}
-std::string WebServer::fan_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::fan_all_json_generator(WebServer *web_server, void *source) {
  return web_server->fan_json_((fan::Fan *) (source), DETAIL_ALL);
}
-std::string WebServer::fan_json_(fan::Fan *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::fan_json_(fan::Fan *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -928,7 +929,7 @@ void WebServer::handle_light_request(AsyncWebServerRequest *request, const UrlMa

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->light_json_(obj, detail);
+      auto data = this->light_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
    } else if (match.method_equals(ESPHOME_F("toggle"))) {
      DEFER_ACTION(obj, obj->toggle().perform());
@@ -967,13 +968,13 @@ void WebServer::handle_light_request(AsyncWebServerRequest *request, const UrlMa
  }
  request->send(404);
}
-std::string WebServer::light_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::light_state_json_generator(WebServer *web_server, void *source) {
  return web_server->light_json_((light::LightState *) (source), DETAIL_STATE);
}
-std::string WebServer::light_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::light_all_json_generator(WebServer *web_server, void *source) {
  return web_server->light_json_((light::LightState *) (source), DETAIL_ALL);
}
-std::string WebServer::light_json_(light::LightState *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::light_json_(light::LightState *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1007,7 +1008,7 @@ void WebServer::handle_cover_request(AsyncWebServerRequest *request, const UrlMa

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->cover_json_(obj, detail);
+      auto data = this->cover_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1055,13 +1056,13 @@ void WebServer::handle_cover_request(AsyncWebServerRequest *request, const UrlMa
  }
  request->send(404);
}
-std::string WebServer::cover_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::cover_state_json_generator(WebServer *web_server, void *source) {
  return web_server->cover_json_((cover::Cover *) (source), DETAIL_STATE);
}
-std::string WebServer::cover_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::cover_all_json_generator(WebServer *web_server, void *source) {
  return web_server->cover_json_((cover::Cover *) (source), DETAIL_ALL);
}
-std::string WebServer::cover_json_(cover::Cover *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::cover_json_(cover::Cover *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1096,7 +1097,7 @@ void WebServer::handle_number_request(AsyncWebServerRequest *request, const UrlM

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->number_json_(obj, obj->state, detail);
+      auto data = this->number_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1115,13 +1116,13 @@ void WebServer::handle_number_request(AsyncWebServerRequest *request, const UrlM
  request->send(404);
}

-std::string WebServer::number_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::number_state_json_generator(WebServer *web_server, void *source) {
  return web_server->number_json_((number::Number *) (source), ((number::Number *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::number_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::number_all_json_generator(WebServer *web_server, void *source) {
  return web_server->number_json_((number::Number *) (source), ((number::Number *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::number_json_(number::Number *obj, float value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::number_json_(number::Number *obj, float value, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1163,7 +1164,7 @@ void WebServer::handle_date_request(AsyncWebServerRequest *request, const UrlMat
      continue;
    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->date_json_(obj, detail);
+      auto data = this->date_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1188,13 +1189,13 @@ void WebServer::handle_date_request(AsyncWebServerRequest *request, const UrlMat
  request->send(404);
}

-std::string WebServer::date_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::date_state_json_generator(WebServer *web_server, void *source) {
  return web_server->date_json_((datetime::DateEntity *) (source), DETAIL_STATE);
}
-std::string WebServer::date_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::date_all_json_generator(WebServer *web_server, void *source) {
  return web_server->date_json_((datetime::DateEntity *) (source), DETAIL_ALL);
}
-std::string WebServer::date_json_(datetime::DateEntity *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::date_json_(datetime::DateEntity *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1223,7 +1224,7 @@ void WebServer::handle_time_request(AsyncWebServerRequest *request, const UrlMat
      continue;
    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->time_json_(obj, detail);
+      auto data = this->time_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1247,13 +1248,13 @@ void WebServer::handle_time_request(AsyncWebServerRequest *request, const UrlMat
  }
  request->send(404);
}
-std::string WebServer::time_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::time_state_json_generator(WebServer *web_server, void *source) {
  return web_server->time_json_((datetime::TimeEntity *) (source), DETAIL_STATE);
}
-std::string WebServer::time_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::time_all_json_generator(WebServer *web_server, void *source) {
  return web_server->time_json_((datetime::TimeEntity *) (source), DETAIL_ALL);
}
-std::string WebServer::time_json_(datetime::TimeEntity *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::time_json_(datetime::TimeEntity *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1282,7 +1283,7 @@ void WebServer::handle_datetime_request(AsyncWebServerRequest *request, const Ur
      continue;
    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->datetime_json_(obj, detail);
+      auto data = this->datetime_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1306,13 +1307,13 @@ void WebServer::handle_datetime_request(AsyncWebServerRequest *request, const Ur
  }
  request->send(404);
}
-std::string WebServer::datetime_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::datetime_state_json_generator(WebServer *web_server, void *source) {
  return web_server->datetime_json_((datetime::DateTimeEntity *) (source), DETAIL_STATE);
}
-std::string WebServer::datetime_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::datetime_all_json_generator(WebServer *web_server, void *source) {
  return web_server->datetime_json_((datetime::DateTimeEntity *) (source), DETAIL_ALL);
}
-std::string WebServer::datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1343,7 +1344,7 @@ void WebServer::handle_text_request(AsyncWebServerRequest *request, const UrlMat

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->text_json_(obj, obj->state, detail);
+      auto data = this->text_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1362,13 +1363,13 @@ void WebServer::handle_text_request(AsyncWebServerRequest *request, const UrlMat
  request->send(404);
}

-std::string WebServer::text_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::text_state_json_generator(WebServer *web_server, void *source) {
  return web_server->text_json_((text::Text *) (source), ((text::Text *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::text_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::text_all_json_generator(WebServer *web_server, void *source) {
  return web_server->text_json_((text::Text *) (source), ((text::Text *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::text_json_(text::Text *obj, const std::string &value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::text_json_(text::Text *obj, const std::string &value, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1400,7 +1401,7 @@ void WebServer::handle_select_request(AsyncWebServerRequest *request, const UrlM

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), detail);
+      auto data = this->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1419,15 +1420,15 @@ void WebServer::handle_select_request(AsyncWebServerRequest *request, const UrlM
  }
  request->send(404);
}
-std::string WebServer::select_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::select_state_json_generator(WebServer *web_server, void *source) {
  auto *obj = (select::Select *) (source);
  return web_server->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), DETAIL_STATE);
}
-std::string WebServer::select_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::select_all_json_generator(WebServer *web_server, void *source) {
  auto *obj = (select::Select *) (source);
  return web_server->select_json_(obj, obj->has_state() ? obj->current_option() : StringRef(), DETAIL_ALL);
}
-std::string WebServer::select_json_(select::Select *obj, StringRef value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::select_json_(select::Select *obj, StringRef value, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1459,7 +1460,7 @@ void WebServer::handle_climate_request(AsyncWebServerRequest *request, const Url

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->climate_json_(obj, detail);
+      auto data = this->climate_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1488,15 +1489,15 @@ void WebServer::handle_climate_request(AsyncWebServerRequest *request, const Url
  }
  request->send(404);
}
-std::string WebServer::climate_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::climate_state_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->climate_json_((climate::Climate *) (source), DETAIL_STATE);
}
-std::string WebServer::climate_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::climate_all_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->climate_json_((climate::Climate *) (source), DETAIL_ALL);
}
-std::string WebServer::climate_json_(climate::Climate *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::climate_json_(climate::Climate *obj, JsonDetail start_config) {
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1629,7 +1630,7 @@ void WebServer::handle_lock_request(AsyncWebServerRequest *request, const UrlMat

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->lock_json_(obj, obj->state, detail);
+      auto data = this->lock_json_(obj, obj->state, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1658,13 +1659,13 @@ void WebServer::handle_lock_request(AsyncWebServerRequest *request, const UrlMat
  }
  request->send(404);
}
-std::string WebServer::lock_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::lock_state_json_generator(WebServer *web_server, void *source) {
  return web_server->lock_json_((lock::Lock *) (source), ((lock::Lock *) (source))->state, DETAIL_STATE);
}
-std::string WebServer::lock_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::lock_all_json_generator(WebServer *web_server, void *source) {
  return web_server->lock_json_((lock::Lock *) (source), ((lock::Lock *) (source))->state, DETAIL_ALL);
}
-std::string WebServer::lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1692,7 +1693,7 @@ void WebServer::handle_valve_request(AsyncWebServerRequest *request, const UrlMa

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->valve_json_(obj, detail);
+      auto data = this->valve_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1738,13 +1739,13 @@ void WebServer::handle_valve_request(AsyncWebServerRequest *request, const UrlMa
  }
  request->send(404);
}
-std::string WebServer::valve_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::valve_state_json_generator(WebServer *web_server, void *source) {
  return web_server->valve_json_((valve::Valve *) (source), DETAIL_STATE);
}
-std::string WebServer::valve_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::valve_all_json_generator(WebServer *web_server, void *source) {
  return web_server->valve_json_((valve::Valve *) (source), DETAIL_ALL);
}
-std::string WebServer::valve_json_(valve::Valve *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::valve_json_(valve::Valve *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1777,7 +1778,7 @@ void WebServer::handle_alarm_control_panel_request(AsyncWebServerRequest *reques

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->alarm_control_panel_json_(obj, obj->get_state(), detail);
+      auto data = this->alarm_control_panel_json_(obj, obj->get_state(), detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1817,19 +1818,19 @@ void WebServer::handle_alarm_control_panel_request(AsyncWebServerRequest *reques
  }
  request->send(404);
}
-std::string WebServer::alarm_control_panel_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::alarm_control_panel_state_json_generator(WebServer *web_server, void *source) {
  return web_server->alarm_control_panel_json_((alarm_control_panel::AlarmControlPanel *) (source),
                                               ((alarm_control_panel::AlarmControlPanel *) (source))->get_state(),
                                               DETAIL_STATE);
}
-std::string WebServer::alarm_control_panel_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::alarm_control_panel_all_json_generator(WebServer *web_server, void *source) {
  return web_server->alarm_control_panel_json_((alarm_control_panel::AlarmControlPanel *) (source),
                                               ((alarm_control_panel::AlarmControlPanel *) (source))->get_state(),
                                               DETAIL_ALL);
}
-std::string WebServer::alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
-                                                 alarm_control_panel::AlarmControlPanelState value,
-                                                 JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
+                                                                 alarm_control_panel::AlarmControlPanelState value,
+                                                                 JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -1858,7 +1859,7 @@ void WebServer::handle_water_heater_request(AsyncWebServerRequest *request, cons

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->water_heater_json_(obj, detail);
+      auto data = this->water_heater_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -1894,14 +1895,14 @@ void WebServer::handle_water_heater_request(AsyncWebServerRequest *request, cons
  request->send(404);
}

-std::string WebServer::water_heater_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::water_heater_state_json_generator(WebServer *web_server, void *source) {
  return web_server->water_heater_json_(static_cast<water_heater::WaterHeater *>(source), DETAIL_STATE);
}
-std::string WebServer::water_heater_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::water_heater_all_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->water_heater_json_(static_cast<water_heater::WaterHeater *>(source), DETAIL_ALL);
}
-std::string WebServer::water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
  char buf[PSTR_LOCAL_SIZE];
@@ -1964,7 +1965,7 @@ void WebServer::handle_infrared_request(AsyncWebServerRequest *request, const Ur

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->infrared_json_(obj, detail);
+      auto data = this->infrared_json_(obj, detail);
      request->send(200, ESPHOME_F("application/json"), data.c_str());
      return;
    }
@@ -2035,12 +2036,12 @@ void WebServer::handle_infrared_request(AsyncWebServerRequest *request, const Ur
  request->send(404);
}

-std::string WebServer::infrared_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::infrared_all_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->infrared_json_(static_cast<infrared::Infrared *>(source), DETAIL_ALL);
}

-std::string WebServer::infrared_json_(infrared::Infrared *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::infrared_json_(infrared::Infrared *obj, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -2075,7 +2076,7 @@ void WebServer::handle_event_request(AsyncWebServerRequest *request, const UrlMa
    // Note: request->method() is always HTTP_GET here (canHandle ensures this)
    if (entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->event_json_(obj, StringRef(), detail);
+      auto data = this->event_json_(obj, StringRef(), detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -2085,16 +2086,16 @@ void WebServer::handle_event_request(AsyncWebServerRequest *request, const UrlMa

static StringRef get_event_type(event::Event *event) { return event ? event->get_last_event_type() : StringRef(); }

-std::string WebServer::event_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::event_state_json_generator(WebServer *web_server, void *source) {
  auto *event = static_cast<event::Event *>(source);
  return web_server->event_json_(event, get_event_type(event), DETAIL_STATE);
}
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
-std::string WebServer::event_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::event_all_json_generator(WebServer *web_server, void *source) {
  auto *event = static_cast<event::Event *>(source);
  return web_server->event_json_(event, get_event_type(event), DETAIL_ALL);
}
-std::string WebServer::event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config) {
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -2141,7 +2142,7 @@ void WebServer::handle_update_request(AsyncWebServerRequest *request, const UrlM

    if (request->method() == HTTP_GET && entity_match.action_is_empty) {
      auto detail = get_request_detail(request);
-      std::string data = this->update_json_(obj, detail);
+      auto data = this->update_json_(obj, detail);
      request->send(200, "application/json", data.c_str());
      return;
    }
@@ -2157,15 +2158,15 @@ void WebServer::handle_update_request(AsyncWebServerRequest *request, const UrlM
  }
  request->send(404);
}
-std::string WebServer::update_state_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::update_state_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->update_json_((update::UpdateEntity *) (source), DETAIL_STATE);
}
-std::string WebServer::update_all_json_generator(WebServer *web_server, void *source) {
+json::SerializationBuffer<> WebServer::update_all_json_generator(WebServer *web_server, void *source) {
  // NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  return web_server->update_json_((update::UpdateEntity *) (source), DETAIL_STATE);
}
-std::string WebServer::update_json_(update::UpdateEntity *obj, JsonDetail start_config) {
+json::SerializationBuffer<> WebServer::update_json_(update::UpdateEntity *obj, JsonDetail start_config) {
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  json::JsonBuilder builder;
  JsonObject root = builder.root();
@@ -2,6 +2,7 @@

#include "list_entities.h"

+#include "esphome/components/json/json_util.h"
#include "esphome/components/web_server_base/web_server_base.h"
#ifdef USE_WEBSERVER
#include "esphome/core/component.h"
@@ -104,7 +105,7 @@ enum JsonDetail { DETAIL_ALL, DETAIL_STATE };
   can be forgotten.
 */
#if !defined(USE_ESP32) && defined(USE_ARDUINO)
-using message_generator_t = std::string(WebServer *, void *);
+using message_generator_t = json::SerializationBuffer<>(WebServer *, void *);

class DeferredUpdateEventSourceList;
class DeferredUpdateEventSource : public AsyncEventSource {
@@ -263,7 +264,7 @@ class WebServer : public Controller,
|
||||
void handle_index_request(AsyncWebServerRequest *request);
|
||||
|
||||
/// Return the webserver configuration as JSON.
|
||||
std::string get_config_json();
|
||||
json::SerializationBuffer<> get_config_json();
|
||||
|
||||
#ifdef USE_WEBSERVER_CSS_INCLUDE
|
||||
/// Handle included css request under '/0.css'.
|
||||
@@ -285,8 +286,8 @@ class WebServer : public Controller,
   /// Handle a sensor request under '/sensor/<id>'.
   void handle_sensor_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string sensor_state_json_generator(WebServer *web_server, void *source);
-  static std::string sensor_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> sensor_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> sensor_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_SWITCH
@@ -295,8 +296,8 @@ class WebServer : public Controller,
   /// Handle a switch request under '/switch/<id>/</turn_on/turn_off/toggle>'.
   void handle_switch_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string switch_state_json_generator(WebServer *web_server, void *source);
-  static std::string switch_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> switch_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> switch_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_BUTTON
@@ -304,7 +305,7 @@ class WebServer : public Controller,
   void handle_button_request(AsyncWebServerRequest *request, const UrlMatch &match);

   // Buttons are stateless, so there is no button_state_json_generator
-  static std::string button_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> button_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_BINARY_SENSOR
@@ -313,8 +314,8 @@ class WebServer : public Controller,
   /// Handle a binary sensor request under '/binary_sensor/<id>'.
   void handle_binary_sensor_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string binary_sensor_state_json_generator(WebServer *web_server, void *source);
-  static std::string binary_sensor_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> binary_sensor_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> binary_sensor_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_FAN
@@ -323,8 +324,8 @@ class WebServer : public Controller,
   /// Handle a fan request under '/fan/<id>/</turn_on/turn_off/toggle>'.
   void handle_fan_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string fan_state_json_generator(WebServer *web_server, void *source);
-  static std::string fan_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> fan_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> fan_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_LIGHT
@@ -333,8 +334,8 @@ class WebServer : public Controller,
   /// Handle a light request under '/light/<id>/</turn_on/turn_off/toggle>'.
   void handle_light_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string light_state_json_generator(WebServer *web_server, void *source);
-  static std::string light_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> light_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> light_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_TEXT_SENSOR
@@ -343,8 +344,8 @@ class WebServer : public Controller,
   /// Handle a text sensor request under '/text_sensor/<id>'.
   void handle_text_sensor_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string text_sensor_state_json_generator(WebServer *web_server, void *source);
-  static std::string text_sensor_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> text_sensor_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> text_sensor_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_COVER
@@ -353,8 +354,8 @@ class WebServer : public Controller,
   /// Handle a cover request under '/cover/<id>/<open/close/stop/set>'.
   void handle_cover_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string cover_state_json_generator(WebServer *web_server, void *source);
-  static std::string cover_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> cover_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> cover_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_NUMBER
@@ -362,8 +363,8 @@ class WebServer : public Controller,
   /// Handle a number request under '/number/<id>'.
   void handle_number_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string number_state_json_generator(WebServer *web_server, void *source);
-  static std::string number_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> number_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> number_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_DATETIME_DATE
@@ -371,8 +372,8 @@ class WebServer : public Controller,
   /// Handle a date request under '/date/<id>'.
   void handle_date_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string date_state_json_generator(WebServer *web_server, void *source);
-  static std::string date_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> date_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> date_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_DATETIME_TIME
@@ -380,8 +381,8 @@ class WebServer : public Controller,
   /// Handle a time request under '/time/<id>'.
   void handle_time_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string time_state_json_generator(WebServer *web_server, void *source);
-  static std::string time_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> time_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> time_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_DATETIME_DATETIME
@@ -389,8 +390,8 @@ class WebServer : public Controller,
   /// Handle a datetime request under '/datetime/<id>'.
   void handle_datetime_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string datetime_state_json_generator(WebServer *web_server, void *source);
-  static std::string datetime_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> datetime_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> datetime_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_TEXT
@@ -398,8 +399,8 @@ class WebServer : public Controller,
   /// Handle a text input request under '/text/<id>'.
   void handle_text_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string text_state_json_generator(WebServer *web_server, void *source);
-  static std::string text_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> text_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> text_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_SELECT
@@ -407,8 +408,8 @@ class WebServer : public Controller,
   /// Handle a select request under '/select/<id>'.
   void handle_select_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string select_state_json_generator(WebServer *web_server, void *source);
-  static std::string select_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> select_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> select_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_CLIMATE
@@ -416,8 +417,8 @@ class WebServer : public Controller,
   /// Handle a climate request under '/climate/<id>'.
   void handle_climate_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string climate_state_json_generator(WebServer *web_server, void *source);
-  static std::string climate_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> climate_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> climate_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_LOCK
@@ -426,8 +427,8 @@ class WebServer : public Controller,
   /// Handle a lock request under '/lock/<id>/</lock/unlock/open>'.
   void handle_lock_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string lock_state_json_generator(WebServer *web_server, void *source);
-  static std::string lock_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> lock_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> lock_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_VALVE
@@ -436,8 +437,8 @@ class WebServer : public Controller,
   /// Handle a valve request under '/valve/<id>/<open/close/stop/set>'.
   void handle_valve_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string valve_state_json_generator(WebServer *web_server, void *source);
-  static std::string valve_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> valve_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> valve_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_ALARM_CONTROL_PANEL
@@ -446,8 +447,8 @@ class WebServer : public Controller,
   /// Handle a alarm_control_panel request under '/alarm_control_panel/<id>'.
   void handle_alarm_control_panel_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string alarm_control_panel_state_json_generator(WebServer *web_server, void *source);
-  static std::string alarm_control_panel_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> alarm_control_panel_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> alarm_control_panel_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_WATER_HEATER
@@ -456,22 +457,22 @@ class WebServer : public Controller,
   /// Handle a water_heater request under '/water_heater/<id>/<mode/set>'.
   void handle_water_heater_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string water_heater_state_json_generator(WebServer *web_server, void *source);
-  static std::string water_heater_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> water_heater_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> water_heater_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_INFRARED
   /// Handle an infrared request under '/infrared/<id>/transmit'.
   void handle_infrared_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string infrared_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> infrared_all_json_generator(WebServer *web_server, void *source);
 #endif

 #ifdef USE_EVENT
   void on_event(event::Event *obj) override;

-  static std::string event_state_json_generator(WebServer *web_server, void *source);
-  static std::string event_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> event_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> event_all_json_generator(WebServer *web_server, void *source);

   /// Handle a event request under '/event<id>'.
   void handle_event_request(AsyncWebServerRequest *request, const UrlMatch &match);
@@ -483,8 +484,8 @@ class WebServer : public Controller,
   /// Handle a update request under '/update/<id>'.
   void handle_update_request(AsyncWebServerRequest *request, const UrlMatch &match);

-  static std::string update_state_json_generator(WebServer *web_server, void *source);
-  static std::string update_all_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> update_state_json_generator(WebServer *web_server, void *source);
+  static json::SerializationBuffer<> update_all_json_generator(WebServer *web_server, void *source);
 #endif

   /// Override the web handler's canHandle method.
@@ -609,71 +610,74 @@ class WebServer : public Controller,

 private:
 #ifdef USE_SENSOR
-  std::string sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config);
+  json::SerializationBuffer<> sensor_json_(sensor::Sensor *obj, float value, JsonDetail start_config);
 #endif
 #ifdef USE_SWITCH
-  std::string switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config);
+  json::SerializationBuffer<> switch_json_(switch_::Switch *obj, bool value, JsonDetail start_config);
 #endif
 #ifdef USE_BUTTON
-  std::string button_json_(button::Button *obj, JsonDetail start_config);
+  json::SerializationBuffer<> button_json_(button::Button *obj, JsonDetail start_config);
 #endif
 #ifdef USE_BINARY_SENSOR
-  std::string binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value, JsonDetail start_config);
+  json::SerializationBuffer<> binary_sensor_json_(binary_sensor::BinarySensor *obj, bool value,
+                                                  JsonDetail start_config);
 #endif
 #ifdef USE_FAN
-  std::string fan_json_(fan::Fan *obj, JsonDetail start_config);
+  json::SerializationBuffer<> fan_json_(fan::Fan *obj, JsonDetail start_config);
 #endif
 #ifdef USE_LIGHT
-  std::string light_json_(light::LightState *obj, JsonDetail start_config);
+  json::SerializationBuffer<> light_json_(light::LightState *obj, JsonDetail start_config);
 #endif
 #ifdef USE_TEXT_SENSOR
-  std::string text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value, JsonDetail start_config);
+  json::SerializationBuffer<> text_sensor_json_(text_sensor::TextSensor *obj, const std::string &value,
+                                                JsonDetail start_config);
 #endif
 #ifdef USE_COVER
-  std::string cover_json_(cover::Cover *obj, JsonDetail start_config);
+  json::SerializationBuffer<> cover_json_(cover::Cover *obj, JsonDetail start_config);
 #endif
 #ifdef USE_NUMBER
-  std::string number_json_(number::Number *obj, float value, JsonDetail start_config);
+  json::SerializationBuffer<> number_json_(number::Number *obj, float value, JsonDetail start_config);
 #endif
 #ifdef USE_DATETIME_DATE
-  std::string date_json_(datetime::DateEntity *obj, JsonDetail start_config);
+  json::SerializationBuffer<> date_json_(datetime::DateEntity *obj, JsonDetail start_config);
 #endif
 #ifdef USE_DATETIME_TIME
-  std::string time_json_(datetime::TimeEntity *obj, JsonDetail start_config);
+  json::SerializationBuffer<> time_json_(datetime::TimeEntity *obj, JsonDetail start_config);
 #endif
 #ifdef USE_DATETIME_DATETIME
-  std::string datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config);
+  json::SerializationBuffer<> datetime_json_(datetime::DateTimeEntity *obj, JsonDetail start_config);
 #endif
 #ifdef USE_TEXT
-  std::string text_json_(text::Text *obj, const std::string &value, JsonDetail start_config);
+  json::SerializationBuffer<> text_json_(text::Text *obj, const std::string &value, JsonDetail start_config);
 #endif
 #ifdef USE_SELECT
-  std::string select_json_(select::Select *obj, StringRef value, JsonDetail start_config);
+  json::SerializationBuffer<> select_json_(select::Select *obj, StringRef value, JsonDetail start_config);
 #endif
 #ifdef USE_CLIMATE
-  std::string climate_json_(climate::Climate *obj, JsonDetail start_config);
+  json::SerializationBuffer<> climate_json_(climate::Climate *obj, JsonDetail start_config);
 #endif
 #ifdef USE_LOCK
-  std::string lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config);
+  json::SerializationBuffer<> lock_json_(lock::Lock *obj, lock::LockState value, JsonDetail start_config);
 #endif
 #ifdef USE_VALVE
-  std::string valve_json_(valve::Valve *obj, JsonDetail start_config);
+  json::SerializationBuffer<> valve_json_(valve::Valve *obj, JsonDetail start_config);
 #endif
 #ifdef USE_ALARM_CONTROL_PANEL
-  std::string alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
-                                        alarm_control_panel::AlarmControlPanelState value, JsonDetail start_config);
+  json::SerializationBuffer<> alarm_control_panel_json_(alarm_control_panel::AlarmControlPanel *obj,
+                                                        alarm_control_panel::AlarmControlPanelState value,
+                                                        JsonDetail start_config);
 #endif
 #ifdef USE_EVENT
-  std::string event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config);
+  json::SerializationBuffer<> event_json_(event::Event *obj, StringRef event_type, JsonDetail start_config);
 #endif
 #ifdef USE_WATER_HEATER
-  std::string water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config);
+  json::SerializationBuffer<> water_heater_json_(water_heater::WaterHeater *obj, JsonDetail start_config);
 #endif
 #ifdef USE_INFRARED
-  std::string infrared_json_(infrared::Infrared *obj, JsonDetail start_config);
+  json::SerializationBuffer<> infrared_json_(infrared::Infrared *obj, JsonDetail start_config);
 #endif
 #ifdef USE_UPDATE
-  std::string update_json_(update::UpdateEntity *obj, JsonDetail start_config);
+  json::SerializationBuffer<> update_json_(update::UpdateEntity *obj, JsonDetail start_config);
 #endif
 };
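
Every generator and json_ helper above now returns json::SerializationBuffer<> by value instead of std::string. The buffer type itself lives in esphome/components/json/json_util.h and its internals are not part of this diff; the call sites only require a value type that owns the serialized JSON and exposes c_str(). A minimal sketch of such a type, assuming a stack-first buffer with heap spill (names, sizes, and the assign() method here are illustrative, not ESPHome's implementation):

#include <cstddef>
#include <cstring>
#include <memory>

// Sketch only: stands in for json::SerializationBuffer<>, whose real internals
// are not shown in this diff.
template<size_t StackSize = 256> class SerializationBufferSketch {
 public:
  // Copy the serialized JSON in; spill to the heap only when it does not fit.
  void assign(const char *data, size_t len) {
    if (len < StackSize) {
      memcpy(this->stack_, data, len);
      this->stack_[len] = '\0';
      this->heap_.reset();
    } else {
      this->heap_ = std::make_unique<char[]>(len + 1);
      memcpy(this->heap_.get(), data, len);
      this->heap_[len] = '\0';
    }
  }
  // The call sites above only ever use c_str(), e.g. request->send(200, ..., data.c_str()).
  const char *c_str() const { return this->heap_ ? this->heap_.get() : this->stack_; }

 private:
  char stack_[StackSize]{};
  std::unique_ptr<char[]> heap_;
};

Returning such a buffer by value keeps the common small-payload case off the heap, which is the plausible motivation for the std::string to SerializationBuffer migration throughout this header.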
@@ -507,7 +507,7 @@ AsyncEventSourceResponse::AsyncEventSourceResponse(const AsyncWebServerRequest *request,

   // Configure reconnect timeout and send config
   // this should always go through since the tcp send buffer is empty on connect
-  std::string message = ws->get_config_json();
+  auto message = ws->get_config_json();
   this->try_send_nodefer(message.c_str(), "ping", millis(), 30000);

 #ifdef USE_WEBSERVER_SORTING
@@ -561,7 +561,7 @@ void AsyncEventSourceResponse::deq_push_back_with_dedup_(void *source, message_generator_t *message_generator) {
 void AsyncEventSourceResponse::process_deferred_queue_() {
   while (!deferred_queue_.empty()) {
     DeferredEvent &de = deferred_queue_.front();
-    std::string message = de.message_generator_(web_server_, de.source_);
+    auto message = de.message_generator_(web_server_, de.source_);
     if (this->try_send_nodefer(message.c_str(), "state")) {
       // O(n) but memory efficiency is more important than speed here which is why std::vector was chosen
       deferred_queue_.erase(deferred_queue_.begin());
@@ -798,7 +798,7 @@ void AsyncEventSourceResponse::deferrable_send_state(void *source, const char *event_type,
     // trying to send first
     deq_push_back_with_dedup_(source, message_generator);
   } else {
-    std::string message = message_generator(web_server_, source);
+    auto message = message_generator(web_server_, source);
     if (!this->try_send_nodefer(message.c_str(), "state")) {
       deq_push_back_with_dedup_(source, message_generator);
     }
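
The branch above is the whole deferral contract: serialize and try to send immediately; if the send buffer is full, queue only the (source, generator) pair and re-serialize later from process_deferred_queue_(). A condensed sketch of that pattern under assumed semantics (at most one queued entry per source, generator re-run at send time; the class shell and the try_send_ stand-in are hypothetical):

#include <vector>

class DeferredSenderSketch {
 public:
  using Generator = const char *(*)(void *source);  // hypothetical generator signature

  void send_or_defer(void *source, Generator generate) {
    if (this->try_send_(generate(source)))
      return;  // fast path: the send buffer had room
    for (auto &e : this->queue_) {
      if (e.source == source) {
        e.generate = generate;  // dedup: newest generator wins, state is re-read at send time
        return;
      }
    }
    this->queue_.push_back({source, generate});
  }

 private:
  struct Pending {
    void *source;
    Generator generate;
  };
  // Stand-in for try_send_nodefer(); always reports "full" so the sketch is self-contained.
  bool try_send_(const char *payload) { return payload == nullptr; }
  std::vector<Pending> queue_;  // O(n) scan; memory efficiency matters more than speed here
};

Storing the generator rather than the rendered payload means a deferred entry always sends the entity's current state, never a stale snapshot.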
@@ -16,6 +16,7 @@
 #include <vector>

 #ifdef USE_WEBSERVER
+#include "esphome/components/json/json_util.h"
 #include "esphome/components/web_server/list_entities.h"
 #endif

@@ -250,7 +251,7 @@ class AsyncWebHandler {
 class AsyncEventSource;
 class AsyncEventSourceResponse;

-using message_generator_t = std::string(esphome::web_server::WebServer *, void *);
+using message_generator_t = json::SerializationBuffer<>(esphome::web_server::WebServer *, void *);

 /*
 This class holds a pointer to the source component that wants to publish a state event, and a pointer to a function
@@ -39,6 +39,10 @@
 #include "esphome/components/esp32_improv/esp32_improv_component.h"
 #endif

+#ifdef USE_IMPROV_SERIAL
+#include "esphome/components/improv_serial/improv_serial_component.h"
+#endif
+
 namespace esphome::wifi {

 static const char *const TAG = "wifi";
@@ -365,6 +369,75 @@ bool WiFiComponent::ssid_was_seen_in_scan_(const std::string &ssid) const {
   return false;
 }

+bool WiFiComponent::needs_full_scan_results_() const {
+  // Components that require full scan results (for example, scan result listeners)
+  // are expected to call request_wifi_scan_results(), which sets keep_scan_results_.
+  if (this->keep_scan_results_) {
+    return true;
+  }
+
+#ifdef USE_CAPTIVE_PORTAL
+  // Captive portal needs full results when active (showing network list to user)
+  if (captive_portal::global_captive_portal != nullptr && captive_portal::global_captive_portal->is_active()) {
+    return true;
+  }
+#endif
+
+#ifdef USE_IMPROV_SERIAL
+  // Improv serial needs results during provisioning (before connected)
+  if (improv_serial::global_improv_serial_component != nullptr && !this->is_connected()) {
+    return true;
+  }
+#endif
+
+#ifdef USE_IMPROV
+  // BLE improv also needs results during provisioning
+  if (esp32_improv::global_improv_component != nullptr && esp32_improv::global_improv_component->is_active()) {
+    return true;
+  }
+#endif
+
+  return false;
+}
+
+bool WiFiComponent::matches_configured_network_(const char *ssid, const uint8_t *bssid) const {
+  // Hidden networks in scan results have empty SSIDs - skip them
+  if (ssid[0] == '\0') {
+    return false;
+  }
+  for (const auto &sta : this->sta_) {
+    // Skip hidden network configs (they don't appear in normal scans)
+    if (sta.get_hidden()) {
+      continue;
+    }
+    // For BSSID-only configs (empty SSID), match by BSSID
+    if (sta.get_ssid().empty()) {
+      if (sta.has_bssid() && std::memcmp(sta.get_bssid().data(), bssid, 6) == 0) {
+        return true;
+      }
+      continue;
+    }
+    // Match by SSID
+    if (sta.get_ssid() == ssid) {
+      return true;
+    }
+  }
+  return false;
+}
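
Both helpers exist to support the same filtering discipline that each platform scan callback later in this diff implements: keep a scan record only when full results are needed or the network matches a configured one, and count before storing so the result container can be allocated exactly once. A condensed, self-contained sketch of that two-pass shape (the record type, predicate wiring, and std::vector are illustrative; the real code uses the platform record types and FixedVector):

#include <cstddef>
#include <cstdint>
#include <functional>
#include <vector>

struct ScanRecord {
  const char *ssid;
  uint8_t bssid[6];
};

std::vector<ScanRecord> filter_scan(const std::vector<ScanRecord> &in, bool needs_full,
                                    const std::function<bool(const char *, const uint8_t *)> &matches) {
  // Pass 1: count survivors so storage is allocated exactly once.
  size_t count = 0;
  for (const auto &r : in) {
    if (needs_full || matches(r.ssid, r.bssid))
      count++;
  }
  std::vector<ScanRecord> out;
  out.reserve(count);  // FixedVector::init(count) plays this role in the real code
  // Pass 2: store survivors; the real code logs the rest at VERBOSE.
  for (const auto &r : in) {
    if (needs_full || matches(r.ssid, r.bssid))
      out.push_back(r);
  }
  return out;
}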
+
+void WiFiComponent::log_discarded_scan_result_(const char *ssid, const uint8_t *bssid, int8_t rssi, uint8_t channel) {
+#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
+  // Skip logging during roaming scans to avoid log buffer overflow
+  // (roaming scans typically find many networks but only care about same-SSID APs)
+  if (this->roaming_state_ == RoamingState::SCANNING) {
+    return;
+  }
+  char bssid_s[MAC_ADDRESS_PRETTY_BUFFER_SIZE];
+  format_mac_addr_upper(bssid, bssid_s);
+  ESP_LOGV(TAG, "- " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") " %ddB Ch:%u", ssid, bssid_s, rssi, channel);
+#endif
+}
+
 int8_t WiFiComponent::find_next_hidden_sta_(int8_t start_index) {
   // Find next SSID to try in RETRY_HIDDEN phase.
   //
@@ -656,8 +729,12 @@ void WiFiComponent::loop() {
         ESP_LOGI(TAG, "Starting fallback AP");
         this->setup_ap_config_();
 #ifdef USE_CAPTIVE_PORTAL
-        if (captive_portal::global_captive_portal != nullptr)
+        if (captive_portal::global_captive_portal != nullptr) {
+          // Reset so we force one full scan after captive portal starts
+          // (previous scans were filtered because captive portal wasn't active yet)
+          this->has_completed_scan_after_captive_portal_start_ = false;
           captive_portal::global_captive_portal->start();
+        }
 #endif
       }
     }
@@ -1195,7 +1272,7 @@ template<typename VectorType> static void insertion_sort_scan_results(VectorType
 // has overhead from UART transmission, so combining INFO+DEBUG into one line halves
 // the blocking time. Do NOT split this into separate ESP_LOGI/ESP_LOGD calls.
 __attribute__((noinline)) static void log_scan_result(const WiFiScanResult &res) {
-  char bssid_s[18];
+  char bssid_s[MAC_ADDRESS_PRETTY_BUFFER_SIZE];
   auto bssid = res.get_bssid();
   format_mac_addr_upper(bssid.data(), bssid_s);

@@ -1211,18 +1288,6 @@ __attribute__((noinline)) static void log_scan_result(const WiFiScanResult &res)
 #endif
 }

-#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
-// Helper function to log non-matching scan results at verbose level
-__attribute__((noinline)) static void log_scan_result_non_matching(const WiFiScanResult &res) {
-  char bssid_s[18];
-  auto bssid = res.get_bssid();
-  format_mac_addr_upper(bssid.data(), bssid_s);
-
-  ESP_LOGV(TAG, "- " LOG_SECRET("'%s'") " " LOG_SECRET("(%s) ") "%s", res.get_ssid().c_str(), bssid_s,
-           LOG_STR_ARG(get_signal_bars(res.get_rssi())));
-}
-#endif
-
 void WiFiComponent::check_scanning_finished() {
   if (!this->scan_done_) {
     if (millis() - this->action_started_ > WIFI_SCAN_TIMEOUT_MS) {
@@ -1232,6 +1297,8 @@ void WiFiComponent::check_scanning_finished() {
       return;
     }
   this->scan_done_ = false;
+  this->has_completed_scan_after_captive_portal_start_ =
+      true;  // Track that we've done a scan since captive portal started
   this->retry_hidden_mode_ = RetryHiddenMode::SCAN_BASED;

   if (this->scan_result_.empty()) {
@@ -1259,21 +1326,12 @@ void WiFiComponent::check_scanning_finished() {
   // Sort scan results using insertion sort for better memory efficiency
   insertion_sort_scan_results(this->scan_result_);

-  size_t non_matching_count = 0;
+  // Log matching networks (non-matching already logged at VERBOSE in scan callback)
   for (auto &res : this->scan_result_) {
     if (res.get_matches()) {
       log_scan_result(res);
-    } else {
-#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
-      log_scan_result_non_matching(res);
-#else
-      non_matching_count++;
-#endif
     }
   }
-  if (non_matching_count > 0) {
-    ESP_LOGD(TAG, "- %zu non-matching (VERBOSE to show)", non_matching_count);
-  }

   // SYNCHRONIZATION POINT: Establish link between scan_result_[0] and selected_sta_index_
   // After sorting, scan_result_[0] contains the best network. Now find which sta_[i] config
@@ -1532,7 +1590,10 @@ WiFiRetryPhase WiFiComponent::determine_next_phase_() {
   if (this->went_through_explicit_hidden_phase_()) {
     return WiFiRetryPhase::EXPLICIT_HIDDEN;
   }
-  // Skip scanning when captive portal/improv is active to avoid disrupting AP.
+  // Skip scanning when captive portal/improv is active to avoid disrupting AP,
+  // BUT only if we've already completed at least one scan AFTER the portal started.
+  // When captive portal first starts, scan results may be filtered/stale, so we need
+  // to do one full scan to populate available networks for the captive portal UI.
+  //
   // WHY SCANNING DISRUPTS AP MODE:
   // WiFi scanning requires the radio to leave the AP's channel and hop through
@@ -1549,7 +1610,16 @@ WiFiRetryPhase WiFiComponent::determine_next_phase_() {
   //
   // This allows users to configure WiFi via captive portal while the device keeps
   // attempting to connect to all configured networks in sequence.
   if (this->is_captive_portal_active_() || this->is_esp32_improv_active_()) {
+    // Captive portal needs scan results to show available networks.
+    // If captive portal is active, only skip scanning if we've done a scan after it started.
+    // If only improv is active (no captive portal), skip scanning since improv doesn't need results.
+    if (this->is_captive_portal_active_()) {
+      if (this->has_completed_scan_after_captive_portal_start_) {
         return WiFiRetryPhase::RETRY_HIDDEN;
+      }
+      // Need to scan for captive portal
+    } else if (this->is_esp32_improv_active_()) {
+      // Improv doesn't need scan results
+      return WiFiRetryPhase::RETRY_HIDDEN;
+    }
   }
   return WiFiRetryPhase::SCAN_CONNECTING;
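
The nested branch added above is easier to audit restated as a pure function; a hedged restatement (enum values and flag names follow the diff, the function itself is illustrative):

enum class PhaseSketch { RETRY_HIDDEN, SCAN_CONNECTING };

PhaseSketch decide(bool captive_portal_active, bool improv_active, bool scanned_since_portal_start) {
  if (captive_portal_active) {
    // The portal UI needs one full scan after it starts; after that, stop disrupting the AP.
    return scanned_since_portal_start ? PhaseSketch::RETRY_HIDDEN : PhaseSketch::SCAN_CONNECTING;
  }
  if (improv_active)
    return PhaseSketch::RETRY_HIDDEN;  // improv never consumes scan results
  return PhaseSketch::SCAN_CONNECTING;  // neither active: scan as usual
}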
@@ -2096,7 +2166,7 @@ void WiFiComponent::clear_roaming_state_() {

 void WiFiComponent::release_scan_results_() {
   if (!this->keep_scan_results_) {
-#ifdef USE_RP2040
+#if defined(USE_RP2040) || defined(USE_ESP32)
     // std::vector - use swap trick since shrink_to_fit is non-binding
     decltype(this->scan_result_)().swap(this->scan_result_);
 #else

@@ -161,9 +161,12 @@ struct EAPAuth {

 using bssid_t = std::array<uint8_t, 6>;

-// Use std::vector for RP2040 since scan count is unknown (callback-based)
-// Use FixedVector for other platforms where count is queried first
-#ifdef USE_RP2040
+/// Initial reserve size for filtered scan results (typical: 1-3 matching networks per SSID)
+static constexpr size_t WIFI_SCAN_RESULT_FILTERED_RESERVE = 8;
+
+// Use std::vector for RP2040 (callback-based) and ESP32 (destructive scan API)
+// Use FixedVector for ESP8266 and LibreTiny where two-pass exact allocation is possible
+#if defined(USE_RP2040) || defined(USE_ESP32)
 template<typename T> using wifi_scan_vector_t = std::vector<T>;
 #else
 template<typename T> using wifi_scan_vector_t = FixedVector<T>;
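
The alias above encodes one allocation strategy per platform. The ESP8266 and LibreTiny paths in this diff rely on a FixedVector contract of exactly one init(n) followed by at most n emplace_back() calls; a sketch of that contract as inferred from usage here (not the real esphome/core implementation, and it assumes default-constructible, move-assignable elements):

#include <cassert>
#include <cstddef>
#include <memory>
#include <utility>

template<typename T> class FixedVectorSketch {
 public:
  void init(size_t n) {  // single exact allocation, no growth afterwards
    this->data_ = std::make_unique<T[]>(n);
    this->capacity_ = n;
    this->size_ = 0;
  }
  template<typename... Args> void emplace_back(Args &&...args) {
    assert(this->size_ < this->capacity_);  // the two-pass count guarantees this holds
    this->data_[this->size_++] = T(std::forward<Args>(args)...);
  }
  size_t size() const { return this->size_; }

 private:
  std::unique_ptr<T[]> data_;
  size_t capacity_{0};
  size_t size_{0};
};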
@@ -539,6 +542,13 @@ class WiFiComponent : public Component {
   /// Check if an SSID was seen in the most recent scan results
   /// Used to skip hidden mode for SSIDs we know are visible
   bool ssid_was_seen_in_scan_(const std::string &ssid) const;
+  /// Check if full scan results are needed (captive portal active, improv, listeners)
+  bool needs_full_scan_results_() const;
+  /// Check if network matches any configured network (for scan result filtering)
+  /// Matches by SSID when configured, or by BSSID for BSSID-only configs
+  bool matches_configured_network_(const char *ssid, const uint8_t *bssid) const;
+  /// Log a discarded scan result at VERBOSE level (skipped during roaming scans to avoid log overflow)
+  void log_discarded_scan_result_(const char *ssid, const uint8_t *bssid, int8_t rssi, uint8_t channel);
   /// Find next SSID that wasn't in scan results (might be hidden)
   /// Returns index of next potentially hidden SSID, or -1 if none found
   /// @param start_index Start searching from index after this (-1 to start from beginning)
@@ -710,6 +720,8 @@ class WiFiComponent : public Component {
   bool enable_on_boot_{true};
   bool got_ipv4_address_{false};
   bool keep_scan_results_{false};
+  bool has_completed_scan_after_captive_portal_start_{
+      false};  // Tracks if we've completed a scan after captive portal started
   RetryHiddenMode retry_hidden_mode_{RetryHiddenMode::BLIND_RETRY};
   bool skip_cooldown_next_cycle_{false};
   bool post_connect_roaming_{true};  // Enabled by default

@@ -756,24 +756,42 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {

   if (status != OK) {
     ESP_LOGV(TAG, "Scan failed: %d", status);
-    this->retry_connect();
+    // Don't call retry_connect() here - this callback runs in SDK system context
+    // where yield() cannot be called. Instead, just set scan_done_ and let
+    // check_scanning_finished() handle the empty scan_result_ from loop context.
+    this->scan_done_ = true;
     return;
   }

-  // Count the number of results first
   auto *head = reinterpret_cast<bss_info *>(arg);
+  bool needs_full = this->needs_full_scan_results_();
+
+  // First pass: count matching networks (linked list is non-destructive)
+  size_t total = 0;
   size_t count = 0;
   for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
-    count++;
+    total++;
+    const char *ssid_cstr = reinterpret_cast<const char *>(it->ssid);
+    if (needs_full || this->matches_configured_network_(ssid_cstr, it->bssid)) {
+      count++;
+    }
   }

-  this->scan_result_.init(count);
+  this->scan_result_.init(count);  // Exact allocation

+  // Second pass: store matching networks
   for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
-    this->scan_result_.emplace_back(
-        bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
-        std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN,
-        it->is_hidden != 0);
+    const char *ssid_cstr = reinterpret_cast<const char *>(it->ssid);
+    if (needs_full || this->matches_configured_network_(ssid_cstr, it->bssid)) {
+      this->scan_result_.emplace_back(
+          bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
+          std::string(ssid_cstr, it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN, it->is_hidden != 0);
+    } else {
+      this->log_discarded_scan_result_(ssid_cstr, it->bssid, it->rssi, it->channel);
+    }
   }
+  ESP_LOGV(TAG, "Scan complete: %zu found, %zu stored%s", total, this->scan_result_.size(),
+           needs_full ? "" : " (filtered)");
   this->scan_done_ = true;
 #ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
   for (auto *listener : global_wifi_component->scan_results_listeners_) {
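
The comment added in this hunk describes a defer-to-loop pattern worth calling out: the Espressif SDK invokes this callback in system context, so the callback only records the outcome and loop-context code performs the actual recovery. A minimal sketch of the shape (the class shell is illustrative; flag names follow the diff):

#include <atomic>

class ScanOwnerSketch {
 public:
  // Runs in SDK system context: no yield(), no heavy logging, no retries here.
  void on_sdk_scan_done(bool ok) {
    this->scan_failed_ = !ok;
    this->scan_done_ = true;  // single flag write; read later from loop()
  }
  // Runs in loop context, where retry_connect()-style work is safe.
  void loop() {
    if (!this->scan_done_)
      return;
    this->scan_done_ = false;
    if (this->scan_failed_) {
      // handle the empty scan_result_, e.g. schedule a retry
    }
  }

 private:
  std::atomic<bool> scan_done_{false};
  std::atomic<bool> scan_failed_{false};
};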
@@ -828,11 +828,21 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
   }

   uint16_t number = it.number;
-  scan_result_.init(number);
+  bool needs_full = this->needs_full_scan_results_();
+
+  // Smart reserve: full capacity if needed, small reserve otherwise
+  if (needs_full) {
+    this->scan_result_.reserve(number);
+  } else {
+    this->scan_result_.reserve(WIFI_SCAN_RESULT_FILTERED_RESERVE);
+  }

 #ifdef USE_ESP32_HOSTED
   // getting records one at a time fails on P4 with hosted esp32 WiFi coprocessor
   // Presumably an upstream bug, work-around by getting all records at once
-  auto records = std::make_unique<wifi_ap_record_t[]>(number);
+  // Use stack buffer (3904 bytes / ~80 bytes per record = ~48 records) with heap fallback
+  static constexpr size_t SCAN_RECORD_STACK_COUNT = 3904 / sizeof(wifi_ap_record_t);
+  SmallBufferWithHeapFallback<SCAN_RECORD_STACK_COUNT, wifi_ap_record_t> records(number);
   err = esp_wifi_scan_get_ap_records(&number, records.get());
   if (err != ESP_OK) {
     esp_wifi_clear_ap_list();
@@ -840,7 +850,7 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
     return;
   }
   for (uint16_t i = 0; i < number; i++) {
-    wifi_ap_record_t &record = records[i];
+    wifi_ap_record_t &record = records.get()[i];
 #else
   // Process one record at a time to avoid large buffer allocation
   for (uint16_t i = 0; i < number; i++) {
@@ -852,12 +862,23 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
       break;
     }
 #endif  // USE_ESP32_HOSTED
-    bssid_t bssid;
-    std::copy(record.bssid, record.bssid + 6, bssid.begin());
-    std::string ssid(reinterpret_cast<const char *>(record.ssid));
-    scan_result_.emplace_back(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN,
-                              ssid.empty());

+    // Check C string first - avoid std::string construction for non-matching networks
+    const char *ssid_cstr = reinterpret_cast<const char *>(record.ssid);
+
+    // Only construct std::string and store if needed
+    if (needs_full || this->matches_configured_network_(ssid_cstr, record.bssid)) {
+      bssid_t bssid;
+      std::copy(record.bssid, record.bssid + 6, bssid.begin());
+      std::string ssid(ssid_cstr);
+      this->scan_result_.emplace_back(bssid, std::move(ssid), record.primary, record.rssi,
+                                      record.authmode != WIFI_AUTH_OPEN, ssid_cstr[0] == '\0');
+    } else {
+      this->log_discarded_scan_result_(ssid_cstr, record.bssid, record.rssi, record.primary);
+    }
   }
+  ESP_LOGV(TAG, "Scan complete: %u found, %zu stored%s", number, this->scan_result_.size(),
+           needs_full ? "" : " (filtered)");
 #ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
   for (auto *listener : this->scan_results_listeners_) {
     listener->on_wifi_scan_results(this->scan_result_);
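
SmallBufferWithHeapFallback is used above with only two visible requirements: construct it with the element count and call get() for a usable pointer. A sketch of a helper with those semantics (assumed from the call site, not the real ESPHome type):

#include <cstddef>
#include <memory>

template<size_t StackCount, typename T> class SmallBufferWithHeapFallbackSketch {
 public:
  explicit SmallBufferWithHeapFallbackSketch(size_t n) {
    if (n > StackCount)
      this->heap_ = std::make_unique<T[]>(n);  // rare case: more APs than the stack budget
  }
  T *get() { return this->heap_ ? this->heap_.get() : this->stack_; }

 private:
  T stack_[StackCount]{};
  std::unique_ptr<T[]> heap_;
};

The sizing comment in the diff works out as 3904 bytes of stack budget divided by sizeof(wifi_ap_record_t) (about 80 bytes), i.e. roughly 48 records before the heap fallback triggers.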
@@ -670,18 +670,39 @@ void WiFiComponent::wifi_scan_done_callback_() {
   if (num < 0)
     return;

-  this->scan_result_.init(static_cast<unsigned int>(num));
-  for (int i = 0; i < num; i++) {
-    String ssid = WiFi.SSID(i);
-    wifi_auth_mode_t authmode = WiFi.encryptionType(i);
-    int32_t rssi = WiFi.RSSI(i);
-    uint8_t *bssid = WiFi.BSSID(i);
-    int32_t channel = WiFi.channel(i);
+  bool needs_full = this->needs_full_scan_results_();

-    this->scan_result_.emplace_back(bssid_t{bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]},
-                                    std::string(ssid.c_str()), channel, rssi, authmode != WIFI_AUTH_OPEN,
-                                    ssid.length() == 0);
+  // Access scan results directly via WiFi.scan struct to avoid Arduino String allocations
+  // WiFi.scan is public in LibreTiny for WiFiEvents & WiFiScan static handlers
+  auto *scan = WiFi.scan;
+
+  // First pass: count matching networks
+  size_t count = 0;
+  for (int i = 0; i < num; i++) {
+    const char *ssid_cstr = scan->ap[i].ssid;
+    if (needs_full || this->matches_configured_network_(ssid_cstr, scan->ap[i].bssid.addr)) {
+      count++;
+    }
+  }
+
+  this->scan_result_.init(count);  // Exact allocation
+
+  // Second pass: store matching networks
+  for (int i = 0; i < num; i++) {
+    const char *ssid_cstr = scan->ap[i].ssid;
+    if (needs_full || this->matches_configured_network_(ssid_cstr, scan->ap[i].bssid.addr)) {
+      auto &ap = scan->ap[i];
+      this->scan_result_.emplace_back(bssid_t{ap.bssid.addr[0], ap.bssid.addr[1], ap.bssid.addr[2], ap.bssid.addr[3],
+                                              ap.bssid.addr[4], ap.bssid.addr[5]},
+                                      std::string(ssid_cstr), ap.channel, ap.rssi, ap.auth != WIFI_AUTH_OPEN,
+                                      ssid_cstr[0] == '\0');
+    } else {
+      auto &ap = scan->ap[i];
+      this->log_discarded_scan_result_(ssid_cstr, ap.bssid.addr, ap.rssi, ap.channel);
+    }
   }
+  ESP_LOGV(TAG, "Scan complete: %d found, %zu stored%s", num, this->scan_result_.size(),
+           needs_full ? "" : " (filtered)");
   WiFi.scanDelete();
 #ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
   for (auto *listener : this->scan_results_listeners_) {

@@ -21,6 +21,7 @@ static const char *const TAG = "wifi_pico_w";
 // Track previous state for detecting changes
 static bool s_sta_was_connected = false;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
 static bool s_sta_had_ip = false;         // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
+static size_t s_scan_result_count = 0;    // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

 bool WiFiComponent::wifi_mode_(optional<bool> sta, optional<bool> ap) {
   if (sta.has_value()) {
@@ -137,10 +138,20 @@ int WiFiComponent::s_wifi_scan_result(void *env, const cyw43_ev_scan_result_t *result) {
 }

 void WiFiComponent::wifi_scan_result(void *env, const cyw43_ev_scan_result_t *result) {
+  s_scan_result_count++;
+  const char *ssid_cstr = reinterpret_cast<const char *>(result->ssid);
+
+  // Skip networks that don't match any configured network (unless full results needed)
+  if (!this->needs_full_scan_results_() && !this->matches_configured_network_(ssid_cstr, result->bssid)) {
+    this->log_discarded_scan_result_(ssid_cstr, result->bssid, result->rssi, result->channel);
+    return;
+  }
+
   bssid_t bssid;
   std::copy(result->bssid, result->bssid + 6, bssid.begin());
-  std::string ssid(reinterpret_cast<const char *>(result->ssid));
-  WiFiScanResult res(bssid, ssid, result->channel, result->rssi, result->auth_mode != CYW43_AUTH_OPEN, ssid.empty());
+  std::string ssid(ssid_cstr);
+  WiFiScanResult res(bssid, std::move(ssid), result->channel, result->rssi, result->auth_mode != CYW43_AUTH_OPEN,
+                     ssid_cstr[0] == '\0');
   if (std::find(this->scan_result_.begin(), this->scan_result_.end(), res) == this->scan_result_.end()) {
     this->scan_result_.push_back(res);
   }
@@ -149,6 +160,7 @@ void WiFiComponent::wifi_scan_result(void *env, const cyw43_ev_scan_result_t *result) {
 bool WiFiComponent::wifi_scan_start_(bool passive) {
   this->scan_result_.clear();
   this->scan_done_ = false;
+  s_scan_result_count = 0;
   cyw43_wifi_scan_options_t scan_options = {0};
   scan_options.scan_type = passive ? 1 : 0;
   int err = cyw43_wifi_scan(&cyw43_state, &scan_options, nullptr, &s_wifi_scan_result);
@@ -244,7 +256,9 @@ void WiFiComponent::wifi_loop_() {
   // Handle scan completion
   if (this->state_ == WIFI_COMPONENT_STATE_STA_SCANNING && !cyw43_wifi_scan_active(&cyw43_state)) {
     this->scan_done_ = true;
-    ESP_LOGV(TAG, "Scan done");
+    bool needs_full = this->needs_full_scan_results_();
+    ESP_LOGV(TAG, "Scan complete: %zu found, %zu stored%s", s_scan_result_count, this->scan_result_.size(),
+             needs_full ? "" : " (filtered)");
 #ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
     for (auto *listener : this->scan_results_listeners_) {
       listener->on_wifi_scan_results(this->scan_result_);
@@ -12,6 +12,7 @@ from esphome.core import CORE
 from esphome.types import ConfigType

 from .const_zephyr import (
+    CONF_IEEE802154_VENDOR_OUI,
     CONF_MAX_EP_NUMBER,
     CONF_ON_JOIN,
     CONF_POWER_SOURCE,
@@ -23,7 +24,12 @@ from .const_zephyr import (
     ZigbeeComponent,
     zigbee_ns,
 )
-from .zigbee_zephyr import zephyr_binary_sensor, zephyr_sensor, zephyr_switch
+from .zigbee_zephyr import (
+    zephyr_binary_sensor,
+    zephyr_number,
+    zephyr_sensor,
+    zephyr_switch,
+)

 _LOGGER = logging.getLogger(__name__)

@@ -42,6 +48,7 @@ def zigbee_set_core_data(config: ConfigType) -> ConfigType:
 BINARY_SENSOR_SCHEMA = cv.Schema({}).extend(zephyr_binary_sensor)
 SENSOR_SCHEMA = cv.Schema({}).extend(zephyr_sensor)
 SWITCH_SCHEMA = cv.Schema({}).extend(zephyr_switch)
+NUMBER_SCHEMA = cv.Schema({}).extend(zephyr_number)

 CONFIG_SCHEMA = cv.All(
     cv.Schema(
@@ -58,6 +65,13 @@ CONFIG_SCHEMA = cv.All(
             cv.Optional(CONF_POWER_SOURCE, default="DC_SOURCE"): cv.enum(
                 POWER_SOURCE, upper=True
             ),
+            cv.Optional(CONF_IEEE802154_VENDOR_OUI): cv.All(
+                cv.Any(
+                    cv.int_range(min=0x000000, max=0xFFFFFF),
+                    cv.one_of(*["random"], lower=True),
+                ),
+                cv.requires_component("nrf52"),
+            ),
         }
     ).extend(cv.COMPONENT_SCHEMA),
     zigbee_set_core_data,
@@ -117,10 +131,25 @@ async def setup_switch(entity: cg.MockObj, config: ConfigType) -> None:
         await zephyr_setup_switch(entity, config)


+async def setup_number(
+    entity: cg.MockObj,
+    config: ConfigType,
+    min_value: float,
+    max_value: float,
+    step: float,
+) -> None:
+    if not config.get(CONF_ZIGBEE_ID) or config.get(CONF_INTERNAL):
+        return
+    if CORE.using_zephyr:
+        from .zigbee_zephyr import zephyr_setup_number
+
+        await zephyr_setup_number(entity, config, min_value, max_value, step)
+
+
 def consume_endpoint(config: ConfigType) -> ConfigType:
     if not config.get(CONF_ZIGBEE_ID) or config.get(CONF_INTERNAL):
         return config
-    if " " in config[CONF_NAME]:
+    if CONF_NAME in config and " " in config[CONF_NAME]:
         _LOGGER.warning(
             "Spaces in '%s' work with ZHA but not Zigbee2MQTT. For Zigbee2MQTT use '%s'",
             config[CONF_NAME],
@@ -144,6 +173,10 @@ def validate_switch(config: ConfigType) -> ConfigType:
     return consume_endpoint(config)


+def validate_number(config: ConfigType) -> ConfigType:
+    return consume_endpoint(config)
+
+
 ZIGBEE_ACTION_SCHEMA = automation.maybe_simple_id(
     cv.Schema(
         {

@@ -4,6 +4,7 @@ zigbee_ns = cg.esphome_ns.namespace("zigbee")
 ZigbeeComponent = zigbee_ns.class_("ZigbeeComponent", cg.Component)
 BinaryAttrs = zigbee_ns.struct("BinaryAttrs")
 AnalogAttrs = zigbee_ns.struct("AnalogAttrs")
+AnalogAttrsOutput = zigbee_ns.struct("AnalogAttrsOutput")

 CONF_MAX_EP_NUMBER = 8
 CONF_ZIGBEE_ID = "zigbee_id"
@@ -12,6 +13,7 @@ CONF_WIPE_ON_BOOT = "wipe_on_boot"
 CONF_ZIGBEE_BINARY_SENSOR = "zigbee_binary_sensor"
 CONF_ZIGBEE_SENSOR = "zigbee_sensor"
 CONF_ZIGBEE_SWITCH = "zigbee_switch"
+CONF_ZIGBEE_NUMBER = "zigbee_number"
 CONF_POWER_SOURCE = "power_source"
 POWER_SOURCE = {
     "UNKNOWN": "ZB_ZCL_BASIC_POWER_SOURCE_UNKNOWN",
@@ -22,6 +24,7 @@ POWER_SOURCE = {
     "EMERGENCY_MAINS_CONST": "ZB_ZCL_BASIC_POWER_SOURCE_EMERGENCY_MAINS_CONST",
     "EMERGENCY_MAINS_TRANSF": "ZB_ZCL_BASIC_POWER_SOURCE_EMERGENCY_MAINS_TRANSF",
 }
+CONF_IEEE802154_VENDOR_OUI = "ieee802154_vendor_oui"

 # Keys for CORE.data storage
 KEY_ZIGBEE = "zigbee"
@@ -37,3 +40,4 @@ ZB_ZCL_CLUSTER_ID_IDENTIFY = "ZB_ZCL_CLUSTER_ID_IDENTIFY"
 ZB_ZCL_CLUSTER_ID_BINARY_INPUT = "ZB_ZCL_CLUSTER_ID_BINARY_INPUT"
 ZB_ZCL_CLUSTER_ID_ANALOG_INPUT = "ZB_ZCL_CLUSTER_ID_ANALOG_INPUT"
 ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT = "ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT"
+ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT = "ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT"
esphome/components/zigbee/time/__init__.py (new file, 86 lines)
@@ -0,0 +1,86 @@
+import esphome.codegen as cg
+from esphome.components import time as time_
+import esphome.config_validation as cv
+from esphome.const import CONF_ID
+from esphome.core import CORE
+from esphome.types import ConfigType
+
+from .. import consume_endpoint
+from ..const_zephyr import CONF_ZIGBEE_ID, zigbee_ns
+from ..zigbee_zephyr import (
+    ZigbeeClusterDesc,
+    ZigbeeComponent,
+    get_slot_index,
+    zigbee_new_attr_list,
+    zigbee_new_cluster_list,
+    zigbee_new_variable,
+    zigbee_register_ep,
+)
+
+DEPENDENCIES = ["zigbee"]
+
+ZigbeeTime = zigbee_ns.class_("ZigbeeTime", time_.RealTimeClock)
+
+CONFIG_SCHEMA = cv.All(
+    time_.TIME_SCHEMA.extend(
+        {
+            cv.GenerateID(): cv.declare_id(ZigbeeTime),
+            cv.OnlyWith(CONF_ZIGBEE_ID, ["nrf52", "zigbee"]): cv.use_id(
+                ZigbeeComponent
+            ),
+        }
+    )
+    .extend(cv.COMPONENT_SCHEMA)
+    .extend(cv.polling_component_schema("1s")),
+    consume_endpoint,
+)
+
+
+async def to_code(config: ConfigType) -> None:
+    CORE.add_job(_add_time, config)
+
+
+async def _add_time(config: ConfigType) -> None:
+    slot_index = get_slot_index()
+
+    # Create unique names for this sensor's variables based on slot index
+    prefix = f"zigbee_ep{slot_index + 1}"
+    attrs_name = f"{prefix}_time_attrs"
+    attr_list_name = f"{prefix}_time_attrib_list"
+    cluster_list_name = f"{prefix}_cluster_list"
+    ep_name = f"{prefix}_ep"
+
+    # Create the binary attributes structure
+    time_attrs = zigbee_new_variable(attrs_name, "zb_zcl_time_attrs_t")
+    attr_list = zigbee_new_attr_list(
+        attr_list_name,
+        "ZB_ZCL_DECLARE_TIME_ATTR_LIST",
+        str(time_attrs),
+    )
+
+    # Create cluster list and register endpoint
+    cluster_list_name, clusters = zigbee_new_cluster_list(
+        cluster_list_name,
+        [
+            ZigbeeClusterDesc("ZB_ZCL_CLUSTER_ID_TIME", attr_list),
+            ZigbeeClusterDesc("ZB_ZCL_CLUSTER_ID_TIME"),
+        ],
+    )
+    zigbee_register_ep(
+        ep_name,
+        cluster_list_name,
+        0,
+        clusters,
+        slot_index,
+        "ZB_HA_CUSTOM_ATTR_DEVICE_ID",
+    )
+
+    # Create the ZigbeeTime component
+    var = cg.new_Pvariable(config[CONF_ID])
+    await time_.register_time(var, config)
+    await cg.register_component(var, config)
+
+    cg.add(var.set_endpoint(slot_index + 1))
+    cg.add(var.set_cluster_attributes(time_attrs))
+    hub = await cg.get_variable(config[CONF_ZIGBEE_ID])
+    cg.add(var.set_parent(hub))
esphome/components/zigbee/time/zigbee_time_zephyr.cpp (new file, 87 lines)
@@ -0,0 +1,87 @@
+#include "zigbee_time_zephyr.h"
+#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_TIME)
+#include "esphome/core/log.h"
+
+namespace esphome::zigbee {
+
+static const char *const TAG = "zigbee.time";
+
+// This time standard is the number of
+// seconds since 0 hrs 0 mins 0 sec on 1st January 2000 UTC (Universal Coordinated Time).
+constexpr time_t EPOCH_2000 = 946684800;
+
+ZigbeeTime *global_time = nullptr;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
+
+void ZigbeeTime::sync_time(zb_ret_t status, zb_uint32_t auth_level, zb_uint16_t short_addr, zb_uint8_t endpoint,
+                           zb_uint32_t nw_time) {
+  if (status == RET_OK && auth_level >= ZB_ZCL_TIME_HAS_SYNCHRONIZED_BIT) {
+    global_time->set_epoch_time(nw_time + EPOCH_2000);
+  } else if (status != RET_TIMEOUT || !global_time->has_time_) {
+    ESP_LOGE(TAG, "Status: %d, auth_level: %u, short_addr: %d, endpoint: %d, nw_time: %u", status, auth_level,
+             short_addr, endpoint, nw_time);
+  }
+}
+
+void ZigbeeTime::setup() {
+  global_time = this;
+  this->parent_->add_callback(this->endpoint_, [this](zb_bufid_t bufid) { this->zcl_device_cb_(bufid); });
+  synchronize_epoch_(EPOCH_2000);
+  this->parent_->add_join_callback([this]() { zb_zcl_time_server_synchronize(this->endpoint_, sync_time); });
+}
+
+void ZigbeeTime::dump_config() {
+  ESP_LOGCONFIG(TAG,
+                "Zigbee Time\n"
+                "  Endpoint: %d",
+                this->endpoint_);
+  RealTimeClock::dump_config();
+}
+
+void ZigbeeTime::update() {
+  time_t time = timestamp_now();
+  this->cluster_attributes_->time = time - EPOCH_2000;
+}
+
+void ZigbeeTime::set_epoch_time(uint32_t epoch) {
+  this->defer([this, epoch]() {
+    this->synchronize_epoch_(epoch);
+    this->has_time_ = true;
+  });
+}
+
+void ZigbeeTime::zcl_device_cb_(zb_bufid_t bufid) {
+  zb_zcl_device_callback_param_t *p_device_cb_param = ZB_BUF_GET_PARAM(bufid, zb_zcl_device_callback_param_t);
+  zb_zcl_device_callback_id_t device_cb_id = p_device_cb_param->device_cb_id;
+  zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
+  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
+
+  switch (device_cb_id) {
+    /* ZCL set attribute value */
+    case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
+      if (cluster_id == ZB_ZCL_CLUSTER_ID_TIME) {
+        if (attr_id == ZB_ZCL_ATTR_TIME_TIME_ID) {
+          zb_uint32_t value = p_device_cb_param->cb_param.set_attr_value_param.values.data32;
+          ESP_LOGI(TAG, "Synchronize time to %u", value);
+          this->defer([this, value]() { synchronize_epoch_(value + EPOCH_2000); });
+        } else if (attr_id == ZB_ZCL_ATTR_TIME_TIME_STATUS_ID) {
+          zb_uint8_t value = p_device_cb_param->cb_param.set_attr_value_param.values.data8;
+          ESP_LOGI(TAG, "Time status %hd", value);
+          this->defer([this, value]() { this->has_time_ = ZB_ZCL_TIME_TIME_STATUS_SYNCHRONIZED_BIT_IS_SET(value); });
+        }
+      } else {
+        /* other clusters attribute handled here */
+        ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
+        p_device_cb_param->status = RET_NOT_IMPLEMENTED;
+      }
+      break;
+    default:
+      p_device_cb_param->status = RET_NOT_IMPLEMENTED;
+      break;
+  }
+
+  ESP_LOGD(TAG, "Zcl_device_cb_ status: %hd", p_device_cb_param->status);
+}
+
+}  // namespace esphome::zigbee
+
+#endif
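
As the EPOCH_2000 comment in this file states, the ZCL Time cluster counts seconds from 2000-01-01 00:00:00 UTC while RealTimeClock uses the 1970 Unix epoch; the two differ by exactly 946684800 seconds. A worked sketch of the conversion pair the file applies in sync_time() and update():

#include <cstdint>
#include <ctime>

constexpr std::time_t EPOCH_2000_SKETCH = 946684800;  // seconds from 1970-01-01 to 2000-01-01 (UTC)

inline std::time_t zcl_to_unix(uint32_t zcl_seconds) {
  return static_cast<std::time_t>(zcl_seconds) + EPOCH_2000_SKETCH;  // sync_time(): nw_time + EPOCH_2000
}
inline uint32_t unix_to_zcl(std::time_t unix_seconds) {
  return static_cast<uint32_t>(unix_seconds - EPOCH_2000_SKETCH);  // update(): timestamp_now() - EPOCH_2000
}

For example, a ZCL time of 0 is 2000-01-01T00:00:00Z, i.e. Unix time 946684800 (30 years of 365 days plus 7 leap days, times 86400 seconds).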
esphome/components/zigbee/time/zigbee_time_zephyr.h (new file, 38 lines)
@@ -0,0 +1,38 @@
+#pragma once
+#include "esphome/core/defines.h"
+#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_TIME)
+#include "esphome/core/component.h"
+#include "esphome/components/time/real_time_clock.h"
+#include "esphome/components/zigbee/zigbee_zephyr.h"
+
+extern "C" {
+#include <zboss_api.h>
+#include <zboss_api_addons.h>
+}
+
+namespace esphome::zigbee {
+
+class ZigbeeTime : public time::RealTimeClock, public ZigbeeEntity {
+ public:
+  void setup() override;
+  void dump_config() override;
+  void update() override;
+
+  void set_cluster_attributes(zb_zcl_time_attrs_t &cluster_attributes) {
+    this->cluster_attributes_ = &cluster_attributes;
+  }
+
+  void set_epoch_time(uint32_t epoch);
+
+ protected:
+  static void sync_time(zb_ret_t status, zb_uint32_t auth_level, zb_uint16_t short_addr, zb_uint8_t endpoint,
+                        zb_uint32_t nw_time);
+  void zcl_device_cb_(zb_bufid_t bufid);
+  zb_zcl_time_attrs_t *cluster_attributes_{nullptr};
+
+  bool has_time_{false};
+};
+
+}  // namespace esphome::zigbee
+
+#endif
@@ -22,7 +22,7 @@ void ZigbeeBinarySensor::setup() {
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_BINARY_INPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_BINARY_INPUT_PRESENT_VALUE_ID, &this->cluster_attributes_->present_value,
                         ZB_FALSE);
    this->parent_->flush();
    this->parent_->force_report();
  });
}

esphome/components/zigbee/zigbee_number_zephyr.cpp (new file, 111 lines)
@@ -0,0 +1,111 @@
#include "zigbee_number_zephyr.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_NUMBER)
#include "esphome/core/log.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
#include <zb_nrf_platform.h>
#include <zigbee/zigbee_app_utils.h>
#include <zb_error_to_string.h>
}
namespace esphome::zigbee {

static const char *const TAG = "zigbee.number";

void ZigbeeNumber::setup() {
  this->parent_->add_callback(this->endpoint_, [this](zb_bufid_t bufid) { this->zcl_device_cb_(bufid); });
  this->number_->add_on_state_callback([this](float state) {
    this->cluster_attributes_->present_value = state;
    ESP_LOGD(TAG, "Set attribute endpoint: %d, present_value %f", this->endpoint_,
             this->cluster_attributes_->present_value);
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, (zb_uint8_t *) &cluster_attributes_->present_value,
                         ZB_FALSE);
    this->parent_->force_report();
  });
}

void ZigbeeNumber::dump_config() {
  ESP_LOGCONFIG(TAG,
                "Zigbee Number\n"
                "  Endpoint: %d, present_value %f",
                this->endpoint_, this->cluster_attributes_->present_value);
}

void ZigbeeNumber::zcl_device_cb_(zb_bufid_t bufid) {
  zb_zcl_device_callback_param_t *p_device_cb_param = ZB_BUF_GET_PARAM(bufid, zb_zcl_device_callback_param_t);
  zb_zcl_device_callback_id_t device_cb_id = p_device_cb_param->device_cb_id;
  zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;

  switch (device_cb_id) {
    /* ZCL set attribute value */
    case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
      if (cluster_id == ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT) {
        ESP_LOGI(TAG, "Analog output attribute setting");
        if (attr_id == ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID) {
          float value =
              *reinterpret_cast<const float *>(&p_device_cb_param->cb_param.set_attr_value_param.values.data32);
          this->defer([this, value]() {
            this->cluster_attributes_->present_value = value;
            auto call = this->number_->make_call();
            call.set_value(value);
            call.perform();
          });
        }
      } else {
        /* Attributes of other clusters would be handled here. */
        ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
        p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      }
      break;
    default:
      p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      break;
  }

  ESP_LOGD(TAG, "%s status: %hd", __func__, p_device_cb_param->status);
}

const zb_uint8_t ZB_ZCL_ANALOG_OUTPUT_STATUS_FLAG_MAX_VALUE = 0x0F;

static zb_ret_t check_value_analog_server(zb_uint16_t attr_id, zb_uint8_t endpoint,
                                          zb_uint8_t *value) {  // NOLINT(readability-non-const-parameter)
  zb_ret_t ret = RET_OK;
  ZVUNUSED(endpoint);

  switch (attr_id) {
    case ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID:
      ret = ZB_ZCL_CHECK_BOOL_VALUE(*value) ? RET_OK : RET_ERROR;
      break;
    case ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID:
      break;

    case ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID:
      if (*value > ZB_ZCL_ANALOG_OUTPUT_STATUS_FLAG_MAX_VALUE) {
        ret = RET_ERROR;
      }
      break;

    default:
      break;
  }

  return ret;
}

}  // namespace esphome::zigbee

void zb_zcl_analog_output_init_server() {
  zb_zcl_add_cluster_handlers(ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                              esphome::zigbee::check_value_analog_server, (zb_zcl_cluster_write_attr_hook_t) NULL,
                              (zb_zcl_cluster_handler_t) NULL);
}

void zb_zcl_analog_output_init_client() {
  zb_zcl_add_cluster_handlers(ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_CLIENT_ROLE,
                              (zb_zcl_cluster_check_value_t) NULL, (zb_zcl_cluster_write_attr_hook_t) NULL,
                              (zb_zcl_cluster_handler_t) NULL);
}

#endif
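The reinterpret_cast in zcl_device_cb_ above recovers a float from the raw data32 bit pattern. A sketch of the same conversion done with memcpy, which is the strictly well-defined way to type-pun in C++ and compiles to the same code (the helper name is hypothetical, not part of the component):

#include <cstdint>
#include <cstring>

// Reinterpret the ZCL single-precision payload, delivered as a raw
// 32-bit pattern, as a float without violating strict aliasing.
static float data32_to_float(uint32_t data32) {
  float value;
  static_assert(sizeof(value) == sizeof(data32), "ZCL single is 32 bits");
  std::memcpy(&value, &data32, sizeof(value));
  return value;
}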
esphome/components/zigbee/zigbee_number_zephyr.h (new file, 118 lines)
@@ -0,0 +1,118 @@
#pragma once

#include "esphome/core/defines.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_NUMBER)
#include "esphome/components/zigbee/zigbee_zephyr.h"
#include "esphome/core/component.h"
#include "esphome/components/number/number.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
}

enum {
  ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID = 0x001C,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID = 0x0041,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID = 0x0045,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID = 0x0051,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID = 0x0055,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID = 0x006A,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID = 0x006F,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID = 0x0075,
};

#define ZB_ZCL_ANALOG_OUTPUT_CLUSTER_REVISION_DEFAULT ((zb_uint16_t) 0x0001u)

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID, ZB_ZCL_ATTR_TYPE_CHAR_STRING, ZB_ZCL_ATTR_ACCESS_READ_ONLY, \
        (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), (void *) (data_ptr) \
  }

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID, ZB_ZCL_ATTR_TYPE_BOOL, \
        ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
        (void *) (data_ptr) \
  }
// PresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
        ZB_ZCL_ATTR_ACCESS_READ_WRITE | ZB_ZCL_ATTR_ACCESS_REPORTING, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
        (void *) (data_ptr) \
  }
// MaxPresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
        ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
        (void *) (data_ptr) \
  }
// MinPresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
        ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
        (void *) (data_ptr) \
  }
// Resolution
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
        ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
        (void *) (data_ptr) \
  }

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID, ZB_ZCL_ATTR_TYPE_8BITMAP, \
        ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_REPORTING, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
        (void *) (data_ptr) \
  }

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID, ZB_ZCL_ATTR_TYPE_16BIT_ENUM, ZB_ZCL_ATTR_ACCESS_READ_ONLY, \
        (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), (void *) (data_ptr) \
  }

#define ESPHOME_ZB_ZCL_DECLARE_ANALOG_OUTPUT_ATTRIB_LIST(attr_list, out_of_service, present_value, status_flag, \
                                                         max_present_value, min_present_value, resolution, \
                                                         engineering_units, description) \
  ZB_ZCL_START_DECLARE_ATTRIB_LIST_CLUSTER_REVISION(attr_list, ZB_ZCL_ANALOG_OUTPUT) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID, (out_of_service)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, (present_value)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID, (status_flag)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID, (max_present_value)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID, (min_present_value)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID, (resolution)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID, (engineering_units)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID, (description)) \
  ZB_ZCL_FINISH_DECLARE_ATTRIB_LIST

void zb_zcl_analog_output_init_server();
void zb_zcl_analog_output_init_client();
#define ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT_SERVER_ROLE_INIT zb_zcl_analog_output_init_server
#define ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT_CLIENT_ROLE_INIT zb_zcl_analog_output_init_client

namespace esphome::zigbee {

class ZigbeeNumber : public ZigbeeEntity, public Component {
 public:
  ZigbeeNumber(number::Number *n) : number_(n) {}
  void set_cluster_attributes(AnalogAttrsOutput &cluster_attributes) {
    this->cluster_attributes_ = &cluster_attributes;
  }

  void setup() override;
  void dump_config() override;

 protected:
  number::Number *number_;
  AnalogAttrsOutput *cluster_attributes_{nullptr};
  void zcl_device_cb_(zb_bufid_t bufid);
};

}  // namespace esphome::zigbee
#endif
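For orientation, a hypothetical instantiation of this attribute list; in practice the equivalent code is emitted by the Python codegen, and the variable names and exact field set here are assumptions based on the AnalogAttrsOutput struct shown elsewhere in this diff:

// Hypothetical endpoint attribute storage plus attribute list.
static esphome::zigbee::AnalogAttrsOutput zigbee_ep1_attrs;
ESPHOME_ZB_ZCL_DECLARE_ANALOG_OUTPUT_ATTRIB_LIST(
    zigbee_ep1_attr_list, &zigbee_ep1_attrs.out_of_service, &zigbee_ep1_attrs.present_value,
    &zigbee_ep1_attrs.status_flag, &zigbee_ep1_attrs.max_present_value, &zigbee_ep1_attrs.min_present_value,
    &zigbee_ep1_attrs.resolution, &zigbee_ep1_attrs.engineering_units, zigbee_ep1_attrs.description);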
@@ -21,7 +21,7 @@ void ZigbeeSensor::setup() {
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_ANALOG_INPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_ANALOG_INPUT_PRESENT_VALUE_ID,
                         (zb_uint8_t *) &this->cluster_attributes_->present_value, ZB_FALSE);
    this->parent_->flush();
    this->parent_->force_report();
  });
}

@@ -31,7 +31,7 @@ void ZigbeeSwitch::setup() {
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_BINARY_OUTPUT_PRESENT_VALUE_ID, &this->cluster_attributes_->present_value,
                         ZB_FALSE);
    this->parent_->flush();
    this->parent_->force_report();
  });
}

@@ -41,8 +41,6 @@ void ZigbeeSwitch::zcl_device_cb_(zb_bufid_t bufid) {
  zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;

-  p_device_cb_param->status = RET_OK;
-
  switch (device_cb_id) {
    /* ZCL set attribute value */
    case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
@@ -52,16 +50,17 @@ void ZigbeeSwitch::zcl_device_cb_(zb_bufid_t bufid) {
        if (attr_id == ZB_ZCL_ATTR_BINARY_OUTPUT_PRESENT_VALUE_ID) {
          this->defer([this, value]() {
            this->cluster_attributes_->present_value = value ? ZB_TRUE : ZB_FALSE;
            this->switch_->publish_state(value);
            this->switch_->control(value);
          });
        }
      } else {
        /* Attributes of other clusters would be handled here. */
        ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
        p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      }
      break;
    default:
-      p_device_cb_param->status = RET_ERROR;
+      p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      break;
  }

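All of these handlers push entity updates through defer() instead of mutating state inline, since ZBOSS invokes the callback outside ESPHome's main loop. A reduced standalone sketch of that hand-off pattern, ignoring the synchronization a real cross-context queue needs (class and function names are hypothetical):

#include <cstdio>
#include <functional>
#include <queue>

// Producers enqueue closures from another context; the owner's loop()
// drains and runs them where touching entity state is safe.
class LoopOwner {
 public:
  void defer(std::function<void()> fn) { this->queue_.push(std::move(fn)); }
  void loop() {
    while (!this->queue_.empty()) {
      this->queue_.front()();
      this->queue_.pop();
    }
  }

 private:
  std::queue<std::function<void()>> queue_;
};

int main() {
  LoopOwner owner;
  bool value = true;  // value captured inside a ZBOSS-style callback
  owner.defer([value]() { std::printf("apply state %d on the main loop\n", value); });
  owner.loop();
  return 0;
}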
@@ -101,8 +101,8 @@ void ZigbeeComponent::zcl_device_cb(zb_bufid_t bufid) {
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
  auto endpoint = p_device_cb_param->endpoint;

-  ESP_LOGI(TAG, "Zcl_device_cb %s id %hd, cluster_id %d, attr_id %d, endpoint: %d", __func__, device_cb_id, cluster_id,
-           attr_id, endpoint);
+  ESP_LOGI(TAG, "%s id %hd, cluster_id %d, attr_id %d, endpoint: %d", __func__, device_cb_id, cluster_id, attr_id,
+           endpoint);

  /* Set default response value. */
  p_device_cb_param->status = RET_OK;
@@ -112,10 +112,10 @@ void ZigbeeComponent::zcl_device_cb(zb_bufid_t bufid) {
    const auto &cb = global_zigbee->callbacks_[endpoint - 1];
    if (cb) {
      cb(bufid);
      return;
    }
    return;
  }
-  p_device_cb_param->status = RET_ERROR;
+  p_device_cb_param->status = RET_NOT_IMPLEMENTED;
}

void ZigbeeComponent::on_join_() {
@@ -230,11 +230,11 @@ static void send_attribute_report(zb_bufid_t bufid, zb_uint16_t cmd_id) {
  zb_buf_free(bufid);
}

void ZigbeeComponent::flush() { this->need_flush_ = true; }
void ZigbeeComponent::force_report() { this->force_report_ = true; }

void ZigbeeComponent::loop() {
  if (this->need_flush_) {
    this->need_flush_ = false;
    if (this->force_report_) {
      this->force_report_ = false;
      zb_buf_get_out_delayed_ext(send_attribute_report, 0, 0);
    }
  }
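flush() and force_report() only raise flags; loop() drains them once per iteration, so several attribute writes in one cycle collapse into a single report. A standalone sketch of the same flag-batching idea, with hypothetical names:

#include <cstdio>

// Callers request work via cheap flag sets; the owner's loop() collapses
// any number of requests per iteration into one expensive action.
class Reporter {
 public:
  void flush() { this->need_flush_ = true; }
  void force_report() { this->force_report_ = true; }
  void loop() {
    if (this->need_flush_) {
      this->need_flush_ = false;
      if (this->force_report_) {
        this->force_report_ = false;
        std::puts("send one batched attribute report");
      }
    }
  }

 private:
  bool need_flush_{false};
  bool force_report_{false};
};

int main() {
  Reporter r;
  r.flush();
  r.force_report();
  r.flush();  // a second request within the same cycle
  r.loop();   // emits a single report
  return 0;
}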
@@ -60,6 +60,12 @@ struct AnalogAttrs {
  zb_uchar_t description[ZB_ZCL_MAX_STRING_SIZE];
};

struct AnalogAttrsOutput : AnalogAttrs {
  float max_present_value;
  float min_present_value;
  float resolution;
};

class ZigbeeComponent : public Component {
 public:
  void setup() override;
@@ -72,7 +78,7 @@ class ZigbeeComponent : public Component {
  void zboss_signal_handler_esphome(zb_bufid_t bufid);
  void factory_reset();
  Trigger<> *get_join_trigger() { return &this->join_trigger_; };
  void flush();
  void force_report();
  void loop() override;

 protected:
@@ -84,7 +90,7 @@ class ZigbeeComponent : public Component {
  std::array<std::function<void(zb_bufid_t bufid)>, ZIGBEE_ENDPOINTS_COUNT> callbacks_{};
  CallbackManager<void()> join_cb_;
  Trigger<> join_trigger_;
  bool need_flush_{false};
  bool force_report_{false};
};

class ZigbeeEntity {
@@ -49,11 +49,13 @@ from esphome.cpp_generator import (
from esphome.types import ConfigType

from .const_zephyr import (
    CONF_IEEE802154_VENDOR_OUI,
    CONF_ON_JOIN,
    CONF_POWER_SOURCE,
    CONF_WIPE_ON_BOOT,
    CONF_ZIGBEE_BINARY_SENSOR,
    CONF_ZIGBEE_ID,
    CONF_ZIGBEE_NUMBER,
    CONF_ZIGBEE_SENSOR,
    CONF_ZIGBEE_SWITCH,
    KEY_EP_NUMBER,
@@ -61,12 +63,14 @@ from .const_zephyr import (
    POWER_SOURCE,
    ZB_ZCL_BASIC_ATTRS_EXT_T,
    ZB_ZCL_CLUSTER_ID_ANALOG_INPUT,
    ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT,
    ZB_ZCL_CLUSTER_ID_BASIC,
    ZB_ZCL_CLUSTER_ID_BINARY_INPUT,
    ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT,
    ZB_ZCL_CLUSTER_ID_IDENTIFY,
    ZB_ZCL_IDENTIFY_ATTRS_T,
    AnalogAttrs,
    AnalogAttrsOutput,
    BinaryAttrs,
    ZigbeeComponent,
    zigbee_ns,
@@ -75,6 +79,7 @@ from .const_zephyr import (
ZigbeeBinarySensor = zigbee_ns.class_("ZigbeeBinarySensor", cg.Component)
ZigbeeSensor = zigbee_ns.class_("ZigbeeSensor", cg.Component)
ZigbeeSwitch = zigbee_ns.class_("ZigbeeSwitch", cg.Component)
ZigbeeNumber = zigbee_ns.class_("ZigbeeNumber", cg.Component)

# BACnet engineering units mapping (ZCL uses BACnet unit codes)
# See: https://github.com/zigpy/zha/blob/dev/zha/application/platforms/number/bacnet.py
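For reference, the ZCL EngineeringUnits attribute is a 16-bit enum (see the attribute declarations earlier in this diff) whose values come from BACnet's EngineeringUnits table. A few representative codes as a sketch; BACNET_UNIT_NO_UNITS mirrors the constant used in the codegen above, while the other names are hypothetical and the values are quoted from the BACnet standard:

#include <cstdint>

// Representative BACnet engineering-unit codes from the standard's
// EngineeringUnits enumeration; the authoritative mapping is the
// BACNET_UNITS dict in the Python codegen.
enum BacnetUnit : uint16_t {
  BACNET_UNIT_DEGREES_CELSIUS = 62,
  BACNET_UNIT_NO_UNITS = 95,
  BACNET_UNIT_PERCENT = 98,
};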
@@ -138,6 +143,15 @@ zephyr_switch = cv.Schema(
    }
)

zephyr_number = cv.Schema(
    {
        cv.OnlyWith(CONF_ZIGBEE_ID, ["nrf52", "zigbee"]): cv.use_id(ZigbeeComponent),
        cv.OnlyWith(CONF_ZIGBEE_NUMBER, ["nrf52", "zigbee"]): cv.declare_id(
            ZigbeeNumber
        ),
    }
)


async def zephyr_to_code(config: ConfigType) -> None:
    zephyr_add_prj_conf("ZIGBEE", True)
@@ -152,6 +166,13 @@ async def zephyr_to_code(config: ConfigType) -> None:
    zephyr_add_prj_conf("NET_IP_ADDR_CHECK", False)
    zephyr_add_prj_conf("NET_UDP", False)

    if CONF_IEEE802154_VENDOR_OUI in config:
        zephyr_add_prj_conf("IEEE802154_VENDOR_OUI_ENABLE", True)
        random_number = config[CONF_IEEE802154_VENDOR_OUI]
        if random_number == "random":
            random_number = random.randint(0x000000, 0xFFFFFF)
        zephyr_add_prj_conf("IEEE802154_VENDOR_OUI", random_number)

    if config[CONF_WIPE_ON_BOOT]:
        if config[CONF_WIPE_ON_BOOT] == "once":
            cg.add_define(
@@ -336,14 +357,24 @@ async def zephyr_setup_switch(entity: cg.MockObj, config: ConfigType) -> None:
    CORE.add_job(_add_switch, entity, config)


-def _slot_index() -> int:
-    """Find the next available endpoint slot"""
+async def zephyr_setup_number(
+    entity: cg.MockObj,
+    config: ConfigType,
+    min_value: float,
+    max_value: float,
+    step: float,
+) -> None:
+    CORE.add_job(_add_number, entity, config, min_value, max_value, step)
+
+
+def get_slot_index() -> int:
+    """Find the next available endpoint slot."""
    slot = next(
        (i for i, v in enumerate(CORE.data[KEY_ZIGBEE][KEY_EP_NUMBER]) if v == ""), None
    )
    if slot is None:
        raise cv.Invalid(
-            f"Not found empty slot, size ({len(CORE.data[KEY_ZIGBEE][KEY_EP_NUMBER])})"
+            f"No available Zigbee endpoint slots ({len(CORE.data[KEY_ZIGBEE][KEY_EP_NUMBER])} in use)"
        )
    return slot

@@ -358,7 +389,7 @@ async def _add_zigbee_ep(
    app_device_id: str,
    extra_field_values: dict[str, int] | None = None,
) -> None:
-    slot_index = _slot_index()
+    slot_index = get_slot_index()

    prefix = f"zigbee_ep{slot_index + 1}"
    attrs_name = f"{prefix}_attrs"
@@ -443,3 +474,31 @@ async def _add_switch(entity: cg.MockObj, config: ConfigType) -> None:
        ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT,
        "ZB_HA_CUSTOM_ATTR_DEVICE_ID",
    )


async def _add_number(
    entity: cg.MockObj,
    config: ConfigType,
    min_value: float,
    max_value: float,
    step: float,
) -> None:
    # Get BACnet engineering unit from unit_of_measurement
    unit = config.get(CONF_UNIT_OF_MEASUREMENT, "")
    bacnet_unit = BACNET_UNITS.get(unit, BACNET_UNIT_NO_UNITS)

    await _add_zigbee_ep(
        entity,
        config,
        CONF_ZIGBEE_NUMBER,
        AnalogAttrsOutput,
        "ESPHOME_ZB_ZCL_DECLARE_ANALOG_OUTPUT_ATTRIB_LIST",
        ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT,
        "ZB_HA_CUSTOM_ATTR_DEVICE_ID",
        extra_field_values={
            "max_present_value": max_value,
            "min_present_value": min_value,
            "resolution": step,
            "engineering_units": bacnet_unit,
        },
    )

@@ -149,6 +149,7 @@ CONF_ASSUMED_STATE = "assumed_state"
CONF_AT = "at"
CONF_ATTENUATION = "attenuation"
CONF_ATTRIBUTE = "attribute"
CONF_AUDIO_DAC = "audio_dac"
CONF_AUTH = "auth"
CONF_AUTO_CLEAR_ENABLED = "auto_clear_enabled"
CONF_AUTO_MODE = "auto_mode"

@@ -16,7 +16,6 @@
#include <type_traits>
#include <vector>
#include <concepts>

#include <strings.h>

#include "esphome/core/optional.h"

tests/components/ch423/common.yaml (new file, 36 lines)
@@ -0,0 +1,36 @@
ch423:
  - id: ch423_hub
    i2c_id: i2c_bus

binary_sensor:
  - platform: gpio
    id: ch423_input
    name: CH423 Binary Sensor
    pin:
      ch423: ch423_hub
      number: 1
      mode: INPUT
      inverted: true
  - platform: gpio
    id: ch423_input_2
    name: CH423 Binary Sensor 2
    pin:
      ch423: ch423_hub
      number: 0
      mode: INPUT
      inverted: false
output:
  - platform: gpio
    id: ch423_out_11
    pin:
      ch423: ch423_hub
      number: 11
      mode: OUTPUT_OPEN_DRAIN
      inverted: true
  - platform: gpio
    id: ch423_out_23
    pin:
      ch423: ch423_hub
      number: 23
      mode: OUTPUT_OPEN_DRAIN
      inverted: false
tests/components/ch423/test.esp32-idf.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
packages:
  i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml

<<: !include common.yaml
tests/components/ch423/test.esp8266-ard.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
packages:
  i2c: !include ../../test_build_components/common/i2c/esp8266-ard.yaml

<<: !include common.yaml
tests/components/ch423/test.rp2040-ard.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
packages:
  i2c: !include ../../test_build_components/common/i2c/rp2040-ard.yaml

<<: !include common.yaml
@@ -8,6 +8,16 @@ esp32:
      enable_lwip_bridge_interface: true
      disable_libc_locks_in_iram: false  # Test explicit opt-out of RAM optimization
      use_full_certificate_bundle: false  # Test CMN bundle (default)
      include_builtin_idf_components:
        - freertos  # Test escape hatch (freertos is always included anyway)
      disable_debug_stubs: true
      disable_ocd_aware: true
      disable_usb_serial_jtag_secondary: true
      disable_dev_null_vfs: true
      disable_mbedtls_peer_cert: true
      disable_mbedtls_pkcs7: true
      disable_regi2c_in_iram: true
      disable_fatfs: true

wifi:
  ssid: MySSID

@@ -10,6 +10,14 @@ esp32:
    ref: 2.7.0
    advanced:
      enable_idf_experimental_features: yes
      disable_debug_stubs: true
      disable_ocd_aware: true
      disable_usb_serial_jtag_secondary: true
      disable_dev_null_vfs: true
      disable_mbedtls_peer_cert: true
      disable_mbedtls_pkcs7: true
      disable_regi2c_in_iram: true
      disable_fatfs: true

ota:
  platform: esphome

@@ -5,6 +5,14 @@ esp32:
  advanced:
    execute_from_psram: true
    disable_libc_locks_in_iram: true  # Test default RAM optimization enabled
    disable_debug_stubs: true
    disable_ocd_aware: true
    disable_usb_serial_jtag_secondary: true
    disable_dev_null_vfs: true
    disable_mbedtls_peer_cert: true
    disable_mbedtls_pkcs7: true
    disable_regi2c_in_iram: true
    disable_fatfs: true

psram:
  mode: octal

@@ -10,6 +10,7 @@ globals:
    type: int
    restore_value: true
    initial_value: "0"
    update_interval: 5s
  - id: glob_float
    type: float
    restore_value: true

@@ -4,15 +4,16 @@ interval:
  - interval: 60s
    then:
      - lambda: |-
-          // Test build_json
-          std::string json_str = esphome::json::build_json([](JsonObject root) {
+          // Test build_json - returns SerializationBuffer, use auto to avoid heap allocation
+          auto json_buf = esphome::json::build_json([](JsonObject root) {
            root["sensor"] = "temperature";
            root["value"] = 23.5;
            root["unit"] = "°C";
          });
-          ESP_LOGD("test", "Built JSON: %s", json_str.c_str());
+          ESP_LOGD("test", "Built JSON: %s", json_buf.c_str());

-          // Test parse_json
+          // Test parse_json - implicit conversion to std::string for backward compatibility
+          std::string json_str = json_buf;
          bool parse_ok = esphome::json::parse_json(json_str, [](JsonObject root) {
            if (root["sensor"].is<const char*>() && root["value"].is<float>()) {
              const char* sensor = root["sensor"];
@@ -26,10 +27,10 @@ interval:
          });
          ESP_LOGD("test", "Parse result (JSON syntax only): %s", parse_ok ? "success" : "failed");

-          // Test JsonBuilder class
+          // Test JsonBuilder class - returns SerializationBuffer
          esphome::json::JsonBuilder builder;
          JsonObject obj = builder.root();
          obj["test"] = "direct_builder";
          obj["count"] = 42;
-          std::string result = builder.serialize();
+          auto result = builder.serialize();
          ESP_LOGD("test", "JsonBuilder result: %s", result.c_str());

@@ -6,10 +6,6 @@ binary_sensor:
    name: "Garage Door Open 2"
  - platform: template
    name: "Garage Door Open 3"
  - platform: template
    name: "Garage Door Open 4"
  - platform: template
    name: "Garage Door Open 5"
  - platform: template
    name: "Garage Door Internal"
    internal: True
@@ -21,6 +17,10 @@ sensor:
  - platform: template
    name: "Analog 2"
    lambda: return 11.0;
  - platform: template
    name: "Analog 3"
    lambda: return 12.0;
    internal: True

zigbee:
  wipe_on_boot: true
@@ -35,7 +35,18 @@ output:
    write_action:
      - zigbee.factory_reset

time:
  - platform: zigbee

switch:
  - platform: template
    name: "Template Switch"
    optimistic: true

number:
  - platform: template
    name: "Template number"
    optimistic: true
    min_value: 2
    max_value: 100
    step: 1

@@ -3,3 +3,4 @@
zigbee:
  wipe_on_boot: once
  power_source: battery
  ieee802154_vendor_oui: 0x231

tests/unit_tests/components/test_ch423.py (new file, 58 lines)
@@ -0,0 +1,58 @@
"""Tests for ch423 component validation."""

from unittest.mock import patch

from esphome import config, yaml_util
from esphome.core import CORE


def test_ch423_mixed_gpio_modes_fails(tmp_path, capsys):
    """Test that mixing input/output on GPIO pins 0-7 fails validation."""
    test_file = tmp_path / "test.yaml"
    test_file.write_text("""
esphome:
  name: test

esp8266:
  board: esp01_1m

i2c:
  sda: GPIO4
  scl: GPIO5

ch423:
  - id: ch423_hub

binary_sensor:
  - platform: gpio
    name: "CH423 Input 0"
    pin:
      ch423: ch423_hub
      number: 0
      mode: input

switch:
  - platform: gpio
    name: "CH423 Output 1"
    pin:
      ch423: ch423_hub
      number: 1
      mode: output
""")

    parsed_yaml = yaml_util.load_yaml(test_file)

    with (
        patch.object(yaml_util, "load_yaml", return_value=parsed_yaml),
        patch.object(CORE, "config_path", test_file),
    ):
        result = config.read_config({})

    assert result is None, "Expected validation to fail with mixed GPIO modes"

    # Check that the error message mentions the GPIO pin restriction
    captured = capsys.readouterr()
    assert (
        "GPIO pins (0-7) must all be configured as input or all as output"
        in captured.out
    )