mirror of
https://github.com/esphome/esphome.git
synced 2026-02-01 01:12:08 -07:00
Compare commits
4 Commits
task_prio
...
chunked_fi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4e67898073 | ||
|
|
0c868cbcc5 | ||
|
|
e8ea90cb13 | ||
|
|
3744186c3d |
@@ -1 +1 @@
|
||||
069fa9526c52f7c580a9ec17c7678d12f142221387e9b561c18f95394d4629a3
|
||||
cf3d341206b4184ec8b7fe85141aef4fe4696aa720c3f8a06d4e57930574bdab
|
||||
|
||||
2
.github/actions/restore-python/action.yml
vendored
2
.github/actions/restore-python/action.yml
vendored
@@ -22,7 +22,7 @@ runs:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: venv
|
||||
# yamllint disable-line rule:line-length
|
||||
|
||||
38
.github/scripts/auto-label-pr/constants.js
vendored
38
.github/scripts/auto-label-pr/constants.js
vendored
@@ -1,38 +0,0 @@
|
||||
// Constants and markers for PR auto-labeling
|
||||
module.exports = {
|
||||
BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
|
||||
CODEOWNERS_MARKER: '<!-- codeowners-request -->',
|
||||
TOO_BIG_MARKER: '<!-- too-big-request -->',
|
||||
DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',
|
||||
|
||||
MANAGED_LABELS: [
|
||||
'new-component',
|
||||
'new-platform',
|
||||
'new-target-platform',
|
||||
'merging-to-release',
|
||||
'merging-to-beta',
|
||||
'chained-pr',
|
||||
'core',
|
||||
'small-pr',
|
||||
'dashboard',
|
||||
'github-actions',
|
||||
'by-code-owner',
|
||||
'has-tests',
|
||||
'needs-tests',
|
||||
'needs-docs',
|
||||
'needs-codeowners',
|
||||
'too-big',
|
||||
'labeller-recheck',
|
||||
'bugfix',
|
||||
'new-feature',
|
||||
'breaking-change',
|
||||
'developer-breaking-change',
|
||||
'code-quality',
|
||||
'deprecated-component'
|
||||
],
|
||||
|
||||
DOCS_PR_PATTERNS: [
|
||||
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
|
||||
/esphome\/esphome-docs#\d+/
|
||||
]
|
||||
};
|
||||
373
.github/scripts/auto-label-pr/detectors.js
vendored
373
.github/scripts/auto-label-pr/detectors.js
vendored
@@ -1,373 +0,0 @@
|
||||
const fs = require('fs');
|
||||
const { DOCS_PR_PATTERNS } = require('./constants');
|
||||
|
||||
// Strategy: Merge branch detection
|
||||
async function detectMergeBranch(context) {
|
||||
const labels = new Set();
|
||||
const baseRef = context.payload.pull_request.base.ref;
|
||||
|
||||
if (baseRef === 'release') {
|
||||
labels.add('merging-to-release');
|
||||
} else if (baseRef === 'beta') {
|
||||
labels.add('merging-to-beta');
|
||||
} else if (baseRef !== 'dev') {
|
||||
labels.add('chained-pr');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Component and platform labeling
|
||||
async function detectComponentPlatforms(changedFiles, apiData) {
|
||||
const labels = new Set();
|
||||
const componentRegex = /^esphome\/components\/([^\/]+)\//;
|
||||
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
|
||||
|
||||
for (const file of changedFiles) {
|
||||
const componentMatch = file.match(componentRegex);
|
||||
if (componentMatch) {
|
||||
labels.add(`component: ${componentMatch[1]}`);
|
||||
}
|
||||
|
||||
const platformMatch = file.match(targetPlatformRegex);
|
||||
if (platformMatch) {
|
||||
labels.add(`platform: ${platformMatch[1]}`);
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: New component detection
|
||||
async function detectNewComponents(prFiles) {
|
||||
const labels = new Set();
|
||||
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
|
||||
|
||||
for (const file of addedFiles) {
|
||||
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
|
||||
if (componentMatch) {
|
||||
try {
|
||||
const content = fs.readFileSync(file, 'utf8');
|
||||
if (content.includes('IS_TARGET_PLATFORM = True')) {
|
||||
labels.add('new-target-platform');
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`Failed to read content of ${file}:`, error.message);
|
||||
}
|
||||
labels.add('new-component');
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: New platform detection
|
||||
async function detectNewPlatforms(prFiles, apiData) {
|
||||
const labels = new Set();
|
||||
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
|
||||
|
||||
for (const file of addedFiles) {
|
||||
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
|
||||
if (platformFileMatch) {
|
||||
const [, component, platform] = platformFileMatch;
|
||||
if (apiData.platformComponents.includes(platform)) {
|
||||
labels.add('new-platform');
|
||||
}
|
||||
}
|
||||
|
||||
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
|
||||
if (platformDirMatch) {
|
||||
const [, component, platform] = platformDirMatch;
|
||||
if (apiData.platformComponents.includes(platform)) {
|
||||
labels.add('new-platform');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Core files detection
|
||||
async function detectCoreChanges(changedFiles) {
|
||||
const labels = new Set();
|
||||
const coreFiles = changedFiles.filter(file =>
|
||||
file.startsWith('esphome/core/') ||
|
||||
(file.startsWith('esphome/') && file.split('/').length === 2)
|
||||
);
|
||||
|
||||
if (coreFiles.length > 0) {
|
||||
labels.add('core');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: PR size detection
|
||||
async function detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD) {
|
||||
const labels = new Set();
|
||||
|
||||
if (totalChanges <= SMALL_PR_THRESHOLD) {
|
||||
labels.add('small-pr');
|
||||
return labels;
|
||||
}
|
||||
|
||||
const testAdditions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const testDeletions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
|
||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
||||
|
||||
// Don't add too-big if mega-pr label is already present
|
||||
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
|
||||
labels.add('too-big');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Dashboard changes
|
||||
async function detectDashboardChanges(changedFiles) {
|
||||
const labels = new Set();
|
||||
const dashboardFiles = changedFiles.filter(file =>
|
||||
file.startsWith('esphome/dashboard/') ||
|
||||
file.startsWith('esphome/components/dashboard_import/')
|
||||
);
|
||||
|
||||
if (dashboardFiles.length > 0) {
|
||||
labels.add('dashboard');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: GitHub Actions changes
|
||||
async function detectGitHubActionsChanges(changedFiles) {
|
||||
const labels = new Set();
|
||||
const githubActionsFiles = changedFiles.filter(file =>
|
||||
file.startsWith('.github/workflows/')
|
||||
);
|
||||
|
||||
if (githubActionsFiles.length > 0) {
|
||||
labels.add('github-actions');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Code owner detection
|
||||
async function detectCodeOwner(github, context, changedFiles) {
|
||||
const labels = new Set();
|
||||
const { owner, repo } = context.repo;
|
||||
|
||||
try {
|
||||
const { data: codeownersFile } = await github.rest.repos.getContent({
|
||||
owner,
|
||||
repo,
|
||||
path: 'CODEOWNERS',
|
||||
});
|
||||
|
||||
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
|
||||
const prAuthor = context.payload.pull_request.user.login;
|
||||
|
||||
const codeownersLines = codeownersContent.split('\n')
|
||||
.map(line => line.trim())
|
||||
.filter(line => line && !line.startsWith('#'));
|
||||
|
||||
const codeownersRegexes = codeownersLines.map(line => {
|
||||
const parts = line.split(/\s+/);
|
||||
const pattern = parts[0];
|
||||
const owners = parts.slice(1);
|
||||
|
||||
let regex;
|
||||
if (pattern.endsWith('*')) {
|
||||
const dir = pattern.slice(0, -1);
|
||||
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
|
||||
} else if (pattern.includes('*')) {
|
||||
// First escape all regex special chars except *, then replace * with .*
|
||||
const regexPattern = pattern
|
||||
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
|
||||
.replace(/\*/g, '.*');
|
||||
regex = new RegExp(`^${regexPattern}$`);
|
||||
} else {
|
||||
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
|
||||
}
|
||||
|
||||
return { regex, owners };
|
||||
});
|
||||
|
||||
for (const file of changedFiles) {
|
||||
for (const { regex, owners } of codeownersRegexes) {
|
||||
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
|
||||
labels.add('by-code-owner');
|
||||
return labels;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Failed to read or parse CODEOWNERS file:', error.message);
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Test detection
|
||||
async function detectTests(changedFiles) {
|
||||
const labels = new Set();
|
||||
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
|
||||
|
||||
if (testFiles.length > 0) {
|
||||
labels.add('has-tests');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: PR Template Checkbox detection
|
||||
async function detectPRTemplateCheckboxes(context) {
|
||||
const labels = new Set();
|
||||
const prBody = context.payload.pull_request.body || '';
|
||||
|
||||
console.log('Checking PR template checkboxes...');
|
||||
|
||||
// Check for checked checkboxes in the "Types of changes" section
|
||||
const checkboxPatterns = [
|
||||
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
|
||||
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
|
||||
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
|
||||
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
|
||||
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
|
||||
];
|
||||
|
||||
for (const { pattern, label } of checkboxPatterns) {
|
||||
if (pattern.test(prBody)) {
|
||||
console.log(`Found checked checkbox for: ${label}`);
|
||||
labels.add(label);
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Deprecated component detection
|
||||
async function detectDeprecatedComponents(github, context, changedFiles) {
|
||||
const labels = new Set();
|
||||
const deprecatedInfo = [];
|
||||
const { owner, repo } = context.repo;
|
||||
|
||||
// Compile regex once for better performance
|
||||
const componentFileRegex = /^esphome\/components\/([^\/]+)\//;
|
||||
|
||||
// Get files that are modified or added in components directory
|
||||
const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));
|
||||
|
||||
if (componentFiles.length === 0) {
|
||||
return { labels, deprecatedInfo };
|
||||
}
|
||||
|
||||
// Extract unique component names using the same regex
|
||||
const components = new Set();
|
||||
for (const file of componentFiles) {
|
||||
const match = file.match(componentFileRegex);
|
||||
if (match) {
|
||||
components.add(match[1]);
|
||||
}
|
||||
}
|
||||
|
||||
// Get PR head to fetch files from the PR branch
|
||||
const prNumber = context.payload.pull_request.number;
|
||||
|
||||
// Check each component's __init__.py for DEPRECATED_COMPONENT constant
|
||||
for (const component of components) {
|
||||
const initFile = `esphome/components/${component}/__init__.py`;
|
||||
try {
|
||||
// Fetch file content from PR head using GitHub API
|
||||
const { data: fileData } = await github.rest.repos.getContent({
|
||||
owner,
|
||||
repo,
|
||||
path: initFile,
|
||||
ref: `refs/pull/${prNumber}/head`
|
||||
});
|
||||
|
||||
// Decode base64 content
|
||||
const content = Buffer.from(fileData.content, 'base64').toString('utf8');
|
||||
|
||||
// Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
|
||||
// Support single quotes, double quotes, and triple quotes (for multiline)
|
||||
const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
|
||||
content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
|
||||
const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
|
||||
content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
|
||||
const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;
|
||||
|
||||
if (deprecatedMatch) {
|
||||
labels.add('deprecated-component');
|
||||
deprecatedInfo.push({
|
||||
component: component,
|
||||
message: deprecatedMatch[1].trim()
|
||||
});
|
||||
console.log(`Found deprecated component: ${component}`);
|
||||
}
|
||||
} catch (error) {
|
||||
// Only log if it's not a simple "file not found" error (404)
|
||||
if (error.status !== 404) {
|
||||
console.log(`Error reading ${initFile}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { labels, deprecatedInfo };
|
||||
}
|
||||
|
||||
// Strategy: Requirements detection
|
||||
async function detectRequirements(allLabels, prFiles, context) {
|
||||
const labels = new Set();
|
||||
|
||||
// Check for missing tests
|
||||
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
|
||||
labels.add('needs-tests');
|
||||
}
|
||||
|
||||
// Check for missing docs
|
||||
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
|
||||
const prBody = context.payload.pull_request.body || '';
|
||||
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
|
||||
|
||||
if (!hasDocsLink) {
|
||||
labels.add('needs-docs');
|
||||
}
|
||||
}
|
||||
|
||||
// Check for missing CODEOWNERS
|
||||
if (allLabels.has('new-component')) {
|
||||
const codeownersModified = prFiles.some(file =>
|
||||
file.filename === 'CODEOWNERS' &&
|
||||
(file.status === 'modified' || file.status === 'added') &&
|
||||
(file.additions || 0) > 0
|
||||
);
|
||||
|
||||
if (!codeownersModified) {
|
||||
labels.add('needs-codeowners');
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
detectMergeBranch,
|
||||
detectComponentPlatforms,
|
||||
detectNewComponents,
|
||||
detectNewPlatforms,
|
||||
detectCoreChanges,
|
||||
detectPRSize,
|
||||
detectDashboardChanges,
|
||||
detectGitHubActionsChanges,
|
||||
detectCodeOwner,
|
||||
detectTests,
|
||||
detectPRTemplateCheckboxes,
|
||||
detectDeprecatedComponents,
|
||||
detectRequirements
|
||||
};
|
||||
187
.github/scripts/auto-label-pr/index.js
vendored
187
.github/scripts/auto-label-pr/index.js
vendored
@@ -1,187 +0,0 @@
|
||||
const { MANAGED_LABELS } = require('./constants');
|
||||
const {
|
||||
detectMergeBranch,
|
||||
detectComponentPlatforms,
|
||||
detectNewComponents,
|
||||
detectNewPlatforms,
|
||||
detectCoreChanges,
|
||||
detectPRSize,
|
||||
detectDashboardChanges,
|
||||
detectGitHubActionsChanges,
|
||||
detectCodeOwner,
|
||||
detectTests,
|
||||
detectPRTemplateCheckboxes,
|
||||
detectDeprecatedComponents,
|
||||
detectRequirements
|
||||
} = require('./detectors');
|
||||
const { handleReviews } = require('./reviews');
|
||||
const { applyLabels, removeOldLabels } = require('./labels');
|
||||
|
||||
// Fetch API data
|
||||
async function fetchApiData() {
|
||||
try {
|
||||
const response = await fetch('https://data.esphome.io/components.json');
|
||||
const componentsData = await response.json();
|
||||
return {
|
||||
targetPlatforms: componentsData.target_platforms || [],
|
||||
platformComponents: componentsData.platform_components || []
|
||||
};
|
||||
} catch (error) {
|
||||
console.log('Failed to fetch components data from API:', error.message);
|
||||
return { targetPlatforms: [], platformComponents: [] };
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = async ({ github, context }) => {
|
||||
// Environment variables
|
||||
const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD);
|
||||
const MAX_LABELS = parseInt(process.env.MAX_LABELS);
|
||||
const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);
|
||||
const COMPONENT_LABEL_THRESHOLD = parseInt(process.env.COMPONENT_LABEL_THRESHOLD);
|
||||
|
||||
// Global state
|
||||
const { owner, repo } = context.repo;
|
||||
const pr_number = context.issue.number;
|
||||
|
||||
// Get current labels and PR data
|
||||
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number
|
||||
});
|
||||
const currentLabels = currentLabelsData.map(label => label.name);
|
||||
const managedLabels = currentLabels.filter(label =>
|
||||
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
|
||||
);
|
||||
|
||||
// Check for mega-PR early - if present, skip most automatic labeling
|
||||
const isMegaPR = currentLabels.includes('mega-pr');
|
||||
|
||||
// Get all PR files with automatic pagination
|
||||
const prFiles = await github.paginate(
|
||||
github.rest.pulls.listFiles,
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number
|
||||
}
|
||||
);
|
||||
|
||||
// Calculate data from PR files
|
||||
const changedFiles = prFiles.map(file => file.filename);
|
||||
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
const totalChanges = totalAdditions + totalDeletions;
|
||||
|
||||
console.log('Current labels:', currentLabels.join(', '));
|
||||
console.log('Changed files:', changedFiles.length);
|
||||
console.log('Total changes:', totalChanges);
|
||||
if (isMegaPR) {
|
||||
console.log('Mega-PR detected - applying limited labeling logic');
|
||||
}
|
||||
|
||||
// Fetch API data
|
||||
const apiData = await fetchApiData();
|
||||
const baseRef = context.payload.pull_request.base.ref;
|
||||
|
||||
// Early exit for release and beta branches only
|
||||
if (baseRef === 'release' || baseRef === 'beta') {
|
||||
const branchLabels = await detectMergeBranch(context);
|
||||
const finalLabels = Array.from(branchLabels);
|
||||
|
||||
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
|
||||
|
||||
// Apply labels
|
||||
await applyLabels(github, context, finalLabels);
|
||||
|
||||
// Remove old managed labels
|
||||
await removeOldLabels(github, context, managedLabels, finalLabels);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Run all strategies
|
||||
const [
|
||||
branchLabels,
|
||||
componentLabels,
|
||||
newComponentLabels,
|
||||
newPlatformLabels,
|
||||
coreLabels,
|
||||
sizeLabels,
|
||||
dashboardLabels,
|
||||
actionsLabels,
|
||||
codeOwnerLabels,
|
||||
testLabels,
|
||||
checkboxLabels,
|
||||
deprecatedResult
|
||||
] = await Promise.all([
|
||||
detectMergeBranch(context),
|
||||
detectComponentPlatforms(changedFiles, apiData),
|
||||
detectNewComponents(prFiles),
|
||||
detectNewPlatforms(prFiles, apiData),
|
||||
detectCoreChanges(changedFiles),
|
||||
detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD),
|
||||
detectDashboardChanges(changedFiles),
|
||||
detectGitHubActionsChanges(changedFiles),
|
||||
detectCodeOwner(github, context, changedFiles),
|
||||
detectTests(changedFiles),
|
||||
detectPRTemplateCheckboxes(context),
|
||||
detectDeprecatedComponents(github, context, changedFiles)
|
||||
]);
|
||||
|
||||
// Extract deprecated component info
|
||||
const deprecatedLabels = deprecatedResult.labels;
|
||||
const deprecatedInfo = deprecatedResult.deprecatedInfo;
|
||||
|
||||
// Combine all labels
|
||||
const allLabels = new Set([
|
||||
...branchLabels,
|
||||
...componentLabels,
|
||||
...newComponentLabels,
|
||||
...newPlatformLabels,
|
||||
...coreLabels,
|
||||
...sizeLabels,
|
||||
...dashboardLabels,
|
||||
...actionsLabels,
|
||||
...codeOwnerLabels,
|
||||
...testLabels,
|
||||
...checkboxLabels,
|
||||
...deprecatedLabels
|
||||
]);
|
||||
|
||||
// Detect requirements based on all other labels
|
||||
const requirementLabels = await detectRequirements(allLabels, prFiles, context);
|
||||
for (const label of requirementLabels) {
|
||||
allLabels.add(label);
|
||||
}
|
||||
|
||||
let finalLabels = Array.from(allLabels);
|
||||
|
||||
// For mega-PRs, exclude component labels if there are too many
|
||||
if (isMegaPR) {
|
||||
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
|
||||
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
|
||||
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
|
||||
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle too many labels (only for non-mega PRs)
|
||||
const tooManyLabels = finalLabels.length > MAX_LABELS;
|
||||
const originalLabelCount = finalLabels.length;
|
||||
|
||||
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
|
||||
finalLabels = ['too-big'];
|
||||
}
|
||||
|
||||
console.log('Computed labels:', finalLabels.join(', '));
|
||||
|
||||
// Handle reviews
|
||||
await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);
|
||||
|
||||
// Apply labels
|
||||
await applyLabels(github, context, finalLabels);
|
||||
|
||||
// Remove old managed labels
|
||||
await removeOldLabels(github, context, managedLabels, finalLabels);
|
||||
};
|
||||
41
.github/scripts/auto-label-pr/labels.js
vendored
41
.github/scripts/auto-label-pr/labels.js
vendored
@@ -1,41 +0,0 @@
|
||||
// Apply labels to PR
|
||||
async function applyLabels(github, context, finalLabels) {
|
||||
const { owner, repo } = context.repo;
|
||||
const pr_number = context.issue.number;
|
||||
|
||||
if (finalLabels.length > 0) {
|
||||
console.log(`Adding labels: ${finalLabels.join(', ')}`);
|
||||
await github.rest.issues.addLabels({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number,
|
||||
labels: finalLabels
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Remove old managed labels
|
||||
async function removeOldLabels(github, context, managedLabels, finalLabels) {
|
||||
const { owner, repo } = context.repo;
|
||||
const pr_number = context.issue.number;
|
||||
|
||||
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
|
||||
for (const label of labelsToRemove) {
|
||||
console.log(`Removing label: ${label}`);
|
||||
try {
|
||||
await github.rest.issues.removeLabel({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number,
|
||||
name: label
|
||||
});
|
||||
} catch (error) {
|
||||
console.log(`Failed to remove label ${label}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
applyLabels,
|
||||
removeOldLabels
|
||||
};
|
||||
141
.github/scripts/auto-label-pr/reviews.js
vendored
141
.github/scripts/auto-label-pr/reviews.js
vendored
@@ -1,141 +0,0 @@
|
||||
const {
|
||||
BOT_COMMENT_MARKER,
|
||||
CODEOWNERS_MARKER,
|
||||
TOO_BIG_MARKER,
|
||||
DEPRECATED_COMPONENT_MARKER
|
||||
} = require('./constants');
|
||||
|
||||
// Generate review messages
|
||||
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
|
||||
const messages = [];
|
||||
|
||||
// Deprecated component message
|
||||
if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
|
||||
let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
|
||||
message += `Hey there @${prAuthor},\n`;
|
||||
message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;
|
||||
|
||||
for (const info of deprecatedInfo) {
|
||||
message += `#### Component: \`${info.component}\`\n`;
|
||||
message += `${info.message}\n\n`;
|
||||
}
|
||||
|
||||
message += `Consider migrating to the recommended alternative if applicable.`;
|
||||
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
// Too big message
|
||||
if (finalLabels.includes('too-big')) {
|
||||
const testAdditions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const testDeletions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
||||
|
||||
const tooManyLabels = originalLabelCount > MAX_LABELS;
|
||||
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
|
||||
|
||||
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
|
||||
|
||||
if (tooManyLabels && tooManyChanges) {
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
|
||||
} else if (tooManyLabels) {
|
||||
message += `This PR affects ${originalLabelCount} different components/areas.`;
|
||||
} else {
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
|
||||
}
|
||||
|
||||
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
|
||||
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
|
||||
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
// CODEOWNERS message
|
||||
if (finalLabels.includes('needs-codeowners')) {
|
||||
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
|
||||
`Hey there @${prAuthor},\n` +
|
||||
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
|
||||
`This way we can notify you if a bug report for this integration is reported.\n\n` +
|
||||
`In \`__init__.py\` of the integration, please add:\n\n` +
|
||||
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
|
||||
`And run \`script/build_codeowners.py\``;
|
||||
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
// Handle reviews
|
||||
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
|
||||
const { owner, repo } = context.repo;
|
||||
const pr_number = context.issue.number;
|
||||
const prAuthor = context.payload.pull_request.user.login;
|
||||
|
||||
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
|
||||
const hasReviewableLabels = finalLabels.some(label =>
|
||||
['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
|
||||
);
|
||||
|
||||
const { data: reviews } = await github.rest.pulls.listReviews({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number
|
||||
});
|
||||
|
||||
const botReviews = reviews.filter(review =>
|
||||
review.user.type === 'Bot' &&
|
||||
review.state === 'CHANGES_REQUESTED' &&
|
||||
review.body && review.body.includes(BOT_COMMENT_MARKER)
|
||||
);
|
||||
|
||||
if (hasReviewableLabels) {
|
||||
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
|
||||
|
||||
if (botReviews.length > 0) {
|
||||
// Update existing review
|
||||
await github.rest.pulls.updateReview({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number,
|
||||
review_id: botReviews[0].id,
|
||||
body: reviewBody
|
||||
});
|
||||
console.log('Updated existing bot review');
|
||||
} else {
|
||||
// Create new review
|
||||
await github.rest.pulls.createReview({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number,
|
||||
body: reviewBody,
|
||||
event: 'REQUEST_CHANGES'
|
||||
});
|
||||
console.log('Created new bot review');
|
||||
}
|
||||
} else if (botReviews.length > 0) {
|
||||
// Dismiss existing reviews
|
||||
for (const review of botReviews) {
|
||||
try {
|
||||
await github.rest.pulls.dismissReview({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number,
|
||||
review_id: review.id,
|
||||
message: 'Review dismissed: All requirements have been met'
|
||||
});
|
||||
console.log(`Dismissed bot review ${review.id}`);
|
||||
} catch (error) {
|
||||
console.log(`Failed to dismiss review ${review.id}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
handleReviews
|
||||
};
|
||||
632
.github/workflows/auto-label-pr.yml
vendored
632
.github/workflows/auto-label-pr.yml
vendored
@@ -36,5 +36,633 @@ jobs:
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
const script = require('./.github/scripts/auto-label-pr/index.js');
|
||||
await script({ github, context });
|
||||
const fs = require('fs');
|
||||
|
||||
// Constants
|
||||
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
|
||||
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
|
||||
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
|
||||
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
|
||||
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
|
||||
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
|
||||
const TOO_BIG_MARKER = '<!-- too-big-request -->';
|
||||
|
||||
const MANAGED_LABELS = [
|
||||
'new-component',
|
||||
'new-platform',
|
||||
'new-target-platform',
|
||||
'merging-to-release',
|
||||
'merging-to-beta',
|
||||
'chained-pr',
|
||||
'core',
|
||||
'small-pr',
|
||||
'dashboard',
|
||||
'github-actions',
|
||||
'by-code-owner',
|
||||
'has-tests',
|
||||
'needs-tests',
|
||||
'needs-docs',
|
||||
'needs-codeowners',
|
||||
'too-big',
|
||||
'labeller-recheck',
|
||||
'bugfix',
|
||||
'new-feature',
|
||||
'breaking-change',
|
||||
'developer-breaking-change',
|
||||
'code-quality'
|
||||
];
|
||||
|
||||
const DOCS_PR_PATTERNS = [
|
||||
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
|
||||
/esphome\/esphome-docs#\d+/
|
||||
];
|
||||
|
||||
// Global state
|
||||
const { owner, repo } = context.repo;
|
||||
const pr_number = context.issue.number;
|
||||
|
||||
// Get current labels and PR data
|
||||
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number
|
||||
});
|
||||
const currentLabels = currentLabelsData.map(label => label.name);
|
||||
const managedLabels = currentLabels.filter(label =>
|
||||
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
|
||||
);
|
||||
|
||||
// Check for mega-PR early - if present, skip most automatic labeling
|
||||
const isMegaPR = currentLabels.includes('mega-pr');
|
||||
|
||||
// Get all PR files with automatic pagination
|
||||
const prFiles = await github.paginate(
|
||||
github.rest.pulls.listFiles,
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number
|
||||
}
|
||||
);
|
||||
|
||||
// Calculate data from PR files
|
||||
const changedFiles = prFiles.map(file => file.filename);
|
||||
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
const totalChanges = totalAdditions + totalDeletions;
|
||||
|
||||
console.log('Current labels:', currentLabels.join(', '));
|
||||
console.log('Changed files:', changedFiles.length);
|
||||
console.log('Total changes:', totalChanges);
|
||||
if (isMegaPR) {
|
||||
console.log('Mega-PR detected - applying limited labeling logic');
|
||||
}
|
||||
|
||||
// Fetch API data
|
||||
async function fetchApiData() {
|
||||
try {
|
||||
const response = await fetch('https://data.esphome.io/components.json');
|
||||
const componentsData = await response.json();
|
||||
return {
|
||||
targetPlatforms: componentsData.target_platforms || [],
|
||||
platformComponents: componentsData.platform_components || []
|
||||
};
|
||||
} catch (error) {
|
||||
console.log('Failed to fetch components data from API:', error.message);
|
||||
return { targetPlatforms: [], platformComponents: [] };
|
||||
}
|
||||
}
|
||||
|
||||
// Strategy: Merge branch detection
|
||||
async function detectMergeBranch() {
|
||||
const labels = new Set();
|
||||
const baseRef = context.payload.pull_request.base.ref;
|
||||
|
||||
if (baseRef === 'release') {
|
||||
labels.add('merging-to-release');
|
||||
} else if (baseRef === 'beta') {
|
||||
labels.add('merging-to-beta');
|
||||
} else if (baseRef !== 'dev') {
|
||||
labels.add('chained-pr');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Component and platform labeling
|
||||
async function detectComponentPlatforms(apiData) {
|
||||
const labels = new Set();
|
||||
const componentRegex = /^esphome\/components\/([^\/]+)\//;
|
||||
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
|
||||
|
||||
for (const file of changedFiles) {
|
||||
const componentMatch = file.match(componentRegex);
|
||||
if (componentMatch) {
|
||||
labels.add(`component: ${componentMatch[1]}`);
|
||||
}
|
||||
|
||||
const platformMatch = file.match(targetPlatformRegex);
|
||||
if (platformMatch) {
|
||||
labels.add(`platform: ${platformMatch[1]}`);
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: New component detection
|
||||
async function detectNewComponents() {
|
||||
const labels = new Set();
|
||||
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
|
||||
|
||||
for (const file of addedFiles) {
|
||||
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
|
||||
if (componentMatch) {
|
||||
try {
|
||||
const content = fs.readFileSync(file, 'utf8');
|
||||
if (content.includes('IS_TARGET_PLATFORM = True')) {
|
||||
labels.add('new-target-platform');
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`Failed to read content of ${file}:`, error.message);
|
||||
}
|
||||
labels.add('new-component');
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: New platform detection
|
||||
async function detectNewPlatforms(apiData) {
|
||||
const labels = new Set();
|
||||
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
|
||||
|
||||
for (const file of addedFiles) {
|
||||
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
|
||||
if (platformFileMatch) {
|
||||
const [, component, platform] = platformFileMatch;
|
||||
if (apiData.platformComponents.includes(platform)) {
|
||||
labels.add('new-platform');
|
||||
}
|
||||
}
|
||||
|
||||
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
|
||||
if (platformDirMatch) {
|
||||
const [, component, platform] = platformDirMatch;
|
||||
if (apiData.platformComponents.includes(platform)) {
|
||||
labels.add('new-platform');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Core files detection
|
||||
async function detectCoreChanges() {
|
||||
const labels = new Set();
|
||||
const coreFiles = changedFiles.filter(file =>
|
||||
file.startsWith('esphome/core/') ||
|
||||
(file.startsWith('esphome/') && file.split('/').length === 2)
|
||||
);
|
||||
|
||||
if (coreFiles.length > 0) {
|
||||
labels.add('core');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: PR size detection
|
||||
async function detectPRSize() {
|
||||
const labels = new Set();
|
||||
|
||||
if (totalChanges <= SMALL_PR_THRESHOLD) {
|
||||
labels.add('small-pr');
|
||||
return labels;
|
||||
}
|
||||
|
||||
const testAdditions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const testDeletions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
|
||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
||||
|
||||
// Don't add too-big if mega-pr label is already present
|
||||
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
|
||||
labels.add('too-big');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Dashboard changes
|
||||
async function detectDashboardChanges() {
|
||||
const labels = new Set();
|
||||
const dashboardFiles = changedFiles.filter(file =>
|
||||
file.startsWith('esphome/dashboard/') ||
|
||||
file.startsWith('esphome/components/dashboard_import/')
|
||||
);
|
||||
|
||||
if (dashboardFiles.length > 0) {
|
||||
labels.add('dashboard');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: GitHub Actions changes
|
||||
async function detectGitHubActionsChanges() {
|
||||
const labels = new Set();
|
||||
const githubActionsFiles = changedFiles.filter(file =>
|
||||
file.startsWith('.github/workflows/')
|
||||
);
|
||||
|
||||
if (githubActionsFiles.length > 0) {
|
||||
labels.add('github-actions');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Code owner detection
|
||||
async function detectCodeOwner() {
|
||||
const labels = new Set();
|
||||
|
||||
try {
|
||||
const { data: codeownersFile } = await github.rest.repos.getContent({
|
||||
owner,
|
||||
repo,
|
||||
path: 'CODEOWNERS',
|
||||
});
|
||||
|
||||
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
|
||||
const prAuthor = context.payload.pull_request.user.login;
|
||||
|
||||
const codeownersLines = codeownersContent.split('\n')
|
||||
.map(line => line.trim())
|
||||
.filter(line => line && !line.startsWith('#'));
|
||||
|
||||
const codeownersRegexes = codeownersLines.map(line => {
|
||||
const parts = line.split(/\s+/);
|
||||
const pattern = parts[0];
|
||||
const owners = parts.slice(1);
|
||||
|
||||
let regex;
|
||||
if (pattern.endsWith('*')) {
|
||||
const dir = pattern.slice(0, -1);
|
||||
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
|
||||
} else if (pattern.includes('*')) {
|
||||
// First escape all regex special chars except *, then replace * with .*
|
||||
const regexPattern = pattern
|
||||
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
|
||||
.replace(/\*/g, '.*');
|
||||
regex = new RegExp(`^${regexPattern}$`);
|
||||
} else {
|
||||
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
|
||||
}
|
||||
|
||||
return { regex, owners };
|
||||
});
|
||||
|
||||
for (const file of changedFiles) {
|
||||
for (const { regex, owners } of codeownersRegexes) {
|
||||
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
|
||||
labels.add('by-code-owner');
|
||||
return labels;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Failed to read or parse CODEOWNERS file:', error.message);
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Test detection
|
||||
async function detectTests() {
|
||||
const labels = new Set();
|
||||
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
|
||||
|
||||
if (testFiles.length > 0) {
|
||||
labels.add('has-tests');
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: PR Template Checkbox detection
|
||||
async function detectPRTemplateCheckboxes() {
|
||||
const labels = new Set();
|
||||
const prBody = context.payload.pull_request.body || '';
|
||||
|
||||
console.log('Checking PR template checkboxes...');
|
||||
|
||||
// Check for checked checkboxes in the "Types of changes" section
|
||||
const checkboxPatterns = [
|
||||
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
|
||||
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
|
||||
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
|
||||
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
|
||||
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
|
||||
];
|
||||
|
||||
for (const { pattern, label } of checkboxPatterns) {
|
||||
if (pattern.test(prBody)) {
|
||||
console.log(`Found checked checkbox for: ${label}`);
|
||||
labels.add(label);
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Requirements detection
|
||||
async function detectRequirements(allLabels) {
|
||||
const labels = new Set();
|
||||
|
||||
// Check for missing tests
|
||||
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
|
||||
labels.add('needs-tests');
|
||||
}
|
||||
|
||||
// Check for missing docs
|
||||
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
|
||||
const prBody = context.payload.pull_request.body || '';
|
||||
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
|
||||
|
||||
if (!hasDocsLink) {
|
||||
labels.add('needs-docs');
|
||||
}
|
||||
}
|
||||
|
||||
// Check for missing CODEOWNERS
|
||||
if (allLabels.has('new-component')) {
|
||||
const codeownersModified = prFiles.some(file =>
|
||||
file.filename === 'CODEOWNERS' &&
|
||||
(file.status === 'modified' || file.status === 'added') &&
|
||||
(file.additions || 0) > 0
|
||||
);
|
||||
|
||||
if (!codeownersModified) {
|
||||
labels.add('needs-codeowners');
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Generate review messages
|
||||
function generateReviewMessages(finalLabels, originalLabelCount) {
|
||||
const messages = [];
|
||||
const prAuthor = context.payload.pull_request.user.login;
|
||||
|
||||
// Too big message
|
||||
if (finalLabels.includes('too-big')) {
|
||||
const testAdditions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const testDeletions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
||||
|
||||
const tooManyLabels = originalLabelCount > MAX_LABELS;
|
||||
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
|
||||
|
||||
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
|
||||
|
||||
if (tooManyLabels && tooManyChanges) {
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
|
||||
} else if (tooManyLabels) {
|
||||
message += `This PR affects ${originalLabelCount} different components/areas.`;
|
||||
} else {
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
|
||||
}
|
||||
|
||||
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
|
||||
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
|
||||
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
// CODEOWNERS message
|
||||
if (finalLabels.includes('needs-codeowners')) {
|
||||
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
|
||||
`Hey there @${prAuthor},\n` +
|
||||
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
|
||||
`This way we can notify you if a bug report for this integration is reported.\n\n` +
|
||||
`In \`__init__.py\` of the integration, please add:\n\n` +
|
||||
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
|
||||
`And run \`script/build_codeowners.py\``;
|
||||
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
// Handle reviews
|
||||
async function handleReviews(finalLabels, originalLabelCount) {
|
||||
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
|
||||
const hasReviewableLabels = finalLabels.some(label =>
|
||||
['too-big', 'needs-codeowners'].includes(label)
|
||||
);
|
||||
|
||||
const { data: reviews } = await github.rest.pulls.listReviews({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number
|
||||
});
|
||||
|
||||
const botReviews = reviews.filter(review =>
|
||||
review.user.type === 'Bot' &&
|
||||
review.state === 'CHANGES_REQUESTED' &&
|
||||
review.body && review.body.includes(BOT_COMMENT_MARKER)
|
||||
);
|
||||
|
||||
if (hasReviewableLabels) {
|
||||
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
|
||||
|
||||
if (botReviews.length > 0) {
|
||||
// Update existing review
|
||||
await github.rest.pulls.updateReview({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number,
|
||||
review_id: botReviews[0].id,
|
||||
body: reviewBody
|
||||
});
|
||||
console.log('Updated existing bot review');
|
||||
} else {
|
||||
// Create new review
|
||||
await github.rest.pulls.createReview({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number,
|
||||
body: reviewBody,
|
||||
event: 'REQUEST_CHANGES'
|
||||
});
|
||||
console.log('Created new bot review');
|
||||
}
|
||||
} else if (botReviews.length > 0) {
|
||||
// Dismiss existing reviews
|
||||
for (const review of botReviews) {
|
||||
try {
|
||||
await github.rest.pulls.dismissReview({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: pr_number,
|
||||
review_id: review.id,
|
||||
message: 'Review dismissed: All requirements have been met'
|
||||
});
|
||||
console.log(`Dismissed bot review ${review.id}`);
|
||||
} catch (error) {
|
||||
console.log(`Failed to dismiss review ${review.id}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Main execution
|
||||
const apiData = await fetchApiData();
|
||||
const baseRef = context.payload.pull_request.base.ref;
|
||||
|
||||
// Early exit for release and beta branches only
|
||||
if (baseRef === 'release' || baseRef === 'beta') {
|
||||
const branchLabels = await detectMergeBranch();
|
||||
const finalLabels = Array.from(branchLabels);
|
||||
|
||||
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
|
||||
|
||||
// Apply labels
|
||||
if (finalLabels.length > 0) {
|
||||
await github.rest.issues.addLabels({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number,
|
||||
labels: finalLabels
|
||||
});
|
||||
}
|
||||
|
||||
// Remove old managed labels
|
||||
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
|
||||
for (const label of labelsToRemove) {
|
||||
try {
|
||||
await github.rest.issues.removeLabel({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number,
|
||||
name: label
|
||||
});
|
||||
} catch (error) {
|
||||
console.log(`Failed to remove label ${label}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Run all strategies
|
||||
const [
|
||||
branchLabels,
|
||||
componentLabels,
|
||||
newComponentLabels,
|
||||
newPlatformLabels,
|
||||
coreLabels,
|
||||
sizeLabels,
|
||||
dashboardLabels,
|
||||
actionsLabels,
|
||||
codeOwnerLabels,
|
||||
testLabels,
|
||||
checkboxLabels
|
||||
] = await Promise.all([
|
||||
detectMergeBranch(),
|
||||
detectComponentPlatforms(apiData),
|
||||
detectNewComponents(),
|
||||
detectNewPlatforms(apiData),
|
||||
detectCoreChanges(),
|
||||
detectPRSize(),
|
||||
detectDashboardChanges(),
|
||||
detectGitHubActionsChanges(),
|
||||
detectCodeOwner(),
|
||||
detectTests(),
|
||||
detectPRTemplateCheckboxes()
|
||||
]);
|
||||
|
||||
// Combine all labels
|
||||
const allLabels = new Set([
|
||||
...branchLabels,
|
||||
...componentLabels,
|
||||
...newComponentLabels,
|
||||
...newPlatformLabels,
|
||||
...coreLabels,
|
||||
...sizeLabels,
|
||||
...dashboardLabels,
|
||||
...actionsLabels,
|
||||
...codeOwnerLabels,
|
||||
...testLabels,
|
||||
...checkboxLabels
|
||||
]);
|
||||
|
||||
// Detect requirements based on all other labels
|
||||
const requirementLabels = await detectRequirements(allLabels);
|
||||
for (const label of requirementLabels) {
|
||||
allLabels.add(label);
|
||||
}
|
||||
|
||||
let finalLabels = Array.from(allLabels);
|
||||
|
||||
// For mega-PRs, exclude component labels if there are too many
|
||||
if (isMegaPR) {
|
||||
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
|
||||
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
|
||||
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
|
||||
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle too many labels (only for non-mega PRs)
|
||||
const tooManyLabels = finalLabels.length > MAX_LABELS;
|
||||
const originalLabelCount = finalLabels.length;
|
||||
|
||||
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
|
||||
finalLabels = ['too-big'];
|
||||
}
|
||||
|
||||
console.log('Computed labels:', finalLabels.join(', '));
|
||||
|
||||
// Handle reviews
|
||||
await handleReviews(finalLabels, originalLabelCount);
|
||||
|
||||
// Apply labels
|
||||
if (finalLabels.length > 0) {
|
||||
console.log(`Adding labels: ${finalLabels.join(', ')}`);
|
||||
await github.rest.issues.addLabels({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number,
|
||||
labels: finalLabels
|
||||
});
|
||||
}
|
||||
|
||||
// Remove old managed labels
|
||||
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
|
||||
for (const label of labelsToRemove) {
|
||||
console.log(`Removing label: ${label}`);
|
||||
try {
|
||||
await github.rest.issues.removeLabel({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: pr_number,
|
||||
name: label
|
||||
});
|
||||
} catch (error) {
|
||||
console.log(`Failed to remove label ${label}:`, error.message);
|
||||
}
|
||||
}
|
||||
|
||||
30
.github/workflows/ci.yml
vendored
30
.github/workflows/ci.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: venv
|
||||
# yamllint disable-line rule:line-length
|
||||
@@ -157,7 +157,7 @@ jobs:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Save Python virtual environment cache
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: venv
|
||||
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
|
||||
@@ -193,7 +193,7 @@ jobs:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
- name: Restore components graph cache
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: .temp/components_graph.json
|
||||
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
|
||||
@@ -223,7 +223,7 @@ jobs:
|
||||
echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
|
||||
- name: Save components graph cache
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: .temp/components_graph.json
|
||||
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
|
||||
@@ -245,7 +245,7 @@ jobs:
|
||||
python-version: "3.13"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: venv
|
||||
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
|
||||
@@ -334,14 +334,14 @@ jobs:
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref != 'refs/heads/dev'
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
|
||||
@@ -413,14 +413,14 @@ jobs:
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref != 'refs/heads/dev'
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
@@ -502,14 +502,14 @@ jobs:
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
|
||||
- name: Cache platformio
|
||||
if: github.ref != 'refs/heads/dev'
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
|
||||
@@ -735,7 +735,7 @@ jobs:
|
||||
- name: Restore cached memory analysis
|
||||
id: cache-memory-analysis
|
||||
if: steps.check-script.outputs.skip != 'true'
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: memory-analysis-target.json
|
||||
key: ${{ steps.cache-key.outputs.cache-key }}
|
||||
@@ -759,7 +759,7 @@ jobs:

- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -800,7 +800,7 @@ jobs:

- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -847,7 +847,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}

8
.github/workflows/release.yml
vendored
8
.github/workflows/release.yml
vendored
@@ -102,12 +102,12 @@ jobs:
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

- name: Log in to docker hub
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -182,13 +182,13 @@ jobs:

- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}

@@ -104,7 +104,6 @@ esphome/components/cc1101/* @gabest11 @lygris
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/ch422g/* @clydebarrow @jesterret
esphome/components/ch423/* @dwmw2
esphome/components/chsc6x/* @kkosik20
esphome/components/climate/* @esphome/core
esphome/components/climate_ir/* @glmnet
@@ -134,7 +133,6 @@ esphome/components/dfplayer/* @glmnet
esphome/components/dfrobot_sen0395/* @niklasweber
esphome/components/dht/* @OttoWinter
esphome/components/display_menu_base/* @numo68
esphome/components/dlms_meter/* @SimonFischer04
esphome/components/dps310/* @kbx81
esphome/components/ds1307/* @badbadc0ffee
esphome/components/ds2484/* @mrk-its

@@ -2,7 +2,7 @@ import logging

import esphome.codegen as cg
from esphome.components import sensor, voltage_sampler
from esphome.components.esp32 import get_esp32_variant, include_builtin_idf_component
from esphome.components.esp32 import get_esp32_variant
from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
from esphome.components.zephyr import (
zephyr_add_overlay,
@@ -118,9 +118,6 @@ async def to_code(config):
cg.add(var.set_sampling_mode(config[CONF_SAMPLING_MODE]))

if CORE.is_esp32:
# Re-enable ESP-IDF's ADC driver (excluded by default to save compile time)
include_builtin_idf_component("esp_adc")

if attenuation := config.get(CONF_ATTENUATION):
if attenuation == "auto":
cg.add(var.set_autorange(cg.global_ns.true))

@@ -1,5 +1,5 @@
import esphome.codegen as cg
from esphome.components.esp32 import add_idf_component, include_builtin_idf_component
from esphome.components.esp32 import add_idf_component
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_NUM_CHANNELS, CONF_SAMPLE_RATE
import esphome.final_validate as fv
@@ -166,9 +166,6 @@ def final_validate_audio_schema(


async def to_code(config):
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
include_builtin_idf_component("esp_http_client")

add_idf_component(
name="esphome/esp-audio-libs",
ref="2.0.3",

@@ -1,103 +0,0 @@
|
||||
from esphome import pins
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import i2c
|
||||
from esphome.components.i2c import I2CBus
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_I2C_ID,
|
||||
CONF_ID,
|
||||
CONF_INPUT,
|
||||
CONF_INVERTED,
|
||||
CONF_MODE,
|
||||
CONF_NUMBER,
|
||||
CONF_OPEN_DRAIN,
|
||||
CONF_OUTPUT,
|
||||
)
|
||||
from esphome.core import CORE
|
||||
|
||||
CODEOWNERS = ["@dwmw2"]
|
||||
DEPENDENCIES = ["i2c"]
|
||||
MULTI_CONF = True
|
||||
ch423_ns = cg.esphome_ns.namespace("ch423")
|
||||
|
||||
CH423Component = ch423_ns.class_("CH423Component", cg.Component, i2c.I2CDevice)
|
||||
CH423GPIOPin = ch423_ns.class_(
|
||||
"CH423GPIOPin", cg.GPIOPin, cg.Parented.template(CH423Component)
|
||||
)
|
||||
|
||||
CONF_CH423 = "ch423"
|
||||
|
||||
# Note that no address is configurable - each register in the CH423 has a dedicated i2c address
|
||||
CONFIG_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.GenerateID(CONF_ID): cv.declare_id(CH423Component),
|
||||
cv.GenerateID(CONF_I2C_ID): cv.use_id(I2CBus),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
await cg.register_component(var, config)
|
||||
# Can't use register_i2c_device because there is no CONF_ADDRESS
|
||||
parent = await cg.get_variable(config[CONF_I2C_ID])
|
||||
cg.add(var.set_i2c_bus(parent))
|
||||
|
||||
|
||||
# This is used as a final validation step so that modes have been fully transformed.
|
||||
def pin_mode_check(pin_config, _):
|
||||
if pin_config[CONF_MODE][CONF_INPUT] and pin_config[CONF_NUMBER] >= 8:
|
||||
raise cv.Invalid("CH423 only supports input on pins 0-7")
|
||||
if pin_config[CONF_MODE][CONF_OPEN_DRAIN] and pin_config[CONF_NUMBER] < 8:
|
||||
raise cv.Invalid("CH423 only supports open drain output on pins 8-23")
|
||||
|
||||
ch423_id = pin_config[CONF_CH423]
|
||||
pin_num = pin_config[CONF_NUMBER]
|
||||
is_output = pin_config[CONF_MODE][CONF_OUTPUT]
|
||||
is_open_drain = pin_config[CONF_MODE][CONF_OPEN_DRAIN]
|
||||
|
||||
# Track pin modes per CH423 instance in CORE.data
|
||||
ch423_modes = CORE.data.setdefault(CONF_CH423, {})
|
||||
if ch423_id not in ch423_modes:
|
||||
ch423_modes[ch423_id] = {"gpio_output": None, "gpo_open_drain": None}
|
||||
|
||||
if pin_num < 8:
|
||||
# GPIO pins (0-7): all must have same direction
|
||||
if ch423_modes[ch423_id]["gpio_output"] is None:
|
||||
ch423_modes[ch423_id]["gpio_output"] = is_output
|
||||
elif ch423_modes[ch423_id]["gpio_output"] != is_output:
|
||||
raise cv.Invalid(
|
||||
"CH423 GPIO pins (0-7) must all be configured as input or all as output"
|
||||
)
|
||||
# GPO pins (8-23): all must have same open-drain setting
|
||||
elif ch423_modes[ch423_id]["gpo_open_drain"] is None:
|
||||
ch423_modes[ch423_id]["gpo_open_drain"] = is_open_drain
|
||||
elif ch423_modes[ch423_id]["gpo_open_drain"] != is_open_drain:
|
||||
raise cv.Invalid(
|
||||
"CH423 GPO pins (8-23) must all be configured as push-pull or all as open-drain"
|
||||
)
|
||||
|
||||
|
||||
CH423_PIN_SCHEMA = pins.gpio_base_schema(
|
||||
CH423GPIOPin,
|
||||
cv.int_range(min=0, max=23),
|
||||
modes=[CONF_INPUT, CONF_OUTPUT, CONF_OPEN_DRAIN],
|
||||
).extend(
|
||||
{
|
||||
cv.Required(CONF_CH423): cv.use_id(CH423Component),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@pins.PIN_SCHEMA_REGISTRY.register(CONF_CH423, CH423_PIN_SCHEMA, pin_mode_check)
|
||||
async def ch423_pin_to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
parent = await cg.get_variable(config[CONF_CH423])
|
||||
|
||||
cg.add(var.set_parent(parent))
|
||||
|
||||
num = config[CONF_NUMBER]
|
||||
cg.add(var.set_pin(num))
|
||||
cg.add(var.set_inverted(config[CONF_INVERTED]))
|
||||
cg.add(var.set_flags(pins.gpio_flags_expr(config[CONF_MODE])))
|
||||
return var
|
||||
@@ -1,148 +0,0 @@
|
||||
#include "ch423.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/progmem.h"
|
||||
|
||||
namespace esphome::ch423 {
|
||||
|
||||
static constexpr uint8_t CH423_REG_SYS = 0x24; // Set system parameters (0x48 >> 1)
|
||||
static constexpr uint8_t CH423_SYS_IO_OE = 0x01; // IO output enable
|
||||
static constexpr uint8_t CH423_SYS_OD_EN = 0x04; // Open drain enable for OC pins
|
||||
static constexpr uint8_t CH423_REG_IO = 0x30; // Write/read IO7-IO0 (0x60 >> 1)
|
||||
static constexpr uint8_t CH423_REG_IO_RD = 0x26; // Read IO7-IO0 (0x4D >> 1, rounded down)
|
||||
static constexpr uint8_t CH423_REG_OCL = 0x22; // Write OC7-OC0 (0x44 >> 1)
|
||||
static constexpr uint8_t CH423_REG_OCH = 0x23; // Write OC15-OC8 (0x46 >> 1)
|
||||
|
||||
static const char *const TAG = "ch423";
|
||||
|
||||
void CH423Component::setup() {
|
||||
// set outputs before mode
|
||||
this->write_outputs_();
|
||||
// Set system parameters and check for errors
|
||||
bool success = this->write_reg_(CH423_REG_SYS, this->sys_params_);
|
||||
// Only read inputs if pins are configured for input (IO_OE not set)
|
||||
if (success && !(this->sys_params_ & CH423_SYS_IO_OE)) {
|
||||
success = this->read_inputs_();
|
||||
}
|
||||
if (!success) {
|
||||
ESP_LOGE(TAG, "CH423 not detected");
|
||||
this->mark_failed();
|
||||
return;
|
||||
}
|
||||
|
||||
ESP_LOGCONFIG(TAG, "Initialization complete. Warning: %d, Error: %d", this->status_has_warning(),
|
||||
this->status_has_error());
|
||||
}
|
||||
|
||||
void CH423Component::loop() {
|
||||
// Clear all the previously read flags.
|
||||
this->pin_read_flags_ = 0x00;
|
||||
}
|
||||
|
||||
void CH423Component::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, "CH423:");
|
||||
if (this->is_failed()) {
|
||||
ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
|
||||
}
|
||||
}
|
||||
|
||||
void CH423Component::pin_mode(uint8_t pin, gpio::Flags flags) {
|
||||
if (pin < 8) {
|
||||
if (flags & gpio::FLAG_OUTPUT) {
|
||||
this->sys_params_ |= CH423_SYS_IO_OE;
|
||||
}
|
||||
} else if (pin >= 8 && pin < 24) {
|
||||
if (flags & gpio::FLAG_OPEN_DRAIN) {
|
||||
this->sys_params_ |= CH423_SYS_OD_EN;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool CH423Component::digital_read(uint8_t pin) {
|
||||
if (this->pin_read_flags_ == 0 || this->pin_read_flags_ & (1 << pin)) {
|
||||
// Read values on first access or in case it's being read again in the same loop
|
||||
this->read_inputs_();
|
||||
}
|
||||
|
||||
this->pin_read_flags_ |= (1 << pin);
|
||||
return (this->input_bits_ & (1 << pin)) != 0;
|
||||
}
|
||||
|
||||
void CH423Component::digital_write(uint8_t pin, bool value) {
|
||||
if (value) {
|
||||
this->output_bits_ |= (1 << pin);
|
||||
} else {
|
||||
this->output_bits_ &= ~(1 << pin);
|
||||
}
|
||||
this->write_outputs_();
|
||||
}
|
||||
|
||||
bool CH423Component::read_inputs_() {
|
||||
if (this->is_failed()) {
|
||||
return false;
|
||||
}
|
||||
// reading inputs requires IO_OE to be 0
|
||||
if (this->sys_params_ & CH423_SYS_IO_OE) {
|
||||
return false;
|
||||
}
|
||||
uint8_t result = this->read_reg_(CH423_REG_IO_RD);
|
||||
this->input_bits_ = result;
|
||||
this->status_clear_warning();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Write a register. Can't use the standard write_byte() method because there is no single pre-configured i2c address.
|
||||
bool CH423Component::write_reg_(uint8_t reg, uint8_t value) {
|
||||
auto err = this->bus_->write_readv(reg, &value, 1, nullptr, 0);
|
||||
if (err != i2c::ERROR_OK) {
|
||||
char buf[64];
|
||||
ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("write failed for register 0x%X, error %d"), reg, err);
|
||||
this->status_set_warning(buf);
|
||||
return false;
|
||||
}
|
||||
this->status_clear_warning();
|
||||
return true;
|
||||
}
|
||||
|
||||
uint8_t CH423Component::read_reg_(uint8_t reg) {
|
||||
uint8_t value;
|
||||
auto err = this->bus_->write_readv(reg, nullptr, 0, &value, 1);
|
||||
if (err != i2c::ERROR_OK) {
|
||||
char buf[64];
|
||||
ESPHOME_snprintf_P(buf, sizeof(buf), ESPHOME_PSTR("read failed for register 0x%X, error %d"), reg, err);
|
||||
this->status_set_warning(buf);
|
||||
return 0;
|
||||
}
|
||||
this->status_clear_warning();
|
||||
return value;
|
||||
}
|
||||
|
||||
bool CH423Component::write_outputs_() {
|
||||
bool success = true;
|
||||
// Write IO7-IO0
|
||||
success &= this->write_reg_(CH423_REG_IO, static_cast<uint8_t>(this->output_bits_));
|
||||
// Write OC7-OC0
|
||||
success &= this->write_reg_(CH423_REG_OCL, static_cast<uint8_t>(this->output_bits_ >> 8));
|
||||
// Write OC15-OC8
|
||||
success &= this->write_reg_(CH423_REG_OCH, static_cast<uint8_t>(this->output_bits_ >> 16));
|
||||
return success;
|
||||
}
|
||||
|
||||
float CH423Component::get_setup_priority() const { return setup_priority::IO; }
|
||||
|
||||
// Run our loop() method very early in the loop, so that we cache read values
|
||||
// before other components call our digital_read() method.
|
||||
float CH423Component::get_loop_priority() const { return 9.0f; } // Just after WIFI
|
||||
|
||||
void CH423GPIOPin::pin_mode(gpio::Flags flags) { this->parent_->pin_mode(this->pin_, flags); }
|
||||
bool CH423GPIOPin::digital_read() { return this->parent_->digital_read(this->pin_) ^ this->inverted_; }
|
||||
|
||||
void CH423GPIOPin::digital_write(bool value) { this->parent_->digital_write(this->pin_, value ^ this->inverted_); }
|
||||
size_t CH423GPIOPin::dump_summary(char *buffer, size_t len) const {
|
||||
return snprintf(buffer, len, "EXIO%u via CH423", this->pin_);
|
||||
}
|
||||
void CH423GPIOPin::set_flags(gpio::Flags flags) {
|
||||
flags_ = flags;
|
||||
this->parent_->pin_mode(this->pin_, flags);
|
||||
}
|
||||
|
||||
} // namespace esphome::ch423
|
||||
@@ -1,67 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/hal.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
|
||||
namespace esphome::ch423 {
|
||||
|
||||
class CH423Component : public Component, public i2c::I2CDevice {
|
||||
public:
|
||||
CH423Component() = default;
|
||||
|
||||
/// Check i2c availability and setup masks
|
||||
void setup() override;
|
||||
/// Poll for input changes periodically
|
||||
void loop() override;
|
||||
/// Helper function to read the value of a pin.
|
||||
bool digital_read(uint8_t pin);
|
||||
/// Helper function to write the value of a pin.
|
||||
void digital_write(uint8_t pin, bool value);
|
||||
/// Helper function to set the pin mode of a pin.
|
||||
void pin_mode(uint8_t pin, gpio::Flags flags);
|
||||
|
||||
float get_setup_priority() const override;
|
||||
float get_loop_priority() const override;
|
||||
void dump_config() override;
|
||||
|
||||
protected:
|
||||
bool write_reg_(uint8_t reg, uint8_t value);
|
||||
uint8_t read_reg_(uint8_t reg);
|
||||
bool read_inputs_();
|
||||
bool write_outputs_();
|
||||
|
||||
/// The mask to write as output state - 1 means HIGH, 0 means LOW
|
||||
uint32_t output_bits_{0x00};
|
||||
/// Flags to check if read previously during this loop
|
||||
uint8_t pin_read_flags_{0x00};
|
||||
/// Copy of last read values
|
||||
uint8_t input_bits_{0x00};
|
||||
/// System parameters
|
||||
uint8_t sys_params_{0x00};
|
||||
};
|
||||
|
||||
/// Helper class to expose a CH423 pin as a GPIO pin.
|
||||
class CH423GPIOPin : public GPIOPin {
|
||||
public:
|
||||
void setup() override{};
|
||||
void pin_mode(gpio::Flags flags) override;
|
||||
bool digital_read() override;
|
||||
void digital_write(bool value) override;
|
||||
size_t dump_summary(char *buffer, size_t len) const override;
|
||||
|
||||
void set_parent(CH423Component *parent) { parent_ = parent; }
|
||||
void set_pin(uint8_t pin) { pin_ = pin; }
|
||||
void set_inverted(bool inverted) { inverted_ = inverted; }
|
||||
void set_flags(gpio::Flags flags);
|
||||
|
||||
gpio::Flags get_flags() const override { return this->flags_; }
|
||||
|
||||
protected:
|
||||
CH423Component *parent_{};
|
||||
uint8_t pin_{};
|
||||
bool inverted_{};
|
||||
gpio::Flags flags_{};
|
||||
};
|
||||
|
||||
} // namespace esphome::ch423
|
||||
@@ -15,7 +15,7 @@ from esphome.const import (
CONF_UPDATE_INTERVAL,
SCHEDULER_DONT_RUN,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CoroPriority, coroutine_with_priority

IS_PLATFORM_COMPONENT = True

@@ -222,8 +222,3 @@ async def display_is_displaying_page_to_code(config, condition_id, template_arg,
async def to_code(config):
cg.add_global(display_ns.using)
cg.add_define("USE_DISPLAY")
if CORE.is_esp32:
# Re-enable ESP-IDF's LCD driver (excluded by default to save compile time)
from esphome.components.esp32 import include_builtin_idf_component

include_builtin_idf_component("esp_lcd")

@@ -1,57 +0,0 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import uart
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ID, PLATFORM_ESP32, PLATFORM_ESP8266
|
||||
|
||||
CODEOWNERS = ["@SimonFischer04"]
|
||||
DEPENDENCIES = ["uart"]
|
||||
|
||||
CONF_DLMS_METER_ID = "dlms_meter_id"
|
||||
CONF_DECRYPTION_KEY = "decryption_key"
|
||||
CONF_PROVIDER = "provider"
|
||||
|
||||
PROVIDERS = {"generic": 0, "netznoe": 1}
|
||||
|
||||
dlms_meter_component_ns = cg.esphome_ns.namespace("dlms_meter")
|
||||
DlmsMeterComponent = dlms_meter_component_ns.class_(
|
||||
"DlmsMeterComponent", cg.Component, uart.UARTDevice
|
||||
)
|
||||
|
||||
|
||||
def validate_key(value):
|
||||
value = cv.string_strict(value)
|
||||
if len(value) != 32:
|
||||
raise cv.Invalid("Decryption key must be 32 hex characters (16 bytes)")
|
||||
try:
|
||||
return [int(value[i : i + 2], 16) for i in range(0, 32, 2)]
|
||||
except ValueError as exc:
|
||||
raise cv.Invalid("Decryption key must be hex values from 00 to FF") from exc
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(DlmsMeterComponent),
|
||||
cv.Required(CONF_DECRYPTION_KEY): validate_key,
|
||||
cv.Optional(CONF_PROVIDER, default="generic"): cv.enum(
|
||||
PROVIDERS, lower=True
|
||||
),
|
||||
}
|
||||
)
|
||||
.extend(uart.UART_DEVICE_SCHEMA)
|
||||
.extend(cv.COMPONENT_SCHEMA),
|
||||
cv.only_on([PLATFORM_ESP8266, PLATFORM_ESP32]),
|
||||
)
|
||||
|
||||
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema(
|
||||
"dlms_meter", baud_rate=2400, require_rx=True
|
||||
)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
await cg.register_component(var, config)
|
||||
await uart.register_uart_device(var, config)
|
||||
key = ", ".join(str(b) for b in config[CONF_DECRYPTION_KEY])
|
||||
cg.add(var.set_decryption_key(cg.RawExpression(f"{{{key}}}")))
|
||||
cg.add(var.set_provider(PROVIDERS[config[CONF_PROVIDER]]))
|
||||
@@ -1,71 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
namespace esphome::dlms_meter {
|
||||
|
||||
/*
|
||||
+-------------------------------+
|
||||
| Ciphering Service |
|
||||
+-------------------------------+
|
||||
| System Title Length |
|
||||
+-------------------------------+
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
| System |
|
||||
| Title |
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
+-------------------------------+
|
||||
| Length | (1 or 3 Bytes)
|
||||
+-------------------------------+
|
||||
| Security Control Byte |
|
||||
+-------------------------------+
|
||||
| |
|
||||
| Frame |
|
||||
| Counter |
|
||||
| |
|
||||
+-------------------------------+
|
||||
| |
|
||||
~ ~
|
||||
Encrypted Payload
|
||||
~ ~
|
||||
| |
|
||||
+-------------------------------+
|
||||
|
||||
Ciphering Service: 0xDB (General-Glo-Ciphering)
|
||||
System Title Length: 0x08
|
||||
System Title: Unique ID of meter
|
||||
Length: 1 Byte=Length <= 127, 3 Bytes=Length > 127 (0x82 & 2 Bytes length)
|
||||
Security Control Byte:
|
||||
- Bit 3…0: Security_Suite_Id
|
||||
- Bit 4: "A" subfield: indicates that authentication is applied
|
||||
- Bit 5: "E" subfield: indicates that encryption is applied
|
||||
- Bit 6: Key_Set subfield: 0 = Unicast, 1 = Broadcast
|
||||
- Bit 7: Indicates the use of compression.
|
||||
*/
|
||||
|
||||
static constexpr uint8_t DLMS_HEADER_LENGTH = 16;
|
||||
static constexpr uint8_t DLMS_HEADER_EXT_OFFSET = 2; // Extra offset for extended length header
|
||||
static constexpr uint8_t DLMS_CIPHER_OFFSET = 0;
|
||||
static constexpr uint8_t DLMS_SYST_OFFSET = 1;
|
||||
static constexpr uint8_t DLMS_LENGTH_OFFSET = 10;
|
||||
static constexpr uint8_t TWO_BYTE_LENGTH = 0x82;
|
||||
static constexpr uint8_t DLMS_LENGTH_CORRECTION = 5; // Header bytes included in length field
|
||||
static constexpr uint8_t DLMS_SECBYTE_OFFSET = 11;
|
||||
static constexpr uint8_t DLMS_FRAMECOUNTER_OFFSET = 12;
|
||||
static constexpr uint8_t DLMS_FRAMECOUNTER_LENGTH = 4;
|
||||
static constexpr uint8_t DLMS_PAYLOAD_OFFSET = 16;
|
||||
static constexpr uint8_t GLO_CIPHERING = 0xDB;
|
||||
static constexpr uint8_t DATA_NOTIFICATION = 0x0F;
|
||||
static constexpr uint8_t TIMESTAMP_DATETIME = 0x0C;
|
||||
static constexpr uint16_t MAX_MESSAGE_LENGTH = 512; // Maximum size of message (when having 2 bytes length in header).
|
||||
|
||||
// Provider specific quirks
|
||||
static constexpr uint8_t NETZ_NOE_MAGIC_BYTE = 0x81; // Magic length byte used by Netz NOE
|
||||
static constexpr uint8_t NETZ_NOE_EXPECTED_MESSAGE_LENGTH = 0xF8;
|
||||
static constexpr uint8_t NETZ_NOE_EXPECTED_SECURITY_CONTROL_BYTE = 0x20;
|
||||
|
||||
} // namespace esphome::dlms_meter
|
||||
@@ -1,468 +0,0 @@
|
||||
#include "dlms_meter.h"
|
||||
|
||||
#include <cmath>
|
||||
|
||||
#if defined(USE_ESP8266_FRAMEWORK_ARDUINO)
|
||||
#include <bearssl/bearssl.h>
|
||||
#elif defined(USE_ESP32)
|
||||
#include "mbedtls/esp_config.h"
|
||||
#include "mbedtls/gcm.h"
|
||||
#endif
|
||||
|
||||
namespace esphome::dlms_meter {
|
||||
|
||||
static constexpr const char *TAG = "dlms_meter";
|
||||
|
||||
void DlmsMeterComponent::dump_config() {
|
||||
const char *provider_name = this->provider_ == PROVIDER_NETZNOE ? "Netz NOE" : "Generic";
|
||||
ESP_LOGCONFIG(TAG,
|
||||
"DLMS Meter:\n"
|
||||
" Provider: %s\n"
|
||||
" Read Timeout: %u ms",
|
||||
provider_name, this->read_timeout_);
|
||||
#define DLMS_METER_LOG_SENSOR(s) LOG_SENSOR(" ", #s, this->s##_sensor_);
|
||||
DLMS_METER_SENSOR_LIST(DLMS_METER_LOG_SENSOR, )
|
||||
#define DLMS_METER_LOG_TEXT_SENSOR(s) LOG_TEXT_SENSOR(" ", #s, this->s##_text_sensor_);
|
||||
DLMS_METER_TEXT_SENSOR_LIST(DLMS_METER_LOG_TEXT_SENSOR, )
|
||||
}
|
||||
|
||||
void DlmsMeterComponent::loop() {
|
||||
// Read while data is available, netznoe uses two frames so allow 2x max frame length
|
||||
while (this->available()) {
|
||||
if (this->receive_buffer_.size() >= MBUS_MAX_FRAME_LENGTH * 2) {
|
||||
ESP_LOGW(TAG, "Receive buffer full, dropping remaining bytes");
|
||||
break;
|
||||
}
|
||||
uint8_t c;
|
||||
this->read_byte(&c);
|
||||
this->receive_buffer_.push_back(c);
|
||||
this->last_read_ = millis();
|
||||
}
|
||||
|
||||
if (!this->receive_buffer_.empty() && millis() - this->last_read_ > this->read_timeout_) {
|
||||
this->mbus_payload_.clear();
|
||||
if (!this->parse_mbus_(this->mbus_payload_))
|
||||
return;
|
||||
|
||||
uint16_t message_length;
|
||||
uint8_t systitle_length;
|
||||
uint16_t header_offset;
|
||||
if (!this->parse_dlms_(this->mbus_payload_, message_length, systitle_length, header_offset))
|
||||
return;
|
||||
|
||||
if (message_length < DECODER_START_OFFSET || message_length > MAX_MESSAGE_LENGTH) {
|
||||
ESP_LOGE(TAG, "DLMS: Message length invalid: %u", message_length);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
// Decrypt in place and then decode the OBIS codes
|
||||
if (!this->decrypt_(this->mbus_payload_, message_length, systitle_length, header_offset))
|
||||
return;
|
||||
this->decode_obis_(&this->mbus_payload_[header_offset + DLMS_PAYLOAD_OFFSET], message_length);
|
||||
}
|
||||
}
|
||||
|
||||
bool DlmsMeterComponent::parse_mbus_(std::vector<uint8_t> &mbus_payload) {
|
||||
ESP_LOGV(TAG, "Parsing M-Bus frames");
|
||||
uint16_t frame_offset = 0; // Offset is used if the M-Bus message is split into multiple frames
|
||||
|
||||
while (frame_offset < this->receive_buffer_.size()) {
|
||||
// Ensure enough bytes remain for the minimal intro header before accessing indices
|
||||
if (this->receive_buffer_.size() - frame_offset < MBUS_HEADER_INTRO_LENGTH) {
|
||||
ESP_LOGE(TAG, "MBUS: Not enough data for frame header (need %d, have %d)", MBUS_HEADER_INTRO_LENGTH,
|
||||
(this->receive_buffer_.size() - frame_offset));
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check start bytes
|
||||
if (this->receive_buffer_[frame_offset + MBUS_START1_OFFSET] != START_BYTE_LONG_FRAME ||
|
||||
this->receive_buffer_[frame_offset + MBUS_START2_OFFSET] != START_BYTE_LONG_FRAME) {
|
||||
ESP_LOGE(TAG, "MBUS: Start bytes do not match");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Both length bytes must be identical
|
||||
if (this->receive_buffer_[frame_offset + MBUS_LENGTH1_OFFSET] !=
|
||||
this->receive_buffer_[frame_offset + MBUS_LENGTH2_OFFSET]) {
|
||||
ESP_LOGE(TAG, "MBUS: Length bytes do not match");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
uint8_t frame_length = this->receive_buffer_[frame_offset + MBUS_LENGTH1_OFFSET]; // Get length of this frame
|
||||
|
||||
// Check if received data is enough for the given frame length
|
||||
if (this->receive_buffer_.size() - frame_offset <
|
||||
frame_length + 3) { // length field inside packet does not account for second start- + checksum- + stop- byte
|
||||
ESP_LOGE(TAG, "MBUS: Frame too big for received data");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Ensure we have full frame (header + payload + checksum + stop byte) before accessing stop byte
|
||||
size_t required_total =
|
||||
frame_length + MBUS_HEADER_INTRO_LENGTH + MBUS_FOOTER_LENGTH; // payload + header + 2 footer bytes
|
||||
if (this->receive_buffer_.size() - frame_offset < required_total) {
|
||||
ESP_LOGE(TAG, "MBUS: Incomplete frame (need %d, have %d)", (unsigned int) required_total,
|
||||
this->receive_buffer_.size() - frame_offset);
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this->receive_buffer_[frame_offset + frame_length + MBUS_HEADER_INTRO_LENGTH + MBUS_FOOTER_LENGTH - 1] !=
|
||||
STOP_BYTE) {
|
||||
ESP_LOGE(TAG, "MBUS: Invalid stop byte");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Verify checksum: sum of all bytes starting at MBUS_HEADER_INTRO_LENGTH, take last byte
|
||||
uint8_t checksum = 0; // use uint8_t so only the 8 least significant bits are stored
|
||||
for (uint16_t i = 0; i < frame_length; i++) {
|
||||
checksum += this->receive_buffer_[frame_offset + MBUS_HEADER_INTRO_LENGTH + i];
|
||||
}
|
||||
if (checksum != this->receive_buffer_[frame_offset + frame_length + MBUS_HEADER_INTRO_LENGTH]) {
|
||||
ESP_LOGE(TAG, "MBUS: Invalid checksum: %x != %x", checksum,
|
||||
this->receive_buffer_[frame_offset + frame_length + MBUS_HEADER_INTRO_LENGTH]);
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
mbus_payload.insert(mbus_payload.end(), &this->receive_buffer_[frame_offset + MBUS_FULL_HEADER_LENGTH],
|
||||
&this->receive_buffer_[frame_offset + MBUS_HEADER_INTRO_LENGTH + frame_length]);
|
||||
|
||||
frame_offset += MBUS_HEADER_INTRO_LENGTH + frame_length + MBUS_FOOTER_LENGTH;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool DlmsMeterComponent::parse_dlms_(const std::vector<uint8_t> &mbus_payload, uint16_t &message_length,
|
||||
uint8_t &systitle_length, uint16_t &header_offset) {
|
||||
ESP_LOGV(TAG, "Parsing DLMS header");
|
||||
if (mbus_payload.size() < DLMS_HEADER_LENGTH + DLMS_HEADER_EXT_OFFSET) {
|
||||
ESP_LOGE(TAG, "DLMS: Payload too short");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (mbus_payload[DLMS_CIPHER_OFFSET] != GLO_CIPHERING) { // Only general-glo-ciphering is supported (0xDB)
|
||||
ESP_LOGE(TAG, "DLMS: Unsupported cipher");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
systitle_length = mbus_payload[DLMS_SYST_OFFSET];
|
||||
|
||||
if (systitle_length != 0x08) { // Only system titles with length of 8 are supported
|
||||
ESP_LOGE(TAG, "DLMS: Unsupported system title length");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
message_length = mbus_payload[DLMS_LENGTH_OFFSET];
|
||||
header_offset = 0;
|
||||
|
||||
if (this->provider_ == PROVIDER_NETZNOE) {
|
||||
// for some reason EVN seems to set the standard "length" field to 0x81 and then the actual length is in the next
|
||||
// byte. Check some bytes to see if received data still matches expectation
|
||||
if (message_length == NETZ_NOE_MAGIC_BYTE &&
|
||||
mbus_payload[DLMS_LENGTH_OFFSET + 1] == NETZ_NOE_EXPECTED_MESSAGE_LENGTH &&
|
||||
mbus_payload[DLMS_LENGTH_OFFSET + 2] == NETZ_NOE_EXPECTED_SECURITY_CONTROL_BYTE) {
|
||||
message_length = mbus_payload[DLMS_LENGTH_OFFSET + 1];
|
||||
header_offset = 1;
|
||||
} else {
|
||||
ESP_LOGE(TAG, "Wrong Length - Security Control Byte sequence detected for provider EVN");
|
||||
}
|
||||
} else {
|
||||
if (message_length == TWO_BYTE_LENGTH) {
|
||||
message_length = encode_uint16(mbus_payload[DLMS_LENGTH_OFFSET + 1], mbus_payload[DLMS_LENGTH_OFFSET + 2]);
|
||||
header_offset = DLMS_HEADER_EXT_OFFSET;
|
||||
}
|
||||
}
|
||||
if (message_length < DLMS_LENGTH_CORRECTION) {
|
||||
ESP_LOGE(TAG, "DLMS: Message length too short: %u", message_length);
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
message_length -= DLMS_LENGTH_CORRECTION; // Correct message length due to part of header being included in length
|
||||
|
||||
if (mbus_payload.size() - DLMS_HEADER_LENGTH - header_offset != message_length) {
|
||||
ESP_LOGV(TAG, "DLMS: Length mismatch - payload=%d, header=%d, offset=%d, message=%d", mbus_payload.size(),
|
||||
DLMS_HEADER_LENGTH, header_offset, message_length);
|
||||
ESP_LOGE(TAG, "DLMS: Message has invalid length");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (mbus_payload[header_offset + DLMS_SECBYTE_OFFSET] != 0x21 &&
|
||||
mbus_payload[header_offset + DLMS_SECBYTE_OFFSET] !=
|
||||
0x20) { // Only certain security suite is supported (0x21 || 0x20)
|
||||
ESP_LOGE(TAG, "DLMS: Unsupported security control byte");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool DlmsMeterComponent::decrypt_(std::vector<uint8_t> &mbus_payload, uint16_t message_length, uint8_t systitle_length,
|
||||
uint16_t header_offset) {
|
||||
ESP_LOGV(TAG, "Decrypting payload");
|
||||
uint8_t iv[12]; // Reserve space for the IV, always 12 bytes
|
||||
// Copy system title to IV (System title is before length; no header offset needed!)
|
||||
// Add 1 to the offset in order to skip the system title length byte
|
||||
memcpy(&iv[0], &mbus_payload[DLMS_SYST_OFFSET + 1], systitle_length);
|
||||
memcpy(&iv[8], &mbus_payload[header_offset + DLMS_FRAMECOUNTER_OFFSET],
|
||||
DLMS_FRAMECOUNTER_LENGTH); // Copy frame counter to IV
|
||||
|
||||
uint8_t *payload_ptr = &mbus_payload[header_offset + DLMS_PAYLOAD_OFFSET];
|
||||
|
||||
#if defined(USE_ESP8266_FRAMEWORK_ARDUINO)
|
||||
br_gcm_context gcm_ctx;
|
||||
br_aes_ct_ctr_keys bc;
|
||||
br_aes_ct_ctr_init(&bc, this->decryption_key_.data(), this->decryption_key_.size());
|
||||
br_gcm_init(&gcm_ctx, &bc.vtable, br_ghash_ctmul32);
|
||||
br_gcm_reset(&gcm_ctx, iv, sizeof(iv));
|
||||
br_gcm_flip(&gcm_ctx);
|
||||
br_gcm_run(&gcm_ctx, 0, payload_ptr, message_length);
|
||||
#elif defined(USE_ESP32)
|
||||
size_t outlen = 0;
|
||||
mbedtls_gcm_context gcm_ctx;
|
||||
mbedtls_gcm_init(&gcm_ctx);
|
||||
mbedtls_gcm_setkey(&gcm_ctx, MBEDTLS_CIPHER_ID_AES, this->decryption_key_.data(), this->decryption_key_.size() * 8);
|
||||
mbedtls_gcm_starts(&gcm_ctx, MBEDTLS_GCM_DECRYPT, iv, sizeof(iv));
|
||||
auto ret = mbedtls_gcm_update(&gcm_ctx, payload_ptr, message_length, payload_ptr, message_length, &outlen);
|
||||
mbedtls_gcm_free(&gcm_ctx);
|
||||
if (ret != 0) {
|
||||
ESP_LOGE(TAG, "Decryption failed with error: %d", ret);
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
#error "Invalid Platform"
|
||||
#endif
|
||||
|
||||
if (payload_ptr[0] != DATA_NOTIFICATION || payload_ptr[5] != TIMESTAMP_DATETIME) {
|
||||
ESP_LOGE(TAG, "OBIS: Packet was decrypted but data is invalid");
|
||||
this->receive_buffer_.clear();
|
||||
return false;
|
||||
}
|
||||
ESP_LOGV(TAG, "Decrypted payload: %d bytes", message_length);
|
||||
return true;
|
||||
}
|
||||
|
||||
void DlmsMeterComponent::decode_obis_(uint8_t *plaintext, uint16_t message_length) {
|
||||
ESP_LOGV(TAG, "Decoding payload");
|
||||
MeterData data{};
|
||||
uint16_t current_position = DECODER_START_OFFSET;
|
||||
bool power_factor_found = false;
|
||||
|
||||
while (current_position + OBIS_CODE_OFFSET <= message_length) {
|
||||
if (plaintext[current_position + OBIS_TYPE_OFFSET] != DataType::OCTET_STRING) {
|
||||
ESP_LOGE(TAG, "OBIS: Unsupported OBIS header type: %x", plaintext[current_position + OBIS_TYPE_OFFSET]);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
uint8_t obis_code_length = plaintext[current_position + OBIS_LENGTH_OFFSET];
|
||||
if (obis_code_length != OBIS_CODE_LENGTH_STANDARD && obis_code_length != OBIS_CODE_LENGTH_EXTENDED) {
|
||||
ESP_LOGE(TAG, "OBIS: Unsupported OBIS header length: %x", obis_code_length);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
if (current_position + OBIS_CODE_OFFSET + obis_code_length > message_length) {
|
||||
ESP_LOGE(TAG, "OBIS: Buffer too short for OBIS code");
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
uint8_t *obis_code = &plaintext[current_position + OBIS_CODE_OFFSET];
|
||||
uint8_t obis_medium = obis_code[OBIS_A];
|
||||
uint16_t obis_cd = encode_uint16(obis_code[OBIS_C], obis_code[OBIS_D]);
|
||||
|
||||
bool timestamp_found = false;
|
||||
bool meter_number_found = false;
|
||||
if (this->provider_ == PROVIDER_NETZNOE) {
|
||||
// Do not advance Position when reading the Timestamp at DECODER_START_OFFSET
|
||||
if ((obis_code_length == OBIS_CODE_LENGTH_EXTENDED) && (current_position == DECODER_START_OFFSET)) {
|
||||
timestamp_found = true;
|
||||
} else if (power_factor_found) {
|
||||
meter_number_found = true;
|
||||
power_factor_found = false;
|
||||
} else {
|
||||
current_position += obis_code_length + OBIS_CODE_OFFSET; // Advance past code and position
|
||||
}
|
||||
} else {
|
||||
current_position += obis_code_length + OBIS_CODE_OFFSET; // Advance past code, position and type
|
||||
}
|
||||
if (!timestamp_found && !meter_number_found && obis_medium != Medium::ELECTRICITY &&
|
||||
obis_medium != Medium::ABSTRACT) {
|
||||
ESP_LOGE(TAG, "OBIS: Unsupported OBIS medium: %x", obis_medium);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
if (current_position >= message_length) {
|
||||
ESP_LOGE(TAG, "OBIS: Buffer too short for data type");
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
float value = 0.0f;
|
||||
uint8_t value_size = 0;
|
||||
uint8_t data_type = plaintext[current_position];
|
||||
current_position++;
|
||||
|
||||
switch (data_type) {
|
||||
case DataType::DOUBLE_LONG_UNSIGNED: {
|
||||
value_size = 4;
|
||||
if (current_position + value_size > message_length) {
|
||||
ESP_LOGE(TAG, "OBIS: Buffer too short for DOUBLE_LONG_UNSIGNED");
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
value = encode_uint32(plaintext[current_position + 0], plaintext[current_position + 1],
|
||||
plaintext[current_position + 2], plaintext[current_position + 3]);
|
||||
current_position += value_size;
|
||||
break;
|
||||
}
|
||||
case DataType::LONG_UNSIGNED: {
|
||||
value_size = 2;
|
||||
if (current_position + value_size > message_length) {
|
||||
ESP_LOGE(TAG, "OBIS: Buffer too short for LONG_UNSIGNED");
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
value = encode_uint16(plaintext[current_position + 0], plaintext[current_position + 1]);
|
||||
current_position += value_size;
|
||||
break;
|
||||
}
|
||||
case DataType::OCTET_STRING: {
|
||||
uint8_t data_length = plaintext[current_position];
|
||||
current_position++; // Advance past string length
|
||||
if (current_position + data_length > message_length) {
|
||||
ESP_LOGE(TAG, "OBIS: Buffer too short for OCTET_STRING");
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
// Handle timestamp (normal OBIS code or NETZNOE special case)
|
||||
if (obis_cd == OBIS_TIMESTAMP || timestamp_found) {
|
||||
if (data_length < 8) {
|
||||
ESP_LOGE(TAG, "OBIS: Timestamp data too short: %u", data_length);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
uint16_t year = encode_uint16(plaintext[current_position + 0], plaintext[current_position + 1]);
|
||||
uint8_t month = plaintext[current_position + 2];
|
||||
uint8_t day = plaintext[current_position + 3];
|
||||
uint8_t hour = plaintext[current_position + 5];
|
||||
uint8_t minute = plaintext[current_position + 6];
|
||||
uint8_t second = plaintext[current_position + 7];
|
||||
if (year > 9999 || month > 12 || day > 31 || hour > 23 || minute > 59 || second > 59) {
|
||||
ESP_LOGE(TAG, "Invalid timestamp values: %04u-%02u-%02uT%02u:%02u:%02uZ", year, month, day, hour, minute,
|
||||
second);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
snprintf(data.timestamp, sizeof(data.timestamp), "%04u-%02u-%02uT%02u:%02u:%02uZ", year, month, day, hour,
|
||||
minute, second);
|
||||
} else if (meter_number_found) {
|
||||
snprintf(data.meternumber, sizeof(data.meternumber), "%.*s", data_length, &plaintext[current_position]);
|
||||
}
|
||||
current_position += data_length;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
ESP_LOGE(TAG, "OBIS: Unsupported OBIS data type: %x", data_type);
|
||||
this->receive_buffer_.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip break after data
|
||||
if (this->provider_ == PROVIDER_NETZNOE) {
|
||||
// Don't skip the break on the first timestamp, as there's none
|
||||
if (!timestamp_found) {
|
||||
current_position += 2;
|
||||
}
|
||||
} else {
|
||||
current_position += 2;
|
||||
}
|
||||
|
||||
// Check for additional data (scaler-unit structure)
|
||||
if (current_position < message_length && plaintext[current_position] == DataType::INTEGER) {
|
||||
// Apply scaler: real_value = raw_value × 10^scaler
|
||||
if (current_position + 1 < message_length) {
|
||||
int8_t scaler = static_cast<int8_t>(plaintext[current_position + 1]);
|
||||
if (scaler != 0) {
|
||||
value *= powf(10.0f, scaler);
|
||||
}
|
||||
}
|
||||
|
||||
// on EVN Meters there is no additional break
|
||||
if (this->provider_ == PROVIDER_NETZNOE) {
|
||||
current_position += 4;
|
||||
} else {
|
||||
current_position += 6;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle numeric values (LONG_UNSIGNED and DOUBLE_LONG_UNSIGNED)
|
||||
if (value_size > 0) {
|
||||
switch (obis_cd) {
|
||||
case OBIS_VOLTAGE_L1:
|
||||
data.voltage_l1 = value;
|
||||
break;
|
||||
case OBIS_VOLTAGE_L2:
|
||||
data.voltage_l2 = value;
|
||||
break;
|
||||
case OBIS_VOLTAGE_L3:
|
||||
data.voltage_l3 = value;
|
||||
break;
|
||||
case OBIS_CURRENT_L1:
|
||||
data.current_l1 = value;
|
||||
break;
|
||||
case OBIS_CURRENT_L2:
|
||||
data.current_l2 = value;
|
||||
break;
|
||||
case OBIS_CURRENT_L3:
|
||||
data.current_l3 = value;
|
||||
break;
|
||||
case OBIS_ACTIVE_POWER_PLUS:
|
||||
data.active_power_plus = value;
|
||||
break;
|
||||
case OBIS_ACTIVE_POWER_MINUS:
|
||||
data.active_power_minus = value;
|
||||
break;
|
||||
case OBIS_ACTIVE_ENERGY_PLUS:
|
||||
data.active_energy_plus = value;
|
||||
break;
|
||||
case OBIS_ACTIVE_ENERGY_MINUS:
|
||||
data.active_energy_minus = value;
|
||||
break;
|
||||
case OBIS_REACTIVE_ENERGY_PLUS:
|
||||
data.reactive_energy_plus = value;
|
||||
break;
|
||||
case OBIS_REACTIVE_ENERGY_MINUS:
|
||||
data.reactive_energy_minus = value;
|
||||
break;
|
||||
case OBIS_POWER_FACTOR:
|
||||
data.power_factor = value;
|
||||
power_factor_found = true;
|
||||
break;
|
||||
default:
|
||||
ESP_LOGW(TAG, "Unsupported OBIS code 0x%04X", obis_cd);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this->receive_buffer_.clear();
|
||||
|
||||
ESP_LOGI(TAG, "Received valid data");
|
||||
this->publish_sensors(data);
|
||||
this->status_clear_warning();
|
||||
}
|
||||
|
||||
} // namespace esphome::dlms_meter
|
||||
@@ -1,96 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/log.h"
|
||||
#ifdef USE_SENSOR
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#endif
|
||||
#ifdef USE_TEXT_SENSOR
|
||||
#include "esphome/components/text_sensor/text_sensor.h"
|
||||
#endif
|
||||
#include "esphome/components/uart/uart.h"
|
||||
|
||||
#include "mbus.h"
|
||||
#include "dlms.h"
|
||||
#include "obis.h"
|
||||
|
||||
#include <array>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome::dlms_meter {
|
||||
|
||||
#ifndef DLMS_METER_SENSOR_LIST
|
||||
#define DLMS_METER_SENSOR_LIST(F, SEP)
|
||||
#endif
|
||||
|
||||
#ifndef DLMS_METER_TEXT_SENSOR_LIST
|
||||
#define DLMS_METER_TEXT_SENSOR_LIST(F, SEP)
|
||||
#endif
|
||||
|
||||
struct MeterData {
|
||||
float voltage_l1 = 0.0f; // Voltage L1
|
||||
float voltage_l2 = 0.0f; // Voltage L2
|
||||
float voltage_l3 = 0.0f; // Voltage L3
|
||||
float current_l1 = 0.0f; // Current L1
|
||||
float current_l2 = 0.0f; // Current L2
|
||||
float current_l3 = 0.0f; // Current L3
|
||||
float active_power_plus = 0.0f; // Active power taken from grid
|
||||
float active_power_minus = 0.0f; // Active power put into grid
|
||||
float active_energy_plus = 0.0f; // Active energy taken from grid
|
||||
float active_energy_minus = 0.0f; // Active energy put into grid
|
||||
float reactive_energy_plus = 0.0f; // Reactive energy taken from grid
|
||||
float reactive_energy_minus = 0.0f; // Reactive energy put into grid
|
||||
char timestamp[27]{}; // Text sensor for the timestamp value
|
||||
|
||||
// Netz NOE
|
||||
float power_factor = 0.0f; // Power Factor
|
||||
char meternumber[13]{}; // Text sensor for the meterNumber value
|
||||
};
|
||||
|
||||
// Provider constants
|
||||
enum Providers : uint32_t { PROVIDER_GENERIC = 0x00, PROVIDER_NETZNOE = 0x01 };
|
||||
|
||||
class DlmsMeterComponent : public Component, public uart::UARTDevice {
|
||||
public:
|
||||
DlmsMeterComponent() = default;
|
||||
|
||||
void dump_config() override;
|
||||
void loop() override;
|
||||
|
||||
void set_decryption_key(const std::array<uint8_t, 16> &key) { this->decryption_key_ = key; }
|
||||
void set_provider(uint32_t provider) { this->provider_ = provider; }
|
||||
|
||||
void publish_sensors(MeterData &data) {
|
||||
#define DLMS_METER_PUBLISH_SENSOR(s) \
|
||||
if (this->s##_sensor_ != nullptr) \
|
||||
s##_sensor_->publish_state(data.s);
|
||||
DLMS_METER_SENSOR_LIST(DLMS_METER_PUBLISH_SENSOR, )
|
||||
|
||||
#define DLMS_METER_PUBLISH_TEXT_SENSOR(s) \
|
||||
if (this->s##_text_sensor_ != nullptr) \
|
||||
s##_text_sensor_->publish_state(data.s);
|
||||
DLMS_METER_TEXT_SENSOR_LIST(DLMS_METER_PUBLISH_TEXT_SENSOR, )
|
||||
}
|
||||
|
||||
DLMS_METER_SENSOR_LIST(SUB_SENSOR, )
|
||||
DLMS_METER_TEXT_SENSOR_LIST(SUB_TEXT_SENSOR, )
|
||||
|
||||
protected:
|
||||
bool parse_mbus_(std::vector<uint8_t> &mbus_payload);
|
||||
bool parse_dlms_(const std::vector<uint8_t> &mbus_payload, uint16_t &message_length, uint8_t &systitle_length,
|
||||
uint16_t &header_offset);
|
||||
bool decrypt_(std::vector<uint8_t> &mbus_payload, uint16_t message_length, uint8_t systitle_length,
|
||||
uint16_t header_offset);
|
||||
void decode_obis_(uint8_t *plaintext, uint16_t message_length);
|
||||
|
||||
std::vector<uint8_t> receive_buffer_; // Stores the packet currently being received
|
||||
std::vector<uint8_t> mbus_payload_; // Parsed M-Bus payload, reused to avoid heap churn
|
||||
uint32_t last_read_ = 0; // Timestamp when data was last read
|
||||
uint32_t read_timeout_ = 1000; // Time to wait after last byte before considering data complete
|
||||
|
||||
uint32_t provider_ = PROVIDER_GENERIC; // Provider of the meter / your grid operator
|
||||
std::array<uint8_t, 16> decryption_key_;
|
||||
};
|
||||
|
||||
} // namespace esphome::dlms_meter
|
||||
@@ -1,69 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
namespace esphome::dlms_meter {
|
||||
|
||||
/*
|
||||
+----------------------------------------------------+ -
|
||||
| Start Character [0x68] | \
|
||||
+----------------------------------------------------+ |
|
||||
| Data Length (L) | |
|
||||
+----------------------------------------------------+ |
|
||||
| Data Length Repeat (L) | |
|
||||
+----------------------------------------------------+ > M-Bus Data link layer
|
||||
| Start Character Repeat [0x68] | |
|
||||
+----------------------------------------------------+ |
|
||||
| Control/Function Field (C) | |
|
||||
+----------------------------------------------------+ |
|
||||
| Address Field (A) | /
|
||||
+----------------------------------------------------+ -
|
||||
| Control Information Field (CI) | \
|
||||
+----------------------------------------------------+ |
|
||||
| Source Transport Service Access Point (STSAP) | > DLMS/COSEM M-Bus transport layer
|
||||
+----------------------------------------------------+ |
|
||||
| Destination Transport Service Access Point (DTSAP) | /
|
||||
+----------------------------------------------------+ -
|
||||
| | \
|
||||
~ ~ |
|
||||
Data > DLMS/COSEM Application Layer
|
||||
~ ~ |
|
||||
| | /
|
||||
+----------------------------------------------------+ -
|
||||
| Checksum | \
|
||||
+----------------------------------------------------+ > M-Bus Data link layer
|
||||
| Stop Character [0x16] | /
|
||||
+----------------------------------------------------+ -
|
||||
|
||||
Data_Length = L - C - A - CI
|
||||
Each line (except Data) is one Byte
|
||||
|
||||
Possible Values found in publicly available docs:
|
||||
- C: 0x53/0x73 (SND_UD)
|
||||
- A: FF (Broadcast)
|
||||
- CI: 0x00-0x1F/0x60/0x61/0x7C/0x7D
|
||||
- STSAP: 0x01 (Management Logical Device ID 1 of the meter)
|
||||
- DTSAP: 0x67 (Consumer Information Push Client ID 103)
|
||||
*/
|
||||
|
||||
// MBUS start bytes for different telegram formats:
|
||||
// - Single Character: 0xE5 (length=1)
|
||||
// - Short Frame: 0x10 (length=5)
|
||||
// - Control Frame: 0x68 (length=9)
|
||||
// - Long Frame: 0x68 (length=9+data_length)
|
||||
// This component currently only uses Long Frame.
|
||||
static constexpr uint8_t START_BYTE_SINGLE_CHARACTER = 0xE5;
|
||||
static constexpr uint8_t START_BYTE_SHORT_FRAME = 0x10;
|
||||
static constexpr uint8_t START_BYTE_CONTROL_FRAME = 0x68;
|
||||
static constexpr uint8_t START_BYTE_LONG_FRAME = 0x68;
|
||||
static constexpr uint8_t MBUS_HEADER_INTRO_LENGTH = 4; // Header length for the intro (0x68, length, length, 0x68)
|
||||
static constexpr uint8_t MBUS_FULL_HEADER_LENGTH = 9; // Total header length
|
||||
static constexpr uint8_t MBUS_FOOTER_LENGTH = 2; // Footer after frame
|
||||
static constexpr uint8_t MBUS_MAX_FRAME_LENGTH = 250; // Maximum size of frame
|
||||
static constexpr uint8_t MBUS_START1_OFFSET = 0; // Offset of first start byte
|
||||
static constexpr uint8_t MBUS_LENGTH1_OFFSET = 1; // Offset of first length byte
|
||||
static constexpr uint8_t MBUS_LENGTH2_OFFSET = 2; // Offset of (duplicated) second length byte
|
||||
static constexpr uint8_t MBUS_START2_OFFSET = 3; // Offset of (duplicated) second start byte
|
||||
static constexpr uint8_t STOP_BYTE = 0x16;
|
||||
|
||||
} // namespace esphome::dlms_meter
|
||||
@@ -1,94 +0,0 @@
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
namespace esphome::dlms_meter {
|
||||
|
||||
// Data types as per specification
|
||||
enum DataType {
|
||||
NULL_DATA = 0x00,
|
||||
BOOLEAN = 0x03,
|
||||
BIT_STRING = 0x04,
|
||||
DOUBLE_LONG = 0x05,
|
||||
DOUBLE_LONG_UNSIGNED = 0x06,
|
||||
OCTET_STRING = 0x09,
|
||||
VISIBLE_STRING = 0x0A,
|
||||
UTF8_STRING = 0x0C,
|
||||
BINARY_CODED_DECIMAL = 0x0D,
|
||||
INTEGER = 0x0F,
|
||||
LONG = 0x10,
|
||||
UNSIGNED = 0x11,
|
||||
LONG_UNSIGNED = 0x12,
|
||||
LONG64 = 0x14,
|
||||
LONG64_UNSIGNED = 0x15,
|
||||
ENUM = 0x16,
|
||||
FLOAT32 = 0x17,
|
||||
FLOAT64 = 0x18,
|
||||
DATE_TIME = 0x19,
|
||||
DATE = 0x1A,
|
||||
TIME = 0x1B,
|
||||
|
||||
ARRAY = 0x01,
|
||||
STRUCTURE = 0x02,
|
||||
COMPACT_ARRAY = 0x13
|
||||
};
|
||||
|
||||
enum Medium {
|
||||
ABSTRACT = 0x00,
|
||||
ELECTRICITY = 0x01,
|
||||
HEAT_COST_ALLOCATOR = 0x04,
|
||||
COOLING = 0x05,
|
||||
HEAT = 0x06,
|
||||
GAS = 0x07,
|
||||
COLD_WATER = 0x08,
|
||||
HOT_WATER = 0x09,
|
||||
OIL = 0x10,
|
||||
COMPRESSED_AIR = 0x11,
|
||||
NITROGEN = 0x12
|
||||
};
|
||||
|
||||
// Data structure
|
||||
static constexpr uint8_t DECODER_START_OFFSET = 20; // Skip header, timestamp and break block
|
||||
static constexpr uint8_t OBIS_TYPE_OFFSET = 0;
|
||||
static constexpr uint8_t OBIS_LENGTH_OFFSET = 1;
|
||||
static constexpr uint8_t OBIS_CODE_OFFSET = 2;
|
||||
static constexpr uint8_t OBIS_CODE_LENGTH_STANDARD = 0x06; // 6-byte OBIS code (A.B.C.D.E.F)
|
||||
static constexpr uint8_t OBIS_CODE_LENGTH_EXTENDED = 0x0C; // 12-byte extended OBIS code
|
||||
static constexpr uint8_t OBIS_A = 0;
|
||||
static constexpr uint8_t OBIS_B = 1;
|
||||
static constexpr uint8_t OBIS_C = 2;
|
||||
static constexpr uint8_t OBIS_D = 3;
|
||||
static constexpr uint8_t OBIS_E = 4;
|
||||
static constexpr uint8_t OBIS_F = 5;
|
||||
|
||||
// Metadata
|
||||
static constexpr uint16_t OBIS_TIMESTAMP = 0x0100;
|
||||
static constexpr uint16_t OBIS_SERIAL_NUMBER = 0x6001;
|
||||
static constexpr uint16_t OBIS_DEVICE_NAME = 0x2A00;
|
||||
|
||||
// Voltage
|
||||
static constexpr uint16_t OBIS_VOLTAGE_L1 = 0x2007;
|
||||
static constexpr uint16_t OBIS_VOLTAGE_L2 = 0x3407;
|
||||
static constexpr uint16_t OBIS_VOLTAGE_L3 = 0x4807;
|
||||
|
||||
// Current
|
||||
static constexpr uint16_t OBIS_CURRENT_L1 = 0x1F07;
|
||||
static constexpr uint16_t OBIS_CURRENT_L2 = 0x3307;
|
||||
static constexpr uint16_t OBIS_CURRENT_L3 = 0x4707;
|
||||
|
||||
// Power
|
||||
static constexpr uint16_t OBIS_ACTIVE_POWER_PLUS = 0x0107;
|
||||
static constexpr uint16_t OBIS_ACTIVE_POWER_MINUS = 0x0207;
|
||||
|
||||
// Active energy
|
||||
static constexpr uint16_t OBIS_ACTIVE_ENERGY_PLUS = 0x0108;
|
||||
static constexpr uint16_t OBIS_ACTIVE_ENERGY_MINUS = 0x0208;
|
||||
|
||||
// Reactive energy
|
||||
static constexpr uint16_t OBIS_REACTIVE_ENERGY_PLUS = 0x0308;
|
||||
static constexpr uint16_t OBIS_REACTIVE_ENERGY_MINUS = 0x0408;
|
||||
|
||||
// Netz NOE specific
|
||||
static constexpr uint16_t OBIS_POWER_FACTOR = 0x0D07;
|
||||
|
||||
} // namespace esphome::dlms_meter
|
||||
@@ -1,124 +0,0 @@
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import sensor
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ID,
|
||||
DEVICE_CLASS_CURRENT,
|
||||
DEVICE_CLASS_ENERGY,
|
||||
DEVICE_CLASS_POWER,
|
||||
DEVICE_CLASS_POWER_FACTOR,
|
||||
DEVICE_CLASS_VOLTAGE,
|
||||
STATE_CLASS_MEASUREMENT,
|
||||
STATE_CLASS_TOTAL_INCREASING,
|
||||
UNIT_AMPERE,
|
||||
UNIT_VOLT,
|
||||
UNIT_WATT,
|
||||
UNIT_WATT_HOURS,
|
||||
)
|
||||
|
||||
from .. import CONF_DLMS_METER_ID, DlmsMeterComponent
|
||||
|
||||
AUTO_LOAD = ["dlms_meter"]
|
||||
|
CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(CONF_DLMS_METER_ID): cv.use_id(DlmsMeterComponent),
        cv.Optional("voltage_l1"): sensor.sensor_schema(
            unit_of_measurement=UNIT_VOLT,
            accuracy_decimals=1,
            device_class=DEVICE_CLASS_VOLTAGE,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("voltage_l2"): sensor.sensor_schema(
            unit_of_measurement=UNIT_VOLT,
            accuracy_decimals=1,
            device_class=DEVICE_CLASS_VOLTAGE,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("voltage_l3"): sensor.sensor_schema(
            unit_of_measurement=UNIT_VOLT,
            accuracy_decimals=1,
            device_class=DEVICE_CLASS_VOLTAGE,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("current_l1"): sensor.sensor_schema(
            unit_of_measurement=UNIT_AMPERE,
            accuracy_decimals=2,
            device_class=DEVICE_CLASS_CURRENT,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("current_l2"): sensor.sensor_schema(
            unit_of_measurement=UNIT_AMPERE,
            accuracy_decimals=2,
            device_class=DEVICE_CLASS_CURRENT,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("current_l3"): sensor.sensor_schema(
            unit_of_measurement=UNIT_AMPERE,
            accuracy_decimals=2,
            device_class=DEVICE_CLASS_CURRENT,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("active_power_plus"): sensor.sensor_schema(
            unit_of_measurement=UNIT_WATT,
            accuracy_decimals=0,
            device_class=DEVICE_CLASS_POWER,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("active_power_minus"): sensor.sensor_schema(
            unit_of_measurement=UNIT_WATT,
            accuracy_decimals=0,
            device_class=DEVICE_CLASS_POWER,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
        cv.Optional("active_energy_plus"): sensor.sensor_schema(
            unit_of_measurement=UNIT_WATT_HOURS,
            accuracy_decimals=0,
            device_class=DEVICE_CLASS_ENERGY,
            state_class=STATE_CLASS_TOTAL_INCREASING,
        ),
        cv.Optional("active_energy_minus"): sensor.sensor_schema(
            unit_of_measurement=UNIT_WATT_HOURS,
            accuracy_decimals=0,
            device_class=DEVICE_CLASS_ENERGY,
            state_class=STATE_CLASS_TOTAL_INCREASING,
        ),
        cv.Optional("reactive_energy_plus"): sensor.sensor_schema(
            unit_of_measurement=UNIT_WATT_HOURS,
            accuracy_decimals=0,
            device_class=DEVICE_CLASS_ENERGY,
            state_class=STATE_CLASS_TOTAL_INCREASING,
        ),
        cv.Optional("reactive_energy_minus"): sensor.sensor_schema(
            unit_of_measurement=UNIT_WATT_HOURS,
            accuracy_decimals=0,
            device_class=DEVICE_CLASS_ENERGY,
            state_class=STATE_CLASS_TOTAL_INCREASING,
        ),
        # Netz NOE
        cv.Optional("power_factor"): sensor.sensor_schema(
            accuracy_decimals=3,
            device_class=DEVICE_CLASS_POWER_FACTOR,
            state_class=STATE_CLASS_MEASUREMENT,
        ),
    }
).extend(cv.COMPONENT_SCHEMA)


async def to_code(config):
    hub = await cg.get_variable(config[CONF_DLMS_METER_ID])

    sensors = []
    for key, conf in config.items():
        if not isinstance(conf, dict):
            continue
        id = conf[CONF_ID]
        if id and id.type == sensor.Sensor:
            sens = await sensor.new_sensor(conf)
            cg.add(getattr(hub, f"set_{key}_sensor")(sens))
            sensors.append(f"F({key})")

    if sensors:
        cg.add_define(
            "DLMS_METER_SENSOR_LIST(F, sep)", cg.RawExpression(" sep ".join(sensors))
        )
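Note: the loop above builds an X-macro style define from whichever sensor keys the user configured. A minimal sketch of the string it produces, assuming just two keys are present (illustrative only, not part of the commit):

    # Hedged sketch: what " sep ".join(sensors) yields for two configured keys.
    sensors = ["F(voltage_l1)", "F(active_power_plus)"]
    print(" sep ".join(sensors))
    # -> F(voltage_l1) sep F(active_power_plus)
    # cg.add_define() then exposes this as DLMS_METER_SENSOR_LIST(F, sep), which the
    # C++ side can expand with its own F() and separator macros.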
@@ -1,37 +0,0 @@
import esphome.codegen as cg
from esphome.components import text_sensor
import esphome.config_validation as cv
from esphome.const import CONF_ID

from .. import CONF_DLMS_METER_ID, DlmsMeterComponent

AUTO_LOAD = ["dlms_meter"]

CONFIG_SCHEMA = cv.Schema(
    {
        cv.GenerateID(CONF_DLMS_METER_ID): cv.use_id(DlmsMeterComponent),
        cv.Optional("timestamp"): text_sensor.text_sensor_schema(),
        # Netz NOE
        cv.Optional("meternumber"): text_sensor.text_sensor_schema(),
    }
).extend(cv.COMPONENT_SCHEMA)


async def to_code(config):
    hub = await cg.get_variable(config[CONF_DLMS_METER_ID])

    text_sensors = []
    for key, conf in config.items():
        if not isinstance(conf, dict):
            continue
        id = conf[CONF_ID]
        if id and id.type == text_sensor.TextSensor:
            sens = await text_sensor.new_text_sensor(conf)
            cg.add(getattr(hub, f"set_{key}_text_sensor")(sens))
            text_sensors.append(f"F({key})")

    if text_sensors:
        cg.add_define(
            "DLMS_METER_TEXT_SENSOR_LIST(F, sep)",
            cg.RawExpression(" sep ".join(text_sensors)),
        )
@@ -2,8 +2,7 @@ import esphome.codegen as cg
from esphome.components import i2c
from esphome.components.audio_dac import AudioDac
import esphome.config_validation as cv
from esphome.const import CONF_AUDIO_DAC, CONF_BITS_PER_SAMPLE, CONF_ID
import esphome.final_validate as fv
from esphome.const import CONF_ID

CODEOWNERS = ["@kbx81"]
DEPENDENCIES = ["i2c"]
@@ -22,29 +21,6 @@ CONFIG_SCHEMA = (
)


def _final_validate(config):
    full_config = fv.full_config.get()

    # Check all speaker configurations for ones that reference this es8156
    speaker_configs = full_config.get("speaker", [])
    for speaker_config in speaker_configs:
        audio_dac_id = speaker_config.get(CONF_AUDIO_DAC)
        if (
            audio_dac_id is not None
            and audio_dac_id == config[CONF_ID]
            and (bits_per_sample := speaker_config.get(CONF_BITS_PER_SAMPLE))
            is not None
            and bits_per_sample > 24
        ):
            raise cv.Invalid(
                f"ES8156 does not support more than 24 bits per sample. "
                f"The speaker referencing this audio_dac has bits_per_sample set to {bits_per_sample}."
            )


FINAL_VALIDATE_SCHEMA = _final_validate
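For reference, a rough sketch of the configuration shape this final-validate step rejects (hypothetical values, not taken from the commit): a speaker entry whose audio_dac points at this es8156 and whose bits_per_sample exceeds 24.

    # Hedged sketch of the failing case; the keys mirror the checks above.
    es8156_config = {CONF_ID: "es8156_dac"}
    speaker_config = {CONF_AUDIO_DAC: "es8156_dac", CONF_BITS_PER_SAMPLE: 32}
    if (
        speaker_config.get(CONF_AUDIO_DAC) == es8156_config[CONF_ID]
        and speaker_config.get(CONF_BITS_PER_SAMPLE, 0) > 24
    ):
        raise cv.Invalid("ES8156 does not support more than 24 bits per sample.")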


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)

@@ -17,61 +17,24 @@ static const char *const TAG = "es8156";
}

void ES8156::setup() {
  // REG02 MODE CONFIG 1: Enable software mode for I2C control of volume/mute
  // Bit 2: SOFT_MODE_SEL=1 (software mode enabled)
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG02_SCLK_MODE, 0x04));

  // Analog system configuration (active-low power down bits, active-high enables)
  // REG20 ANALOG SYSTEM: Configure analog signal path
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG20_ANALOG_SYS1, 0x2A));

  // REG21 ANALOG SYSTEM: VSEL=0x1C (bias level ~120%), normal VREF ramp speed
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG21_ANALOG_SYS2, 0x3C));

  // REG22 ANALOG SYSTEM: Line out mode (HPSW=0), OUT_MUTE=0 (not muted)
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG22_ANALOG_SYS3, 0x00));

  // REG24 ANALOG SYSTEM: Low power mode for VREFBUF, HPCOM, DACVRP; DAC normal power
  // Bits 2:0 = 0x07: LPVREFBUF=1, LPHPCOM=1, LPDACVRP=1, LPDAC=0
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG24_ANALOG_LP, 0x07));

  // REG23 ANALOG SYSTEM: Lowest bias (IBIAS_SW=0), VMIDLVL=VDDA/2, normal impedance
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG23_ANALOG_SYS4, 0x00));

  // Timing and interface configuration
  // REG0A/0B TIME CONTROL: Fast state machine transitions
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0A_TIME_CONTROL1, 0x01));
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0B_TIME_CONTROL2, 0x01));

  // REG11 SDP INTERFACE CONFIG: Default I2S format (24-bit, I2S mode)
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG11_DAC_SDP, 0x00));

  // REG19 EQ CONTROL 1: EQ disabled (EQ_ON=0), EQ_BAND_NUM=2
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG19_EQ_CONTROL1, 0x20));

  // REG0D P2S CONTROL: Parallel-to-serial converter settings
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG0D_P2S_CONTROL, 0x14));

  // REG09 MISC CONTROL 2: Default settings
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG09_MISC_CONTROL2, 0x00));

  // REG18 MISC CONTROL 3: Stereo channel routing, no inversion
  // Bits 5:4 CHN_CROSS: 0=L→L/R→R, 1=L to both, 2=R to both, 3=swap L/R
  // Bits 3:2: LCH_INV/RCH_INV channel inversion
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG18_MISC_CONTROL3, 0x00));

  // REG08 CLOCK OFF: Enable all internal clocks (0x3F = all clock gates open)
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG08_CLOCK_ON_OFF, 0x3F));

  // REG00 RESET CONTROL: Reset sequence
  // First: RST_DIG=1 (assert digital reset)
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x02));
  // Then: CSM_ON=1 (enable chip state machine), RST_DIG=1
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG00_RESET, 0x03));

  // REG25 ANALOG SYSTEM: Power up analog blocks
  // VMIDSEL=2 (normal VMID operation), PDN_ANA=0, ENREFR=0, ENHPCOM=0
  // PDN_DACVREFGEN=0, PDN_VREFBUF=0, PDN_DAC=0 (all enabled)
  ES8156_ERROR_FAILED(this->write_byte(ES8156_REG25_ANALOG_SYS5, 0x20));
}

@@ -53,7 +53,6 @@ from .const import (  # noqa
    KEY_BOARD,
    KEY_COMPONENTS,
    KEY_ESP32,
    KEY_EXCLUDE_COMPONENTS,
    KEY_EXTRA_BUILD_FILES,
    KEY_FLASH_SIZE,
    KEY_FULL_CERT_BUNDLE,
@@ -87,7 +86,6 @@ IS_TARGET_PLATFORM = True
CONF_ASSERTION_LEVEL = "assertion_level"
CONF_COMPILER_OPTIMIZATION = "compiler_optimization"
CONF_ENABLE_IDF_EXPERIMENTAL_FEATURES = "enable_idf_experimental_features"
CONF_INCLUDE_BUILTIN_IDF_COMPONENTS = "include_builtin_idf_components"
CONF_ENABLE_LWIP_ASSERT = "enable_lwip_assert"
CONF_ENABLE_OTA_ROLLBACK = "enable_ota_rollback"
CONF_EXECUTE_FROM_PSRAM = "execute_from_psram"
@@ -116,36 +114,6 @@ COMPILER_OPTIMIZATIONS = {
    "SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}

# ESP-IDF components excluded by default to reduce compile time.
# Components can be re-enabled by calling include_builtin_idf_component() in to_code().
#
# Cannot be excluded (dependencies of required components):
# - "console": espressif/mdns unconditionally depends on it
# - "sdmmc": driver -> esp_driver_sdmmc -> sdmmc dependency chain
DEFAULT_EXCLUDED_IDF_COMPONENTS = (
    "cmock",  # Unit testing mock framework - ESPHome doesn't use IDF's testing
    "esp_adc",  # ADC driver - only needed by adc component
    "esp_driver_i2s",  # I2S driver - only needed by i2s_audio component
    "esp_driver_rmt",  # RMT driver - only needed by remote_transmitter/receiver, neopixelbus
    "esp_driver_touch_sens",  # Touch sensor driver - only needed by esp32_touch
    "esp_eth",  # Ethernet driver - only needed by ethernet component
    "esp_hid",  # HID host/device support - ESPHome doesn't implement HID functionality
    "esp_http_client",  # HTTP client - only needed by http_request component
    "esp_https_ota",  # ESP-IDF HTTPS OTA - ESPHome has its own OTA implementation
    "esp_https_server",  # HTTPS server - ESPHome has its own web server
    "esp_lcd",  # LCD controller drivers - only needed by display component
    "esp_local_ctrl",  # Local control over HTTPS/BLE - ESPHome has native API
    "espcoredump",  # Core dump support - ESPHome has its own debug component
    "fatfs",  # FAT filesystem - ESPHome doesn't use filesystem storage
    "mqtt",  # ESP-IDF MQTT library - ESPHome has its own MQTT implementation
    "perfmon",  # Xtensa performance monitor - ESPHome has its own debug component
    "protocomm",  # Protocol communication for provisioning - unused by ESPHome
    "spiffs",  # SPIFFS filesystem - ESPHome doesn't use filesystem storage (IDF only)
    "unity",  # Unit testing framework - ESPHome doesn't use IDF's testing
    "wear_levelling",  # Flash wear levelling for fatfs - unused since fatfs unused
    "wifi_provisioning",  # WiFi provisioning - ESPHome uses its own improv implementation
)

# ESP32 (original) chip revision options
# Setting minimum revision to 3.0 or higher:
# - Reduces flash size by excluding workaround code for older chip bugs
@@ -235,9 +203,6 @@ def set_core_data(config):
    )
    CORE.data[KEY_ESP32][KEY_SDKCONFIG_OPTIONS] = {}
    CORE.data[KEY_ESP32][KEY_COMPONENTS] = {}
    # Initialize with default exclusions - components can call include_builtin_idf_component()
    # to re-enable any they need
    CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS] = set(DEFAULT_EXCLUDED_IDF_COMPONENTS)
    CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] = cv.Version.parse(
        config[CONF_FRAMEWORK][CONF_VERSION]
    )
@@ -363,28 +328,6 @@ def add_idf_component(
    }


def exclude_builtin_idf_component(name: str) -> None:
    """Exclude an ESP-IDF component from the build.

    This reduces compile time by skipping components that are not needed.
    The component will be passed to ESP-IDF's EXCLUDE_COMPONENTS cmake variable.

    Note: Components that are dependencies of other required components
    cannot be excluded - ESP-IDF will still build them.
    """
    CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].add(name)


def include_builtin_idf_component(name: str) -> None:
    """Remove an ESP-IDF component from the exclusion list.

    Call this from components that need an ESP-IDF component that is
    excluded by default in DEFAULT_EXCLUDED_IDF_COMPONENTS. This ensures the
    component will be built when needed.
    """
    CORE.data[KEY_ESP32][KEY_EXCLUDE_COMPONENTS].discard(name)
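Usage pattern, as seen in the component diffs later in this comparison (esp32_rmt_led_strip, esp32_touch, ethernet, http_request, mqtt): a component that needs one of the default-excluded IDF components re-enables it from its to_code(). A minimal sketch:

    # Hedged sketch of the call pattern; "esp_driver_rmt" is one of the defaults listed above.
    from esphome.components.esp32 import include_builtin_idf_component

    async def to_code(config):
        # Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
        include_builtin_idf_component("esp_driver_rmt")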


def add_extra_script(stage: str, filename: str, path: Path):
    """Add an extra script to the project."""
    key = f"{stage}:{filename}"
@@ -729,26 +672,11 @@ CONF_RINGBUF_IN_IRAM = "ringbuf_in_iram"
CONF_HEAP_IN_IRAM = "heap_in_iram"
CONF_LOOP_TASK_STACK_SIZE = "loop_task_stack_size"
CONF_USE_FULL_CERTIFICATE_BUNDLE = "use_full_certificate_bundle"
CONF_DISABLE_DEBUG_STUBS = "disable_debug_stubs"
CONF_DISABLE_OCD_AWARE = "disable_ocd_aware"
CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY = "disable_usb_serial_jtag_secondary"
CONF_DISABLE_DEV_NULL_VFS = "disable_dev_null_vfs"
CONF_DISABLE_MBEDTLS_PEER_CERT = "disable_mbedtls_peer_cert"
CONF_DISABLE_MBEDTLS_PKCS7 = "disable_mbedtls_pkcs7"
CONF_DISABLE_REGI2C_IN_IRAM = "disable_regi2c_in_iram"
CONF_DISABLE_FATFS = "disable_fatfs"

# VFS requirement tracking
# Components that need VFS features can call require_vfs_*() functions
# Components that need VFS features can call require_vfs_select() or require_vfs_dir()
KEY_VFS_SELECT_REQUIRED = "vfs_select_required"
KEY_VFS_DIR_REQUIRED = "vfs_dir_required"
KEY_VFS_TERMIOS_REQUIRED = "vfs_termios_required"
# Feature requirement tracking - components can call require_* functions to re-enable
# These are stored in CORE.data[KEY_ESP32] dict
KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED = "usb_serial_jtag_secondary_required"
KEY_MBEDTLS_PEER_CERT_REQUIRED = "mbedtls_peer_cert_required"
KEY_MBEDTLS_PKCS7_REQUIRED = "mbedtls_pkcs7_required"
KEY_FATFS_REQUIRED = "fatfs_required"


def require_vfs_select() -> None:
@@ -769,15 +697,6 @@ def require_vfs_dir() -> None:
    CORE.data[KEY_VFS_DIR_REQUIRED] = True


def require_vfs_termios() -> None:
    """Mark that VFS termios support is required by a component.

    Call this from components that use terminal I/O functions (usb_serial_jtag_vfs_*, etc.).
    This prevents CONFIG_VFS_SUPPORT_TERMIOS from being disabled.
    """
    CORE.data[KEY_VFS_TERMIOS_REQUIRED] = True


def require_full_certificate_bundle() -> None:
    """Request the full certificate bundle instead of the common-CAs-only bundle.

@@ -790,43 +709,6 @@ def require_full_certificate_bundle() -> None:
    CORE.data[KEY_ESP32][KEY_FULL_CERT_BUNDLE] = True


def require_usb_serial_jtag_secondary() -> None:
    """Mark that USB Serial/JTAG secondary console is required by a component.

    Call this from components (e.g., logger) that need USB Serial/JTAG console output.
    This prevents CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG from being disabled.
    """
    CORE.data[KEY_ESP32][KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED] = True


def require_mbedtls_peer_cert() -> None:
    """Mark that mbedTLS peer certificate retention is required by a component.

    Call this from components that need access to the peer certificate after
    the TLS handshake is complete. This prevents CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE
    from being disabled.
    """
    CORE.data[KEY_ESP32][KEY_MBEDTLS_PEER_CERT_REQUIRED] = True


def require_mbedtls_pkcs7() -> None:
    """Mark that mbedTLS PKCS#7 support is required by a component.

    Call this from components that need PKCS#7 certificate validation.
    This prevents CONFIG_MBEDTLS_PKCS7_C from being disabled.
    """
    CORE.data[KEY_ESP32][KEY_MBEDTLS_PKCS7_REQUIRED] = True


def require_fatfs() -> None:
    """Mark that FATFS support is required by a component.

    Call this from components that use FATFS (e.g., SD card, storage components).
    This prevents FATFS from being disabled when disable_fatfs is set.
    """
    CORE.data[KEY_ESP32][KEY_FATFS_REQUIRED] = True


def _parse_idf_component(value: str) -> ConfigType:
    """Parse IDF component shorthand syntax like 'owner/component^version'"""
    # Match operator followed by version-like string (digit or *)
@@ -911,19 +793,6 @@ FRAMEWORK_SCHEMA = cv.Schema(
                cv.Optional(
                    CONF_USE_FULL_CERTIFICATE_BUNDLE, default=False
                ): cv.boolean,
                cv.Optional(
                    CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, default=[]
                ): cv.ensure_list(cv.string_strict),
                cv.Optional(CONF_DISABLE_DEBUG_STUBS, default=True): cv.boolean,
                cv.Optional(CONF_DISABLE_OCD_AWARE, default=True): cv.boolean,
                cv.Optional(
                    CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY, default=True
                ): cv.boolean,
                cv.Optional(CONF_DISABLE_DEV_NULL_VFS, default=True): cv.boolean,
                cv.Optional(CONF_DISABLE_MBEDTLS_PEER_CERT, default=True): cv.boolean,
                cv.Optional(CONF_DISABLE_MBEDTLS_PKCS7, default=True): cv.boolean,
                cv.Optional(CONF_DISABLE_REGI2C_IN_IRAM, default=True): cv.boolean,
                cv.Optional(CONF_DISABLE_FATFS, default=True): cv.boolean,
            }
        ),
        cv.Optional(CONF_COMPONENTS, default=[]): cv.ensure_list(
@@ -1113,19 +982,6 @@ def _configure_lwip_max_sockets(conf: dict) -> None:
    add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)


@coroutine_with_priority(CoroPriority.FINAL)
async def _write_exclude_components() -> None:
    """Write EXCLUDE_COMPONENTS cmake arg after all components have registered exclusions."""
    if KEY_ESP32 not in CORE.data:
        return
    excluded = CORE.data[KEY_ESP32].get(KEY_EXCLUDE_COMPONENTS)
    if excluded:
        exclude_list = ";".join(sorted(excluded))
        cg.add_platformio_option(
            "board_build.cmake_extra_args", f"-DEXCLUDE_COMPONENTS={exclude_list}"
        )
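A sketch of the cmake argument this produces when nothing re-enables any component (values taken from DEFAULT_EXCLUDED_IDF_COMPONENTS above, list truncated for illustration):

    # Hedged sketch: the sorted, semicolon-joined exclusion set becomes one cmake arg.
    excluded = {"esp_adc", "cmock", "esp_driver_i2s"}  # truncated
    print(f"-DEXCLUDE_COMPONENTS={';'.join(sorted(excluded))}")
    # -> -DEXCLUDE_COMPONENTS=cmock;esp_adc;esp_driver_i2s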


@coroutine_with_priority(CoroPriority.FINAL)
async def _add_yaml_idf_components(components: list[ConfigType]):
    """Add IDF components from YAML config with final priority to override code-added components."""
@@ -1287,18 +1143,6 @@ async def to_code(config):
    # Increase freertos tick speed from 100Hz to 1kHz so that delay() resolution is 1ms
    add_idf_sdkconfig_option("CONFIG_FREERTOS_HZ", 1000)

    # Reduce FreeRTOS max priorities from 25 to 16 to save RAM
    # pxReadyTasksLists uses 20 bytes per priority level, so this saves 180 bytes
    # All ESPHome tasks use relative priorities (configMAX_PRIORITIES - X) to scale automatically
    # See https://github.com/espressif/esp-idf/issues/13041 for context
    add_idf_sdkconfig_option("CONFIG_FREERTOS_MAX_PRIORITIES", 16)

    # Set LWIP TCP/IP task priority to fit within reduced priority range (0-15)
    # Default is 18, which would be invalid with MAX_PRIORITIES=16
    # Priority 8 maintains the original hierarchy: I2S speaker (10) > LWIP (8) > mixer (6)
    # This ensures audio I/O tasks aren't blocked by network, while network isn't starved by mixing
    add_idf_sdkconfig_option("CONFIG_LWIP_TCPIP_TASK_PRIO", 8)

    # Place non-ISR FreeRTOS functions into flash instead of IRAM
    # This saves up to 8KB of IRAM. ISR-safe functions (FromISR variants) stay in IRAM.
    # In ESP-IDF 6.0 this becomes the default and CONFIG_FREERTOS_PLACE_FUNCTIONS_INTO_FLASH
@@ -1341,10 +1185,6 @@ async def to_code(config):
    # Disable dynamic log level control to save memory
    add_idf_sdkconfig_option("CONFIG_LOG_DYNAMIC_LEVEL_CONTROL", False)

    # Disable per-tag log level filtering since dynamic level control is disabled above
    # This saves ~250 bytes of RAM (tag cache) and associated code
    add_idf_sdkconfig_option("CONFIG_LOG_TAG_LEVEL_IMPL_NONE", True)

    # Reduce PHY TX power in the event of a brownout
    add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)

@@ -1355,11 +1195,6 @@ async def to_code(config):

    # Apply LWIP optimization settings
    advanced = conf[CONF_ADVANCED]

    # Re-include any IDF components the user explicitly requested
    for component_name in advanced.get(CONF_INCLUDE_BUILTIN_IDF_COMPONENTS, []):
        include_builtin_idf_component(component_name)

    # DHCP server: only disable if explicitly set to false
    # WiFi component handles its own optimization when AP mode is not used
    # When using Arduino with Ethernet, DHCP server functions must be available
@@ -1398,18 +1233,11 @@ async def to_code(config):
    add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False)

    # Disable VFS support for termios (terminal I/O functions)
    # USB Serial JTAG VFS functions require termios support.
    # Components that need it (e.g., logger when USB_SERIAL_JTAG is supported but not selected
    # as the logger output) call require_vfs_termios().
    # ESPHome doesn't use termios functions on ESP32 (only used in host UART driver).
    # Saves approximately 1.8KB of flash when disabled (default).
    if CORE.data.get(KEY_VFS_TERMIOS_REQUIRED, False):
        # Component requires VFS termios - force enable regardless of user setting
        add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_TERMIOS", True)
    else:
        # No component needs it - allow user to control (default: disabled)
        add_idf_sdkconfig_option(
            "CONFIG_VFS_SUPPORT_TERMIOS", not advanced[CONF_DISABLE_VFS_SUPPORT_TERMIOS]
        )
    add_idf_sdkconfig_option(
        "CONFIG_VFS_SUPPORT_TERMIOS", not advanced[CONF_DISABLE_VFS_SUPPORT_TERMIOS]
    )
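The two juxtaposed add_idf_sdkconfig_option() calls above are the new (conditional) and old (unconditional) versions shown side by side by the diff. The require/override pattern used here and in the blocks that follow boils down to this sketch (function and argument names are placeholders, not from the commit):

    # Hedged sketch of the generic pattern: a component requirement always wins,
    # otherwise the user's advanced: flag decides whether the feature is disabled.
    def apply_option(option_name, required_by_component, user_disabled):
        if required_by_component:
            add_idf_sdkconfig_option(option_name, True)
        elif user_disabled:
            add_idf_sdkconfig_option(option_name, False)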

    # Disable VFS support for select() with file descriptors
    # ESPHome only uses select() with sockets via lwip_select(), which still works.
@@ -1488,61 +1316,6 @@ async def to_code(config):

    add_idf_sdkconfig_option(f"CONFIG_LOG_DEFAULT_LEVEL_{conf[CONF_LOG_LEVEL]}", True)

    # Disable OpenOCD debug stubs to save code size
    # These are used for on-chip debugging with OpenOCD/JTAG, rarely needed for ESPHome
    if advanced[CONF_DISABLE_DEBUG_STUBS]:
        add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_STUBS_ENABLE", False)

    # Disable OCD-aware exception handlers
    # When enabled, the panic handler detects JTAG debugger and halts instead of resetting
    # Most ESPHome users don't use JTAG debugging
    if advanced[CONF_DISABLE_OCD_AWARE]:
        add_idf_sdkconfig_option("CONFIG_ESP_DEBUG_OCDAWARE", False)

    # Disable USB Serial/JTAG secondary console
    # Components like logger can call require_usb_serial_jtag_secondary() to re-enable
    if CORE.data[KEY_ESP32].get(KEY_USB_SERIAL_JTAG_SECONDARY_REQUIRED, False):
        add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_USB_SERIAL_JTAG", True)
    elif advanced[CONF_DISABLE_USB_SERIAL_JTAG_SECONDARY]:
        add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_SECONDARY_NONE", True)

    # Disable /dev/null VFS initialization
    # ESPHome doesn't typically need /dev/null
    if advanced[CONF_DISABLE_DEV_NULL_VFS]:
        add_idf_sdkconfig_option("CONFIG_VFS_INITIALIZE_DEV_NULL", False)

    # Disable keeping peer certificate after TLS handshake
    # Saves ~4KB heap per connection, but prevents certificate inspection after handshake
    # Components that need it can call require_mbedtls_peer_cert()
    if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PEER_CERT_REQUIRED, False):
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", True)
    elif advanced[CONF_DISABLE_MBEDTLS_PEER_CERT]:
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_SSL_KEEP_PEER_CERTIFICATE", False)

    # Disable PKCS#7 support in mbedTLS
    # Only needed for specific certificate validation scenarios
    # Components that need it can call require_mbedtls_pkcs7()
    if CORE.data[KEY_ESP32].get(KEY_MBEDTLS_PKCS7_REQUIRED, False):
        # Component called require_mbedtls_pkcs7() - enable regardless of user setting
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", True)
    elif advanced[CONF_DISABLE_MBEDTLS_PKCS7]:
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_PKCS7_C", False)

    # Disable regi2c control functions in IRAM
    # Only needed if using analog peripherals (ADC, DAC, etc.) from ISRs while cache is disabled
    if advanced[CONF_DISABLE_REGI2C_IN_IRAM]:
        add_idf_sdkconfig_option("CONFIG_ESP_REGI2C_CTRL_FUNC_IN_IRAM", False)

    # Disable FATFS support
    # Components that need FATFS (SD card, etc.) can call require_fatfs()
    if CORE.data[KEY_ESP32].get(KEY_FATFS_REQUIRED, False):
        # Component called require_fatfs() - enable regardless of user setting
        add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", False)
        add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 2)
    elif advanced[CONF_DISABLE_FATFS]:
        add_idf_sdkconfig_option("CONFIG_FATFS_LFN_NONE", True)
        add_idf_sdkconfig_option("CONFIG_FATFS_VOLUME_COUNT", 0)

    for name, value in conf[CONF_SDKCONFIG_OPTIONS].items():
        add_idf_sdkconfig_option(name, RawSdkconfigValue(value))

@@ -1551,11 +1324,6 @@ async def to_code(config):
    if conf[CONF_COMPONENTS]:
        CORE.add_job(_add_yaml_idf_components, conf[CONF_COMPONENTS])

    # Write EXCLUDE_COMPONENTS at FINAL priority after all components have had
    # a chance to call include_builtin_idf_component() to re-enable components they need.
    # Default exclusions are added in set_core_data() during config validation.
    CORE.add_job(_write_exclude_components)


APP_PARTITION_SIZES = {
    "2MB": 0x0C0000,  # 768 KB

@@ -175,32 +175,6 @@ ESP32_BOARD_PINS = {
        "LED": 13,
        "LED_BUILTIN": 13,
    },
    "adafruit_feather_esp32s3_reversetft": {
        "BUTTON": 0,
        "A0": 18,
        "A1": 17,
        "A2": 16,
        "A3": 15,
        "A4": 14,
        "A5": 8,
        "SCK": 36,
        "MOSI": 35,
        "MISO": 37,
        "RX": 38,
        "TX": 39,
        "SCL": 4,
        "SDA": 3,
        "NEOPIXEL": 33,
        "PIN_NEOPIXEL": 33,
        "NEOPIXEL_POWER": 21,
        "TFT_I2C_POWER": 7,
        "TFT_CS": 42,
        "TFT_DC": 40,
        "TFT_RESET": 41,
        "TFT_BACKLIGHT": 45,
        "LED": 13,
        "LED_BUILTIN": 13,
    },
    "adafruit_feather_esp32s3_tft": {
        "BUTTON": 0,
        "A0": 18,

@@ -6,7 +6,6 @@ KEY_FLASH_SIZE = "flash_size"
KEY_VARIANT = "variant"
KEY_SDKCONFIG_OPTIONS = "sdkconfig_options"
KEY_COMPONENTS = "components"
KEY_EXCLUDE_COMPONENTS = "exclude_components"
KEY_REPO = "repo"
KEY_REF = "ref"
KEY_REFRESH = "refresh"

@@ -3,7 +3,6 @@
#include "esphome/core/defines.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/core/task_priorities.h"
#include "preferences.h"
#include <esp_clk_tree.h>
#include <esp_cpu.h>
@@ -67,14 +66,10 @@ void loop_task(void *pv_params) {
extern "C" void app_main() {
  initArduino();
  esp32::setup_preferences();
  // TASK_PRIORITY_APPLICATION: baseline priority for main loop - all component loops
  // run here. Higher priority tasks (audio, network) preempt this when needed.
#if CONFIG_FREERTOS_UNICORE
  xTaskCreate(loop_task, "loopTask", ESPHOME_LOOP_TASK_STACK_SIZE, nullptr, TASK_PRIORITY_APPLICATION,
              &loop_task_handle);
  xTaskCreate(loop_task, "loopTask", ESPHOME_LOOP_TASK_STACK_SIZE, nullptr, 1, &loop_task_handle);
#else
  xTaskCreatePinnedToCore(loop_task, "loopTask", ESPHOME_LOOP_TASK_STACK_SIZE, nullptr, TASK_PRIORITY_APPLICATION,
                          &loop_task_handle, 1);
  xTaskCreatePinnedToCore(loop_task, "loopTask", ESPHOME_LOOP_TASK_STACK_SIZE, nullptr, 1, &loop_task_handle, 1);
#endif
}


@@ -4,7 +4,6 @@
#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/task_priorities.h"

#include <freertos/task.h>

@@ -43,13 +42,11 @@ void ESP32Camera::setup() {
  /* initialize RTOS */
  this->framebuffer_get_queue_ = xQueueCreate(1, sizeof(camera_fb_t *));
  this->framebuffer_return_queue_ = xQueueCreate(1, sizeof(camera_fb_t *));
  // TASK_PRIORITY_APPLICATION: same as main loop - camera capture is buffered,
  // not real-time critical like audio
  xTaskCreatePinnedToCore(&ESP32Camera::framebuffer_task,
                          "framebuffer_task",          // name
                          FRAMEBUFFER_TASK_STACK_SIZE, // stack size
                          this,                        // task pv params
                          TASK_PRIORITY_APPLICATION,   // priority
                          1,                           // priority
                          nullptr,                     // handle
                          1                            // core
  );

@@ -5,7 +5,6 @@ from esphome import pins
import esphome.codegen as cg
from esphome.components import esp32, light
from esphome.components.const import CONF_USE_PSRAM
from esphome.components.esp32 import include_builtin_idf_component
import esphome.config_validation as cv
from esphome.const import (
    CONF_CHIPSET,
@@ -130,9 +129,6 @@ CONFIG_SCHEMA = cv.All(


async def to_code(config):
    # Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
    include_builtin_idf_component("esp_driver_rmt")

    var = cg.new_Pvariable(config[CONF_OUTPUT_ID])
    await light.register_light(var, config)
    await cg.register_component(var, config)

@@ -6,7 +6,6 @@ from esphome.components.esp32 import (
    VARIANT_ESP32S3,
    get_esp32_variant,
    gpio,
    include_builtin_idf_component,
)
import esphome.config_validation as cv
from esphome.const import (
@@ -267,9 +266,6 @@ CONFIG_SCHEMA = cv.All(


async def to_code(config):
    # Re-enable ESP-IDF's touch sensor driver (excluded by default to save compile time)
    include_builtin_idf_component("esp_driver_touch_sens")

    touch = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(touch, config)


@@ -14,7 +14,6 @@ from esphome.components.esp32 import (
    add_idf_component,
    add_idf_sdkconfig_option,
    get_esp32_variant,
    include_builtin_idf_component,
)
from esphome.components.network import ip_address_literal
from esphome.components.spi import CONF_INTERFACE_INDEX, get_spi_interface
@@ -420,9 +419,6 @@ async def to_code(config):
        # Also disable WiFi/BT coexistence since WiFi is disabled
        add_idf_sdkconfig_option("CONFIG_SW_COEXIST_ENABLE", False)

    # Re-enable ESP-IDF's Ethernet driver (excluded by default to save compile time)
    include_builtin_idf_component("esp_eth")

    if config[CONF_TYPE] == "LAN8670":
        # Add LAN867x 10BASE-T1S PHY support component
        add_idf_component(name="espressif/lan867x", ref="2.0.0")

@@ -9,55 +9,29 @@ from esphome.const import (
    CONF_VALUE,
)
from esphome.core import CoroPriority, coroutine_with_priority
from esphome.types import ConfigType

CODEOWNERS = ["@esphome/core"]
globals_ns = cg.esphome_ns.namespace("globals")
GlobalsComponent = globals_ns.class_("GlobalsComponent", cg.Component)
RestoringGlobalsComponent = globals_ns.class_(
    "RestoringGlobalsComponent", cg.PollingComponent
)
RestoringGlobalsComponent = globals_ns.class_("RestoringGlobalsComponent", cg.Component)
RestoringGlobalStringComponent = globals_ns.class_(
    "RestoringGlobalStringComponent", cg.PollingComponent
    "RestoringGlobalStringComponent", cg.Component
)
GlobalVarSetAction = globals_ns.class_("GlobalVarSetAction", automation.Action)

CONF_MAX_RESTORE_DATA_LENGTH = "max_restore_data_length"

# Base schema fields shared by both variants
_BASE_SCHEMA = {
    cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
    cv.Required(CONF_TYPE): cv.string_strict,
    cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
    cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
}

# Non-restoring globals: regular Component (no polling needed)
_NON_RESTORING_SCHEMA = cv.Schema(
    {
        **_BASE_SCHEMA,
        cv.Optional(CONF_RESTORE_VALUE, default=False): cv.boolean,
    }
).extend(cv.COMPONENT_SCHEMA)

# Restoring globals: PollingComponent with configurable update_interval
_RESTORING_SCHEMA = cv.Schema(
    {
        **_BASE_SCHEMA,
        cv.Optional(CONF_RESTORE_VALUE, default=True): cv.boolean,
    }
).extend(cv.polling_component_schema("1s"))


def _globals_schema(config: ConfigType) -> ConfigType:
    """Select schema based on restore_value setting."""
    if config.get(CONF_RESTORE_VALUE, False):
        return _RESTORING_SCHEMA(config)
    return _NON_RESTORING_SCHEMA(config)
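A sketch of how the selector behaves for the two cases (keys reuse the constants above; values are illustrative, not from the commit):

    # Hedged sketch: restore_value routes the config to the polling schema,
    # everything else stays a plain Component schema.
    _globals_schema({CONF_ID: "my_global", CONF_TYPE: "int", CONF_RESTORE_VALUE: True})
    # -> validated by _RESTORING_SCHEMA (PollingComponent, default update_interval 1s)
    _globals_schema({CONF_ID: "my_global", CONF_TYPE: "int"})
    # -> validated by _NON_RESTORING_SCHEMA (plain Component)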


MULTI_CONF = True
CONFIG_SCHEMA = _globals_schema
CONFIG_SCHEMA = cv.Schema(
    {
        cv.Required(CONF_ID): cv.declare_id(GlobalsComponent),
        cv.Required(CONF_TYPE): cv.string_strict,
        cv.Optional(CONF_INITIAL_VALUE): cv.string_strict,
        cv.Optional(CONF_RESTORE_VALUE, default=False): cv.boolean,
        cv.Optional(CONF_MAX_RESTORE_DATA_LENGTH): cv.int_range(0, 254),
    }
).extend(cv.COMPONENT_SCHEMA)


# Run with low priority so that namespaces are registered first

@@ -5,7 +5,8 @@
#include "esphome/core/helpers.h"
#include <cstring>

namespace esphome::globals {
namespace esphome {
namespace globals {

template<typename T> class GlobalsComponent : public Component {
 public:
@@ -23,14 +24,13 @@ template<typename T> class GlobalsComponent : public Component {
  T value_{};
};

template<typename T> class RestoringGlobalsComponent : public PollingComponent {
template<typename T> class RestoringGlobalsComponent : public Component {
 public:
  using value_type = T;
  explicit RestoringGlobalsComponent() : PollingComponent(1000) {}
  explicit RestoringGlobalsComponent(T initial_value) : PollingComponent(1000), value_(initial_value) {}
  explicit RestoringGlobalsComponent() = default;
  explicit RestoringGlobalsComponent(T initial_value) : value_(initial_value) {}
  explicit RestoringGlobalsComponent(
      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
      : PollingComponent(1000) {
      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
    memcpy(this->value_, initial_value.data(), sizeof(T));
  }

@@ -44,7 +44,7 @@ template<typename T> class RestoringGlobalsComponent : public PollingComponent {

  float get_setup_priority() const override { return setup_priority::HARDWARE; }

  void update() override { store_value_(); }
  void loop() override { store_value_(); }

  void on_shutdown() override { store_value_(); }

@@ -66,14 +66,13 @@ template<typename T> class RestoringGlobalsComponent : public PollingComponent {
};

// Use with string or subclasses of strings
template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public PollingComponent {
template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public Component {
 public:
  using value_type = T;
  explicit RestoringGlobalStringComponent() : PollingComponent(1000) {}
  explicit RestoringGlobalStringComponent(T initial_value) : PollingComponent(1000) { this->value_ = initial_value; }
  explicit RestoringGlobalStringComponent() = default;
  explicit RestoringGlobalStringComponent(T initial_value) { this->value_ = initial_value; }
  explicit RestoringGlobalStringComponent(
      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value)
      : PollingComponent(1000) {
      std::array<typename std::remove_extent<T>::type, std::extent<T>::value> initial_value) {
    memcpy(this->value_, initial_value.data(), sizeof(T));
  }

@@ -91,7 +90,7 @@ template<typename T, uint8_t SZ> class RestoringGlobalStringComponent : public P

  float get_setup_priority() const override { return setup_priority::HARDWARE; }

  void update() override { store_value_(); }
  void loop() override { store_value_(); }

  void on_shutdown() override { store_value_(); }

@@ -145,4 +144,5 @@ template<typename T> T &id(GlobalsComponent<T> *value) { return value->value();
template<typename T> T &id(RestoringGlobalsComponent<T> *value) { return value->value(); }
template<typename T, uint8_t SZ> T &id(RestoringGlobalStringComponent<T, SZ> *value) { return value->value(); }

}  // namespace esphome::globals
}  // namespace globals
}  // namespace esphome

@@ -155,9 +155,6 @@ async def to_code(config):
        cg.add(var.set_watchdog_timeout(timeout_ms))

    if CORE.is_esp32:
        # Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
        esp32.include_builtin_idf_component("esp_http_client")

    cg.add(var.set_buffer_size_rx(config[CONF_BUFFER_SIZE_RX]))
    cg.add(var.set_buffer_size_tx(config[CONF_BUFFER_SIZE_TX]))
    cg.add(var.set_verify_ssl(config[CONF_VERIFY_SSL]))

@@ -2,9 +2,6 @@

#include "esphome/core/application.h"
#include "esphome/core/version.h"
#ifdef USE_ESP32
#include "esphome/core/task_priorities.h"
#endif

#include "esphome/components/json/json_util.h"
#include "esphome/components/network/util.h"
@@ -49,9 +46,7 @@ void HttpRequestUpdate::update() {
    return;
  }
#ifdef USE_ESP32
  // TASK_PRIORITY_APPLICATION: same as main loop - update check is background work
  xTaskCreate(HttpRequestUpdate::update_task, "update_task", 8192, (void *) this, TASK_PRIORITY_APPLICATION,
              &this->update_task_handle_);
  xTaskCreate(HttpRequestUpdate::update_task, "update_task", 8192, (void *) this, 1, &this->update_task_handle_);
#else
  this->update_task(this);
#endif

@@ -11,6 +11,12 @@ namespace i2c {
static const char *const TAG = "i2c";

void I2CBus::i2c_scan_() {
  // suppress logs from the IDF I2C library during the scan
#if defined(USE_ESP32) && defined(USE_LOGGER)
  auto previous = esp_log_level_get("*");
  esp_log_level_set("*", ESP_LOG_NONE);
#endif

  for (uint8_t address = 8; address != 120; address++) {
    auto err = write_readv(address, nullptr, 0, nullptr, 0);
    if (err == ERROR_OK) {
@@ -21,6 +27,9 @@ void I2CBus::i2c_scan_() {
    // it takes 16sec to scan on nrf52. It prevents board reset.
    arch_feed_wdt();
  }
#if defined(USE_ESP32) && defined(USE_LOGGER)
  esp_log_level_set("*", previous);
#endif
}

ErrorCode I2CDevice::read_register(uint8_t a_register, uint8_t *data, size_t len) {

@@ -1,11 +1,6 @@
from esphome import pins
import esphome.codegen as cg
from esphome.components.esp32 import (
    add_idf_sdkconfig_option,
    get_esp32_variant,
    include_builtin_idf_component,
)
from esphome.components.esp32.const import (
    VARIANT_ESP32,
    VARIANT_ESP32C3,
    VARIANT_ESP32C5,
@@ -15,6 +10,8 @@ from esphome.components.esp32.const import (
    VARIANT_ESP32P4,
    VARIANT_ESP32S2,
    VARIANT_ESP32S3,
    add_idf_sdkconfig_option,
    get_esp32_variant,
)
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_CHANNEL, CONF_ID, CONF_SAMPLE_RATE
@@ -275,10 +272,6 @@ FINAL_VALIDATE_SCHEMA = _final_validate
async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)

    # Re-enable ESP-IDF's I2S driver (excluded by default to save compile time)
    include_builtin_idf_component("esp_driver_i2s")

    if use_legacy():
        cg.add_define("USE_I2S_LEGACY")


@@ -11,7 +11,6 @@

#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/task_priorities.h"

#include "esphome/components/audio/audio.h"

@@ -23,6 +22,7 @@ static const UBaseType_t MAX_LISTENERS = 16;
static const uint32_t READ_DURATION_MS = 16;

static const size_t TASK_STACK_SIZE = 4096;
static const ssize_t TASK_PRIORITY = 23;

static const char *const TAG = "i2s_audio.microphone";

@@ -520,10 +520,8 @@ void I2SAudioMicrophone::loop() {
      }

      if (this->task_handle_ == nullptr) {
        // TASK_PRIORITY_AUDIO_CAPTURE: highest application priority - real-time audio
        // input cannot tolerate delays without dropping samples
        xTaskCreate(I2SAudioMicrophone::mic_task, "mic_task", TASK_STACK_SIZE, (void *) this,
                    TASK_PRIORITY_AUDIO_CAPTURE, &this->task_handle_);
        xTaskCreate(I2SAudioMicrophone::mic_task, "mic_task", TASK_STACK_SIZE, (void *) this, TASK_PRIORITY,
                    &this->task_handle_);

        if (this->task_handle_ == nullptr) {
          ESP_LOGE(TAG, "Task failed to start, retrying in 1 second");

@@ -14,7 +14,6 @@
#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/task_priorities.h"

#include "esp_timer.h"

@@ -25,6 +24,7 @@ static const uint32_t DMA_BUFFER_DURATION_MS = 15;
static const size_t DMA_BUFFERS_COUNT = 4;

static const size_t TASK_STACK_SIZE = 4096;
static const ssize_t TASK_PRIORITY = 19;

static const size_t I2S_EVENT_QUEUE_COUNT = DMA_BUFFERS_COUNT + 1;

@@ -151,10 +151,8 @@ void I2SAudioSpeaker::loop() {
      }

      if (this->speaker_task_handle_ == nullptr) {
        // TASK_PRIORITY_AUDIO_OUTPUT: high priority for real-time audio output,
        // below capture (TASK_PRIORITY_AUDIO_CAPTURE) but above network tasks
        xTaskCreate(I2SAudioSpeaker::speaker_task, "speaker_task", TASK_STACK_SIZE, (void *) this,
                    TASK_PRIORITY_AUDIO_OUTPUT, &this->speaker_task_handle_);
        xTaskCreate(I2SAudioSpeaker::speaker_task, "speaker_task", TASK_STACK_SIZE, (void *) this, TASK_PRIORITY,
                    &this->speaker_task_handle_);

        if (this->speaker_task_handle_ == nullptr) {
          ESP_LOGE(TAG, "Task failed to start, retrying in 1 second");

@@ -451,7 +451,7 @@ void LD2450Component::handle_periodic_data_() {
    int16_t ty = 0;
    int16_t td = 0;
    int16_t ts = 0;
    float angle = 0;
    int16_t angle = 0;
    uint8_t index = 0;
    Direction direction{DIRECTION_UNDEFINED};
    bool is_moving = false;

@@ -143,7 +143,6 @@ CONFIG_SCHEMA = CONFIG_SCHEMA.extend(
            ],
            icon=ICON_FORMAT_TEXT_ROTATION_ANGLE_UP,
            unit_of_measurement=UNIT_DEGREES,
            accuracy_decimals=1,
        ),
        cv.Optional(CONF_DISTANCE): sensor.sensor_schema(
            device_class=DEVICE_CLASS_DISTANCE,

@@ -16,8 +16,6 @@ from esphome.components.esp32 import (
    VARIANT_ESP32S3,
    add_idf_sdkconfig_option,
    get_esp32_variant,
    require_usb_serial_jtag_secondary,
    require_vfs_termios,
)
from esphome.components.libretiny import get_libretiny_component, get_libretiny_family
from esphome.components.libretiny.const import (
@@ -399,15 +397,9 @@ async def to_code(config):
        elif config[CONF_HARDWARE_UART] == USB_SERIAL_JTAG:
            add_idf_sdkconfig_option("CONFIG_ESP_CONSOLE_USB_SERIAL_JTAG", True)
            cg.add_define("USE_LOGGER_UART_SELECTION_USB_SERIAL_JTAG")
    # Define platform support flags for components that need auto-detection
    try:
        uart_selection(USB_SERIAL_JTAG)
        cg.add_define("USE_LOGGER_USB_SERIAL_JTAG")
        # USB Serial JTAG code is compiled when platform supports it.
        # Enable secondary USB serial JTAG console so the VFS functions are available.
        if CORE.is_esp32 and config[CONF_HARDWARE_UART] != USB_SERIAL_JTAG:
            require_usb_serial_jtag_secondary()
            require_vfs_termios()
    except cv.Invalid:
        pass
    try:

@@ -114,6 +114,9 @@ void Logger::pre_setup() {

  global_logger = this;
  esp_log_set_vprintf(esp_idf_log_vprintf_);
  if (ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE) {
    esp_log_level_set("*", ESP_LOG_VERBOSE);
  }

  ESP_LOGI(TAG, "Log initialized");
}

@@ -28,10 +28,11 @@ CONFIG_SCHEMA = (


async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID], config[CONF_NUM_CHIPS])
    var = cg.new_Pvariable(config[CONF_ID])
    await spi.register_spi_device(var, config, write_only=True)
    await display.register_display(var, config)

    cg.add(var.set_num_chips(config[CONF_NUM_CHIPS]))
    cg.add(var.set_intensity(config[CONF_INTENSITY]))
    cg.add(var.set_reverse(config[CONF_REVERSE_ENABLE]))


@@ -3,7 +3,8 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

namespace esphome::max7219 {
namespace esphome {
namespace max7219 {

static const char *const TAG = "max7219";

@@ -114,14 +115,12 @@ const uint8_t MAX7219_ASCII_TO_RAW[95] PROGMEM = {
};

float MAX7219Component::get_setup_priority() const { return setup_priority::PROCESSOR; }

MAX7219Component::MAX7219Component(uint8_t num_chips) : num_chips_(num_chips) {
  this->buffer_ = new uint8_t[this->num_chips_ * 8];  // NOLINT
  memset(this->buffer_, 0, this->num_chips_ * 8);
}

void MAX7219Component::setup() {
  this->spi_setup();
  this->buffer_ = new uint8_t[this->num_chips_ * 8];  // NOLINT
  for (uint8_t i = 0; i < this->num_chips_ * 8; i++)
    this->buffer_[i] = 0;

  // let's assume the user has all 8 digits connected, only important in daisy chained setups anyway
  this->send_to_all_(MAX7219_REGISTER_SCAN_LIMIT, 7);
  // let's use our own ASCII -> led pattern encoding
@@ -230,6 +229,7 @@ void MAX7219Component::set_intensity(uint8_t intensity) {
    this->intensity_ = intensity;
  }
}
void MAX7219Component::set_num_chips(uint8_t num_chips) { this->num_chips_ = num_chips; }

uint8_t MAX7219Component::strftime(uint8_t pos, const char *format, ESPTime time) {
  char buffer[64];
@@ -240,4 +240,5 @@ uint8_t MAX7219Component::strftime(uint8_t pos, const char *format, ESPTime time
}
uint8_t MAX7219Component::strftime(const char *format, ESPTime time) { return this->strftime(0, format, time); }

}  // namespace esphome::max7219
}  // namespace max7219
}  // namespace esphome

@@ -6,7 +6,8 @@
#include "esphome/components/spi/spi.h"
#include "esphome/components/display/display.h"

namespace esphome::max7219 {
namespace esphome {
namespace max7219 {

class MAX7219Component;

@@ -16,8 +17,6 @@ class MAX7219Component : public PollingComponent,
                         public spi::SPIDevice<spi::BIT_ORDER_MSB_FIRST, spi::CLOCK_POLARITY_LOW,
                                               spi::CLOCK_PHASE_LEADING, spi::DATA_RATE_1MHZ> {
 public:
  explicit MAX7219Component(uint8_t num_chips);

  void set_writer(max7219_writer_t &&writer);

  void setup() override;
@@ -31,6 +30,7 @@ class MAX7219Component : public PollingComponent,
  void display();

  void set_intensity(uint8_t intensity);
  void set_num_chips(uint8_t num_chips);
  void set_reverse(bool reverse) { this->reverse_ = reverse; };

  /// Evaluate the printf-format and print the result at the given position.
@@ -56,9 +56,10 @@ class MAX7219Component : public PollingComponent,
  uint8_t intensity_{15};     // Intensity of the display from 0 to 15 (most)
  bool intensity_changed_{};  // True if we need to re-send the intensity
  uint8_t num_chips_{1};
  uint8_t *buffer_{nullptr};
  uint8_t *buffer_;
  bool reverse_{false};
  max7219_writer_t writer_{};
};

}  // namespace esphome::max7219
}  // namespace max7219
}  // namespace esphome

@@ -12,10 +12,6 @@ namespace esphome::mdns {
static const char *const TAG = "mdns";

static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_SERVICE_COUNT> &services) {
#ifdef USE_OPENTHREAD
  // OpenThread handles service registration via SRP client
  // Services are compiled by MDNSComponent::compile_records_() and consumed by OpenThreadSrpComponent
#else
  esp_err_t err = mdns_init();
  if (err != ESP_OK) {
    ESP_LOGW(TAG, "Init failed: %s", esp_err_to_name(err));
@@ -45,16 +41,13 @@ static void register_esp32(MDNSComponent *comp, StaticVector<MDNSService, MDNS_S
      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
    }
  }
#endif
}

void MDNSComponent::setup() { this->setup_buffers_and_register_(register_esp32); }

void MDNSComponent::on_shutdown() {
#ifndef USE_OPENTHREAD
  mdns_free();
  delay(40);  // Allow the mdns packets announcing service removal to be sent
#endif
}

}  // namespace esphome::mdns

@@ -6,7 +6,6 @@
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include "esphome/core/task_priorities.h"

#include "esphome/components/audio/audio_transfer_buffer.h"

@@ -26,6 +25,7 @@ static const size_t DATA_TIMEOUT_MS = 50;
static const uint32_t RING_BUFFER_DURATION_MS = 120;

static const uint32_t INFERENCE_TASK_STACK_SIZE = 3072;
static const UBaseType_t INFERENCE_TASK_PRIORITY = 3;

enum EventGroupBits : uint32_t {
  COMMAND_STOP = (1 << 0),  // Signals the inference task should stop
@@ -305,10 +305,8 @@ void MicroWakeWord::loop() {
        return;
      }

      // TASK_PRIORITY_INFERENCE: above main loop (TASK_PRIORITY_APPLICATION) but below
      // protocol tasks (TASK_PRIORITY_PROTOCOL) - ML inference is background work
      xTaskCreate(MicroWakeWord::inference_task, "mww", INFERENCE_TASK_STACK_SIZE, (void *) this,
                  TASK_PRIORITY_INFERENCE, &this->inference_task_handle_);
                  INFERENCE_TASK_PRIORITY, &this->inference_task_handle_);

      if (this->inference_task_handle_ == nullptr) {
        FrontendFreeStateContents(&this->frontend_state_);  // Deallocate frontend state

@@ -1,39 +1,6 @@
#include "mipi_spi.h"
#include "esphome/core/log.h"

namespace esphome::mipi_spi {

void internal_dump_config(const char *model, int width, int height, int offset_width, int offset_height, uint8_t madctl,
                          bool invert_colors, int display_bits, bool is_big_endian, const optional<uint8_t> &brightness,
                          GPIOPin *cs, GPIOPin *reset, GPIOPin *dc, int spi_mode, uint32_t data_rate, int bus_width) {
  ESP_LOGCONFIG(TAG,
                "MIPI_SPI Display\n"
                "  Model: %s\n"
                "  Width: %d\n"
                "  Height: %d\n"
                "  Swap X/Y: %s\n"
                "  Mirror X: %s\n"
                "  Mirror Y: %s\n"
                "  Invert colors: %s\n"
                "  Color order: %s\n"
                "  Display pixels: %d bits\n"
                "  Endianness: %s\n"
                "  SPI Mode: %d\n"
                "  SPI Data rate: %uMHz\n"
                "  SPI Bus width: %d",
                model, width, height, YESNO(madctl & MADCTL_MV), YESNO(madctl & (MADCTL_MX | MADCTL_XFLIP)),
                YESNO(madctl & (MADCTL_MY | MADCTL_YFLIP)), YESNO(invert_colors), (madctl & MADCTL_BGR) ? "BGR" : "RGB",
                display_bits, is_big_endian ? "Big" : "Little", spi_mode, static_cast<unsigned>(data_rate / 1000000),
                bus_width);
  LOG_PIN("  CS Pin: ", cs);
  LOG_PIN("  Reset Pin: ", reset);
  LOG_PIN("  DC Pin: ", dc);
  if (offset_width != 0)
    ESP_LOGCONFIG(TAG, "  Offset width: %d", offset_width);
  if (offset_height != 0)
    ESP_LOGCONFIG(TAG, "  Offset height: %d", offset_height);
  if (brightness.has_value())
    ESP_LOGCONFIG(TAG, "  Brightness: %u", brightness.value());
}

}  // namespace esphome::mipi_spi
namespace esphome {
namespace mipi_spi {}  // namespace mipi_spi
}  // namespace esphome

@@ -63,11 +63,6 @@ enum BusType {
  BUS_TYPE_SINGLE_16 = 16,  // Single bit bus, but 16 bits per transfer
};

// Helper function for dump_config - defined in mipi_spi.cpp to allow use of LOG_PIN macro
void internal_dump_config(const char *model, int width, int height, int offset_width, int offset_height, uint8_t madctl,
                          bool invert_colors, int display_bits, bool is_big_endian, const optional<uint8_t> &brightness,
                          GPIOPin *cs, GPIOPin *reset, GPIOPin *dc, int spi_mode, uint32_t data_rate, int bus_width);

/**
 * Base class for MIPI SPI displays.
 * All the methods are defined here in the header file, as it is not possible to define templated methods in a cpp file.
@@ -206,9 +201,37 @@ class MipiSpi : public display::Display,
  }

  void dump_config() override {
    internal_dump_config(this->model_, WIDTH, HEIGHT, OFFSET_WIDTH, OFFSET_HEIGHT, this->madctl_, this->invert_colors_,
                         DISPLAYPIXEL * 8, IS_BIG_ENDIAN, this->brightness_, this->cs_, this->reset_pin_, this->dc_pin_,
                         this->mode_, this->data_rate_, BUS_TYPE);
    esph_log_config(TAG,
                    "MIPI_SPI Display\n"
                    "  Model: %s\n"
                    "  Width: %u\n"
                    "  Height: %u",
                    this->model_, WIDTH, HEIGHT);
    if constexpr (OFFSET_WIDTH != 0)
      esph_log_config(TAG, "  Offset width: %u", OFFSET_WIDTH);
    if constexpr (OFFSET_HEIGHT != 0)
      esph_log_config(TAG, "  Offset height: %u", OFFSET_HEIGHT);
    esph_log_config(TAG,
                    "  Swap X/Y: %s\n"
                    "  Mirror X: %s\n"
                    "  Mirror Y: %s\n"
                    "  Invert colors: %s\n"
                    "  Color order: %s\n"
                    "  Display pixels: %d bits\n"
                    "  Endianness: %s\n",
                    YESNO(this->madctl_ & MADCTL_MV), YESNO(this->madctl_ & (MADCTL_MX | MADCTL_XFLIP)),
                    YESNO(this->madctl_ & (MADCTL_MY | MADCTL_YFLIP)), YESNO(this->invert_colors_),
                    this->madctl_ & MADCTL_BGR ? "BGR" : "RGB", DISPLAYPIXEL * 8, IS_BIG_ENDIAN ? "Big" : "Little");
    if (this->brightness_.has_value())
      esph_log_config(TAG, "  Brightness: %u", this->brightness_.value());
    log_pin(TAG, "  CS Pin: ", this->cs_);
    log_pin(TAG, "  Reset Pin: ", this->reset_pin_);
    log_pin(TAG, "  DC Pin: ", this->dc_pin_);
    esph_log_config(TAG,
                    "  SPI Mode: %d\n"
                    "  SPI Data rate: %dMHz\n"
                    "  SPI Bus width: %d",
                    this->mode_, static_cast<unsigned>(this->data_rate_ / 1000000), BUS_TYPE);
  }

 protected:

@@ -5,7 +5,6 @@
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include "esphome/core/task_priorities.h"

#include <algorithm>
#include <cstring>
@@ -13,6 +12,8 @@
namespace esphome {
namespace mixer_speaker {

static const UBaseType_t MIXER_TASK_PRIORITY = 10;

static const uint32_t TRANSFER_BUFFER_DURATION_MS = 50;
static const uint32_t TASK_DELAY_MS = 25;

@@ -384,10 +385,8 @@ esp_err_t MixerSpeaker::start_task_() {
  }

  if (this->task_handle_ == nullptr) {
    // TASK_PRIORITY_AUDIO_MIXER: below I2S tasks (TASK_PRIORITY_AUDIO_OUTPUT) but
    // above protocol tasks - mixing is buffered but feeds real-time output
    this->task_handle_ = xTaskCreateStatic(audio_mixer_task, "mixer", TASK_STACK_SIZE, (void *) this,
                                           TASK_PRIORITY_AUDIO_MIXER, this->task_stack_buffer_, &this->task_stack_);
                                           MIXER_TASK_PRIORITY, this->task_stack_buffer_, &this->task_stack_);
  }

  if (this->task_handle_ == nullptr) {

@@ -4,10 +4,7 @@ from esphome import automation
from esphome.automation import Condition
import esphome.codegen as cg
from esphome.components import logger, socket
from esphome.components.esp32 import (
    add_idf_sdkconfig_option,
    include_builtin_idf_component,
)
from esphome.components.esp32 import add_idf_sdkconfig_option
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
@@ -363,8 +360,6 @@ async def to_code(config):
    # This enables low-latency MQTT event processing instead of waiting for select() timeout
    if CORE.is_esp32:
        socket.require_wake_loop_threadsafe()
        # Re-enable ESP-IDF's mqtt component (excluded by default to save compile time)
        include_builtin_idf_component("mqtt")

    cg.add_define("USE_MQTT")
    cg.add_global(mqtt_ns.using)

@@ -61,10 +61,7 @@ bool MQTTBackendESP32::initialize_() {
|
||||
// Create the task only after MQTT client is initialized successfully
|
||||
// Use larger stack size when TLS is enabled
|
||||
size_t stack_size = this->ca_certificate_.has_value() ? TASK_STACK_SIZE_TLS : TASK_STACK_SIZE;
|
||||
// TASK_PRIORITY_PROTOCOL: above main loop (TASK_PRIORITY_APPLICATION) but below
|
||||
// audio tasks - MQTT needs responsive scheduling for message handling
|
||||
xTaskCreate(esphome_mqtt_task, "esphome_mqtt", stack_size, (void *) this, TASK_PRIORITY_PROTOCOL,
|
||||
&this->task_handle_);
|
||||
xTaskCreate(esphome_mqtt_task, "esphome_mqtt", stack_size, (void *) this, TASK_PRIORITY, &this->task_handle_);
|
||||
if (this->task_handle_ == nullptr) {
|
||||
ESP_LOGE(TAG, "Failed to create MQTT task");
|
||||
// Clean up MQTT client since we can't start the async task
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/lock_free_queue.h"
|
||||
#include "esphome/core/event_pool.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
|
||||
namespace esphome::mqtt {
|
||||
|
||||
@@ -118,7 +117,8 @@ class MQTTBackendESP32 final : public MQTTBackend {
|
||||
static const size_t MQTT_BUFFER_SIZE = 4096;
|
||||
static const size_t TASK_STACK_SIZE = 3072;
|
||||
static const size_t TASK_STACK_SIZE_TLS = 4096; // Larger stack for TLS operations
|
||||
static const uint8_t MQTT_QUEUE_LENGTH = 30; // 30*12 bytes = 360
|
||||
static const ssize_t TASK_PRIORITY = 5;
|
||||
static const uint8_t MQTT_QUEUE_LENGTH = 30; // 30*12 bytes = 360
|
||||
|
||||
void set_keep_alive(uint16_t keep_alive) final { this->keep_alive_ = keep_alive; }
|
||||
void set_client_id(const char *client_id) final { this->client_id_ = client_id; }
|
||||
|
||||
@@ -643,34 +643,10 @@ static bool topic_match(const char *message, const char *subscription) {
|
||||
}
|
||||
|
||||
void MQTTClientComponent::on_message(const std::string &topic, const std::string &payload) {
|
||||
#ifdef USE_ESP8266
|
||||
// IMPORTANT: This defer is REQUIRED to prevent stack overflow crashes on ESP8266.
|
||||
//
|
||||
// On ESP8266, this callback is invoked directly from the lwIP/AsyncTCP network stack
|
||||
// which runs in the "sys" context with a very limited stack (~4KB). By the time we
|
||||
// reach this function, the stack is already partially consumed by the network
|
||||
// processing chain: tcp_input -> AsyncClient::_recv -> AsyncMqttClient::_onMessage -> here.
|
||||
//
|
||||
// MQTT subscription callbacks can trigger arbitrary user actions (automations, HTTP
|
||||
// requests, sensor updates, etc.) which may have deep call stacks of their own.
|
||||
// For example, an HTTP request action requires: DNS lookup -> TCP connect -> TLS
|
||||
// handshake (if HTTPS) -> request formatting. This easily overflows the remaining
|
||||
// system stack space, causing a LoadStoreAlignmentCause exception or silent corruption.
|
||||
//
|
||||
// By deferring to the main loop, we ensure callbacks execute with a fresh, full-size
|
||||
// stack in the normal application context rather than the constrained network task.
|
||||
//
|
||||
// DO NOT REMOVE THIS DEFER without understanding the above. It may appear to work
|
||||
// in simple tests but will cause crashes with complex automations.
|
||||
this->defer([this, topic, payload]() {
|
||||
#endif
|
||||
for (auto &subscription : this->subscriptions_) {
|
||||
if (topic_match(topic.c_str(), subscription.topic.c_str()))
|
||||
subscription.callback(topic, payload);
|
||||
}
|
||||
#ifdef USE_ESP8266
|
||||
});
|
||||
#endif
|
||||
for (auto &subscription : this->subscriptions_) {
|
||||
if (topic_match(topic.c_str(), subscription.topic.c_str()))
|
||||
subscription.callback(topic, payload);
|
||||
}
|
||||
}
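The comment above describes the general remedy for callbacks that arrive on a constrained stack: capture the payload, queue it, and run the heavy work from loop(). A minimal self-contained sketch of that idea follows; it is illustrative only and is not the ESPHome scheduler that Component::defer() actually uses. In the real component the topic and payload strings are captured by value, so the network buffers can be reused as soon as the callback returns.

#include <functional>
#include <queue>
#include <utility>

// Illustrative only: a tiny deferred-work queue mirroring the defer-to-main-loop pattern.
class DeferQueue {
 public:
  // Called from the network/sys context: store the work, do not run it here.
  void defer(std::function<void()> fn) { this->pending_.push(std::move(fn)); }

  // Called from the main loop: run queued work with the full application stack.
  void loop() {
    while (!this->pending_.empty()) {
      this->pending_.front()();
      this->pending_.pop();
    }
  }

 private:
  std::queue<std::function<void()>> pending_;
};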
|
||||
// Setters
|
||||
|
||||
@@ -1,12 +1,7 @@
|
||||
from esphome import pins
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import light
|
||||
from esphome.components.esp32 import (
|
||||
VARIANT_ESP32C3,
|
||||
VARIANT_ESP32S3,
|
||||
get_esp32_variant,
|
||||
include_builtin_idf_component,
|
||||
)
|
||||
from esphome.components.esp32 import VARIANT_ESP32C3, VARIANT_ESP32S3, get_esp32_variant
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_CHANNEL,
|
||||
@@ -210,10 +205,6 @@ async def to_code(config):
|
||||
has_white = "W" in config[CONF_TYPE]
|
||||
method = config[CONF_METHOD]
|
||||
|
||||
# Re-enable ESP-IDF's RMT driver if using RMT method (excluded by default)
|
||||
if CORE.is_esp32 and method[CONF_TYPE] == METHOD_ESP32_RMT:
|
||||
include_builtin_idf_component("esp_driver_rmt")
|
||||
|
||||
method_template = METHODS[method[CONF_TYPE]].to_code(
|
||||
method, config[CONF_VARIANT], config[CONF_INVERT]
|
||||
)
|
||||
|
||||
@@ -177,8 +177,6 @@ async def to_code(config):
|
||||
cg.add_define("USE_NEXTION_TFT_UPLOAD")
|
||||
cg.add(var.set_tft_url(config[CONF_TFT_URL]))
|
||||
if CORE.is_esp32:
|
||||
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
|
||||
esp32.include_builtin_idf_component("esp_http_client")
|
||||
esp32.add_idf_sdkconfig_option("CONFIG_ESP_TLS_INSECURE", True)
|
||||
esp32.add_idf_sdkconfig_option(
|
||||
"CONFIG_ESP_TLS_SKIP_SERVER_CERT_VERIFY", True
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from esphome import automation
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import mqtt, web_server, zigbee
|
||||
from esphome.components import mqtt, web_server
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ABOVE,
|
||||
@@ -189,7 +189,6 @@ validate_unit_of_measurement = cv.string_strict
|
||||
_NUMBER_SCHEMA = (
|
||||
cv.ENTITY_BASE_SCHEMA.extend(web_server.WEBSERVER_SORTING_SCHEMA)
|
||||
.extend(cv.MQTT_COMMAND_COMPONENT_SCHEMA)
|
||||
.extend(zigbee.NUMBER_SCHEMA)
|
||||
.extend(
|
||||
{
|
||||
cv.OnlyWith(CONF_MQTT_ID, "mqtt"): cv.declare_id(mqtt.MQTTNumberComponent),
|
||||
@@ -215,7 +214,6 @@ _NUMBER_SCHEMA = (
|
||||
|
||||
|
||||
_NUMBER_SCHEMA.add_extra(entity_duplicate_validator("number"))
|
||||
_NUMBER_SCHEMA.add_extra(zigbee.validate_number)
|
||||
|
||||
|
||||
def number_schema(
|
||||
@@ -279,8 +277,6 @@ async def setup_number_core_(
|
||||
if web_server_config := config.get(CONF_WEB_SERVER):
|
||||
await web_server.add_entity_config(var, web_server_config)
|
||||
|
||||
await zigbee.setup_number(var, config, min_value, max_value, step)
|
||||
|
||||
|
||||
async def register_number(
|
||||
var, config, *, min_value: float, max_value: float, step: float
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
#include "esp_task_wdt.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
|
||||
#include "esp_err.h"
|
||||
#include "esp_event.h"
|
||||
@@ -40,14 +39,12 @@ void OpenThreadComponent::setup() {
|
||||
ESP_ERROR_CHECK(esp_netif_init());
|
||||
ESP_ERROR_CHECK(esp_vfs_eventfd_register(&eventfd_config));
|
||||
|
||||
// TASK_PRIORITY_PROTOCOL: same as USB host/MQTT - network protocol tasks need
|
||||
// responsive scheduling but below audio tasks
|
||||
xTaskCreate(
|
||||
[](void *arg) {
|
||||
static_cast<OpenThreadComponent *>(arg)->ot_main();
|
||||
vTaskDelete(nullptr);
|
||||
},
|
||||
"ot_main", 10240, this, TASK_PRIORITY_PROTOCOL, nullptr);
|
||||
"ot_main", 10240, this, 5, nullptr);
|
||||
}
|
||||
|
||||
static esp_netif_t *init_openthread_netif(const esp_openthread_platform_config_t *config) {
|
||||
|
||||
@@ -2,20 +2,21 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/application.h"
|
||||
|
||||
namespace esphome::pmsx003 {
|
||||
namespace esphome {
|
||||
namespace pmsx003 {
|
||||
|
||||
static const char *const TAG = "pmsx003";
|
||||
|
||||
static const uint8_t START_CHARACTER_1 = 0x42;
|
||||
static const uint8_t START_CHARACTER_2 = 0x4D;
|
||||
|
||||
static const uint16_t STABILISING_MS = 30000; // time taken for the sensor to become stable after power on in ms
|
||||
static const uint16_t PMS_STABILISING_MS = 30000; // time taken for the sensor to become stable after power on in ms
|
||||
|
||||
static const uint16_t CMD_MEASUREMENT_MODE_PASSIVE =
|
||||
0x0000; // use `Command::MANUAL_MEASUREMENT` to trigger a measurement
|
||||
static const uint16_t CMD_MEASUREMENT_MODE_ACTIVE = 0x0001; // automatically perform measurements
|
||||
static const uint16_t CMD_SLEEP_MODE_SLEEP = 0x0000; // go to sleep mode
|
||||
static const uint16_t CMD_SLEEP_MODE_WAKEUP = 0x0001; // wake up from sleep mode
|
||||
static const uint16_t PMS_CMD_MEASUREMENT_MODE_PASSIVE =
|
||||
0x0000; // use `PMS_CMD_MANUAL_MEASUREMENT` to trigger a measurement
|
||||
static const uint16_t PMS_CMD_MEASUREMENT_MODE_ACTIVE = 0x0001; // automatically perform measurements
|
||||
static const uint16_t PMS_CMD_SLEEP_MODE_SLEEP = 0x0000; // go to sleep mode
|
||||
static const uint16_t PMS_CMD_SLEEP_MODE_WAKEUP = 0x0001; // wake up from sleep mode
|
||||
|
||||
void PMSX003Component::setup() {}
|
||||
|
||||
@@ -41,7 +42,7 @@ void PMSX003Component::dump_config() {
|
||||
LOG_SENSOR(" ", "Temperature", this->temperature_sensor_);
|
||||
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
|
||||
|
||||
if (this->update_interval_ <= STABILISING_MS) {
|
||||
if (this->update_interval_ <= PMS_STABILISING_MS) {
|
||||
ESP_LOGCONFIG(TAG, " Mode: active continuous (sensor default)");
|
||||
} else {
|
||||
ESP_LOGCONFIG(TAG, " Mode: passive with sleep/wake cycles");
|
||||
@@ -54,44 +55,44 @@ void PMSX003Component::loop() {
|
||||
const uint32_t now = App.get_loop_component_start_time();
|
||||
|
||||
// Initialize sensor mode on first loop
|
||||
if (!this->initialised_) {
|
||||
if (this->update_interval_ > STABILISING_MS) {
|
||||
if (this->initialised_ == 0) {
|
||||
if (this->update_interval_ > PMS_STABILISING_MS) {
|
||||
// Long update interval: use passive mode with sleep/wake cycles
|
||||
this->send_command_(Command::MEASUREMENT_MODE, CMD_MEASUREMENT_MODE_PASSIVE);
|
||||
this->send_command_(Command::SLEEP_MODE, CMD_SLEEP_MODE_WAKEUP);
|
||||
this->send_command_(PMS_CMD_MEASUREMENT_MODE, PMS_CMD_MEASUREMENT_MODE_PASSIVE);
|
||||
this->send_command_(PMS_CMD_SLEEP_MODE, PMS_CMD_SLEEP_MODE_WAKEUP);
|
||||
} else {
|
||||
// Short/zero update interval: use active continuous mode
|
||||
this->send_command_(Command::MEASUREMENT_MODE, CMD_MEASUREMENT_MODE_ACTIVE);
|
||||
this->send_command_(PMS_CMD_MEASUREMENT_MODE, PMS_CMD_MEASUREMENT_MODE_ACTIVE);
|
||||
}
|
||||
this->initialised_ = true;
|
||||
this->initialised_ = 1;
|
||||
}
|
||||
|
||||
// If we update less often than it takes the device to stabilise, spin the fan down
|
||||
// rather than running it constantly. It does take some time to stabilise, so we
|
||||
// need to keep track of what state we're in.
|
||||
if (this->update_interval_ > STABILISING_MS) {
|
||||
if (this->update_interval_ > PMS_STABILISING_MS) {
|
||||
switch (this->state_) {
|
||||
case State::IDLE:
|
||||
case PMSX003_STATE_IDLE:
|
||||
// Power on the sensor now so it'll be ready when we hit the update time
|
||||
if (now - this->last_update_ < (this->update_interval_ - STABILISING_MS))
|
||||
if (now - this->last_update_ < (this->update_interval_ - PMS_STABILISING_MS))
|
||||
return;
|
||||
|
||||
this->state_ = State::STABILISING;
|
||||
this->send_command_(Command::SLEEP_MODE, CMD_SLEEP_MODE_WAKEUP);
|
||||
this->state_ = PMSX003_STATE_STABILISING;
|
||||
this->send_command_(PMS_CMD_SLEEP_MODE, PMS_CMD_SLEEP_MODE_WAKEUP);
|
||||
this->fan_on_time_ = now;
|
||||
return;
|
||||
case State::STABILISING:
|
||||
case PMSX003_STATE_STABILISING:
|
||||
// wait for the sensor to be stable
|
||||
if (now - this->fan_on_time_ < STABILISING_MS)
|
||||
if (now - this->fan_on_time_ < PMS_STABILISING_MS)
|
||||
return;
|
||||
// consume any command responses that are in the serial buffer
|
||||
while (this->available())
|
||||
this->read_byte(&this->data_[0]);
|
||||
// Trigger a new read
|
||||
this->send_command_(Command::MANUAL_MEASUREMENT, 0);
|
||||
this->state_ = State::WAITING;
|
||||
this->send_command_(PMS_CMD_MANUAL_MEASUREMENT, 0);
|
||||
this->state_ = PMSX003_STATE_WAITING;
|
||||
break;
|
||||
case State::WAITING:
|
||||
case PMSX003_STATE_WAITING:
|
||||
// Just go ahead and read stuff
|
||||
break;
|
||||
}
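Worked example of the timing above, using the constants from this diff: with the 30000 ms stabilising time and an update_interval_ of 120000 ms, the IDLE branch waits until now - last_update_ reaches 120000 - 30000 = 90000 ms and only then wakes the sensor; STABILISING then holds for the remaining 30000 ms before a manual measurement is requested, so the reading still lands on the 120 s boundary while the fan runs for only the last 30 s of each cycle.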
@@ -179,31 +180,27 @@ optional<bool> PMSX003Component::check_byte_() {
|
||||
}
|
||||
|
||||
bool PMSX003Component::check_payload_length_(uint16_t payload_length) {
|
||||
// https://avaldebe.github.io/PyPMS/sensors/Plantower/
|
||||
switch (this->type_) {
|
||||
case Type::PMS1003:
|
||||
return payload_length == 28; // 2*13+2
|
||||
case Type::PMS3003: // Data 7/8/9 not set/reserved
|
||||
return payload_length == 20; // 2*9+2
|
||||
case Type::PMSX003: // Data 13 not set/reserved
|
||||
// Deprecated: Length 20 is for PMS3003 backwards compatibility
|
||||
case PMSX003_TYPE_X003:
|
||||
// The expected payload length is typically 28 bytes.
|
||||
// However, a 20-byte payload check was already present in the code.
|
||||
// No official documentation was found confirming this.
|
||||
// Retaining this check to avoid breaking existing behavior.
|
||||
return payload_length == 28 || payload_length == 20; // 2*13+2
|
||||
case Type::PMS5003S:
|
||||
case Type::PMS5003T: // Data 13 not set/reserved
|
||||
return payload_length == 28; // 2*13+2
|
||||
case Type::PMS5003ST: // Data 16 not set/reserved
|
||||
return payload_length == 36; // 2*17+2
|
||||
case Type::PMS9003M:
|
||||
return payload_length == 28; // 2*13+2
|
||||
case PMSX003_TYPE_5003T:
|
||||
case PMSX003_TYPE_5003S:
|
||||
return payload_length == 28; // 2*13+2 (Data 13 not set/reserved)
|
||||
case PMSX003_TYPE_5003ST:
|
||||
return payload_length == 36; // 2*17+2 (Data 16 not set/reserved)
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void PMSX003Component::send_command_(Command cmd, uint16_t data) {
|
||||
void PMSX003Component::send_command_(PMSX0003Command cmd, uint16_t data) {
|
||||
uint8_t send_data[7] = {
|
||||
START_CHARACTER_1, // Start Byte 1
|
||||
START_CHARACTER_2, // Start Byte 2
|
||||
static_cast<uint8_t>(cmd), // Command
|
||||
cmd, // Command
|
||||
uint8_t((data >> 8) & 0xFF), // Data 1
|
||||
uint8_t((data >> 0) & 0xFF), // Data 2
|
||||
0, // Verify Byte 1
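The remainder of the array (Verify Byte 2 and the checksum fill) sits outside this hunk. For Plantower-style command frames the verify bytes are conventionally the 16-bit sum of the preceding bytes; a hedged sketch of how they could be filled for this 7-byte frame, assuming that convention:

// Illustrative only: verify bytes as the 16-bit sum of the first five bytes of the frame.
uint16_t checksum = 0;
for (size_t i = 0; i < 5; i++)
  checksum += send_data[i];
send_data[5] = (checksum >> 8) & 0xFF;  // Verify Byte 1 (high byte)
send_data[6] = (checksum >> 0) & 0xFF;  // Verify Byte 2 (low byte)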
@@ -268,7 +265,7 @@ void PMSX003Component::parse_data_() {
|
||||
if (this->pm_particles_25um_sensor_ != nullptr)
|
||||
this->pm_particles_25um_sensor_->publish_state(pm_particles_25um);
|
||||
|
||||
if (this->type_ == Type::PMS5003T) {
|
||||
if (this->type_ == PMSX003_TYPE_5003T) {
|
||||
ESP_LOGD(TAG,
|
||||
"Got PM0.3 Particles: %u Count/0.1L, PM0.5 Particles: %u Count/0.1L, PM1.0 Particles: %u Count/0.1L, "
|
||||
"PM2.5 Particles %u Count/0.1L",
|
||||
@@ -292,7 +289,7 @@ void PMSX003Component::parse_data_() {
|
||||
}
|
||||
|
||||
// Formaldehyde
|
||||
if (this->type_ == Type::PMS5003S || this->type_ == Type::PMS5003ST) {
|
||||
if (this->type_ == PMSX003_TYPE_5003ST || this->type_ == PMSX003_TYPE_5003S) {
|
||||
const uint16_t formaldehyde = this->get_16_bit_uint_(28);
|
||||
|
||||
ESP_LOGD(TAG, "Got Formaldehyde: %u µg/m^3", formaldehyde);
|
||||
@@ -302,8 +299,8 @@ void PMSX003Component::parse_data_() {
|
||||
}
|
||||
|
||||
// Temperature and Humidity
|
||||
if (this->type_ == Type::PMS5003T || this->type_ == Type::PMS5003ST) {
|
||||
const uint8_t temperature_offset = (this->type_ == Type::PMS5003T) ? 24 : 30;
|
||||
if (this->type_ == PMSX003_TYPE_5003ST || this->type_ == PMSX003_TYPE_5003T) {
|
||||
const uint8_t temperature_offset = (this->type_ == PMSX003_TYPE_5003T) ? 24 : 30;
|
||||
|
||||
const float temperature = static_cast<int16_t>(this->get_16_bit_uint_(temperature_offset)) / 10.0f;
|
||||
const float humidity = this->get_16_bit_uint_(temperature_offset + 2) / 10.0f;
|
||||
@@ -317,22 +314,22 @@ void PMSX003Component::parse_data_() {
|
||||
}
|
||||
|
||||
// Firmware Version and Error Code
|
||||
if (this->type_ == Type::PMS1003 || this->type_ == Type::PMS5003ST || this->type_ == Type::PMS9003M) {
|
||||
const uint8_t firmware_error_code_offset = (this->type_ == Type::PMS5003ST) ? 36 : 28;
|
||||
const uint8_t firmware_version = this->data_[firmware_error_code_offset];
|
||||
const uint8_t error_code = this->data_[firmware_error_code_offset + 1];
|
||||
if (this->type_ == PMSX003_TYPE_5003ST) {
|
||||
const uint8_t firmware_version = this->data_[36];
|
||||
const uint8_t error_code = this->data_[37];
|
||||
|
||||
ESP_LOGD(TAG, "Got Firmware Version: 0x%02X, Error Code: 0x%02X", firmware_version, error_code);
|
||||
}
|
||||
|
||||
// Spin down the sensor again if we aren't going to need it until more time has
|
||||
// passed than it takes to stabilise
|
||||
if (this->update_interval_ > STABILISING_MS) {
|
||||
this->send_command_(Command::SLEEP_MODE, CMD_SLEEP_MODE_SLEEP);
|
||||
this->state_ = State::IDLE;
|
||||
if (this->update_interval_ > PMS_STABILISING_MS) {
|
||||
this->send_command_(PMS_CMD_SLEEP_MODE, PMS_CMD_SLEEP_MODE_SLEEP);
|
||||
this->state_ = PMSX003_STATE_IDLE;
|
||||
}
|
||||
|
||||
this->status_clear_warning();
|
||||
}
|
||||
|
||||
} // namespace esphome::pmsx003
|
||||
} // namespace pmsx003
|
||||
} // namespace esphome
|
||||
|
||||
@@ -5,28 +5,27 @@
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/components/uart/uart.h"
|
||||
|
||||
namespace esphome::pmsx003 {
|
||||
namespace esphome {
|
||||
namespace pmsx003 {
|
||||
|
||||
enum class Type : uint8_t {
|
||||
PMS1003 = 0,
|
||||
PMS3003,
|
||||
PMSX003, // PMS5003, PMS6003, PMS7003, PMSA003 (NOT PMSA003I - see `pmsa003i` component)
|
||||
PMS5003S,
|
||||
PMS5003T,
|
||||
PMS5003ST,
|
||||
PMS9003M,
|
||||
enum PMSX0003Command : uint8_t {
|
||||
PMS_CMD_MEASUREMENT_MODE =
|
||||
0xE1, // Data Options: `PMS_CMD_MEASUREMENT_MODE_PASSIVE`, `PMS_CMD_MEASUREMENT_MODE_ACTIVE`
|
||||
PMS_CMD_MANUAL_MEASUREMENT = 0xE2,
|
||||
PMS_CMD_SLEEP_MODE = 0xE4, // Data Options: `PMS_CMD_SLEEP_MODE_SLEEP`, `PMS_CMD_SLEEP_MODE_WAKEUP`
|
||||
};
|
||||
|
||||
enum class Command : uint8_t {
|
||||
MEASUREMENT_MODE = 0xE1, // Data Options: `CMD_MEASUREMENT_MODE_PASSIVE`, `CMD_MEASUREMENT_MODE_ACTIVE`
|
||||
MANUAL_MEASUREMENT = 0xE2,
|
||||
SLEEP_MODE = 0xE4, // Data Options: `CMD_SLEEP_MODE_SLEEP`, `CMD_SLEEP_MODE_WAKEUP`
|
||||
enum PMSX003Type {
|
||||
PMSX003_TYPE_X003 = 0,
|
||||
PMSX003_TYPE_5003T,
|
||||
PMSX003_TYPE_5003ST,
|
||||
PMSX003_TYPE_5003S,
|
||||
};
|
||||
|
||||
enum class State : uint8_t {
|
||||
IDLE = 0,
|
||||
STABILISING,
|
||||
WAITING,
|
||||
enum PMSX003State {
|
||||
PMSX003_STATE_IDLE = 0,
|
||||
PMSX003_STATE_STABILISING,
|
||||
PMSX003_STATE_WAITING,
|
||||
};
|
||||
|
||||
class PMSX003Component : public uart::UARTDevice, public Component {
|
||||
@@ -38,7 +37,7 @@ class PMSX003Component : public uart::UARTDevice, public Component {
|
||||
|
||||
void set_update_interval(uint32_t update_interval) { this->update_interval_ = update_interval; }
|
||||
|
||||
void set_type(Type type) { this->type_ = type; }
|
||||
void set_type(PMSX003Type type) { this->type_ = type; }
|
||||
|
||||
void set_pm_1_0_std_sensor(sensor::Sensor *pm_1_0_std_sensor) { this->pm_1_0_std_sensor_ = pm_1_0_std_sensor; }
|
||||
void set_pm_2_5_std_sensor(sensor::Sensor *pm_2_5_std_sensor) { this->pm_2_5_std_sensor_ = pm_2_5_std_sensor; }
|
||||
@@ -78,20 +77,20 @@ class PMSX003Component : public uart::UARTDevice, public Component {
|
||||
optional<bool> check_byte_();
|
||||
void parse_data_();
|
||||
bool check_payload_length_(uint16_t payload_length);
|
||||
void send_command_(Command cmd, uint16_t data);
|
||||
void send_command_(PMSX0003Command cmd, uint16_t data);
|
||||
uint16_t get_16_bit_uint_(uint8_t start_index) const {
|
||||
return encode_uint16(this->data_[start_index], this->data_[start_index + 1]);
|
||||
}
|
||||
|
||||
Type type_;
|
||||
State state_{State::IDLE};
|
||||
bool initialised_{false};
|
||||
uint8_t data_[64];
|
||||
uint8_t data_index_{0};
|
||||
uint8_t initialised_{0};
|
||||
uint32_t fan_on_time_{0};
|
||||
uint32_t last_update_{0};
|
||||
uint32_t last_transmission_{0};
|
||||
uint32_t update_interval_{0};
|
||||
PMSX003State state_{PMSX003_STATE_IDLE};
|
||||
PMSX003Type type_;
|
||||
|
||||
// "Standard Particle"
|
||||
sensor::Sensor *pm_1_0_std_sensor_{nullptr};
|
||||
@@ -119,4 +118,5 @@ class PMSX003Component : public uart::UARTDevice, public Component {
|
||||
sensor::Sensor *humidity_sensor_{nullptr};
|
||||
};
|
||||
|
||||
} // namespace esphome::pmsx003
|
||||
} // namespace pmsx003
|
||||
} // namespace esphome
|
||||
|
||||
@@ -40,128 +40,34 @@ pmsx003_ns = cg.esphome_ns.namespace("pmsx003")
|
||||
PMSX003Component = pmsx003_ns.class_("PMSX003Component", uart.UARTDevice, cg.Component)
|
||||
PMSX003Sensor = pmsx003_ns.class_("PMSX003Sensor", sensor.Sensor)
|
||||
|
||||
TYPE_PMS1003 = "PMS1003"
|
||||
TYPE_PMS3003 = "PMS3003"
|
||||
TYPE_PMSX003 = "PMSX003" # PMS5003, PMS6003, PMS7003, PMSA003 (NOT PMSA003I - see `pmsa003i` component)
|
||||
TYPE_PMS5003S = "PMS5003S"
|
||||
TYPE_PMSX003 = "PMSX003"
|
||||
TYPE_PMS5003T = "PMS5003T"
|
||||
TYPE_PMS5003ST = "PMS5003ST"
|
||||
TYPE_PMS9003M = "PMS9003M"
|
||||
TYPE_PMS5003S = "PMS5003S"
|
||||
|
||||
Type = pmsx003_ns.enum("Type", is_class=True)
|
||||
PMSX003Type = pmsx003_ns.enum("PMSX003Type")
|
||||
|
||||
PMSX003_TYPES = {
|
||||
TYPE_PMS1003: Type.PMS1003,
|
||||
TYPE_PMS3003: Type.PMS3003,
|
||||
TYPE_PMSX003: Type.PMSX003,
|
||||
TYPE_PMS5003S: Type.PMS5003S,
|
||||
TYPE_PMS5003T: Type.PMS5003T,
|
||||
TYPE_PMS5003ST: Type.PMS5003ST,
|
||||
TYPE_PMS9003M: Type.PMS9003M,
|
||||
TYPE_PMSX003: PMSX003Type.PMSX003_TYPE_X003,
|
||||
TYPE_PMS5003T: PMSX003Type.PMSX003_TYPE_5003T,
|
||||
TYPE_PMS5003ST: PMSX003Type.PMSX003_TYPE_5003ST,
|
||||
TYPE_PMS5003S: PMSX003Type.PMSX003_TYPE_5003S,
|
||||
}
|
||||
|
||||
SENSORS_TO_TYPE = {
|
||||
CONF_PM_1_0_STD: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMS3003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_2_5_STD: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMS3003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_10_0_STD: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMS3003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_1_0: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMS3003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_2_5: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMS3003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_10_0: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMS3003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_0_3UM: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_0_5UM: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_1_0UM: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_2_5UM: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003T,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_5_0UM: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_PM_10_0UM: [
|
||||
TYPE_PMS1003,
|
||||
TYPE_PMSX003,
|
||||
TYPE_PMS5003S,
|
||||
TYPE_PMS5003ST,
|
||||
TYPE_PMS9003M,
|
||||
],
|
||||
CONF_FORMALDEHYDE: [TYPE_PMS5003S, TYPE_PMS5003ST],
|
||||
CONF_PM_1_0: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_2_5: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_10_0: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_1_0_STD: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_2_5_STD: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_10_0_STD: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_0_3UM: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_0_5UM: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_1_0UM: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_2_5UM: [TYPE_PMSX003, TYPE_PMS5003T, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_5_0UM: [TYPE_PMSX003, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_PM_10_0UM: [TYPE_PMSX003, TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_FORMALDEHYDE: [TYPE_PMS5003ST, TYPE_PMS5003S],
|
||||
CONF_TEMPERATURE: [TYPE_PMS5003T, TYPE_PMS5003ST],
|
||||
CONF_HUMIDITY: [TYPE_PMS5003T, TYPE_PMS5003ST],
|
||||
}
|
||||
|
||||
@@ -170,9 +170,6 @@ CONFIG_SCHEMA = remote_base.validate_triggers(
|
||||
async def to_code(config):
|
||||
pin = await cg.gpio_pin_expression(config[CONF_PIN])
|
||||
if CORE.is_esp32:
|
||||
# Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
|
||||
esp32.include_builtin_idf_component("esp_driver_rmt")
|
||||
|
||||
var = cg.new_Pvariable(config[CONF_ID], pin)
|
||||
cg.add(var.set_rmt_symbols(config[CONF_RMT_SYMBOLS]))
|
||||
cg.add(var.set_receive_symbols(config[CONF_RECEIVE_SYMBOLS]))
|
||||
|
||||
@@ -112,9 +112,6 @@ async def digital_write_action_to_code(config, action_id, template_arg, args):
|
||||
async def to_code(config):
|
||||
pin = await cg.gpio_pin_expression(config[CONF_PIN])
|
||||
if CORE.is_esp32:
|
||||
# Re-enable ESP-IDF's RMT driver (excluded by default to save compile time)
|
||||
esp32.include_builtin_idf_component("esp_driver_rmt")
|
||||
|
||||
var = cg.new_Pvariable(config[CONF_ID], pin)
|
||||
cg.add(var.set_rmt_symbols(config[CONF_RMT_SYMBOLS]))
|
||||
cg.add(var.set_non_blocking(config[CONF_NON_BLOCKING]))
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstring>
|
||||
@@ -14,6 +13,8 @@
|
||||
namespace esphome {
|
||||
namespace resampler {
|
||||
|
||||
static const UBaseType_t RESAMPLER_TASK_PRIORITY = 1;
|
||||
|
||||
static const uint32_t TRANSFER_BUFFER_DURATION_MS = 50;
|
||||
|
||||
static const uint32_t TASK_DELAY_MS = 20;
|
||||
@@ -184,10 +185,8 @@ esp_err_t ResamplerSpeaker::start_task_() {
|
||||
}
|
||||
|
||||
if (this->task_handle_ == nullptr) {
|
||||
// TASK_PRIORITY_APPLICATION: same as main loop - resampling is buffered audio
|
||||
// processing, not real-time I/O
|
||||
this->task_handle_ = xTaskCreateStatic(resample_task, "sample", TASK_STACK_SIZE, (void *) this,
|
||||
TASK_PRIORITY_APPLICATION, this->task_stack_buffer_, &this->task_stack_);
|
||||
RESAMPLER_TASK_PRIORITY, this->task_stack_buffer_, &this->task_stack_);
|
||||
}
|
||||
|
||||
if (this->task_handle_ == nullptr) {
|
||||
|
||||
@@ -27,61 +27,46 @@ void RuntimeStatsCollector::record_component_time(Component *component, uint32_t
|
||||
}
|
||||
|
||||
void RuntimeStatsCollector::log_stats_() {
|
||||
// First pass: count active components
|
||||
size_t count = 0;
|
||||
for (const auto &it : this->component_stats_) {
|
||||
if (it.second.get_period_count() > 0) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
ESP_LOGI(TAG,
|
||||
"Component Runtime Statistics\n"
|
||||
" Period stats (last %" PRIu32 "ms): %zu active components",
|
||||
this->log_interval_, count);
|
||||
" Period stats (last %" PRIu32 "ms):",
|
||||
this->log_interval_);
|
||||
|
||||
if (count == 0) {
|
||||
return;
|
||||
}
|
||||
// First collect stats we want to display
|
||||
std::vector<ComponentStatPair> stats_to_display;
|
||||
|
||||
// Stack buffer sized to actual active count (up to 256 components), heap fallback for larger
|
||||
SmallBufferWithHeapFallback<256, Component *> buffer(count);
|
||||
Component **sorted = buffer.get();
|
||||
|
||||
// Second pass: fill buffer with active components
|
||||
size_t idx = 0;
|
||||
for (const auto &it : this->component_stats_) {
|
||||
if (it.second.get_period_count() > 0) {
|
||||
sorted[idx++] = it.first;
|
||||
Component *component = it.first;
|
||||
const ComponentRuntimeStats &stats = it.second;
|
||||
if (stats.get_period_count() > 0) {
|
||||
ComponentStatPair pair = {component, &stats};
|
||||
stats_to_display.push_back(pair);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by period runtime (descending)
|
||||
std::sort(sorted, sorted + count, [this](Component *a, Component *b) {
|
||||
return this->component_stats_[a].get_period_time_ms() > this->component_stats_[b].get_period_time_ms();
|
||||
});
|
||||
std::sort(stats_to_display.begin(), stats_to_display.end(), std::greater<ComponentStatPair>());
|
||||
|
||||
// Log top components by period runtime
|
||||
for (size_t i = 0; i < count; i++) {
|
||||
const auto &stats = this->component_stats_[sorted[i]];
|
||||
for (const auto &it : stats_to_display) {
|
||||
ESP_LOGI(TAG, " %s: count=%" PRIu32 ", avg=%.2fms, max=%" PRIu32 "ms, total=%" PRIu32 "ms",
|
||||
LOG_STR_ARG(sorted[i]->get_component_log_str()), stats.get_period_count(), stats.get_period_avg_time_ms(),
|
||||
stats.get_period_max_time_ms(), stats.get_period_time_ms());
|
||||
LOG_STR_ARG(it.component->get_component_log_str()), it.stats->get_period_count(),
|
||||
it.stats->get_period_avg_time_ms(), it.stats->get_period_max_time_ms(), it.stats->get_period_time_ms());
|
||||
}
|
||||
|
||||
// Log total stats since boot (only for active components - idle ones haven't changed)
|
||||
ESP_LOGI(TAG, " Total stats (since boot): %zu active components", count);
|
||||
// Log total stats since boot
|
||||
ESP_LOGI(TAG, " Total stats (since boot):");
|
||||
|
||||
// Re-sort by total runtime for all-time stats
|
||||
std::sort(sorted, sorted + count, [this](Component *a, Component *b) {
|
||||
return this->component_stats_[a].get_total_time_ms() > this->component_stats_[b].get_total_time_ms();
|
||||
});
|
||||
std::sort(stats_to_display.begin(), stats_to_display.end(),
|
||||
[](const ComponentStatPair &a, const ComponentStatPair &b) {
|
||||
return a.stats->get_total_time_ms() > b.stats->get_total_time_ms();
|
||||
});
|
||||
|
||||
for (size_t i = 0; i < count; i++) {
|
||||
const auto &stats = this->component_stats_[sorted[i]];
|
||||
for (const auto &it : stats_to_display) {
|
||||
ESP_LOGI(TAG, " %s: count=%" PRIu32 ", avg=%.2fms, max=%" PRIu32 "ms, total=%" PRIu32 "ms",
|
||||
LOG_STR_ARG(sorted[i]->get_component_log_str()), stats.get_total_count(), stats.get_total_avg_time_ms(),
|
||||
stats.get_total_max_time_ms(), stats.get_total_time_ms());
|
||||
LOG_STR_ARG(it.component->get_component_log_str()), it.stats->get_total_count(),
|
||||
it.stats->get_total_avg_time_ms(), it.stats->get_total_max_time_ms(), it.stats->get_total_time_ms());
|
||||
}
|
||||
}
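SmallBufferWithHeapFallback is used above (and again in the ESP32 WiFi scan path later in this diff) but is not defined in any hunk shown here. A sketch of the shape such a helper could take, assuming only the count-constructor and get() visible at the call sites:

#include <cstddef>
#include <memory>

// Illustrative only: stack storage for up to N elements, heap fallback beyond that.
template<size_t N, typename T> class SmallBufferWithHeapFallback {
 public:
  explicit SmallBufferWithHeapFallback(size_t count) {
    if (count > N)
      this->heap_ = std::make_unique<T[]>(count);
  }
  T *get() { return this->heap_ ? this->heap_.get() : this->stack_; }

 private:
  T stack_[N];                   // used for small counts, no allocation
  std::unique_ptr<T[]> heap_{};  // used only when count exceeds N
};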
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
#ifdef USE_RUNTIME_STATS
|
||||
|
||||
#include <map>
|
||||
#include <vector>
|
||||
#include <cstdint>
|
||||
#include <cstring>
|
||||
#include "esphome/core/helpers.h"
|
||||
@@ -76,6 +77,17 @@ class ComponentRuntimeStats {
|
||||
uint32_t total_max_time_ms_;
|
||||
};
|
||||
|
||||
// For sorting components by run time
|
||||
struct ComponentStatPair {
|
||||
Component *component;
|
||||
const ComponentRuntimeStats *stats;
|
||||
|
||||
bool operator>(const ComponentStatPair &other) const {
|
||||
// Sort by period time as that's what we're displaying in the logs
|
||||
return stats->get_period_time_ms() > other.stats->get_period_time_ms();
|
||||
}
|
||||
};
|
||||
|
||||
class RuntimeStatsCollector {
|
||||
public:
|
||||
RuntimeStatsCollector();
|
||||
|
||||
@@ -2,7 +2,7 @@ from esphome import automation
|
||||
import esphome.codegen as cg
|
||||
from esphome.components import audio, audio_dac
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_AUDIO_DAC, CONF_DATA, CONF_ID, CONF_VOLUME
|
||||
from esphome.const import CONF_DATA, CONF_ID, CONF_VOLUME
|
||||
from esphome.core import CORE, ID
|
||||
from esphome.coroutine import CoroPriority, coroutine_with_priority
|
||||
|
||||
@@ -11,6 +11,8 @@ CODEOWNERS = ["@jesserockz", "@kahrendt"]
|
||||
|
||||
IS_PLATFORM_COMPONENT = True
|
||||
|
||||
CONF_AUDIO_DAC = "audio_dac"
|
||||
|
||||
speaker_ns = cg.esphome_ns.namespace("speaker")
|
||||
|
||||
Speaker = speaker_ns.class_("Speaker")
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
#ifdef USE_ESP32
|
||||
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
|
||||
#include "esphome/components/audio/audio.h"
|
||||
#ifdef USE_OTA
|
||||
@@ -46,6 +45,9 @@ namespace speaker {
|
||||
|
||||
static const uint32_t MEDIA_CONTROLS_QUEUE_LENGTH = 20;
|
||||
|
||||
static const UBaseType_t MEDIA_PIPELINE_TASK_PRIORITY = 1;
|
||||
static const UBaseType_t ANNOUNCEMENT_PIPELINE_TASK_PRIORITY = 1;
|
||||
|
||||
static const char *const TAG = "speaker_media_player";
|
||||
|
||||
void SpeakerMediaPlayer::setup() {
|
||||
@@ -68,10 +70,9 @@ void SpeakerMediaPlayer::setup() {
|
||||
ota::get_global_ota_callback()->add_global_state_listener(this);
|
||||
#endif
|
||||
|
||||
// TASK_PRIORITY_APPLICATION: same as main loop - media pipelines handle buffered
|
||||
// audio streaming, not real-time I/O, so they don't need elevated priority
|
||||
this->announcement_pipeline_ = make_unique<AudioPipeline>(
|
||||
this->announcement_speaker_, this->buffer_size_, this->task_stack_in_psram_, "ann", TASK_PRIORITY_APPLICATION);
|
||||
this->announcement_pipeline_ =
|
||||
make_unique<AudioPipeline>(this->announcement_speaker_, this->buffer_size_, this->task_stack_in_psram_, "ann",
|
||||
ANNOUNCEMENT_PIPELINE_TASK_PRIORITY);
|
||||
|
||||
if (this->announcement_pipeline_ == nullptr) {
|
||||
ESP_LOGE(TAG, "Failed to create announcement pipeline");
|
||||
@@ -80,7 +81,7 @@ void SpeakerMediaPlayer::setup() {
|
||||
|
||||
if (!this->single_pipeline_()) {
|
||||
this->media_pipeline_ = make_unique<AudioPipeline>(this->media_speaker_, this->buffer_size_,
|
||||
this->task_stack_in_psram_, "med", TASK_PRIORITY_APPLICATION);
|
||||
this->task_stack_in_psram_, "med", MEDIA_PIPELINE_TASK_PRIORITY);
|
||||
|
||||
if (this->media_pipeline_ == nullptr) {
|
||||
ESP_LOGE(TAG, "Failed to create media pipeline");
|
||||
|
||||
@@ -27,9 +27,6 @@ void RealTimeClock::dump_config() {
|
||||
#ifdef USE_TIME_TIMEZONE
|
||||
ESP_LOGCONFIG(TAG, "Timezone: '%s'", this->timezone_.c_str());
|
||||
#endif
|
||||
auto time = this->now();
|
||||
ESP_LOGCONFIG(TAG, "Current time: %04d-%02d-%02d %02d:%02d:%02d", time.year, time.month, time.day_of_month, time.hour,
|
||||
time.minute, time.second);
|
||||
}
|
||||
|
||||
void RealTimeClock::synchronize_epoch_(uint32_t epoch) {
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
#include <array>
|
||||
#include <vector>
|
||||
|
||||
namespace esphome {
|
||||
namespace tx20 {
|
||||
@@ -45,25 +45,25 @@ std::string Tx20Component::get_wind_cardinal_direction() const { return this->wi
|
||||
void Tx20Component::decode_and_publish_() {
|
||||
ESP_LOGVV(TAG, "Decode Tx20");
|
||||
|
||||
std::array<bool, MAX_BUFFER_SIZE> bit_buffer{};
|
||||
size_t bit_pos = 0;
|
||||
std::string string_buffer;
|
||||
std::string string_buffer_2;
|
||||
std::vector<bool> bit_buffer;
|
||||
bool current_bit = true;
|
||||
// Cap at MAX_BUFFER_SIZE - 1 to prevent out-of-bounds access (buffer_index can exceed MAX_BUFFER_SIZE in ISR)
|
||||
const int max_buffer_index =
|
||||
std::min(static_cast<int>(this->store_.buffer_index), static_cast<int>(MAX_BUFFER_SIZE - 1));
|
||||
|
||||
for (int i = 1; i <= max_buffer_index; i++) {
|
||||
for (int i = 1; i <= this->store_.buffer_index; i++) {
|
||||
string_buffer_2 += to_string(this->store_.buffer[i]) + ", ";
|
||||
uint8_t repeat = this->store_.buffer[i] / TX20_BIT_TIME;
|
||||
// ignore segments at the end that were too short
|
||||
for (uint8_t j = 0; j < repeat && bit_pos < MAX_BUFFER_SIZE; j++) {
|
||||
bit_buffer[bit_pos++] = current_bit;
|
||||
}
|
||||
string_buffer.append(repeat, current_bit ? '1' : '0');
|
||||
bit_buffer.insert(bit_buffer.end(), repeat, current_bit);
|
||||
current_bit = !current_bit;
|
||||
}
|
||||
current_bit = !current_bit;
|
||||
size_t bits_before_padding = bit_pos;
|
||||
while (bit_pos < MAX_BUFFER_SIZE) {
|
||||
bit_buffer[bit_pos++] = current_bit;
|
||||
if (string_buffer.length() < MAX_BUFFER_SIZE) {
|
||||
uint8_t remain = MAX_BUFFER_SIZE - string_buffer.length();
|
||||
string_buffer_2 += to_string(remain) + ", ";
|
||||
string_buffer.append(remain, current_bit ? '1' : '0');
|
||||
bit_buffer.insert(bit_buffer.end(), remain, current_bit);
|
||||
}
|
||||
|
||||
uint8_t tx20_sa = 0;
|
||||
@@ -108,24 +108,8 @@ void Tx20Component::decode_and_publish_() {
|
||||
// 2. Check received checksum matches calculated checksum
|
||||
// 3. Check that Wind Direction matches Wind Direction (Inverted)
|
||||
// 4. Check that Wind Speed matches Wind Speed (Inverted)
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
|
||||
// Build debug strings from completed data
|
||||
char debug_buf[320]; // buffer values: max 40 entries * 7 chars each
|
||||
size_t debug_pos = 0;
|
||||
for (int i = 1; i <= max_buffer_index; i++) {
|
||||
debug_pos = buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%u, ", this->store_.buffer[i]);
|
||||
}
|
||||
if (bits_before_padding < MAX_BUFFER_SIZE) {
|
||||
buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%zu, ", MAX_BUFFER_SIZE - bits_before_padding);
|
||||
}
|
||||
char bits_buf[MAX_BUFFER_SIZE + 1];
|
||||
for (size_t i = 0; i < MAX_BUFFER_SIZE; i++) {
|
||||
bits_buf[i] = bit_buffer[i] ? '1' : '0';
|
||||
}
|
||||
bits_buf[MAX_BUFFER_SIZE] = '\0';
|
||||
ESP_LOGVV(TAG, "BUFFER %s", debug_buf);
|
||||
ESP_LOGVV(TAG, "Decoded bits %s", bits_buf);
|
||||
#endif
|
||||
ESP_LOGVV(TAG, "BUFFER %s", string_buffer_2.c_str());
|
||||
ESP_LOGVV(TAG, "Decoded bits %s", string_buffer.c_str());
|
||||
if (tx20_sa == 4) {
|
||||
if (chk == tx20_sd) {
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/gpio.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
#include "driver/gpio.h"
|
||||
#include "soc/gpio_num.h"
|
||||
#include "soc/uart_pins.h"
|
||||
@@ -368,13 +367,12 @@ void IDFUARTComponent::check_logger_conflict() {}
|
||||
|
||||
#ifdef USE_UART_WAKE_LOOP_ON_RX
|
||||
void IDFUARTComponent::start_rx_event_task_() {
|
||||
// TASK_PRIORITY_APPLICATION: same as main loop - UART RX monitoring is lightweight,
|
||||
// just wakes main loop when data arrives
|
||||
BaseType_t result = xTaskCreate(rx_event_task_func, // Task function
|
||||
"uart_rx_evt", // Task name (max 16 chars)
|
||||
2240, // Stack size in bytes (~2.2KB)
|
||||
this, // Task parameter (this pointer)
|
||||
TASK_PRIORITY_APPLICATION,
|
||||
// Create FreeRTOS task to monitor UART events
|
||||
BaseType_t result = xTaskCreate(rx_event_task_func, // Task function
|
||||
"uart_rx_evt", // Task name (max 16 chars)
|
||||
2240, // Stack size in bytes (~2.2KB); increase if needed for logging
|
||||
this, // Task parameter (this pointer)
|
||||
tskIDLE_PRIORITY + 1, // Priority (low, just above idle)
|
||||
&this->rx_event_task_handle_ // Task handle
|
||||
);
|
||||
|
||||
|
||||
@@ -12,8 +12,8 @@ from esphome.components.packet_transport import (
|
||||
)
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_DATA, CONF_ID, CONF_PORT, CONF_TRIGGER_ID
|
||||
from esphome.core import ID
|
||||
from esphome.cpp_generator import literal
|
||||
from esphome.core import ID, Lambda
|
||||
from esphome.cpp_generator import ExpressionStatement, MockObj
|
||||
|
||||
CODEOWNERS = ["@clydebarrow"]
|
||||
DEPENDENCIES = ["network"]
|
||||
@@ -24,8 +24,6 @@ udp_ns = cg.esphome_ns.namespace("udp")
|
||||
UDPComponent = udp_ns.class_("UDPComponent", cg.Component)
|
||||
UDPWriteAction = udp_ns.class_("UDPWriteAction", automation.Action)
|
||||
trigger_args = cg.std_vector.template(cg.uint8)
|
||||
trigger_argname = "data"
|
||||
trigger_argtype = [(trigger_args, trigger_argname)]
|
||||
|
||||
CONF_ADDRESSES = "addresses"
|
||||
CONF_LISTEN_ADDRESS = "listen_address"
|
||||
@@ -113,14 +111,13 @@ async def to_code(config):
|
||||
cg.add(var.set_addresses([str(addr) for addr in config[CONF_ADDRESSES]]))
|
||||
if on_receive := config.get(CONF_ON_RECEIVE):
|
||||
on_receive = on_receive[0]
|
||||
trigger_id = cg.new_Pvariable(on_receive[CONF_TRIGGER_ID])
|
||||
trigger = cg.new_Pvariable(on_receive[CONF_TRIGGER_ID])
|
||||
trigger = await automation.build_automation(
|
||||
trigger_id, trigger_argtype, on_receive
|
||||
trigger, [(trigger_args, "data")], on_receive
|
||||
)
|
||||
trigger_lambda = await cg.process_lambda(
|
||||
trigger.trigger(literal(trigger_argname)), trigger_argtype
|
||||
)
|
||||
cg.add(var.add_listener(trigger_lambda))
|
||||
trigger = Lambda(str(ExpressionStatement(trigger.trigger(MockObj("data")))))
|
||||
trigger = await cg.process_lambda(trigger, [(trigger_args, "data")])
|
||||
cg.add(var.add_listener(trigger))
|
||||
cg.add(var.set_should_listen())
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
#include "usb_cdc_acm.h"
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
|
||||
#include <cstring>
|
||||
#include <sys/param.h>
|
||||
@@ -156,16 +155,13 @@ void USBCDCACMInstance::setup() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Use a larger stack size for very verbose logging
|
||||
constexpr size_t stack_size =
|
||||
ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE ? USB_TX_TASK_STACK_SIZE_VV : USB_TX_TASK_STACK_SIZE;
|
||||
// Use a larger stack size for (very) verbose logging
|
||||
const size_t stack_size = esp_log_level_get(TAG) > ESP_LOG_DEBUG ? USB_TX_TASK_STACK_SIZE_VV : USB_TX_TASK_STACK_SIZE;
|
||||
|
||||
// Create a simple, unique task name per interface
|
||||
char task_name[] = "usb_tx_0";
|
||||
task_name[sizeof(task_name) - 1] = format_hex_char(static_cast<char>(this->itf_));
|
||||
// TASK_PRIORITY_USB_SERIAL: above main loop (TASK_PRIORITY_APPLICATION) and
|
||||
// wake word (TASK_PRIORITY_INFERENCE), below protocol tasks (TASK_PRIORITY_PROTOCOL)
|
||||
xTaskCreate(usb_tx_task_fn, task_name, stack_size, this, TASK_PRIORITY_USB_SERIAL, &this->usb_tx_task_handle_);
|
||||
xTaskCreate(usb_tx_task_fn, task_name, stack_size, this, 4, &this->usb_tx_task_handle_);
|
||||
|
||||
if (this->usb_tx_task_handle_ == nullptr) {
|
||||
ESP_LOGE(TAG, "Failed to create USB TX task for itf %d", this->itf_);
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
#if defined(USE_ESP32_VARIANT_ESP32P4) || defined(USE_ESP32_VARIANT_ESP32S2) || defined(USE_ESP32_VARIANT_ESP32S3)
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/task_priorities.h"
|
||||
#include <vector>
|
||||
#include "usb/usb_host.h"
|
||||
#include <freertos/FreeRTOS.h>
|
||||
@@ -70,6 +69,7 @@ static constexpr trq_bitmask_t ALL_REQUESTS_IN_USE = MAX_REQUESTS == 32 ? ~0 : (
|
||||
|
||||
static constexpr size_t USB_EVENT_QUEUE_SIZE = 32; // Size of event queue between USB task and main loop
|
||||
static constexpr size_t USB_TASK_STACK_SIZE = 4096; // Stack size for USB task (same as ESP-IDF USB examples)
|
||||
static constexpr UBaseType_t USB_TASK_PRIORITY = 5; // Higher priority than main loop (tskIDLE_PRIORITY + 5)
|
||||
|
||||
// used to report a transfer status
|
||||
struct TransferStatus {
|
||||
|
||||
@@ -215,12 +215,11 @@ void USBClient::setup() {
|
||||
}
|
||||
|
||||
// Create and start USB task
|
||||
// TASK_PRIORITY_PROTOCOL: above main loop (TASK_PRIORITY_APPLICATION) but below
|
||||
// audio tasks - USB host needs responsive scheduling for device communication
|
||||
xTaskCreate(usb_task_fn, "usb_task",
|
||||
USB_TASK_STACK_SIZE, // Stack size
|
||||
this, // Task parameter
|
||||
TASK_PRIORITY_PROTOCOL, &this->usb_task_handle_);
|
||||
USB_TASK_PRIORITY, // Priority (higher than main loop)
|
||||
&this->usb_task_handle_);
|
||||
|
||||
if (this->usb_task_handle_ == nullptr) {
|
||||
ESP_LOGE(TAG, "Failed to create USB task");
|
||||
|
||||
@@ -1,6 +1 @@
|
||||
CODEOWNERS = ["@clydebarrow"]
|
||||
|
||||
DEPRECATED_COMPONENT = """
|
||||
The 'waveshare_epaper' component is deprecated and no new models will be added to it.
|
||||
New model PRs should target the newer and more performant 'epaper_spi' component.
|
||||
"""
|
||||
|
||||
@@ -53,4 +53,4 @@ async def to_code(config):
|
||||
"lib_ignore", ["ESPAsyncTCP", "AsyncTCP", "AsyncTCP_RP2040W"]
|
||||
)
|
||||
# https://github.com/ESP32Async/ESPAsyncWebServer/blob/main/library.json
|
||||
cg.add_library("ESP32Async/ESPAsyncWebServer", "3.9.6")
|
||||
cg.add_library("ESP32Async/ESPAsyncWebServer", "3.9.5")
|
||||
|
||||
@@ -39,10 +39,6 @@
|
||||
#include "esphome/components/esp32_improv/esp32_improv_component.h"
|
||||
#endif
|
||||
|
||||
#ifdef USE_IMPROV_SERIAL
|
||||
#include "esphome/components/improv_serial/improv_serial_component.h"
|
||||
#endif
|
||||
|
||||
namespace esphome::wifi {
|
||||
|
||||
static const char *const TAG = "wifi";
|
||||
@@ -369,75 +365,6 @@ bool WiFiComponent::ssid_was_seen_in_scan_(const std::string &ssid) const {
|
||||
return false;
|
||||
}
|
||||
|
||||
bool WiFiComponent::needs_full_scan_results_() const {
|
||||
// Components that require full scan results (for example, scan result listeners)
|
||||
// are expected to call request_wifi_scan_results(), which sets keep_scan_results_.
|
||||
if (this->keep_scan_results_) {
|
||||
return true;
|
||||
}
|
||||
|
||||
#ifdef USE_CAPTIVE_PORTAL
|
||||
// Captive portal needs full results when active (showing network list to user)
|
||||
if (captive_portal::global_captive_portal != nullptr && captive_portal::global_captive_portal->is_active()) {
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_IMPROV_SERIAL
|
||||
// Improv serial needs results during provisioning (before connected)
|
||||
if (improv_serial::global_improv_serial_component != nullptr && !this->is_connected()) {
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_IMPROV
|
||||
// BLE improv also needs results during provisioning
|
||||
if (esp32_improv::global_improv_component != nullptr && esp32_improv::global_improv_component->is_active()) {
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
bool WiFiComponent::matches_configured_network_(const char *ssid, const uint8_t *bssid) const {
|
||||
// Hidden networks in scan results have empty SSIDs - skip them
|
||||
if (ssid[0] == '\0') {
|
||||
return false;
|
||||
}
|
||||
for (const auto &sta : this->sta_) {
|
||||
// Skip hidden network configs (they don't appear in normal scans)
|
||||
if (sta.get_hidden()) {
|
||||
continue;
|
||||
}
|
||||
// For BSSID-only configs (empty SSID), match by BSSID
|
||||
if (sta.get_ssid().empty()) {
|
||||
if (sta.has_bssid() && std::memcmp(sta.get_bssid().data(), bssid, 6) == 0) {
|
||||
return true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// Match by SSID
|
||||
if (sta.get_ssid() == ssid) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void WiFiComponent::log_discarded_scan_result_(const char *ssid, const uint8_t *bssid, int8_t rssi, uint8_t channel) {
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
|
||||
// Skip logging during roaming scans to avoid log buffer overflow
|
||||
// (roaming scans typically find many networks but only care about same-SSID APs)
|
||||
if (this->roaming_state_ == RoamingState::SCANNING) {
|
||||
return;
|
||||
}
|
||||
char bssid_s[MAC_ADDRESS_PRETTY_BUFFER_SIZE];
|
||||
format_mac_addr_upper(bssid, bssid_s);
|
||||
ESP_LOGV(TAG, "- " LOG_SECRET("'%s'") " " LOG_SECRET("(%s)") " %ddB Ch:%u", ssid, bssid_s, rssi, channel);
|
||||
#endif
|
||||
}
|
||||
|
||||
int8_t WiFiComponent::find_next_hidden_sta_(int8_t start_index) {
|
||||
// Find next SSID to try in RETRY_HIDDEN phase.
|
||||
//
|
||||
@@ -729,12 +656,8 @@ void WiFiComponent::loop() {
|
||||
ESP_LOGI(TAG, "Starting fallback AP");
|
||||
this->setup_ap_config_();
|
||||
#ifdef USE_CAPTIVE_PORTAL
|
||||
if (captive_portal::global_captive_portal != nullptr) {
|
||||
// Reset so we force one full scan after captive portal starts
|
||||
// (previous scans were filtered because captive portal wasn't active yet)
|
||||
this->has_completed_scan_after_captive_portal_start_ = false;
|
||||
if (captive_portal::global_captive_portal != nullptr)
|
||||
captive_portal::global_captive_portal->start();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
@@ -1272,7 +1195,7 @@ template<typename VectorType> static void insertion_sort_scan_results(VectorType
|
||||
// has overhead from UART transmission, so combining INFO+DEBUG into one line halves
|
||||
// the blocking time. Do NOT split this into separate ESP_LOGI/ESP_LOGD calls.
|
||||
__attribute__((noinline)) static void log_scan_result(const WiFiScanResult &res) {
|
||||
char bssid_s[MAC_ADDRESS_PRETTY_BUFFER_SIZE];
|
||||
char bssid_s[18];
|
||||
auto bssid = res.get_bssid();
|
||||
format_mac_addr_upper(bssid.data(), bssid_s);
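For reference, a pretty-printed MAC such as AA:BB:CC:DD:EE:FF is 6 x 2 hex digits plus 5 colons = 17 characters, plus the terminating NUL, i.e. 18 bytes. The MAC_ADDRESS_PRETTY_BUFFER_SIZE constant that replaces the literal 18 presumably encodes that same value; the change documents the buffer size rather than altering it.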
|
||||
@@ -1288,6 +1211,18 @@ __attribute__((noinline)) static void log_scan_result(const WiFiScanResult &res)
|
||||
#endif
|
||||
}
|
||||
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
|
||||
// Helper function to log non-matching scan results at verbose level
|
||||
__attribute__((noinline)) static void log_scan_result_non_matching(const WiFiScanResult &res) {
|
||||
char bssid_s[18];
|
||||
auto bssid = res.get_bssid();
|
||||
format_mac_addr_upper(bssid.data(), bssid_s);
|
||||
|
||||
ESP_LOGV(TAG, "- " LOG_SECRET("'%s'") " " LOG_SECRET("(%s) ") "%s", res.get_ssid().c_str(), bssid_s,
|
||||
LOG_STR_ARG(get_signal_bars(res.get_rssi())));
|
||||
}
|
||||
#endif
|
||||
|
||||
void WiFiComponent::check_scanning_finished() {
|
||||
if (!this->scan_done_) {
|
||||
if (millis() - this->action_started_ > WIFI_SCAN_TIMEOUT_MS) {
|
||||
@@ -1297,8 +1232,6 @@ void WiFiComponent::check_scanning_finished() {
|
||||
return;
|
||||
}
|
||||
this->scan_done_ = false;
|
||||
this->has_completed_scan_after_captive_portal_start_ =
|
||||
true; // Track that we've done a scan since captive portal started
|
||||
this->retry_hidden_mode_ = RetryHiddenMode::SCAN_BASED;
|
||||
|
||||
if (this->scan_result_.empty()) {
|
||||
@@ -1326,12 +1259,21 @@ void WiFiComponent::check_scanning_finished() {
|
||||
// Sort scan results using insertion sort for better memory efficiency
|
||||
insertion_sort_scan_results(this->scan_result_);
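insertion_sort_scan_results() is only referenced here; its body, and the comparison it uses, are outside the shown hunks. A rough sketch of an in-place insertion sort over the scan vector, with descending RSSI assumed as the key purely for illustration:

// Illustrative only: O(n^2) in-place sort with no scratch allocation, fine for small scan lists.
template<typename VectorType> static void insertion_sort_scan_results_sketch(VectorType &results) {
  for (size_t i = 1; i < results.size(); i++) {
    auto current = results[i];
    size_t j = i;
    while (j > 0 && results[j - 1].get_rssi() < current.get_rssi()) {
      results[j] = results[j - 1];  // shift weaker entries one slot right
      j--;
    }
    results[j] = current;
  }
}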
|
||||
|
||||
// Log matching networks (non-matching already logged at VERBOSE in scan callback)
|
||||
size_t non_matching_count = 0;
|
||||
for (auto &res : this->scan_result_) {
|
||||
if (res.get_matches()) {
|
||||
log_scan_result(res);
|
||||
} else {
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
|
||||
log_scan_result_non_matching(res);
|
||||
#else
|
||||
non_matching_count++;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
if (non_matching_count > 0) {
|
||||
ESP_LOGD(TAG, "- %zu non-matching (VERBOSE to show)", non_matching_count);
|
||||
}
|
||||
|
||||
// SYNCHRONIZATION POINT: Establish link between scan_result_[0] and selected_sta_index_
|
||||
// After sorting, scan_result_[0] contains the best network. Now find which sta_[i] config
|
||||
@@ -1590,10 +1532,7 @@ WiFiRetryPhase WiFiComponent::determine_next_phase_() {
|
||||
if (this->went_through_explicit_hidden_phase_()) {
|
||||
return WiFiRetryPhase::EXPLICIT_HIDDEN;
|
||||
}
|
||||
// Skip scanning when captive portal/improv is active to avoid disrupting AP,
|
||||
// BUT only if we've already completed at least one scan AFTER the portal started.
|
||||
// When captive portal first starts, scan results may be filtered/stale, so we need
|
||||
// to do one full scan to populate available networks for the captive portal UI.
|
||||
// Skip scanning when captive portal/improv is active to avoid disrupting AP.
|
||||
//
|
||||
// WHY SCANNING DISRUPTS AP MODE:
|
||||
// WiFi scanning requires the radio to leave the AP's channel and hop through
|
||||
@@ -1610,16 +1549,7 @@ WiFiRetryPhase WiFiComponent::determine_next_phase_() {
|
||||
//
|
||||
// This allows users to configure WiFi via captive portal while the device keeps
|
||||
// attempting to connect to all configured networks in sequence.
|
||||
// Captive portal needs scan results to show available networks.
|
||||
// If captive portal is active, only skip scanning if we've done a scan after it started.
|
||||
// If only improv is active (no captive portal), skip scanning since improv doesn't need results.
|
||||
if (this->is_captive_portal_active_()) {
|
||||
if (this->has_completed_scan_after_captive_portal_start_) {
|
||||
return WiFiRetryPhase::RETRY_HIDDEN;
|
||||
}
|
||||
// Need to scan for captive portal
|
||||
} else if (this->is_esp32_improv_active_()) {
|
||||
// Improv doesn't need scan results
|
||||
if (this->is_captive_portal_active_() || this->is_esp32_improv_active_()) {
|
||||
return WiFiRetryPhase::RETRY_HIDDEN;
|
||||
}
|
||||
return WiFiRetryPhase::SCAN_CONNECTING;
|
||||
@@ -2166,7 +2096,7 @@ void WiFiComponent::clear_roaming_state_() {
|
||||
|
||||
void WiFiComponent::release_scan_results_() {
|
||||
if (!this->keep_scan_results_) {
|
||||
#if defined(USE_RP2040) || defined(USE_ESP32)
|
||||
#ifdef USE_RP2040
|
||||
// std::vector - use swap trick since shrink_to_fit is non-binding
|
||||
decltype(this->scan_result_)().swap(this->scan_result_);
|
||||
#else
|
||||
|
||||
@@ -161,12 +161,9 @@ struct EAPAuth {
|
||||
|
||||
using bssid_t = std::array<uint8_t, 6>;
|
||||
|
||||
/// Initial reserve size for filtered scan results (typical: 1-3 matching networks per SSID)
|
||||
static constexpr size_t WIFI_SCAN_RESULT_FILTERED_RESERVE = 8;
|
||||
|
||||
// Use std::vector for RP2040 (callback-based) and ESP32 (destructive scan API)
|
||||
// Use FixedVector for ESP8266 and LibreTiny where two-pass exact allocation is possible
|
||||
#if defined(USE_RP2040) || defined(USE_ESP32)
|
||||
// Use std::vector for RP2040 since scan count is unknown (callback-based)
|
||||
// Use FixedVector for other platforms where count is queried first
|
||||
#ifdef USE_RP2040
|
||||
template<typename T> using wifi_scan_vector_t = std::vector<T>;
|
||||
#else
|
||||
template<typename T> using wifi_scan_vector_t = FixedVector<T>;
|
||||
@@ -542,13 +539,6 @@ class WiFiComponent : public Component {
|
||||
/// Check if an SSID was seen in the most recent scan results
|
||||
/// Used to skip hidden mode for SSIDs we know are visible
|
||||
bool ssid_was_seen_in_scan_(const std::string &ssid) const;
|
||||
/// Check if full scan results are needed (captive portal active, improv, listeners)
|
||||
bool needs_full_scan_results_() const;
|
||||
/// Check if network matches any configured network (for scan result filtering)
|
||||
/// Matches by SSID when configured, or by BSSID for BSSID-only configs
|
||||
bool matches_configured_network_(const char *ssid, const uint8_t *bssid) const;
|
||||
/// Log a discarded scan result at VERBOSE level (skipped during roaming scans to avoid log overflow)
|
||||
void log_discarded_scan_result_(const char *ssid, const uint8_t *bssid, int8_t rssi, uint8_t channel);
|
||||
/// Find next SSID that wasn't in scan results (might be hidden)
|
||||
/// Returns index of next potentially hidden SSID, or -1 if none found
|
||||
/// @param start_index Start searching from index after this (-1 to start from beginning)
|
||||
@@ -720,8 +710,6 @@ class WiFiComponent : public Component {
  bool enable_on_boot_{true};
  bool got_ipv4_address_{false};
  bool keep_scan_results_{false};
  bool has_completed_scan_after_captive_portal_start_{
      false};  // Tracks if we've completed a scan after captive portal started
  RetryHiddenMode retry_hidden_mode_{RetryHiddenMode::BLIND_RETRY};
  bool skip_cooldown_next_cycle_{false};
  bool post_connect_roaming_{true};  // Enabled by default

@@ -756,42 +756,24 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {

  if (status != OK) {
    ESP_LOGV(TAG, "Scan failed: %d", status);
    // Don't call retry_connect() here - this callback runs in SDK system context
    // where yield() cannot be called. Instead, just set scan_done_ and let
    // check_scanning_finished() handle the empty scan_result_ from loop context.
    this->scan_done_ = true;
    this->retry_connect();
    return;
  }

  // Count the number of results first
  auto *head = reinterpret_cast<bss_info *>(arg);
  bool needs_full = this->needs_full_scan_results_();

  // First pass: count matching networks (linked list is non-destructive)
  size_t total = 0;
  size_t count = 0;
  for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
    total++;
    const char *ssid_cstr = reinterpret_cast<const char *>(it->ssid);
    if (needs_full || this->matches_configured_network_(ssid_cstr, it->bssid)) {
      count++;
    }
    count++;
  }

  this->scan_result_.init(count);  // Exact allocation

  // Second pass: store matching networks
  this->scan_result_.init(count);
  for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
    const char *ssid_cstr = reinterpret_cast<const char *>(it->ssid);
    if (needs_full || this->matches_configured_network_(ssid_cstr, it->bssid)) {
      this->scan_result_.emplace_back(
          bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
          std::string(ssid_cstr, it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN, it->is_hidden != 0);
    } else {
      this->log_discarded_scan_result_(ssid_cstr, it->bssid, it->rssi, it->channel);
    }
    this->scan_result_.emplace_back(
        bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
        std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN,
        it->is_hidden != 0);
  }
  ESP_LOGV(TAG, "Scan complete: %zu found, %zu stored%s", total, this->scan_result_.size(),
           needs_full ? "" : " (filtered)");
  this->scan_done_ = true;
#ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
  for (auto *listener : global_wifi_component->scan_results_listeners_) {

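The two passes above are an instance of a general count-then-copy pattern: walk the source once to count what will be kept, allocate the destination at exactly that size, then walk again to store. A stripped-down sketch of the same idea over a linked list (illustrative types, not the ESPHome ones):

#include <cstddef>
#include <vector>

struct Node {
  int value;
  Node *next;
};

// Returns only the elements accepted by `keep`, allocating the result exactly once.
std::vector<int> filter_exact(const Node *head, bool (*keep)(int)) {
  std::size_t count = 0;
  for (const Node *it = head; it != nullptr; it = it->next) {
    if (keep(it->value))
      count++;  // first pass: count only
  }
  std::vector<int> out;
  out.reserve(count);  // single allocation of the exact size
  for (const Node *it = head; it != nullptr; it = it->next) {
    if (keep(it->value))
      out.push_back(it->value);  // second pass: store
  }
  return out;
}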
@@ -828,21 +828,11 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
  }

  uint16_t number = it.number;
  bool needs_full = this->needs_full_scan_results_();

  // Smart reserve: full capacity if needed, small reserve otherwise
  if (needs_full) {
    this->scan_result_.reserve(number);
  } else {
    this->scan_result_.reserve(WIFI_SCAN_RESULT_FILTERED_RESERVE);
  }

  scan_result_.init(number);
#ifdef USE_ESP32_HOSTED
  // getting records one at a time fails on P4 with hosted esp32 WiFi coprocessor
  // Presumably an upstream bug, work-around by getting all records at once
  // Use stack buffer (3904 bytes / ~80 bytes per record = ~48 records) with heap fallback
  static constexpr size_t SCAN_RECORD_STACK_COUNT = 3904 / sizeof(wifi_ap_record_t);
  SmallBufferWithHeapFallback<SCAN_RECORD_STACK_COUNT, wifi_ap_record_t> records(number);
  auto records = std::make_unique<wifi_ap_record_t[]>(number);
  err = esp_wifi_scan_get_ap_records(&number, records.get());
  if (err != ESP_OK) {
    esp_wifi_clear_ap_list();
@@ -850,7 +840,7 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
    return;
  }
  for (uint16_t i = 0; i < number; i++) {
    wifi_ap_record_t &record = records.get()[i];
    wifi_ap_record_t &record = records[i];
#else
  // Process one record at a time to avoid large buffer allocation
  for (uint16_t i = 0; i < number; i++) {
@@ -862,23 +852,12 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
      break;
    }
#endif  // USE_ESP32_HOSTED

    // Check C string first - avoid std::string construction for non-matching networks
    const char *ssid_cstr = reinterpret_cast<const char *>(record.ssid);

    // Only construct std::string and store if needed
    if (needs_full || this->matches_configured_network_(ssid_cstr, record.bssid)) {
      bssid_t bssid;
      std::copy(record.bssid, record.bssid + 6, bssid.begin());
      std::string ssid(ssid_cstr);
      this->scan_result_.emplace_back(bssid, std::move(ssid), record.primary, record.rssi,
                                      record.authmode != WIFI_AUTH_OPEN, ssid_cstr[0] == '\0');
    } else {
      this->log_discarded_scan_result_(ssid_cstr, record.bssid, record.rssi, record.primary);
    }
    bssid_t bssid;
    std::copy(record.bssid, record.bssid + 6, bssid.begin());
    std::string ssid(reinterpret_cast<const char *>(record.ssid));
    scan_result_.emplace_back(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN,
                              ssid.empty());
  }
  ESP_LOGV(TAG, "Scan complete: %u found, %zu stored%s", number, this->scan_result_.size(),
           needs_full ? "" : " (filtered)");
#ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
  for (auto *listener : this->scan_results_listeners_) {
    listener->on_wifi_scan_results(this->scan_result_);

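SmallBufferWithHeapFallback is used above so that a typical scan fits in a fixed stack buffer while an unusually large scan still works. The sketch below shows the general shape of such a helper as an assumption for illustration; it is not the actual ESPHome class:

#include <cstddef>
#include <memory>

// Provides N elements of inline storage and falls back to a heap allocation
// only when more than N elements are requested.
template<std::size_t N, typename T> class SmallBufferSketch {
 public:
  explicit SmallBufferSketch(std::size_t count) {
    if (count > N)
      this->heap_ = std::make_unique<T[]>(count);
  }
  T *get() { return this->heap_ ? this->heap_.get() : this->inline_; }
  T &operator[](std::size_t i) { return this->get()[i]; }

 private:
  T inline_[N]{};
  std::unique_ptr<T[]> heap_;
};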
@@ -670,39 +670,18 @@ void WiFiComponent::wifi_scan_done_callback_() {
  if (num < 0)
    return;

  bool needs_full = this->needs_full_scan_results_();

  // Access scan results directly via WiFi.scan struct to avoid Arduino String allocations
  // WiFi.scan is public in LibreTiny for WiFiEvents & WiFiScan static handlers
  auto *scan = WiFi.scan;

  // First pass: count matching networks
  size_t count = 0;
  this->scan_result_.init(static_cast<unsigned int>(num));
  for (int i = 0; i < num; i++) {
    const char *ssid_cstr = scan->ap[i].ssid;
    if (needs_full || this->matches_configured_network_(ssid_cstr, scan->ap[i].bssid.addr)) {
      count++;
    }
  }
    String ssid = WiFi.SSID(i);
    wifi_auth_mode_t authmode = WiFi.encryptionType(i);
    int32_t rssi = WiFi.RSSI(i);
    uint8_t *bssid = WiFi.BSSID(i);
    int32_t channel = WiFi.channel(i);

  this->scan_result_.init(count);  // Exact allocation

  // Second pass: store matching networks
  for (int i = 0; i < num; i++) {
    const char *ssid_cstr = scan->ap[i].ssid;
    if (needs_full || this->matches_configured_network_(ssid_cstr, scan->ap[i].bssid.addr)) {
      auto &ap = scan->ap[i];
      this->scan_result_.emplace_back(bssid_t{ap.bssid.addr[0], ap.bssid.addr[1], ap.bssid.addr[2], ap.bssid.addr[3],
                                              ap.bssid.addr[4], ap.bssid.addr[5]},
                                      std::string(ssid_cstr), ap.channel, ap.rssi, ap.auth != WIFI_AUTH_OPEN,
                                      ssid_cstr[0] == '\0');
    } else {
      auto &ap = scan->ap[i];
      this->log_discarded_scan_result_(ssid_cstr, ap.bssid.addr, ap.rssi, ap.channel);
    }
    this->scan_result_.emplace_back(bssid_t{bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]},
                                    std::string(ssid.c_str()), channel, rssi, authmode != WIFI_AUTH_OPEN,
                                    ssid.length() == 0);
  }
  ESP_LOGV(TAG, "Scan complete: %d found, %zu stored%s", num, this->scan_result_.size(),
           needs_full ? "" : " (filtered)");
  WiFi.scanDelete();
#ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
  for (auto *listener : this->scan_results_listeners_) {

@@ -21,7 +21,6 @@ static const char *const TAG = "wifi_pico_w";
// Track previous state for detecting changes
static bool s_sta_was_connected = false;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
static bool s_sta_had_ip = false;         // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)
static size_t s_scan_result_count = 0;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

bool WiFiComponent::wifi_mode_(optional<bool> sta, optional<bool> ap) {
  if (sta.has_value()) {
@@ -138,20 +137,10 @@ int WiFiComponent::s_wifi_scan_result(void *env, const cyw43_ev_scan_result_t *r
}

void WiFiComponent::wifi_scan_result(void *env, const cyw43_ev_scan_result_t *result) {
  s_scan_result_count++;
  const char *ssid_cstr = reinterpret_cast<const char *>(result->ssid);

  // Skip networks that don't match any configured network (unless full results needed)
  if (!this->needs_full_scan_results_() && !this->matches_configured_network_(ssid_cstr, result->bssid)) {
    this->log_discarded_scan_result_(ssid_cstr, result->bssid, result->rssi, result->channel);
    return;
  }

  bssid_t bssid;
  std::copy(result->bssid, result->bssid + 6, bssid.begin());
  std::string ssid(ssid_cstr);
  WiFiScanResult res(bssid, std::move(ssid), result->channel, result->rssi, result->auth_mode != CYW43_AUTH_OPEN,
                     ssid_cstr[0] == '\0');
  std::string ssid(reinterpret_cast<const char *>(result->ssid));
  WiFiScanResult res(bssid, ssid, result->channel, result->rssi, result->auth_mode != CYW43_AUTH_OPEN, ssid.empty());
  if (std::find(this->scan_result_.begin(), this->scan_result_.end(), res) == this->scan_result_.end()) {
    this->scan_result_.push_back(res);
  }
@@ -160,7 +149,6 @@ void WiFiComponent::wifi_scan_result(void *env, const cyw43_ev_scan_result_t *re
bool WiFiComponent::wifi_scan_start_(bool passive) {
  this->scan_result_.clear();
  this->scan_done_ = false;
  s_scan_result_count = 0;
  cyw43_wifi_scan_options_t scan_options = {0};
  scan_options.scan_type = passive ? 1 : 0;
  int err = cyw43_wifi_scan(&cyw43_state, &scan_options, nullptr, &s_wifi_scan_result);
@@ -256,9 +244,7 @@ void WiFiComponent::wifi_loop_() {
  // Handle scan completion
  if (this->state_ == WIFI_COMPONENT_STATE_STA_SCANNING && !cyw43_wifi_scan_active(&cyw43_state)) {
    this->scan_done_ = true;
    bool needs_full = this->needs_full_scan_results_();
    ESP_LOGV(TAG, "Scan complete: %zu found, %zu stored%s", s_scan_result_count, this->scan_result_.size(),
             needs_full ? "" : " (filtered)");
    ESP_LOGV(TAG, "Scan done");
#ifdef USE_WIFI_SCAN_RESULTS_LISTENERS
    for (auto *listener : this->scan_results_listeners_) {
      listener->on_wifi_scan_results(this->scan_result_);

@@ -12,7 +12,6 @@ from esphome.core import CORE
from esphome.types import ConfigType

from .const_zephyr import (
    CONF_IEEE802154_VENDOR_OUI,
    CONF_MAX_EP_NUMBER,
    CONF_ON_JOIN,
    CONF_POWER_SOURCE,
@@ -24,12 +23,7 @@ from .const_zephyr import (
    ZigbeeComponent,
    zigbee_ns,
)
from .zigbee_zephyr import (
    zephyr_binary_sensor,
    zephyr_number,
    zephyr_sensor,
    zephyr_switch,
)
from .zigbee_zephyr import zephyr_binary_sensor, zephyr_sensor, zephyr_switch

_LOGGER = logging.getLogger(__name__)

@@ -48,7 +42,6 @@ def zigbee_set_core_data(config: ConfigType) -> ConfigType:
BINARY_SENSOR_SCHEMA = cv.Schema({}).extend(zephyr_binary_sensor)
SENSOR_SCHEMA = cv.Schema({}).extend(zephyr_sensor)
SWITCH_SCHEMA = cv.Schema({}).extend(zephyr_switch)
NUMBER_SCHEMA = cv.Schema({}).extend(zephyr_number)

CONFIG_SCHEMA = cv.All(
    cv.Schema(
@@ -65,13 +58,6 @@ CONFIG_SCHEMA = cv.All(
            cv.Optional(CONF_POWER_SOURCE, default="DC_SOURCE"): cv.enum(
                POWER_SOURCE, upper=True
            ),
            cv.Optional(CONF_IEEE802154_VENDOR_OUI): cv.All(
                cv.Any(
                    cv.int_range(min=0x000000, max=0xFFFFFF),
                    cv.one_of(*["random"], lower=True),
                ),
                cv.requires_component("nrf52"),
            ),
        }
    ).extend(cv.COMPONENT_SCHEMA),
    zigbee_set_core_data,
@@ -131,25 +117,10 @@ async def setup_switch(entity: cg.MockObj, config: ConfigType) -> None:
    await zephyr_setup_switch(entity, config)


async def setup_number(
    entity: cg.MockObj,
    config: ConfigType,
    min_value: float,
    max_value: float,
    step: float,
) -> None:
    if not config.get(CONF_ZIGBEE_ID) or config.get(CONF_INTERNAL):
        return
    if CORE.using_zephyr:
        from .zigbee_zephyr import zephyr_setup_number

        await zephyr_setup_number(entity, config, min_value, max_value, step)


def consume_endpoint(config: ConfigType) -> ConfigType:
    if not config.get(CONF_ZIGBEE_ID) or config.get(CONF_INTERNAL):
        return config
    if CONF_NAME in config and " " in config[CONF_NAME]:
    if " " in config[CONF_NAME]:
        _LOGGER.warning(
            "Spaces in '%s' work with ZHA but not Zigbee2MQTT. For Zigbee2MQTT use '%s'",
            config[CONF_NAME],
@@ -173,10 +144,6 @@ def validate_switch(config: ConfigType) -> ConfigType:
    return consume_endpoint(config)


def validate_number(config: ConfigType) -> ConfigType:
    return consume_endpoint(config)


ZIGBEE_ACTION_SCHEMA = automation.maybe_simple_id(
    cv.Schema(
        {

@@ -4,7 +4,6 @@ zigbee_ns = cg.esphome_ns.namespace("zigbee")
ZigbeeComponent = zigbee_ns.class_("ZigbeeComponent", cg.Component)
BinaryAttrs = zigbee_ns.struct("BinaryAttrs")
AnalogAttrs = zigbee_ns.struct("AnalogAttrs")
AnalogAttrsOutput = zigbee_ns.struct("AnalogAttrsOutput")

CONF_MAX_EP_NUMBER = 8
CONF_ZIGBEE_ID = "zigbee_id"
@@ -13,7 +12,6 @@ CONF_WIPE_ON_BOOT = "wipe_on_boot"
CONF_ZIGBEE_BINARY_SENSOR = "zigbee_binary_sensor"
CONF_ZIGBEE_SENSOR = "zigbee_sensor"
CONF_ZIGBEE_SWITCH = "zigbee_switch"
CONF_ZIGBEE_NUMBER = "zigbee_number"
CONF_POWER_SOURCE = "power_source"
POWER_SOURCE = {
    "UNKNOWN": "ZB_ZCL_BASIC_POWER_SOURCE_UNKNOWN",
@@ -24,7 +22,6 @@ POWER_SOURCE = {
    "EMERGENCY_MAINS_CONST": "ZB_ZCL_BASIC_POWER_SOURCE_EMERGENCY_MAINS_CONST",
    "EMERGENCY_MAINS_TRANSF": "ZB_ZCL_BASIC_POWER_SOURCE_EMERGENCY_MAINS_TRANSF",
}
CONF_IEEE802154_VENDOR_OUI = "ieee802154_vendor_oui"

# Keys for CORE.data storage
KEY_ZIGBEE = "zigbee"
@@ -40,4 +37,3 @@ ZB_ZCL_CLUSTER_ID_IDENTIFY = "ZB_ZCL_CLUSTER_ID_IDENTIFY"
ZB_ZCL_CLUSTER_ID_BINARY_INPUT = "ZB_ZCL_CLUSTER_ID_BINARY_INPUT"
ZB_ZCL_CLUSTER_ID_ANALOG_INPUT = "ZB_ZCL_CLUSTER_ID_ANALOG_INPUT"
ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT = "ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT"
ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT = "ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT"

@@ -1,86 +0,0 @@
import esphome.codegen as cg
from esphome.components import time as time_
import esphome.config_validation as cv
from esphome.const import CONF_ID
from esphome.core import CORE
from esphome.types import ConfigType

from .. import consume_endpoint
from ..const_zephyr import CONF_ZIGBEE_ID, zigbee_ns
from ..zigbee_zephyr import (
    ZigbeeClusterDesc,
    ZigbeeComponent,
    get_slot_index,
    zigbee_new_attr_list,
    zigbee_new_cluster_list,
    zigbee_new_variable,
    zigbee_register_ep,
)

DEPENDENCIES = ["zigbee"]

ZigbeeTime = zigbee_ns.class_("ZigbeeTime", time_.RealTimeClock)

CONFIG_SCHEMA = cv.All(
    time_.TIME_SCHEMA.extend(
        {
            cv.GenerateID(): cv.declare_id(ZigbeeTime),
            cv.OnlyWith(CONF_ZIGBEE_ID, ["nrf52", "zigbee"]): cv.use_id(
                ZigbeeComponent
            ),
        }
    )
    .extend(cv.COMPONENT_SCHEMA)
    .extend(cv.polling_component_schema("1s")),
    consume_endpoint,
)


async def to_code(config: ConfigType) -> None:
    CORE.add_job(_add_time, config)


async def _add_time(config: ConfigType) -> None:
    slot_index = get_slot_index()

    # Create unique names for this sensor's variables based on slot index
    prefix = f"zigbee_ep{slot_index + 1}"
    attrs_name = f"{prefix}_time_attrs"
    attr_list_name = f"{prefix}_time_attrib_list"
    cluster_list_name = f"{prefix}_cluster_list"
    ep_name = f"{prefix}_ep"

    # Create the binary attributes structure
    time_attrs = zigbee_new_variable(attrs_name, "zb_zcl_time_attrs_t")
    attr_list = zigbee_new_attr_list(
        attr_list_name,
        "ZB_ZCL_DECLARE_TIME_ATTR_LIST",
        str(time_attrs),
    )

    # Create cluster list and register endpoint
    cluster_list_name, clusters = zigbee_new_cluster_list(
        cluster_list_name,
        [
            ZigbeeClusterDesc("ZB_ZCL_CLUSTER_ID_TIME", attr_list),
            ZigbeeClusterDesc("ZB_ZCL_CLUSTER_ID_TIME"),
        ],
    )
    zigbee_register_ep(
        ep_name,
        cluster_list_name,
        0,
        clusters,
        slot_index,
        "ZB_HA_CUSTOM_ATTR_DEVICE_ID",
    )

    # Create the ZigbeeTime component
    var = cg.new_Pvariable(config[CONF_ID])
    await time_.register_time(var, config)
    await cg.register_component(var, config)

    cg.add(var.set_endpoint(slot_index + 1))
    cg.add(var.set_cluster_attributes(time_attrs))
    hub = await cg.get_variable(config[CONF_ZIGBEE_ID])
    cg.add(var.set_parent(hub))
@@ -1,87 +0,0 @@
#include "zigbee_time_zephyr.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_TIME)
#include "esphome/core/log.h"

namespace esphome::zigbee {

static const char *const TAG = "zigbee.time";

// This time standard is the number of
// seconds since 0 hrs 0 mins 0 sec on 1st January 2000 UTC (Universal Coordinated Time).
constexpr time_t EPOCH_2000 = 946684800;

ZigbeeTime *global_time = nullptr;  // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

void ZigbeeTime::sync_time(zb_ret_t status, zb_uint32_t auth_level, zb_uint16_t short_addr, zb_uint8_t endpoint,
                           zb_uint32_t nw_time) {
  if (status == RET_OK && auth_level >= ZB_ZCL_TIME_HAS_SYNCHRONIZED_BIT) {
    global_time->set_epoch_time(nw_time + EPOCH_2000);
  } else if (status != RET_TIMEOUT || !global_time->has_time_) {
    ESP_LOGE(TAG, "Status: %d, auth_level: %u, short_addr: %d, endpoint: %d, nw_time: %u", status, auth_level,
             short_addr, endpoint, nw_time);
  }
}

void ZigbeeTime::setup() {
  global_time = this;
  this->parent_->add_callback(this->endpoint_, [this](zb_bufid_t bufid) { this->zcl_device_cb_(bufid); });
  synchronize_epoch_(EPOCH_2000);
  this->parent_->add_join_callback([this]() { zb_zcl_time_server_synchronize(this->endpoint_, sync_time); });
}

void ZigbeeTime::dump_config() {
  ESP_LOGCONFIG(TAG,
                "Zigbee Time\n"
                "  Endpoint: %d",
                this->endpoint_);
  RealTimeClock::dump_config();
}

void ZigbeeTime::update() {
  time_t time = timestamp_now();
  this->cluster_attributes_->time = time - EPOCH_2000;
}

void ZigbeeTime::set_epoch_time(uint32_t epoch) {
  this->defer([this, epoch]() {
    this->synchronize_epoch_(epoch);
    this->has_time_ = true;
  });
}

void ZigbeeTime::zcl_device_cb_(zb_bufid_t bufid) {
  zb_zcl_device_callback_param_t *p_device_cb_param = ZB_BUF_GET_PARAM(bufid, zb_zcl_device_callback_param_t);
  zb_zcl_device_callback_id_t device_cb_id = p_device_cb_param->device_cb_id;
  zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;

  switch (device_cb_id) {
    /* ZCL set attribute value */
    case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
      if (cluster_id == ZB_ZCL_CLUSTER_ID_TIME) {
        if (attr_id == ZB_ZCL_ATTR_TIME_TIME_ID) {
          zb_uint32_t value = p_device_cb_param->cb_param.set_attr_value_param.values.data32;
          ESP_LOGI(TAG, "Synchronize time to %u", value);
          this->defer([this, value]() { synchronize_epoch_(value + EPOCH_2000); });
        } else if (attr_id == ZB_ZCL_ATTR_TIME_TIME_STATUS_ID) {
          zb_uint8_t value = p_device_cb_param->cb_param.set_attr_value_param.values.data8;
          ESP_LOGI(TAG, "Time status %hd", value);
          this->defer([this, value]() { this->has_time_ = ZB_ZCL_TIME_TIME_STATUS_SYNCHRONIZED_BIT_IS_SET(value); });
        }
      } else {
        /* other clusters attribute handled here */
        ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
        p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      }
      break;
    default:
      p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      break;
  }

  ESP_LOGD(TAG, "Zcl_device_cb_ status: %hd", p_device_cb_param->status);
}

}  // namespace esphome::zigbee

#endif
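The ZCL Time cluster counts seconds from 2000-01-01 00:00:00 UTC, while time_t values here count from the 1970 Unix epoch; 946684800 is the offset between the two. A minimal conversion sketch:

#include <cstdint>
#include <ctime>

constexpr std::time_t EPOCH_2000 = 946684800;  // 2000-01-01T00:00:00Z expressed as Unix time

// ZCL time (seconds since 2000) <-> Unix timestamp (seconds since 1970).
inline std::time_t zcl_to_unix(uint32_t zcl_seconds) { return static_cast<std::time_t>(zcl_seconds) + EPOCH_2000; }
inline uint32_t unix_to_zcl(std::time_t unix_seconds) { return static_cast<uint32_t>(unix_seconds - EPOCH_2000); }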
@@ -1,38 +0,0 @@
#pragma once
#include "esphome/core/defines.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_TIME)
#include "esphome/core/component.h"
#include "esphome/components/time/real_time_clock.h"
#include "esphome/components/zigbee/zigbee_zephyr.h"

extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
}

namespace esphome::zigbee {

class ZigbeeTime : public time::RealTimeClock, public ZigbeeEntity {
 public:
  void setup() override;
  void dump_config() override;
  void update() override;

  void set_cluster_attributes(zb_zcl_time_attrs_t &cluster_attributes) {
    this->cluster_attributes_ = &cluster_attributes;
  }

  void set_epoch_time(uint32_t epoch);

 protected:
  static void sync_time(zb_ret_t status, zb_uint32_t auth_level, zb_uint16_t short_addr, zb_uint8_t endpoint,
                        zb_uint32_t nw_time);
  void zcl_device_cb_(zb_bufid_t bufid);
  zb_zcl_time_attrs_t *cluster_attributes_{nullptr};

  bool has_time_{false};
};

}  // namespace esphome::zigbee

#endif
@@ -22,7 +22,7 @@ void ZigbeeBinarySensor::setup() {
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_BINARY_INPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_BINARY_INPUT_PRESENT_VALUE_ID, &this->cluster_attributes_->present_value,
                         ZB_FALSE);
    this->parent_->force_report();
    this->parent_->flush();
  });
}

@@ -1,111 +0,0 @@
#include "zigbee_number_zephyr.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_NUMBER)
#include "esphome/core/log.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
#include <zb_nrf_platform.h>
#include <zigbee/zigbee_app_utils.h>
#include <zb_error_to_string.h>
}
namespace esphome::zigbee {

static const char *const TAG = "zigbee.number";

void ZigbeeNumber::setup() {
  this->parent_->add_callback(this->endpoint_, [this](zb_bufid_t bufid) { this->zcl_device_cb_(bufid); });
  this->number_->add_on_state_callback([this](float state) {
    this->cluster_attributes_->present_value = state;
    ESP_LOGD(TAG, "Set attribute endpoint: %d, present_value %f", this->endpoint_,
             this->cluster_attributes_->present_value);
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, (zb_uint8_t *) &cluster_attributes_->present_value,
                         ZB_FALSE);
    this->parent_->force_report();
  });
}

void ZigbeeNumber::dump_config() {
  ESP_LOGCONFIG(TAG,
                "Zigbee Number\n"
                "  Endpoint: %d, present_value %f",
                this->endpoint_, this->cluster_attributes_->present_value);
}

void ZigbeeNumber::zcl_device_cb_(zb_bufid_t bufid) {
  zb_zcl_device_callback_param_t *p_device_cb_param = ZB_BUF_GET_PARAM(bufid, zb_zcl_device_callback_param_t);
  zb_zcl_device_callback_id_t device_cb_id = p_device_cb_param->device_cb_id;
  zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;

  switch (device_cb_id) {
    /* ZCL set attribute value */
    case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
      if (cluster_id == ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT) {
        ESP_LOGI(TAG, "Analog output attribute setting");
        if (attr_id == ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID) {
          float value =
              *reinterpret_cast<const float *>(&p_device_cb_param->cb_param.set_attr_value_param.values.data32);
          this->defer([this, value]() {
            this->cluster_attributes_->present_value = value;
            auto call = this->number_->make_call();
            call.set_value(value);
            call.perform();
          });
        }
      } else {
        /* other clusters attribute handled here */
        ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
        p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      }
      break;
    default:
      p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      break;
  }

  ESP_LOGD(TAG, "%s status: %hd", __func__, p_device_cb_param->status);
}

const zb_uint8_t ZB_ZCL_ANALOG_OUTPUT_STATUS_FLAG_MAX_VALUE = 0x0F;

static zb_ret_t check_value_analog_server(zb_uint16_t attr_id, zb_uint8_t endpoint,
                                          zb_uint8_t *value) {  // NOLINT(readability-non-const-parameter)
  zb_ret_t ret = RET_OK;
  ZVUNUSED(endpoint);

  switch (attr_id) {
    case ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID:
      ret = ZB_ZCL_CHECK_BOOL_VALUE(*value) ? RET_OK : RET_ERROR;
      break;
    case ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID:
      break;

    case ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID:
      if (*value > ZB_ZCL_ANALOG_OUTPUT_STATUS_FLAG_MAX_VALUE) {
        ret = RET_ERROR;
      }
      break;

    default:
      break;
  }

  return ret;
}

}  // namespace esphome::zigbee

void zb_zcl_analog_output_init_server() {
  zb_zcl_add_cluster_handlers(ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                              esphome::zigbee::check_value_analog_server, (zb_zcl_cluster_write_attr_hook_t) NULL,
                              (zb_zcl_cluster_handler_t) NULL);
}

void zb_zcl_analog_output_init_client() {
  zb_zcl_add_cluster_handlers(ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT, ZB_ZCL_CLUSTER_CLIENT_ROLE,
                              (zb_zcl_cluster_check_value_t) NULL, (zb_zcl_cluster_write_attr_hook_t) NULL,
                              (zb_zcl_cluster_handler_t) NULL);
}

#endif
@@ -1,118 +0,0 @@
#pragma once

#include "esphome/core/defines.h"
#if defined(USE_ZIGBEE) && defined(USE_NRF52) && defined(USE_NUMBER)
#include "esphome/components/zigbee/zigbee_zephyr.h"
#include "esphome/core/component.h"
#include "esphome/components/number/number.h"
extern "C" {
#include <zboss_api.h>
#include <zboss_api_addons.h>
}

enum {
  ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID = 0x001C,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID = 0x0041,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID = 0x0045,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID = 0x0051,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID = 0x0055,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID = 0x006A,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID = 0x006F,
  ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID = 0x0075,
};

#define ZB_ZCL_ANALOG_OUTPUT_CLUSTER_REVISION_DEFAULT ((zb_uint16_t) 0x0001u)

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID, ZB_ZCL_ATTR_TYPE_CHAR_STRING, ZB_ZCL_ATTR_ACCESS_READ_ONLY, \
    (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), (void *) (data_ptr) \
  }

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID, ZB_ZCL_ATTR_TYPE_BOOL, \
    ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
    (void *) (data_ptr) \
  }
// PresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
    ZB_ZCL_ATTR_ACCESS_READ_WRITE | ZB_ZCL_ATTR_ACCESS_REPORTING, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
    (void *) (data_ptr) \
  }
// MaxPresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
    ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
    (void *) (data_ptr) \
  }
// MinPresentValue
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
    ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
    (void *) (data_ptr) \
  }
// Resolution
#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID, ZB_ZCL_ATTR_TYPE_SINGLE, \
    ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_WRITE_OPTIONAL, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
    (void *) (data_ptr) \
  }

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID, ZB_ZCL_ATTR_TYPE_8BITMAP, \
    ZB_ZCL_ATTR_ACCESS_READ_ONLY | ZB_ZCL_ATTR_ACCESS_REPORTING, (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), \
    (void *) (data_ptr) \
  }

#define ZB_SET_ATTR_DESCR_WITH_ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID(data_ptr) \
  { \
    ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID, ZB_ZCL_ATTR_TYPE_16BIT_ENUM, ZB_ZCL_ATTR_ACCESS_READ_ONLY, \
    (ZB_ZCL_NON_MANUFACTURER_SPECIFIC), (void *) (data_ptr) \
  }

#define ESPHOME_ZB_ZCL_DECLARE_ANALOG_OUTPUT_ATTRIB_LIST(attr_list, out_of_service, present_value, status_flag, \
                                                         max_present_value, min_present_value, resolution, \
                                                         engineering_units, description) \
  ZB_ZCL_START_DECLARE_ATTRIB_LIST_CLUSTER_REVISION(attr_list, ZB_ZCL_ANALOG_OUTPUT) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_OUT_OF_SERVICE_ID, (out_of_service)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_PRESENT_VALUE_ID, (present_value)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_STATUS_FLAG_ID, (status_flag)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_MAX_PRESENT_VALUE_ID, (max_present_value)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_MIN_PRESENT_VALUE_ID, (min_present_value)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_RESOLUTION_ID, (resolution)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_ENGINEERING_UNITS_ID, (engineering_units)) \
  ZB_ZCL_SET_ATTR_DESC(ZB_ZCL_ATTR_ANALOG_OUTPUT_DESCRIPTION_ID, (description)) \
  ZB_ZCL_FINISH_DECLARE_ATTRIB_LIST

void zb_zcl_analog_output_init_server();
void zb_zcl_analog_output_init_client();
#define ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT_SERVER_ROLE_INIT zb_zcl_analog_output_init_server
#define ZB_ZCL_CLUSTER_ID_ANALOG_OUTPUT_CLIENT_ROLE_INIT zb_zcl_analog_output_init_client

namespace esphome::zigbee {

class ZigbeeNumber : public ZigbeeEntity, public Component {
 public:
  ZigbeeNumber(number::Number *n) : number_(n) {}
  void set_cluster_attributes(AnalogAttrsOutput &cluster_attributes) {
    this->cluster_attributes_ = &cluster_attributes;
  }

  void setup() override;
  void dump_config() override;

 protected:
  number::Number *number_;
  AnalogAttrsOutput *cluster_attributes_{nullptr};
  void zcl_device_cb_(zb_bufid_t bufid);
};

}  // namespace esphome::zigbee
#endif
@@ -21,7 +21,7 @@ void ZigbeeSensor::setup() {
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_ANALOG_INPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_ANALOG_INPUT_PRESENT_VALUE_ID,
                         (zb_uint8_t *) &this->cluster_attributes_->present_value, ZB_FALSE);
    this->parent_->force_report();
    this->parent_->flush();
  });
}

@@ -31,7 +31,7 @@ void ZigbeeSwitch::setup() {
    ZB_ZCL_SET_ATTRIBUTE(this->endpoint_, ZB_ZCL_CLUSTER_ID_BINARY_OUTPUT, ZB_ZCL_CLUSTER_SERVER_ROLE,
                         ZB_ZCL_ATTR_BINARY_OUTPUT_PRESENT_VALUE_ID, &this->cluster_attributes_->present_value,
                         ZB_FALSE);
    this->parent_->force_report();
    this->parent_->flush();
  });
}

@@ -41,6 +41,8 @@ void ZigbeeSwitch::zcl_device_cb_(zb_bufid_t bufid) {
  zb_uint16_t cluster_id = p_device_cb_param->cb_param.set_attr_value_param.cluster_id;
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;

  p_device_cb_param->status = RET_OK;

  switch (device_cb_id) {
    /* ZCL set attribute value */
    case ZB_ZCL_SET_ATTR_VALUE_CB_ID:
@@ -50,17 +52,16 @@ void ZigbeeSwitch::zcl_device_cb_(zb_bufid_t bufid) {
        if (attr_id == ZB_ZCL_ATTR_BINARY_OUTPUT_PRESENT_VALUE_ID) {
          this->defer([this, value]() {
            this->cluster_attributes_->present_value = value ? ZB_TRUE : ZB_FALSE;
            this->switch_->control(value);
            this->switch_->publish_state(value);
          });
        }
      } else {
        /* other clusters attribute handled here */
        ESP_LOGI(TAG, "Unhandled cluster attribute id: %d", cluster_id);
        p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      }
      break;
    default:
      p_device_cb_param->status = RET_NOT_IMPLEMENTED;
      p_device_cb_param->status = RET_ERROR;
      break;
  }

@@ -101,8 +101,8 @@ void ZigbeeComponent::zcl_device_cb(zb_bufid_t bufid) {
  zb_uint16_t attr_id = p_device_cb_param->cb_param.set_attr_value_param.attr_id;
  auto endpoint = p_device_cb_param->endpoint;

  ESP_LOGI(TAG, "%s id %hd, cluster_id %d, attr_id %d, endpoint: %d", __func__, device_cb_id, cluster_id, attr_id,
           endpoint);
  ESP_LOGI(TAG, "Zcl_device_cb %s id %hd, cluster_id %d, attr_id %d, endpoint: %d", __func__, device_cb_id, cluster_id,
           attr_id, endpoint);

  /* Set default response value. */
  p_device_cb_param->status = RET_OK;
@@ -112,10 +112,10 @@ void ZigbeeComponent::zcl_device_cb(zb_bufid_t bufid) {
    const auto &cb = global_zigbee->callbacks_[endpoint - 1];
    if (cb) {
      cb(bufid);
      return;
    }
    return;
  }
  p_device_cb_param->status = RET_NOT_IMPLEMENTED;
  p_device_cb_param->status = RET_ERROR;
}

void ZigbeeComponent::on_join_() {
@@ -230,11 +230,11 @@ static void send_attribute_report(zb_bufid_t bufid, zb_uint16_t cmd_id) {
  zb_buf_free(bufid);
}

void ZigbeeComponent::force_report() { this->force_report_ = true; }
void ZigbeeComponent::flush() { this->need_flush_ = true; }

void ZigbeeComponent::loop() {
  if (this->force_report_) {
    this->force_report_ = false;
  if (this->need_flush_) {
    this->need_flush_ = false;
    zb_buf_get_out_delayed_ext(send_attribute_report, 0, 0);
  }
}

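force_report() and flush() above only set flags; the ZBOSS buffer request happens later from loop(), which keeps the work out of the caller's context. A generic sketch of this defer-to-loop pattern, with illustrative names rather than the ESPHome API:

// Callers set a flag from any code path; loop() notices the flag, clears it,
// and performs the deferred work exactly once, in loop context.
class DeferredWorker {
 public:
  void request_flush() { this->need_flush_ = true; }

  void loop() {
    if (this->need_flush_) {
      this->need_flush_ = false;
      this->do_flush_();
    }
  }

 private:
  void do_flush_() { /* perform the deferred work, e.g. send a report */ }
  bool need_flush_{false};
};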
@@ -60,12 +60,6 @@ struct AnalogAttrs {
  zb_uchar_t description[ZB_ZCL_MAX_STRING_SIZE];
};

struct AnalogAttrsOutput : AnalogAttrs {
  float max_present_value;
  float min_present_value;
  float resolution;
};

class ZigbeeComponent : public Component {
 public:
  void setup() override;
@@ -78,7 +72,7 @@ class ZigbeeComponent : public Component {
  void zboss_signal_handler_esphome(zb_bufid_t bufid);
  void factory_reset();
  Trigger<> *get_join_trigger() { return &this->join_trigger_; };
  void force_report();
  void flush();
  void loop() override;

 protected:
@@ -90,7 +84,7 @@ class ZigbeeComponent : public Component {
  std::array<std::function<void(zb_bufid_t bufid)>, ZIGBEE_ENDPOINTS_COUNT> callbacks_{};
  CallbackManager<void()> join_cb_;
  Trigger<> join_trigger_;
  bool force_report_{false};
  bool need_flush_{false};
};

class ZigbeeEntity {

Some files were not shown because too many files have changed in this diff.