Compare commits

10 Commits

Author SHA1 Message Date
Jesse Hills
fb2f0ce62f Merge pull request #13915 from esphome/bump-2026.1.5 (2026.1.5) 2026-02-11 11:13:08 +13:00
Jesse Hills
a99f75ca71 Bump version to 2026.1.5 2026-02-11 08:45:06 +13:00
Sean Kelly
4168e8c30d [aqi] Fix AQI calculation for specific pm2.5 or pm10 readings (#13770) 2026-02-11 08:45:06 +13:00
Jonathan Swoboda
1f761902b6 [esp32] Set UV_CACHE_DIR inside data dir so Clean All clears it (#13888)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-11 07:48:20 +13:00
Clyde Stubbs
0b047c334d [lvgl] Fix crash with unconfigured top_layer (#13846) 2026-02-11 07:24:32 +13:00
tomaszduda23
a5dc4b0fce [nrf52,logger] fix printk (#13874) 2026-02-11 07:24:32 +13:00
J. Nick Koston
c1455ccc29 [dashboard] Close WebSocket after process exit to prevent zombie connections (#13834) 2026-02-11 07:24:32 +13:00
Jonathan Swoboda
438a0c4289 [ota] Fix CLI upload option shown when only http_request platform configured (#13784)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-11 07:24:32 +13:00
Jonathan Swoboda
9eee4c9924 [core] Add capacity check to register_component_ (#13778)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-11 07:24:32 +13:00
Jas Strong
eea7e9edff [rd03d] Revert incorrect field order swap (#13769)
Co-authored-by: jas <jas@asspa.in>
2026-02-11 07:24:32 +13:00
873 changed files with 6566 additions and 16864 deletions

View File

@@ -1 +1 @@
7b63ea33897064554efa88382ff41c34a55ea58e1161667093b97d8427f7f957
d272a88e8ca28ae9340a9a03295a566432a52cb696501908f57764475bf7ca65

View File

@@ -1,96 +0,0 @@
---
name: pr-workflow
description: Create pull requests for esphome. Use when creating PRs, submitting changes, or preparing contributions.
allowed-tools: Read, Bash, Glob, Grep
---
# ESPHome PR Workflow
When creating a pull request for esphome, follow these steps:
## 1. Create Branch from Upstream
Always base your branch on **upstream** (not origin/fork) to ensure you have the latest code:
```bash
git fetch upstream
git checkout -b <branch-name> upstream/dev
```
## 2. Read the PR Template
Before creating a PR, read `.github/PULL_REQUEST_TEMPLATE.md` to understand required fields.
## 3. Create the PR
Use `gh pr create` with the **full template** filled in. Never skip or abbreviate sections.
Required fields:
- **What does this implement/fix?**: Brief description of changes
- **Types of changes**: Check ONE appropriate box (Bugfix, New feature, Breaking change, etc.)
- **Related issue**: Use `fixes <link>` syntax if applicable
- **Pull request in esphome-docs**: Link if docs are needed
- **Test Environment**: Check platforms you tested on
- **Example config.yaml**: Include working example YAML
- **Checklist**: Verify code is tested and tests added
## 4. Example PR Body
```markdown
# What does this implement/fix?
<describe your changes here>
## Types of changes
- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Developer breaking change (an API change that could break external components)
- [ ] Code quality improvements to existing code or addition of tests
- [ ] Other
**Related issue or feature (if applicable):**
- fixes https://github.com/esphome/esphome/issues/XXX
**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):**
- esphome/esphome-docs#XXX
## Test Environment
- [x] ESP32
- [x] ESP32 IDF
- [ ] ESP8266
- [ ] RP2040
- [ ] BK72xx
- [ ] RTL87xx
- [ ] LN882x
- [ ] nRF52840
## Example entry for `config.yaml`:
```yaml
# Example config.yaml
component_name:
  id: my_component
  option: value
```
## Checklist:
- [x] The code change is tested and works locally.
- [x] Tests have been added to verify that the new code works (under `tests/` folder).
If user exposed functionality or configuration variables are added/changed:
- [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).
```
## 5. Push and Create PR
```bash
git push -u origin <branch-name>
gh pr create --repo esphome/esphome --base dev --title "[component] Brief description"
```
Title should be prefixed with the component name in brackets, e.g. `[safe_mode] Add feature`.
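
Taken together, the steps in the removed workflow file above boil down to a short command sequence. A minimal sketch — the branch name, body file, and PR title are placeholders, not values taken from this diff:

```bash
# Sketch of the end-to-end flow described above; names are illustrative.
git fetch upstream
git checkout -b my-fix upstream/dev          # 1. branch from upstream/dev

cat .github/PULL_REQUEST_TEMPLATE.md         # 2. review the required fields

# 3./4. fill in the full template (e.g. in pr-body.md), then:
git push -u origin my-fix                    # 5. push the branch
gh pr create --repo esphome/esphome --base dev \
  --title "[my_component] Brief description" \
  --body-file pr-body.md
```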

View File

@@ -17,12 +17,12 @@ runs:
steps:
- name: Set up Python ${{ inputs.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ inputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
# yamllint disable-line rule:line-length

View File

@@ -1,38 +0,0 @@
// Constants and markers for PR auto-labeling
module.exports = {
BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
CODEOWNERS_MARKER: '<!-- codeowners-request -->',
TOO_BIG_MARKER: '<!-- too-big-request -->',
DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',
MANAGED_LABELS: [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'chained-pr',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck',
'bugfix',
'new-feature',
'breaking-change',
'developer-breaking-change',
'code-quality',
'deprecated-component'
],
DOCS_PR_PATTERNS: [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
]
};
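
The HTML-comment markers above let the bot find and update its own review comments instead of posting duplicates. A rough illustration of that lookup — the require path and review objects are assumed for the sketch, and the real logic lives in the reviews module shown further down:

```js
// Illustrative sketch only: locate an earlier bot review by its hidden marker.
const { BOT_COMMENT_MARKER } = require('./constants'); // path assumed

function findBotReview(reviews) {
  return reviews.find(review =>
    review.user.type === 'Bot' &&
    review.body && review.body.includes(BOT_COMMENT_MARKER)
  );
}
```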

View File

@@ -1,373 +0,0 @@
const fs = require('fs');
const { DOCS_PR_PATTERNS } = require('./constants');
// Strategy: Merge branch detection
async function detectMergeBranch(context) {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
} else if (baseRef !== 'dev') {
labels.add('chained-pr');
}
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(changedFiles, apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents(prFiles) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(prFiles, apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges(changedFiles) {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD) {
const labels = new Set();
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges(changedFiles) {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges(changedFiles) {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner(github, context, changedFiles) {
const labels = new Set();
const { owner, repo } = context.repo;
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests(changedFiles) {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes(context) {
const labels = new Set();
const prBody = context.payload.pull_request.body || '';
console.log('Checking PR template checkboxes...');
// Check for checked checkboxes in the "Types of changes" section
const checkboxPatterns = [
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
];
for (const { pattern, label } of checkboxPatterns) {
if (pattern.test(prBody)) {
console.log(`Found checked checkbox for: ${label}`);
labels.add(label);
}
}
return labels;
}
// Strategy: Deprecated component detection
async function detectDeprecatedComponents(github, context, changedFiles) {
const labels = new Set();
const deprecatedInfo = [];
const { owner, repo } = context.repo;
// Compile regex once for better performance
const componentFileRegex = /^esphome\/components\/([^\/]+)\//;
// Get files that are modified or added in components directory
const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));
if (componentFiles.length === 0) {
return { labels, deprecatedInfo };
}
// Extract unique component names using the same regex
const components = new Set();
for (const file of componentFiles) {
const match = file.match(componentFileRegex);
if (match) {
components.add(match[1]);
}
}
// Get PR head to fetch files from the PR branch
const prNumber = context.payload.pull_request.number;
// Check each component's __init__.py for DEPRECATED_COMPONENT constant
for (const component of components) {
const initFile = `esphome/components/${component}/__init__.py`;
try {
// Fetch file content from PR head using GitHub API
const { data: fileData } = await github.rest.repos.getContent({
owner,
repo,
path: initFile,
ref: `refs/pull/${prNumber}/head`
});
// Decode base64 content
const content = Buffer.from(fileData.content, 'base64').toString('utf8');
// Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
// Support single quotes, double quotes, and triple quotes (for multiline)
const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;
if (deprecatedMatch) {
labels.add('deprecated-component');
deprecatedInfo.push({
component: component,
message: deprecatedMatch[1].trim()
});
console.log(`Found deprecated component: ${component}`);
}
} catch (error) {
// Only log if it's not a simple "file not found" error (404)
if (error.status !== 404) {
console.log(`Error reading ${initFile}:`, error.message);
}
}
}
return { labels, deprecatedInfo };
}
// Strategy: Requirements detection
async function detectRequirements(allLabels, prFiles, context) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
module.exports = {
detectMergeBranch,
detectComponentPlatforms,
detectNewComponents,
detectNewPlatforms,
detectCoreChanges,
detectPRSize,
detectDashboardChanges,
detectGitHubActionsChanges,
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
};
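
For context, each detector above takes pre-computed PR data and returns a Set of labels. A quick sketch of calling one in isolation — the require path, file list, and apiData values are made up for illustration:

```js
// Illustrative only: exercising detectComponentPlatforms with sample inputs.
const { detectComponentPlatforms } = require('./detectors'); // path assumed

const apiData = {
  targetPlatforms: ['esp32', 'esp8266'],     // sample data, normally fetched from data.esphome.io
  platformComponents: ['sensor', 'switch']
};
const changedFiles = [
  'esphome/components/esp32/gpio.py',
  'esphome/components/aqi/sensor.py'
];

detectComponentPlatforms(changedFiles, apiData).then(labels => {
  // -> Set { 'component: esp32', 'platform: esp32', 'component: aqi' }
  console.log([...labels]);
});
```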

View File

@@ -1,187 +0,0 @@
const { MANAGED_LABELS } = require('./constants');
const {
detectMergeBranch,
detectComponentPlatforms,
detectNewComponents,
detectNewPlatforms,
detectCoreChanges,
detectPRSize,
detectDashboardChanges,
detectGitHubActionsChanges,
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
} = require('./detectors');
const { handleReviews } = require('./reviews');
const { applyLabels, removeOldLabels } = require('./labels');
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
module.exports = async ({ github, context }) => {
// Environment variables
const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD);
const MAX_LABELS = parseInt(process.env.MAX_LABELS);
const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);
const COMPONENT_LABEL_THRESHOLD = parseInt(process.env.COMPONENT_LABEL_THRESHOLD);
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}
// Fetch API data
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
const branchLabels = await detectMergeBranch(context);
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Apply labels
await applyLabels(github, context, finalLabels);
// Remove old managed labels
await removeOldLabels(github, context, managedLabels, finalLabels);
return;
}
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels,
checkboxLabels,
deprecatedResult
] = await Promise.all([
detectMergeBranch(context),
detectComponentPlatforms(changedFiles, apiData),
detectNewComponents(prFiles),
detectNewPlatforms(prFiles, apiData),
detectCoreChanges(changedFiles),
detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD),
detectDashboardChanges(changedFiles),
detectGitHubActionsChanges(changedFiles),
detectCodeOwner(github, context, changedFiles),
detectTests(changedFiles),
detectPRTemplateCheckboxes(context),
detectDeprecatedComponents(github, context, changedFiles)
]);
// Extract deprecated component info
const deprecatedLabels = deprecatedResult.labels;
const deprecatedInfo = deprecatedResult.deprecatedInfo;
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels,
...checkboxLabels,
...deprecatedLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels, prFiles, context);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}
// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);
// Apply labels
await applyLabels(github, context, finalLabels);
// Remove old managed labels
await removeOldLabels(github, context, managedLabels, finalLabels);
};

View File

@@ -1,41 +0,0 @@
// Apply labels to PR
async function applyLabels(github, context, finalLabels) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
}
// Remove old managed labels
async function removeOldLabels(github, context, managedLabels, finalLabels) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
}
module.exports = {
applyLabels,
removeOldLabels
};

View File

@@ -1,141 +0,0 @@
const {
BOT_COMMENT_MARKER,
CODEOWNERS_MARKER,
TOO_BIG_MARKER,
DEPRECATED_COMPONENT_MARKER
} = require('./constants');
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
const messages = [];
// Deprecated component message
if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
message += `Hey there @${prAuthor},\n`;
message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;
for (const info of deprecatedInfo) {
message += `#### Component: \`${info.component}\`\n`;
message += `${info.message}\n\n`;
}
message += `Consider migrating to the recommended alternative if applicable.`;
messages.push(message);
}
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
const tooManyLabels = originalLabelCount > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${originalLabelCount} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const prAuthor = context.payload.pull_request.user.login;
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
module.exports = {
handleReviews
};

View File

@@ -22,7 +22,7 @@ jobs:
if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Generate a token
id: generate-token
@@ -36,5 +36,633 @@ jobs:
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const script = require('./.github/scripts/auto-label-pr/index.js');
await script({ github, context });
const fs = require('fs');
// Constants
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
const TOO_BIG_MARKER = '<!-- too-big-request -->';
const MANAGED_LABELS = [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'chained-pr',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck',
'bugfix',
'new-feature',
'breaking-change',
'developer-breaking-change',
'code-quality'
];
const DOCS_PR_PATTERNS = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
// Strategy: Merge branch detection
async function detectMergeBranch() {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
} else if (baseRef !== 'dev') {
labels.add('chained-pr');
}
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents() {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges() {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges() {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges() {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner() {
const labels = new Set();
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests() {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes() {
const labels = new Set();
const prBody = context.payload.pull_request.body || '';
console.log('Checking PR template checkboxes...');
// Check for checked checkboxes in the "Types of changes" section
const checkboxPatterns = [
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
];
for (const { pattern, label } of checkboxPatterns) {
if (pattern.test(prBody)) {
console.log(`Found checked checkbox for: ${label}`);
labels.add(label);
}
}
return labels;
}
// Strategy: Requirements detection
async function detectRequirements(allLabels) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount) {
const messages = [];
const prAuthor = context.payload.pull_request.user.login;
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
const tooManyLabels = originalLabelCount > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${originalLabelCount} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(finalLabels, originalLabelCount) {
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
const branchLabels = await detectMergeBranch();
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Apply labels
if (finalLabels.length > 0) {
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}
return;
}
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels,
checkboxLabels
] = await Promise.all([
detectMergeBranch(),
detectComponentPlatforms(apiData),
detectNewComponents(),
detectNewPlatforms(apiData),
detectCoreChanges(),
detectPRSize(),
detectDashboardChanges(),
detectGitHubActionsChanges(),
detectCodeOwner(),
detectTests(),
detectPRTemplateCheckboxes()
]);
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels,
...checkboxLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}
// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(finalLabels, originalLabelCount);
// Apply labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}

View File

@@ -21,9 +21,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: "3.11"

View File

@@ -21,10 +21,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: "3.11"

View File

@@ -43,9 +43,9 @@ jobs:
- "docker"
# - "lint"
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: "3.11"
- name: Set up Docker Buildx

View File

@@ -49,7 +49,7 @@ jobs:
- name: Check out code from base repository
if: steps.pr.outputs.skip != 'true'
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Always check out from the base repository (esphome/esphome), never from forks
# Use the PR's target branch to ensure we run trusted code from the main repo

View File

@@ -36,18 +36,18 @@ jobs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Generate cache-key
id: cache-key
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
# yamllint disable-line rule:line-length
@@ -70,7 +70,7 @@ jobs:
if: needs.determine-jobs.outputs.python-linters == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -91,7 +91,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -132,7 +132,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
id: restore-python
uses: ./.github/actions/restore-python
@@ -157,7 +157,7 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Save Python virtual environment cache
if: github.ref == 'refs/heads/dev'
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -183,7 +183,7 @@ jobs:
component-test-batches: ${{ steps.determine.outputs.component-test-batches }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Fetch enough history to find the merge base
fetch-depth: 2
@@ -193,7 +193,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Restore components graph cache
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: .temp/components_graph.json
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -223,7 +223,7 @@ jobs:
echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
- name: Save components graph cache
if: github.ref == 'refs/heads/dev'
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: .temp/components_graph.json
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -237,15 +237,15 @@ jobs:
if: needs.determine-jobs.outputs.integration-tests == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python 3.13
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: "3.13"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -273,7 +273,7 @@ jobs:
if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]')
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
@@ -321,7 +321,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -334,14 +334,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -400,7 +400,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -413,14 +413,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -489,7 +489,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -502,14 +502,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -577,7 +577,7 @@ jobs:
version: 1.0
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -662,7 +662,7 @@ jobs:
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -688,7 +688,7 @@ jobs:
skip: ${{ steps.check-script.outputs.skip }}
steps:
- name: Check out target branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.base_ref }}
@@ -735,7 +735,7 @@ jobs:
- name: Restore cached memory analysis
id: cache-memory-analysis
if: steps.check-script.outputs.skip != 'true'
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -759,7 +759,7 @@ jobs:
- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -800,7 +800,7 @@ jobs:
- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
@@ -840,14 +840,14 @@ jobs:
flash_usage: ${{ steps.extract.outputs.flash_usage }}
steps:
- name: Check out PR branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -908,7 +908,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
View File
@@ -54,11 +54,11 @@ jobs:
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
with:
category: "/language:${{matrix.language}}"
View File
@@ -20,7 +20,7 @@ jobs:
branch_build: ${{ steps.tag.outputs.branch_build }}
deploy_env: ${{ steps.tag.outputs.deploy_env }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Get tag
id: tag
# yamllint disable rule:line-length
@@ -60,9 +60,9 @@ jobs:
contents: read
id-token: write
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: "3.x"
- name: Build
@@ -92,9 +92,9 @@ jobs:
os: "ubuntu-24.04-arm"
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: "3.11"
@@ -102,12 +102,12 @@ jobs:
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Log in to docker hub
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -168,7 +168,7 @@ jobs:
- ghcr
- dockerhub
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
@@ -182,13 +182,13 @@ jobs:
- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
View File
@@ -13,16 +13,16 @@ jobs:
if: github.repository == 'esphome/esphome'
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Checkout Home Assistant
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
repository: home-assistant/core
path: lib/home-assistant
- name: Setup Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: 3.13
@@ -41,7 +41,7 @@ jobs:
python script/run-in-env.py pre-commit run --all-files
- name: Commit changes
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
with:
commit-message: "Synchronise Device Classes from Home Assistant"
committer: esphomebot <esphome@openhomefoundation.org>
View File
@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.15.0
rev: v0.14.11
hooks:
# Run the linter.
- id: ruff
View File
@@ -88,8 +88,7 @@ esphome/components/bmp3xx/* @latonita
esphome/components/bmp3xx_base/* @latonita @martgras
esphome/components/bmp3xx_i2c/* @latonita
esphome/components/bmp3xx_spi/* @latonita
esphome/components/bmp581_base/* @danielkent-net @kahrendt
esphome/components/bmp581_i2c/* @danielkent-net @kahrendt
esphome/components/bmp581/* @kahrendt
esphome/components/bp1658cj/* @Cossid
esphome/components/bp5758d/* @Cossid
esphome/components/bthome_mithermometer/* @nagyrobi
@@ -104,7 +103,6 @@ esphome/components/cc1101/* @gabest11 @lygris
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/ch422g/* @clydebarrow @jesterret
esphome/components/ch423/* @dwmw2
esphome/components/chsc6x/* @kkosik20
esphome/components/climate/* @esphome/core
esphome/components/climate_ir/* @glmnet
@@ -134,7 +132,6 @@ esphome/components/dfplayer/* @glmnet
esphome/components/dfrobot_sen0395/* @niklasweber
esphome/components/dht/* @OttoWinter
esphome/components/display_menu_base/* @numo68
esphome/components/dlms_meter/* @SimonFischer04
esphome/components/dps310/* @kbx81
esphome/components/ds1307/* @badbadc0ffee
esphome/components/ds2484/* @mrk-its
@@ -484,7 +481,6 @@ esphome/components/switch/* @esphome/core
esphome/components/switch/binary_sensor/* @ssieb
esphome/components/sx126x/* @swoboda1337
esphome/components/sx127x/* @swoboda1337
esphome/components/sy6970/* @linkedupbits
esphome/components/syslog/* @clydebarrow
esphome/components/t6615/* @tylermenezes
esphome/components/tc74/* @sethgirvan
View File
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2026.2.0-dev
PROJECT_NUMBER = 2026.1.5
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
View File
@@ -43,7 +43,6 @@ from esphome.const import (
CONF_SUBSTITUTIONS,
CONF_TOPIC,
ENV_NOGITIGNORE,
KEY_NATIVE_IDF,
PLATFORM_ESP32,
PLATFORM_ESP8266,
PLATFORM_RP2040,
@@ -117,7 +116,6 @@ class ArgsProtocol(Protocol):
configuration: str
name: str
upload_speed: str | None
native_idf: bool
def choose_prompt(options, purpose: str = None):
@@ -225,13 +223,8 @@ def choose_upload_log_host(
else:
resolved.append(device)
if not resolved:
if CORE.dashboard:
hint = "If you know the IP, set 'use_address' in your network config."
else:
hint = "If you know the IP, try --device <IP>"
raise EsphomeError(
f"All specified devices {defaults} could not be resolved. "
f"Is the device connected to the network? {hint}"
f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
)
return resolved
@@ -507,15 +500,12 @@ def wrap_to_code(name, comp):
return wrapped
def write_cpp(config: ConfigType, native_idf: bool = False) -> int:
def write_cpp(config: ConfigType) -> int:
if not get_bool_env(ENV_NOGITIGNORE):
writer.write_gitignore()
# Store native_idf flag so esp32 component can check it
CORE.data[KEY_NATIVE_IDF] = native_idf
generate_cpp_contents(config)
return write_cpp_file(native_idf=native_idf)
return write_cpp_file()
def generate_cpp_contents(config: ConfigType) -> None:
@@ -529,54 +519,32 @@ def generate_cpp_contents(config: ConfigType) -> None:
CORE.flush_tasks()
def write_cpp_file(native_idf: bool = False) -> int:
def write_cpp_file() -> int:
code_s = indent(CORE.cpp_main_section)
writer.write_cpp(code_s)
if native_idf and CORE.is_esp32 and CORE.target_framework == "esp-idf":
from esphome.build_gen import espidf
from esphome.build_gen import platformio
espidf.write_project()
else:
from esphome.build_gen import platformio
platformio.write_project()
platformio.write_project()
return 0
def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
native_idf = getattr(args, "native_idf", False)
from esphome import platformio_api
# NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py
# If you change this format, update the regex in that script as well
_LOGGER.info("Compiling app... Build path: %s", CORE.build_path)
if native_idf and CORE.is_esp32 and CORE.target_framework == "esp-idf":
from esphome import espidf_api
rc = espidf_api.run_compile(config, CORE.verbose)
if rc != 0:
return rc
# Create factory.bin and ota.bin
espidf_api.create_factory_bin()
espidf_api.create_ota_bin()
else:
from esphome import platformio_api
rc = platformio_api.run_compile(config, CORE.verbose)
if rc != 0:
return rc
idedata = platformio_api.get_idedata(config)
if idedata is None:
return 1
rc = platformio_api.run_compile(config, CORE.verbose)
if rc != 0:
return rc
# Check if firmware was rebuilt and emit build_info + create manifest
_check_and_emit_build_info()
return 0
idedata = platformio_api.get_idedata(config)
return 0 if idedata is not None else 1
def _check_and_emit_build_info() -> None:
@@ -833,8 +801,7 @@ def command_vscode(args: ArgsProtocol) -> int | None:
def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
native_idf = getattr(args, "native_idf", False)
exit_code = write_cpp(config, native_idf=native_idf)
exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
if args.only_generate:
@@ -889,8 +856,7 @@ def command_logs(args: ArgsProtocol, config: ConfigType) -> int | None:
def command_run(args: ArgsProtocol, config: ConfigType) -> int | None:
native_idf = getattr(args, "native_idf", False)
exit_code = write_cpp(config, native_idf=native_idf)
exit_code = write_cpp(config)
if exit_code != 0:
return exit_code
exit_code = compile_program(args, config)
@@ -1344,11 +1310,6 @@ def parse_args(argv):
help="Only generate source code, do not compile.",
action="store_true",
)
parser_compile.add_argument(
"--native-idf",
help="Build with native ESP-IDF instead of PlatformIO (ESP32 esp-idf framework only).",
action="store_true",
)
parser_upload = subparsers.add_parser(
"upload",
@@ -1430,11 +1391,6 @@ def parse_args(argv):
help="Reset the device before starting serial logs.",
default=os.getenv("ESPHOME_SERIAL_LOGGING_RESET"),
)
parser_run.add_argument(
"--native-idf",
help="Build with native ESP-IDF instead of PlatformIO (ESP32 esp-idf framework only).",
action="store_true",
)
parser_clean = subparsers.add_parser(
"clean-mqtt",
View File
@@ -12,6 +12,7 @@ from .const import (
CORE_SUBCATEGORY_PATTERNS,
DEMANGLED_PATTERNS,
ESPHOME_COMPONENT_PATTERN,
SECTION_TO_ATTR,
SYMBOL_PATTERNS,
)
from .demangle import batch_demangle
@@ -21,7 +22,7 @@ from .helpers import (
map_section_name,
parse_symbol_line,
)
from .toolchain import find_tool, resolve_tool_path, run_tool
from .toolchain import find_tool, run_tool
if TYPE_CHECKING:
from esphome.platformio_api import IDEData
@@ -90,17 +91,6 @@ class ComponentMemory:
bss_size: int = 0 # Uninitialized data (ram only)
symbol_count: int = 0
def add_section_size(self, section_name: str, size: int) -> None:
"""Add size to the appropriate attribute for a section."""
if section_name == ".text":
self.text_size += size
elif section_name == ".rodata":
self.rodata_size += size
elif section_name == ".data":
self.data_size += size
elif section_name == ".bss":
self.bss_size += size
@property
def flash_total(self) -> int:
"""Total flash usage (text + rodata + data)."""
@@ -142,12 +132,6 @@ class MemoryAnalyzer:
readelf_path = readelf_path or idedata.readelf_path
_LOGGER.debug("Using toolchain paths from PlatformIO idedata")
# Validate paths exist, fall back to find_tool if they don't
# This handles cases like Zephyr where cc_path doesn't include full path
# and the toolchain prefix may differ (e.g., arm-zephyr-eabi- vs arm-none-eabi-)
objdump_path = resolve_tool_path("objdump", objdump_path, objdump_path)
readelf_path = resolve_tool_path("readelf", readelf_path, objdump_path)
self.objdump_path = objdump_path or "objdump"
self.readelf_path = readelf_path or "readelf"
self.external_components = external_components or set()
@@ -177,15 +161,12 @@ class MemoryAnalyzer:
self._elf_symbol_names: set[str] = set()
# SDK symbols not in ELF (static/local symbols from closed-source libs)
self._sdk_symbols: list[SDKSymbol] = []
# CSWTCH symbols: list of (name, size, source_file, component)
self._cswtch_symbols: list[tuple[str, int, str, str]] = []
def analyze(self) -> dict[str, ComponentMemory]:
"""Analyze the ELF file and return component memory usage."""
self._parse_sections()
self._parse_symbols()
self._categorize_symbols()
self._analyze_cswtch_symbols()
self._analyze_sdk_libraries()
return dict(self.components)
@@ -268,7 +249,8 @@ class MemoryAnalyzer:
comp_mem.symbol_count += 1
# Update the appropriate size attribute based on section
comp_mem.add_section_size(section_name, size)
if attr_name := SECTION_TO_ATTR.get(section_name):
setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size)
# Track uncategorized symbols
if component == "other" and size > 0:
@@ -384,277 +366,6 @@ class MemoryAnalyzer:
return "Other Core"
def _find_object_files_dir(self) -> Path | None:
"""Find the directory containing object files for this build.
Returns:
Path to the directory containing .o files, or None if not found.
"""
# The ELF is typically at .pioenvs/<env>/firmware.elf
# Object files are in .pioenvs/<env>/src/ and .pioenvs/<env>/lib*/
pioenvs_dir = self.elf_path.parent
if pioenvs_dir.exists() and any(pioenvs_dir.glob("src/*.o")):
return pioenvs_dir
return None
@staticmethod
def _parse_nm_cswtch_output(
output: str,
base_dir: Path | None,
cswtch_map: dict[str, list[tuple[str, int]]],
) -> None:
"""Parse nm output for CSWTCH symbols and add to cswtch_map.
Handles both ``.o`` files and ``.a`` archives.
nm output formats::
.o files: /path/file.o:hex_addr hex_size type name
.a files: /path/lib.a:member.o:hex_addr hex_size type name
For ``.o`` files, paths are made relative to *base_dir* when possible.
For ``.a`` archives (detected by ``:`` in the file portion), paths are
formatted as ``archive_stem/member.o`` (e.g. ``liblwip2-536-feat/lwip-esp.o``).
Args:
output: Raw stdout from ``nm --print-file-name -S``.
base_dir: Base directory for computing relative paths of ``.o`` files.
Pass ``None`` when scanning archives outside the build tree.
cswtch_map: Dict to populate, mapping ``"CSWTCH$N:size"`` to source list.
"""
for line in output.splitlines():
if "CSWTCH$" not in line:
continue
# Split on last ":" that precedes a hex address.
# For .o: "filepath.o" : "hex_addr hex_size type name"
# For .a: "filepath.a:member.o" : "hex_addr hex_size type name"
parts_after_colon = line.rsplit(":", 1)
if len(parts_after_colon) != 2:
continue
file_path = parts_after_colon[0]
fields = parts_after_colon[1].split()
# fields: [address, size, type, name]
if len(fields) < 4:
continue
sym_name = fields[3]
if not sym_name.startswith("CSWTCH$"):
continue
try:
size = int(fields[1], 16)
except ValueError:
continue
# Determine readable source path
# Use ".a:" to detect archive format (not bare ":" which matches
# Windows drive letters like "C:\...\file.o").
if ".a:" in file_path:
# Archive format: "archive.a:member.o" → "archive_stem/member.o"
archive_part, member = file_path.rsplit(":", 1)
archive_name = Path(archive_part).stem
rel_path = f"{archive_name}/{member}"
elif base_dir is not None:
try:
rel_path = str(Path(file_path).relative_to(base_dir))
except ValueError:
rel_path = file_path
else:
rel_path = file_path
key = f"{sym_name}:{size}"
cswtch_map[key].append((rel_path, size))
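To make the documented ``.o`` line format concrete, here is the split-on-last-colon parsing described in the docstring applied to one invented nm line; the path, symbol name and size are hypothetical and are not part of any real build output:

```python
# Invented `nm --print-file-name -S` output line for a .o file (illustrative only).
line = "src/esphome/components/wifi/wifi_component.cpp.o:3ffe8000 00000024 r CSWTCH$42"

file_path, rest = line.rsplit(":", 1)   # split on the last ":" before the hex address
fields = rest.split()                   # [address, size, type, name]
sym_name = fields[3]                    # "CSWTCH$42"
size = int(fields[1], 16)               # 0x24 -> 36 bytes
key = f"{sym_name}:{size}"              # "CSWTCH$42:36", later matched against ELF symbols
```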
def _run_nm_cswtch_scan(
self,
files: list[Path],
base_dir: Path | None,
cswtch_map: dict[str, list[tuple[str, int]]],
) -> None:
"""Run nm on *files* and add any CSWTCH symbols to *cswtch_map*.
Args:
files: Object (``.o``) or archive (``.a``) files to scan.
base_dir: Base directory for relative path computation (see
:meth:`_parse_nm_cswtch_output`).
cswtch_map: Dict to populate with results.
"""
if not self.nm_path or not files:
return
_LOGGER.debug("Scanning %d files for CSWTCH symbols", len(files))
result = run_tool(
[self.nm_path, "--print-file-name", "-S"] + [str(f) for f in files],
timeout=30,
)
if result is None or result.returncode != 0:
_LOGGER.debug(
"nm failed or timed out scanning %d files for CSWTCH symbols",
len(files),
)
return
self._parse_nm_cswtch_output(result.stdout, base_dir, cswtch_map)
def _scan_cswtch_in_sdk_archives(
self, cswtch_map: dict[str, list[tuple[str, int]]]
) -> None:
"""Scan SDK library archives (.a) for CSWTCH symbols.
Prebuilt SDK libraries (e.g. lwip, bearssl) are not compiled from source,
so their CSWTCH symbols only exist inside ``.a`` archives. Results are
merged into *cswtch_map* for keys not already found in ``.o`` files.
The same source file (e.g. ``lwip-esp.o``) often appears in multiple
library variants (``liblwip2-536.a``, ``liblwip2-1460-feat.a``, etc.),
so results are deduplicated by member name.
"""
sdk_dirs = self._find_sdk_library_dirs()
if not sdk_dirs:
return
sdk_archives = sorted(a for sdk_dir in sdk_dirs for a in sdk_dir.glob("*.a"))
sdk_map: dict[str, list[tuple[str, int]]] = defaultdict(list)
self._run_nm_cswtch_scan(sdk_archives, None, sdk_map)
# Merge SDK results, deduplicating by member name.
for key, sources in sdk_map.items():
if key in cswtch_map:
continue
seen: dict[str, tuple[str, int]] = {}
for path, sz in sources:
member = Path(path).name
if member not in seen:
seen[member] = (path, sz)
cswtch_map[key] = list(seen.values())
def _source_file_to_component(self, source_file: str) -> str:
"""Map a source object file path to its component name.
Args:
source_file: Relative path like 'src/esphome/components/wifi/wifi_component.cpp.o'
Returns:
Component name like '[esphome]wifi' or the source file if unknown.
"""
parts = Path(source_file).parts
# ESPHome component: src/esphome/components/<name>/...
if "components" in parts:
idx = parts.index("components")
if idx + 1 < len(parts):
component_name = parts[idx + 1]
if component_name in get_esphome_components():
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
if component_name in self.external_components:
return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
# ESPHome core: src/esphome/core/... or src/esphome/...
if "core" in parts and "esphome" in parts:
return _COMPONENT_CORE
if "esphome" in parts and "components" not in parts:
return _COMPONENT_CORE
# Framework/library files - return the first path component
# e.g., lib65b/ESPAsyncTCP/... -> lib65b
# FrameworkArduino/... -> FrameworkArduino
return parts[0] if parts else source_file
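A few invented object-file paths and the component names the rules above would yield (assuming `wifi` is a built-in component; `analyzer` stands for a MemoryAnalyzer instance):

```python
# Illustrative inputs/outputs only; the prefixes are the module-level constants used above.
analyzer._source_file_to_component("src/esphome/components/wifi/wifi_component.cpp.o")
# -> f"{_COMPONENT_PREFIX_ESPHOME}wifi"   (built-in ESPHome component)
analyzer._source_file_to_component("src/esphome/core/application.cpp.o")
# -> _COMPONENT_CORE                      (ESPHome core)
analyzer._source_file_to_component("FrameworkArduino/core_esp8266_main.cpp.o")
# -> "FrameworkArduino"                   (first path component for framework/library files)
```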
def _analyze_cswtch_symbols(self) -> None:
"""Analyze CSWTCH (GCC switch table) symbols by tracing to source objects.
CSWTCH symbols are compiler-generated lookup tables for switch statements.
They are local symbols, so the same name can appear in different object files.
This method scans .o files and SDK archives to attribute them to their
source components.
"""
obj_dir = self._find_object_files_dir()
if obj_dir is None:
_LOGGER.debug("No object files directory found, skipping CSWTCH analysis")
return
# Scan build-dir object files for CSWTCH symbols
cswtch_map: dict[str, list[tuple[str, int]]] = defaultdict(list)
self._run_nm_cswtch_scan(sorted(obj_dir.rglob("*.o")), obj_dir, cswtch_map)
# Also scan SDK library archives (.a) for CSWTCH symbols.
# Prebuilt SDK libraries (e.g. lwip, bearssl) are not compiled from source
# so their symbols only exist inside .a archives, not as loose .o files.
self._scan_cswtch_in_sdk_archives(cswtch_map)
if not cswtch_map:
_LOGGER.debug("No CSWTCH symbols found in object files or SDK archives")
return
# Collect CSWTCH symbols from the ELF (already parsed in sections)
# Include section_name for re-attribution of component totals
elf_cswtch = [
(symbol_name, size, section_name)
for section_name, section in self.sections.items()
for symbol_name, size, _ in section.symbols
if symbol_name.startswith("CSWTCH$")
]
_LOGGER.debug(
"Found %d CSWTCH symbols in ELF, %d unique in object files",
len(elf_cswtch),
len(cswtch_map),
)
# Match ELF CSWTCH symbols to source files and re-attribute component totals.
# _categorize_symbols() already ran and put these into "other" since CSWTCH$
# names don't match any component pattern. We move the bytes to the correct
# component based on the object file mapping.
other_mem = self.components.get("other")
for sym_name, size, section_name in elf_cswtch:
key = f"{sym_name}:{size}"
sources = cswtch_map.get(key, [])
if len(sources) == 1:
source_file = sources[0][0]
component = self._source_file_to_component(source_file)
elif len(sources) > 1:
# Ambiguous - multiple object files have same CSWTCH name+size
source_file = "ambiguous"
component = "ambiguous"
_LOGGER.debug(
"Ambiguous CSWTCH %s (%d B) found in %d files: %s",
sym_name,
size,
len(sources),
", ".join(src for src, _ in sources),
)
else:
source_file = "unknown"
component = "unknown"
self._cswtch_symbols.append((sym_name, size, source_file, component))
# Re-attribute from "other" to the correct component
if (
component not in ("other", "unknown", "ambiguous")
and other_mem is not None
):
other_mem.add_section_size(section_name, -size)
if component not in self.components:
self.components[component] = ComponentMemory(component)
self.components[component].add_section_size(section_name, size)
# Sort by size descending
self._cswtch_symbols.sort(key=lambda x: x[1], reverse=True)
total_size = sum(size for _, size, _, _ in self._cswtch_symbols)
_LOGGER.debug(
"CSWTCH analysis: %d symbols, %d bytes total",
len(self._cswtch_symbols),
total_size,
)
def get_unattributed_ram(self) -> tuple[int, int, int]:
"""Get unattributed RAM sizes (SDK/framework overhead).
View File
@@ -4,8 +4,6 @@ from __future__ import annotations
from collections import defaultdict
from collections.abc import Callable
import heapq
from operator import itemgetter
import sys
from typing import TYPE_CHECKING
@@ -31,10 +29,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
)
# Lower threshold for RAM symbols (RAM is more constrained)
RAM_SYMBOL_SIZE_THRESHOLD: int = 24
# Number of top symbols to show in the largest symbols report
TOP_SYMBOLS_LIMIT: int = 30
# Width for symbol name display in top symbols report
COL_TOP_SYMBOL_NAME: int = 55
# Column width constants
COL_COMPONENT: int = 29
@@ -153,83 +147,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
section_label = f" [{section[1:]}]" # .data -> [data], .bss -> [bss]
return f"{demangled} ({size:,} B){section_label}"
def _add_top_symbols(self, lines: list[str]) -> None:
"""Add a section showing the top largest symbols in the binary."""
# Collect all symbols from all components: (symbol, demangled, size, section, component)
all_symbols = [
(symbol, demangled, size, section, component)
for component, symbols in self._component_symbols.items()
for symbol, demangled, size, section in symbols
]
# Get top N symbols by size using heapq for efficiency
top_symbols = heapq.nlargest(
self.TOP_SYMBOLS_LIMIT, all_symbols, key=itemgetter(2)
)
lines.append("")
lines.append(f"Top {self.TOP_SYMBOLS_LIMIT} Largest Symbols:")
# Calculate truncation limit from column width (leaving room for "...")
truncate_limit = self.COL_TOP_SYMBOL_NAME - 3
for i, (_, demangled, size, section, component) in enumerate(top_symbols):
# Format section label
section_label = f"[{section[1:]}]" if section else ""
# Truncate demangled name if too long
demangled_display = (
f"{demangled[:truncate_limit]}..."
if len(demangled) > self.COL_TOP_SYMBOL_NAME
else demangled
)
lines.append(
f"{i + 1:>2}. {size:>7,} B {section_label:<8} {demangled_display:<{self.COL_TOP_SYMBOL_NAME}} {component}"
)
def _add_cswtch_analysis(self, lines: list[str]) -> None:
"""Add CSWTCH (GCC switch table lookup) analysis section."""
self._add_section_header(lines, "CSWTCH Analysis (GCC Switch Table Lookups)")
total_size = sum(size for _, size, _, _ in self._cswtch_symbols)
lines.append(
f"Total: {len(self._cswtch_symbols)} switch table(s), {total_size:,} B"
)
lines.append("")
# Group by component
by_component: dict[str, list[tuple[str, int, str]]] = defaultdict(list)
for sym_name, size, source_file, component in self._cswtch_symbols:
by_component[component].append((sym_name, size, source_file))
# Sort components by total size descending
sorted_components = sorted(
by_component.items(),
key=lambda x: sum(s[1] for s in x[1]),
reverse=True,
)
for component, symbols in sorted_components:
comp_total = sum(s[1] for s in symbols)
lines.append(f"{component} ({comp_total:,} B, {len(symbols)} tables):")
# Group by source file within component
by_file: dict[str, list[tuple[str, int]]] = defaultdict(list)
for sym_name, size, source_file in symbols:
by_file[source_file].append((sym_name, size))
for source_file, file_symbols in sorted(
by_file.items(),
key=lambda x: sum(s[1] for s in x[1]),
reverse=True,
):
file_total = sum(s[1] for s in file_symbols)
lines.append(
f" {source_file} ({file_total:,} B, {len(file_symbols)} tables)"
)
for sym_name, size in sorted(
file_symbols, key=lambda x: x[1], reverse=True
):
lines.append(f" {size:>6,} B {sym_name}")
lines.append("")
def generate_report(self, detailed: bool = False) -> str:
"""Generate a formatted memory report."""
components = sorted(
@@ -331,9 +248,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
"RAM",
)
# Top largest symbols in the binary
self._add_top_symbols(lines)
# Add ESPHome core detailed analysis if there are core symbols
if self._esphome_core_symbols:
self._add_section_header(lines, f"{_COMPONENT_CORE} Detailed Analysis")
@@ -517,10 +431,6 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
lines.append(f" ... and {len(large_ram_syms) - 10} more")
lines.append("")
# CSWTCH (GCC switch table) analysis
if self._cswtch_symbols:
self._add_cswtch_analysis(lines)
lines.append(
"Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included."
)
View File
@@ -9,61 +9,20 @@ ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::")
# Maps standard section names to their various platform-specific variants
# Note: Order matters! More specific patterns (.bss) must come before general ones (.dram)
# because ESP-IDF uses names like ".dram0.bss" which would match ".dram" otherwise
#
# Platform-specific sections:
# - ESP8266/ESP32: .iram*, .dram*
# - LibreTiny RTL87xx: .xip.code_* (flash), .ram.code_* (RAM)
# - LibreTiny BK7231: .itcm.code (fast RAM), .vectors (interrupt vectors)
# - LibreTiny LN882X: .flash_text, .flash_copy* (flash code)
# - Zephyr/nRF52: text, rodata, datas, bss (no leading dots)
SECTION_MAPPING = {
".text": frozenset(
[
".text",
".iram",
# LibreTiny RTL87xx XIP (eXecute In Place) flash code
".xip.code",
# LibreTiny RTL87xx RAM code
".ram.code_text",
# LibreTiny BK7231 fast RAM code and vectors
".itcm.code",
".vectors",
# LibreTiny LN882X flash code
".flash_text",
".flash_copy",
# Zephyr/nRF52 sections (no leading dots)
"text",
"rom_start",
]
),
".rodata": frozenset(
[
".rodata",
# LibreTiny RTL87xx read-only data in RAM
".ram.code_rodata",
# Zephyr/nRF52 sections (no leading dots)
"rodata",
]
),
# .bss patterns - must be before .data to catch ".dram0.bss"
".bss": frozenset(
[
".bss",
# LibreTiny LN882X BSS
".bss_ram",
# Zephyr/nRF52 sections (no leading dots)
"bss",
"noinit",
]
),
".data": frozenset(
[
".data",
".dram",
# Zephyr/nRF52 sections (no leading dots)
"datas",
]
),
".text": frozenset([".text", ".iram"]),
".rodata": frozenset([".rodata"]),
".bss": frozenset([".bss"]), # Must be before .data to catch ".dram0.bss"
".data": frozenset([".data", ".dram"]),
}
# Section to ComponentMemory attribute mapping
# Maps section names to the attribute name in ComponentMemory dataclass
SECTION_TO_ATTR = {
".text": "text_size",
".rodata": "rodata_size",
".data": "data_size",
".bss": "bss_size",
}
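As a rough sketch of how the two tables interact: a raw section name is normalized through SECTION_MAPPING, and the canonical name then selects a ComponentMemory attribute via SECTION_TO_ATTR. The real normalization lives in map_section_name() in helpers.py; the helper below is only a sketch of the ordering note above.

```python
# Sketch only - the actual lookup is map_section_name() in helpers.py.
def _attr_for_raw_section(raw_name: str) -> str | None:
    # Dict order matters: ".bss" variants are checked before ".data"/".dram",
    # so ESP-IDF's ".dram0.bss" is counted as bss_size rather than data_size.
    for canonical, variants in SECTION_MAPPING.items():
        if any(variant in raw_name for variant in variants):
            return SECTION_TO_ATTR.get(canonical)
    return None
```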
# Component identification rules
@@ -504,9 +463,7 @@ SYMBOL_PATTERNS = {
"__FUNCTION__$",
"DAYS_IN_MONTH",
"_DAYS_BEFORE_MONTH",
# Note: CSWTCH$ symbols are GCC switch table lookup tables.
# They are attributed to their source object files via _analyze_cswtch_symbols()
# rather than being lumped into libc.
"CSWTCH$",
"dst$",
"sulp",
"_strtol_l", # String to long with locale
View File
@@ -94,13 +94,13 @@ def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None:
return None
# Find section, size, and name
# Try each part as a potential section name
for i, part in enumerate(parts):
# Skip parts that are clearly flags, addresses, or other metadata
# Sections start with '.' (standard ELF) or are known section names (Zephyr)
if not part.startswith("."):
continue
section = map_section_name(part)
if not section:
continue
break
# Need at least size field after section
if i + 1 >= len(parts):
View File
@@ -3,7 +3,6 @@
from __future__ import annotations
import logging
import os
from pathlib import Path
import subprocess
from typing import TYPE_CHECKING
@@ -18,82 +17,10 @@ TOOLCHAIN_PREFIXES = [
"xtensa-lx106-elf-", # ESP8266
"xtensa-esp32-elf-", # ESP32
"xtensa-esp-elf-", # ESP32 (newer IDF)
"arm-zephyr-eabi-", # nRF52/Zephyr SDK
"arm-none-eabi-", # Generic ARM (RP2040, etc.)
"", # System default (no prefix)
]
def _find_in_platformio_packages(tool_name: str) -> str | None:
"""Search for a tool in PlatformIO package directories.
This handles cases like Zephyr SDK where tools are installed in nested
directories that aren't in PATH.
Args:
tool_name: Name of the tool (e.g., "readelf", "objdump")
Returns:
Full path to the tool or None if not found
"""
# Get PlatformIO packages directory
platformio_home = Path(os.path.expanduser("~/.platformio/packages"))
if not platformio_home.exists():
return None
# Search patterns for toolchains that might contain the tool
# Order matters - more specific patterns first
search_patterns = [
# Zephyr SDK deeply nested structure (4 levels)
# e.g., toolchain-gccarmnoneeabi/zephyr-sdk-0.17.4/arm-zephyr-eabi/bin/arm-zephyr-eabi-objdump
f"toolchain-*/*/*/bin/*-{tool_name}",
# Zephyr SDK nested structure (3 levels)
f"toolchain-*/*/bin/*-{tool_name}",
f"toolchain-*/bin/*-{tool_name}",
# Standard PlatformIO toolchain structure
f"toolchain-*/bin/*{tool_name}",
]
for pattern in search_patterns:
matches = list(platformio_home.glob(pattern))
if matches:
# Sort to get consistent results, prefer arm-zephyr-eabi over arm-none-eabi
matches.sort(key=lambda p: ("zephyr" not in str(p), str(p)))
tool_path = str(matches[0])
_LOGGER.debug("Found %s in PlatformIO packages: %s", tool_name, tool_path)
return tool_path
return None
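For context, the memory analyzer earlier in this diff invokes this fallback through resolve_tool_path(); the derived paths below are invented, and find_tool() only kicks in when they do not exist on disk:

```python
# Invented idedata-derived paths, mirroring the resolve_tool_path() calls in the analyzer above.
objdump_path = "/home/user/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp-elf-objdump"
readelf_path = "/home/user/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp-elf-readelf"

objdump_path = resolve_tool_path("objdump", objdump_path, objdump_path)
readelf_path = resolve_tool_path("readelf", readelf_path, objdump_path)
# For a missing path, find_tool() first derives the tool name from objdump_path, then
# globs ~/.platformio/packages (Zephyr SDK layouts), then falls back to tools on PATH.
```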
def resolve_tool_path(
tool_name: str,
derived_path: str | None,
objdump_path: str | None = None,
) -> str | None:
"""Resolve a tool path, falling back to find_tool if derived path doesn't exist.
Args:
tool_name: Name of the tool (e.g., "objdump", "readelf")
derived_path: Path derived from idedata (may not exist for some platforms)
objdump_path: Path to objdump binary to derive other tool paths from
Returns:
Resolved path to the tool, or the original derived_path if it exists
"""
if derived_path and not Path(derived_path).exists():
found = find_tool(tool_name, objdump_path)
if found:
_LOGGER.debug(
"Derived %s path %s not found, using %s",
tool_name,
derived_path,
found,
)
return found
return derived_path
def find_tool(
tool_name: str,
objdump_path: str | None = None,
@@ -101,8 +28,7 @@ def find_tool(
"""Find a toolchain tool by name.
First tries to derive the tool path from objdump_path (if provided),
then searches PlatformIO package directories (for cross-compile toolchains),
and finally falls back to searching for platform-specific tools in PATH.
then falls back to searching for platform-specific tools.
Args:
tool_name: Name of the tool (e.g., "objdump", "nm", "c++filt")
@@ -121,13 +47,7 @@ def find_tool(
_LOGGER.debug("Found %s at: %s", tool_name, potential_path)
return potential_path
# Search in PlatformIO packages directory first (handles Zephyr SDK, etc.)
# This must come before PATH search because system tools (e.g., /usr/bin/objdump)
# are for the host architecture, not the target (ARM, Xtensa, etc.)
if found := _find_in_platformio_packages(tool_name):
return found
# Try platform-specific tools in PATH (fallback for when tools are installed globally)
# Try platform-specific tools
for prefix in TOOLCHAIN_PREFIXES:
cmd = f"{prefix}{tool_name}"
try:
View File
@@ -1,139 +0,0 @@
"""ESP-IDF direct build generator for ESPHome."""
import json
from pathlib import Path
from esphome.components.esp32 import get_esp32_variant
from esphome.core import CORE
from esphome.helpers import mkdir_p, write_file_if_changed
def get_available_components() -> list[str] | None:
"""Get list of available ESP-IDF components from project_description.json.
Returns only internal ESP-IDF components, excluding external/managed
components (from idf_component.yml).
"""
project_desc = Path(CORE.build_path) / "build" / "project_description.json"
if not project_desc.exists():
return None
try:
with open(project_desc, encoding="utf-8") as f:
data = json.load(f)
component_info = data.get("build_component_info", {})
result = []
for name, info in component_info.items():
# Exclude our own src component
if name == "src":
continue
# Exclude managed/external components
comp_dir = info.get("dir", "")
if "managed_components" in comp_dir:
continue
result.append(name)
return result
except (json.JSONDecodeError, OSError):
return None
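For reference, an invented project_description.json fragment of the shape this function consumes; only the keys read above matter:

```python
# Invented example input (only "build_component_info" and each entry's "dir" are read).
data = {
    "build_component_info": {
        "src": {"dir": "/build/src"},                                  # skipped: ESPHome's own component
        "esp_adc": {"dir": "/opt/esp-idf/components/esp_adc"},         # kept: internal IDF component
        "mdns": {"dir": "/build/managed_components/espressif__mdns"},  # skipped: managed component
    }
}
# With this input, get_available_components() would return ["esp_adc"].
```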
def has_discovered_components() -> bool:
"""Check if we have discovered components from a previous configure."""
return get_available_components() is not None
def get_project_cmakelists() -> str:
"""Generate the top-level CMakeLists.txt for ESP-IDF project."""
# Get IDF target from ESP32 variant (e.g., ESP32S3 -> esp32s3)
variant = get_esp32_variant()
idf_target = variant.lower().replace("-", "")
return f"""\
# Auto-generated by ESPHome
cmake_minimum_required(VERSION 3.16)
set(IDF_TARGET {idf_target})
set(EXTRA_COMPONENT_DIRS ${{CMAKE_SOURCE_DIR}}/src)
include($ENV{{IDF_PATH}}/tools/cmake/project.cmake)
project({CORE.name})
"""
def get_component_cmakelists(minimal: bool = False) -> str:
"""Generate the main component CMakeLists.txt."""
idf_requires = [] if minimal else (get_available_components() or [])
requires_str = " ".join(idf_requires)
# Extract compile definitions from build flags (-DXXX -> XXX)
compile_defs = [flag[2:] for flag in CORE.build_flags if flag.startswith("-D")]
compile_defs_str = "\n ".join(compile_defs) if compile_defs else ""
# Extract compile options (-W flags, excluding linker flags)
compile_opts = [
flag
for flag in CORE.build_flags
if flag.startswith("-W") and not flag.startswith("-Wl,")
]
compile_opts_str = "\n ".join(compile_opts) if compile_opts else ""
# Extract linker options (-Wl, flags)
link_opts = [flag for flag in CORE.build_flags if flag.startswith("-Wl,")]
link_opts_str = "\n ".join(link_opts) if link_opts else ""
return f"""\
# Auto-generated by ESPHome
file(GLOB_RECURSE app_sources
"${{CMAKE_CURRENT_SOURCE_DIR}}/*.cpp"
"${{CMAKE_CURRENT_SOURCE_DIR}}/*.c"
"${{CMAKE_CURRENT_SOURCE_DIR}}/esphome/*.cpp"
"${{CMAKE_CURRENT_SOURCE_DIR}}/esphome/*.c"
)
idf_component_register(
SRCS ${{app_sources}}
INCLUDE_DIRS "." "esphome"
REQUIRES {requires_str}
)
# Apply C++ standard
target_compile_features(${{COMPONENT_LIB}} PUBLIC cxx_std_20)
# ESPHome compile definitions
target_compile_definitions(${{COMPONENT_LIB}} PUBLIC
{compile_defs_str}
)
# ESPHome compile options
target_compile_options(${{COMPONENT_LIB}} PUBLIC
{compile_opts_str}
)
# ESPHome linker options
target_link_options(${{COMPONENT_LIB}} PUBLIC
{link_opts_str}
)
"""
def write_project(minimal: bool = False) -> None:
"""Write ESP-IDF project files."""
mkdir_p(CORE.build_path)
mkdir_p(CORE.relative_src_path())
# Write top-level CMakeLists.txt
write_file_if_changed(
CORE.relative_build_path("CMakeLists.txt"),
get_project_cmakelists(),
)
# Write component CMakeLists.txt in src/
write_file_if_changed(
CORE.relative_src_path("CMakeLists.txt"),
get_component_cmakelists(minimal=minimal),
)
View File
@@ -69,7 +69,6 @@ from esphome.cpp_types import ( # noqa: F401
JsonObjectConst,
Parented,
PollingComponent,
StringRef,
arduino_json_ns,
bool_,
const_char_ptr,
@@ -87,7 +86,6 @@ from esphome.cpp_types import ( # noqa: F401
size_t,
std_ns,
std_shared_ptr,
std_span,
std_string,
std_string_ref,
std_vector,
View File
@@ -45,6 +45,8 @@ void AbsoluteHumidityComponent::dump_config() {
this->temperature_sensor_->get_name().c_str(), this->humidity_sensor_->get_name().c_str());
}
float AbsoluteHumidityComponent::get_setup_priority() const { return setup_priority::DATA; }
void AbsoluteHumidityComponent::loop() {
if (!this->next_update_) {
return;
View File
@@ -24,6 +24,7 @@ class AbsoluteHumidityComponent : public sensor::Sensor, public Component {
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void loop() override;
protected:
View File
@@ -68,6 +68,11 @@ class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage
/// This method is called during the ESPHome setup process to log the configuration.
void dump_config() override;
/// Return the setup priority for this component.
/// Components with higher priority are initialized earlier during setup.
/// @return A float representing the setup priority.
float get_setup_priority() const override;
#ifdef USE_ZEPHYR
/// Set the ADC channel to be used by the ADC sensor.
/// @param channel Pointer to an adc_dt_spec structure representing the ADC channel.
View File
@@ -79,5 +79,7 @@ void ADCSensor::set_sample_count(uint8_t sample_count) {
void ADCSensor::set_sampling_mode(SamplingMode sampling_mode) { this->sampling_mode_ = sampling_mode; }
float ADCSensor::get_setup_priority() const { return setup_priority::DATA; }
} // namespace adc
} // namespace esphome
View File
@@ -42,11 +42,11 @@ void ADCSensor::setup() {
adc_oneshot_unit_init_cfg_t init_config = {}; // Zero initialize
init_config.unit_id = this->adc_unit_;
init_config.ulp_mode = ADC_ULP_MODE_DISABLE;
#if USE_ESP32_VARIANT_ESP32C2 || USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || \
USE_ESP32_VARIANT_ESP32C6 || USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
init_config.clk_src = ADC_DIGI_CLK_SRC_DEFAULT;
#endif // USE_ESP32_VARIANT_ESP32C2 || USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 ||
// USE_ESP32_VARIANT_ESP32C6 || USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
#endif // USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 ||
// USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
esp_err_t err = adc_oneshot_new_unit(&init_config, &ADCSensor::shared_adc_handles[this->adc_unit_]);
if (err != ESP_OK) {
ESP_LOGE(TAG, "Error initializing %s: %d", LOG_STR_ARG(adc_unit_to_str(this->adc_unit_)), err);
@@ -76,7 +76,7 @@ void ADCSensor::setup() {
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
// RISC-V variants (except C2) and S3 use curve fitting calibration
// RISC-V variants and S3 use curve fitting calibration
adc_cali_curve_fitting_config_t cali_config = {}; // Zero initialize first
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 3, 0)
cali_config.chan = this->channel_;
@@ -94,14 +94,14 @@ void ADCSensor::setup() {
ESP_LOGW(TAG, "Curve fitting calibration failed with error %d, will use uncalibrated readings", err);
this->setup_flags_.calibration_complete = false;
}
#else // ESP32, ESP32-S2, and ESP32-C2 use line fitting calibration
#else // Other ESP32 variants use line fitting calibration
adc_cali_line_fitting_config_t cali_config = {
.unit_id = this->adc_unit_,
.atten = this->attenuation_,
.bitwidth = ADC_BITWIDTH_DEFAULT,
#if !defined(USE_ESP32_VARIANT_ESP32S2) && !defined(USE_ESP32_VARIANT_ESP32C2)
#if !defined(USE_ESP32_VARIANT_ESP32S2)
.default_vref = 1100, // Default reference voltage in mV
#endif // !defined(USE_ESP32_VARIANT_ESP32S2) && !defined(USE_ESP32_VARIANT_ESP32C2)
#endif // !defined(USE_ESP32_VARIANT_ESP32S2)
};
err = adc_cali_create_scheme_line_fitting(&cali_config, &handle);
if (err == ESP_OK) {
@@ -112,7 +112,7 @@ void ADCSensor::setup() {
ESP_LOGW(TAG, "Line fitting calibration failed with error %d, will use uncalibrated readings", err);
this->setup_flags_.calibration_complete = false;
}
#endif // ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
#endif // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
}
this->setup_flags_.init_complete = true;
@@ -189,7 +189,7 @@ float ADCSensor::sample_fixed_attenuation_() {
adc_cali_delete_scheme_curve_fitting(this->calibration_handle_);
#else // Other ESP32 variants use line fitting calibration
adc_cali_delete_scheme_line_fitting(this->calibration_handle_);
#endif // ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
#endif // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
this->calibration_handle_ = nullptr;
}
}
@@ -247,7 +247,7 @@ float ADCSensor::sample_autorange_() {
.unit_id = this->adc_unit_,
.atten = atten,
.bitwidth = ADC_BITWIDTH_DEFAULT,
#if !defined(USE_ESP32_VARIANT_ESP32S2) && !defined(USE_ESP32_VARIANT_ESP32C2)
#if !defined(USE_ESP32_VARIANT_ESP32S2)
.default_vref = 1100,
#endif
};
View File
@@ -2,7 +2,7 @@ import logging
import esphome.codegen as cg
from esphome.components import sensor, voltage_sampler
from esphome.components.esp32 import get_esp32_variant, include_builtin_idf_component
from esphome.components.esp32 import get_esp32_variant
from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
from esphome.components.zephyr import (
zephyr_add_overlay,
@@ -118,9 +118,6 @@ async def to_code(config):
cg.add(var.set_sampling_mode(config[CONF_SAMPLING_MODE]))
if CORE.is_esp32:
# Re-enable ESP-IDF's ADC driver (excluded by default to save compile time)
include_builtin_idf_component("esp_adc")
if attenuation := config.get(CONF_ATTENUATION):
if attenuation == "auto":
cg.add(var.set_autorange(cg.global_ns.true))
@@ -163,21 +160,21 @@ async def to_code(config):
zephyr_add_user("io-channels", f"<&adc {channel_id}>")
zephyr_add_overlay(
f"""
&adc {{
#address-cells = <1>;
#size-cells = <0>;
&adc {{
#address-cells = <1>;
#size-cells = <0>;
channel@{channel_id} {{
reg = <{channel_id}>;
zephyr,gain = "{gain}";
zephyr,reference = "ADC_REF_INTERNAL";
zephyr,acquisition-time = <ADC_ACQ_TIME_DEFAULT>;
zephyr,input-positive = <NRF_SAADC_{pin_number}>;
zephyr,resolution = <14>;
zephyr,oversampling = <8>;
}};
}};
"""
channel@{channel_id} {{
reg = <{channel_id}>;
zephyr,gain = "{gain}";
zephyr,reference = "ADC_REF_INTERNAL";
zephyr,acquisition-time = <ADC_ACQ_TIME_DEFAULT>;
zephyr,input-positive = <NRF_SAADC_{pin_number}>;
zephyr,resolution = <14>;
zephyr,oversampling = <8>;
}};
}};
"""
)
View File
@@ -9,6 +9,8 @@ static const char *const TAG = "adc128s102.sensor";
ADC128S102Sensor::ADC128S102Sensor(uint8_t channel) : channel_(channel) {}
float ADC128S102Sensor::get_setup_priority() const { return setup_priority::DATA; }
void ADC128S102Sensor::dump_config() {
LOG_SENSOR("", "ADC128S102 Sensor", this);
ESP_LOGCONFIG(TAG, " Pin: %u", this->channel_);
View File
@@ -19,6 +19,7 @@ class ADC128S102Sensor : public PollingComponent,
void update() override;
void dump_config() override;
float get_setup_priority() const override;
float sample() override;
protected:
View File
@@ -150,6 +150,8 @@ void AHT10Component::update() {
this->restart_read_();
}
float AHT10Component::get_setup_priority() const { return setup_priority::DATA; }
void AHT10Component::dump_config() {
ESP_LOGCONFIG(TAG, "AHT10:");
LOG_I2C_DEVICE(this);
View File
@@ -16,6 +16,7 @@ class AHT10Component : public PollingComponent, public i2c::I2CDevice {
void setup() override;
void update() override;
void dump_config() override;
float get_setup_priority() const override;
void set_variant(AHT10Variant variant) { this->variant_ = variant; }
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
View File
@@ -67,29 +67,52 @@ void AlarmControlPanel::add_on_ready_callback(std::function<void()> &&callback)
this->ready_callback_.add(std::move(callback));
}
void AlarmControlPanel::arm_with_code_(AlarmControlPanelCall &(AlarmControlPanelCall::*arm_method)(),
const char *code) {
void AlarmControlPanel::arm_away(optional<std::string> code) {
auto call = this->make_call();
(call.*arm_method)();
if (code != nullptr)
call.set_code(code);
call.arm_away();
if (code.has_value())
call.set_code(code.value());
call.perform();
}
void AlarmControlPanel::arm_away(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::arm_away, code); }
void AlarmControlPanel::arm_home(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::arm_home, code); }
void AlarmControlPanel::arm_night(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::arm_night, code); }
void AlarmControlPanel::arm_vacation(const char *code) {
this->arm_with_code_(&AlarmControlPanelCall::arm_vacation, code);
void AlarmControlPanel::arm_home(optional<std::string> code) {
auto call = this->make_call();
call.arm_home();
if (code.has_value())
call.set_code(code.value());
call.perform();
}
void AlarmControlPanel::arm_custom_bypass(const char *code) {
this->arm_with_code_(&AlarmControlPanelCall::arm_custom_bypass, code);
void AlarmControlPanel::arm_night(optional<std::string> code) {
auto call = this->make_call();
call.arm_night();
if (code.has_value())
call.set_code(code.value());
call.perform();
}
void AlarmControlPanel::disarm(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::disarm, code); }
void AlarmControlPanel::arm_vacation(optional<std::string> code) {
auto call = this->make_call();
call.arm_vacation();
if (code.has_value())
call.set_code(code.value());
call.perform();
}
void AlarmControlPanel::arm_custom_bypass(optional<std::string> code) {
auto call = this->make_call();
call.arm_custom_bypass();
if (code.has_value())
call.set_code(code.value());
call.perform();
}
void AlarmControlPanel::disarm(optional<std::string> code) {
auto call = this->make_call();
call.disarm();
if (code.has_value())
call.set_code(code.value());
call.perform();
}
} // namespace esphome::alarm_control_panel
View File
@@ -76,53 +76,37 @@ class AlarmControlPanel : public EntityBase {
*
* @param code The code
*/
void arm_away(const char *code = nullptr);
void arm_away(const optional<std::string> &code) {
this->arm_away(code.has_value() ? code.value().c_str() : nullptr);
}
void arm_away(optional<std::string> code = nullopt);
/** arm the alarm in home mode
*
* @param code The code
*/
void arm_home(const char *code = nullptr);
void arm_home(const optional<std::string> &code) {
this->arm_home(code.has_value() ? code.value().c_str() : nullptr);
}
void arm_home(optional<std::string> code = nullopt);
/** arm the alarm in night mode
*
* @param code The code
*/
void arm_night(const char *code = nullptr);
void arm_night(const optional<std::string> &code) {
this->arm_night(code.has_value() ? code.value().c_str() : nullptr);
}
void arm_night(optional<std::string> code = nullopt);
/** arm the alarm in vacation mode
*
* @param code The code
*/
void arm_vacation(const char *code = nullptr);
void arm_vacation(const optional<std::string> &code) {
this->arm_vacation(code.has_value() ? code.value().c_str() : nullptr);
}
void arm_vacation(optional<std::string> code = nullopt);
/** arm the alarm in custom bypass mode
*
* @param code The code
*/
void arm_custom_bypass(const char *code = nullptr);
void arm_custom_bypass(const optional<std::string> &code) {
this->arm_custom_bypass(code.has_value() ? code.value().c_str() : nullptr);
}
void arm_custom_bypass(optional<std::string> code = nullopt);
/** disarm the alarm
*
* @param code The code
*/
void disarm(const char *code = nullptr);
void disarm(const optional<std::string> &code) { this->disarm(code.has_value() ? code.value().c_str() : nullptr); }
void disarm(optional<std::string> code = nullopt);
/** Get the state
*
@@ -134,8 +118,6 @@ class AlarmControlPanel : public EntityBase {
protected:
friend AlarmControlPanelCall;
// Helper to reduce code duplication for arm/disarm methods
void arm_with_code_(AlarmControlPanelCall &(AlarmControlPanelCall::*arm_method)(), const char *code);
// in order to store last panel state in flash
ESPPreferenceObject pref_;
// current state
View File
@@ -10,10 +10,8 @@ static const char *const TAG = "alarm_control_panel";
AlarmControlPanelCall::AlarmControlPanelCall(AlarmControlPanel *parent) : parent_(parent) {}
AlarmControlPanelCall &AlarmControlPanelCall::set_code(const char *code) {
if (code != nullptr) {
this->code_ = std::string(code);
}
AlarmControlPanelCall &AlarmControlPanelCall::set_code(const std::string &code) {
this->code_ = code;
return *this;
}
View File
@@ -14,8 +14,7 @@ class AlarmControlPanelCall {
public:
AlarmControlPanelCall(AlarmControlPanel *parent);
AlarmControlPanelCall &set_code(const char *code);
AlarmControlPanelCall &set_code(const std::string &code) { return this->set_code(code.c_str()); }
AlarmControlPanelCall &set_code(const std::string &code);
AlarmControlPanelCall &arm_away();
AlarmControlPanelCall &arm_home();
AlarmControlPanelCall &arm_night();
View File
@@ -1,15 +1,32 @@
#include "alarm_control_panel_state.h"
#include "esphome/core/progmem.h"
namespace esphome::alarm_control_panel {
// Alarm control panel state strings indexed by AlarmControlPanelState enum (0-9)
PROGMEM_STRING_TABLE(AlarmControlPanelStateStrings, "DISARMED", "ARMED_HOME", "ARMED_AWAY", "ARMED_NIGHT",
"ARMED_VACATION", "ARMED_CUSTOM_BYPASS", "PENDING", "ARMING", "DISARMING", "TRIGGERED", "UNKNOWN");
const LogString *alarm_control_panel_state_to_string(AlarmControlPanelState state) {
return AlarmControlPanelStateStrings::get_log_str(static_cast<uint8_t>(state),
AlarmControlPanelStateStrings::LAST_INDEX);
switch (state) {
case ACP_STATE_DISARMED:
return LOG_STR("DISARMED");
case ACP_STATE_ARMED_HOME:
return LOG_STR("ARMED_HOME");
case ACP_STATE_ARMED_AWAY:
return LOG_STR("ARMED_AWAY");
case ACP_STATE_ARMED_NIGHT:
return LOG_STR("ARMED_NIGHT");
case ACP_STATE_ARMED_VACATION:
return LOG_STR("ARMED_VACATION");
case ACP_STATE_ARMED_CUSTOM_BYPASS:
return LOG_STR("ARMED_CUSTOM_BYPASS");
case ACP_STATE_PENDING:
return LOG_STR("PENDING");
case ACP_STATE_ARMING:
return LOG_STR("ARMING");
case ACP_STATE_DISARMING:
return LOG_STR("DISARMING");
case ACP_STATE_TRIGGERED:
return LOG_STR("TRIGGERED");
default:
return LOG_STR("UNKNOWN");
}
}
} // namespace esphome::alarm_control_panel
View File
@@ -66,7 +66,15 @@ template<typename... Ts> class ArmAwayAction : public Action<Ts...> {
TEMPLATABLE_VALUE(std::string, code)
void play(const Ts &...x) override { this->alarm_control_panel_->arm_away(this->code_.optional_value(x...)); }
void play(const Ts &...x) override {
auto call = this->alarm_control_panel_->make_call();
auto code = this->code_.optional_value(x...);
if (code.has_value()) {
call.set_code(code.value());
}
call.arm_away();
call.perform();
}
protected:
AlarmControlPanel *alarm_control_panel_;
@@ -78,7 +86,15 @@ template<typename... Ts> class ArmHomeAction : public Action<Ts...> {
TEMPLATABLE_VALUE(std::string, code)
void play(const Ts &...x) override { this->alarm_control_panel_->arm_home(this->code_.optional_value(x...)); }
void play(const Ts &...x) override {
auto call = this->alarm_control_panel_->make_call();
auto code = this->code_.optional_value(x...);
if (code.has_value()) {
call.set_code(code.value());
}
call.arm_home();
call.perform();
}
protected:
AlarmControlPanel *alarm_control_panel_;
@@ -90,7 +106,15 @@ template<typename... Ts> class ArmNightAction : public Action<Ts...> {
TEMPLATABLE_VALUE(std::string, code)
void play(const Ts &...x) override { this->alarm_control_panel_->arm_night(this->code_.optional_value(x...)); }
void play(const Ts &...x) override {
auto call = this->alarm_control_panel_->make_call();
auto code = this->code_.optional_value(x...);
if (code.has_value()) {
call.set_code(code.value());
}
call.arm_night();
call.perform();
}
protected:
AlarmControlPanel *alarm_control_panel_;
View File
@@ -176,5 +176,7 @@ void AM2315C::dump_config() {
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
}
float AM2315C::get_setup_priority() const { return setup_priority::DATA; }
} // namespace am2315c
} // namespace esphome
View File
@@ -33,6 +33,7 @@ class AM2315C : public PollingComponent, public i2c::I2CDevice {
void dump_config() override;
void update() override;
void setup() override;
float get_setup_priority() const override;
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { this->temperature_sensor_ = temperature_sensor; }
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }
View File
@@ -51,6 +51,7 @@ void AM2320Component::dump_config() {
LOG_SENSOR(" ", "Temperature", this->temperature_sensor_);
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
}
float AM2320Component::get_setup_priority() const { return setup_priority::DATA; }
bool AM2320Component::read_bytes_(uint8_t a_register, uint8_t *data, uint8_t len, uint32_t conversion) {
if (!this->write_bytes(a_register, data, 2)) {
View File
@@ -11,6 +11,7 @@ class AM2320Component : public PollingComponent, public i2c::I2CDevice {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void update() override;
void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
View File
@@ -1,12 +1,21 @@
#include "am43_base.h"
#include "esphome/core/helpers.h"
#include <cstring>
#include <cstdio>
namespace esphome {
namespace am43 {
const uint8_t START_PACKET[5] = {0x00, 0xff, 0x00, 0x00, 0x9a};
std::string pkt_to_hex(const uint8_t *data, uint16_t len) {
char buf[64];
memset(buf, 0, 64);
for (int i = 0; i < len; i++)
sprintf(&buf[i * 2], "%02x", data[i]);
std::string ret = buf;
return ret;
}
Am43Packet *Am43Encoder::get_battery_level_request() {
uint8_t data = 0x1;
return this->encode_(0xA2, &data, 1);
@@ -64,9 +73,7 @@ Am43Packet *Am43Encoder::encode_(uint8_t command, uint8_t *data, uint8_t length)
memcpy(&this->packet_.data[7], data, length);
this->packet_.length = length + 7;
this->checksum_();
char hex_buf[format_hex_size(sizeof(this->packet_.data))];
ESP_LOGV("am43", "ENC(%d): 0x%s", this->packet_.length,
format_hex_to(hex_buf, this->packet_.data, this->packet_.length));
ESP_LOGV("am43", "ENC(%d): 0x%s", packet_.length, pkt_to_hex(packet_.data, packet_.length).c_str());
return &this->packet_;
}
@@ -81,8 +88,7 @@ void Am43Decoder::decode(const uint8_t *data, uint16_t length) {
this->has_set_state_response_ = false;
this->has_position_ = false;
this->has_pin_response_ = false;
char hex_buf[format_hex_size(24)]; // Max expected packet size
ESP_LOGV("am43", "DEC(%d): 0x%s", length, format_hex_to(hex_buf, data, length));
ESP_LOGV("am43", "DEC(%d): 0x%s", length, pkt_to_hex(data, length).c_str());
if (length < 2 || data[0] != 0x9a)
return;
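The AM43 hunks above trade a `sprintf`-into-`char buf[64]` hex helper against a formatter that writes into a caller-sized stack buffer (`format_hex_to` with `format_hex_size`). A small sketch of a bounds-checked hex dump in the same spirit, assuming only the standard library (the helper name here is made up):

```cpp
#include <cstdint>
#include <cstdio>
#include <iostream>

// Format `len` bytes as lowercase hex into `out` (capacity `out_size`),
// truncating safely instead of running past a fixed-size buffer.
static void hex_format(const uint8_t *data, size_t len, char *out, size_t out_size) {
  size_t pos = 0;
  for (size_t i = 0; i < len && pos + 3 <= out_size; i++) {
    // With at least 3 bytes of room, snprintf writes exactly two hex digits plus NUL.
    pos += snprintf(out + pos, out_size - pos, "%02x", data[i]);
  }
  if (out_size > 0)
    out[pos < out_size ? pos : out_size - 1] = '\0';
}

int main() {
  const uint8_t pkt[] = {0x00, 0xff, 0x00, 0x00, 0x9a};
  char buf[2 * sizeof(pkt) + 1];
  hex_format(pkt, sizeof(pkt), buf, sizeof(buf));
  std::cout << "0x" << buf << "\n";  // prints 0x00ff00009a
}
```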

View File

@@ -18,31 +18,31 @@ AnovaPacket *AnovaCodec::clean_packet_() {
AnovaPacket *AnovaCodec::get_read_device_status_request() {
this->current_query_ = READ_DEVICE_STATUS;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_DEVICE_STATUS);
sprintf((char *) this->packet_.data, "%s", CMD_READ_DEVICE_STATUS);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_read_target_temp_request() {
this->current_query_ = READ_TARGET_TEMPERATURE;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_TARGET_TEMP);
sprintf((char *) this->packet_.data, "%s", CMD_READ_TARGET_TEMP);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_read_current_temp_request() {
this->current_query_ = READ_CURRENT_TEMPERATURE;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_CURRENT_TEMP);
sprintf((char *) this->packet_.data, "%s", CMD_READ_CURRENT_TEMP);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_read_unit_request() {
this->current_query_ = READ_UNIT;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_UNIT);
sprintf((char *) this->packet_.data, "%s", CMD_READ_UNIT);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_read_data_request() {
this->current_query_ = READ_DATA;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_DATA);
sprintf((char *) this->packet_.data, "%s", CMD_READ_DATA);
return this->clean_packet_();
}
@@ -50,25 +50,25 @@ AnovaPacket *AnovaCodec::get_set_target_temp_request(float temperature) {
this->current_query_ = SET_TARGET_TEMPERATURE;
if (this->fahrenheit_)
temperature = ctof(temperature);
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), CMD_SET_TARGET_TEMP, temperature);
sprintf((char *) this->packet_.data, CMD_SET_TARGET_TEMP, temperature);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_set_unit_request(char unit) {
this->current_query_ = SET_UNIT;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), CMD_SET_TEMP_UNIT, unit);
sprintf((char *) this->packet_.data, CMD_SET_TEMP_UNIT, unit);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_start_request() {
this->current_query_ = START;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_START);
sprintf((char *) this->packet_.data, CMD_START);
return this->clean_packet_();
}
AnovaPacket *AnovaCodec::get_stop_request() {
this->current_query_ = STOP;
snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_STOP);
sprintf((char *) this->packet_.data, CMD_STOP);
return this->clean_packet_();
}
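The Anova hunks above differ only in whether the command string is written with `sprintf` or with a size-bounded `snprintf(..., sizeof(...))`. A tiny self-contained illustration of why the bounded form is the safer default; the command string and buffer size below are invented for the example:

```cpp
#include <cstdio>

int main() {
  char data[16];                         // hypothetical packet payload buffer
  const char *cmd = "read target temp";  // 16 chars + NUL would overflow data[]

  // Unbounded: writes strlen(cmd) + 1 bytes regardless of sizeof(data).
  // sprintf(data, "%s", cmd);           // undefined behaviour if cmd is too long

  // Bounded: writes at most sizeof(data) bytes and always NUL-terminates.
  int needed = snprintf(data, sizeof(data), "%s", cmd);
  if (needed >= static_cast<int>(sizeof(data)))
    std::printf("truncated: needed %d bytes\n", needed);
  std::printf("payload: %s\n", data);
}
```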

View File

@@ -384,6 +384,7 @@ void APDS9960::process_dataset_(int up, int down, int left, int right) {
}
}
}
float APDS9960::get_setup_priority() const { return setup_priority::DATA; }
bool APDS9960::is_proximity_enabled_() const {
return
#ifdef USE_SENSOR

View File

@@ -32,6 +32,7 @@ class APDS9960 : public PollingComponent, public i2c::I2CDevice {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void update() override;
void loop() override;
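Several of the hunks above add or drop a `get_setup_priority()` override returning `setup_priority::DATA`. A minimal sketch of how such a priority is typically consulted when ordering component setup; the `Component` base and priority constants here are illustrative stand-ins, not the real ESPHome definitions:

```cpp
#include <algorithm>
#include <iostream>
#include <vector>

// Stand-in priority constants for the example.
namespace setup_priority {
constexpr float BUS = 1000.0f;   // buses come up first
constexpr float DATA = 600.0f;   // data-producing sensors come up later
}  // namespace setup_priority

struct Component {
  virtual ~Component() = default;
  virtual const char *name() const = 0;
  virtual float get_setup_priority() const { return setup_priority::DATA; }
  void setup() { std::cout << "setup " << name() << "\n"; }
};

struct I2CBus : Component {
  const char *name() const override { return "i2c_bus"; }
  float get_setup_priority() const override { return setup_priority::BUS; }
};

struct TempSensor : Component {
  const char *name() const override { return "am2315c"; }
  // Relies on the default DATA priority, like the sensors in the hunks above.
};

int main() {
  I2CBus bus;
  TempSensor sensor;
  std::vector<Component *> components{&sensor, &bus};
  // Higher-priority components are set up first.
  std::sort(components.begin(), components.end(),
            [](Component *a, Component *b) { return a->get_setup_priority() > b->get_setup_priority(); });
  for (auto *c : components)
    c->setup();
}
```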

View File

@@ -45,7 +45,6 @@ service APIConnection {
rpc time_command (TimeCommandRequest) returns (void) {}
rpc update_command (UpdateCommandRequest) returns (void) {}
rpc valve_command (ValveCommandRequest) returns (void) {}
rpc water_heater_command (WaterHeaterCommandRequest) returns (void) {}
rpc subscribe_bluetooth_le_advertisements(SubscribeBluetoothLEAdvertisementsRequest) returns (void) {}
rpc bluetooth_device_request(BluetoothDeviceRequest) returns (void) {}

View File

@@ -283,7 +283,7 @@ void APIConnection::loop() {
#endif
}
bool APIConnection::send_disconnect_response() {
bool APIConnection::send_disconnect_response(const DisconnectRequest &msg) {
// remote initiated disconnect_client
// don't close yet, we still need to send the disconnect response
// close will happen on next loop
@@ -292,7 +292,7 @@ bool APIConnection::send_disconnect_response() {
DisconnectResponse resp;
return this->send_message(resp, DisconnectResponse::MESSAGE_TYPE);
}
void APIConnection::on_disconnect_response() {
void APIConnection::on_disconnect_response(const DisconnectResponse &value) {
this->helper_->close();
this->flags_.remove = true;
}
@@ -300,7 +300,7 @@ void APIConnection::on_disconnect_response() {
// Encodes a message to the buffer and returns the total number of bytes used,
// including header and footer overhead. Returns 0 if the message doesn't fit.
uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint8_t message_type, APIConnection *conn,
uint32_t remaining_size) {
uint32_t remaining_size, bool is_single) {
#ifdef HAS_PROTO_MESSAGE_DUMP
// If in log-only mode, just log and return
if (conn->flags_.log_only_mode) {
@@ -330,9 +330,12 @@ uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint8_t mess
// Get buffer size after allocation (which includes header padding)
std::vector<uint8_t> &shared_buf = conn->parent_->get_shared_buffer_ref();
if (conn->flags_.batch_first_message) {
// First message - buffer already prepared by caller, just clear flag
conn->flags_.batch_first_message = false;
if (is_single || conn->flags_.batch_first_message) {
// Single message or first batch message
conn->prepare_first_message_buffer(shared_buf, header_padding, total_calculated_size);
if (conn->flags_.batch_first_message) {
conn->flags_.batch_first_message = false;
}
} else {
// Batch message second or later
// Add padding for previous message footer + this message header
@@ -362,22 +365,24 @@ bool APIConnection::send_binary_sensor_state(binary_sensor::BinarySensor *binary
BinarySensorStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *binary_sensor = static_cast<binary_sensor::BinarySensor *>(entity);
BinarySensorStateResponse resp;
resp.state = binary_sensor->state;
resp.missing_state = !binary_sensor->has_state();
return fill_and_encode_entity_state(binary_sensor, resp, BinarySensorStateResponse::MESSAGE_TYPE, conn,
remaining_size);
remaining_size, is_single);
}
uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *binary_sensor = static_cast<binary_sensor::BinarySensor *>(entity);
ListEntitiesBinarySensorResponse msg;
msg.device_class = binary_sensor->get_device_class_ref();
msg.is_status_binary_sensor = binary_sensor->is_status_binary_sensor();
return fill_and_encode_entity_info(binary_sensor, msg, ListEntitiesBinarySensorResponse::MESSAGE_TYPE, conn,
remaining_size);
remaining_size, is_single);
}
#endif
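Most of the APIConnection hunks that follow thread an extra `bool is_single` argument through every `try_send_*` helper and through `encode_message_to_buffer`, so the encoder knows whether it is writing a lone message or one item of a batch. A stripped-down sketch of that dispatch shape with a shared function-pointer signature; the types and names below are illustrative, not the real API:

```cpp
#include <cstdint>
#include <iostream>

struct Entity {
  const char *object_id;
};

struct Conn;  // forward declaration for the function-pointer signature

// All senders share one signature so a dispatcher can pick them by message type.
using TrySendFn = uint16_t (*)(Entity *, Conn *, uint32_t remaining_size, bool is_single);

struct Conn {
  bool batch_first_message = false;
};

static uint16_t try_send_sensor_state(Entity *e, Conn * /*conn*/, uint32_t remaining, bool is_single) {
  const uint16_t size = 12;  // pretend encoded size
  if (size > remaining)
    return 0;  // does not fit in the current batch
  std::cout << e->object_id << (is_single ? " (single)" : " (batched)") << "\n";
  return size;
}

static uint16_t dispatch(uint8_t message_type, Entity *e, Conn *c, uint32_t remaining, bool is_single) {
  TrySendFn fn = nullptr;
  switch (message_type) {
    case 1: fn = try_send_sensor_state; break;
    default: return 0;
  }
  return fn(e, c, remaining, is_single);
}

int main() {
  Entity sensor{"living_room_temp"};
  Conn conn;
  dispatch(1, &sensor, &conn, /*remaining=*/1400, /*is_single=*/true);
  dispatch(1, &sensor, &conn, /*remaining=*/1400, /*is_single=*/false);
}
```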
@@ -385,7 +390,8 @@ uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConne
bool APIConnection::send_cover_state(cover::Cover *cover) {
return this->send_message_smart_(cover, CoverStateResponse::MESSAGE_TYPE, CoverStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *cover = static_cast<cover::Cover *>(entity);
CoverStateResponse msg;
auto traits = cover->get_traits();
@@ -393,9 +399,10 @@ uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *
if (traits.get_supports_tilt())
msg.tilt = cover->tilt;
msg.current_operation = static_cast<enums::CoverOperation>(cover->current_operation);
return fill_and_encode_entity_state(cover, msg, CoverStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(cover, msg, CoverStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *cover = static_cast<cover::Cover *>(entity);
ListEntitiesCoverResponse msg;
auto traits = cover->get_traits();
@@ -404,7 +411,8 @@ uint16_t APIConnection::try_send_cover_info(EntityBase *entity, APIConnection *c
msg.supports_tilt = traits.get_supports_tilt();
msg.supports_stop = traits.get_supports_stop();
msg.device_class = cover->get_device_class_ref();
return fill_and_encode_entity_info(cover, msg, ListEntitiesCoverResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(cover, msg, ListEntitiesCoverResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::cover_command(const CoverCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(cover::Cover, cover, cover)
@@ -422,7 +430,8 @@ void APIConnection::cover_command(const CoverCommandRequest &msg) {
bool APIConnection::send_fan_state(fan::Fan *fan) {
return this->send_message_smart_(fan, FanStateResponse::MESSAGE_TYPE, FanStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *fan = static_cast<fan::Fan *>(entity);
FanStateResponse msg;
auto traits = fan->get_traits();
@@ -436,9 +445,10 @@ uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *co
msg.direction = static_cast<enums::FanDirection>(fan->direction);
if (traits.supports_preset_modes() && fan->has_preset_mode())
msg.preset_mode = fan->get_preset_mode();
return fill_and_encode_entity_state(fan, msg, FanStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(fan, msg, FanStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *fan = static_cast<fan::Fan *>(entity);
ListEntitiesFanResponse msg;
auto traits = fan->get_traits();
@@ -447,7 +457,7 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con
msg.supports_direction = traits.supports_direction();
msg.supported_speed_count = traits.supported_speed_count();
msg.supported_preset_modes = &traits.supported_preset_modes();
return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
void APIConnection::fan_command(const FanCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(fan::Fan, fan, fan)
@@ -471,7 +481,8 @@ void APIConnection::fan_command(const FanCommandRequest &msg) {
bool APIConnection::send_light_state(light::LightState *light) {
return this->send_message_smart_(light, LightStateResponse::MESSAGE_TYPE, LightStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *light = static_cast<light::LightState *>(entity);
LightStateResponse resp;
auto values = light->remote_values;
@@ -490,9 +501,10 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *
if (light->supports_effects()) {
resp.effect = light->get_effect_name();
}
return fill_and_encode_entity_state(light, resp, LightStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(light, resp, LightStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *light = static_cast<light::LightState *>(entity);
ListEntitiesLightResponse msg;
auto traits = light->get_traits();
@@ -515,7 +527,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c
}
}
msg.effects = &effects_list;
return fill_and_encode_entity_info(light, msg, ListEntitiesLightResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(light, msg, ListEntitiesLightResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::light_command(const LightCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(light::LightState, light, light)
@@ -555,15 +568,17 @@ bool APIConnection::send_sensor_state(sensor::Sensor *sensor) {
return this->send_message_smart_(sensor, SensorStateResponse::MESSAGE_TYPE, SensorStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *sensor = static_cast<sensor::Sensor *>(entity);
SensorStateResponse resp;
resp.state = sensor->state;
resp.missing_state = !sensor->has_state();
return fill_and_encode_entity_state(sensor, resp, SensorStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(sensor, resp, SensorStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *sensor = static_cast<sensor::Sensor *>(entity);
ListEntitiesSensorResponse msg;
msg.unit_of_measurement = sensor->get_unit_of_measurement_ref();
@@ -571,7 +586,8 @@ uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection *
msg.force_update = sensor->get_force_update();
msg.device_class = sensor->get_device_class_ref();
msg.state_class = static_cast<enums::SensorStateClass>(sensor->get_state_class());
return fill_and_encode_entity_info(sensor, msg, ListEntitiesSensorResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(sensor, msg, ListEntitiesSensorResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
#endif
@@ -580,19 +596,23 @@ bool APIConnection::send_switch_state(switch_::Switch *a_switch) {
return this->send_message_smart_(a_switch, SwitchStateResponse::MESSAGE_TYPE, SwitchStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *a_switch = static_cast<switch_::Switch *>(entity);
SwitchStateResponse resp;
resp.state = a_switch->state;
return fill_and_encode_entity_state(a_switch, resp, SwitchStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(a_switch, resp, SwitchStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
uint16_t APIConnection::try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *a_switch = static_cast<switch_::Switch *>(entity);
ListEntitiesSwitchResponse msg;
msg.assumed_state = a_switch->assumed_state();
msg.device_class = a_switch->get_device_class_ref();
return fill_and_encode_entity_info(a_switch, msg, ListEntitiesSwitchResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(a_switch, msg, ListEntitiesSwitchResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::switch_command(const SwitchCommandRequest &msg) {
ENTITY_COMMAND_GET(switch_::Switch, a_switch, switch)
@@ -611,19 +631,22 @@ bool APIConnection::send_text_sensor_state(text_sensor::TextSensor *text_sensor)
TextSensorStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *text_sensor = static_cast<text_sensor::TextSensor *>(entity);
TextSensorStateResponse resp;
resp.state = StringRef(text_sensor->state);
resp.missing_state = !text_sensor->has_state();
return fill_and_encode_entity_state(text_sensor, resp, TextSensorStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(text_sensor, resp, TextSensorStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *text_sensor = static_cast<text_sensor::TextSensor *>(entity);
ListEntitiesTextSensorResponse msg;
msg.device_class = text_sensor->get_device_class_ref();
return fill_and_encode_entity_info(text_sensor, msg, ListEntitiesTextSensorResponse::MESSAGE_TYPE, conn,
remaining_size);
remaining_size, is_single);
}
#endif
@@ -631,7 +654,8 @@ uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnect
bool APIConnection::send_climate_state(climate::Climate *climate) {
return this->send_message_smart_(climate, ClimateStateResponse::MESSAGE_TYPE, ClimateStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *climate = static_cast<climate::Climate *>(entity);
ClimateStateResponse resp;
auto traits = climate->get_traits();
@@ -663,9 +687,11 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
resp.current_humidity = climate->current_humidity;
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
resp.target_humidity = climate->target_humidity;
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *climate = static_cast<climate::Climate *>(entity);
ListEntitiesClimateResponse msg;
auto traits = climate->get_traits();
@@ -690,7 +716,8 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
msg.supported_presets = &traits.get_supported_presets();
msg.supported_custom_presets = &traits.get_supported_custom_presets();
msg.supported_swing_modes = &traits.get_supported_swing_modes();
return fill_and_encode_entity_info(climate, msg, ListEntitiesClimateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(climate, msg, ListEntitiesClimateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::climate_command(const ClimateCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(climate::Climate, climate, climate)
@@ -723,15 +750,17 @@ bool APIConnection::send_number_state(number::Number *number) {
return this->send_message_smart_(number, NumberStateResponse::MESSAGE_TYPE, NumberStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *number = static_cast<number::Number *>(entity);
NumberStateResponse resp;
resp.state = number->state;
resp.missing_state = !number->has_state();
return fill_and_encode_entity_state(number, resp, NumberStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(number, resp, NumberStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *number = static_cast<number::Number *>(entity);
ListEntitiesNumberResponse msg;
msg.unit_of_measurement = number->traits.get_unit_of_measurement_ref();
@@ -740,7 +769,8 @@ uint16_t APIConnection::try_send_number_info(EntityBase *entity, APIConnection *
msg.min_value = number->traits.get_min_value();
msg.max_value = number->traits.get_max_value();
msg.step = number->traits.get_step();
return fill_and_encode_entity_info(number, msg, ListEntitiesNumberResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(number, msg, ListEntitiesNumberResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::number_command(const NumberCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(number::Number, number, number)
@@ -753,19 +783,22 @@ void APIConnection::number_command(const NumberCommandRequest &msg) {
bool APIConnection::send_date_state(datetime::DateEntity *date) {
return this->send_message_smart_(date, DateStateResponse::MESSAGE_TYPE, DateStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *date = static_cast<datetime::DateEntity *>(entity);
DateStateResponse resp;
resp.missing_state = !date->has_state();
resp.year = date->year;
resp.month = date->month;
resp.day = date->day;
return fill_and_encode_entity_state(date, resp, DateStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(date, resp, DateStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *date = static_cast<datetime::DateEntity *>(entity);
ListEntitiesDateResponse msg;
return fill_and_encode_entity_info(date, msg, ListEntitiesDateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(date, msg, ListEntitiesDateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::date_command(const DateCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(datetime::DateEntity, date, date)
@@ -778,19 +811,22 @@ void APIConnection::date_command(const DateCommandRequest &msg) {
bool APIConnection::send_time_state(datetime::TimeEntity *time) {
return this->send_message_smart_(time, TimeStateResponse::MESSAGE_TYPE, TimeStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *time = static_cast<datetime::TimeEntity *>(entity);
TimeStateResponse resp;
resp.missing_state = !time->has_state();
resp.hour = time->hour;
resp.minute = time->minute;
resp.second = time->second;
return fill_and_encode_entity_state(time, resp, TimeStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(time, resp, TimeStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *time = static_cast<datetime::TimeEntity *>(entity);
ListEntitiesTimeResponse msg;
return fill_and_encode_entity_info(time, msg, ListEntitiesTimeResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(time, msg, ListEntitiesTimeResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::time_command(const TimeCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(datetime::TimeEntity, time, time)
@@ -804,7 +840,8 @@ bool APIConnection::send_datetime_state(datetime::DateTimeEntity *datetime) {
return this->send_message_smart_(datetime, DateTimeStateResponse::MESSAGE_TYPE,
DateTimeStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *datetime = static_cast<datetime::DateTimeEntity *>(entity);
DateTimeStateResponse resp;
resp.missing_state = !datetime->has_state();
@@ -812,12 +849,15 @@ uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnectio
ESPTime state = datetime->state_as_esptime();
resp.epoch_seconds = state.timestamp;
}
return fill_and_encode_entity_state(datetime, resp, DateTimeStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(datetime, resp, DateTimeStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
uint16_t APIConnection::try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *datetime = static_cast<datetime::DateTimeEntity *>(entity);
ListEntitiesDateTimeResponse msg;
return fill_and_encode_entity_info(datetime, msg, ListEntitiesDateTimeResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(datetime, msg, ListEntitiesDateTimeResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::datetime_command(const DateTimeCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(datetime::DateTimeEntity, datetime, datetime)
@@ -831,22 +871,25 @@ bool APIConnection::send_text_state(text::Text *text) {
return this->send_message_smart_(text, TextStateResponse::MESSAGE_TYPE, TextStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *text = static_cast<text::Text *>(entity);
TextStateResponse resp;
resp.state = StringRef(text->state);
resp.missing_state = !text->has_state();
return fill_and_encode_entity_state(text, resp, TextStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(text, resp, TextStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *text = static_cast<text::Text *>(entity);
ListEntitiesTextResponse msg;
msg.mode = static_cast<enums::TextMode>(text->traits.get_mode());
msg.min_length = text->traits.get_min_length();
msg.max_length = text->traits.get_max_length();
msg.pattern = text->traits.get_pattern_ref();
return fill_and_encode_entity_info(text, msg, ListEntitiesTextResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(text, msg, ListEntitiesTextResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::text_command(const TextCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(text::Text, text, text)
@@ -860,19 +903,22 @@ bool APIConnection::send_select_state(select::Select *select) {
return this->send_message_smart_(select, SelectStateResponse::MESSAGE_TYPE, SelectStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *select = static_cast<select::Select *>(entity);
SelectStateResponse resp;
resp.state = select->current_option();
resp.missing_state = !select->has_state();
return fill_and_encode_entity_state(select, resp, SelectStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(select, resp, SelectStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *select = static_cast<select::Select *>(entity);
ListEntitiesSelectResponse msg;
msg.options = &select->traits.get_options();
return fill_and_encode_entity_info(select, msg, ListEntitiesSelectResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(select, msg, ListEntitiesSelectResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::select_command(const SelectCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(select::Select, select, select)
@@ -882,11 +928,13 @@ void APIConnection::select_command(const SelectCommandRequest &msg) {
#endif
#ifdef USE_BUTTON
uint16_t APIConnection::try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *button = static_cast<button::Button *>(entity);
ListEntitiesButtonResponse msg;
msg.device_class = button->get_device_class_ref();
return fill_and_encode_entity_info(button, msg, ListEntitiesButtonResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(button, msg, ListEntitiesButtonResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void esphome::api::APIConnection::button_command(const ButtonCommandRequest &msg) {
ENTITY_COMMAND_GET(button::Button, button, button)
@@ -899,20 +947,23 @@ bool APIConnection::send_lock_state(lock::Lock *a_lock) {
return this->send_message_smart_(a_lock, LockStateResponse::MESSAGE_TYPE, LockStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *a_lock = static_cast<lock::Lock *>(entity);
LockStateResponse resp;
resp.state = static_cast<enums::LockState>(a_lock->state);
return fill_and_encode_entity_state(a_lock, resp, LockStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(a_lock, resp, LockStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *a_lock = static_cast<lock::Lock *>(entity);
ListEntitiesLockResponse msg;
msg.assumed_state = a_lock->traits.get_assumed_state();
msg.supports_open = a_lock->traits.get_supports_open();
msg.requires_code = a_lock->traits.get_requires_code();
return fill_and_encode_entity_info(a_lock, msg, ListEntitiesLockResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(a_lock, msg, ListEntitiesLockResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::lock_command(const LockCommandRequest &msg) {
ENTITY_COMMAND_GET(lock::Lock, a_lock, lock)
@@ -935,14 +986,16 @@ void APIConnection::lock_command(const LockCommandRequest &msg) {
bool APIConnection::send_valve_state(valve::Valve *valve) {
return this->send_message_smart_(valve, ValveStateResponse::MESSAGE_TYPE, ValveStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *valve = static_cast<valve::Valve *>(entity);
ValveStateResponse resp;
resp.position = valve->position;
resp.current_operation = static_cast<enums::ValveOperation>(valve->current_operation);
return fill_and_encode_entity_state(valve, resp, ValveStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(valve, resp, ValveStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *valve = static_cast<valve::Valve *>(entity);
ListEntitiesValveResponse msg;
auto traits = valve->get_traits();
@@ -950,7 +1003,8 @@ uint16_t APIConnection::try_send_valve_info(EntityBase *entity, APIConnection *c
msg.assumed_state = traits.get_is_assumed_state();
msg.supports_position = traits.get_supports_position();
msg.supports_stop = traits.get_supports_stop();
return fill_and_encode_entity_info(valve, msg, ListEntitiesValveResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(valve, msg, ListEntitiesValveResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::valve_command(const ValveCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(valve::Valve, valve, valve)
@@ -967,7 +1021,8 @@ bool APIConnection::send_media_player_state(media_player::MediaPlayer *media_pla
return this->send_message_smart_(media_player, MediaPlayerStateResponse::MESSAGE_TYPE,
MediaPlayerStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *media_player = static_cast<media_player::MediaPlayer *>(entity);
MediaPlayerStateResponse resp;
media_player::MediaPlayerState report_state = media_player->state == media_player::MEDIA_PLAYER_STATE_ANNOUNCING
@@ -976,9 +1031,11 @@ uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConne
resp.state = static_cast<enums::MediaPlayerState>(report_state);
resp.volume = media_player->volume;
resp.muted = media_player->is_muted();
return fill_and_encode_entity_state(media_player, resp, MediaPlayerStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(media_player, resp, MediaPlayerStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
uint16_t APIConnection::try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *media_player = static_cast<media_player::MediaPlayer *>(entity);
ListEntitiesMediaPlayerResponse msg;
auto traits = media_player->get_traits();
@@ -994,7 +1051,7 @@ uint16_t APIConnection::try_send_media_player_info(EntityBase *entity, APIConnec
media_format.sample_bytes = supported_format.sample_bytes;
}
return fill_and_encode_entity_info(media_player, msg, ListEntitiesMediaPlayerResponse::MESSAGE_TYPE, conn,
remaining_size);
remaining_size, is_single);
}
void APIConnection::media_player_command(const MediaPlayerCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(media_player::MediaPlayer, media_player, media_player)
@@ -1035,7 +1092,7 @@ void APIConnection::try_send_camera_image_() {
msg.device_id = camera::Camera::instance()->get_device_id();
#endif
if (!this->send_message_impl(msg, CameraImageResponse::MESSAGE_TYPE)) {
if (!this->send_message_(msg, CameraImageResponse::MESSAGE_TYPE)) {
return; // Send failed, try again later
}
this->image_reader_->consume_data(to_send);
@@ -1058,10 +1115,12 @@ void APIConnection::set_camera_state(std::shared_ptr<camera::CameraImage> image)
this->try_send_camera_image_();
}
}
uint16_t APIConnection::try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *camera = static_cast<camera::Camera *>(entity);
ListEntitiesCameraResponse msg;
return fill_and_encode_entity_info(camera, msg, ListEntitiesCameraResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(camera, msg, ListEntitiesCameraResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::camera_image(const CameraImageRequest &msg) {
if (camera::Camera::instance() == nullptr)
@@ -1095,7 +1154,7 @@ void APIConnection::on_get_time_response(const GetTimeResponse &value) {
void APIConnection::subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) {
bluetooth_proxy::global_bluetooth_proxy->subscribe_api_connection(this, msg.flags);
}
void APIConnection::unsubscribe_bluetooth_le_advertisements() {
void APIConnection::unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) {
bluetooth_proxy::global_bluetooth_proxy->unsubscribe_api_connection(this);
}
void APIConnection::bluetooth_device_request(const BluetoothDeviceRequest &msg) {
@@ -1121,7 +1180,8 @@ void APIConnection::bluetooth_gatt_notify(const BluetoothGATTNotifyRequest &msg)
bluetooth_proxy::global_bluetooth_proxy->bluetooth_gatt_notify(msg);
}
bool APIConnection::send_subscribe_bluetooth_connections_free_response() {
bool APIConnection::send_subscribe_bluetooth_connections_free_response(
const SubscribeBluetoothConnectionsFreeRequest &msg) {
bluetooth_proxy::global_bluetooth_proxy->send_connections_free(this);
return true;
}
@@ -1245,22 +1305,22 @@ bool APIConnection::send_alarm_control_panel_state(alarm_control_panel::AlarmCon
AlarmControlPanelStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn,
uint32_t remaining_size) {
uint32_t remaining_size, bool is_single) {
auto *a_alarm_control_panel = static_cast<alarm_control_panel::AlarmControlPanel *>(entity);
AlarmControlPanelStateResponse resp;
resp.state = static_cast<enums::AlarmControlPanelState>(a_alarm_control_panel->get_state());
return fill_and_encode_entity_state(a_alarm_control_panel, resp, AlarmControlPanelStateResponse::MESSAGE_TYPE, conn,
remaining_size);
remaining_size, is_single);
}
uint16_t APIConnection::try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn,
uint32_t remaining_size) {
uint32_t remaining_size, bool is_single) {
auto *a_alarm_control_panel = static_cast<alarm_control_panel::AlarmControlPanel *>(entity);
ListEntitiesAlarmControlPanelResponse msg;
msg.supported_features = a_alarm_control_panel->get_supported_features();
msg.requires_code = a_alarm_control_panel->get_requires_code();
msg.requires_code_to_arm = a_alarm_control_panel->get_requires_code_to_arm();
return fill_and_encode_entity_info(a_alarm_control_panel, msg, ListEntitiesAlarmControlPanelResponse::MESSAGE_TYPE,
conn, remaining_size);
conn, remaining_size, is_single);
}
void APIConnection::alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(alarm_control_panel::AlarmControlPanel, a_alarm_control_panel, alarm_control_panel)
@@ -1297,7 +1357,8 @@ bool APIConnection::send_water_heater_state(water_heater::WaterHeater *water_hea
return this->send_message_smart_(water_heater, WaterHeaterStateResponse::MESSAGE_TYPE,
WaterHeaterStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *wh = static_cast<water_heater::WaterHeater *>(entity);
WaterHeaterStateResponse resp;
resp.mode = static_cast<enums::WaterHeaterMode>(wh->get_mode());
@@ -1308,9 +1369,10 @@ uint16_t APIConnection::try_send_water_heater_state(EntityBase *entity, APIConne
resp.state = wh->get_state();
resp.key = wh->get_object_id_hash();
return encode_message_to_buffer(resp, WaterHeaterStateResponse::MESSAGE_TYPE, conn, remaining_size);
return encode_message_to_buffer(resp, WaterHeaterStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *wh = static_cast<water_heater::WaterHeater *>(entity);
ListEntitiesWaterHeaterResponse msg;
auto traits = wh->get_traits();
@@ -1319,10 +1381,11 @@ uint16_t APIConnection::try_send_water_heater_info(EntityBase *entity, APIConnec
msg.target_temperature_step = traits.get_target_temperature_step();
msg.supported_modes = &traits.get_supported_modes();
msg.supported_features = traits.get_feature_flags();
return fill_and_encode_entity_info(wh, msg, ListEntitiesWaterHeaterResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(wh, msg, ListEntitiesWaterHeaterResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::water_heater_command(const WaterHeaterCommandRequest &msg) {
void APIConnection::on_water_heater_command_request(const WaterHeaterCommandRequest &msg) {
ENTITY_COMMAND_MAKE_CALL(water_heater::WaterHeater, water_heater, water_heater)
if (msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_MODE)
call.set_mode(static_cast<water_heater::WaterHeaterMode>(msg.mode));
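The water-heater command handler above gates each optional field on a `has_fields` bitmask before applying it to the call. A small illustration of that presence-bitmask idiom; the enum names and values below are invented for the example, the real flags live in the generated API protos:

```cpp
#include <cstdint>
#include <iostream>

// Invented presence flags for the example.
enum CommandHasFields : uint32_t {
  COMMAND_HAS_MODE = 1 << 0,
  COMMAND_HAS_TARGET_TEMPERATURE = 1 << 1,
};

struct CommandRequest {
  uint32_t has_fields = 0;
  int mode = 0;
  float target_temperature = 0.0f;
};

int main() {
  CommandRequest msg;
  msg.has_fields = COMMAND_HAS_TARGET_TEMPERATURE;
  msg.target_temperature = 55.0f;

  // Apply only the fields the client actually set.
  if (msg.has_fields & COMMAND_HAS_MODE)
    std::cout << "set mode " << msg.mode << "\n";
  if (msg.has_fields & COMMAND_HAS_TARGET_TEMPERATURE)
    std::cout << "set target temperature " << msg.target_temperature << " C\n";
}
```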
@@ -1348,18 +1411,20 @@ void APIConnection::send_event(event::Event *event) {
event->get_last_event_type_index());
}
uint16_t APIConnection::try_send_event_response(event::Event *event, StringRef event_type, APIConnection *conn,
uint32_t remaining_size) {
uint32_t remaining_size, bool is_single) {
EventResponse resp;
resp.event_type = event_type;
return fill_and_encode_entity_state(event, resp, EventResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(event, resp, EventResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *event = static_cast<event::Event *>(entity);
ListEntitiesEventResponse msg;
msg.device_class = event->get_device_class_ref();
msg.event_types = &event->get_event_types();
return fill_and_encode_entity_info(event, msg, ListEntitiesEventResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(event, msg, ListEntitiesEventResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
#endif
@@ -1382,11 +1447,13 @@ void APIConnection::send_infrared_rf_receive_event(const InfraredRFReceiveEvent
#endif
#ifdef USE_INFRARED
uint16_t APIConnection::try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *infrared = static_cast<infrared::Infrared *>(entity);
ListEntitiesInfraredResponse msg;
msg.capabilities = infrared->get_capability_flags();
return fill_and_encode_entity_info(infrared, msg, ListEntitiesInfraredResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(infrared, msg, ListEntitiesInfraredResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
#endif
@@ -1394,7 +1461,8 @@ uint16_t APIConnection::try_send_infrared_info(EntityBase *entity, APIConnection
bool APIConnection::send_update_state(update::UpdateEntity *update) {
return this->send_message_smart_(update, UpdateStateResponse::MESSAGE_TYPE, UpdateStateResponse::ESTIMATED_SIZE);
}
uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *update = static_cast<update::UpdateEntity *>(entity);
UpdateStateResponse resp;
resp.missing_state = !update->has_state();
@@ -1410,13 +1478,15 @@ uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection
resp.release_summary = StringRef(update->update_info.summary);
resp.release_url = StringRef(update->update_info.release_url);
}
return fill_and_encode_entity_state(update, resp, UpdateStateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_state(update, resp, UpdateStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
auto *update = static_cast<update::UpdateEntity *>(entity);
ListEntitiesUpdateResponse msg;
msg.device_class = update->get_device_class_ref();
return fill_and_encode_entity_info(update, msg, ListEntitiesUpdateResponse::MESSAGE_TYPE, conn, remaining_size);
return fill_and_encode_entity_info(update, msg, ListEntitiesUpdateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
}
void APIConnection::update_command(const UpdateCommandRequest &msg) {
ENTITY_COMMAND_GET(update::UpdateEntity, update, update)
@@ -1442,7 +1512,7 @@ bool APIConnection::try_send_log_message(int level, const char *tag, const char
SubscribeLogsResponse msg;
msg.level = static_cast<enums::LogLevel>(level);
msg.set_message(reinterpret_cast<const uint8_t *>(line), message_len);
return this->send_message_impl(msg, SubscribeLogsResponse::MESSAGE_TYPE);
return this->send_message_(msg, SubscribeLogsResponse::MESSAGE_TYPE);
}
void APIConnection::complete_authentication_() {
@@ -1490,12 +1560,12 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {
return this->send_message(resp, HelloResponse::MESSAGE_TYPE);
}
bool APIConnection::send_ping_response() {
bool APIConnection::send_ping_response(const PingRequest &msg) {
PingResponse resp;
return this->send_message(resp, PingResponse::MESSAGE_TYPE);
}
bool APIConnection::send_device_info_response() {
bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {
DeviceInfoResponse resp{};
resp.name = StringRef(App.get_name());
resp.friendly_name = StringRef(App.get_friendly_name());
@@ -1645,7 +1715,7 @@ void APIConnection::on_home_assistant_state_response(const HomeAssistantStateRes
// HA state max length is 255 characters, but attributes can be much longer
// Use stack buffer for common case (states), heap fallback for large attributes
size_t state_len = msg.state.size();
SmallBufferWithHeapFallback<MAX_STATE_LEN + 1> state_buf_alloc(state_len + 1);
SmallBufferWithHeapFallback<256> state_buf_alloc(state_len + 1);
char *state_buf = reinterpret_cast<char *>(state_buf_alloc.get());
if (state_len > 0) {
memcpy(state_buf, msg.state.c_str(), state_len);
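The `SmallBufferWithHeapFallback` line above is a small-size optimization: keep a fixed stack buffer for the common short case (HA states) and fall back to a heap allocation only when the payload (large attributes) does not fit. A minimal standalone sketch of that idea; this is not the ESPHome class, just an illustration of the pattern:

```cpp
#include <cstddef>
#include <cstring>
#include <iostream>
#include <memory>

// Illustrative small-buffer-with-heap-fallback: N bytes live on the stack,
// anything larger is allocated once on the heap.
template<size_t N> class SmallBuffer {
 public:
  explicit SmallBuffer(size_t size) {
    if (size > N) {
      heap_ = std::make_unique<unsigned char[]>(size);
      ptr_ = heap_.get();
    } else {
      ptr_ = stack_;
    }
  }
  unsigned char *get() { return ptr_; }

 private:
  unsigned char stack_[N];
  std::unique_ptr<unsigned char[]> heap_;
  unsigned char *ptr_;
};

int main() {
  const char *state = "on";                      // typical short HA state
  SmallBuffer<256> buf(std::strlen(state) + 1);  // stays on the stack
  std::memcpy(buf.get(), state, std::strlen(state) + 1);
  std::cout << reinterpret_cast<char *>(buf.get()) << "\n";
}
```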
@@ -1745,7 +1815,9 @@ bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryption
}
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
void APIConnection::subscribe_home_assistant_states() { state_subs_at_ = 0; }
void APIConnection::subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) {
state_subs_at_ = 0;
}
#endif
bool APIConnection::try_to_clear_buffer(bool log_out_of_space) {
if (this->flags_.remove)
@@ -1765,14 +1837,6 @@ bool APIConnection::try_to_clear_buffer(bool log_out_of_space) {
}
return false;
}
bool APIConnection::send_message_impl(const ProtoMessage &msg, uint8_t message_type) {
ProtoSize size;
msg.calculate_size(size);
std::vector<uint8_t> &shared_buf = this->parent_->get_shared_buffer_ref();
this->prepare_first_message_buffer(shared_buf, size.get_size());
msg.encode({&shared_buf});
return this->send_buffer({&shared_buf}, message_type);
}
bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
const bool is_log_message = (message_type == SubscribeLogsResponse::MESSAGE_TYPE);
@@ -1833,23 +1897,6 @@ void APIConnection::DeferredBatch::add_item_front(EntityBase *entity, uint8_t me
}
}
bool APIConnection::send_message_smart_(EntityBase *entity, uint8_t message_type, uint8_t estimated_size,
uint8_t aux_data_index) {
if (this->should_send_immediately_(message_type) && this->helper_->can_write_without_blocking()) {
auto &shared_buf = this->parent_->get_shared_buffer_ref();
this->prepare_first_message_buffer(shared_buf, estimated_size);
DeferredBatch::BatchItem item{entity, message_type, estimated_size, aux_data_index};
if (this->dispatch_message_(item, MAX_BATCH_PACKET_SIZE, true) &&
this->send_buffer(ProtoWriteBuffer{&shared_buf}, message_type)) {
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_batch_item_(item);
#endif
return true;
}
}
return this->schedule_message_(entity, message_type, estimated_size, aux_data_index);
}
bool APIConnection::schedule_batch_() {
if (!this->flags_.batch_scheduled) {
this->flags_.batch_scheduled = true;
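The removed `send_message_smart_` body above chooses between encoding straight into the shared buffer when the socket can take a write immediately, and deferring the item into the batch otherwise. A condensed sketch of that "send now if cheap, otherwise queue and flush later" decision, with stand-in types:

```cpp
#include <cstdint>
#include <deque>
#include <iostream>

struct Item {
  const char *name;
  uint8_t estimated_size;
};

class Sender {
 public:
  explicit Sender(bool writable) : writable_(writable) {}

  void send_smart(const Item &item) {
    // Fast path: socket has room, encode and send immediately.
    if (writable_) {
      std::cout << "sent " << item.name << " immediately\n";
      return;
    }
    // Slow path: defer into the batch and flush everything in one write later.
    queue_.push_back(item);
    std::cout << "queued " << item.name << " for batching\n";
  }

  void flush() {
    for (const auto &item : queue_)
      std::cout << "batched send of " << item.name << "\n";
    queue_.clear();
  }

 private:
  bool writable_;
  std::deque<Item> queue_;
};

int main() {
  Sender fast(true), slow(false);
  fast.send_smart({"sensor_state", 12});
  slow.send_smart({"sensor_state", 12});
  slow.send_smart({"light_state", 20});
  slow.flush();
}
```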
@@ -1878,21 +1925,10 @@ void APIConnection::process_batch_() {
auto &shared_buf = this->parent_->get_shared_buffer_ref();
size_t num_items = this->deferred_batch_.size();
// Cache these values to avoid repeated virtual calls
const uint8_t header_padding = this->helper_->frame_header_padding();
const uint8_t footer_size = this->helper_->frame_footer_size();
// Pre-calculate exact buffer size needed based on message types
uint32_t total_estimated_size = num_items * (header_padding + footer_size);
for (size_t i = 0; i < num_items; i++) {
total_estimated_size += this->deferred_batch_[i].estimated_size;
}
this->prepare_first_message_buffer(shared_buf, header_padding, total_estimated_size);
// Fast path for single message - buffer already allocated above
// Fast path for single message - allocate exact size needed
if (num_items == 1) {
const auto &item = this->deferred_batch_[0];
// Let dispatch_message_ calculate size and encode if it fits
uint16_t payload_size = this->dispatch_message_(item, std::numeric_limits<uint16_t>::max(), true);
@@ -1915,8 +1951,30 @@ void APIConnection::process_batch_() {
// Stack-allocated array for message info
alignas(MessageInfo) char message_info_storage[MAX_MESSAGES_PER_BATCH * sizeof(MessageInfo)];
MessageInfo *message_info = reinterpret_cast<MessageInfo *>(message_info_storage);
size_t message_count = 0;
// Cache these values to avoid repeated virtual calls
const uint8_t header_padding = this->helper_->frame_header_padding();
const uint8_t footer_size = this->helper_->frame_footer_size();
// Initialize buffer and tracking variables
shared_buf.clear();
// Pre-calculate exact buffer size needed based on message types
uint32_t total_estimated_size = num_items * (header_padding + footer_size);
for (size_t i = 0; i < this->deferred_batch_.size(); i++) {
const auto &item = this->deferred_batch_[i];
total_estimated_size += item.estimated_size;
}
// Calculate total overhead for all messages
// Reserve based on estimated size (much more accurate than 24-byte worst-case)
shared_buf.reserve(total_estimated_size);
this->flags_.batch_first_message = true;
size_t items_processed = 0;
uint16_t remaining_size = std::numeric_limits<uint16_t>::max();
// Track where each message's header padding begins in the buffer
// For plaintext: this is where the 6-byte header padding starts
// For noise: this is where the 7-byte header padding starts
@@ -1928,7 +1986,7 @@ void APIConnection::process_batch_() {
const auto &item = this->deferred_batch_[i];
// Try to encode message via dispatch
// The dispatch function calculates overhead to determine if the message fits
uint16_t payload_size = this->dispatch_message_(item, remaining_size, i == 0);
uint16_t payload_size = this->dispatch_message_(item, remaining_size, false);
if (payload_size == 0) {
// Message won't fit, stop processing
@@ -1942,7 +2000,10 @@ void APIConnection::process_batch_() {
// This avoids default-constructing all MAX_MESSAGES_PER_BATCH elements
// Explicit destruction is not needed because MessageInfo is trivially destructible,
// as ensured by the static_assert in its definition.
new (&message_info[items_processed++]) MessageInfo(item.message_type, current_offset, proto_payload_size);
new (&message_info[message_count++]) MessageInfo(item.message_type, current_offset, proto_payload_size);
// Update tracking variables
items_processed++;
// After first message, set remaining size to MAX_BATCH_PACKET_SIZE to avoid fragmentation
if (items_processed == 1) {
remaining_size = MAX_BATCH_PACKET_SIZE;
@@ -1965,7 +2026,7 @@ void APIConnection::process_batch_() {
// Send all collected messages
APIError err = this->helper_->write_protobuf_messages(ProtoWriteBuffer{&shared_buf},
std::span<const MessageInfo>(message_info, items_processed));
std::span<const MessageInfo>(message_info, message_count));
if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
this->fatal_error_with_log_(LOG_STR("Batch write failed"), err);
}
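The batching hunks above pre-size the shared buffer from the per-message estimates plus fixed header/footer padding, then placement-new a `MessageInfo` record into a stack array for each message that was actually encoded. A rough sketch of that reservation arithmetic and placement-new bookkeeping, with made-up sizes and types:

```cpp
#include <cstdint>
#include <iostream>
#include <new>
#include <type_traits>
#include <utility>
#include <vector>

struct MessageInfo {
  uint8_t message_type;
  uint16_t offset;
  uint16_t payload_size;
  MessageInfo(uint8_t t, uint16_t off, uint16_t size) : message_type(t), offset(off), payload_size(size) {}
};
static_assert(std::is_trivially_destructible<MessageInfo>::value, "no explicit destruction needed");

int main() {
  constexpr size_t MAX_MESSAGES_PER_BATCH = 16;
  const uint8_t header_padding = 6;  // e.g. plaintext frame header
  const uint8_t footer_size = 0;

  // Pretend batch: (message_type, estimated payload size).
  std::vector<std::pair<uint8_t, uint16_t>> batch = {{1, 12}, {2, 30}, {5, 8}};

  // Reserve once: every message pays header + footer overhead plus its estimate.
  uint32_t total_estimated = batch.size() * (header_padding + footer_size);
  for (const auto &item : batch)
    total_estimated += item.second;
  std::vector<uint8_t> shared_buf;
  shared_buf.reserve(total_estimated);

  alignas(MessageInfo) char storage[MAX_MESSAGES_PER_BATCH * sizeof(MessageInfo)];
  auto *infos = reinterpret_cast<MessageInfo *>(storage);
  size_t count = 0;

  for (const auto &item : batch) {
    uint16_t offset = static_cast<uint16_t>(shared_buf.size());
    shared_buf.resize(shared_buf.size() + header_padding);     // room for the frame header
    shared_buf.resize(shared_buf.size() + item.second, 0xAA);  // pretend-encoded payload
    // Construct in place; unused slots are never default-constructed.
    new (&infos[count++]) MessageInfo(item.first, offset, item.second);
  }

  std::cout << "buffer bytes: " << shared_buf.size() << ", messages: " << count << "\n";
}
```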
@@ -1994,8 +2055,7 @@ void APIConnection::process_batch_() {
// Dispatch message encoding based on message_type
// Switch assigns function pointer, single call site for smaller code size
uint16_t APIConnection::dispatch_message_(const DeferredBatch::BatchItem &item, uint32_t remaining_size,
bool batch_first) {
this->flags_.batch_first_message = batch_first;
bool is_single) {
#ifdef USE_EVENT
// Events need aux_data_index to look up event type from entity
if (item.message_type == EventResponse::MESSAGE_TYPE) {
@@ -2004,7 +2064,7 @@ uint16_t APIConnection::dispatch_message_(const DeferredBatch::BatchItem &item,
return 0;
auto *event = static_cast<event::Event *>(item.entity);
return try_send_event_response(event, StringRef::from_maybe_nullptr(event->get_event_type(item.aux_data_index)),
this, remaining_size);
this, remaining_size, is_single);
}
#endif
@@ -2114,22 +2174,25 @@ uint16_t APIConnection::dispatch_message_(const DeferredBatch::BatchItem &item,
#undef CASE_STATE_INFO
#undef CASE_INFO_ONLY
return func(item.entity, this, remaining_size);
return func(item.entity, this, remaining_size, is_single);
}
uint16_t APIConnection::try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
ListEntitiesDoneResponse resp;
return encode_message_to_buffer(resp, ListEntitiesDoneResponse::MESSAGE_TYPE, conn, remaining_size);
return encode_message_to_buffer(resp, ListEntitiesDoneResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
DisconnectRequest req;
return encode_message_to_buffer(req, DisconnectRequest::MESSAGE_TYPE, conn, remaining_size);
return encode_message_to_buffer(req, DisconnectRequest::MESSAGE_TYPE, conn, remaining_size, is_single);
}
uint16_t APIConnection::try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
uint16_t APIConnection::try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single) {
PingRequest req;
return encode_message_to_buffer(req, PingRequest::MESSAGE_TYPE, conn, remaining_size);
return encode_message_to_buffer(req, PingRequest::MESSAGE_TYPE, conn, remaining_size, is_single);
}
#ifdef USE_API_HOMEASSISTANT_STATES

View File

@@ -127,7 +127,7 @@ class APIConnection final : public APIServerConnection {
#endif // USE_API_HOMEASSISTANT_SERVICES
#ifdef USE_BLUETOOTH_PROXY
void subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) override;
void unsubscribe_bluetooth_le_advertisements() override;
void unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) override;
void bluetooth_device_request(const BluetoothDeviceRequest &msg) override;
void bluetooth_gatt_read(const BluetoothGATTReadRequest &msg) override;
@@ -136,7 +136,7 @@ class APIConnection final : public APIServerConnection {
void bluetooth_gatt_write_descriptor(const BluetoothGATTWriteDescriptorRequest &msg) override;
void bluetooth_gatt_get_services(const BluetoothGATTGetServicesRequest &msg) override;
void bluetooth_gatt_notify(const BluetoothGATTNotifyRequest &msg) override;
bool send_subscribe_bluetooth_connections_free_response() override;
bool send_subscribe_bluetooth_connections_free_response(const SubscribeBluetoothConnectionsFreeRequest &msg) override;
void bluetooth_scanner_set_mode(const BluetoothScannerSetModeRequest &msg) override;
#endif
@@ -170,7 +170,7 @@ class APIConnection final : public APIServerConnection {
#ifdef USE_WATER_HEATER
bool send_water_heater_state(water_heater::WaterHeater *water_heater);
void water_heater_command(const WaterHeaterCommandRequest &msg) override;
void on_water_heater_command_request(const WaterHeaterCommandRequest &msg) override;
#endif
#ifdef USE_IR_RF
@@ -187,8 +187,8 @@ class APIConnection final : public APIServerConnection {
void update_command(const UpdateCommandRequest &msg) override;
#endif
void on_disconnect_response() override;
void on_ping_response() override {
void on_disconnect_response(const DisconnectResponse &value) override;
void on_ping_response(const PingResponse &value) override {
// we initiated ping
this->flags_.sent_ping = false;
}
@@ -199,11 +199,11 @@ class APIConnection final : public APIServerConnection {
void on_get_time_response(const GetTimeResponse &value) override;
#endif
bool send_hello_response(const HelloRequest &msg) override;
bool send_disconnect_response() override;
bool send_ping_response() override;
bool send_device_info_response() override;
void list_entities() override { this->begin_iterator_(ActiveIterator::LIST_ENTITIES); }
void subscribe_states() override {
bool send_disconnect_response(const DisconnectRequest &msg) override;
bool send_ping_response(const PingRequest &msg) override;
bool send_device_info_response(const DeviceInfoRequest &msg) override;
void list_entities(const ListEntitiesRequest &msg) override { this->begin_iterator_(ActiveIterator::LIST_ENTITIES); }
void subscribe_states(const SubscribeStatesRequest &msg) override {
this->flags_.state_subscription = true;
// Start initial state iterator only if no iterator is active
// If list_entities is running, we'll start initial_state when it completes
@@ -217,10 +217,12 @@ class APIConnection final : public APIServerConnection {
App.schedule_dump_config();
}
#ifdef USE_API_HOMEASSISTANT_SERVICES
void subscribe_homeassistant_services() override { this->flags_.service_call_subscription = true; }
void subscribe_homeassistant_services(const SubscribeHomeassistantServicesRequest &msg) override {
this->flags_.service_call_subscription = true;
}
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
void subscribe_home_assistant_states() override;
void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) override;
#endif
#ifdef USE_API_USER_DEFINED_ACTIONS
void execute_service(const ExecuteServiceRequest &msg) override;
@@ -253,7 +255,17 @@ class APIConnection final : public APIServerConnection {
void on_fatal_error() override;
void on_no_setup_connection() override;
bool send_message_impl(const ProtoMessage &msg, uint8_t message_type) override;
ProtoWriteBuffer create_buffer(uint32_t reserve_size) override {
// FIXME: ensure no recursive writes can happen
// Get header padding size - used for both reserve and insert
uint8_t header_padding = this->helper_->frame_header_padding();
// Get shared buffer from parent server
std::vector<uint8_t> &shared_buf = this->parent_->get_shared_buffer_ref();
this->prepare_first_message_buffer(shared_buf, header_padding,
reserve_size + header_padding + this->helper_->frame_footer_size());
return {&shared_buf};
}
void prepare_first_message_buffer(std::vector<uint8_t> &shared_buf, size_t header_padding, size_t total_size) {
shared_buf.clear();
@@ -265,13 +277,6 @@ class APIConnection final : public APIServerConnection {
shared_buf.resize(header_padding);
}
// Convenience overload - computes frame overhead internally
void prepare_first_message_buffer(std::vector<uint8_t> &shared_buf, size_t payload_size) {
const uint8_t header_padding = this->helper_->frame_header_padding();
const uint8_t footer_size = this->helper_->frame_footer_size();
this->prepare_first_message_buffer(shared_buf, header_padding, payload_size + header_padding + footer_size);
}
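For context, create_buffer() clears the shared vector and resizes it to the frame header padding before handing it to the encoder, so the encoded payload lands directly after the space reserved for the frame header and the footer can be appended afterwards without moving bytes. A minimal sketch of that shape; the explicit reserve is an assumption about the elided lines, and the sizes are illustrative:

```cpp
#include <cstdint>
#include <vector>

// Layout in one contiguous vector: [frame header padding][payload][frame footer],
// so the frame can be finalized in place without copying the payload.
void prepare(std::vector<uint8_t> &buf, size_t header_padding, size_t payload_size, size_t footer_size) {
  buf.clear();
  buf.reserve(header_padding + payload_size + footer_size);  // avoid reallocation while encoding
  buf.resize(header_padding);  // encoder appends payload starting at buf[header_padding]
}
```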
bool try_to_clear_buffer(bool log_out_of_space);
bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) override;
@@ -293,21 +298,21 @@ class APIConnection final : public APIServerConnection {
// Non-template helper to encode any ProtoMessage
static uint16_t encode_message_to_buffer(ProtoMessage &msg, uint8_t message_type, APIConnection *conn,
uint32_t remaining_size);
uint32_t remaining_size, bool is_single);
// Helper to fill entity state base and encode message
static uint16_t fill_and_encode_entity_state(EntityBase *entity, StateResponseProtoMessage &msg, uint8_t message_type,
APIConnection *conn, uint32_t remaining_size) {
APIConnection *conn, uint32_t remaining_size, bool is_single) {
msg.key = entity->get_object_id_hash();
#ifdef USE_DEVICES
msg.device_id = entity->get_device_id();
#endif
return encode_message_to_buffer(msg, message_type, conn, remaining_size);
return encode_message_to_buffer(msg, message_type, conn, remaining_size, is_single);
}
// Helper to fill entity info base and encode message
static uint16_t fill_and_encode_entity_info(EntityBase *entity, InfoResponseProtoMessage &msg, uint8_t message_type,
APIConnection *conn, uint32_t remaining_size) {
APIConnection *conn, uint32_t remaining_size, bool is_single) {
// Set common fields that are shared by all entity types
msg.key = entity->get_object_id_hash();
@@ -334,7 +339,7 @@ class APIConnection final : public APIServerConnection {
#ifdef USE_DEVICES
msg.device_id = entity->get_device_id();
#endif
return encode_message_to_buffer(msg, message_type, conn, remaining_size);
return encode_message_to_buffer(msg, message_type, conn, remaining_size, is_single);
}
#ifdef USE_VOICE_ASSISTANT
@@ -365,108 +370,141 @@ class APIConnection final : public APIServerConnection {
}
#ifdef USE_BINARY_SENSOR
static uint16_t try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_COVER
static uint16_t try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_FAN
static uint16_t try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
static uint16_t try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_LIGHT
static uint16_t try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_SENSOR
static uint16_t try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_SWITCH
static uint16_t try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_TEXT_SENSOR
static uint16_t try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_CLIMATE
static uint16_t try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_NUMBER
static uint16_t try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_DATETIME_DATE
static uint16_t try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
static uint16_t try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_DATETIME_TIME
static uint16_t try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
static uint16_t try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_DATETIME_DATETIME
static uint16_t try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_TEXT
static uint16_t try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
static uint16_t try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_SELECT
static uint16_t try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_BUTTON
static uint16_t try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_LOCK
static uint16_t try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
static uint16_t try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_VALVE
static uint16_t try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_MEDIA_PLAYER
static uint16_t try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_ALARM_CONTROL_PANEL
static uint16_t try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_WATER_HEATER
static uint16_t try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_INFRARED
static uint16_t try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_EVENT
static uint16_t try_send_event_response(event::Event *event, StringRef event_type, APIConnection *conn,
uint32_t remaining_size);
static uint16_t try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
uint32_t remaining_size, bool is_single);
static uint16_t try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
#endif
#ifdef USE_UPDATE
static uint16_t try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
static uint16_t try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
#ifdef USE_CAMERA
static uint16_t try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
#endif
// Method for ListEntitiesDone batching
static uint16_t try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
// Method for DisconnectRequest batching
static uint16_t try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
// Batch message method for ping requests
static uint16_t try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
static uint16_t try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
bool is_single);
// === Optimal member ordering for 32-bit systems ===
@@ -501,7 +539,7 @@ class APIConnection final : public APIServerConnection {
#endif
// Function pointer type for message encoding
using MessageCreatorPtr = uint16_t (*)(EntityBase *, APIConnection *, uint32_t remaining_size);
using MessageCreatorPtr = uint16_t (*)(EntityBase *, APIConnection *, uint32_t remaining_size, bool is_single);
// Generic batching mechanism for both state updates and entity info
struct DeferredBatch {
@@ -614,7 +652,7 @@ class APIConnection final : public APIServerConnection {
// Dispatch message encoding based on message_type - replaces function pointer storage
// Switch assigns pointer, single call site for smaller code size
uint16_t dispatch_message_(const DeferredBatch::BatchItem &item, uint32_t remaining_size, bool batch_first);
uint16_t dispatch_message_(const DeferredBatch::BatchItem &item, uint32_t remaining_size, bool is_single);
#ifdef HAS_PROTO_MESSAGE_DUMP
void log_batch_item_(const DeferredBatch::BatchItem &item) {
@@ -646,7 +684,19 @@ class APIConnection final : public APIServerConnection {
// Tries immediate send if should_send_immediately_() returns true and buffer has space
// Falls back to batching if immediate send fails or isn't applicable
bool send_message_smart_(EntityBase *entity, uint8_t message_type, uint8_t estimated_size,
uint8_t aux_data_index = DeferredBatch::AUX_DATA_UNUSED);
uint8_t aux_data_index = DeferredBatch::AUX_DATA_UNUSED) {
if (this->should_send_immediately_(message_type) && this->helper_->can_write_without_blocking()) {
DeferredBatch::BatchItem item{entity, message_type, estimated_size, aux_data_index};
if (this->dispatch_message_(item, MAX_BATCH_PACKET_SIZE, true) &&
this->send_buffer(ProtoWriteBuffer{&this->parent_->get_shared_buffer_ref()}, message_type)) {
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_batch_item_(item);
#endif
return true;
}
}
return this->schedule_message_(entity, message_type, estimated_size, aux_data_index);
}
// Helper function to schedule a deferred message with known message type
bool schedule_message_(EntityBase *entity, uint8_t message_type, uint8_t estimated_size,

View File

@@ -3,7 +3,6 @@
#ifdef USE_API_NOISE
#include "api_connection.h" // For ClientInfo struct
#include "esphome/core/application.h"
#include "esphome/core/entity_base.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
@@ -257,30 +256,28 @@ APIError APINoiseFrameHelper::state_action_() {
}
if (state_ == State::SERVER_HELLO) {
// send server hello
constexpr size_t mac_len = 13; // 12 hex chars + null terminator
const std::string &name = App.get_name();
char mac[MAC_ADDRESS_BUFFER_SIZE];
char mac[mac_len];
get_mac_address_into_buffer(mac);
// Calculate positions and sizes
size_t name_len = name.size() + 1; // including null terminator
size_t name_offset = 1;
size_t mac_offset = name_offset + name_len;
size_t total_size = 1 + name_len + MAC_ADDRESS_BUFFER_SIZE;
size_t total_size = 1 + name_len + mac_len;
// 1 (proto) + name (max ESPHOME_DEVICE_NAME_MAX_LEN) + 1 (name null)
// + mac (MAC_ADDRESS_BUFFER_SIZE - 1) + 1 (mac null)
constexpr size_t max_msg_size = 1 + ESPHOME_DEVICE_NAME_MAX_LEN + 1 + MAC_ADDRESS_BUFFER_SIZE;
uint8_t msg[max_msg_size];
auto msg = std::make_unique<uint8_t[]>(total_size);
// chosen proto
msg[0] = 0x01;
// node name, terminated by null byte
std::memcpy(msg + name_offset, name.c_str(), name_len);
std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
// node mac, terminated by null byte
std::memcpy(msg + mac_offset, mac, MAC_ADDRESS_BUFFER_SIZE);
std::memcpy(msg.get() + mac_offset, mac, mac_len);
aerr = write_frame_(msg, total_size);
aerr = write_frame_(msg.get(), total_size);
if (aerr != APIError::OK)
return aerr;
@@ -356,32 +353,35 @@ APIError APINoiseFrameHelper::state_action_() {
return APIError::OK;
}
void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reason) {
// Max reject message: "Bad handshake packet len" (24) + 1 (failure byte) = 25 bytes
uint8_t data[32];
data[0] = 0x01; // failure
#ifdef USE_STORE_LOG_STR_IN_FLASH
// On ESP8266 with flash strings, we need to use PROGMEM-aware functions
size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
size_t data_size = reason_len + 1;
auto data = std::make_unique<uint8_t[]>(data_size);
data[0] = 0x01; // failure
// Copy error message from PROGMEM
if (reason_len > 0) {
memcpy_P(data + 1, reinterpret_cast<PGM_P>(reason), reason_len);
memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
}
#else
// Normal memory access
const char *reason_str = LOG_STR_ARG(reason);
size_t reason_len = strlen(reason_str);
size_t data_size = reason_len + 1;
auto data = std::make_unique<uint8_t[]>(data_size);
data[0] = 0x01; // failure
// Copy error message in bulk
if (reason_len > 0) {
// NOLINTNEXTLINE(bugprone-not-null-terminated-result) - binary protocol, not a C string
std::memcpy(data + 1, reason_str, reason_len);
std::memcpy(data.get() + 1, reason_str, reason_len);
}
#endif
size_t data_size = reason_len + 1;
// temporarily remove failed state
auto orig_state = state_;
state_ = State::EXPLICIT_REJECT;
write_frame_(data, data_size);
write_frame_(data.get(), data_size);
state_ = orig_state;
}
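For reference, the ServerHello assembled earlier in this file is a flat byte buffer: one protocol byte (0x01), then the node name and the 12-hex-character MAC string, each NUL-terminated. A small self-contained sketch of that layout; the buffer sizing here is illustrative, not the helper's actual constants:

```cpp
#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

// Layout: [0x01 proto][name bytes + '\0'][mac chars + '\0']
std::vector<uint8_t> build_server_hello(const std::string &name, const char *mac /* e.g. "aabbccddeeff" */) {
  size_t name_len = name.size() + 1;      // include terminating NUL
  size_t mac_len = std::strlen(mac) + 1;  // include terminating NUL
  std::vector<uint8_t> msg(1 + name_len + mac_len);
  msg[0] = 0x01;  // chosen protocol
  std::memcpy(msg.data() + 1, name.c_str(), name_len);
  std::memcpy(msg.data() + 1 + name_len, mac, mac_len);
  return msg;
}
```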
APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {

View File

@@ -23,8 +23,15 @@ static inline void append_field_prefix(DumpBuffer &out, const char *field_name,
out.append(indent, ' ').append(field_name).append(": ");
}
static inline void append_with_newline(DumpBuffer &out, const char *str) {
out.append(str);
out.append("\n");
}
static inline void append_uint(DumpBuffer &out, uint32_t value) {
out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRIu32, value));
char buf[16];
snprintf(buf, sizeof(buf), "%" PRIu32, value);
out.append(buf);
}
// RAII helper for message dump formatting
@@ -42,23 +49,31 @@ class MessageDumpHelper {
// Helper functions to reduce code duplication in dump methods
static void dump_field(DumpBuffer &out, const char *field_name, int32_t value, int indent = 2) {
char buffer[64];
append_field_prefix(out, field_name, indent);
out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRId32 "\n", value));
snprintf(buffer, 64, "%" PRId32, value);
append_with_newline(out, buffer);
}
static void dump_field(DumpBuffer &out, const char *field_name, uint32_t value, int indent = 2) {
char buffer[64];
append_field_prefix(out, field_name, indent);
out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRIu32 "\n", value));
snprintf(buffer, 64, "%" PRIu32, value);
append_with_newline(out, buffer);
}
static void dump_field(DumpBuffer &out, const char *field_name, float value, int indent = 2) {
char buffer[64];
append_field_prefix(out, field_name, indent);
out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%g\n", value));
snprintf(buffer, 64, "%g", value);
append_with_newline(out, buffer);
}
static void dump_field(DumpBuffer &out, const char *field_name, uint64_t value, int indent = 2) {
char buffer[64];
append_field_prefix(out, field_name, indent);
out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRIu64 "\n", value));
snprintf(buffer, 64, "%" PRIu64, value);
append_with_newline(out, buffer);
}
static void dump_field(DumpBuffer &out, const char *field_name, bool value, int indent = 2) {
@@ -97,7 +112,7 @@ static void dump_bytes_field(DumpBuffer &out, const char *field_name, const uint
char hex_buf[format_hex_pretty_size(160)];
append_field_prefix(out, field_name, indent);
format_hex_pretty_to(hex_buf, data, len);
out.append(hex_buf).append("\n");
append_with_newline(out, hex_buf);
}
template<> const char *proto_enum_to_string<enums::EntityCategory>(enums::EntityCategory value) {

View File

@@ -15,9 +15,6 @@ void APIServerConnectionBase::log_receive_message_(const LogString *name, const
DumpBuffer dump_buf;
ESP_LOGVV(TAG, "%s: %s", LOG_STR_ARG(name), msg.dump_to(dump_buf));
}
void APIServerConnectionBase::log_receive_message_(const LogString *name) {
ESP_LOGVV(TAG, "%s: {}", LOG_STR_ARG(name));
}
#endif
void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) {
@@ -32,52 +29,66 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
break;
}
case DisconnectRequest::MESSAGE_TYPE: {
DisconnectRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_disconnect_request"));
this->log_receive_message_(LOG_STR("on_disconnect_request"), msg);
#endif
this->on_disconnect_request();
this->on_disconnect_request(msg);
break;
}
case DisconnectResponse::MESSAGE_TYPE: {
DisconnectResponse msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_disconnect_response"));
this->log_receive_message_(LOG_STR("on_disconnect_response"), msg);
#endif
this->on_disconnect_response();
this->on_disconnect_response(msg);
break;
}
case PingRequest::MESSAGE_TYPE: {
PingRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_ping_request"));
this->log_receive_message_(LOG_STR("on_ping_request"), msg);
#endif
this->on_ping_request();
this->on_ping_request(msg);
break;
}
case PingResponse::MESSAGE_TYPE: {
PingResponse msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_ping_response"));
this->log_receive_message_(LOG_STR("on_ping_response"), msg);
#endif
this->on_ping_response();
this->on_ping_response(msg);
break;
}
case DeviceInfoRequest::MESSAGE_TYPE: {
DeviceInfoRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_device_info_request"));
this->log_receive_message_(LOG_STR("on_device_info_request"), msg);
#endif
this->on_device_info_request();
this->on_device_info_request(msg);
break;
}
case ListEntitiesRequest::MESSAGE_TYPE: {
ListEntitiesRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_list_entities_request"));
this->log_receive_message_(LOG_STR("on_list_entities_request"), msg);
#endif
this->on_list_entities_request();
this->on_list_entities_request(msg);
break;
}
case SubscribeStatesRequest::MESSAGE_TYPE: {
SubscribeStatesRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_subscribe_states_request"));
this->log_receive_message_(LOG_STR("on_subscribe_states_request"), msg);
#endif
this->on_subscribe_states_request();
this->on_subscribe_states_request(msg);
break;
}
case SubscribeLogsRequest::MESSAGE_TYPE: {
@@ -135,10 +146,12 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
#endif
#ifdef USE_API_HOMEASSISTANT_SERVICES
case SubscribeHomeassistantServicesRequest::MESSAGE_TYPE: {
SubscribeHomeassistantServicesRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_subscribe_homeassistant_services_request"));
this->log_receive_message_(LOG_STR("on_subscribe_homeassistant_services_request"), msg);
#endif
this->on_subscribe_homeassistant_services_request();
this->on_subscribe_homeassistant_services_request(msg);
break;
}
#endif
@@ -153,10 +166,12 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
}
#ifdef USE_API_HOMEASSISTANT_STATES
case SubscribeHomeAssistantStatesRequest::MESSAGE_TYPE: {
SubscribeHomeAssistantStatesRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_subscribe_home_assistant_states_request"));
this->log_receive_message_(LOG_STR("on_subscribe_home_assistant_states_request"), msg);
#endif
this->on_subscribe_home_assistant_states_request();
this->on_subscribe_home_assistant_states_request(msg);
break;
}
#endif
@@ -360,19 +375,23 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
#endif
#ifdef USE_BLUETOOTH_PROXY
case SubscribeBluetoothConnectionsFreeRequest::MESSAGE_TYPE: {
SubscribeBluetoothConnectionsFreeRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_subscribe_bluetooth_connections_free_request"));
this->log_receive_message_(LOG_STR("on_subscribe_bluetooth_connections_free_request"), msg);
#endif
this->on_subscribe_bluetooth_connections_free_request();
this->on_subscribe_bluetooth_connections_free_request(msg);
break;
}
#endif
#ifdef USE_BLUETOOTH_PROXY
case UnsubscribeBluetoothLEAdvertisementsRequest::MESSAGE_TYPE: {
UnsubscribeBluetoothLEAdvertisementsRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
this->log_receive_message_(LOG_STR("on_unsubscribe_bluetooth_le_advertisements_request"));
this->log_receive_message_(LOG_STR("on_unsubscribe_bluetooth_le_advertisements_request"), msg);
#endif
this->on_unsubscribe_bluetooth_le_advertisements_request();
this->on_unsubscribe_bluetooth_le_advertisements_request(msg);
break;
}
#endif
@@ -628,29 +647,36 @@ void APIServerConnection::on_hello_request(const HelloRequest &msg) {
this->on_fatal_error();
}
}
void APIServerConnection::on_disconnect_request() {
if (!this->send_disconnect_response()) {
void APIServerConnection::on_disconnect_request(const DisconnectRequest &msg) {
if (!this->send_disconnect_response(msg)) {
this->on_fatal_error();
}
}
void APIServerConnection::on_ping_request() {
if (!this->send_ping_response()) {
void APIServerConnection::on_ping_request(const PingRequest &msg) {
if (!this->send_ping_response(msg)) {
this->on_fatal_error();
}
}
void APIServerConnection::on_device_info_request() {
if (!this->send_device_info_response()) {
void APIServerConnection::on_device_info_request(const DeviceInfoRequest &msg) {
if (!this->send_device_info_response(msg)) {
this->on_fatal_error();
}
}
void APIServerConnection::on_list_entities_request() { this->list_entities(); }
void APIServerConnection::on_subscribe_states_request() { this->subscribe_states(); }
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) { this->list_entities(msg); }
void APIServerConnection::on_subscribe_states_request(const SubscribeStatesRequest &msg) {
this->subscribe_states(msg);
}
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) { this->subscribe_logs(msg); }
#ifdef USE_API_HOMEASSISTANT_SERVICES
void APIServerConnection::on_subscribe_homeassistant_services_request() { this->subscribe_homeassistant_services(); }
void APIServerConnection::on_subscribe_homeassistant_services_request(
const SubscribeHomeassistantServicesRequest &msg) {
this->subscribe_homeassistant_services(msg);
}
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
void APIServerConnection::on_subscribe_home_assistant_states_request() { this->subscribe_home_assistant_states(); }
void APIServerConnection::on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) {
this->subscribe_home_assistant_states(msg);
}
#endif
#ifdef USE_API_USER_DEFINED_ACTIONS
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) { this->execute_service(msg); }
@@ -720,11 +746,6 @@ void APIServerConnection::on_update_command_request(const UpdateCommandRequest &
#ifdef USE_VALVE
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) { this->valve_command(msg); }
#endif
#ifdef USE_WATER_HEATER
void APIServerConnection::on_water_heater_command_request(const WaterHeaterCommandRequest &msg) {
this->water_heater_command(msg);
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_subscribe_bluetooth_le_advertisements_request(
const SubscribeBluetoothLEAdvertisementsRequest &msg) {
@@ -767,15 +788,17 @@ void APIServerConnection::on_bluetooth_gatt_notify_request(const BluetoothGATTNo
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_subscribe_bluetooth_connections_free_request() {
if (!this->send_subscribe_bluetooth_connections_free_response()) {
void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
const SubscribeBluetoothConnectionsFreeRequest &msg) {
if (!this->send_subscribe_bluetooth_connections_free_response(msg)) {
this->on_fatal_error();
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_unsubscribe_bluetooth_le_advertisements_request() {
this->unsubscribe_bluetooth_le_advertisements();
void APIServerConnection::on_unsubscribe_bluetooth_le_advertisements_request(
const UnsubscribeBluetoothLEAdvertisementsRequest &msg) {
this->unsubscribe_bluetooth_le_advertisements(msg);
}
#endif
#ifdef USE_BLUETOOTH_PROXY

View File

@@ -14,7 +14,6 @@ class APIServerConnectionBase : public ProtoService {
protected:
void log_send_message_(const char *name, const char *dump);
void log_receive_message_(const LogString *name, const ProtoMessage &msg);
void log_receive_message_(const LogString *name);
public:
#endif
@@ -24,20 +23,20 @@ class APIServerConnectionBase : public ProtoService {
DumpBuffer dump_buf;
this->log_send_message_(msg.message_name(), msg.dump_to(dump_buf));
#endif
return this->send_message_impl(msg, message_type);
return this->send_message_(msg, message_type);
}
virtual void on_hello_request(const HelloRequest &value){};
virtual void on_disconnect_request(){};
virtual void on_disconnect_response(){};
virtual void on_ping_request(){};
virtual void on_ping_response(){};
virtual void on_device_info_request(){};
virtual void on_disconnect_request(const DisconnectRequest &value){};
virtual void on_disconnect_response(const DisconnectResponse &value){};
virtual void on_ping_request(const PingRequest &value){};
virtual void on_ping_response(const PingResponse &value){};
virtual void on_device_info_request(const DeviceInfoRequest &value){};
virtual void on_list_entities_request(){};
virtual void on_list_entities_request(const ListEntitiesRequest &value){};
virtual void on_subscribe_states_request(){};
virtual void on_subscribe_states_request(const SubscribeStatesRequest &value){};
#ifdef USE_COVER
virtual void on_cover_command_request(const CoverCommandRequest &value){};
@@ -62,14 +61,14 @@ class APIServerConnectionBase : public ProtoService {
#endif
#ifdef USE_API_HOMEASSISTANT_SERVICES
virtual void on_subscribe_homeassistant_services_request(){};
virtual void on_subscribe_homeassistant_services_request(const SubscribeHomeassistantServicesRequest &value){};
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
virtual void on_homeassistant_action_response(const HomeassistantActionResponse &value){};
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void on_subscribe_home_assistant_states_request(){};
virtual void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &value){};
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
@@ -148,11 +147,12 @@ class APIServerConnectionBase : public ProtoService {
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void on_subscribe_bluetooth_connections_free_request(){};
virtual void on_subscribe_bluetooth_connections_free_request(const SubscribeBluetoothConnectionsFreeRequest &value){};
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void on_unsubscribe_bluetooth_le_advertisements_request(){};
virtual void on_unsubscribe_bluetooth_le_advertisements_request(
const UnsubscribeBluetoothLEAdvertisementsRequest &value){};
#endif
#ifdef USE_BLUETOOTH_PROXY
@@ -231,17 +231,17 @@ class APIServerConnectionBase : public ProtoService {
class APIServerConnection : public APIServerConnectionBase {
public:
virtual bool send_hello_response(const HelloRequest &msg) = 0;
virtual bool send_disconnect_response() = 0;
virtual bool send_ping_response() = 0;
virtual bool send_device_info_response() = 0;
virtual void list_entities() = 0;
virtual void subscribe_states() = 0;
virtual bool send_disconnect_response(const DisconnectRequest &msg) = 0;
virtual bool send_ping_response(const PingRequest &msg) = 0;
virtual bool send_device_info_response(const DeviceInfoRequest &msg) = 0;
virtual void list_entities(const ListEntitiesRequest &msg) = 0;
virtual void subscribe_states(const SubscribeStatesRequest &msg) = 0;
virtual void subscribe_logs(const SubscribeLogsRequest &msg) = 0;
#ifdef USE_API_HOMEASSISTANT_SERVICES
virtual void subscribe_homeassistant_services() = 0;
virtual void subscribe_homeassistant_services(const SubscribeHomeassistantServicesRequest &msg) = 0;
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void subscribe_home_assistant_states() = 0;
virtual void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) = 0;
#endif
#ifdef USE_API_USER_DEFINED_ACTIONS
virtual void execute_service(const ExecuteServiceRequest &msg) = 0;
@@ -303,9 +303,6 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_VALVE
virtual void valve_command(const ValveCommandRequest &msg) = 0;
#endif
#ifdef USE_WATER_HEATER
virtual void water_heater_command(const WaterHeaterCommandRequest &msg) = 0;
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) = 0;
#endif
@@ -331,10 +328,11 @@ class APIServerConnection : public APIServerConnectionBase {
virtual void bluetooth_gatt_notify(const BluetoothGATTNotifyRequest &msg) = 0;
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual bool send_subscribe_bluetooth_connections_free_response() = 0;
virtual bool send_subscribe_bluetooth_connections_free_response(
const SubscribeBluetoothConnectionsFreeRequest &msg) = 0;
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void unsubscribe_bluetooth_le_advertisements() = 0;
virtual void unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) = 0;
#endif
#ifdef USE_BLUETOOTH_PROXY
virtual void bluetooth_scanner_set_mode(const BluetoothScannerSetModeRequest &msg) = 0;
@@ -362,17 +360,17 @@ class APIServerConnection : public APIServerConnectionBase {
#endif
protected:
void on_hello_request(const HelloRequest &msg) override;
void on_disconnect_request() override;
void on_ping_request() override;
void on_device_info_request() override;
void on_list_entities_request() override;
void on_subscribe_states_request() override;
void on_disconnect_request(const DisconnectRequest &msg) override;
void on_ping_request(const PingRequest &msg) override;
void on_device_info_request(const DeviceInfoRequest &msg) override;
void on_list_entities_request(const ListEntitiesRequest &msg) override;
void on_subscribe_states_request(const SubscribeStatesRequest &msg) override;
void on_subscribe_logs_request(const SubscribeLogsRequest &msg) override;
#ifdef USE_API_HOMEASSISTANT_SERVICES
void on_subscribe_homeassistant_services_request() override;
void on_subscribe_homeassistant_services_request(const SubscribeHomeassistantServicesRequest &msg) override;
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
void on_subscribe_home_assistant_states_request() override;
void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) override;
#endif
#ifdef USE_API_USER_DEFINED_ACTIONS
void on_execute_service_request(const ExecuteServiceRequest &msg) override;
@@ -434,9 +432,6 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_VALVE
void on_valve_command_request(const ValveCommandRequest &msg) override;
#endif
#ifdef USE_WATER_HEATER
void on_water_heater_command_request(const WaterHeaterCommandRequest &msg) override;
#endif
#ifdef USE_BLUETOOTH_PROXY
void on_subscribe_bluetooth_le_advertisements_request(const SubscribeBluetoothLEAdvertisementsRequest &msg) override;
#endif
@@ -462,10 +457,11 @@ class APIServerConnection : public APIServerConnectionBase {
void on_bluetooth_gatt_notify_request(const BluetoothGATTNotifyRequest &msg) override;
#endif
#ifdef USE_BLUETOOTH_PROXY
void on_subscribe_bluetooth_connections_free_request() override;
void on_subscribe_bluetooth_connections_free_request(const SubscribeBluetoothConnectionsFreeRequest &msg) override;
#endif
#ifdef USE_BLUETOOTH_PROXY
void on_unsubscribe_bluetooth_le_advertisements_request() override;
void on_unsubscribe_bluetooth_le_advertisements_request(
const UnsubscribeBluetoothLEAdvertisementsRequest &msg) override;
#endif
#ifdef USE_BLUETOOTH_PROXY
void on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &msg) override;

View File

@@ -211,7 +211,7 @@ void APIServer::loop() {
#ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
// Fire trigger after client is removed so api.connected reflects the true state
this->client_disconnected_trigger_.trigger(client_name, client_peername);
this->client_disconnected_trigger_->trigger(client_name, client_peername);
#endif
// Don't increment client_index since we need to process the swapped element
}

View File

@@ -227,10 +227,12 @@ class APIServer : public Component,
#endif
#ifdef USE_API_CLIENT_CONNECTED_TRIGGER
Trigger<std::string, std::string> *get_client_connected_trigger() { return &this->client_connected_trigger_; }
Trigger<std::string, std::string> *get_client_connected_trigger() const { return this->client_connected_trigger_; }
#endif
#ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
Trigger<std::string, std::string> *get_client_disconnected_trigger() { return &this->client_disconnected_trigger_; }
Trigger<std::string, std::string> *get_client_disconnected_trigger() const {
return this->client_disconnected_trigger_;
}
#endif
protected:
@@ -251,10 +253,10 @@ class APIServer : public Component,
// Pointers and pointer-like types first (4 bytes each)
std::unique_ptr<socket::Socket> socket_ = nullptr;
#ifdef USE_API_CLIENT_CONNECTED_TRIGGER
Trigger<std::string, std::string> client_connected_trigger_;
Trigger<std::string, std::string> *client_connected_trigger_ = new Trigger<std::string, std::string>();
#endif
#ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
Trigger<std::string, std::string> client_disconnected_trigger_;
Trigger<std::string, std::string> *client_disconnected_trigger_ = new Trigger<std::string, std::string>();
#endif
// 4-byte aligned types

View File

@@ -136,10 +136,12 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
void set_wants_response() { this->flags_.wants_response = true; }
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
Trigger<JsonObjectConst, Ts...> *get_success_trigger_with_response() { return &this->success_trigger_with_response_; }
Trigger<JsonObjectConst, Ts...> *get_success_trigger_with_response() const {
return this->success_trigger_with_response_;
}
#endif
Trigger<Ts...> *get_success_trigger() { return &this->success_trigger_; }
Trigger<std::string, Ts...> *get_error_trigger() { return &this->error_trigger_; }
Trigger<Ts...> *get_success_trigger() const { return this->success_trigger_; }
Trigger<std::string, Ts...> *get_error_trigger() const { return this->error_trigger_; }
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
void play(const Ts &...x) override {
@@ -185,14 +187,14 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
if (response.is_success()) {
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
if (this->flags_.wants_response) {
this->success_trigger_with_response_.trigger(response.get_json(), args...);
this->success_trigger_with_response_->trigger(response.get_json(), args...);
} else
#endif
{
this->success_trigger_.trigger(args...);
this->success_trigger_->trigger(args...);
}
} else {
this->error_trigger_.trigger(response.get_error_message(), args...);
this->error_trigger_->trigger(response.get_error_message(), args...);
}
},
captured_args);
@@ -249,10 +251,10 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
TemplatableStringValue<Ts...> response_template_{""};
Trigger<JsonObjectConst, Ts...> success_trigger_with_response_;
Trigger<JsonObjectConst, Ts...> *success_trigger_with_response_ = new Trigger<JsonObjectConst, Ts...>();
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
Trigger<Ts...> success_trigger_;
Trigger<std::string, Ts...> error_trigger_;
Trigger<Ts...> *success_trigger_ = new Trigger<Ts...>();
Trigger<std::string, Ts...> *error_trigger_ = new Trigger<std::string, Ts...>();
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
struct Flags {

View File

@@ -112,12 +112,8 @@ class ProtoVarInt {
uint64_t result = buffer[0] & 0x7F;
uint8_t bitpos = 7;
// A 64-bit varint is at most 10 bytes (ceil(64/7)). Reject overlong encodings
// to avoid undefined behavior from shifting uint64_t by >= 64 bits.
uint32_t max_len = std::min(len, uint32_t(10));
// Start from the second byte since we've already processed the first
for (uint32_t i = 1; i < max_len; i++) {
for (uint32_t i = 1; i < len; i++) {
uint8_t val = buffer[i];
result |= uint64_t(val & 0x7F) << uint64_t(bitpos);
bitpos += 7;
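For reference, the 10-byte bound in the guard shown in this hunk is plain arithmetic on the varint encoding: every byte contributes 7 payload bits, so a full 64-bit value needs at most

\lceil 64 / 7 \rceil = 10 \ \text{bytes}, \qquad 7 \times 9 = 63

that is, the tenth byte is shifted left by 63 bits, the largest shift still defined for a uint64_t; accepting an eleventh byte would require a shift of 70 bits, which is the undefined behaviour the comment warns about.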
@@ -406,20 +402,6 @@ class DumpBuffer {
const char *c_str() const { return buf_; }
size_t size() const { return pos_; }
/// Get writable buffer pointer for use with buf_append_printf
char *data() { return buf_; }
/// Get current position for use with buf_append_printf
size_t pos() const { return pos_; }
/// Update position after buf_append_printf call
void set_pos(size_t pos) {
if (pos >= CAPACITY) {
pos_ = CAPACITY - 1;
} else {
pos_ = pos;
}
buf_[pos_] = '\0';
}
private:
void append_impl_(const char *str, size_t len) {
size_t space = CAPACITY - 1 - pos_;
@@ -961,16 +943,32 @@ class ProtoService {
virtual bool is_connection_setup() = 0;
virtual void on_fatal_error() = 0;
virtual void on_no_setup_connection() = 0;
/**
* Create a buffer with a reserved size.
* @param reserve_size The number of bytes to pre-allocate in the buffer. This is a hint
* to optimize memory usage and avoid reallocations during encoding.
* Implementations should aim to allocate at least this size.
* @return A ProtoWriteBuffer object with the reserved size.
*/
virtual ProtoWriteBuffer create_buffer(uint32_t reserve_size) = 0;
virtual bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) = 0;
virtual void read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) = 0;
/**
* Send a protobuf message by calculating its size, allocating a buffer, encoding, and sending.
* This is the implementation method - callers should use send_message() which adds logging.
* @param msg The protobuf message to send.
* @param message_type The message type identifier.
* @return True if the message was sent successfully, false otherwise.
*/
virtual bool send_message_impl(const ProtoMessage &msg, uint8_t message_type) = 0;
// Optimized method that pre-allocates buffer based on message size
bool send_message_(const ProtoMessage &msg, uint8_t message_type) {
ProtoSize size;
msg.calculate_size(size);
uint32_t msg_size = size.get_size();
// Create a pre-sized buffer
auto buffer = this->create_buffer(msg_size);
// Encode message into the buffer
msg.encode(buffer);
// Send the buffer
return this->send_buffer(buffer, message_type);
}
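send_message_() above is a two-pass pattern: a first pass over the message computes the encoded size, the buffer is pre-sized once, and the second pass writes into it, so the shared vector does not reallocate mid-encode. A toy self-contained sketch of the same idea; the types are stand-ins, not the ProtoMessage API:

```cpp
#include <cstdint>
#include <vector>

struct ToyMessage {
  uint32_t a;
  std::vector<uint8_t> payload;

  // Pass 1: size only, no writes.
  size_t encoded_size() const { return 4 + payload.size(); }

  // Pass 2: write into a buffer that is already big enough.
  void encode_into(std::vector<uint8_t> &out) const {
    for (int i = 0; i < 4; i++)
      out.push_back(static_cast<uint8_t>(a >> (8 * i)));
    out.insert(out.end(), payload.begin(), payload.end());
  }
};

bool send(const ToyMessage &msg) {
  std::vector<uint8_t> buf;
  buf.reserve(msg.encoded_size());  // single allocation up front
  msg.encode_into(buf);
  return !buf.empty();  // stand-in for the actual transport write
}
```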
// Authentication helper methods
inline bool check_connection_setup_() {

View File

@@ -1,5 +1,6 @@
#pragma once
#include <algorithm>
#include <cmath>
#include <limits>
#include "abstract_aqi_calculator.h"
@@ -14,7 +15,11 @@ class AQICalculator : public AbstractAQICalculator {
float pm2_5_index = calculate_index(pm2_5_value, PM2_5_GRID);
float pm10_0_index = calculate_index(pm10_0_value, PM10_0_GRID);
return static_cast<uint16_t>(std::round((pm2_5_index < pm10_0_index) ? pm10_0_index : pm2_5_index));
float aqi = std::max(pm2_5_index, pm10_0_index);
if (aqi < 0.0f) {
aqi = 0.0f;
}
return static_cast<uint16_t>(std::lround(aqi));
}
protected:
@@ -22,13 +27,27 @@ class AQICalculator : public AbstractAQICalculator {
static constexpr int INDEX_GRID[NUM_LEVELS][2] = {{0, 50}, {51, 100}, {101, 150}, {151, 200}, {201, 300}, {301, 500}};
static constexpr float PM2_5_GRID[NUM_LEVELS][2] = {{0.0f, 9.0f}, {9.1f, 35.4f},
{35.5f, 55.4f}, {55.5f, 125.4f},
{125.5f, 225.4f}, {225.5f, std::numeric_limits<float>::max()}};
static constexpr float PM2_5_GRID[NUM_LEVELS][2] = {
// clang-format off
{0.0f, 9.1f},
{9.1f, 35.5f},
{35.5f, 55.5f},
{55.5f, 125.5f},
{125.5f, 225.5f},
{225.5f, std::numeric_limits<float>::max()}
// clang-format on
};
static constexpr float PM10_0_GRID[NUM_LEVELS][2] = {{0.0f, 54.0f}, {55.0f, 154.0f},
{155.0f, 254.0f}, {255.0f, 354.0f},
{355.0f, 424.0f}, {425.0f, std::numeric_limits<float>::max()}};
static constexpr float PM10_0_GRID[NUM_LEVELS][2] = {
// clang-format off
{0.0f, 55.0f},
{55.0f, 155.0f},
{155.0f, 255.0f},
{255.0f, 355.0f},
{355.0f, 425.0f},
{425.0f, std::numeric_limits<float>::max()}
// clang-format on
};
static float calculate_index(float value, const float array[NUM_LEVELS][2]) {
int grid_index = get_grid_index(value, array);
@@ -45,7 +64,10 @@ class AQICalculator : public AbstractAQICalculator {
static int get_grid_index(float value, const float array[NUM_LEVELS][2]) {
for (int i = 0; i < NUM_LEVELS; i++) {
if (value >= array[i][0] && value <= array[i][1]) {
const bool in_range =
(value >= array[i][0]) && ((i == NUM_LEVELS - 1) ? (value <= array[i][1]) // last bucket inclusive
: (value < array[i][1])); // others exclusive on hi
if (in_range) {
return i;
}
}
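For reference, calculate_index() above presumably applies the usual breakpoint interpolation, mapping a concentration linearly from its breakpoint range onto the matching index range; with C the reading, (BP_lo, BP_hi) the concentration breakpoints and (I_lo, I_hi) the index breakpoints:

I = \frac{I_{hi} - I_{lo}}{BP_{hi} - BP_{lo}} \, (C - BP_{lo}) + I_{lo}

Of the two PM2.5/PM10 tables shown, the contiguous one (used together with the half-open in_range check) closes the coverage gaps of the row-style one: with the gapped grid a reading such as 35.45 falls between the {9.1, 35.4} and {35.5, 55.4} rows and matches no bucket at all, presumably the kind of "specific pm2.5 or pm10 readings" the AQI fix in this commit range refers to, whereas contiguous bounds with an exclusive upper edge (except for the last row) place every non-negative reading in exactly one row.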

View File

@@ -10,6 +10,7 @@ class AQISensor : public sensor::Sensor, public Component {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override { return setup_priority::DATA; }
void set_pm_2_5_sensor(sensor::Sensor *sensor) { this->pm_2_5_sensor_ = sensor; }
void set_pm_10_0_sensor(sensor::Sensor *sensor) { this->pm_10_0_sensor_ = sensor; }

View File

@@ -1,5 +1,6 @@
#pragma once
#include <algorithm>
#include <cmath>
#include <limits>
#include "abstract_aqi_calculator.h"
@@ -12,7 +13,11 @@ class CAQICalculator : public AbstractAQICalculator {
float pm2_5_index = calculate_index(pm2_5_value, PM2_5_GRID);
float pm10_0_index = calculate_index(pm10_0_value, PM10_0_GRID);
return static_cast<uint16_t>(std::round((pm2_5_index < pm10_0_index) ? pm10_0_index : pm2_5_index));
float aqi = std::max(pm2_5_index, pm10_0_index);
if (aqi < 0.0f) {
aqi = 0.0f;
}
return static_cast<uint16_t>(std::lround(aqi));
}
protected:
@@ -21,10 +26,24 @@ class CAQICalculator : public AbstractAQICalculator {
static constexpr int INDEX_GRID[NUM_LEVELS][2] = {{0, 25}, {26, 50}, {51, 75}, {76, 100}, {101, 400}};
static constexpr float PM2_5_GRID[NUM_LEVELS][2] = {
{0.0f, 15.0f}, {15.1f, 30.0f}, {30.1f, 55.0f}, {55.1f, 110.0f}, {110.1f, std::numeric_limits<float>::max()}};
// clang-format off
{0.0f, 15.1f},
{15.1f, 30.1f},
{30.1f, 55.1f},
{55.1f, 110.1f},
{110.1f, std::numeric_limits<float>::max()}
// clang-format on
};
static constexpr float PM10_0_GRID[NUM_LEVELS][2] = {
{0.0f, 25.0f}, {25.1f, 50.0f}, {50.1f, 90.0f}, {90.1f, 180.0f}, {180.1f, std::numeric_limits<float>::max()}};
// clang-format off
{0.0f, 25.1f},
{25.1f, 50.1f},
{50.1f, 90.1f},
{90.1f, 180.1f},
{180.1f, std::numeric_limits<float>::max()}
// clang-format on
};
static float calculate_index(float value, const float array[NUM_LEVELS][2]) {
int grid_index = get_grid_index(value, array);
@@ -42,7 +61,10 @@ class CAQICalculator : public AbstractAQICalculator {
static int get_grid_index(float value, const float array[NUM_LEVELS][2]) {
for (int i = 0; i < NUM_LEVELS; i++) {
if (value >= array[i][0] && value <= array[i][1]) {
const bool in_range =
(value >= array[i][0]) && ((i == NUM_LEVELS - 1) ? (value <= array[i][1]) // last bucket inclusive
: (value < array[i][1])); // others exclusive on hi
if (in_range) {
return i;
}
}

View File

@@ -41,6 +41,8 @@ void AS3935Component::dump_config() {
#endif
}
float AS3935Component::get_setup_priority() const { return setup_priority::DATA; }
void AS3935Component::loop() {
if (!this->irq_pin_->digital_read())
return;

View File

@@ -74,6 +74,7 @@ class AS3935Component : public Component {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void loop() override;
void set_irq_pin(GPIOPin *irq_pin) { irq_pin_ = irq_pin; }

View File

@@ -22,6 +22,8 @@ static const uint8_t REGISTER_STATUS = 0x0B; // 8 bytes / R
static const uint8_t REGISTER_AGC = 0x1A; // 8 bytes / R
static const uint8_t REGISTER_MAGNITUDE = 0x1B; // 16 bytes / R
float AS5600Sensor::get_setup_priority() const { return setup_priority::DATA; }
void AS5600Sensor::dump_config() {
LOG_SENSOR("", "AS5600 Sensor", this);
ESP_LOGCONFIG(TAG, " Out of Range Mode: %u", this->out_of_range_mode_);

View File

@@ -14,6 +14,7 @@ class AS5600Sensor : public PollingComponent, public Parented<AS5600Component>,
public:
void update() override;
void dump_config() override;
float get_setup_priority() const override;
void set_angle_sensor(sensor::Sensor *angle_sensor) { this->angle_sensor_ = angle_sensor; }
void set_raw_angle_sensor(sensor::Sensor *raw_angle_sensor) { this->raw_angle_sensor_ = raw_angle_sensor; }

View File

@@ -58,6 +58,8 @@ void AS7341Component::dump_config() {
LOG_SENSOR(" ", "NIR", this->nir_);
}
float AS7341Component::get_setup_priority() const { return setup_priority::DATA; }
void AS7341Component::update() {
this->read_channels(this->channel_readings_);

View File

@@ -78,6 +78,7 @@ class AS7341Component : public PollingComponent, public i2c::I2CDevice {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void update() override;
void set_f1_sensor(sensor::Sensor *f1_sensor) { this->f1_ = f1_sensor; }

View File

@@ -38,10 +38,8 @@ async def to_code(config):
# https://github.com/ESP32Async/ESPAsyncTCP
cg.add_library("ESP32Async/ESPAsyncTCP", "2.0.0")
elif CORE.is_rp2040:
# https://github.com/ayushsharma82/RPAsyncTCP
# RPAsyncTCP is a drop-in replacement for AsyncTCP_RP2040W with better
# ESPAsyncWebServer compatibility
cg.add_library("ayushsharma82/RPAsyncTCP", "1.3.2")
# https://github.com/khoih-prog/AsyncTCP_RP2040W
cg.add_library("khoih-prog/AsyncTCP_RP2040W", "1.2.0")
# Other platforms (host, etc) use socket-based implementation

View File

@@ -8,8 +8,8 @@
// Use ESPAsyncTCP library for ESP8266 (always Arduino)
#include <ESPAsyncTCP.h>
#elif defined(USE_RP2040)
// Use RPAsyncTCP library for RP2040
#include <RPAsyncTCP.h>
// Use AsyncTCP_RP2040W library for RP2040
#include <AsyncTCP_RP2040W.h>
#else
// Use socket-based implementation for other platforms
#include "async_tcp_socket.h"

View File

@@ -146,6 +146,7 @@ void ATM90E26Component::dump_config() {
LOG_SENSOR(" ", "Active Reverse Energy A", this->reverse_active_energy_sensor_);
LOG_SENSOR(" ", "Frequency", this->freq_sensor_);
}
float ATM90E26Component::get_setup_priority() const { return setup_priority::DATA; }
uint16_t ATM90E26Component::read16_(uint8_t a_register) {
uint8_t data[2];

View File

@@ -13,6 +13,7 @@ class ATM90E26Component : public PollingComponent,
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void update() override;
void set_voltage_sensor(sensor::Sensor *obj) { this->voltage_sensor_ = obj; }

View File

@@ -108,14 +108,10 @@ void ATM90E32Component::update() {
#endif
}
void ATM90E32Component::get_cs_summary_(std::span<char, GPIO_SUMMARY_MAX_LEN> buffer) {
this->cs_->dump_summary(buffer.data(), buffer.size());
}
void ATM90E32Component::setup() {
this->spi_setup();
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
this->cs_summary_ = this->cs_->dump_summary();
const char *cs = this->cs_summary_.c_str();
uint16_t mmode0 = 0x87; // 3P4W 50Hz
uint16_t high_thresh = 0;
@@ -162,14 +158,12 @@ void ATM90E32Component::setup() {
if (this->enable_offset_calibration_) {
// Initialize flash storage for offset calibrations
uint32_t o_hash = fnv1_hash("_offset_calibration_");
o_hash = fnv1_hash_extend(o_hash, cs);
uint32_t o_hash = fnv1_hash(std::string("_offset_calibration_") + this->cs_summary_);
this->offset_pref_ = global_preferences->make_preference<OffsetCalibration[3]>(o_hash, true);
this->restore_offset_calibrations_();
// Initialize flash storage for power offset calibrations
uint32_t po_hash = fnv1_hash("_power_offset_calibration_");
po_hash = fnv1_hash_extend(po_hash, cs);
uint32_t po_hash = fnv1_hash(std::string("_power_offset_calibration_") + this->cs_summary_);
this->power_offset_pref_ = global_preferences->make_preference<PowerOffsetCalibration[3]>(po_hash, true);
this->restore_power_offset_calibrations_();
} else {
@@ -189,8 +183,7 @@ void ATM90E32Component::setup() {
if (this->enable_gain_calibration_) {
// Initialize flash storage for gain calibration
uint32_t g_hash = fnv1_hash("_gain_calibration_");
g_hash = fnv1_hash_extend(g_hash, cs);
uint32_t g_hash = fnv1_hash(std::string("_gain_calibration_") + this->cs_summary_);
this->gain_calibration_pref_ = global_preferences->make_preference<GainCalibration[3]>(g_hash, true);
this->restore_gain_calibrations_();
@@ -221,8 +214,7 @@ void ATM90E32Component::setup() {
}
void ATM90E32Component::log_calibration_status_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
bool offset_mismatch = false;
bool power_mismatch = false;
@@ -573,8 +565,7 @@ float ATM90E32Component::get_chip_temperature_() {
}
void ATM90E32Component::run_gain_calibrations() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
if (!this->enable_gain_calibration_) {
ESP_LOGW(TAG, "[CALIBRATION][%s] Gain calibration is disabled! Enable it first with enable_gain_calibration: true",
cs);
@@ -674,8 +665,7 @@ void ATM90E32Component::run_gain_calibrations() {
}
void ATM90E32Component::save_gain_calibration_to_memory_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
bool success = this->gain_calibration_pref_.save(&this->gain_phase_);
global_preferences->sync();
if (success) {
@@ -688,8 +678,7 @@ void ATM90E32Component::save_gain_calibration_to_memory_() {
}
void ATM90E32Component::save_offset_calibration_to_memory_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
bool success = this->offset_pref_.save(&this->offset_phase_);
global_preferences->sync();
if (success) {
@@ -705,8 +694,7 @@ void ATM90E32Component::save_offset_calibration_to_memory_() {
}
void ATM90E32Component::save_power_offset_calibration_to_memory_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
bool success = this->power_offset_pref_.save(&this->power_offset_phase_);
global_preferences->sync();
if (success) {
@@ -722,8 +710,7 @@ void ATM90E32Component::save_power_offset_calibration_to_memory_() {
}
void ATM90E32Component::run_offset_calibrations() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
if (!this->enable_offset_calibration_) {
ESP_LOGW(TAG,
"[CALIBRATION][%s] Offset calibration is disabled! Enable it first with enable_offset_calibration: true",
@@ -753,8 +740,7 @@ void ATM90E32Component::run_offset_calibrations() {
}
void ATM90E32Component::run_power_offset_calibrations() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
if (!this->enable_offset_calibration_) {
ESP_LOGW(
TAG,
@@ -827,8 +813,7 @@ void ATM90E32Component::write_power_offsets_to_registers_(uint8_t phase, int16_t
}
void ATM90E32Component::restore_gain_calibrations_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
for (uint8_t i = 0; i < 3; ++i) {
this->config_gain_phase_[i].voltage_gain = this->phase_[i].voltage_gain_;
this->config_gain_phase_[i].current_gain = this->phase_[i].ct_gain_;
@@ -882,8 +867,7 @@ void ATM90E32Component::restore_gain_calibrations_() {
}
void ATM90E32Component::restore_offset_calibrations_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
for (uint8_t i = 0; i < 3; ++i)
this->config_offset_phase_[i] = this->offset_phase_[i];
@@ -925,8 +909,7 @@ void ATM90E32Component::restore_offset_calibrations_() {
}
void ATM90E32Component::restore_power_offset_calibrations_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
for (uint8_t i = 0; i < 3; ++i)
this->config_power_offset_phase_[i] = this->power_offset_phase_[i];
@@ -968,8 +951,7 @@ void ATM90E32Component::restore_power_offset_calibrations_() {
}
void ATM90E32Component::clear_gain_calibrations() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
if (!this->using_saved_calibrations_) {
ESP_LOGI(TAG, "[CALIBRATION][%s] No stored gain calibrations to clear. Current values:", cs);
ESP_LOGI(TAG, "[CALIBRATION][%s] ----------------------------------------------------------", cs);
@@ -1018,8 +1000,7 @@ void ATM90E32Component::clear_gain_calibrations() {
}
void ATM90E32Component::clear_offset_calibrations() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
if (!this->restored_offset_calibration_) {
ESP_LOGI(TAG, "[CALIBRATION][%s] No stored offset calibrations to clear. Current values:", cs);
ESP_LOGI(TAG, "[CALIBRATION][%s] --------------------------------------------------------------", cs);
@@ -1061,8 +1042,7 @@ void ATM90E32Component::clear_offset_calibrations() {
}
void ATM90E32Component::clear_power_offset_calibrations() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
if (!this->restored_power_offset_calibration_) {
ESP_LOGI(TAG, "[CALIBRATION][%s] No stored power offsets to clear. Current values:", cs);
ESP_LOGI(TAG, "[CALIBRATION][%s] ---------------------------------------------------------------------", cs);
@@ -1137,8 +1117,7 @@ int16_t ATM90E32Component::calibrate_power_offset(uint8_t phase, bool reactive)
}
bool ATM90E32Component::verify_gain_writes_() {
char cs[GPIO_SUMMARY_MAX_LEN];
this->get_cs_summary_(cs);
const char *cs = this->cs_summary_.c_str();
bool success = true;
for (uint8_t phase = 0; phase < 3; phase++) {
uint16_t read_voltage = this->read16_(voltage_gain_registers[phase]);

View File
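The ATM90E32 hunks above derive each preference key by hashing a label combined with the chip-select pin summary, so two chips on the same SPI bus do not overwrite each other's stored calibration. A standalone sketch of that idea using a plain 32-bit FNV-1 hash (the constants are the standard FNV-1 parameters; `make_key` and the label strings are illustrative, not the component's API):

```cpp
#include <cstdint>
#include <cstdio>
#include <string>

// Standard 32-bit FNV-1 hash: start from the offset basis, then for each byte
// multiply by the FNV prime and XOR in the byte.
uint32_t fnv1_hash32(const std::string &str) {
  uint32_t hash = 2166136261u;
  for (char c : str) {
    hash *= 16777619u;
    hash ^= static_cast<uint8_t>(c);
  }
  return hash;
}

// Illustrative key derivation: the same label yields different keys for
// different chip-select pins, keeping each chip's stored calibration separate.
uint32_t make_key(const std::string &label, const std::string &cs_summary) {
  return fnv1_hash32(label + cs_summary);
}

int main() {
  printf("GPIO5:  0x%08X\n", make_key("_offset_calibration_", "GPIO5"));
  printf("GPIO15: 0x%08X\n", make_key("_offset_calibration_", "GPIO15"));
  return 0;
}
```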

@@ -1,13 +1,11 @@
#pragma once
#include <span>
#include <unordered_map>
#include "atm90e32_reg.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/spi/spi.h"
#include "esphome/core/application.h"
#include "esphome/core/component.h"
#include "esphome/core/gpio.h"
#include "esphome/core/helpers.h"
#include "esphome/core/preferences.h"
@@ -184,7 +182,6 @@ class ATM90E32Component : public PollingComponent,
bool verify_gain_writes_();
bool validate_spi_read_(uint16_t expected, const char *context = nullptr);
void log_calibration_status_();
void get_cs_summary_(std::span<char, GPIO_SUMMARY_MAX_LEN> buffer);
struct ATM90E32Phase {
uint16_t voltage_gain_{0};
@@ -250,6 +247,7 @@ class ATM90E32Component : public PollingComponent,
ESPPreferenceObject offset_pref_;
ESPPreferenceObject power_offset_pref_;
ESPPreferenceObject gain_calibration_pref_;
std::string cs_summary_;
sensor::Sensor *freq_sensor_{nullptr};
#ifdef USE_TEXT_SENSOR

View File

@@ -1,5 +1,4 @@
import esphome.codegen as cg
from esphome.components.esp32 import add_idf_component, include_builtin_idf_component
import esphome.config_validation as cv
from esphome.const import CONF_BITS_PER_SAMPLE, CONF_NUM_CHANNELS, CONF_SAMPLE_RATE
import esphome.final_validate as fv
@@ -166,10 +165,4 @@ def final_validate_audio_schema(
async def to_code(config):
# Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
include_builtin_idf_component("esp_http_client")
add_idf_component(
name="esphome/esp-audio-libs",
ref="2.0.3",
)
cg.add_library("esphome/esp-audio-libs", "2.0.1")

View File

@@ -300,7 +300,7 @@ FileDecoderState AudioDecoder::decode_mp3_() {
// Advance read pointer to match the offset for the syncword
this->input_transfer_buffer_->decrease_buffer_length(offset);
const uint8_t *buffer_start = this->input_transfer_buffer_->get_buffer_start();
uint8_t *buffer_start = this->input_transfer_buffer_->get_buffer_start();
buffer_length = (int) this->input_transfer_buffer_->available();
int err = esp_audio_libs::helix_decoder::MP3Decode(this->mp3_decoder_, &buffer_start, &buffer_length,

View File

@@ -185,16 +185,18 @@ esp_err_t AudioReader::start(const std::string &uri, AudioFileType &file_type) {
return err;
}
if (str_endswith_ignore_case(url, ".wav")) {
std::string url_string = str_lower_case(url);
if (str_endswith(url_string, ".wav")) {
file_type = AudioFileType::WAV;
}
#ifdef USE_AUDIO_MP3_SUPPORT
else if (str_endswith_ignore_case(url, ".mp3")) {
else if (str_endswith(url_string, ".mp3")) {
file_type = AudioFileType::MP3;
}
#endif
#ifdef USE_AUDIO_FLAC_SUPPORT
else if (str_endswith_ignore_case(url, ".flac")) {
else if (str_endswith(url_string, ".flac")) {
file_type = AudioFileType::FLAC;
}
#endif

View File
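The AudioReader hunk above switches between lowercasing the whole URL once and using a case-insensitive suffix check per extension. A minimal standalone sketch of the suffix-only comparison (the helper name `ends_with_ignore_case` is illustrative; ESPHome's own string helpers live in `esphome/core/helpers.h`):

```cpp
#include <cctype>
#include <cstdio>
#include <string>

// Compare only the last suffix.size() characters, ignoring case, so the rest
// of the URL (host, path, query string) is never copied or modified.
bool ends_with_ignore_case(const std::string &str, const std::string &suffix) {
  if (suffix.size() > str.size())
    return false;
  const size_t offset = str.size() - suffix.size();
  for (size_t i = 0; i < suffix.size(); i++) {
    if (std::tolower(static_cast<unsigned char>(str[offset + i])) !=
        std::tolower(static_cast<unsigned char>(suffix[i])))
      return false;
  }
  return true;
}

int main() {
  printf("%d\n", ends_with_ignore_case("http://example.com/SONG.MP3", ".mp3"));  // 1
  printf("%d\n", ends_with_ignore_case("http://example.com/clip.wav", ".mp3"));  // 0
  return 0;
}
```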

@@ -6,7 +6,8 @@ namespace bang_bang {
static const char *const TAG = "bang_bang.climate";
BangBangClimate::BangBangClimate() = default;
BangBangClimate::BangBangClimate()
: idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {}
void BangBangClimate::setup() {
this->sensor_->add_on_state_callback([this](float state) {
@@ -159,13 +160,13 @@ void BangBangClimate::switch_to_action_(climate::ClimateAction action) {
switch (action) {
case climate::CLIMATE_ACTION_OFF:
case climate::CLIMATE_ACTION_IDLE:
trig = &this->idle_trigger_;
trig = this->idle_trigger_;
break;
case climate::CLIMATE_ACTION_COOLING:
trig = &this->cool_trigger_;
trig = this->cool_trigger_;
break;
case climate::CLIMATE_ACTION_HEATING:
trig = &this->heat_trigger_;
trig = this->heat_trigger_;
break;
default:
trig = nullptr;
@@ -203,9 +204,9 @@ void BangBangClimate::set_away_config(const BangBangClimateTargetTempConfig &awa
void BangBangClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; }
void BangBangClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }
Trigger<> *BangBangClimate::get_idle_trigger() { return &this->idle_trigger_; }
Trigger<> *BangBangClimate::get_cool_trigger() { return &this->cool_trigger_; }
Trigger<> *BangBangClimate::get_heat_trigger() { return &this->heat_trigger_; }
Trigger<> *BangBangClimate::get_idle_trigger() const { return this->idle_trigger_; }
Trigger<> *BangBangClimate::get_cool_trigger() const { return this->cool_trigger_; }
Trigger<> *BangBangClimate::get_heat_trigger() const { return this->heat_trigger_; }
void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; }
void BangBangClimate::set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; }

View File

@@ -30,9 +30,9 @@ class BangBangClimate : public climate::Climate, public Component {
void set_normal_config(const BangBangClimateTargetTempConfig &normal_config);
void set_away_config(const BangBangClimateTargetTempConfig &away_config);
Trigger<> *get_idle_trigger();
Trigger<> *get_cool_trigger();
Trigger<> *get_heat_trigger();
Trigger<> *get_idle_trigger() const;
Trigger<> *get_cool_trigger() const;
Trigger<> *get_heat_trigger() const;
protected:
/// Override control to change settings of the climate device.
@@ -57,13 +57,17 @@ class BangBangClimate : public climate::Climate, public Component {
*
* In idle mode, the controller is assumed to have both heating and cooling disabled.
*/
Trigger<> idle_trigger_;
Trigger<> *idle_trigger_{nullptr};
/** The trigger to call when the controller should switch to cooling mode.
*/
Trigger<> cool_trigger_;
Trigger<> *cool_trigger_{nullptr};
/** The trigger to call when the controller should switch to heating mode.
*
* A null value for this attribute means that the controller has no heating action
* For example window blinds, where only cooling (blinds closed) and not-cooling
* (blinds open) is possible.
*/
Trigger<> heat_trigger_;
Trigger<> *heat_trigger_{nullptr};
/** A reference to the trigger that was previously active.
*
* This is so that the previous trigger can be stopped before enabling a new one.

View File
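The BangBangClimate hunks switch between holding the idle/cool/heat triggers by value and holding them as heap-allocated pointers created in the constructor, where a null pointer can express "this action is not available". A minimal sketch of the pointer variant, with a hypothetical `Trigger` stand-in so the snippet compiles on its own (ESPHome's real `Trigger<>` lives in `esphome/core/automation.h`):

```cpp
#include <cstdio>

// Hypothetical stand-in for ESPHome's Trigger<>: just records that it fired.
class Trigger {
 public:
  void trigger() { printf("trigger fired\n"); }
};

class ExampleController {
 public:
  // Pointer members are created once in the constructor and live for the
  // lifetime of the controller; heat_trigger_ stays nullptr when no heating
  // action is configured.
  ExampleController() : idle_trigger_(new Trigger()), cool_trigger_(new Trigger()) {}

  Trigger *get_idle_trigger() const { return this->idle_trigger_; }
  Trigger *get_cool_trigger() const { return this->cool_trigger_; }
  Trigger *get_heat_trigger() const { return this->heat_trigger_; }

  void switch_to_cooling() {
    Trigger *trig = this->cool_trigger_;
    if (trig != nullptr)
      trig->trigger();
  }

 protected:
  Trigger *idle_trigger_{nullptr};
  Trigger *cool_trigger_{nullptr};
  Trigger *heat_trigger_{nullptr};  // stays nullptr: no heat action configured
};

int main() {
  ExampleController controller;
  controller.switch_to_cooling();
  return 0;
}
```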

@@ -265,4 +265,6 @@ void BH1750Sensor::fail_and_reset_() {
this->state_ = IDLE;
}
float BH1750Sensor::get_setup_priority() const { return setup_priority::DATA; }
} // namespace esphome::bh1750

View File

@@ -21,6 +21,7 @@ class BH1750Sensor : public sensor::Sensor, public PollingComponent, public i2c:
void dump_config() override;
void update() override;
void loop() override;
float get_setup_priority() const override;
protected:
// State machine states

View File

@@ -14,7 +14,10 @@ void log_binary_sensor(const char *tag, const char *prefix, const char *type, Bi
}
ESP_LOGCONFIG(tag, "%s%s '%s'", prefix, type, obj->get_name().c_str());
LOG_ENTITY_DEVICE_CLASS(tag, prefix, *obj);
if (!obj->get_device_class_ref().empty()) {
ESP_LOGCONFIG(tag, "%s Device Class: '%s'", prefix, obj->get_device_class_ref().c_str());
}
}
void BinarySensor::publish_state(bool new_state) {

View File

@@ -9,7 +9,7 @@ static const char *const TAG = "bl0940.number";
void CalibrationNumber::setup() {
float value = 0.0f;
if (this->restore_value_) {
this->pref_ = this->make_entity_preference<float>();
this->pref_ = global_preferences->make_preference<float>(this->get_preference_hash());
if (!this->pref_.load(&value)) {
value = 0.0f;
}

View File

@@ -135,8 +135,8 @@ void BluetoothConnection::loop() {
// - For V3_WITH_CACHE: Services are never sent, disable after INIT state
// - For V3_WITHOUT_CACHE: Disable only after service discovery is complete
// (send_service_ == DONE_SENDING_SERVICES, which is only set after services are sent)
if (this->state() != espbt::ClientState::INIT && (this->connection_type_ == espbt::ConnectionType::V3_WITH_CACHE ||
this->send_service_ == DONE_SENDING_SERVICES)) {
if (this->state_ != espbt::ClientState::INIT && (this->connection_type_ == espbt::ConnectionType::V3_WITH_CACHE ||
this->send_service_ == DONE_SENDING_SERVICES)) {
this->disable_loop();
}
}

View File
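The BluetoothConnection hunk gates a `disable_loop()` call on the connection state so the per-iteration work stops once nothing more will be sent. As a general pattern, a component can check a condition inside `loop()` and take itself out of the loop when that condition becomes permanent; a hedged sketch assuming `Component::disable_loop()` from `esphome/core/component.h` (the class and flag names here are hypothetical):

```cpp
#include "esphome/core/component.h"

namespace esphome {
namespace example_connection {

class ExampleConnection : public Component {
 public:
  void loop() override {
    // Do the per-iteration work while it is still needed...
    if (!this->work_pending_) {
      // ...then stop scheduling this component's loop() entirely, so an idle
      // connection no longer costs time every main-loop iteration.
      this->disable_loop();
    }
  }

  void set_work_pending(bool pending) { this->work_pending_ = pending; }

 protected:
  bool work_pending_{true};
};

}  // namespace example_connection
}  // namespace esphome
```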

@@ -199,6 +199,7 @@ void BME280Component::dump_config() {
LOG_SENSOR(" ", "Humidity", this->humidity_sensor_);
ESP_LOGCONFIG(TAG, " Oversampling: %s", oversampling_to_str(this->humidity_oversampling_));
}
float BME280Component::get_setup_priority() const { return setup_priority::DATA; }
inline uint8_t oversampling_to_time(BME280Oversampling over_sampling) { return (1 << uint8_t(over_sampling)) >> 1; }

View File

@@ -76,6 +76,7 @@ class BME280Component : public PollingComponent {
// (In most use cases you won't need these)
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void update() override;
protected:

View File

@@ -233,6 +233,8 @@ void BME680Component::dump_config() {
}
}
float BME680Component::get_setup_priority() const { return setup_priority::DATA; }
void BME680Component::update() {
uint8_t meas_control = 0; // No need to fetch, we're setting all fields
meas_control |= (this->temperature_oversampling_ & 0b111) << 5;

View File

@@ -99,6 +99,7 @@ class BME680Component : public PollingComponent, public i2c::I2CDevice {
// (In most use cases you won't need these)
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void update() override;
protected:

View File

@@ -89,9 +89,8 @@ async def to_code(config):
var.set_state_save_interval(config[CONF_STATE_SAVE_INTERVAL].total_milliseconds)
)
# Although this component does not use SPI/Wire directly, the BSEC library requires them
# Although this component does not use SPI, the BSEC library requires the SPI library
cg.add_library("SPI", None)
cg.add_library("Wire", None)
cg.add_define("USE_BSEC")
cg.add_library("boschsensortec/BSEC Software Library", "1.6.1480")

View File

@@ -181,6 +181,8 @@ void BME680BSECComponent::dump_config() {
LOG_SENSOR(" ", "Breath VOC Equivalent", this->breath_voc_equivalent_sensor_);
}
float BME680BSECComponent::get_setup_priority() const { return setup_priority::DATA; }
void BME680BSECComponent::loop() {
this->run_();

View File

@@ -64,6 +64,7 @@ class BME680BSECComponent : public Component, public i2c::I2CDevice {
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void loop() override;
protected:

View File

@@ -106,6 +106,8 @@ void BME68xBSEC2Component::dump_config() {
#endif
}
float BME68xBSEC2Component::get_setup_priority() const { return setup_priority::DATA; }
void BME68xBSEC2Component::loop() {
this->run_();

View File

@@ -48,6 +48,7 @@ class BME68xBSEC2Component : public Component {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override;
void loop() override;
void set_algorithm_output(AlgorithmOutput algorithm_output) { this->algorithm_output_ = algorithm_output; }

View File

@@ -263,6 +263,7 @@ void BMI160Component::update() {
this->status_clear_warning();
}
float BMI160Component::get_setup_priority() const { return setup_priority::DATA; }
} // namespace bmi160
} // namespace esphome

View File

@@ -14,6 +14,8 @@ class BMI160Component : public PollingComponent, public i2c::I2CDevice {
void update() override;
float get_setup_priority() const override;
void set_accel_x_sensor(sensor::Sensor *accel_x_sensor) { accel_x_sensor_ = accel_x_sensor; }
void set_accel_y_sensor(sensor::Sensor *accel_y_sensor) { accel_y_sensor_ = accel_y_sensor; }
void set_accel_z_sensor(sensor::Sensor *accel_z_sensor) { accel_z_sensor_ = accel_z_sensor; }

View File

@@ -131,6 +131,7 @@ bool BMP085Component::set_mode_(uint8_t mode) {
ESP_LOGV(TAG, "Setting mode to 0x%02X", mode);
return this->write_byte(BMP085_REGISTER_CONTROL, mode);
}
float BMP085Component::get_setup_priority() const { return setup_priority::DATA; }
} // namespace bmp085
} // namespace esphome

Some files were not shown because too many files have changed in this diff.