Mirror of https://github.com/esphome/esphome.git (synced 2026-01-20 01:49:11 -07:00)

Compare commits: `integratio` ... `revert-943` (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 01ac30f210 | |
| | 35c7937df3 | |
| | a18adb8f6e | |
@@ -9,7 +9,7 @@ This document provides essential context for AI models interacting with this project

## 2. Core Technologies & Stack

* **Languages:** Python (>=3.11), C++ (gnu++20)
* **Languages:** Python (>=3.10), C++ (gnu++20)
* **Frameworks & Runtimes:** PlatformIO, Arduino, ESP-IDF.
* **Build Systems:** PlatformIO is the primary build system. CMake is used as an alternative.
* **Configuration:** YAML.

@@ -38,7 +38,7 @@ This document provides essential context for AI models interacting with this project

5. **Dashboard** (`esphome/dashboard/`): A web-based interface for device configuration, management, and OTA updates.

* **Platform Support:**
  1. **ESP32** (`components/esp32/`): Espressif ESP32 family. Supports multiple variants (Original, C2, C3, C5, C6, H2, P4, S2, S3) with ESP-IDF framework. Arduino framework supports only a subset of the variants (Original, C3, S2, S3).
  1. **ESP32** (`components/esp32/`): Espressif ESP32 family. Supports multiple variants (S2, S3, C3, etc.) and both IDF and Arduino frameworks.
  2. **ESP8266** (`components/esp8266/`): Espressif ESP8266. Arduino framework only, with memory constraints.
  3. **RP2040** (`components/rp2040/`): Raspberry Pi Pico/RP2040. Arduino framework with PIO (Programmable I/O) support.
  4. **LibreTiny** (`components/libretiny/`): Realtek and Beken chips. Supports multiple chip families and auto-generated components.

@@ -51,79 +51,7 @@ This document provides essential context for AI models interacting with this project

* **Naming Conventions:**
  * **Python:** Follows PEP 8. Use clear, descriptive names following snake_case.
  * **C++:** Follows the Google C++ Style Guide with these specifics (following clang-tidy conventions; see the sketch after this list):
    - Function, method, and variable names: `lower_snake_case`
    - Class/struct/enum names: `UpperCamelCase`
    - Top-level constants (global/namespace scope): `UPPER_SNAKE_CASE`
    - Function-local constants: `lower_snake_case`
    - Protected/private fields: `lower_snake_case_with_trailing_underscore_`
    - Favor descriptive names over abbreviations
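A minimal sketch pulling these rules together (the names here are hypothetical, not taken from the ESPHome codebase):

```cpp
#include <cstdint>

static const uint32_t RECONNECT_INTERVAL_MS = 5000;  // top-level constant: UPPER_SNAKE_CASE

class WifiSignalSensor {  // class name: UpperCamelCase
 public:
  void update_signal_strength(int rssi_dbm) {  // method and parameter: lower_snake_case
    const int min_valid_rssi = -100;  // function-local constant: lower_snake_case
    if (rssi_dbm >= min_valid_rssi)
      this->last_rssi_ = rssi_dbm;  // member access through this->
  }

 protected:
  int last_rssi_{0};  // protected field: trailing underscore
};
```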
* **C++ Field Visibility:**
  * **Prefer `protected`:** Use `protected` for most class fields to enable extensibility and testing. Fields should be `lower_snake_case_with_trailing_underscore_`.
  * **Use `private` for safety-critical cases:** Use `private` visibility when direct field access could introduce bugs or violate invariants:
    1. **Pointer lifetime issues:** When setters validate and store pointers from known lists to prevent dangling references.

```cpp
// Helper to find matching string in vector and return its pointer
inline const char *vector_find(const std::vector<const char *> &vec, const char *value) {
  for (const char *item : vec) {
    if (strcmp(item, value) == 0)
      return item;
  }
  return nullptr;
}

class ClimateDevice {
 public:
  void set_custom_fan_modes(std::initializer_list<const char *> modes) {
    this->custom_fan_modes_ = modes;
    this->active_custom_fan_mode_ = nullptr;  // Reset when modes change
  }
  bool set_custom_fan_mode(const char *mode) {
    // Find mode in supported list and store that pointer (not the input pointer)
    const char *validated_mode = vector_find(this->custom_fan_modes_, mode);
    if (validated_mode != nullptr) {
      this->active_custom_fan_mode_ = validated_mode;
      return true;
    }
    return false;
  }

 private:
  std::vector<const char *> custom_fan_modes_;   // Pointers to string literals in flash
  const char *active_custom_fan_mode_{nullptr};  // Must point to entry in custom_fan_modes_
};
```

    2. **Invariant coupling:** When multiple fields must remain synchronized to prevent buffer overflows or data corruption.

```cpp
class Buffer {
 public:
  void resize(size_t new_size) {
    auto new_data = std::make_unique<uint8_t[]>(new_size);
    if (this->data_) {
      std::memcpy(new_data.get(), this->data_.get(), std::min(this->size_, new_size));
    }
    this->data_ = std::move(new_data);
    this->size_ = new_size;  // Must stay in sync with data_
  }

 private:
  std::unique_ptr<uint8_t[]> data_;
  size_t size_{0};  // Must match allocated size of data_
};
```

    3. **Resource management:** When setters perform cleanup or registration operations that derived classes might skip.
  * **Provide `protected` accessor methods:** When derived classes need controlled access to `private` members.

* **C++ Preprocessor Directives:**
  * **Avoid `#define` for constants:** Using `#define` for constants is discouraged; use `const` variables or enums instead.
  * **Use `#define` only for:**
    - Conditional compilation (`#ifdef`, `#ifndef`)
    - Compile-time sizes calculated during Python code generation (e.g., configuring `std::array` or `StaticVector` dimensions via `cg.add_define()`); see the sketch below
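For illustration, a hedged sketch of the distinction; `USE_MY_FEATURE` and `MY_BUFFER_SIZE` are hypothetical defines, not ones the project provides:

```cpp
#include <array>
#include <cstdint>

// Discouraged - an untyped, unscoped preprocessor constant:
//   #define MAX_RETRIES 5
// Preferred - a typed constant:
static constexpr uint8_t MAX_RETRIES = 5;

// Legitimate #define usage: conditional compilation on a feature flag,
// with a dimension injected at codegen time via cg.add_define("MY_BUFFER_SIZE", size).
#ifdef USE_MY_FEATURE
std::array<uint8_t, MY_BUFFER_SIZE> feature_buffer;
#endif
```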
* **C++ Additional Conventions:**
  * **Member access:** Prefix all class member access with `this->` (e.g., `this->value_`, not `value_`)
  * **Indentation:** Use spaces (two per indentation level), not tabs
  * **Type aliases:** Prefer `using type_t = int;` over `typedef int type_t;`
  * **Line length:** Wrap lines at no more than 120 characters
* **C++:** Follows the Google C++ Style Guide.

* **Component Structure:**
  * **Standard Files:**

@@ -132,7 +60,7 @@ This document provides essential context for AI models interacting with this project

```
├── __init__.py        # Component configuration schema and code generation
├── [component].h      # C++ header file (if needed)
├── [component].cpp    # C++ implementation (if needed)
└── [platform]/        # Platform-specific implementations
└── [platform]/        # Platform-specific implementations
    ├── __init__.py    # Platform-specific configuration
    ├── [platform].h   # Platform C++ header
    └── [platform].cpp # Platform C++ implementation
```

@@ -172,7 +100,8 @@ This document provides essential context for AI models interacting with this project

* **C++ Class Pattern:**

```cpp
namespace esphome::my_component {
namespace esphome {
namespace my_component {

class MyComponent : public Component {
 public:
@@ -188,7 +117,8 @@ This document provides essential context for AI models interacting with this project
  int param_{0};
};

}  // namespace esphome::my_component
}  // namespace my_component
}  // namespace esphome
```

* **Common Component Examples:**

@@ -220,8 +150,7 @@ This document provides essential context for AI models interacting with this project

* **Configuration Validation:**
  * **Common Validators:** `cv.int_`, `cv.float_`, `cv.string`, `cv.boolean`, `cv.int_range(min=0, max=100)`, `cv.positive_int`, `cv.percentage`.
  * **Complex Validation:** `cv.All(cv.string, cv.Length(min=1, max=50))`, `cv.Any(cv.int_, cv.string)`.
  * **Platform-Specific:** `cv.only_on(["esp32", "esp8266"])`, `esp32.only_on_variant(...)`, `cv.only_on_esp32`, `cv.only_on_esp8266`, `cv.only_on_rp2040`.
  * **Framework-Specific:** `cv.only_with_framework(...)`, `cv.only_with_arduino`, `cv.only_with_esp_idf`.
  * **Platform-Specific:** `cv.only_on(["esp32", "esp8266"])`, `cv.only_with_arduino`.
* **Schema Extensions:**

```python
CONFIG_SCHEMA = cv.Schema({ ... })
```

@@ -239,8 +168,6 @@ This document provides essential context for AI models interacting with this project

* `platformio.ini`: Configures the PlatformIO build environments for different microcontrollers.
* `.pre-commit-config.yaml`: Configures the pre-commit hooks for linting and formatting.
* **CI/CD Pipeline:** Defined in `.github/workflows`.
* **Static Analysis & Development:**
  * `esphome/core/defines.h`: A comprehensive header file containing every `#define` that components can add via `cg.add_define()` in Python. It is used only for development, static analysis tools, and CI testing; it is not used during runtime compilation. When a component adds a new define, the define must also be added to this file so IDE support and static analysis coverage stay complete. The file includes feature flags, build configurations, and platform-specific defines that let static analyzers understand the complete codebase without compiling for specific platforms.
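As a hedged sketch of that workflow (component, option, and define names are hypothetical), the Python side of such a component might look like this, with a matching `#define MY_COMPONENT_BUFFER_SIZE ...` entry added to `esphome/core/defines.h` for the tooling:

```python
import esphome.codegen as cg
import esphome.config_validation as cv

CONF_BUFFER_SIZE = "buffer_size"

CONFIG_SCHEMA = cv.Schema(
    {
        cv.Optional(CONF_BUFFER_SIZE, default=64): cv.positive_int,
    }
)


async def to_code(config):
    # Emits "#define MY_COMPONENT_BUFFER_SIZE <value>" into the generated build;
    # the same name also needs an entry in esphome/core/defines.h so IDEs,
    # clang-tidy, and CI can see it without building for a specific platform.
    cg.add_define("MY_COMPONENT_BUFFER_SIZE", config[CONF_BUFFER_SIZE])
```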
## 6. Development & Testing Workflow

@@ -256,11 +183,6 @@ This document provides essential context for AI models interacting with this project

```
└── components/[component]/    # Component-specific tests
```

Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
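For example (the component and target names below are placeholders; substitute ones that actually exist under `tests/components/`):

```bash
# Fast configuration-only check for one component on one platform
./script/test_build_components -e config -c wifi -t esp32-idf

# Full compile test for the same selection
./script/test_build_components -e compile -c wifi -t esp32-idf
```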
* **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:

```bash
./script/test_component_grouping.py -e config --all
```

  This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.
* **Debugging and Troubleshooting:**
  * **Debug Tools:**
    - `esphome config <file>.yaml` to validate configuration.

@@ -276,12 +198,12 @@ This document provides essential context for AI models interacting with this project

## 7. Specific Instructions for AI Collaboration

* **Contribution Workflow (Pull Request Process):**
  1. **Fork & Branch:** Create a new branch based on the `dev` branch (always use `git checkout -b <branch-name> dev` to ensure you're branching from `dev`, not the currently checked out branch).
  1. **Fork & Branch:** Create a new branch in your fork.
  2. **Make Changes:** Adhere to all coding conventions and patterns.
  3. **Test:** Create component tests for all supported platforms and run the full test suite locally.
  4. **Lint:** Run `pre-commit` to ensure code is compliant.
  5. **Commit:** Commit your changes. There is no strict format for commit messages.
  6. **Pull Request:** Submit a PR against the `dev` branch. The Pull Request title should have a prefix of the component being worked on (e.g., `[display] Fix bug`, `[abc123] Add new component`). Update documentation, examples, and add `CODEOWNERS` entries as needed. Pull requests should always be made using the `.github/PULL_REQUEST_TEMPLATE.md` template - fill out all sections completely without removing any parts of the template.
  6. **Pull Request:** Submit a PR against the `dev` branch. The Pull Request title should have a prefix of the component being worked on (e.g., `[display] Fix bug`, `[abc123] Add new component`). Update documentation, examples, and add `CODEOWNERS` entries as needed. Pull requests should always be made with the PULL_REQUEST_TEMPLATE.md template filled out correctly.

* **Documentation Contributions:**
  * Documentation is hosted in the separate `esphome/esphome-docs` repository.

@@ -291,156 +213,6 @@ This document provides essential context for AI models interacting with this project

* **Component Development:** Keep dependencies minimal, provide clear error messages, and write comprehensive docstrings and tests.
* **Code Generation:** Generate minimal and efficient C++ code. Validate all user inputs thoroughly. Support multiple platform variations.
* **Configuration Design:** Aim for simplicity with sensible defaults, while allowing for advanced customization.
* **Embedded Systems Optimization:** ESPHome targets resource-constrained microcontrollers. Be mindful of flash size and RAM usage.

**STL Container Guidelines:**

ESPHome runs on embedded systems with limited resources. Choose containers carefully:

1. **Compile-time-known sizes:** Use `std::array` instead of `std::vector` when size is known at compile time.

```cpp
// Bad - generates STL realloc code
std::vector<int> values;

// Good - no dynamic allocation
std::array<int, MAX_VALUES> values;
```

   Use `cg.add_define("MAX_VALUES", count)` to set the size from Python configuration.

   **For byte buffers:** Avoid `std::vector<uint8_t>` unless the buffer needs to grow. Use `std::unique_ptr<uint8_t[]>` instead.

   > **Note:** `std::unique_ptr<uint8_t[]>` does **not** provide bounds checking or iterator support like `std::vector<uint8_t>`. Use it only when you do not need these features and want minimal overhead.

```cpp
// Bad - STL overhead for simple byte buffer
std::vector<uint8_t> buffer;
buffer.resize(256);

// Good - minimal overhead, single allocation
std::unique_ptr<uint8_t[]> buffer = std::make_unique<uint8_t[]>(256);
// Or if size is constant:
std::array<uint8_t, 256> buffer;
```
2. **Compile-time-known fixed sizes with a vector-like API:** Use `StaticVector` from `esphome/core/helpers.h` for fixed-size stack allocation with a `push_back()` interface.

```cpp
// Bad - generates STL realloc code (_M_realloc_insert)
std::vector<ServiceRecord> services;
services.reserve(5);  // Still includes reallocation machinery

// Good - compile-time fixed size, stack allocated, no reallocation machinery
StaticVector<ServiceRecord, MAX_SERVICES> services;  // Allocates all MAX_SERVICES on stack
services.push_back(record1);  // Tracks count but all slots allocated
```

   Use `cg.add_define("MAX_SERVICES", count)` to set the size from Python configuration.
   Like `std::array`, but with a vector-like API (`push_back()`, `size()`) and no STL reallocation code.

3. **Runtime-known sizes:** Use `FixedVector` from `esphome/core/helpers.h` when the size is only known at runtime initialization.

```cpp
// Bad - generates STL realloc code (_M_realloc_insert)
std::vector<TxtRecord> txt_records;
txt_records.reserve(5);  // Still includes reallocation machinery

// Good - runtime size, single allocation, no reallocation machinery
FixedVector<TxtRecord> txt_records;
txt_records.init(record_count);  // Initialize with exact size at runtime
```

   **Benefits:**
   - Eliminates `_M_realloc_insert`, `_M_default_append` template instantiations (saves 200-500 bytes per instance)
   - Single allocation, no upper bound needed
   - No reallocation overhead
   - Compatible with protobuf code generation when using the `[(fixed_vector) = true]` option

4. **Small datasets (1-16 elements):** Use `std::vector` or `std::array` with simple structs instead of `std::map`/`std::set`/`std::unordered_map`.

```cpp
// Bad - 2KB+ overhead for red-black tree/hash table
std::map<std::string, int> small_lookup;
std::unordered_map<int, std::string> tiny_map;

// Good - simple struct with linear search (std::vector is fine)
struct LookupEntry {
  const char *key;
  int value;
};
std::vector<LookupEntry> small_lookup = {
    {"key1", 10},
    {"key2", 20},
    {"key3", 30},
};
// Or std::array if size is compile-time constant:
// std::array<LookupEntry, 3> small_lookup = {{ ... }};
```

   Linear search on small datasets (1-16 elements) is often faster than hashing/tree overhead, but this depends on lookup frequency and access patterns. For frequent lookups in hot code paths, the O(1) vs O(n) complexity difference may still matter even for small datasets. `std::vector` with simple structs is usually fine; it's the heavy containers (`map`, `set`, `unordered_map`) that should be avoided for small datasets unless profiling shows otherwise.

5. **Detection:** Look for these patterns in compiler output (see the sketch after this list):
   - Large code sections with STL symbols (vector, map, set)
   - `alloc`, `realloc`, `dealloc` in symbol names
   - `_M_realloc_insert`, `_M_default_append` (vector reallocation)
   - Red-black tree code (`rb_tree`, `_Rb_tree`)
   - Hash table infrastructure (`unordered_map`, `hash`)
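One way to surface these symbols, as a sketch only; the `nm` prefix and the ELF path depend on the toolchain and build directory actually in use:

```bash
# Look for STL reallocation and tree/hash machinery in the linked firmware
xtensa-esp32-elf-nm --demangle --print-size --size-sort path/to/firmware.elf \
  | grep -E '_M_realloc_insert|_M_default_append|_Rb_tree|hashtable'
```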
**When to optimize:**
- Core components (API, network, logger)
- Widely-used components (mdns, wifi, ble)
- Components causing flash size complaints

**When not to optimize:**
- Single-use niche components
- Code where readability matters more than bytes
- Already using appropriate containers

* **State Management:** Use `CORE.data` for component state that needs to persist during configuration generation. Avoid module-level mutable globals.

**Bad Pattern (Module-Level Globals):**

```python
# Don't do this - state persists between compilation runs
_component_state = []
_use_feature = None


def enable_feature():
    global _use_feature
    _use_feature = True
```

**Bad Pattern (Flat Keys):**

```python
# Don't do this - keys should be namespaced under component domain
MY_FEATURE_KEY = "my_component_feature"
CORE.data[MY_FEATURE_KEY] = True
```

**Good Pattern (dataclass):**

```python
from dataclasses import dataclass, field

from esphome.core import CORE

DOMAIN = "my_component"


@dataclass
class MyComponentData:
    feature_enabled: bool = False
    item_count: int = 0
    items: list[str] = field(default_factory=list)


def _get_data() -> MyComponentData:
    if DOMAIN not in CORE.data:
        CORE.data[DOMAIN] = MyComponentData()
    return CORE.data[DOMAIN]


def request_feature() -> None:
    _get_data().feature_enabled = True


def add_item(item: str) -> None:
    _get_data().items.append(item)
```

If you need a real-world example, search for components that use `@dataclass` with `CORE.data` in the codebase. Note: some components may use `TypedDict` for dictionary-based storage; both patterns are acceptable depending on your needs.

**Why this matters:**
- Module-level globals persist between compilation runs if the dashboard doesn't fork/exec
- `CORE.data` automatically clears between runs
- Namespacing under `DOMAIN` prevents key collisions between components
- `@dataclass` provides type safety and cleaner attribute access

* **Security:** Be mindful of security when making changes to the API, web server, or any other network-related code. Do not hardcode secrets or keys.

@@ -448,45 +220,3 @@ This document provides essential context for AI models interacting with this project

* **Python:** When adding a new Python dependency, add it to the appropriate `requirements*.txt` file and `pyproject.toml`.
* **C++ / PlatformIO:** When adding a new C++ dependency, add it to `platformio.ini` and use `cg.add_library`.
* **Build Flags:** Use `cg.add_build_flag(...)` to add compiler flags (see the sketch below).
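A hedged sketch of those two calls inside a component's `to_code()`; the library name, version, and flag are placeholders rather than real requirements of any existing component:

```python
import esphome.codegen as cg


async def to_code(config):
    # Pull in a PlatformIO library dependency (name/version are examples only)
    cg.add_library("bblanchon/ArduinoJson", "6.18.5")
    # Add an extra compiler flag for this build
    cg.add_build_flag("-DMY_COMPONENT_USE_FAST_MATH")
```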
## 8. Public API and Breaking Changes

* **Public C++ API:**
  * **Components**: Only documented features at [esphome.io](https://esphome.io) are public API. Undocumented `public` members are internal.
  * **Core/Base Classes** (`esphome/core/`, `Component`, `Sensor`, etc.): All `public` members are public API.
  * **Components with Global Accessors** (`global_api_server`, etc.): All `public` members are public API (except config setters).

* **Public Python API:**
  * All documented configuration options at [esphome.io](https://esphome.io) are public API.
  * Python code in `esphome/core/` actively used by existing core components is considered stable API.
  * Other Python code is internal unless explicitly documented for external component use.

* **Breaking Changes Policy:**
  * Aim for **6-month deprecation window** when possible
  * Clean breaks allowed for: signature changes, deep refactorings, resource constraints
  * Must document migration path in PR description (generates release notes)
  * Blog post required for core/base class changes or significant architectural changes
  * Full details: https://developers.esphome.io/contributing/code/#public-api-and-breaking-changes

* **Breaking Change Checklist:**
  - [ ] Clear justification (RAM/flash savings, architectural improvement)
  - [ ] Explored non-breaking alternatives
  - [ ] Added deprecation warnings if possible (use `ESPDEPRECATED` macro for C++)
  - [ ] Documented migration path in PR description with before/after examples
  - [ ] Updated all internal usage and esphome-docs
  - [ ] Tested backward compatibility during deprecation period

* **Deprecation Pattern (C++):**

```cpp
// Remove before 2026.6.0
ESPDEPRECATED("Use new_method() instead. Removed in 2026.6.0", "2025.12.0")
void old_method() { this->new_method(); }
```

* **Deprecation Pattern (Python):**

```python
# Remove before 2026.6.0
if CONF_OLD_KEY in config:
    _LOGGER.warning(f"'{CONF_OLD_KEY}' deprecated, use '{CONF_NEW_KEY}'. Removed in 2026.6.0")
    config[CONF_NEW_KEY] = config.pop(CONF_OLD_KEY)  # Auto-migrate
```

@@ -1 +1 @@

4268ab0b5150f79ab1c317e8f3834c8bb0b4c8122da4f6b1fd67c49d0f2098c9
4df2fc55e977ba821978fac5f1e721ce2338e23647050b7005b4c801b1770739

@@ -1,5 +1,4 @@

[run]
omit =
    esphome/components/*
    esphome/analyze_memory/*
    tests/integration/*
1
.github/PULL_REQUEST_TEMPLATE.md
vendored

@@ -7,7 +7,6 @@

- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Developer breaking change (an API change that could break external components)
- [ ] Code quality improvements to existing code or addition of tests
- [ ] Other
4
.github/actions/build-image/action.yaml
vendored
@@ -47,7 +47,7 @@ runs:
|
||||
|
||||
- name: Build and push to ghcr by digest
|
||||
id: build-ghcr
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6.18.0
|
||||
env:
|
||||
DOCKER_BUILD_SUMMARY: false
|
||||
DOCKER_BUILD_RECORD_UPLOAD: false
|
||||
@@ -73,7 +73,7 @@ runs:
|
||||
|
||||
- name: Build and push to dockerhub by digest
|
||||
id: build-dockerhub
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6.18.0
|
||||
env:
|
||||
DOCKER_BUILD_SUMMARY: false
|
||||
DOCKER_BUILD_RECORD_UPLOAD: false
|
||||
|
||||
4
.github/actions/restore-python/action.yml
vendored
@@ -17,12 +17,12 @@ runs:
|
||||
steps:
|
||||
- name: Set up Python ${{ inputs.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: actions/cache/restore@v4.2.3
|
||||
with:
|
||||
path: venv
|
||||
# yamllint disable-line rule:line-length
|
||||
|
||||
102
.github/workflows/auto-label-pr.yml
vendored
@@ -22,17 +22,17 @@ jobs:
|
||||
if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ secrets.ESPHOME_GITHUB_APP_ID }}
|
||||
private-key: ${{ secrets.ESPHOME_GITHUB_APP_PRIVATE_KEY }}
|
||||
|
||||
- name: Auto Label PR
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7.0.1
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
@@ -53,7 +53,6 @@ jobs:
|
||||
'new-target-platform',
|
||||
'merging-to-release',
|
||||
'merging-to-beta',
|
||||
'chained-pr',
|
||||
'core',
|
||||
'small-pr',
|
||||
'dashboard',
|
||||
@@ -64,12 +63,7 @@ jobs:
|
||||
'needs-docs',
|
||||
'needs-codeowners',
|
||||
'too-big',
|
||||
'labeller-recheck',
|
||||
'bugfix',
|
||||
'new-feature',
|
||||
'breaking-change',
|
||||
'developer-breaking-change',
|
||||
'code-quality'
|
||||
'labeller-recheck'
|
||||
];
|
||||
|
||||
const DOCS_PR_PATTERNS = [
|
||||
@@ -107,9 +101,7 @@ jobs:
|
||||
|
||||
// Calculate data from PR files
|
||||
const changedFiles = prFiles.map(file => file.filename);
|
||||
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
const totalChanges = totalAdditions + totalDeletions;
|
||||
const totalChanges = prFiles.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
|
||||
|
||||
console.log('Current labels:', currentLabels.join(', '));
|
||||
console.log('Changed files:', changedFiles.length);
|
||||
@@ -142,8 +134,6 @@ jobs:
|
||||
labels.add('merging-to-release');
|
||||
} else if (baseRef === 'beta') {
|
||||
labels.add('merging-to-beta');
|
||||
} else if (baseRef !== 'dev') {
|
||||
labels.add('chained-pr');
|
||||
}
|
||||
|
||||
return labels;
|
||||
@@ -237,21 +227,16 @@ jobs:
|
||||
// Strategy: PR size detection
|
||||
async function detectPRSize() {
|
||||
const labels = new Set();
|
||||
const testChanges = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
|
||||
|
||||
const nonTestChanges = totalChanges - testChanges;
|
||||
|
||||
if (totalChanges <= SMALL_PR_THRESHOLD) {
|
||||
labels.add('small-pr');
|
||||
return labels;
|
||||
}
|
||||
|
||||
const testAdditions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const testDeletions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
|
||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
||||
|
||||
// Don't add too-big if mega-pr label is already present
|
||||
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
|
||||
labels.add('too-big');
|
||||
@@ -356,43 +341,17 @@ jobs:
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: PR Template Checkbox detection
|
||||
async function detectPRTemplateCheckboxes() {
|
||||
const labels = new Set();
|
||||
const prBody = context.payload.pull_request.body || '';
|
||||
|
||||
console.log('Checking PR template checkboxes...');
|
||||
|
||||
// Check for checked checkboxes in the "Types of changes" section
|
||||
const checkboxPatterns = [
|
||||
{ pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
|
||||
{ pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
|
||||
{ pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
|
||||
{ pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
|
||||
{ pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
|
||||
];
|
||||
|
||||
for (const { pattern, label } of checkboxPatterns) {
|
||||
if (pattern.test(prBody)) {
|
||||
console.log(`Found checked checkbox for: ${label}`);
|
||||
labels.add(label);
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
// Strategy: Requirements detection
|
||||
async function detectRequirements(allLabels) {
|
||||
const labels = new Set();
|
||||
|
||||
// Check for missing tests
|
||||
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
|
||||
if ((allLabels.has('new-component') || allLabels.has('new-platform')) && !allLabels.has('has-tests')) {
|
||||
labels.add('needs-tests');
|
||||
}
|
||||
|
||||
// Check for missing docs
|
||||
if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
|
||||
if (allLabels.has('new-component') || allLabels.has('new-platform')) {
|
||||
const prBody = context.payload.pull_request.body || '';
|
||||
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
|
||||
|
||||
@@ -418,29 +377,26 @@ jobs:
|
||||
}
|
||||
|
||||
// Generate review messages
|
||||
function generateReviewMessages(finalLabels, originalLabelCount) {
|
||||
function generateReviewMessages(finalLabels) {
|
||||
const messages = [];
|
||||
const prAuthor = context.payload.pull_request.user.login;
|
||||
|
||||
// Too big message
|
||||
if (finalLabels.includes('too-big')) {
|
||||
const testAdditions = prFiles
|
||||
const testChanges = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
||||
const testDeletions = prFiles
|
||||
.filter(file => file.filename.startsWith('tests/'))
|
||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
||||
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
|
||||
const nonTestChanges = totalChanges - testChanges;
|
||||
|
||||
const tooManyLabels = originalLabelCount > MAX_LABELS;
|
||||
const tooManyLabels = finalLabels.length > MAX_LABELS;
|
||||
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
|
||||
|
||||
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
|
||||
|
||||
if (tooManyLabels && tooManyChanges) {
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${finalLabels.length} different components/areas.`;
|
||||
} else if (tooManyLabels) {
|
||||
message += `This PR affects ${originalLabelCount} different components/areas.`;
|
||||
message += `This PR affects ${finalLabels.length} different components/areas.`;
|
||||
} else {
|
||||
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
|
||||
}
|
||||
@@ -468,8 +424,8 @@ jobs:
|
||||
}
|
||||
|
||||
// Handle reviews
|
||||
async function handleReviews(finalLabels, originalLabelCount) {
|
||||
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
|
||||
async function handleReviews(finalLabels) {
|
||||
const reviewMessages = generateReviewMessages(finalLabels);
|
||||
const hasReviewableLabels = finalLabels.some(label =>
|
||||
['too-big', 'needs-codeowners'].includes(label)
|
||||
);
|
||||
@@ -533,8 +489,8 @@ jobs:
|
||||
const apiData = await fetchApiData();
|
||||
const baseRef = context.payload.pull_request.base.ref;
|
||||
|
||||
// Early exit for release and beta branches only
|
||||
if (baseRef === 'release' || baseRef === 'beta') {
|
||||
// Early exit for non-dev branches
|
||||
if (baseRef !== 'dev') {
|
||||
const branchLabels = await detectMergeBranch();
|
||||
const finalLabels = Array.from(branchLabels);
|
||||
|
||||
@@ -579,8 +535,7 @@ jobs:
|
||||
dashboardLabels,
|
||||
actionsLabels,
|
||||
codeOwnerLabels,
|
||||
testLabels,
|
||||
checkboxLabels
|
||||
testLabels
|
||||
] = await Promise.all([
|
||||
detectMergeBranch(),
|
||||
detectComponentPlatforms(apiData),
|
||||
@@ -591,8 +546,7 @@ jobs:
|
||||
detectDashboardChanges(),
|
||||
detectGitHubActionsChanges(),
|
||||
detectCodeOwner(),
|
||||
detectTests(),
|
||||
detectPRTemplateCheckboxes()
|
||||
detectTests()
|
||||
]);
|
||||
|
||||
// Combine all labels
|
||||
@@ -606,8 +560,7 @@ jobs:
|
||||
...dashboardLabels,
|
||||
...actionsLabels,
|
||||
...codeOwnerLabels,
|
||||
...testLabels,
|
||||
...checkboxLabels
|
||||
...testLabels
|
||||
]);
|
||||
|
||||
// Detect requirements based on all other labels
|
||||
@@ -629,7 +582,6 @@ jobs:
|
||||
|
||||
// Handle too many labels (only for non-mega PRs)
|
||||
const tooManyLabels = finalLabels.length > MAX_LABELS;
|
||||
const originalLabelCount = finalLabels.length;
|
||||
|
||||
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
|
||||
finalLabels = ['too-big'];
|
||||
@@ -638,7 +590,7 @@ jobs:
|
||||
console.log('Computed labels:', finalLabels.join(', '));
|
||||
|
||||
// Handle reviews
|
||||
await handleReviews(finalLabels, originalLabelCount);
|
||||
await handleReviews(finalLabels);
|
||||
|
||||
// Apply labels
|
||||
if (finalLabels.length > 0) {
|
||||
|
||||
10
.github/workflows/ci-api-proto.yml
vendored
@@ -21,9 +21,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
@@ -47,7 +47,7 @@ jobs:
|
||||
fi
|
||||
- if: failure()
|
||||
name: Review PR
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7.0.1
|
||||
with:
|
||||
script: |
|
||||
await github.rest.pulls.createReview({
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
run: git diff
|
||||
- if: failure()
|
||||
name: Archive artifacts
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@v4.6.2
|
||||
with:
|
||||
name: generated-proto-files
|
||||
path: |
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
esphome/components/api/api_pb2_service.*
|
||||
- if: success()
|
||||
name: Dismiss review
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7.0.1
|
||||
with:
|
||||
script: |
|
||||
let reviews = await github.rest.pulls.listReviews({
|
||||
|
||||
9
.github/workflows/ci-clang-tidy-hash.yml
vendored
@@ -6,7 +6,6 @@ on:
|
||||
- ".clang-tidy"
|
||||
- "platformio.ini"
|
||||
- "requirements_dev.txt"
|
||||
- "sdkconfig.defaults"
|
||||
- ".clang-tidy.hash"
|
||||
- "script/clang_tidy_hash.py"
|
||||
- ".github/workflows/ci-clang-tidy-hash.yml"
|
||||
@@ -21,10 +20,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
||||
@@ -42,7 +41,7 @@ jobs:
|
||||
|
||||
- if: failure()
|
||||
name: Request changes
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7.0.1
|
||||
with:
|
||||
script: |
|
||||
await github.rest.pulls.createReview({
|
||||
@@ -55,7 +54,7 @@ jobs:
|
||||
|
||||
- if: success()
|
||||
name: Dismiss review
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7.0.1
|
||||
with:
|
||||
script: |
|
||||
let reviews = await github.rest.pulls.listReviews({
|
||||
|
||||
6
.github/workflows/ci-docker.yml
vendored
@@ -43,13 +43,13 @@ jobs:
|
||||
- "docker"
|
||||
# - "lint"
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- uses: actions/checkout@v4.2.2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@v3.11.1
|
||||
|
||||
- name: Set TAG
|
||||
run: |
|
||||
|
||||
111
.github/workflows/ci-memory-impact-comment.yml
vendored
@@ -1,111 +0,0 @@
|
||||
---
|
||||
name: Memory Impact Comment (Forks)
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["CI"]
|
||||
types: [completed]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
memory-impact-comment:
|
||||
name: Post memory impact comment (fork PRs only)
|
||||
runs-on: ubuntu-24.04
|
||||
# Only run for PRs from forks that had successful CI runs
|
||||
if: >
|
||||
github.event.workflow_run.event == 'pull_request' &&
|
||||
github.event.workflow_run.conclusion == 'success' &&
|
||||
github.event.workflow_run.head_repository.full_name != github.repository
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- name: Get PR details
|
||||
id: pr
|
||||
run: |
|
||||
# Get PR details by searching for PR with matching head SHA
|
||||
# The workflow_run.pull_requests field is often empty for forks
|
||||
# Use paginate to handle repos with many open PRs
|
||||
head_sha="${{ github.event.workflow_run.head_sha }}"
|
||||
pr_data=$(gh api --paginate "/repos/${{ github.repository }}/pulls" \
|
||||
--jq ".[] | select(.head.sha == \"$head_sha\") | {number: .number, base_ref: .base.ref}" \
|
||||
| head -n 1)
|
||||
|
||||
if [ -z "$pr_data" ]; then
|
||||
echo "No PR found for SHA $head_sha, skipping"
|
||||
echo "skip=true" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
pr_number=$(echo "$pr_data" | jq -r '.number')
|
||||
base_ref=$(echo "$pr_data" | jq -r '.base_ref')
|
||||
|
||||
echo "pr_number=$pr_number" >> "$GITHUB_OUTPUT"
|
||||
echo "base_ref=$base_ref" >> "$GITHUB_OUTPUT"
|
||||
echo "Found PR #$pr_number targeting base branch: $base_ref"
|
||||
|
||||
- name: Check out code from base repository
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
# Always check out from the base repository (esphome/esphome), never from forks
|
||||
# Use the PR's target branch to ensure we run trusted code from the main repo
|
||||
repository: ${{ github.repository }}
|
||||
ref: ${{ steps.pr.outputs.base_ref }}
|
||||
|
||||
- name: Restore Python
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: "3.11"
|
||||
cache-key: ${{ hashFiles('.cache-key') }}
|
||||
|
||||
- name: Download memory analysis artifacts
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
run: |
|
||||
run_id="${{ github.event.workflow_run.id }}"
|
||||
echo "Downloading artifacts from workflow run $run_id"
|
||||
|
||||
mkdir -p memory-analysis
|
||||
|
||||
# Download target analysis artifact
|
||||
if gh run download --name "memory-analysis-target" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
|
||||
echo "Downloaded memory-analysis-target artifact."
|
||||
else
|
||||
echo "No memory-analysis-target artifact found."
|
||||
fi
|
||||
|
||||
# Download PR analysis artifact
|
||||
if gh run download --name "memory-analysis-pr" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then
|
||||
echo "Downloaded memory-analysis-pr artifact."
|
||||
else
|
||||
echo "No memory-analysis-pr artifact found."
|
||||
fi
|
||||
|
||||
- name: Check if artifacts exist
|
||||
id: check
|
||||
if: steps.pr.outputs.skip != 'true'
|
||||
run: |
|
||||
if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then
|
||||
echo "found=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "found=false" >> "$GITHUB_OUTPUT"
|
||||
echo "Memory analysis artifacts not found, skipping comment"
|
||||
fi
|
||||
|
||||
- name: Post or update PR comment
|
||||
if: steps.pr.outputs.skip != 'true' && steps.check.outputs.found == 'true'
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.pr.outputs.pr_number }}
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
# Pass PR number and JSON file paths directly to Python script
|
||||
# Let Python parse the JSON to avoid shell injection risks
|
||||
# The script will validate and sanitize all inputs
|
||||
python script/ci_memory_impact_comment.py \
|
||||
--pr-number "$PR_NUMBER" \
|
||||
--target-json ./memory-analysis/memory-analysis-target.json \
|
||||
--pr-json ./memory-analysis/memory-analysis-pr.json
|
||||
687
.github/workflows/ci.yml
vendored
@@ -36,18 +36,18 @@ jobs:
|
||||
cache-key: ${{ steps.cache-key.outputs.key }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Generate cache-key
|
||||
id: cache-key
|
||||
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: actions/cache@v4.2.3
|
||||
with:
|
||||
path: venv
|
||||
# yamllint disable-line rule:line-length
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
if: needs.determine-jobs.outputs.python-linters == 'true'
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
@@ -91,7 +91,7 @@ jobs:
|
||||
- common
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
@@ -105,7 +105,6 @@ jobs:
|
||||
script/ci-custom.py
|
||||
script/build_codeowners.py --check
|
||||
script/build_language_schema.py --check
|
||||
script/generate-esp32-boards.py --check
|
||||
|
||||
pytest:
|
||||
name: Run pytest
|
||||
@@ -114,6 +113,7 @@ jobs:
|
||||
matrix:
|
||||
python-version:
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
- "3.13"
|
||||
os:
|
||||
- ubuntu-latest
|
||||
@@ -125,14 +125,18 @@ jobs:
|
||||
# version used for docker images on Windows and macOS
|
||||
- python-version: "3.13"
|
||||
os: windows-latest
|
||||
- python-version: "3.12"
|
||||
os: windows-latest
|
||||
- python-version: "3.13"
|
||||
os: macOS-latest
|
||||
- python-version: "3.12"
|
||||
os: macOS-latest
|
||||
runs-on: ${{ matrix.os }}
|
||||
needs:
|
||||
- common
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Restore Python
|
||||
id: restore-python
|
||||
uses: ./.github/actions/restore-python
|
||||
@@ -152,12 +156,12 @@ jobs:
|
||||
. venv/bin/activate
|
||||
pytest -vv --cov-report=xml --tb=native -n auto tests --ignore=tests/integration/
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
uses: codecov/codecov-action@v5.4.3
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Save Python virtual environment cache
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: actions/cache/save@v4.2.3
|
||||
with:
|
||||
path: venv
|
||||
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
|
||||
@@ -170,20 +174,12 @@ jobs:
|
||||
outputs:
|
||||
integration-tests: ${{ steps.determine.outputs.integration-tests }}
|
||||
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
|
||||
clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }}
|
||||
python-linters: ${{ steps.determine.outputs.python-linters }}
|
||||
changed-components: ${{ steps.determine.outputs.changed-components }}
|
||||
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
|
||||
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
|
||||
component-test-count: ${{ steps.determine.outputs.component-test-count }}
|
||||
changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }}
|
||||
memory_impact: ${{ steps.determine.outputs.memory-impact }}
|
||||
cpp-unit-tests-run-all: ${{ steps.determine.outputs.cpp-unit-tests-run-all }}
|
||||
cpp-unit-tests-components: ${{ steps.determine.outputs.cpp-unit-tests-components }}
|
||||
component-test-batches: ${{ steps.determine.outputs.component-test-batches }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
with:
|
||||
# Fetch enough history to find the merge base
|
||||
fetch-depth: 2
|
||||
@@ -192,11 +188,6 @@ jobs:
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
- name: Restore components graph cache
|
||||
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: .temp/components_graph.json
|
||||
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
|
||||
- name: Determine which tests to run
|
||||
id: determine
|
||||
env:
|
||||
@@ -210,23 +201,9 @@ jobs:
|
||||
# Extract individual fields
|
||||
echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT
|
||||
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
|
||||
echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT
|
||||
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
|
||||
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
|
||||
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
|
||||
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
|
||||
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
|
||||
echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT
|
||||
echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT
|
||||
echo "cpp-unit-tests-run-all=$(echo "$output" | jq -r '.cpp_unit_tests_run_all')" >> $GITHUB_OUTPUT
|
||||
echo "cpp-unit-tests-components=$(echo "$output" | jq -c '.cpp_unit_tests_components')" >> $GITHUB_OUTPUT
|
||||
echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
|
||||
- name: Save components graph cache
|
||||
if: github.ref == 'refs/heads/dev'
|
||||
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: .temp/components_graph.json
|
||||
key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
|
||||
|
||||
integration-tests:
|
||||
name: Run integration tests
|
||||
@@ -237,15 +214,15 @@ jobs:
|
||||
if: needs.determine-jobs.outputs.integration-tests == 'true'
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Set up Python 3.13
|
||||
id: python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@v5.6.0
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: actions/cache@v4.2.3
|
||||
with:
|
||||
path: venv
|
||||
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
|
||||
@@ -264,34 +241,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
pytest -vv --no-cov --tb=native -n auto tests/integration/
|
||||
|
||||
cpp-unit-tests:
|
||||
name: Run C++ unit tests
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- common
|
||||
- determine-jobs
|
||||
if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]')
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Restore Python
|
||||
uses: ./.github/actions/restore-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
cache-key: ${{ needs.common.outputs.cache-key }}
|
||||
|
||||
- name: Run cpp_unit_test.py
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
if [ "${{ needs.determine-jobs.outputs.cpp-unit-tests-run-all }}" = "true" ]; then
|
||||
script/cpp_unit_test.py --all
|
||||
else
|
||||
ARGS=$(echo '${{ needs.determine-jobs.outputs.cpp-unit-tests-components }}' | jq -r '.[] | @sh' | xargs)
|
||||
script/cpp_unit_test.py $ARGS
|
||||
fi
|
||||
|
||||
clang-tidy-single:
|
||||
clang-tidy:
|
||||
name: ${{ matrix.name }}
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
@@ -309,19 +259,35 @@ jobs:
|
||||
name: Run script/clang-tidy for ESP8266
|
||||
options: --environment esp8266-arduino-tidy --grep USE_ESP8266
|
||||
pio_cache_key: tidyesp8266
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 1/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 2/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 3/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 Arduino 4/4
|
||||
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
|
||||
pio_cache_key: tidyesp32
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ESP32 IDF
|
||||
options: --environment esp32-idf-tidy --grep USE_ESP_IDF
|
||||
pio_cache_key: tidyesp32-idf
|
||||
- id: clang-tidy
|
||||
name: Run script/clang-tidy for ZEPHYR
|
||||
options: --environment nrf52-tidy --grep USE_ZEPHYR --grep USE_NRF52
|
||||
options: --environment nrf52-tidy --grep USE_ZEPHYR
pio_cache_key: tidy-zephyr
ignore_errors: false

steps:
- name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@v4.2.2
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -334,14 +300,14 @@ jobs:

- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/cache@v4.2.3
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/cache/restore@v4.2.3
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -389,556 +355,122 @@ jobs:
# yamllint disable-line rule:line-length
if: always()

clang-tidy-nosplit:
name: Run script/clang-tidy for ESP32 Arduino
test-build-components:
name: Component test ${{ matrix.file }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit'
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2

- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"

- name: Check if full clang-tidy scan needed
id: check_full_scan
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi

- name: Run clang-tidy
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps

- name: Suggested changes
run: script/ci-suggest-changes
if: always()

clang-tidy-split:
name: ${{ matrix.name }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: needs.determine-jobs.outputs.clang-tidy-mode == 'split'
env:
GH_TOKEN: ${{ github.token }}
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 && fromJSON(needs.determine-jobs.outputs.component-test-count) < 100
strategy:
fail-fast: false
max-parallel: 2
matrix:
include:
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 1/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 2/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 3/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
- id: clang-tidy
name: Run script/clang-tidy for ESP32 Arduino 4/4
options: --environment esp32-arduino-tidy --split-num 4 --split-at 4

file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install libsdl2-dev

- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/gcc.json"
echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"

- name: Check if full clang-tidy scan needed
id: check_full_scan
- name: test_build_components -e config -c ${{ matrix.file }}
run: |
. venv/bin/activate
if python script/clang_tidy_hash.py --check; then
echo "full_scan=true" >> $GITHUB_OUTPUT
echo "reason=hash_changed" >> $GITHUB_OUTPUT
else
echo "full_scan=false" >> $GITHUB_OUTPUT
echo "reason=normal" >> $GITHUB_OUTPUT
fi

- name: Run clang-tidy
./script/test_build_components -e config -c ${{ matrix.file }}
- name: test_build_components -e compile -c ${{ matrix.file }}
run: |
. venv/bin/activate
if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then
echo "Running FULL clang-tidy scan (hash changed)"
script/clang-tidy --all-headers --fix ${{ matrix.options }}
else
echo "Running clang-tidy on changed files only"
script/clang-tidy --all-headers --fix --changed ${{ matrix.options }}
fi
env:
# Also cache libdeps, store them in a ~/.platformio subfolder
PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
./script/test_build_components -e compile -c ${{ matrix.file }}

- name: Suggested changes
run: script/ci-suggest-changes
if: always()

test-build-components-split:
name: Test components batch (${{ matrix.components }})
test-build-components-splitter:
name: Split components for testing into 20 groups maximum
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
outputs:
matrix: ${{ steps.split.outputs.components }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Split components into 20 groups
id: split
run: |
components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
echo "components=$components" >> $GITHUB_OUTPUT

test-build-components-split:
name: Test split components
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
- test-build-components-splitter
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
strategy:
fail-fast: false
max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }}
max-parallel: 4
matrix:
components: ${{ fromJson(needs.determine-jobs.outputs.component-test-batches) }}
components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
steps:
- name: Show disk space
run: |
echo "Available disk space:"
df -h

- name: List components
run: echo ${{ matrix.components }}

- name: Cache apt packages
uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
with:
packages: libsdl2-dev
version: 1.0
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install libsdl2-dev

- name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Validate and compile components with intelligent grouping
- name: Validate config
run: |
. venv/bin/activate

# Check if /mnt has more free space than / before bind mounting
# Extract available space in KB for comparison
root_avail=$(df -k / | awk 'NR==2 {print $4}')
mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}')

echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB"

# Only use /mnt if it has more space than /
if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then
echo "Using /mnt for build files (more space available)"
# Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
sudo mkdir -p /mnt/platformio
sudo chown $USER:$USER /mnt/platformio
mkdir -p ~/.platformio
sudo mount --bind /mnt/platformio ~/.platformio

# Bind mount test build directory to /mnt
sudo mkdir -p /mnt/test_build_components_build
sudo chown $USER:$USER /mnt/test_build_components_build
mkdir -p tests/test_build_components/build
sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
else
echo "Using / for build files (more space available than /mnt or /mnt unavailable)"
fi

# Convert space-separated components to comma-separated for Python script
components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')

# Only isolate directly changed components when targeting dev branch
# For beta/release branches, group everything for faster CI
#
# WHY ISOLATE DIRECTLY CHANGED COMPONENTS?
# - Isolated tests run WITHOUT --testing-mode, enabling full validation
# - This catches pin conflicts and other issues in directly changed code
# - Grouped tests use --testing-mode to allow config merging (disables some checks)
# - Dependencies are safe to group since they weren't modified in this PR
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
directly_changed_csv=""
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - grouping all components"
else
directly_changed_csv=$(echo '${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}' | jq -r 'join(",")')
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components: $directly_changed_csv"
fi
echo ""

# Show disk space before validation (after bind mounts setup)
echo "Disk space before config validation:"
df -h
echo ""

# Run config validation with grouping and isolation
python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"

echo ""
echo "Config validation passed! Starting compilation..."
echo ""

# Show disk space before compilation
echo "Disk space before compilation:"
df -h
echo ""

# Run compilation with grouping and isolation
python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"
for component in ${{ matrix.components }}; do
./script/test_build_components -e config -c $component
done
- name: Compile config
run: |
. venv/bin/activate
mkdir build_cache
export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
for component in ${{ matrix.components }}; do
./script/test_build_components -e compile -c $component
done
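
Both the grouped and the older per-component variants above start from the space-separated batch in `${{ matrix.components }}`. The grouped path converts it to a comma-separated list for `script/test_build_components.py` and, on dev-targeted PRs, passes the directly changed components separately so they are validated in isolation. A rough sketch of that shell plumbing with made-up component names:

```bash
# Hypothetical batch handed to one matrix job
components="wifi api logger"

# Comma-separate the batch for the Python test runner
components_csv=$(echo "$components" | tr ' ' ',')              # wifi,api,logger

# The directly changed set arrives as JSON and is joined the same way
directly_changed_csv=$(echo '["wifi"]' | jq -r 'join(",")')    # wifi

echo "grouped: $components_csv  isolated: $directly_changed_csv"
```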

pre-commit-ci-lite:
name: pre-commit.ci lite
runs-on: ubuntu-latest
needs:
- common
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
steps:
- name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@v4.2.2
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- uses: esphome/pre-commit-action@43cd1109c09c544d97196f7730ee5b2e0cc6d81e # v3.0.1 fork with pinned actions/cache
- uses: pre-commit/action@v3.0.1
env:
SKIP: pylint,clang-tidy-hash
- uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
- uses: pre-commit-ci/lite-action@v1.1.0
if: always()

memory-impact-target-branch:
name: Build target branch for memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
outputs:
ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }}
cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }}
skip: ${{ steps.check-script.outputs.skip }}
steps:
- name: Check out target branch
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.base_ref }}

# Check if memory impact extraction script exists on target branch
# If not, skip the analysis (this handles older branches that don't have the feature)
- name: Check for memory impact script
id: check-script
run: |
if [ -f "script/ci_memory_impact_extract.py" ]; then
echo "skip=false" >> $GITHUB_OUTPUT
else
echo "skip=true" >> $GITHUB_OUTPUT
echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis"
fi

# All remaining steps only run if script exists
- name: Generate cache key
id: cache-key
if: steps.check-script.outputs.skip != 'true'
run: |
# Get the commit SHA of the target branch
target_sha=$(git rev-parse HEAD)

# Hash the build infrastructure files (all files that affect build/analysis)
infra_hash=$(cat \
script/test_build_components.py \
script/ci_memory_impact_extract.py \
script/analyze_component_buses.py \
script/merge_component_configs.py \
script/ci_helpers.py \
.github/workflows/ci.yml \
| sha256sum | cut -d' ' -f1)

# Get platform and components from job inputs
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1)

# Combine into cache key
cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}"
echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT
echo "Cache key: ${cache_key}"

- name: Restore cached memory analysis
id: cache-memory-analysis
if: steps.check-script.outputs.skip != 'true'
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}

- name: Cache status
if: steps.check-script.outputs.skip != 'true'
run: |
if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then
echo "✓ Cache hit! Using cached memory analysis results."
echo " Skipping build step to save time."
else
echo "✗ Cache miss. Will build and analyze memory usage."
fi

- name: Restore Python
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}

- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}

- name: Build, compile, and analyze memory
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
id: build
run: |
. venv/bin/activate
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"

echo "Building with test_build_components.py for $platform with components:"
echo "$components" | jq -r '.[]' | sed 's/^/ - /'

# Use test_build_components.py which handles grouping automatically
# Pass components as comma-separated list
component_list=$(echo "$components" | jq -r 'join(",")')

echo "Compiling with test_build_components.py..."

# Run build and extract memory with auto-detection of build directory for detailed analysis
# Use tee to show output in CI while also piping to extraction script
python script/test_build_components.py \
-e compile \
-c "$component_list" \
-t "$platform" 2>&1 | \
tee /dev/stderr | \
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-target.json

# Add metadata to JSON before caching
python script/ci_add_metadata_to_json.py \
--json-file memory-analysis-target.json \
--components "$components" \
--platform "$platform"

- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}

- name: Extract memory usage for outputs
id: extract
if: steps.check-script.outputs.skip != 'true'
run: |
if [ -f memory-analysis-target.json ]; then
ram=$(jq -r '.ram_bytes' memory-analysis-target.json)
flash=$(jq -r '.flash_bytes' memory-analysis-target.json)
echo "ram_usage=${ram}" >> $GITHUB_OUTPUT
echo "flash_usage=${flash}" >> $GITHUB_OUTPUT
echo "RAM: ${ram} bytes, Flash: ${flash} bytes"
else
echo "Error: memory-analysis-target.json not found"
exit 1
fi

- name: Upload memory analysis JSON
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: memory-analysis-target
path: memory-analysis-target.json
if-no-files-found: warn
retention-days: 1

memory-impact-pr-branch:
name: Build PR branch for memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
outputs:
ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }}
steps:
- name: Check out PR branch
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
- name: Build, compile, and analyze memory
id: extract
run: |
. venv/bin/activate
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"

echo "Building with test_build_components.py for $platform with components:"
echo "$components" | jq -r '.[]' | sed 's/^/ - /'

# Use test_build_components.py which handles grouping automatically
# Pass components as comma-separated list
component_list=$(echo "$components" | jq -r 'join(",")')

echo "Compiling with test_build_components.py..."

# Run build and extract memory with auto-detection of build directory for detailed analysis
# Use tee to show output in CI while also piping to extraction script
python script/test_build_components.py \
-e compile \
-c "$component_list" \
-t "$platform" 2>&1 | \
tee /dev/stderr | \
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-pr.json

# Add metadata to JSON (components and platform are in shell variables above)
python script/ci_add_metadata_to_json.py \
--json-file memory-analysis-pr.json \
--components "$components" \
--platform "$platform"

- name: Upload memory analysis JSON
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: memory-analysis-pr
path: memory-analysis-pr.json
if-no-files-found: warn
retention-days: 1

memory-impact-comment:
name: Comment memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
- memory-impact-target-branch
- memory-impact-pr-branch
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
permissions:
contents: read
pull-requests: write
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Check out code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Download target analysis JSON
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: memory-analysis-target
path: ./memory-analysis
continue-on-error: true
- name: Download PR analysis JSON
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: memory-analysis-pr
path: ./memory-analysis
continue-on-error: true
- name: Post or update PR comment
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
. venv/bin/activate

# Pass JSON file paths directly to Python script
# All data is extracted from JSON files for security
python script/ci_memory_impact_comment.py \
--pr-number "$PR_NUMBER" \
--target-json ./memory-analysis/memory-analysis-target.json \
--pr-json ./memory-analysis/memory-analysis-pr.json
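
`ci_memory_impact_comment.py` receives both JSON reports; this diff does not show its internals, but the heart of such a comparison is a delta between the two files written by the extract steps (`ram_bytes` / `flash_bytes`). An illustrative jq version with fabricated numbers:

```bash
# Fabricated reports in the shape the extract step above reads
echo '{"ram_bytes": 40000, "flash_bytes": 900000}' > memory-analysis-target.json
echo '{"ram_bytes": 40512, "flash_bytes": 901024}' > memory-analysis-pr.json

jq -n --slurpfile t memory-analysis-target.json --slurpfile p memory-analysis-pr.json \
  '{ram_delta: ($p[0].ram_bytes - $t[0].ram_bytes),
    flash_delta: ($p[0].flash_bytes - $t[0].flash_bytes)}'
# {"ram_delta": 512, "flash_delta": 1024}
```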

ci-status:
name: CI Status
runs-on: ubuntu-24.04
@@ -948,24 +480,21 @@ jobs:
- pylint
- pytest
- integration-tests
- clang-tidy-single
- clang-tidy-nosplit
- clang-tidy-split
- clang-tidy
- determine-jobs
- test-build-components
- test-build-components-splitter
- test-build-components-split
- pre-commit-ci-lite
- memory-impact-target-branch
- memory-impact-pr-branch
- memory-impact-comment
if: always()
steps:
- name: Check job results
- name: Success
if: ${{ !(contains(needs.*.result, 'failure')) }}
run: exit 0
- name: Failure
if: ${{ contains(needs.*.result, 'failure') }}
env:
NEEDS_JSON: ${{ toJSON(needs) }}
JSON_DOC: ${{ toJSON(needs) }}
run: |
# memory-impact-target-branch is allowed to fail without blocking CI.
# This job builds the target branch (dev/beta/release) which may fail because:
# 1. The target branch has a build issue independent of this PR
# 2. This PR fixes a build issue on the target branch
# In either case, we only care that the PR branch builds successfully.
echo "$NEEDS_JSON" | jq -e 'del(.["memory-impact-target-branch"]) | all(.result != "failure")'
echo $JSON_DOC | jq
exit 1
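
The failure branch above excludes `memory-impact-target-branch` before deciding whether CI really failed, exactly as its comment explains. A toy run of the same jq filter against a fabricated `needs` object:

```bash
NEEDS_JSON='{"pytest":{"result":"success"},"memory-impact-target-branch":{"result":"failure"}}'

# Drop the job that is allowed to fail, then require every remaining
# job result to be something other than "failure".
echo "$NEEDS_JSON" | jq -e 'del(.["memory-impact-target-branch"]) | all(.result != "failure")'
# Prints "true" and exits 0, so only real failures trip this step.
```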

@@ -21,11 +21,11 @@ permissions:
jobs:
request-codeowner-reviews:
name: Run
if: ${{ github.repository == 'esphome/esphome' && !github.event.pull_request.draft }}
if: ${{ !github.event.pull_request.draft }}
runs-on: ubuntu-latest
steps:
- name: Request reviews from component codeowners
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
const owner = context.repo.owner;

.github/workflows/codeql.yml (6 changes, vendored)
@@ -54,11 +54,11 @@ jobs:
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

.github/workflows/external-component-bot.yml (2 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Add external component comment
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

.github/workflows/issue-codeowner-notify.yml (2 changes, vendored)
@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Notify codeowners for component issues
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
const owner = context.repo.owner;

.github/workflows/needs-docs.yml (24 changes, vendored, normal file)
@@ -0,0 +1,24 @@
name: Needs Docs

on:
pull_request:
types: [labeled, unlabeled]

jobs:
check:
name: Check
runs-on: ubuntu-latest
steps:
- name: Check for needs-docs label
uses: actions/github-script@v7.0.1
with:
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number
});
const needsDocs = labels.find(label => label.name === 'needs-docs');
if (needsDocs) {
core.setFailed('Pull request needs docs');
}
.github/workflows/release.yml (87 changes, vendored)
@@ -20,7 +20,7 @@ jobs:
branch_build: ${{ steps.tag.outputs.branch_build }}
deploy_env: ${{ steps.tag.outputs.deploy_env }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@v4.2.2
- name: Get tag
id: tag
# yamllint disable rule:line-length
@@ -60,9 +60,9 @@ jobs:
contents: read
id-token: write
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@v4.2.2
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.x"
- name: Build
@@ -70,7 +70,7 @@ jobs:
pip3 install build
python3 -m build
- name: Publish
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
uses: pypa/gh-action-pypi-publish@v1.12.4
with:
skip-existing: true

@@ -92,22 +92,22 @@ jobs:
os: "ubuntu-24.04-arm"

steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@v4.2.2
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.11"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@v3.11.1

- name: Log in to docker hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.4.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.4.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -138,7 +138,7 @@ jobs:
# version: ${{ needs.init.outputs.tag }}

- name: Upload digests
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@v4.6.2
with:
name: digests-${{ matrix.platform.arch }}
path: /tmp/digests
@@ -168,27 +168,27 @@ jobs:
- ghcr
- dockerhub
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@v4.2.2

- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@v4.3.0
with:
pattern: digests-*
path: /tmp/digests
merge-multiple: true

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@v3.11.1

- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.4.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.4.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -219,19 +219,10 @@ jobs:
- init
- deploy-manifest
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
with:
app-id: ${{ secrets.ESPHOME_GITHUB_APP_ID }}
private-key: ${{ secrets.ESPHOME_GITHUB_APP_PRIVATE_KEY }}
owner: esphome
repositories: home-assistant-addon

- name: Trigger Workflow
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ steps.generate-token.outputs.token }}
github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
script: |
let description = "ESPHome";
if (context.eventName == "release") {
@@ -254,19 +245,10 @@ jobs:
needs: [init]
environment: ${{ needs.init.outputs.deploy_env }}
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
with:
app-id: ${{ secrets.ESPHOME_GITHUB_APP_ID }}
private-key: ${{ secrets.ESPHOME_GITHUB_APP_PRIVATE_KEY }}
owner: esphome
repositories: esphome-schema

- name: Trigger Workflow
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ steps.generate-token.outputs.token }}
github-token: ${{ secrets.DEPLOY_ESPHOME_SCHEMA_REPO_TOKEN }}
script: |
github.rest.actions.createWorkflowDispatch({
owner: "esphome",
@@ -277,34 +259,3 @@ jobs:
version: "${{ needs.init.outputs.tag }}",
}
})

version-notifier:
if: github.repository == 'esphome/esphome' && needs.init.outputs.branch_build == 'false'
runs-on: ubuntu-latest
needs:
- init
- deploy-manifest
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
with:
app-id: ${{ secrets.ESPHOME_GITHUB_APP_ID }}
private-key: ${{ secrets.ESPHOME_GITHUB_APP_PRIVATE_KEY }}
owner: esphome
repositories: version-notifier

- name: Trigger Workflow
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
github.rest.actions.createWorkflowDispatch({
owner: "esphome",
repo: "version-notifier",
workflow_id: "notify.yml",
ref: "main",
inputs: {
version: "${{ needs.init.outputs.tag }}",
}
})

.github/workflows/stale.yml (52 changes, vendored)
@@ -15,52 +15,36 @@ concurrency:

jobs:
stale:
if: github.repository_owner == 'esphome'
runs-on: ubuntu-latest
steps:
- name: Stale
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
- uses: actions/stale@v9.1.0
with:
debug-only: ${{ github.ref != 'refs/heads/dev' }} # Dry-run when not run on dev branch
remove-stale-when-updated: true
operations-per-run: 400

# The 90 day stale policy for PRs
# - PRs
# - No PRs marked as "not-stale"
# - No Issues (see below)
days-before-pr-stale: 90
days-before-pr-close: 7
days-before-issue-stale: -1
days-before-issue-close: -1
remove-stale-when-updated: true
stale-pr-label: "stale"
exempt-pr-labels: "not-stale"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.

If you are the author of this PR, please leave a comment if you want
to keep it open. Also, please rebase your PR onto the latest dev
branch to ensure that it's up to date with the latest changes.

Thank you for your contribution!

# The 90 day stale policy for Issues
# - Issues
# - No Issues marked as "not-stale"
# - No PRs (see above)
days-before-issue-stale: 90
days-before-issue-close: 7
# Use stale to automatically close issues with a
# reference to the issue tracker
close-issues:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.1.0
with:
days-before-pr-stale: -1
days-before-pr-close: -1
days-before-issue-stale: 1
days-before-issue-close: 1
remove-stale-when-updated: true
stale-issue-label: "stale"
exempt-issue-labels: "not-stale"
stale-issue-message: >
There hasn't been any activity on this issue recently. Due to the
high number of incoming GitHub notifications, we have to clean some
of the old issues, as many of them have already been resolved with
the latest updates.

Please make sure to update to the latest ESPHome version and
check if that solves the issue. Let us know if that works for you by
adding a comment 👍

This issue has now been marked as stale and will be closed if no
further activity occurs. Thank you for your contributions.
https://github.com/esphome/esphome/issues/430

.github/workflows/status-check-labels.yml (31 changes, vendored)
@@ -1,31 +0,0 @@
name: Status check labels

on:
pull_request:
types: [labeled, unlabeled]

jobs:
check:
name: Check ${{ matrix.label }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
label:
- needs-docs
- merge-after-release
- chained-pr
steps:
- name: Check for ${{ matrix.label }} label
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number
});
const hasLabel = labels.find(label => label.name === '${{ matrix.label }}');
if (hasLabel) {
core.setFailed('Pull request cannot be merged, it is labeled as ${{ matrix.label }}');
}
.github/workflows/sync-device-classes.yml (13 changes, vendored)
@@ -13,16 +13,16 @@ jobs:
if: github.repository == 'esphome/esphome'
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@v4.2.2

- name: Checkout Home Assistant
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@v4.2.2
with:
repository: home-assistant/core
path: lib/home-assistant

- name: Setup Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@v5.6.0
with:
python-version: 3.13

@@ -30,18 +30,13 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install -e lib/home-assistant
pip install -r requirements_test.txt pre-commit

- name: Sync
run: |
python ./script/sync-device_class.py

- name: Run pre-commit hooks
run: |
python script/run-in-env.py pre-commit run --all-files

- name: Commit changes
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
uses: peter-evans/create-pull-request@v7.0.8
with:
commit-message: "Synchronise Device Classes from Home Assistant"
committer: esphomebot <esphome@openhomefoundation.org>

.gitignore (4 changes, vendored)
@@ -91,10 +91,6 @@ venv-*/
# mypy
.mypy_cache/

# nix
/default.nix
/shell.nix

.pioenvs
.piolibdeps
.pio

@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.10
rev: v0.12.5
hooks:
# Run the linter.
- id: ruff

CODEOWNERS (91 changes)
@@ -21,7 +21,6 @@ esphome/components/adc128s102/* @DeerMaximum
esphome/components/addressable_light/* @justfalter
esphome/components/ade7880/* @kpfleming
esphome/components/ade7953/* @angelnu
esphome/components/ade7953_base/* @angelnu
esphome/components/ade7953_i2c/* @angelnu
esphome/components/ade7953_spi/* @angelnu
esphome/components/ads1118/* @solomondg1
@@ -41,12 +40,11 @@ esphome/components/analog_threshold/* @ianchi
esphome/components/animation/* @syndlex
esphome/components/anova/* @buxtronix
esphome/components/apds9306/* @aodrenah
esphome/components/api/* @esphome/core
esphome/components/aqi/* @freekode @jasstrong @ximex
esphome/components/api/* @OttoWinter
esphome/components/as5600/* @ammmze
esphome/components/as5600/sensor/* @ammmze
esphome/components/as7341/* @mrgnr
esphome/components/async_tcp/* @esphome/core
esphome/components/async_tcp/* @OttoWinter
esphome/components/at581x/* @X-Ryl669
esphome/components/atc_mithermometer/* @ahpohl
esphome/components/atm90e26/* @danieltwagner
@@ -64,17 +62,14 @@ esphome/components/bedjet/fan/* @jhansche
esphome/components/bedjet/sensor/* @javawizard @jhansche
esphome/components/beken_spi_led_strip/* @Mat931
esphome/components/bh1750/* @OttoWinter
esphome/components/bh1900nux/* @B48D81EFCC
esphome/components/binary_sensor/* @esphome/core
esphome/components/bk72xx/* @kuba2k2
esphome/components/bl0906/* @athom-tech @jesserockz @tarontop
esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @dan-s-github @tobias-
esphome/components/bl0940/* @tobias-
esphome/components/bl0942/* @dbuezas @dwmw2
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/ble_nus/* @tomaszduda23
esphome/components/bluetooth_proxy/* @bdraco @jesserockz
esphome/components/bm8563/* @abmantis
esphome/components/bluetooth_proxy/* @jesserockz
esphome/components/bme280_base/* @esphome/core
esphome/components/bme280_spi/* @apbodrov
esphome/components/bme680_bsec/* @trvrnrth
@@ -93,12 +88,10 @@ esphome/components/bp1658cj/* @Cossid
esphome/components/bp5758d/* @Cossid
esphome/components/button/* @esphome/core
esphome/components/bytebuffer/* @clydebarrow
esphome/components/camera/* @bdraco @DT-art1
esphome/components/camera_encoder/* @DT-art1
esphome/components/camera/* @DT-art1 @bdraco
esphome/components/canbus/* @danielschramm @mvturnho
esphome/components/cap1188/* @mreditor97
esphome/components/captive_portal/* @esphome/core
esphome/components/cc1101/* @gabest11 @lygris
esphome/components/captive_portal/* @OttoWinter
esphome/components/ccs811/* @habbie
esphome/components/cd74hc4067/* @asoehlke
esphome/components/ch422g/* @clydebarrow @jesterret
@@ -125,7 +118,7 @@ esphome/components/dallas_temp/* @ssieb
esphome/components/daly_bms/* @s1lvi0
esphome/components/dashboard_import/* @esphome/core
esphome/components/datetime/* @jesserockz @rfdarter
esphome/components/debug/* @esphome/core
esphome/components/debug/* @OttoWinter
esphome/components/delonghi/* @grob6000
esphome/components/dfplayer/* @glmnet
esphome/components/dfrobot_sen0395/* @niklasweber
@@ -145,37 +138,33 @@ esphome/components/ens160_base/* @latonita @vincentscode
esphome/components/ens160_i2c/* @latonita
esphome/components/ens160_spi/* @latonita
esphome/components/ens210/* @itn3rd77
esphome/components/epaper_spi/* @esphome/core
esphome/components/es7210/* @kahrendt
esphome/components/es7243e/* @kbx81
esphome/components/es8156/* @kbx81
esphome/components/es8311/* @kahrendt @kroimon
esphome/components/es8388/* @P4uLT
esphome/components/esp32/* @esphome/core
esphome/components/esp32_ble/* @bdraco @jesserockz @Rapsssito
esphome/components/esp32_ble_client/* @bdraco @jesserockz
esphome/components/esp32_ble_server/* @clydebarrow @jesserockz @Rapsssito
esphome/components/esp32_ble_tracker/* @bdraco
esphome/components/esp32_ble/* @Rapsssito @jesserockz
esphome/components/esp32_ble_client/* @jesserockz
esphome/components/esp32_ble_server/* @Rapsssito @clydebarrow @jesserockz
esphome/components/esp32_camera_web_server/* @ayufan
esphome/components/esp32_can/* @Sympatron
esphome/components/esp32_hosted/* @swoboda1337
esphome/components/esp32_hosted/update/* @swoboda1337
esphome/components/esp32_improv/* @jesserockz
esphome/components/esp32_rmt/* @jesserockz
esphome/components/esp32_rmt_led_strip/* @jesserockz
esphome/components/esp8266/* @esphome/core
esphome/components/esp_ldo/* @clydebarrow
esphome/components/espnow/* @jesserockz
esphome/components/espnow/packet_transport/* @EasilyBoredEngineer
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/event_emitter/* @Rapsssito
esphome/components/exposure_notifications/* @OttoWinter
esphome/components/ezo/* @ssieb
esphome/components/ezo_pmp/* @carlos-sarmiento
esphome/components/factory_reset/* @anatoly-savchenkov
esphome/components/fastled_base/* @OttoWinter
esphome/components/feedback/* @ianchi
esphome/components/fingerprint_grow/* @alexborro @loongyh @OnFreund
esphome/components/fingerprint_grow/* @OnFreund @alexborro @loongyh
esphome/components/font/* @clydebarrow @esphome/core
esphome/components/fs3000/* @kahrendt
esphome/components/ft5x06/* @clydebarrow
@@ -185,14 +174,13 @@ esphome/components/gdk101/* @Szewcson
esphome/components/gl_r01_i2c/* @pkejval
esphome/components/globals/* @esphome/core
esphome/components/gp2y1010au0f/* @zry98
esphome/components/gp8403/* @jesserockz @sebydocky
esphome/components/gp8403/* @jesserockz
esphome/components/gpio/* @esphome/core
esphome/components/gpio/one_wire/* @ssieb
esphome/components/gps/* @coogle @ximex
esphome/components/graph/* @synco
esphome/components/graphical_display_menu/* @MrMDavidson
esphome/components/gree/* @orestismers
esphome/components/gree/switch/* @nagyrobi
esphome/components/grove_gas_mc_v2/* @YorkshireIoT
esphome/components/grove_tb6612fng/* @max246
esphome/components/growatt_solar/* @leeuwte
@@ -207,17 +195,12 @@ esphome/components/havells_solar/* @sourabhjaiswal
esphome/components/hbridge/fan/* @WeekendWarrior
esphome/components/hbridge/light/* @DotNetDann
esphome/components/hbridge/switch/* @dwmw2
esphome/components/hc8/* @omartijn
esphome/components/hdc2010/* @optimusprimespace @ssieb
esphome/components/he60r/* @clydebarrow
esphome/components/heatpumpir/* @rob-deutsch
esphome/components/hitachi_ac424/* @sourabhjaiswal
esphome/components/hlk_fm22x/* @OnFreund
esphome/components/hlw8032/* @rici4kubicek
esphome/components/hm3301/* @freekode
esphome/components/hmac_md5/* @dwmw2
esphome/components/hmac_sha256/* @dwmw2
esphome/components/homeassistant/* @esphome/core @OttoWinter
esphome/components/homeassistant/* @OttoWinter @esphome/core
esphome/components/homeassistant/number/* @landonr
esphome/components/homeassistant/switch/* @Links2004
esphome/components/honeywell_hih_i2c/* @Benichou34
@@ -230,7 +213,6 @@ esphome/components/hte501/* @Stock-M
esphome/components/http_request/ota/* @oarcher
esphome/components/http_request/update/* @jesserockz
esphome/components/htu31d/* @betterengineering
esphome/components/hub75/* @stuartparmenter
esphome/components/hydreon_rgxx/* @functionpointer
esphome/components/hyt271/* @Philippe12
esphome/components/i2c/* @esphome/core
@@ -243,18 +225,18 @@ esphome/components/iaqcore/* @yozik04
esphome/components/ili9xxx/* @clydebarrow @nielsnl68
esphome/components/improv_base/* @esphome/core
esphome/components/improv_serial/* @esphome/core
esphome/components/ina226/* @latonita @Sergio303
esphome/components/ina226/* @Sergio303 @latonita
esphome/components/ina260/* @mreditor97
esphome/components/ina2xx_base/* @latonita
esphome/components/ina2xx_i2c/* @latonita
esphome/components/ina2xx_spi/* @latonita
esphome/components/inkbird_ibsth1_mini/* @fkirill
esphome/components/inkplate/* @jesserockz @JosipKuci
esphome/components/inkplate6/* @jesserockz
esphome/components/integration/* @OttoWinter
esphome/components/internal_temperature/* @Mat931
esphome/components/interval/* @esphome/core
esphome/components/jsn_sr04t/* @Mafus1
esphome/components/json/* @esphome/core
esphome/components/json/* @OttoWinter
esphome/components/kamstrup_kmp/* @cfeenstra1024
esphome/components/key_collector/* @ssieb
esphome/components/key_provider/* @ssieb
@@ -262,7 +244,6 @@ esphome/components/kuntze/* @ssieb
esphome/components/lc709203f/* @ilikecake
esphome/components/lcd_menu/* @numo68
esphome/components/ld2410/* @regevbr @sebcaps
esphome/components/ld2412/* @Rihan9
esphome/components/ld2420/* @descipher
esphome/components/ld2450/* @hareeshmu
esphome/components/ld24xx/* @kbx81
@@ -272,7 +253,6 @@ esphome/components/libretiny_pwm/* @kuba2k2
esphome/components/light/* @esphome/core
esphome/components/lightwaverf/* @max246
esphome/components/lilygo_t5_47/touchscreen/* @jesserockz
esphome/components/lm75b/* @beormund
esphome/components/ln882x/* @lamauny
esphome/components/lock/* @esphome/core
esphome/components/logger/* @esphome/core
@@ -293,14 +273,13 @@ esphome/components/max7219digit/* @rspaargaren
esphome/components/max9611/* @mckaymatthew
esphome/components/mcp23008/* @jesserockz
esphome/components/mcp23017/* @jesserockz
esphome/components/mcp23s08/* @jesserockz @SenexCrenshaw
esphome/components/mcp23s17/* @jesserockz @SenexCrenshaw
esphome/components/mcp23s08/* @SenexCrenshaw @jesserockz
esphome/components/mcp23s17/* @SenexCrenshaw @jesserockz
esphome/components/mcp23x08_base/* @jesserockz
esphome/components/mcp23x17_base/* @jesserockz
esphome/components/mcp23xxx_base/* @jesserockz
esphome/components/mcp2515/* @danielschramm @mvturnho
esphome/components/mcp3204/* @rsumner
esphome/components/mcp3221/* @philippderdiedas
esphome/components/mcp4461/* @p1ngb4ck
esphome/components/mcp4728/* @berfenger
esphome/components/mcp47a1/* @jesserockz
@@ -310,13 +289,12 @@ esphome/components/md5/* @esphome/core
esphome/components/mdns/* @esphome/core
esphome/components/media_player/* @jesserockz
esphome/components/micro_wake_word/* @jesserockz @kahrendt
esphome/components/micronova/* @edenhaus @jorre05
esphome/components/micronova/* @jorre05
esphome/components/microphone/* @jesserockz @kahrendt
esphome/components/mics_4514/* @jesserockz
esphome/components/midea/* @dudanov
esphome/components/midea_ir/* @dudanov
esphome/components/mipi_dsi/* @clydebarrow
esphome/components/mipi_rgb/* @clydebarrow
esphome/components/mipi_spi/* @clydebarrow
esphome/components/mitsubishi/* @RubyBailey
esphome/components/mixer/speaker/* @kahrendt
@@ -360,7 +338,7 @@ esphome/components/ota/* @esphome/core
esphome/components/output/* @esphome/core
esphome/components/packet_transport/* @clydebarrow
esphome/components/pca6416a/* @Mat931
esphome/components/pca9554/* @bdraco @clydebarrow @hwstar
esphome/components/pca9554/* @clydebarrow @hwstar
esphome/components/pcf85063/* @brogon
esphome/components/pcf8563/* @KoenBreeman
esphome/components/pi4ioe5v6408/* @jesserockz
@@ -371,9 +349,9 @@ esphome/components/pm2005/* @andrewjswan
esphome/components/pmsa003i/* @sjtrny
esphome/components/pmsx003/* @ximex
esphome/components/pmwcs3/* @SeByDocKy
esphome/components/pn532/* @jesserockz @OttoWinter
esphome/components/pn532_i2c/* @jesserockz @OttoWinter
esphome/components/pn532_spi/* @jesserockz @OttoWinter
esphome/components/pn532/* @OttoWinter @jesserockz
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
esphome/components/pn532_spi/* @OttoWinter @jesserockz
esphome/components/pn7150/* @jesserockz @kbx81
esphome/components/pn7150_i2c/* @jesserockz @kbx81
esphome/components/pn7160/* @jesserockz @kbx81
@@ -382,7 +360,7 @@ esphome/components/pn7160_spi/* @jesserockz @kbx81
esphome/components/power_supply/* @esphome/core
esphome/components/preferences/* @esphome/core
esphome/components/psram/* @esphome/core
esphome/components/pulse_meter/* @cstaahl @stevebaxter @TrentHouliston
esphome/components/pulse_meter/* @TrentHouliston @cstaahl @stevebaxter
esphome/components/pvvx_mithermometer/* @pasiz
esphome/components/pylontech/* @functionpointer
esphome/components/qmp6988/* @andrewpc
@@ -405,7 +383,6 @@ esphome/components/rpi_dpi_rgb/* @clydebarrow
esphome/components/rtl87xx/* @kuba2k2
esphome/components/rtttl/* @glmnet
esphome/components/runtime_stats/* @bdraco
esphome/components/rx8130/* @beormund
esphome/components/safe_mode/* @jsuanet @kbx81 @paulmonigatti
esphome/components/scd4x/* @martgras @sjtrny
esphome/components/script/* @esphome/core
@@ -424,8 +401,7 @@ esphome/components/sensirion_common/* @martgras
esphome/components/sensor/* @esphome/core
esphome/components/sfa30/* @ghsensdev
esphome/components/sgp40/* @SenexCrenshaw
esphome/components/sgp4x/* @martgras @SenexCrenshaw
esphome/components/sha256/* @esphome/core
esphome/components/sgp4x/* @SenexCrenshaw @martgras
esphome/components/shelly_dimmer/* @edge90 @rnauber
esphome/components/sht3xd/* @mrtoy-me
esphome/components/sht4x/* @sjtrny
@@ -447,7 +423,6 @@ esphome/components/speaker/media_player/* @kahrendt @synesthesiam
esphome/components/spi/* @clydebarrow @esphome/core
esphome/components/spi_device/* @clydebarrow
esphome/components/spi_led_strip/* @clydebarrow
esphome/components/split_buffer/* @jesserockz
esphome/components/sprinkler/* @kbx81
esphome/components/sps30/* @martgras
esphome/components/ssd1322_base/* @kbx81
@@ -469,7 +444,6 @@ esphome/components/st7735/* @SenexCrenshaw
esphome/components/st7789v/* @kbx81
esphome/components/st7920/* @marsjan155
esphome/components/statsd/* @Links2004
esphome/components/stts22h/* @B48D81EFCC
esphome/components/substitutions/* @esphome/core
esphome/components/sun/* @OttoWinter
esphome/components/sun_gtil2/* @Mat931
@@ -491,16 +465,14 @@ esphome/components/template/datetime/* @rfdarter
esphome/components/template/event/* @nohat
esphome/components/template/fan/* @ssieb
esphome/components/text/* @mauritskorse
esphome/components/thermopro_ble/* @sittner
esphome/components/thermostat/* @kbx81
esphome/components/time/* @esphome/core
esphome/components/tinyusb/* @kbx81
esphome/components/time/* @OttoWinter
esphome/components/tlc5947/* @rnauber
esphome/components/tlc5971/* @IJIJI
esphome/components/tm1621/* @Philippe12
esphome/components/tm1637/* @glmnet
esphome/components/tm1638/* @skykingjwc
esphome/components/tm1651/* @mrtoy-me
esphome/components/tm1651/* @freekode
esphome/components/tmp102/* @timsavage
esphome/components/tmp1075/* @sybrenstuvel
esphome/components/tmp117/* @Azimath
@@ -519,7 +491,6 @@ esphome/components/tuya/switch/* @jesserockz
esphome/components/tuya/text_sensor/* @dentra
esphome/components/uart/* @esphome/core
esphome/components/uart/button/* @ssieb
esphome/components/uart/event/* @eoasmxd
esphome/components/uart/packet_transport/* @clydebarrow
esphome/components/udp/* @clydebarrow
esphome/components/ufire_ec/* @pvizeli
@@ -527,7 +498,6 @@ esphome/components/ufire_ise/* @pvizeli
esphome/components/ultrasonic/* @OttoWinter
esphome/components/update/* @jesserockz
esphome/components/uponor_smatrix/* @kroimon
esphome/components/usb_cdc_acm/* @kbx81
esphome/components/usb_host/* @clydebarrow
esphome/components/usb_uart/* @clydebarrow
esphome/components/valve/* @esphome/core
@@ -538,10 +508,9 @@ esphome/components/version/* @esphome/core
esphome/components/voice_assistant/* @jesserockz @kahrendt
esphome/components/wake_on_lan/* @clydebarrow @willwill2will54
esphome/components/watchdog/* @oarcher
esphome/components/water_heater/* @dhoeben
esphome/components/waveshare_epaper/* @clydebarrow
esphome/components/web_server/ota/* @esphome/core
esphome/components/web_server_base/* @esphome/core
esphome/components/web_server_base/* @OttoWinter
esphome/components/web_server_idf/* @dentra
esphome/components/weikai/* @DrCoolZic
esphome/components/weikai_i2c/* @DrCoolZic
@@ -559,7 +528,6 @@ esphome/components/wk2204_spi/* @DrCoolZic
esphome/components/wk2212_i2c/* @DrCoolZic
esphome/components/wk2212_spi/* @DrCoolZic
esphome/components/wl_134/* @hobbypunk90
esphome/components/wts01/* @alepee
esphome/components/x9c/* @EtienneMD
esphome/components/xgzp68xx/* @gcormier
esphome/components/xiaomi_hhccjcy10/* @fariouche
@@ -575,4 +543,3 @@ esphome/components/xxtea/* @clydebarrow
esphome/components/zephyr/* @tomaszduda23
esphome/components/zhlt01/* @cfeenstra1024
esphome/components/zio_ultrasonic/* @kahrendt
esphome/components/zwave_proxy/* @kbx81

@@ -2,7 +2,7 @@

We welcome contributions to the ESPHome suite of code and documentation!

Please read our [contributing guide](https://developers.esphome.io/contributing/code/) if you wish to contribute to the
Please read our [contributing guide](https://esphome.io/guides/contributing.html) if you wish to contribute to the
project and be sure to join us on [Discord](https://discord.gg/KhAMKrd).

**See also:**

Doxyfile
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = 2026.1.0-dev
PROJECT_NUMBER = 2025.8.0-dev

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

@@ -2,8 +2,8 @@

<a href="https://esphome.io/">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://media.esphome.io/logo/logo-text-on-dark.svg">
<img src="https://media.esphome.io/logo/logo-text-on-light.svg" alt="ESPHome Logo">
<source media="(prefers-color-scheme: dark)" srcset="https://esphome.io/_static/logo-text-on-dark.svg", alt="ESPHome Logo">
<img src="https://esphome.io/_static/logo-text-on-light.svg" alt="ESPHome Logo">
</picture>
</a>

@@ -90,7 +90,7 @@ def main():
def run_command(*cmd, ignore_error: bool = False):
    print(f"$ {shlex.join(list(cmd))}")
    if not args.dry_run:
        rc = subprocess.call(list(cmd), close_fds=False)
        rc = subprocess.call(list(cmd))
        if rc != 0 and not ignore_error:
            print("Command failed")
            sys.exit(1)

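The hunk above adds `close_fds=False` so the spawned process inherits the parent's open file descriptors. A minimal, self-contained sketch of the behavioural difference on POSIX (the pipe and probe command are illustrative, not part of the ESPHome script):

```python
import os
import subprocess
import sys

# Create a descriptor in the parent and mark it inheritable.
r, _w = os.pipe()
os.set_inheritable(r, True)

probe = f"import os; os.fstat({r}); print('fd {r} inherited')"

# close_fds=False: the child still sees fd `r`, so the probe succeeds.
subprocess.call([sys.executable, "-c", probe], close_fds=False)

# With the default close_fds=True, the same probe would raise OSError in the child.
```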
File diff suppressed because it is too large
@@ -1,142 +0,0 @@
|
||||
"""Address cache for DNS and mDNS lookups."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def normalize_hostname(hostname: str) -> str:
|
||||
"""Normalize hostname for cache lookups.
|
||||
|
||||
Removes trailing dots and converts to lowercase.
|
||||
"""
|
||||
return hostname.rstrip(".").lower()
|
||||
|
||||
|
||||
class AddressCache:
|
||||
"""Cache for DNS and mDNS address lookups.
|
||||
|
||||
This cache stores pre-resolved addresses from command-line arguments
|
||||
to avoid slow DNS/mDNS lookups during builds.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
mdns_cache: dict[str, list[str]] | None = None,
|
||||
dns_cache: dict[str, list[str]] | None = None,
|
||||
) -> None:
|
||||
"""Initialize the address cache.
|
||||
|
||||
Args:
|
||||
mdns_cache: Pre-populated mDNS addresses (hostname -> IPs)
|
||||
dns_cache: Pre-populated DNS addresses (hostname -> IPs)
|
||||
"""
|
||||
self.mdns_cache = mdns_cache or {}
|
||||
self.dns_cache = dns_cache or {}
|
||||
|
||||
def _get_cached_addresses(
|
||||
self, hostname: str, cache: dict[str, list[str]], cache_type: str
|
||||
) -> list[str] | None:
|
||||
"""Get cached addresses from a specific cache.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up
|
||||
cache: The cache dictionary to check
|
||||
cache_type: Type of cache for logging ("mDNS" or "DNS")
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
normalized = normalize_hostname(hostname)
|
||||
if addresses := cache.get(normalized):
|
||||
_LOGGER.debug("Using %s cache for %s: %s", cache_type, hostname, addresses)
|
||||
return addresses
|
||||
return None
|
||||
|
||||
def get_mdns_addresses(self, hostname: str) -> list[str] | None:
|
||||
"""Get cached mDNS addresses for a hostname.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up (should end with .local)
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
return self._get_cached_addresses(hostname, self.mdns_cache, "mDNS")
|
||||
|
||||
def get_dns_addresses(self, hostname: str) -> list[str] | None:
|
||||
"""Get cached DNS addresses for a hostname.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
return self._get_cached_addresses(hostname, self.dns_cache, "DNS")
|
||||
|
||||
def get_addresses(self, hostname: str) -> list[str] | None:
|
||||
"""Get cached addresses for a hostname.
|
||||
|
||||
Checks mDNS cache for .local domains, DNS cache otherwise.
|
||||
|
||||
Args:
|
||||
hostname: The hostname to look up
|
||||
|
||||
Returns:
|
||||
List of IP addresses if found in cache, None otherwise
|
||||
"""
|
||||
normalized = normalize_hostname(hostname)
|
||||
if normalized.endswith(".local"):
|
||||
return self.get_mdns_addresses(hostname)
|
||||
return self.get_dns_addresses(hostname)
|
||||
|
||||
def has_cache(self) -> bool:
|
||||
"""Check if any cache entries exist."""
|
||||
return bool(self.mdns_cache or self.dns_cache)
|
||||
|
||||
@classmethod
|
||||
def from_cli_args(
|
||||
cls, mdns_args: Iterable[str], dns_args: Iterable[str]
|
||||
) -> AddressCache:
|
||||
"""Create cache from command-line arguments.
|
||||
|
||||
Args:
|
||||
mdns_args: List of mDNS cache entries like ['host=ip1,ip2']
|
||||
dns_args: List of DNS cache entries like ['host=ip1,ip2']
|
||||
|
||||
Returns:
|
||||
Configured AddressCache instance
|
||||
"""
|
||||
mdns_cache = cls._parse_cache_args(mdns_args)
|
||||
dns_cache = cls._parse_cache_args(dns_args)
|
||||
return cls(mdns_cache=mdns_cache, dns_cache=dns_cache)
|
||||
|
||||
@staticmethod
|
||||
def _parse_cache_args(cache_args: Iterable[str]) -> dict[str, list[str]]:
|
||||
"""Parse cache arguments into a dictionary.
|
||||
|
||||
Args:
|
||||
cache_args: List of cache mappings like ['host1=ip1,ip2', 'host2=ip3']
|
||||
|
||||
Returns:
|
||||
Dictionary mapping normalized hostnames to list of IP addresses
|
||||
"""
|
||||
cache: dict[str, list[str]] = {}
|
||||
for arg in cache_args:
|
||||
if "=" not in arg:
|
||||
_LOGGER.warning(
|
||||
"Invalid cache format: %s (expected 'hostname=ip1,ip2')", arg
|
||||
)
|
||||
continue
|
||||
hostname, ips = arg.split("=", 1)
|
||||
# Normalize hostname for consistent lookups
|
||||
normalized = normalize_hostname(hostname)
|
||||
cache[normalized] = [ip.strip() for ip in ips.split(",")]
|
||||
return cache
|
||||
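A short usage sketch for the `AddressCache` removed above; the import path, hostnames, and addresses are illustrative assumptions, not values from the diff:

```python
from esphome.address_cache import AddressCache  # assumed module path

# Build the cache the same way the CLI would, from 'host=ip1,ip2' style arguments.
cache = AddressCache.from_cli_args(
    mdns_args=["living-room.local=192.168.1.42"],
    dns_args=["printer.example.com=10.0.0.5,10.0.0.6"],
)

# Lookups normalize case and trailing dots; .local names hit the mDNS cache.
assert cache.get_addresses("Living-Room.local.") == ["192.168.1.42"]
assert cache.get_addresses("printer.example.com") == ["10.0.0.5", "10.0.0.6"]
assert cache.get_addresses("unknown.example.com") is None
```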
@@ -1,335 +0,0 @@
|
||||
"""Memory usage analyzer for ESPHome compiled binaries."""
|
||||
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, field
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .const import (
|
||||
CORE_SUBCATEGORY_PATTERNS,
|
||||
DEMANGLED_PATTERNS,
|
||||
ESPHOME_COMPONENT_PATTERN,
|
||||
SECTION_TO_ATTR,
|
||||
SYMBOL_PATTERNS,
|
||||
)
|
||||
from .demangle import batch_demangle
|
||||
from .helpers import (
|
||||
get_component_class_patterns,
|
||||
get_esphome_components,
|
||||
map_section_name,
|
||||
parse_symbol_line,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from esphome.platformio_api import IDEData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# C++ runtime patterns for categorization
|
||||
_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"])
|
||||
|
||||
# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.)
|
||||
_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])
|
||||
|
||||
# Regex pattern for parsing readelf section headers
|
||||
# Format: [ #] name type addr off size
|
||||
_READELF_SECTION_PATTERN = re.compile(
|
||||
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
|
||||
)
|
||||
|
||||
# Component category prefixes
|
||||
_COMPONENT_PREFIX_ESPHOME = "[esphome]"
|
||||
_COMPONENT_PREFIX_EXTERNAL = "[external]"
|
||||
_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core"
|
||||
_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api"
|
||||
|
||||
# C++ namespace prefixes
|
||||
_NAMESPACE_ESPHOME = "esphome::"
|
||||
_NAMESPACE_STD = "std::"
|
||||
|
||||
# Type alias for symbol information: (symbol_name, size, component)
|
||||
SymbolInfoType = tuple[str, int, str]
|
||||
|
||||
|
||||
@dataclass
|
||||
class MemorySection:
|
||||
"""Represents a memory section with its symbols."""
|
||||
|
||||
name: str
|
||||
symbols: list[SymbolInfoType] = field(default_factory=list)
|
||||
total_size: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class ComponentMemory:
|
||||
"""Tracks memory usage for a component."""
|
||||
|
||||
name: str
|
||||
text_size: int = 0 # Code in flash
|
||||
rodata_size: int = 0 # Read-only data in flash
|
||||
data_size: int = 0 # Initialized data (flash + ram)
|
||||
bss_size: int = 0 # Uninitialized data (ram only)
|
||||
symbol_count: int = 0
|
||||
|
||||
@property
|
||||
def flash_total(self) -> int:
|
||||
"""Total flash usage (text + rodata + data)."""
|
||||
return self.text_size + self.rodata_size + self.data_size
|
||||
|
||||
@property
|
||||
def ram_total(self) -> int:
|
||||
"""Total RAM usage (data + bss)."""
|
||||
return self.data_size + self.bss_size
|
||||
|
||||
|
||||
class MemoryAnalyzer:
|
||||
"""Analyzes memory usage from ELF files."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
elf_path: str,
|
||||
objdump_path: str | None = None,
|
||||
readelf_path: str | None = None,
|
||||
external_components: set[str] | None = None,
|
||||
idedata: "IDEData | None" = None,
|
||||
) -> None:
|
||||
"""Initialize memory analyzer.
|
||||
|
||||
Args:
|
||||
elf_path: Path to ELF file to analyze
|
||||
objdump_path: Path to objdump binary (auto-detected from idedata if not provided)
|
||||
readelf_path: Path to readelf binary (auto-detected from idedata if not provided)
|
||||
external_components: Set of external component names
|
||||
idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths
|
||||
"""
|
||||
self.elf_path = Path(elf_path)
|
||||
if not self.elf_path.exists():
|
||||
raise FileNotFoundError(f"ELF file not found: {elf_path}")
|
||||
|
||||
# Auto-detect toolchain paths from idedata if not provided
|
||||
if idedata is not None and (objdump_path is None or readelf_path is None):
|
||||
objdump_path = objdump_path or idedata.objdump_path
|
||||
readelf_path = readelf_path or idedata.readelf_path
|
||||
_LOGGER.debug("Using toolchain paths from PlatformIO idedata")
|
||||
|
||||
self.objdump_path = objdump_path or "objdump"
|
||||
self.readelf_path = readelf_path or "readelf"
|
||||
self.external_components = external_components or set()
|
||||
|
||||
self.sections: dict[str, MemorySection] = {}
|
||||
self.components: dict[str, ComponentMemory] = defaultdict(
|
||||
lambda: ComponentMemory("")
|
||||
)
|
||||
self._demangle_cache: dict[str, str] = {}
|
||||
self._uncategorized_symbols: list[tuple[str, str, int]] = []
|
||||
self._esphome_core_symbols: list[
|
||||
tuple[str, str, int]
|
||||
] = [] # Track core symbols
|
||||
self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict(
|
||||
list
|
||||
) # Track symbols for all components
|
||||
|
||||
def analyze(self) -> dict[str, ComponentMemory]:
|
||||
"""Analyze the ELF file and return component memory usage."""
|
||||
self._parse_sections()
|
||||
self._parse_symbols()
|
||||
self._categorize_symbols()
|
||||
return dict(self.components)
|
||||
|
||||
def _parse_sections(self) -> None:
|
||||
"""Parse section headers from ELF file."""
|
||||
result = subprocess.run(
|
||||
[self.readelf_path, "-S", str(self.elf_path)],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
|
||||
# Parse section headers
|
||||
for line in result.stdout.splitlines():
|
||||
# Look for section entries
|
||||
if not (match := _READELF_SECTION_PATTERN.match(line)):
|
||||
continue
|
||||
|
||||
section_name = match.group(1)
|
||||
size_hex = match.group(2)
|
||||
size = int(size_hex, 16)
|
||||
|
||||
# Map to standard section name
|
||||
mapped_section = map_section_name(section_name)
|
||||
if not mapped_section:
|
||||
continue
|
||||
|
||||
if mapped_section not in self.sections:
|
||||
self.sections[mapped_section] = MemorySection(mapped_section)
|
||||
self.sections[mapped_section].total_size += size
|
||||
|
||||
def _parse_symbols(self) -> None:
|
||||
"""Parse symbols from ELF file."""
|
||||
result = subprocess.run(
|
||||
[self.objdump_path, "-t", str(self.elf_path)],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True,
|
||||
)
|
||||
|
||||
# Track seen addresses to avoid duplicates
|
||||
seen_addresses: set[str] = set()
|
||||
|
||||
for line in result.stdout.splitlines():
|
||||
if not (symbol_info := parse_symbol_line(line)):
|
||||
continue
|
||||
|
||||
section, name, size, address = symbol_info
|
||||
|
||||
# Skip duplicate symbols at the same address (e.g., C1/C2 constructors)
|
||||
if address in seen_addresses or section not in self.sections:
|
||||
continue
|
||||
|
||||
self.sections[section].symbols.append((name, size, ""))
|
||||
seen_addresses.add(address)
|
||||
|
||||
def _categorize_symbols(self) -> None:
|
||||
"""Categorize symbols by component."""
|
||||
# First, collect all unique symbol names for batch demangling
|
||||
all_symbols = {
|
||||
symbol_name
|
||||
for section in self.sections.values()
|
||||
for symbol_name, _, _ in section.symbols
|
||||
}
|
||||
|
||||
# Batch demangle all symbols at once
|
||||
self._batch_demangle_symbols(list(all_symbols))
|
||||
|
||||
# Now categorize with cached demangled names
|
||||
for section_name, section in self.sections.items():
|
||||
for symbol_name, size, _ in section.symbols:
|
||||
component = self._identify_component(symbol_name)
|
||||
|
||||
if component not in self.components:
|
||||
self.components[component] = ComponentMemory(component)
|
||||
|
||||
comp_mem = self.components[component]
|
||||
comp_mem.symbol_count += 1
|
||||
|
||||
# Update the appropriate size attribute based on section
|
||||
if attr_name := SECTION_TO_ATTR.get(section_name):
|
||||
setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size)
|
||||
|
||||
# Track uncategorized symbols
|
||||
if component == "other" and size > 0:
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
self._uncategorized_symbols.append((symbol_name, demangled, size))
|
||||
|
||||
# Track ESPHome core symbols for detailed analysis
|
||||
if component == _COMPONENT_CORE and size > 0:
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
self._esphome_core_symbols.append((symbol_name, demangled, size))
|
||||
|
||||
# Track all component symbols for detailed analysis
|
||||
if size > 0:
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
self._component_symbols[component].append(
|
||||
(symbol_name, demangled, size)
|
||||
)
|
||||
|
||||
def _identify_component(self, symbol_name: str) -> str:
|
||||
"""Identify which component a symbol belongs to."""
|
||||
# Demangle C++ names if needed
|
||||
demangled = self._demangle_symbol(symbol_name)
|
||||
|
||||
# Check for special component classes first (before namespace pattern)
|
||||
# This handles cases like esphome::ESPHomeOTAComponent which should map to ota
|
||||
if _NAMESPACE_ESPHOME in demangled:
|
||||
# Check for special component classes that include component name in the class
|
||||
# For example: esphome::ESPHomeOTAComponent -> ota component
|
||||
for component_name in get_esphome_components():
|
||||
patterns = get_component_class_patterns(component_name)
|
||||
if any(pattern in demangled for pattern in patterns):
|
||||
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
|
||||
|
||||
# Check for ESPHome component namespaces
|
||||
match = ESPHOME_COMPONENT_PATTERN.search(demangled)
|
||||
if match:
|
||||
component_name = match.group(1)
|
||||
# Strip trailing underscore if present (e.g., switch_ -> switch)
|
||||
component_name = component_name.rstrip("_")
|
||||
|
||||
# Check if this is an actual component in the components directory
|
||||
if component_name in get_esphome_components():
|
||||
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
|
||||
# Check if this is a known external component from the config
|
||||
if component_name in self.external_components:
|
||||
return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
|
||||
# Everything else in esphome:: namespace is core
|
||||
return _COMPONENT_CORE
|
||||
|
||||
# Check for esphome core namespace (no component namespace)
|
||||
if _NAMESPACE_ESPHOME in demangled:
|
||||
# If no component match found, it's core
|
||||
return _COMPONENT_CORE
|
||||
|
||||
# Check against symbol patterns
|
||||
for component, patterns in SYMBOL_PATTERNS.items():
|
||||
if any(pattern in symbol_name for pattern in patterns):
|
||||
return component
|
||||
|
||||
# Check against demangled patterns
|
||||
for component, patterns in DEMANGLED_PATTERNS.items():
|
||||
if any(pattern in demangled for pattern in patterns):
|
||||
return component
|
||||
|
||||
# Special cases that need more complex logic
|
||||
|
||||
# Check if spi_flash vs spi_driver
|
||||
if "spi_" in symbol_name or "SPI" in symbol_name:
|
||||
return "spi_flash" if "spi_flash" in symbol_name else "spi_driver"
|
||||
|
||||
# libc special printf variants
|
||||
if (
|
||||
symbol_name.startswith("_")
|
||||
and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
|
||||
in _LIBC_PRINTF_SCANF_FAMILY
|
||||
):
|
||||
return "libc"
|
||||
|
||||
# Track uncategorized symbols for analysis
|
||||
return "other"
|
||||
|
||||
def _batch_demangle_symbols(self, symbols: list[str]) -> None:
|
||||
"""Batch demangle C++ symbol names for efficiency."""
|
||||
if not symbols:
|
||||
return
|
||||
|
||||
_LOGGER.info("Demangling %d symbols", len(symbols))
|
||||
self._demangle_cache = batch_demangle(symbols, objdump_path=self.objdump_path)
|
||||
_LOGGER.info("Successfully demangled %d symbols", len(self._demangle_cache))
|
||||
|
||||
def _demangle_symbol(self, symbol: str) -> str:
|
||||
"""Get demangled C++ symbol name from cache."""
|
||||
return self._demangle_cache.get(symbol, symbol)
|
||||
|
||||
def _categorize_esphome_core_symbol(self, demangled: str) -> str:
|
||||
"""Categorize ESPHome core symbols into subcategories."""
|
||||
# Special patterns that need to be checked separately
|
||||
if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS):
|
||||
return "C++ Runtime (vtables/RTTI)"
|
||||
|
||||
if demangled.startswith(_NAMESPACE_STD):
|
||||
return "C++ STL"
|
||||
|
||||
# Check against patterns from const.py
|
||||
for category, patterns in CORE_SUBCATEGORY_PATTERNS.items():
|
||||
if any(pattern in demangled for pattern in patterns):
|
||||
return category
|
||||
|
||||
return "Other Core"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from .cli import main
|
||||
|
||||
main()
|
||||
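A hedged sketch of driving the `MemoryAnalyzer` defined in the deleted module above; the import path, ELF path, and toolchain binary names are placeholders:

```python
from esphome.analyze_memory import MemoryAnalyzer  # assumed module path

analyzer = MemoryAnalyzer(
    "build/my-device/firmware.elf",           # path to a compiled ESPHome ELF (placeholder)
    objdump_path="xtensa-esp32-elf-objdump",  # toolchain tools; auto-detected when idedata is passed
    readelf_path="xtensa-esp32-elf-readelf",
)
components = analyzer.analyze()

# Print flash/RAM totals per component, largest flash consumers first.
for name, mem in sorted(components.items(), key=lambda kv: kv[1].flash_total, reverse=True):
    print(f"{name}: flash={mem.flash_total:,} B ram={mem.ram_total:,} B")
```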
@@ -1,6 +0,0 @@
"""Main entry point for running the memory analyzer as a module."""

from .cli import main

if __name__ == "__main__":
    main()
@@ -1,458 +0,0 @@
|
||||
"""CLI interface for memory analysis with report generation."""
|
||||
|
||||
from collections import defaultdict
|
||||
import json
|
||||
import sys
|
||||
|
||||
from . import (
|
||||
_COMPONENT_API,
|
||||
_COMPONENT_CORE,
|
||||
_COMPONENT_PREFIX_ESPHOME,
|
||||
_COMPONENT_PREFIX_EXTERNAL,
|
||||
MemoryAnalyzer,
|
||||
)
|
||||
|
||||
|
||||
class MemoryAnalyzerCLI(MemoryAnalyzer):
|
||||
"""Memory analyzer with CLI-specific report generation."""
|
||||
|
||||
# Symbol size threshold for detailed analysis
|
||||
SYMBOL_SIZE_THRESHOLD: int = (
|
||||
100 # Show symbols larger than this in detailed analysis
|
||||
)
|
||||
|
||||
# Column width constants
|
||||
COL_COMPONENT: int = 29
|
||||
COL_FLASH_TEXT: int = 14
|
||||
COL_FLASH_DATA: int = 14
|
||||
COL_RAM_DATA: int = 12
|
||||
COL_RAM_BSS: int = 12
|
||||
COL_TOTAL_FLASH: int = 15
|
||||
COL_TOTAL_RAM: int = 12
|
||||
COL_SEPARATOR: int = 3 # " | "
|
||||
|
||||
# Core analysis column widths
|
||||
COL_CORE_SUBCATEGORY: int = 30
|
||||
COL_CORE_SIZE: int = 12
|
||||
COL_CORE_COUNT: int = 6
|
||||
COL_CORE_PERCENT: int = 10
|
||||
|
||||
# Calculate table width once at class level
|
||||
TABLE_WIDTH: int = (
|
||||
COL_COMPONENT
|
||||
+ COL_SEPARATOR
|
||||
+ COL_FLASH_TEXT
|
||||
+ COL_SEPARATOR
|
||||
+ COL_FLASH_DATA
|
||||
+ COL_SEPARATOR
|
||||
+ COL_RAM_DATA
|
||||
+ COL_SEPARATOR
|
||||
+ COL_RAM_BSS
|
||||
+ COL_SEPARATOR
|
||||
+ COL_TOTAL_FLASH
|
||||
+ COL_SEPARATOR
|
||||
+ COL_TOTAL_RAM
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _make_separator_line(*widths: int) -> str:
|
||||
"""Create a separator line with given column widths.
|
||||
|
||||
Args:
|
||||
widths: Column widths to create separators for
|
||||
|
||||
Returns:
|
||||
Separator line like "----+---------+-----"
|
||||
"""
|
||||
return "-+-".join("-" * width for width in widths)
|
||||
|
||||
# Pre-computed separator lines
|
||||
MAIN_TABLE_SEPARATOR: str = _make_separator_line(
|
||||
COL_COMPONENT,
|
||||
COL_FLASH_TEXT,
|
||||
COL_FLASH_DATA,
|
||||
COL_RAM_DATA,
|
||||
COL_RAM_BSS,
|
||||
COL_TOTAL_FLASH,
|
||||
COL_TOTAL_RAM,
|
||||
)
|
||||
|
||||
CORE_TABLE_SEPARATOR: str = _make_separator_line(
|
||||
COL_CORE_SUBCATEGORY,
|
||||
COL_CORE_SIZE,
|
||||
COL_CORE_COUNT,
|
||||
COL_CORE_PERCENT,
|
||||
)
|
||||
|
||||
def generate_report(self, detailed: bool = False) -> str:
|
||||
"""Generate a formatted memory report."""
|
||||
components = sorted(
|
||||
self.components.items(), key=lambda x: x[1].flash_total, reverse=True
|
||||
)
|
||||
|
||||
# Calculate totals
|
||||
total_flash = sum(c.flash_total for _, c in components)
|
||||
total_ram = sum(c.ram_total for _, c in components)
|
||||
|
||||
# Build report
|
||||
lines: list[str] = []
|
||||
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("Component Memory Analysis".center(self.TABLE_WIDTH))
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("")
|
||||
|
||||
# Main table - fixed column widths
|
||||
lines.append(
|
||||
f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}"
|
||||
)
|
||||
lines.append(self.MAIN_TABLE_SEPARATOR)
|
||||
|
||||
for name, mem in components:
|
||||
if mem.flash_total > 0 or mem.ram_total > 0:
|
||||
flash_rodata = mem.rodata_size + mem.data_size
|
||||
lines.append(
|
||||
f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | "
|
||||
f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | "
|
||||
f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B"
|
||||
)
|
||||
|
||||
lines.append(self.MAIN_TABLE_SEPARATOR)
|
||||
lines.append(
|
||||
f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | "
|
||||
f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | "
|
||||
f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B"
|
||||
)
|
||||
|
||||
# Top consumers
|
||||
lines.append("")
|
||||
lines.append("Top Flash Consumers:")
|
||||
for i, (name, mem) in enumerate(components[:25]):
|
||||
if mem.flash_total > 0:
|
||||
percentage = (
|
||||
(mem.flash_total / total_flash * 100) if total_flash > 0 else 0
|
||||
)
|
||||
lines.append(
|
||||
f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash"
|
||||
)
|
||||
|
||||
lines.append("")
|
||||
lines.append("Top RAM Consumers:")
|
||||
ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True)
|
||||
for i, (name, mem) in enumerate(ram_components[:25]):
|
||||
if mem.ram_total > 0:
|
||||
percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0
|
||||
lines.append(
|
||||
f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM"
|
||||
)
|
||||
|
||||
lines.append("")
|
||||
lines.append(
|
||||
"Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included."
|
||||
)
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
|
||||
# Add ESPHome core detailed analysis if there are core symbols
|
||||
if self._esphome_core_symbols:
|
||||
lines.append("")
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append(
|
||||
f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH)
|
||||
)
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("")
|
||||
|
||||
# Group core symbols by subcategory
|
||||
core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict(
|
||||
list
|
||||
)
|
||||
|
||||
for symbol, demangled, size in self._esphome_core_symbols:
|
||||
# Categorize based on demangled name patterns
|
||||
subcategory = self._categorize_esphome_core_symbol(demangled)
|
||||
core_subcategories[subcategory].append((symbol, demangled, size))
|
||||
|
||||
# Sort subcategories by total size
|
||||
sorted_subcategories = sorted(
|
||||
[
|
||||
(name, symbols, sum(s[2] for s in symbols))
|
||||
for name, symbols in core_subcategories.items()
|
||||
],
|
||||
key=lambda x: x[2],
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
lines.append(
|
||||
f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | "
|
||||
f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}"
|
||||
)
|
||||
lines.append(self.CORE_TABLE_SEPARATOR)
|
||||
|
||||
core_total = sum(size for _, _, size in self._esphome_core_symbols)
|
||||
|
||||
for subcategory, symbols, total_size in sorted_subcategories:
|
||||
percentage = (total_size / core_total * 100) if core_total > 0 else 0
|
||||
lines.append(
|
||||
f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | "
|
||||
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
|
||||
)
|
||||
|
||||
# All core symbols above threshold
|
||||
lines.append("")
|
||||
sorted_core_symbols = sorted(
|
||||
self._esphome_core_symbols, key=lambda x: x[2], reverse=True
|
||||
)
|
||||
large_core_symbols = [
|
||||
(symbol, demangled, size)
|
||||
for symbol, demangled, size in sorted_core_symbols
|
||||
if size > self.SYMBOL_SIZE_THRESHOLD
|
||||
]
|
||||
|
||||
lines.append(
|
||||
f"{_COMPONENT_CORE} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_core_symbols)} symbols):"
|
||||
)
|
||||
for i, (symbol, demangled, size) in enumerate(large_core_symbols):
|
||||
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
|
||||
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
|
||||
# Add detailed analysis for top ESPHome and external components
|
||||
esphome_components = [
|
||||
(name, mem)
|
||||
for name, mem in components
|
||||
if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE
|
||||
]
|
||||
external_components = [
|
||||
(name, mem)
|
||||
for name, mem in components
|
||||
if name.startswith(_COMPONENT_PREFIX_EXTERNAL)
|
||||
]
|
||||
|
||||
top_esphome_components = sorted(
|
||||
esphome_components, key=lambda x: x[1].flash_total, reverse=True
|
||||
)[:30]
|
||||
|
||||
# Include all external components (they're usually important)
|
||||
top_external_components = sorted(
|
||||
external_components, key=lambda x: x[1].flash_total, reverse=True
|
||||
)
|
||||
|
||||
# Check if API component exists and ensure it's included
|
||||
api_component = None
|
||||
for name, mem in components:
|
||||
if name == _COMPONENT_API:
|
||||
api_component = (name, mem)
|
||||
break
|
||||
|
||||
# Also include wifi_stack and other important system components if they exist
|
||||
system_components_to_include = [
|
||||
# Empty list - we've finished debugging symbol categorization
|
||||
# Add component names here if you need to debug their symbols
|
||||
]
|
||||
system_components = [
|
||||
(name, mem)
|
||||
for name, mem in components
|
||||
if name in system_components_to_include
|
||||
]
|
||||
|
||||
# Combine all components to analyze: top ESPHome + all external + API if not already included + system components
|
||||
components_to_analyze = (
|
||||
list(top_esphome_components)
|
||||
+ list(top_external_components)
|
||||
+ system_components
|
||||
)
|
||||
if api_component and api_component not in components_to_analyze:
|
||||
components_to_analyze.append(api_component)
|
||||
|
||||
if components_to_analyze:
|
||||
for comp_name, comp_mem in components_to_analyze:
|
||||
if not (comp_symbols := self._component_symbols.get(comp_name, [])):
|
||||
continue
|
||||
lines.append("")
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH))
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
lines.append("")
|
||||
|
||||
# Sort symbols by size
|
||||
sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True)
|
||||
|
||||
lines.append(f"Total symbols: {len(sorted_symbols)}")
|
||||
lines.append(f"Total size: {comp_mem.flash_total:,} B")
|
||||
lines.append("")
|
||||
|
||||
# Show all symbols above threshold for better visibility
|
||||
large_symbols = [
|
||||
(sym, dem, size)
|
||||
for sym, dem, size in sorted_symbols
|
||||
if size > self.SYMBOL_SIZE_THRESHOLD
|
||||
]
|
||||
|
||||
lines.append(
|
||||
f"{comp_name} Symbols > {self.SYMBOL_SIZE_THRESHOLD} B ({len(large_symbols)} symbols):"
|
||||
)
|
||||
for i, (symbol, demangled, size) in enumerate(large_symbols):
|
||||
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
|
||||
|
||||
lines.append("=" * self.TABLE_WIDTH)
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def to_json(self) -> str:
|
||||
"""Export analysis results as JSON."""
|
||||
data = {
|
||||
"components": {
|
||||
name: {
|
||||
"text": mem.text_size,
|
||||
"rodata": mem.rodata_size,
|
||||
"data": mem.data_size,
|
||||
"bss": mem.bss_size,
|
||||
"flash_total": mem.flash_total,
|
||||
"ram_total": mem.ram_total,
|
||||
"symbol_count": mem.symbol_count,
|
||||
}
|
||||
for name, mem in self.components.items()
|
||||
},
|
||||
"totals": {
|
||||
"flash": sum(c.flash_total for c in self.components.values()),
|
||||
"ram": sum(c.ram_total for c in self.components.values()),
|
||||
},
|
||||
}
|
||||
return json.dumps(data, indent=2)
|
||||
|
||||
def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
|
||||
"""Dump uncategorized symbols for analysis."""
|
||||
# Sort by size descending
|
||||
sorted_symbols = sorted(
|
||||
self._uncategorized_symbols, key=lambda x: x[2], reverse=True
|
||||
)
|
||||
|
||||
lines = ["Uncategorized Symbols Analysis", "=" * 80]
|
||||
lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}")
|
||||
lines.append(
|
||||
f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes"
|
||||
)
|
||||
lines.append("")
|
||||
lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled")
|
||||
lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40)
|
||||
|
||||
for symbol, demangled, size in sorted_symbols[:100]: # Top 100
|
||||
demangled_display = (
|
||||
demangled[:100] if symbol != demangled else "[not demangled]"
|
||||
)
|
||||
lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}")
|
||||
|
||||
if len(sorted_symbols) > 100:
|
||||
lines.append(f"\n... and {len(sorted_symbols) - 100} more symbols")
|
||||
|
||||
content = "\n".join(lines)
|
||||
|
||||
if output_file:
|
||||
with open(output_file, "w", encoding="utf-8") as f:
|
||||
f.write(content)
|
||||
else:
|
||||
print(content)
|
||||
|
||||
|
||||
def analyze_elf(
|
||||
elf_path: str,
|
||||
objdump_path: str | None = None,
|
||||
readelf_path: str | None = None,
|
||||
detailed: bool = False,
|
||||
external_components: set[str] | None = None,
|
||||
) -> str:
|
||||
"""Analyze an ELF file and return a memory report."""
|
||||
analyzer = MemoryAnalyzerCLI(
|
||||
elf_path, objdump_path, readelf_path, external_components
|
||||
)
|
||||
analyzer.analyze()
|
||||
return analyzer.generate_report(detailed)
|
||||
|
||||
|
||||
def main():
|
||||
"""CLI entrypoint for memory analysis."""
|
||||
if len(sys.argv) < 2:
|
||||
print("Usage: python -m esphome.analyze_memory <build_directory>")
|
||||
print("\nAnalyze memory usage from an ESPHome build directory.")
|
||||
print("The build directory should contain firmware.elf and idedata will be")
|
||||
print("loaded from ~/.esphome/.internal/idedata/<device>.json")
|
||||
print("\nExamples:")
|
||||
print(" python -m esphome.analyze_memory ~/.esphome/build/my-device")
|
||||
print(" python -m esphome.analyze_memory .esphome/build/my-device")
|
||||
print(" python -m esphome.analyze_memory my-device # Short form")
|
||||
sys.exit(1)
|
||||
|
||||
build_dir = sys.argv[1]
|
||||
|
||||
# Load build directory
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from esphome.platformio_api import IDEData
|
||||
|
||||
build_path = Path(build_dir)
|
||||
|
||||
# If no path separator in name, assume it's a device name
|
||||
if "/" not in build_dir and not build_path.is_dir():
|
||||
# Try current directory first
|
||||
cwd_path = Path.cwd() / ".esphome" / "build" / build_dir
|
||||
if cwd_path.is_dir():
|
||||
build_path = cwd_path
|
||||
print(f"Using build directory: {build_path}", file=sys.stderr)
|
||||
else:
|
||||
# Fall back to home directory
|
||||
build_path = Path.home() / ".esphome" / "build" / build_dir
|
||||
print(f"Using build directory: {build_path}", file=sys.stderr)
|
||||
|
||||
if not build_path.is_dir():
|
||||
print(f"Error: {build_path} is not a directory", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find firmware.elf
|
||||
elf_file = None
|
||||
for elf_candidate in [
|
||||
build_path / "firmware.elf",
|
||||
build_path / ".pioenvs" / build_path.name / "firmware.elf",
|
||||
]:
|
||||
if elf_candidate.exists():
|
||||
elf_file = str(elf_candidate)
|
||||
break
|
||||
|
||||
if not elf_file:
|
||||
print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find idedata.json - check current directory first, then home
|
||||
device_name = build_path.name
|
||||
idedata_candidates = [
|
||||
Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json",
|
||||
Path.home() / ".esphome" / "idedata" / f"{device_name}.json",
|
||||
]
|
||||
|
||||
idedata = None
|
||||
for idedata_path in idedata_candidates:
|
||||
if not idedata_path.exists():
|
||||
continue
|
||||
try:
|
||||
with open(idedata_path, encoding="utf-8") as f:
|
||||
raw_data = json.load(f)
|
||||
idedata = IDEData(raw_data)
|
||||
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
|
||||
break
|
||||
except (json.JSONDecodeError, OSError) as e:
|
||||
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
|
||||
|
||||
if not idedata:
|
||||
print(
|
||||
f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata)
|
||||
analyzer.analyze()
|
||||
report = analyzer.generate_report()
|
||||
print(report)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
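For completeness, a minimal sketch of calling the `analyze_elf` helper above directly (module path and file paths are assumptions); the same analysis is also reachable via `python -m esphome.analyze_memory <build_directory>` as shown in `main()`:

```python
from esphome.analyze_memory.cli import analyze_elf  # assumed module path

report = analyze_elf(
    "build/my-device/firmware.elf",           # placeholder ELF path
    objdump_path="xtensa-esp32-elf-objdump",  # placeholder toolchain binaries
    readelf_path="xtensa-esp32-elf-readelf",
    external_components={"my_external_component"},
)
print(report)
```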
File diff suppressed because it is too large
@@ -1,182 +0,0 @@
|
||||
"""Symbol demangling utilities for memory analysis.
|
||||
|
||||
This module provides functions for demangling C++ symbol names using c++filt.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from .toolchain import find_tool
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# GCC global constructor/destructor prefix annotations
|
||||
GCC_PREFIX_ANNOTATIONS = {
|
||||
"_GLOBAL__sub_I_": "global constructor for",
|
||||
"_GLOBAL__sub_D_": "global destructor for",
|
||||
}
|
||||
|
||||
# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2)
|
||||
GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
|
||||
|
||||
|
||||
def _strip_gcc_annotations(symbol: str) -> tuple[str, str]:
|
||||
"""Strip GCC optimization suffixes and prefixes from a symbol.
|
||||
|
||||
Args:
|
||||
symbol: The mangled symbol name
|
||||
|
||||
Returns:
|
||||
Tuple of (stripped_symbol, removed_prefix)
|
||||
"""
|
||||
# Remove GCC optimization markers
|
||||
stripped = GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)
|
||||
|
||||
# Handle GCC global constructor/initializer prefixes
|
||||
prefix = ""
|
||||
for gcc_prefix in GCC_PREFIX_ANNOTATIONS:
|
||||
if stripped.startswith(gcc_prefix):
|
||||
prefix = gcc_prefix
|
||||
stripped = stripped[len(prefix) :]
|
||||
break
|
||||
|
||||
return stripped, prefix
|
||||
|
||||
|
||||
def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str:
|
||||
"""Restore prefix that was removed before demangling.
|
||||
|
||||
Args:
|
||||
prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_")
|
||||
stripped: Stripped symbol name
|
||||
demangled: Demangled symbol name
|
||||
|
||||
Returns:
|
||||
Demangled name with prefix restored/annotated
|
||||
"""
|
||||
if not prefix:
|
||||
return demangled
|
||||
|
||||
# Successfully demangled - add descriptive prefix
|
||||
if demangled != stripped and (annotation := GCC_PREFIX_ANNOTATIONS.get(prefix)):
|
||||
return f"[{annotation}: {demangled}]"
|
||||
|
||||
# Failed to demangle - restore original prefix
|
||||
return prefix + demangled
|
||||
|
||||
|
||||
def _restore_symbol_suffix(original: str, demangled: str) -> str:
|
||||
"""Restore GCC optimization suffix that was removed before demangling.
|
||||
|
||||
Args:
|
||||
original: Original symbol name with suffix
|
||||
demangled: Demangled symbol name without suffix
|
||||
|
||||
Returns:
|
||||
Demangled name with suffix annotation
|
||||
"""
|
||||
if suffix_match := GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
|
||||
return f"{demangled} [{suffix_match.group(1)}]"
|
||||
return demangled
|
||||
|
||||
|
||||
def batch_demangle(
|
||||
symbols: list[str],
|
||||
cppfilt_path: str | None = None,
|
||||
objdump_path: str | None = None,
|
||||
) -> dict[str, str]:
|
||||
"""Batch demangle C++ symbol names.
|
||||
|
||||
Args:
|
||||
symbols: List of symbol names to demangle
|
||||
cppfilt_path: Path to c++filt binary (auto-detected if not provided)
|
||||
objdump_path: Path to objdump binary to derive c++filt path from
|
||||
|
||||
Returns:
|
||||
Dictionary mapping original symbol names to demangled names
|
||||
"""
|
||||
cache: dict[str, str] = {}
|
||||
|
||||
if not symbols:
|
||||
return cache
|
||||
|
||||
# Find c++filt tool
|
||||
cppfilt_cmd = cppfilt_path or find_tool("c++filt", objdump_path)
|
||||
if not cppfilt_cmd:
|
||||
_LOGGER.warning("Could not find c++filt, symbols will not be demangled")
|
||||
return {s: s for s in symbols}
|
||||
|
||||
_LOGGER.debug("Demangling %d symbols using %s", len(symbols), cppfilt_cmd)
|
||||
|
||||
# Strip GCC optimization suffixes and prefixes before demangling
|
||||
symbols_stripped: list[str] = []
|
||||
symbols_prefixes: list[str] = []
|
||||
for symbol in symbols:
|
||||
stripped, prefix = _strip_gcc_annotations(symbol)
|
||||
symbols_stripped.append(stripped)
|
||||
symbols_prefixes.append(prefix)
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
[cppfilt_cmd],
|
||||
input="\n".join(symbols_stripped),
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=False,
|
||||
)
|
||||
except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e:
|
||||
_LOGGER.warning("Failed to batch demangle symbols: %s", e)
|
||||
return {s: s for s in symbols}
|
||||
|
||||
if result.returncode != 0:
|
||||
_LOGGER.warning(
|
||||
"c++filt exited with code %d: %s",
|
||||
result.returncode,
|
||||
result.stderr[:200] if result.stderr else "(no error output)",
|
||||
)
|
||||
return {s: s for s in symbols}
|
||||
|
||||
# Process demangled output
|
||||
demangled_lines = result.stdout.strip().split("\n")
|
||||
|
||||
# Check for output length mismatch
|
||||
if len(demangled_lines) != len(symbols):
|
||||
_LOGGER.warning(
|
||||
"c++filt output mismatch: expected %d lines, got %d",
|
||||
len(symbols),
|
||||
len(demangled_lines),
|
||||
)
|
||||
return {s: s for s in symbols}
|
||||
|
||||
failed_count = 0
|
||||
|
||||
for original, stripped, prefix, demangled in zip(
|
||||
symbols, symbols_stripped, symbols_prefixes, demangled_lines
|
||||
):
|
||||
# Add back any prefix that was removed
|
||||
demangled = _restore_symbol_prefix(prefix, stripped, demangled)
|
||||
|
||||
# If we stripped a suffix, add it back to the demangled name for clarity
|
||||
if original != stripped and not prefix:
|
||||
demangled = _restore_symbol_suffix(original, demangled)
|
||||
|
||||
cache[original] = demangled
|
||||
|
||||
# Count symbols that failed to demangle
|
||||
if stripped == demangled and stripped.startswith("_Z"):
|
||||
failed_count += 1
|
||||
if failed_count <= 5:
|
||||
_LOGGER.debug("Failed to demangle: %s", original)
|
||||
|
||||
if failed_count > 0:
|
||||
_LOGGER.debug(
|
||||
"Failed to demangle %d/%d symbols using %s",
|
||||
failed_count,
|
||||
len(symbols),
|
||||
cppfilt_cmd,
|
||||
)
|
||||
|
||||
return cache
|
||||
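A small sketch of `batch_demangle` from the module above; the mangled names are generic Itanium-ABI examples and a `c++filt` binary on PATH is assumed:

```python
from esphome.analyze_memory.demangle import batch_demangle  # assumed module path

mangled = [
    "_ZN7esphome11ApplicationC1Ev",  # C++ constructor: demangles to esphome::Application::Application()
    "_GLOBAL__sub_I_setup",          # GCC global-constructor stub; prefix is stripped before c++filt and restored after
    "app_main",                      # plain C symbol, returned unchanged
]
for original, demangled in batch_demangle(mangled, cppfilt_path="c++filt").items():
    print(f"{original} -> {demangled}")
```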
@@ -1,121 +0,0 @@
|
||||
"""Helper functions for memory analysis."""
|
||||
|
||||
from functools import cache
|
||||
from pathlib import Path
|
||||
|
||||
from .const import SECTION_MAPPING
|
||||
|
||||
# Import namespace constant from parent module
|
||||
# Note: This would create a circular import if done at module level,
|
||||
# so we'll define it locally here as well
|
||||
_NAMESPACE_ESPHOME = "esphome::"
|
||||
|
||||
|
||||
# Get the list of actual ESPHome components by scanning the components directory
|
||||
@cache
|
||||
def get_esphome_components():
|
||||
"""Get set of actual ESPHome components from the components directory."""
|
||||
# Find the components directory relative to this file
|
||||
# Go up two levels from analyze_memory/helpers.py to esphome/
|
||||
current_dir = Path(__file__).parent.parent
|
||||
components_dir = current_dir / "components"
|
||||
|
||||
if not components_dir.exists() or not components_dir.is_dir():
|
||||
return frozenset()
|
||||
|
||||
return frozenset(
|
||||
item.name
|
||||
for item in components_dir.iterdir()
|
||||
if item.is_dir()
|
||||
and not item.name.startswith(".")
|
||||
and not item.name.startswith("__")
|
||||
)
|
||||
|
||||
|
||||
@cache
|
||||
def get_component_class_patterns(component_name: str) -> list[str]:
|
||||
"""Generate component class name patterns for symbol matching.
|
||||
|
||||
Args:
|
||||
component_name: The component name (e.g., "ota", "wifi", "api")
|
||||
|
||||
Returns:
|
||||
List of pattern strings to match against demangled symbols
|
||||
"""
|
||||
component_upper = component_name.upper()
|
||||
component_camel = component_name.replace("_", "").title()
|
||||
return [
|
||||
f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent
|
||||
f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent
|
||||
f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent
|
||||
f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent
|
||||
]
|
||||
|
||||
|
||||
def map_section_name(raw_section: str) -> str | None:
|
||||
"""Map raw section name to standard section.
|
||||
|
||||
Args:
|
||||
raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1")
|
||||
|
||||
Returns:
|
||||
Standard section name (".text", ".rodata", ".data", ".bss") or None
|
||||
"""
|
||||
for standard_section, patterns in SECTION_MAPPING.items():
|
||||
if any(pattern in raw_section for pattern in patterns):
|
||||
return standard_section
|
||||
return None
|
||||
|
||||
|
||||
def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None:
|
||||
"""Parse a single symbol line from objdump output.
|
||||
|
||||
Args:
|
||||
line: Line from objdump -t output
|
||||
|
||||
Returns:
|
||||
Tuple of (section, name, size, address) or None if not a valid symbol.
|
||||
Format: address l/g w/d F/O section size name
|
||||
Example: 40084870 l F .iram0.text 00000000 _xt_user_exc
|
||||
"""
|
||||
parts = line.split()
|
||||
if len(parts) < 5:
|
||||
return None
|
||||
|
||||
try:
|
||||
# Validate and extract address
|
||||
address = parts[0]
|
||||
int(address, 16)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
# Look for F (function) or O (object) flag
|
||||
if "F" not in parts and "O" not in parts:
|
||||
return None
|
||||
|
||||
# Find section, size, and name
|
||||
for i, part in enumerate(parts):
|
||||
if not part.startswith("."):
|
||||
continue
|
||||
|
||||
section = map_section_name(part)
|
||||
if not section:
|
||||
break
|
||||
|
||||
# Need at least size field after section
|
||||
if i + 1 >= len(parts):
|
||||
break
|
||||
|
||||
try:
|
||||
size = int(parts[i + 1], 16)
|
||||
except ValueError:
|
||||
break
|
||||
|
||||
# Need symbol name and non-zero size
|
||||
if i + 2 >= len(parts) or size == 0:
|
||||
break
|
||||
|
||||
name = " ".join(parts[i + 2 :])
|
||||
return (section, name, size, address)
|
||||
|
||||
return None
|
||||
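A quick illustration of `get_component_class_patterns` from the helpers above; the output follows directly from the function body (shown here for the `ota` component, import path assumed):

```python
from esphome.analyze_memory.helpers import get_component_class_patterns  # assumed module path

for pattern in get_component_class_patterns("ota"):
    print(pattern)
# esphome::OTAComponent
# esphome::ESPHomeOTAComponent
# esphome::OtaComponent
# esphome::ESPHomeOtaComponent
```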
@@ -1,493 +0,0 @@
|
||||
"""Analyzer for RAM-stored strings in ESP8266/ESP32 firmware ELF files.
|
||||
|
||||
This module identifies strings that are stored in RAM sections (.data, .bss, .rodata)
|
||||
rather than in flash sections (.irom0.text, .irom.text), which is important for
|
||||
memory-constrained platforms like ESP8266.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from .demangle import batch_demangle
|
||||
from .toolchain import find_tool
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# ESP8266: .rodata is in RAM (DRAM), not flash
|
||||
# ESP32: .rodata is in flash, mapped to data bus
|
||||
ESP8266_RAM_SECTIONS = frozenset([".data", ".rodata", ".bss"])
|
||||
ESP8266_FLASH_SECTIONS = frozenset([".irom0.text", ".irom.text", ".text"])
|
||||
|
||||
# ESP32: .rodata is memory-mapped from flash
|
||||
ESP32_RAM_SECTIONS = frozenset([".data", ".bss", ".dram0.data", ".dram0.bss"])
|
||||
ESP32_FLASH_SECTIONS = frozenset([".text", ".rodata", ".flash.text", ".flash.rodata"])
|
||||
|
||||
# nm symbol types for data symbols (D=global data, d=local data, R=rodata, B=bss)
|
||||
DATA_SYMBOL_TYPES = frozenset(["D", "d", "R", "r", "B", "b"])
|
||||
|
||||
|
||||
@dataclass
|
||||
class SectionInfo:
|
||||
"""Information about an ELF section."""
|
||||
|
||||
name: str
|
||||
address: int
|
||||
size: int
|
||||
|
||||
|
||||
@dataclass
|
||||
class RamString:
|
||||
"""A string found in RAM."""
|
||||
|
||||
section: str
|
||||
address: int
|
||||
content: str
|
||||
|
||||
@property
|
||||
def size(self) -> int:
|
||||
"""Size in bytes including null terminator."""
|
||||
return len(self.content) + 1
|
||||
|
||||
|
||||
@dataclass
|
||||
class RamSymbol:
|
||||
"""A symbol found in RAM."""
|
||||
|
||||
name: str
|
||||
sym_type: str
|
||||
address: int
|
||||
size: int
|
||||
section: str
|
||||
demangled: str = "" # Demangled name, set after batch demangling
|
||||
|
||||
|
||||
class RamStringsAnalyzer:
|
||||
"""Analyzes ELF files to find strings stored in RAM."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
elf_path: str,
|
||||
objdump_path: str | None = None,
|
||||
min_length: int = 8,
|
||||
platform: str = "esp32",
|
||||
) -> None:
|
||||
"""Initialize the RAM strings analyzer.
|
||||
|
||||
Args:
|
||||
elf_path: Path to the ELF file to analyze
|
||||
objdump_path: Path to objdump binary (used to find other tools)
|
||||
min_length: Minimum string length to report (default: 8)
|
||||
platform: Platform name ("esp8266", "esp32", etc.) for section mapping
|
||||
"""
|
||||
self.elf_path = Path(elf_path)
|
||||
if not self.elf_path.exists():
|
||||
raise FileNotFoundError(f"ELF file not found: {elf_path}")
|
||||
|
||||
self.objdump_path = objdump_path
|
||||
self.min_length = min_length
|
||||
self.platform = platform
|
||||
|
||||
# Set RAM/flash sections based on platform
|
||||
if self.platform == "esp8266":
|
||||
self.ram_sections = ESP8266_RAM_SECTIONS
|
||||
self.flash_sections = ESP8266_FLASH_SECTIONS
|
||||
else:
|
||||
# ESP32 and other platforms
|
||||
self.ram_sections = ESP32_RAM_SECTIONS
|
||||
self.flash_sections = ESP32_FLASH_SECTIONS
|
||||
|
||||
self.sections: dict[str, SectionInfo] = {}
|
||||
self.ram_strings: list[RamString] = []
|
||||
self.ram_symbols: list[RamSymbol] = []
|
||||
|
||||
def _run_command(self, cmd: list[str]) -> str:
|
||||
"""Run a command and return its output."""
|
||||
try:
|
||||
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
|
||||
return result.stdout
|
||||
except subprocess.CalledProcessError as e:
|
||||
_LOGGER.debug("Command failed: %s - %s", " ".join(cmd), e.stderr)
|
||||
raise
|
||||
except FileNotFoundError:
|
||||
_LOGGER.warning("Command not found: %s", cmd[0])
|
||||
raise
|
||||
|
||||
def analyze(self) -> None:
|
||||
"""Perform the full RAM analysis."""
|
||||
self._parse_sections()
|
||||
self._extract_strings()
|
||||
self._analyze_symbols()
|
||||
self._demangle_symbols()
|
||||
|
||||
def _parse_sections(self) -> None:
|
||||
"""Parse section headers from ELF file."""
|
||||
objdump = find_tool("objdump", self.objdump_path)
|
||||
if not objdump:
|
||||
_LOGGER.error("Could not find objdump command")
|
||||
return
|
||||
|
||||
try:
|
||||
output = self._run_command([objdump, "-h", str(self.elf_path)])
|
||||
except (subprocess.CalledProcessError, FileNotFoundError):
|
||||
return
|
||||
|
||||
# Parse section headers
|
||||
# Format: Idx Name Size VMA LMA File off Algn
|
||||
section_pattern = re.compile(
|
||||
r"^\s*\d+\s+(\S+)\s+([0-9a-fA-F]+)\s+([0-9a-fA-F]+)"
|
||||
)
|
||||
|
||||
for line in output.split("\n"):
|
||||
if match := section_pattern.match(line):
|
||||
name = match.group(1)
|
||||
size = int(match.group(2), 16)
|
||||
vma = int(match.group(3), 16)
|
||||
self.sections[name] = SectionInfo(name, vma, size)
|
||||
|
||||
def _extract_strings(self) -> None:
|
||||
"""Extract strings from RAM sections."""
|
||||
objdump = find_tool("objdump", self.objdump_path)
|
||||
if not objdump:
|
||||
return
|
||||
|
||||
for section_name in self.ram_sections:
|
||||
if section_name not in self.sections:
|
||||
continue
|
||||
|
||||
try:
|
||||
output = self._run_command(
|
||||
[objdump, "-s", "-j", section_name, str(self.elf_path)]
|
||||
)
|
||||
        except subprocess.CalledProcessError:
            # Section may exist but have no content (e.g., .bss)
            continue
        except FileNotFoundError:
            continue

        strings = self._parse_hex_dump(output, section_name)
        self.ram_strings.extend(strings)

    def _parse_hex_dump(self, output: str, section_name: str) -> list[RamString]:
        """Parse hex dump output to extract strings.

        Args:
            output: Output from objdump -s
            section_name: Name of the section being parsed

        Returns:
            List of RamString objects
        """
        strings: list[RamString] = []
        current_string = bytearray()
        string_start_addr = 0

        for line in output.split("\n"):
            # Lines look like: " 3ffef8a0 00000000 00000000 00000000 00000000 ................"
            match = re.match(r"^\s+([0-9a-fA-F]+)\s+((?:[0-9a-fA-F]{2,8}\s*)+)", line)
            if not match:
                continue

            addr = int(match.group(1), 16)
            hex_data = match.group(2).strip()

            # Convert hex to bytes
            hex_bytes = hex_data.split()
            byte_offset = 0
            for hex_chunk in hex_bytes:
                # Handle both byte-by-byte and word formats
                for i in range(0, len(hex_chunk), 2):
                    byte_val = int(hex_chunk[i : i + 2], 16)
                    if 0x20 <= byte_val <= 0x7E:  # Printable ASCII
                        if not current_string:
                            string_start_addr = addr + byte_offset
                        current_string.append(byte_val)
                    else:
                        if byte_val == 0 and len(current_string) >= self.min_length:
                            # Found null terminator
                            strings.append(
                                RamString(
                                    section=section_name,
                                    address=string_start_addr,
                                    content=current_string.decode(
                                        "ascii", errors="ignore"
                                    ),
                                )
                            )
                        current_string = bytearray()
                    byte_offset += 1

        return strings

    def _analyze_symbols(self) -> None:
        """Analyze symbols in RAM sections."""
        nm = find_tool("nm", self.objdump_path)
        if not nm:
            return

        try:
            output = self._run_command([nm, "-S", "--size-sort", str(self.elf_path)])
        except (subprocess.CalledProcessError, FileNotFoundError):
            return

        for line in output.split("\n"):
            parts = line.split()
            if len(parts) < 4:
                continue

            try:
                addr = int(parts[0], 16)
                size = int(parts[1], 16) if parts[1] != "?" else 0
            except ValueError:
                continue

            sym_type = parts[2]
            name = " ".join(parts[3:])

            # Filter for data symbols
            if sym_type not in DATA_SYMBOL_TYPES:
                continue

            # Check if symbol is in a RAM section
            for section_name in self.ram_sections:
                if section_name not in self.sections:
                    continue

                section = self.sections[section_name]
                if section.address <= addr < section.address + section.size:
                    self.ram_symbols.append(
                        RamSymbol(
                            name=name,
                            sym_type=sym_type,
                            address=addr,
                            size=size,
                            section=section_name,
                        )
                    )
                    break

    def _demangle_symbols(self) -> None:
        """Batch demangle all RAM symbol names."""
        if not self.ram_symbols:
            return

        # Collect all symbol names and demangle them
        symbol_names = [s.name for s in self.ram_symbols]
        demangle_cache = batch_demangle(symbol_names, objdump_path=self.objdump_path)

        # Assign demangled names to symbols
        for symbol in self.ram_symbols:
            symbol.demangled = demangle_cache.get(symbol.name, symbol.name)

    def _get_sections_size(self, section_names: frozenset[str]) -> int:
        """Get total size of specified sections."""
        return sum(
            section.size
            for name, section in self.sections.items()
            if name in section_names
        )

    def get_total_ram_usage(self) -> int:
        """Get total RAM usage from RAM sections."""
        return self._get_sections_size(self.ram_sections)

    def get_total_flash_usage(self) -> int:
        """Get total flash usage from flash sections."""
        return self._get_sections_size(self.flash_sections)

    def get_total_string_bytes(self) -> int:
        """Get total bytes used by strings in RAM."""
        return sum(s.size for s in self.ram_strings)

    def get_repeated_strings(self) -> list[tuple[str, int]]:
        """Find strings that appear multiple times.

        Returns:
            List of (string, count) tuples sorted by potential savings
        """
        string_counts: dict[str, int] = defaultdict(int)
        for ram_string in self.ram_strings:
            string_counts[ram_string.content] += 1

        return sorted(
            [(s, c) for s, c in string_counts.items() if c > 1],
            key=lambda x: x[1] * (len(x[0]) + 1),
            reverse=True,
        )

    def get_long_strings(self, min_len: int = 20) -> list[RamString]:
        """Get strings longer than the specified length.

        Args:
            min_len: Minimum string length

        Returns:
            List of RamString objects sorted by length
        """
        return sorted(
            [s for s in self.ram_strings if len(s.content) >= min_len],
            key=lambda x: len(x.content),
            reverse=True,
        )

    def get_largest_symbols(self, min_size: int = 100) -> list[RamSymbol]:
        """Get RAM symbols larger than the specified size.

        Args:
            min_size: Minimum symbol size in bytes

        Returns:
            List of RamSymbol objects sorted by size
        """
        return sorted(
            [s for s in self.ram_symbols if s.size >= min_size],
            key=lambda x: x.size,
            reverse=True,
        )

    def generate_report(self, show_all_sections: bool = False) -> str:
        """Generate a formatted RAM strings analysis report.

        Args:
            show_all_sections: If True, show all sections, not just RAM

        Returns:
            Formatted report string
        """
        lines: list[str] = []
        table_width = 80

        lines.append("=" * table_width)
        lines.append(
            f"RAM Strings Analysis ({self.platform.upper()})".center(table_width)
        )
        lines.append("=" * table_width)
        lines.append("")

        # Section Analysis
        lines.append("SECTION ANALYSIS")
        lines.append("-" * table_width)
        lines.append(f"{'Section':<20} {'Address':<12} {'Size':<12} {'Location'}")
        lines.append("-" * table_width)

        total_ram_usage = 0
        total_flash_usage = 0

        for name, section in sorted(self.sections.items(), key=lambda x: x[1].address):
            if name in self.ram_sections:
                location = "RAM"
                total_ram_usage += section.size
            elif name in self.flash_sections:
                location = "FLASH"
                total_flash_usage += section.size
            else:
                location = "OTHER"

            if show_all_sections or name in self.ram_sections:
                lines.append(
                    f"{name:<20} 0x{section.address:08x} {section.size:>8} B {location}"
                )

        lines.append("-" * table_width)
        lines.append(f"Total RAM sections size: {total_ram_usage:,} bytes")
        lines.append(f"Total Flash sections size: {total_flash_usage:,} bytes")

        # Strings in RAM
        lines.append("")
        lines.append("=" * table_width)
        lines.append("STRINGS IN RAM SECTIONS")
        lines.append("=" * table_width)
        lines.append(
            "Note: .bss sections contain uninitialized data (no strings to extract)"
        )

        # Group strings by section
        strings_by_section: dict[str, list[RamString]] = defaultdict(list)
        for ram_string in self.ram_strings:
            strings_by_section[ram_string.section].append(ram_string)

        for section_name in sorted(strings_by_section.keys()):
            section_strings = strings_by_section[section_name]
            lines.append(f"\nSection: {section_name}")
            lines.append("-" * 40)
            for ram_string in sorted(section_strings, key=lambda x: x.address):
                clean_string = ram_string.content[:100] + (
                    "..." if len(ram_string.content) > 100 else ""
                )
                lines.append(
                    f' 0x{ram_string.address:08x}: "{clean_string}" (len={len(ram_string.content)})'
                )

        # Large RAM symbols
        lines.append("")
        lines.append("=" * table_width)
        lines.append("LARGE DATA SYMBOLS IN RAM (>= 50 bytes)")
        lines.append("=" * table_width)

        largest_symbols = self.get_largest_symbols(50)
        lines.append(f"\n{'Symbol':<50} {'Type':<6} {'Size':<10} {'Section'}")
        lines.append("-" * table_width)

        for symbol in largest_symbols:
            # Use demangled name if available, otherwise raw name
            display_name = symbol.demangled or symbol.name
            name_display = display_name[:49] if len(display_name) > 49 else display_name
            lines.append(
                f"{name_display:<50} {symbol.sym_type:<6} {symbol.size:>8} B {symbol.section}"
            )

        # Summary
        lines.append("")
        lines.append("=" * table_width)
        lines.append("SUMMARY")
        lines.append("=" * table_width)
        lines.append(f"Total strings found in RAM: {len(self.ram_strings)}")
        total_string_bytes = self.get_total_string_bytes()
        lines.append(f"Total bytes used by strings: {total_string_bytes:,}")

        # Optimization targets
        lines.append("")
        lines.append("=" * table_width)
        lines.append("POTENTIAL OPTIMIZATION TARGETS")
        lines.append("=" * table_width)

        # Repeated strings
        repeated = self.get_repeated_strings()[:10]
        if repeated:
            lines.append("\nRepeated strings (could be deduplicated):")
            for string, count in repeated:
                savings = (count - 1) * (len(string) + 1)
                clean_string = string[:50] + ("..." if len(string) > 50 else "")
                lines.append(
                    f' "{clean_string}" - appears {count} times (potential savings: {savings} bytes)'
                )

        # Long strings - platform-specific advice
        long_strings = self.get_long_strings(20)[:10]
        if long_strings:
            if self.platform == "esp8266":
                lines.append(
                    "\nLong strings that could be moved to PROGMEM (>= 20 chars):"
                )
            else:
                # ESP32: strings in DRAM are typically there for a reason
                # (interrupt handlers, pre-flash-init code, etc.)
                lines.append("\nLong strings in DRAM (>= 20 chars):")
                lines.append(
                    "Note: ESP32 DRAM strings may be required for interrupt/early-boot contexts"
                )
            for ram_string in long_strings:
                clean_string = ram_string.content[:60] + (
                    "..." if len(ram_string.content) > 60 else ""
                )
                lines.append(
                    f' {ram_string.section} @ 0x{ram_string.address:08x}: "{clean_string}" ({len(ram_string.content)} bytes)'
                )

        lines.append("")
        return "\n".join(lines)
@@ -1,57 +0,0 @@
"""Toolchain utilities for memory analysis."""

from __future__ import annotations

import logging
from pathlib import Path
import subprocess

_LOGGER = logging.getLogger(__name__)

# Platform-specific toolchain prefixes
TOOLCHAIN_PREFIXES = [
    "xtensa-lx106-elf-",  # ESP8266
    "xtensa-esp32-elf-",  # ESP32
    "xtensa-esp-elf-",  # ESP32 (newer IDF)
    "",  # System default (no prefix)
]


def find_tool(
    tool_name: str,
    objdump_path: str | None = None,
) -> str | None:
    """Find a toolchain tool by name.

    First tries to derive the tool path from objdump_path (if provided),
    then falls back to searching for platform-specific tools.

    Args:
        tool_name: Name of the tool (e.g., "objdump", "nm", "c++filt")
        objdump_path: Path to objdump binary to derive other tool paths from

    Returns:
        Path to the tool or None if not found
    """
    # Try to derive from objdump path first (most reliable)
    if objdump_path and objdump_path != "objdump":
        objdump_file = Path(objdump_path)
        # Replace just the filename portion, preserving any prefix (e.g., xtensa-esp32-elf-)
        new_name = objdump_file.name.replace("objdump", tool_name)
        potential_path = str(objdump_file.with_name(new_name))
        if Path(potential_path).exists():
            _LOGGER.debug("Found %s at: %s", tool_name, potential_path)
            return potential_path

    # Try platform-specific tools
    for prefix in TOOLCHAIN_PREFIXES:
        cmd = f"{prefix}{tool_name}"
        try:
            subprocess.run([cmd, "--version"], capture_output=True, check=True)
            _LOGGER.debug("Found %s: %s", tool_name, cmd)
            return cmd
        except (subprocess.CalledProcessError, FileNotFoundError):
            continue

    _LOGGER.warning("Could not find %s tool", tool_name)
    return None
@@ -15,15 +15,7 @@ from esphome.const import (
    CONF_TYPE_ID,
    CONF_UPDATE_INTERVAL,
)
from esphome.core import ID, Lambda
from esphome.cpp_generator import (
    LambdaExpression,
    MockObj,
    MockObjClass,
    TemplateArgsType,
)
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
from esphome.types import ConfigType
from esphome.util import Registry


@@ -57,11 +49,11 @@ def maybe_conf(conf, *validators):
    return validate


def register_action(name: str, action_type: MockObjClass, schema: cv.Schema):
def register_action(name, action_type, schema):
    return ACTION_REGISTRY.register(name, action_type, schema)


def register_condition(name: str, condition_type: MockObjClass, schema: cv.Schema):
def register_condition(name, condition_type, schema):
    return CONDITION_REGISTRY.register(name, condition_type, schema)


@@ -92,7 +84,6 @@ def validate_potentially_or_condition(value):

DelayAction = cg.esphome_ns.class_("DelayAction", Action, cg.Component)
LambdaAction = cg.esphome_ns.class_("LambdaAction", Action)
StatelessLambdaAction = cg.esphome_ns.class_("StatelessLambdaAction", Action)
IfAction = cg.esphome_ns.class_("IfAction", Action)
WhileAction = cg.esphome_ns.class_("WhileAction", Action)
RepeatAction = cg.esphome_ns.class_("RepeatAction", Action)
@@ -103,40 +94,9 @@ ResumeComponentAction = cg.esphome_ns.class_("ResumeComponentAction", Action)
Automation = cg.esphome_ns.class_("Automation")

LambdaCondition = cg.esphome_ns.class_("LambdaCondition", Condition)
StatelessLambdaCondition = cg.esphome_ns.class_("StatelessLambdaCondition", Condition)
ForCondition = cg.esphome_ns.class_("ForCondition", Condition, cg.Component)


def new_lambda_pvariable(
    id_obj: ID,
    lambda_expr: LambdaExpression,
    stateless_class: MockObjClass,
    template_arg: cg.TemplateArguments | None = None,
) -> MockObj:
    """Create Pvariable for lambda, using stateless class if applicable.

    Combines ID selection and Pvariable creation in one call. For stateless
    lambdas (empty capture), uses function pointer instead of std::function.

    Args:
        id_obj: The ID object (action_id, condition_id, or filter_id)
        lambda_expr: The lambda expression object
        stateless_class: The stateless class to use for stateless lambdas
        template_arg: Optional template arguments (for actions/conditions)

    Returns:
        The created Pvariable
    """
    # For stateless lambdas, use function pointer instead of std::function
    if lambda_expr.capture == "":
        id_obj = id_obj.copy()
        id_obj.type = stateless_class

    if template_arg is not None:
        return cg.new_Pvariable(id_obj, template_arg, lambda_expr)
    return cg.new_Pvariable(id_obj, lambda_expr)


def validate_automation(extra_schema=None, extra_validators=None, single=False):
    if extra_schema is None:
        extra_schema = {}
@@ -182,7 +142,7 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
        value = cv.Schema([extra_validators])(value)
    if single:
        if len(value) != 1:
            raise cv.Invalid("This trigger allows only a single automation")
            raise cv.Invalid("Cannot have more than 1 automation for templates")
        return value[0]
    return value

@@ -204,82 +164,45 @@ XorCondition = cg.esphome_ns.class_("XorCondition", Condition)


@register_condition("and", AndCondition, validate_condition_list)
async def and_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def and_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("or", OrCondition, validate_condition_list)
async def or_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def or_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("all", AndCondition, validate_condition_list)
async def all_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def all_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("any", OrCondition, validate_condition_list)
async def any_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def any_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("not", NotCondition, validate_potentially_and_condition)
async def not_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def not_condition_to_code(config, condition_id, template_arg, args):
    condition = await build_condition(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, condition)


@register_condition("xor", XorCondition, validate_condition_list)
async def xor_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def xor_condition_to_code(config, condition_id, template_arg, args):
    conditions = await build_condition_list(config, template_arg, args)
    return cg.new_Pvariable(condition_id, template_arg, conditions)


@register_condition("lambda", LambdaCondition, cv.returning_lambda)
async def lambda_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def lambda_condition_to_code(config, condition_id, template_arg, args):
    lambda_ = await cg.process_lambda(config, args, return_type=bool)
    return new_lambda_pvariable(
        condition_id, lambda_, StatelessLambdaCondition, template_arg
    )
    return cg.new_Pvariable(condition_id, template_arg, lambda_)


@register_condition(
@@ -294,12 +217,7 @@ async def lambda_condition_to_code(
        }
    ).extend(cv.COMPONENT_SCHEMA),
)
async def for_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def for_condition_to_code(config, condition_id, template_arg, args):
    condition = await build_condition(
        config[CONF_CONDITION], cg.TemplateArguments(), []
    )
@@ -310,39 +228,10 @@ async def for_condition_to_code(
    return var


@register_condition(
    "component.is_idle",
    LambdaCondition,
    maybe_simple_id(
        {
            cv.Required(CONF_ID): cv.use_id(cg.Component),
        }
    ),
)
async def component_is_idle_condition_to_code(
    config: ConfigType,
    condition_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
    comp = await cg.get_variable(config[CONF_ID])
    lambda_ = await cg.process_lambda(
        Lambda(f"return {comp}->is_idle();"), args, return_type=bool
    )
    return new_lambda_pvariable(
        condition_id, lambda_, StatelessLambdaCondition, template_arg
    )


@register_action(
    "delay", DelayAction, cv.templatable(cv.positive_time_period_milliseconds)
)
async def delay_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def delay_action_to_code(config, action_id, template_arg, args):
    var = cg.new_Pvariable(action_id, template_arg)
    await cg.register_component(var, {})
    template_ = await cg.templatable(config, args, cg.uint32)
@@ -367,15 +256,10 @@ async def delay_action_to_code(
        cv.has_at_least_one_key(CONF_CONDITION, CONF_ANY, CONF_ALL),
    ),
)
async def if_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def if_action_to_code(config, action_id, template_arg, args):
    cond_conf = next(el for el in config if el in (CONF_ANY, CONF_ALL, CONF_CONDITION))
    condition = await build_condition(config[cond_conf], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, condition)
    conditions = await build_condition(config[cond_conf], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, conditions)
    if CONF_THEN in config:
        actions = await build_action_list(config[CONF_THEN], template_arg, args)
        cg.add(var.add_then(actions))
@@ -395,14 +279,9 @@ async def if_action_to_code(
        }
    ),
)
async def while_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
    condition = await build_condition(config[CONF_CONDITION], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, condition)
async def while_action_to_code(config, action_id, template_arg, args):
    conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, conditions)
    actions = await build_action_list(config[CONF_THEN], template_arg, args)
    cg.add(var.add_then(actions))
    return var
@@ -418,12 +297,7 @@ async def while_action_to_code(
        }
    ),
)
async def repeat_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def repeat_action_to_code(config, action_id, template_arg, args):
    var = cg.new_Pvariable(action_id, template_arg)
    count_template = await cg.templatable(config[CONF_COUNT], args, cg.uint32)
    cg.add(var.set_count(count_template))
@@ -446,14 +320,9 @@ _validate_wait_until = cv.maybe_simple_value(


@register_action("wait_until", WaitUntilAction, _validate_wait_until)
async def wait_until_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
    condition = await build_condition(config[CONF_CONDITION], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, condition)
async def wait_until_action_to_code(config, action_id, template_arg, args):
    conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
    var = cg.new_Pvariable(action_id, template_arg, conditions)
    if CONF_TIMEOUT in config:
        template_ = await cg.templatable(config[CONF_TIMEOUT], args, cg.uint32)
        cg.add(var.set_timeout_value(template_))
@@ -462,14 +331,9 @@ async def wait_until_action_to_code(


@register_action("lambda", LambdaAction, cv.lambda_)
async def lambda_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def lambda_action_to_code(config, action_id, template_arg, args):
    lambda_ = await cg.process_lambda(config, args, return_type=cg.void)
    return new_lambda_pvariable(action_id, lambda_, StatelessLambdaAction, template_arg)
    return cg.new_Pvariable(action_id, template_arg, lambda_)


@register_action(
@@ -481,12 +345,7 @@ async def lambda_action_to_code(
        }
    ),
)
async def component_update_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def component_update_action_to_code(config, action_id, template_arg, args):
    comp = await cg.get_variable(config[CONF_ID])
    return cg.new_Pvariable(action_id, template_arg, comp)

@@ -500,12 +359,7 @@ async def component_update_action_to_code(
        }
    ),
)
async def component_suspend_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def component_suspend_action_to_code(config, action_id, template_arg, args):
    comp = await cg.get_variable(config[CONF_ID])
    return cg.new_Pvariable(action_id, template_arg, comp)

@@ -522,12 +376,7 @@ async def component_suspend_action_to_code(
        }
    ),
)
async def component_resume_action_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
async def component_resume_action_to_code(config, action_id, template_arg, args):
    comp = await cg.get_variable(config[CONF_ID])
    var = cg.new_Pvariable(action_id, template_arg, comp)
    if CONF_UPDATE_INTERVAL in config:
@@ -536,51 +385,43 @@ async def component_resume_action_to_code(
    return var


async def build_action(
    full_config: ConfigType, template_arg: cg.TemplateArguments, args: TemplateArgsType
) -> MockObj:
async def build_action(full_config, template_arg, args):
    registry_entry, config = cg.extract_registry_entry_config(
        ACTION_REGISTRY, full_config
    )
    action_id = full_config[CONF_TYPE_ID]
    builder = registry_entry.coroutine_fun
    return await builder(config, action_id, template_arg, args)
    ret = await builder(config, action_id, template_arg, args)
    return ret


async def build_action_list(
    config: list[ConfigType], templ: cg.TemplateArguments, arg_type: TemplateArgsType
) -> list[MockObj]:
    actions: list[MockObj] = []
async def build_action_list(config, templ, arg_type):
    actions = []
    for conf in config:
        action = await build_action(conf, templ, arg_type)
        actions.append(action)
    return actions


async def build_condition(
    full_config: ConfigType, template_arg: cg.TemplateArguments, args: TemplateArgsType
) -> MockObj:
async def build_condition(full_config, template_arg, args):
    registry_entry, config = cg.extract_registry_entry_config(
        CONDITION_REGISTRY, full_config
    )
    action_id = full_config[CONF_TYPE_ID]
    builder = registry_entry.coroutine_fun
    return await builder(config, action_id, template_arg, args)
    ret = await builder(config, action_id, template_arg, args)
    return ret


async def build_condition_list(
    config: ConfigType, templ: cg.TemplateArguments, args: TemplateArgsType
) -> list[MockObj]:
    conditions: list[MockObj] = []
async def build_condition_list(config, templ, args):
    conditions = []
    for conf in config:
        condition = await build_condition(conf, templ, args)
        conditions.append(condition)
    return conditions


async def build_automation(
    trigger: MockObj, args: TemplateArgsType, config: ConfigType
) -> MockObj:
async def build_automation(trigger, args, config):
    arg_types = [arg[0] for arg in args]
    templ = cg.TemplateArguments(*arg_types)
    obj = cg.new_Pvariable(config[CONF_AUTOMATION_ID], templ, trigger)

@@ -1,3 +1,5 @@
import os

from esphome.const import __version__
from esphome.core import CORE
from esphome.helpers import mkdir_p, read_file, write_file_if_changed
@@ -61,7 +63,7 @@ def write_ini(content):
    update_storage_json()
    path = CORE.relative_build_path("platformio.ini")

    if path.is_file():
    if os.path.isfile(path):
        text = read_file(path)
        content_format = find_begin_end(
            text, INI_AUTO_GENERATE_BEGIN, INI_AUTO_GENERATE_END

@@ -12,7 +12,6 @@ from esphome.cpp_generator import (  # noqa: F401
    ArrayInitializer,
    Expression,
    LineComment,
    LogStringLiteral,
    MockObj,
    MockObjClass,
    Pvariable,
@@ -62,7 +61,6 @@ from esphome.cpp_types import (  # noqa: F401
    EntityBase,
    EntityCategory,
    ESPTime,
    FixedVector,
    GPIOPin,
    InternalGPIOPin,
    JsonObject,

@@ -61,10 +61,11 @@ void AbsoluteHumidityComponent::loop() {
      ESP_LOGW(TAG, "No valid state from temperature sensor!");
    }
    if (no_humidity) {
      ESP_LOGW(TAG, "No valid state from humidity sensor!");
      ESP_LOGW(TAG, "No valid state from temperature sensor!");
    }
    ESP_LOGW(TAG, "Unable to calculate absolute humidity.");
    this->publish_state(NAN);
    this->status_set_warning(LOG_STR("Unable to calculate absolute humidity."));
    this->status_set_warning();
    return;
  }

@@ -86,8 +87,9 @@ void AbsoluteHumidityComponent::loop() {
      es = es_wobus(temperature_c);
      break;
    default:
      ESP_LOGE(TAG, "Invalid saturation vapor pressure equation selection!");
      this->publish_state(NAN);
      this->status_set_error(LOG_STR("Invalid saturation vapor pressure equation selection!"));
      this->status_set_error();
      return;
  }
  ESP_LOGD(TAG, "Saturation vapor pressure %f kPa", es);
@@ -163,7 +165,7 @@ float AbsoluteHumidityComponent::es_wobus(float t) {
}

// From https://www.environmentalbiophysics.org/chalk-talk-how-to-calculate-absolute-humidity/
// H/T to https://esphome.io/cookbook/bme280_environment/
// H/T to https://esphome.io/cookbook/bme280_environment.html
// H/T to https://carnotcycle.wordpress.com/2012/08/04/how-to-convert-relative-humidity-to-absolute-humidity/
float AbsoluteHumidityComponent::vapor_density(float es, float hr, float ta) {
  // es = saturated vapor pressure (kPa)

@@ -5,7 +5,7 @@ from esphome.const import (
    CONF_EQUATION,
    CONF_HUMIDITY,
    CONF_TEMPERATURE,
    DEVICE_CLASS_ABSOLUTE_HUMIDITY,
    ICON_WATER,
    STATE_CLASS_MEASUREMENT,
    UNIT_GRAMS_PER_CUBIC_METER,
)
@@ -27,8 +27,8 @@ EQUATION = {
CONFIG_SCHEMA = (
    sensor.sensor_schema(
        unit_of_measurement=UNIT_GRAMS_PER_CUBIC_METER,
        icon=ICON_WATER,
        accuracy_decimals=2,
        device_class=DEVICE_CLASS_ABSOLUTE_HUMIDITY,
        state_class=STATE_CLASS_MEASUREMENT,
    )
    .extend(

@@ -9,7 +9,7 @@ static const char *const TAG = "adalight_light_effect";
static const uint32_t ADALIGHT_ACK_INTERVAL = 1000;
static const uint32_t ADALIGHT_RECEIVE_TIMEOUT = 1000;

AdalightLightEffect::AdalightLightEffect(const char *name) : AddressableLightEffect(name) {}
AdalightLightEffect::AdalightLightEffect(const std::string &name) : AddressableLightEffect(name) {}

void AdalightLightEffect::start() {
  AddressableLightEffect::start();

@@ -11,7 +11,7 @@ namespace adalight {

class AdalightLightEffect : public light::AddressableLightEffect, public uart::UARTDevice {
 public:
  AdalightLightEffect(const char *name);
  AdalightLightEffect(const std::string &name);

  void start() override;
  void stop() override;

@@ -1,20 +1,25 @@
|
||||
from esphome import pins
|
||||
import esphome.codegen as cg
|
||||
from esphome.components.esp32 import (
|
||||
from esphome.components.esp32 import get_esp32_variant
|
||||
from esphome.components.esp32.const import (
|
||||
VARIANT_ESP32,
|
||||
VARIANT_ESP32C2,
|
||||
VARIANT_ESP32C3,
|
||||
VARIANT_ESP32C5,
|
||||
VARIANT_ESP32C6,
|
||||
VARIANT_ESP32C61,
|
||||
VARIANT_ESP32H2,
|
||||
VARIANT_ESP32P4,
|
||||
VARIANT_ESP32S2,
|
||||
VARIANT_ESP32S3,
|
||||
get_esp32_variant,
|
||||
)
|
||||
from esphome.config_helpers import filter_source_files_from_platform
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import CONF_ANALOG, CONF_INPUT, CONF_NUMBER, PLATFORM_ESP8266
|
||||
from esphome.const import (
|
||||
CONF_ANALOG,
|
||||
CONF_INPUT,
|
||||
CONF_NUMBER,
|
||||
PLATFORM_ESP8266,
|
||||
PlatformFramework,
|
||||
)
|
||||
from esphome.core import CORE
|
||||
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
@@ -101,13 +106,6 @@ ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
|
||||
5: adc_channel_t.ADC_CHANNEL_5,
|
||||
6: adc_channel_t.ADC_CHANNEL_6,
|
||||
},
|
||||
# https://docs.espressif.com/projects/esp-idf/en/latest/esp32c61/api-reference/peripherals/gpio.html
|
||||
VARIANT_ESP32C61: {
|
||||
1: adc_channel_t.ADC_CHANNEL_0,
|
||||
3: adc_channel_t.ADC_CHANNEL_1,
|
||||
4: adc_channel_t.ADC_CHANNEL_2,
|
||||
5: adc_channel_t.ADC_CHANNEL_3,
|
||||
},
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32h2/include/soc/adc_channel.h
|
||||
VARIANT_ESP32H2: {
|
||||
1: adc_channel_t.ADC_CHANNEL_0,
|
||||
@@ -116,17 +114,6 @@ ESP32_VARIANT_ADC1_PIN_TO_CHANNEL = {
|
||||
4: adc_channel_t.ADC_CHANNEL_3,
|
||||
5: adc_channel_t.ADC_CHANNEL_4,
|
||||
},
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32p4/include/soc/adc_channel.h
|
||||
VARIANT_ESP32P4: {
|
||||
16: adc_channel_t.ADC_CHANNEL_0,
|
||||
17: adc_channel_t.ADC_CHANNEL_1,
|
||||
18: adc_channel_t.ADC_CHANNEL_2,
|
||||
19: adc_channel_t.ADC_CHANNEL_3,
|
||||
20: adc_channel_t.ADC_CHANNEL_4,
|
||||
21: adc_channel_t.ADC_CHANNEL_5,
|
||||
22: adc_channel_t.ADC_CHANNEL_6,
|
||||
23: adc_channel_t.ADC_CHANNEL_7,
|
||||
},
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32s2/include/soc/adc_channel.h
|
||||
VARIANT_ESP32S2: {
|
||||
1: adc_channel_t.ADC_CHANNEL_0,
|
||||
@@ -183,19 +170,8 @@ ESP32_VARIANT_ADC2_PIN_TO_CHANNEL = {
|
||||
VARIANT_ESP32C5: {}, # no ADC2
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32c6/include/soc/adc_channel.h
|
||||
VARIANT_ESP32C6: {}, # no ADC2
|
||||
# ESP32-C61 has no ADC2
|
||||
VARIANT_ESP32C61: {}, # no ADC2
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32h2/include/soc/adc_channel.h
|
||||
VARIANT_ESP32H2: {}, # no ADC2
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32p4/include/soc/adc_channel.h
|
||||
VARIANT_ESP32P4: {
|
||||
49: adc_channel_t.ADC_CHANNEL_0,
|
||||
50: adc_channel_t.ADC_CHANNEL_1,
|
||||
51: adc_channel_t.ADC_CHANNEL_2,
|
||||
52: adc_channel_t.ADC_CHANNEL_3,
|
||||
53: adc_channel_t.ADC_CHANNEL_4,
|
||||
54: adc_channel_t.ADC_CHANNEL_5,
|
||||
},
|
||||
# https://github.com/espressif/esp-idf/blob/master/components/soc/esp32s2/include/soc/adc_channel.h
|
||||
VARIANT_ESP32S2: {
|
||||
11: adc_channel_t.ADC_CHANNEL_0,
|
||||
@@ -273,9 +249,21 @@ def validate_adc_pin(value):
|
||||
{CONF_ANALOG: True, CONF_INPUT: True}, internal=True
|
||||
)(value)
|
||||
|
||||
if CORE.is_nrf52:
|
||||
return pins.gpio_pin_schema(
|
||||
{CONF_ANALOG: True, CONF_INPUT: True}, internal=True
|
||||
)(value)
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
FILTER_SOURCE_FILES = filter_source_files_from_platform(
|
||||
{
|
||||
"adc_sensor_esp32.cpp": {
|
||||
PlatformFramework.ESP32_ARDUINO,
|
||||
PlatformFramework.ESP32_IDF,
|
||||
},
|
||||
"adc_sensor_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
|
||||
"adc_sensor_rp2040.cpp": {PlatformFramework.RP2040_ARDUINO},
|
||||
"adc_sensor_libretiny.cpp": {
|
||||
PlatformFramework.BK72XX_ARDUINO,
|
||||
PlatformFramework.RTL87XX_ARDUINO,
|
||||
PlatformFramework.LN882X_ARDUINO,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
@@ -13,10 +13,6 @@
|
||||
#include "hal/adc_types.h" // This defines ADC_CHANNEL_MAX
|
||||
#endif // USE_ESP32
|
||||
|
||||
#ifdef USE_ZEPHYR
|
||||
#include <zephyr/drivers/adc.h>
|
||||
#endif
|
||||
|
||||
namespace esphome {
|
||||
namespace adc {
|
||||
|
||||
@@ -42,15 +38,15 @@ enum class SamplingMode : uint8_t {
|
||||
|
||||
const LogString *sampling_mode_to_str(SamplingMode mode);
|
||||
|
||||
template<typename T> class Aggregator {
|
||||
class Aggregator {
|
||||
public:
|
||||
Aggregator(SamplingMode mode);
|
||||
void add_sample(T value);
|
||||
T aggregate();
|
||||
void add_sample(uint32_t value);
|
||||
uint32_t aggregate();
|
||||
|
||||
protected:
|
||||
T aggr_{0};
|
||||
uint8_t samples_{0};
|
||||
uint32_t aggr_{0};
|
||||
uint32_t samples_{0};
|
||||
SamplingMode mode_{SamplingMode::AVG};
|
||||
};
|
||||
|
||||
@@ -73,11 +69,6 @@ class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage
|
||||
/// @return A float representing the setup priority.
|
||||
float get_setup_priority() const override;
|
||||
|
||||
#ifdef USE_ZEPHYR
|
||||
/// Set the ADC channel to be used by the ADC sensor.
|
||||
/// @param channel Pointer to an adc_dt_spec structure representing the ADC channel.
|
||||
void set_adc_channel(const adc_dt_spec *channel) { this->channel_ = channel; }
|
||||
#endif
|
||||
/// Set the GPIO pin to be used by the ADC sensor.
|
||||
/// @param pin Pointer to an InternalGPIOPin representing the ADC input pin.
|
||||
void set_pin(InternalGPIOPin *pin) { this->pin_ = pin; }
|
||||
@@ -145,8 +136,8 @@ class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage
|
||||
adc_oneshot_unit_handle_t adc_handle_{nullptr};
|
||||
adc_cali_handle_t calibration_handle_{nullptr};
|
||||
adc_atten_t attenuation_{ADC_ATTEN_DB_0};
|
||||
adc_channel_t channel_{};
|
||||
adc_unit_t adc_unit_{};
|
||||
adc_channel_t channel_;
|
||||
adc_unit_t adc_unit_;
|
||||
struct SetupFlags {
|
||||
uint8_t init_complete : 1;
|
||||
uint8_t config_complete : 1;
|
||||
@@ -160,10 +151,6 @@ class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage
|
||||
#ifdef USE_RP2040
|
||||
bool is_temperature_{false};
|
||||
#endif // USE_RP2040
|
||||
|
||||
#ifdef USE_ZEPHYR
|
||||
const struct adc_dt_spec *channel_ = nullptr;
|
||||
#endif
|
||||
};
|
||||
|
||||
} // namespace adc
|
||||
|
||||
@@ -18,15 +18,15 @@ const LogString *sampling_mode_to_str(SamplingMode mode) {
|
||||
return LOG_STR("unknown");
|
||||
}
|
||||
|
||||
template<typename T> Aggregator<T>::Aggregator(SamplingMode mode) {
|
||||
Aggregator::Aggregator(SamplingMode mode) {
|
||||
this->mode_ = mode;
|
||||
// set to max uint if mode is "min"
|
||||
if (mode == SamplingMode::MIN) {
|
||||
this->aggr_ = std::numeric_limits<T>::max();
|
||||
this->aggr_ = UINT32_MAX;
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T> void Aggregator<T>::add_sample(T value) {
|
||||
void Aggregator::add_sample(uint32_t value) {
|
||||
this->samples_ += 1;
|
||||
|
||||
switch (this->mode_) {
|
||||
@@ -47,7 +47,7 @@ template<typename T> void Aggregator<T>::add_sample(T value) {
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T> T Aggregator<T>::aggregate() {
|
||||
uint32_t Aggregator::aggregate() {
|
||||
if (this->mode_ == SamplingMode::AVG) {
|
||||
if (this->samples_ == 0) {
|
||||
return this->aggr_;
|
||||
@@ -59,12 +59,6 @@ template<typename T> T Aggregator<T>::aggregate() {
|
||||
return this->aggr_;
|
||||
}
|
||||
|
||||
#ifdef USE_ZEPHYR
|
||||
template class Aggregator<int32_t>;
|
||||
#else
|
||||
template class Aggregator<uint32_t>;
|
||||
#endif
|
||||
|
||||
void ADCSensor::update() {
|
||||
float value_v = this->sample();
|
||||
ESP_LOGV(TAG, "'%s': Voltage=%.4fV", this->get_name().c_str(), value_v);
|
||||
|
||||
@@ -42,11 +42,10 @@ void ADCSensor::setup() {
|
||||
adc_oneshot_unit_init_cfg_t init_config = {}; // Zero initialize
|
||||
init_config.unit_id = this->adc_unit_;
|
||||
init_config.ulp_mode = ADC_ULP_MODE_DISABLE;
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || USE_ESP32_VARIANT_ESP32H2
|
||||
init_config.clk_src = ADC_DIGI_CLK_SRC_DEFAULT;
|
||||
#endif // USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 ||
|
||||
// USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
|
||||
// USE_ESP32_VARIANT_ESP32H2
|
||||
esp_err_t err = adc_oneshot_new_unit(&init_config, &ADCSensor::shared_adc_handles[this->adc_unit_]);
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGE(TAG, "Error initializing %s: %d", LOG_STR_ARG(adc_unit_to_str(this->adc_unit_)), err);
|
||||
@@ -73,9 +72,10 @@ void ADCSensor::setup() {
|
||||
// Initialize ADC calibration
|
||||
if (this->calibration_handle_ == nullptr) {
|
||||
adc_cali_handle_t handle = nullptr;
|
||||
esp_err_t err;
|
||||
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
|
||||
USE_ESP32_VARIANT_ESP32S3 || USE_ESP32_VARIANT_ESP32H2
|
||||
// RISC-V variants and S3 use curve fitting calibration
|
||||
adc_cali_curve_fitting_config_t cali_config = {}; // Zero initialize first
|
||||
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 3, 0)
|
||||
@@ -112,7 +112,7 @@ void ADCSensor::setup() {
|
||||
ESP_LOGW(TAG, "Line fitting calibration failed with error %d, will use uncalibrated readings", err);
|
||||
this->setup_flags_.calibration_complete = false;
|
||||
}
|
||||
#endif // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
|
||||
#endif // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32S3 || ESP32H2
|
||||
}
|
||||
|
||||
this->setup_flags_.init_complete = true;
|
||||
@@ -153,7 +153,7 @@ float ADCSensor::sample() {
|
||||
}
|
||||
|
||||
float ADCSensor::sample_fixed_attenuation_() {
|
||||
auto aggr = Aggregator<uint32_t>(this->sampling_mode_);
|
||||
auto aggr = Aggregator(this->sampling_mode_);
|
||||
|
||||
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
|
||||
int raw;
|
||||
@@ -187,11 +187,11 @@ float ADCSensor::sample_fixed_attenuation_() {
|
||||
ESP_LOGW(TAG, "ADC calibration conversion failed with error %d, disabling calibration", err);
|
||||
if (this->calibration_handle_ != nullptr) {
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
|
||||
USE_ESP32_VARIANT_ESP32S3 || USE_ESP32_VARIANT_ESP32H2
|
||||
adc_cali_delete_scheme_curve_fitting(this->calibration_handle_);
|
||||
#else // Other ESP32 variants use line fitting calibration
|
||||
adc_cali_delete_scheme_line_fitting(this->calibration_handle_);
|
||||
#endif // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
|
||||
#endif // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32S3 || ESP32H2
|
||||
this->calibration_handle_ = nullptr;
|
||||
}
|
||||
}
|
||||
@@ -220,7 +220,7 @@ float ADCSensor::sample_autorange_() {
|
||||
if (this->calibration_handle_ != nullptr) {
|
||||
// Delete old calibration handle
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
|
||||
USE_ESP32_VARIANT_ESP32S3 || USE_ESP32_VARIANT_ESP32H2
|
||||
adc_cali_delete_scheme_curve_fitting(this->calibration_handle_);
|
||||
#else
|
||||
adc_cali_delete_scheme_line_fitting(this->calibration_handle_);
|
||||
@@ -232,7 +232,7 @@ float ADCSensor::sample_autorange_() {
|
||||
adc_cali_handle_t handle = nullptr;
|
||||
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
|
||||
USE_ESP32_VARIANT_ESP32S3 || USE_ESP32_VARIANT_ESP32H2
|
||||
adc_cali_curve_fitting_config_t cali_config = {};
|
||||
#if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 3, 0)
|
||||
cali_config.chan = this->channel_;
|
||||
@@ -242,8 +242,6 @@ float ADCSensor::sample_autorange_() {
|
||||
cali_config.bitwidth = ADC_BITWIDTH_DEFAULT;
|
||||
|
||||
err = adc_cali_create_scheme_curve_fitting(&cali_config, &handle);
|
||||
ESP_LOGVV(TAG, "Autorange atten=%d: Calibration handle creation %s (err=%d)", atten,
|
||||
(err == ESP_OK) ? "SUCCESS" : "FAILED", err);
|
||||
#else
|
||||
adc_cali_line_fitting_config_t cali_config = {
|
||||
.unit_id = this->adc_unit_,
|
||||
@@ -254,20 +252,16 @@ float ADCSensor::sample_autorange_() {
|
||||
#endif
|
||||
};
|
||||
err = adc_cali_create_scheme_line_fitting(&cali_config, &handle);
|
||||
ESP_LOGVV(TAG, "Autorange atten=%d: Calibration handle creation %s (err=%d)", atten,
|
||||
(err == ESP_OK) ? "SUCCESS" : "FAILED", err);
|
||||
#endif
|
||||
|
||||
int raw;
|
||||
err = adc_oneshot_read(this->adc_handle_, this->channel_, &raw);
|
||||
ESP_LOGVV(TAG, "Autorange atten=%d: Raw ADC read %s, value=%d (err=%d)", atten,
|
||||
(err == ESP_OK) ? "SUCCESS" : "FAILED", raw, err);
|
||||
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGW(TAG, "ADC read failed in autorange with error %d", err);
|
||||
if (handle != nullptr) {
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
|
||||
USE_ESP32_VARIANT_ESP32S3 || USE_ESP32_VARIANT_ESP32H2
|
||||
adc_cali_delete_scheme_curve_fitting(handle);
|
||||
#else
|
||||
adc_cali_delete_scheme_line_fitting(handle);
|
||||
@@ -282,21 +276,18 @@ float ADCSensor::sample_autorange_() {
|
||||
err = adc_cali_raw_to_voltage(handle, raw, &voltage_mv);
|
||||
if (err == ESP_OK) {
|
||||
voltage = voltage_mv / 1000.0f;
|
||||
ESP_LOGVV(TAG, "Autorange atten=%d: CALIBRATED - raw=%d -> %dmV -> %.6fV", atten, raw, voltage_mv, voltage);
|
||||
} else {
|
||||
voltage = raw * 3.3f / 4095.0f;
|
||||
ESP_LOGVV(TAG, "Autorange atten=%d: UNCALIBRATED FALLBACK - raw=%d -> %.6fV (3.3V ref)", atten, raw, voltage);
|
||||
}
|
||||
// Clean up calibration handle
|
||||
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
|
||||
USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
|
||||
USE_ESP32_VARIANT_ESP32S3 || USE_ESP32_VARIANT_ESP32H2
|
||||
adc_cali_delete_scheme_curve_fitting(handle);
|
||||
#else
|
||||
adc_cali_delete_scheme_line_fitting(handle);
|
||||
#endif
|
||||
} else {
|
||||
voltage = raw * 3.3f / 4095.0f;
|
||||
ESP_LOGVV(TAG, "Autorange atten=%d: NO CALIBRATION - raw=%d -> %.6fV (3.3V ref)", atten, raw, voltage);
|
||||
}
|
||||
|
||||
return {raw, voltage};
|
||||
@@ -334,32 +325,18 @@ float ADCSensor::sample_autorange_() {
|
||||
}
|
||||
|
||||
const int adc_half = 2048;
|
||||
const uint32_t c12 = std::min(raw12, adc_half);
|
||||
|
||||
const int32_t c6_signed = adc_half - std::abs(raw6 - adc_half);
|
||||
const uint32_t c6 = (c6_signed > 0) ? c6_signed : 0; // Clamp to prevent underflow
|
||||
|
||||
const int32_t c2_signed = adc_half - std::abs(raw2 - adc_half);
|
||||
const uint32_t c2 = (c2_signed > 0) ? c2_signed : 0; // Clamp to prevent underflow
|
||||
|
||||
const uint32_t c0 = std::min(4095 - raw0, adc_half);
|
||||
const uint32_t csum = c12 + c6 + c2 + c0;
|
||||
|
||||
ESP_LOGVV(TAG, "Autorange summary:");
|
||||
ESP_LOGVV(TAG, " Raw readings: 12db=%d, 6db=%d, 2.5db=%d, 0db=%d", raw12, raw6, raw2, raw0);
|
||||
ESP_LOGVV(TAG, " Voltages: 12db=%.6f, 6db=%.6f, 2.5db=%.6f, 0db=%.6f", mv12, mv6, mv2, mv0);
|
||||
ESP_LOGVV(TAG, " Coefficients: c12=%u, c6=%u, c2=%u, c0=%u, sum=%u", c12, c6, c2, c0, csum);
|
||||
uint32_t c12 = std::min(raw12, adc_half);
|
||||
uint32_t c6 = adc_half - std::abs(raw6 - adc_half);
|
||||
uint32_t c2 = adc_half - std::abs(raw2 - adc_half);
|
||||
uint32_t c0 = std::min(4095 - raw0, adc_half);
|
||||
uint32_t csum = c12 + c6 + c2 + c0;
|
||||
|
||||
if (csum == 0) {
|
||||
ESP_LOGE(TAG, "Invalid weight sum in autorange calculation");
|
||||
return NAN;
|
||||
}
|
||||
|
||||
const float final_result = (mv12 * c12 + mv6 * c6 + mv2 * c2 + mv0 * c0) / csum;
|
||||
ESP_LOGV(TAG, "Autorange final: (%.6f*%u + %.6f*%u + %.6f*%u + %.6f*%u)/%u = %.6fV", mv12, c12, mv6, c6, mv2, c2, mv0,
|
||||
c0, csum, final_result);
|
||||
|
||||
return final_result;
|
||||
return (mv12 * c12 + mv6 * c6 + mv2 * c2 + mv0 * c0) / csum;
|
||||
}
|
||||
|
||||
} // namespace adc
|
||||
|
||||
@@ -37,7 +37,7 @@ void ADCSensor::dump_config() {
|
||||
}
|
||||
|
||||
float ADCSensor::sample() {
|
||||
auto aggr = Aggregator<uint32_t>(this->sampling_mode_);
|
||||
auto aggr = Aggregator(this->sampling_mode_);
|
||||
|
||||
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
|
||||
uint32_t raw = 0;
|
||||
|
||||
@@ -30,7 +30,7 @@ void ADCSensor::dump_config() {
|
||||
|
||||
float ADCSensor::sample() {
|
||||
uint32_t raw = 0;
|
||||
auto aggr = Aggregator<uint32_t>(this->sampling_mode_);
|
||||
auto aggr = Aggregator(this->sampling_mode_);
|
||||
|
||||
if (this->output_raw_) {
|
||||
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
|
||||
|
||||
@@ -41,7 +41,7 @@ void ADCSensor::dump_config() {
|
||||
|
||||
float ADCSensor::sample() {
|
||||
uint32_t raw = 0;
|
||||
auto aggr = Aggregator<uint32_t>(this->sampling_mode_);
|
||||
auto aggr = Aggregator(this->sampling_mode_);
|
||||
|
||||
if (this->is_temperature_) {
|
||||
adc_set_temp_sensor_enabled(true);
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
|
||||
#include "adc_sensor.h"
|
||||
#ifdef USE_ZEPHYR
|
||||
#include "esphome/core/log.h"
|
||||
|
||||
#include "hal/nrf_saadc.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace adc {
|
||||
|
||||
static const char *const TAG = "adc.zephyr";
|
||||
|
||||
void ADCSensor::setup() {
|
||||
if (!adc_is_ready_dt(this->channel_)) {
|
||||
ESP_LOGE(TAG, "ADC controller device %s not ready", this->channel_->dev->name);
|
||||
return;
|
||||
}
|
||||
|
||||
auto err = adc_channel_setup_dt(this->channel_);
|
||||
if (err < 0) {
|
||||
ESP_LOGE(TAG, "Could not setup channel %s (%d)", this->channel_->dev->name, err);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
|
||||
static const LogString *gain_to_str(enum adc_gain gain) {
|
||||
switch (gain) {
|
||||
case ADC_GAIN_1_6:
|
||||
return LOG_STR("1/6");
|
||||
case ADC_GAIN_1_5:
|
||||
return LOG_STR("1/5");
|
||||
case ADC_GAIN_1_4:
|
||||
return LOG_STR("1/4");
|
||||
case ADC_GAIN_1_3:
|
||||
return LOG_STR("1/3");
|
||||
case ADC_GAIN_2_5:
|
||||
return LOG_STR("2/5");
|
||||
case ADC_GAIN_1_2:
|
||||
return LOG_STR("1/2");
|
||||
case ADC_GAIN_2_3:
|
||||
return LOG_STR("2/3");
|
||||
case ADC_GAIN_4_5:
|
||||
return LOG_STR("4/5");
|
||||
case ADC_GAIN_1:
|
||||
return LOG_STR("1");
|
||||
case ADC_GAIN_2:
|
||||
return LOG_STR("2");
|
||||
case ADC_GAIN_3:
|
||||
return LOG_STR("3");
|
||||
case ADC_GAIN_4:
|
||||
return LOG_STR("4");
|
||||
case ADC_GAIN_6:
|
||||
return LOG_STR("6");
|
||||
case ADC_GAIN_8:
|
||||
return LOG_STR("8");
|
||||
case ADC_GAIN_12:
|
||||
return LOG_STR("12");
|
||||
case ADC_GAIN_16:
|
||||
return LOG_STR("16");
|
||||
case ADC_GAIN_24:
|
||||
return LOG_STR("24");
|
||||
case ADC_GAIN_32:
|
||||
return LOG_STR("32");
|
||||
case ADC_GAIN_64:
|
||||
return LOG_STR("64");
|
||||
case ADC_GAIN_128:
|
||||
return LOG_STR("128");
|
||||
}
|
||||
return LOG_STR("undefined gain");
|
||||
}
|
||||
|
||||
static const LogString *reference_to_str(enum adc_reference reference) {
|
||||
switch (reference) {
|
||||
case ADC_REF_VDD_1:
|
||||
return LOG_STR("VDD");
|
||||
case ADC_REF_VDD_1_2:
|
||||
return LOG_STR("VDD/2");
|
||||
case ADC_REF_VDD_1_3:
|
||||
return LOG_STR("VDD/3");
|
||||
case ADC_REF_VDD_1_4:
|
||||
return LOG_STR("VDD/4");
|
||||
case ADC_REF_INTERNAL:
|
||||
return LOG_STR("INTERNAL");
|
||||
case ADC_REF_EXTERNAL0:
|
||||
return LOG_STR("External, input 0");
|
||||
case ADC_REF_EXTERNAL1:
|
||||
return LOG_STR("External, input 1");
|
||||
}
|
||||
return LOG_STR("undefined reference");
|
||||
}
|
||||
|
||||
static const LogString *input_to_str(uint8_t input) {
|
||||
switch (input) {
|
||||
case NRF_SAADC_INPUT_AIN0:
|
||||
return LOG_STR("AIN0");
|
||||
case NRF_SAADC_INPUT_AIN1:
|
||||
return LOG_STR("AIN1");
|
||||
case NRF_SAADC_INPUT_AIN2:
|
||||
return LOG_STR("AIN2");
|
||||
case NRF_SAADC_INPUT_AIN3:
|
||||
return LOG_STR("AIN3");
|
||||
case NRF_SAADC_INPUT_AIN4:
|
||||
return LOG_STR("AIN4");
|
||||
case NRF_SAADC_INPUT_AIN5:
|
||||
return LOG_STR("AIN5");
|
||||
case NRF_SAADC_INPUT_AIN6:
|
||||
return LOG_STR("AIN6");
|
||||
case NRF_SAADC_INPUT_AIN7:
|
||||
return LOG_STR("AIN7");
|
||||
case NRF_SAADC_INPUT_VDD:
|
||||
return LOG_STR("VDD");
|
||||
case NRF_SAADC_INPUT_VDDHDIV5:
|
||||
return LOG_STR("VDDHDIV5");
|
||||
}
|
||||
return LOG_STR("undefined input");
|
||||
}
|
||||
#endif // ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
|
||||
|
||||
void ADCSensor::dump_config() {
|
||||
LOG_SENSOR("", "ADC Sensor", this);
|
||||
LOG_PIN(" Pin: ", this->pin_);
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
|
||||
ESP_LOGV(TAG,
|
||||
" Name: %s\n"
|
||||
" Channel: %d\n"
|
||||
" vref_mv: %d\n"
|
||||
" Resolution %d\n"
|
||||
" Oversampling %d",
|
||||
this->channel_->dev->name, this->channel_->channel_id, this->channel_->vref_mv, this->channel_->resolution,
|
||||
this->channel_->oversampling);
|
||||
|
||||
ESP_LOGV(TAG,
|
||||
" Gain: %s\n"
|
||||
" reference: %s\n"
|
||||
" acquisition_time: %d\n"
|
||||
" differential %s",
|
||||
LOG_STR_ARG(gain_to_str(this->channel_->channel_cfg.gain)),
|
||||
LOG_STR_ARG(reference_to_str(this->channel_->channel_cfg.reference)),
|
||||
this->channel_->channel_cfg.acquisition_time, YESNO(this->channel_->channel_cfg.differential));
|
||||
if (this->channel_->channel_cfg.differential) {
|
||||
ESP_LOGV(TAG,
|
||||
" Positive: %s\n"
|
||||
" Negative: %s",
|
||||
LOG_STR_ARG(input_to_str(this->channel_->channel_cfg.input_positive)),
|
||||
LOG_STR_ARG(input_to_str(this->channel_->channel_cfg.input_negative)));
|
||||
} else {
|
||||
ESP_LOGV(TAG, " Positive: %s", LOG_STR_ARG(input_to_str(this->channel_->channel_cfg.input_positive)));
|
||||
}
|
||||
#endif
|
||||
|
||||
LOG_UPDATE_INTERVAL(this);
|
||||
}
|
||||
|
||||
float ADCSensor::sample() {
|
||||
auto aggr = Aggregator<int32_t>(this->sampling_mode_);
|
||||
int err;
|
||||
for (uint8_t sample = 0; sample < this->sample_count_; sample++) {
|
||||
int16_t buf = 0;
|
||||
struct adc_sequence sequence = {
|
||||
.buffer = &buf,
|
||||
/* buffer size in bytes, not number of samples */
|
||||
.buffer_size = sizeof(buf),
|
||||
};
|
||||
int32_t val_raw;
|
||||
|
||||
err = adc_sequence_init_dt(this->channel_, &sequence);
|
||||
if (err < 0) {
|
||||
ESP_LOGE(TAG, "Could not init sequence %s (%d)", this->channel_->dev->name, err);
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
err = adc_read(this->channel_->dev, &sequence);
|
||||
if (err < 0) {
|
||||
ESP_LOGE(TAG, "Could not read %s (%d)", this->channel_->dev->name, err);
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
val_raw = (int32_t) buf;
|
||||
if (!this->channel_->channel_cfg.differential) {
|
||||
// https://github.com/adafruit/Adafruit_nRF52_Arduino/blob/0ed4d9ffc674ae407be7cacf5696a02f5e789861/cores/nRF5/wiring_analog_nRF52.c#L222
|
||||
if (val_raw < 0) {
|
||||
val_raw = 0;
|
||||
}
|
||||
}
|
||||
aggr.add_sample(val_raw);
|
||||
}
|
||||
|
||||
int32_t val_mv = aggr.aggregate();
|
||||
|
||||
if (this->output_raw_) {
|
||||
return val_mv;
|
||||
}
|
||||
|
||||
err = adc_raw_to_millivolts_dt(this->channel_, &val_mv);
|
||||
/* conversion to mV may not be supported, skip if not */
|
||||
if (err < 0) {
|
||||
ESP_LOGE(TAG, "Value in mV not available %s (%d)", this->channel_->dev->name, err);
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
return val_mv / 1000.0f;
|
||||
}
|
||||
|
||||
} // namespace adc
|
||||
} // namespace esphome
|
||||
#endif
|
||||
@@ -3,13 +3,6 @@ import logging
import esphome.codegen as cg
from esphome.components import sensor, voltage_sampler
from esphome.components.esp32 import get_esp32_variant
from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
from esphome.components.zephyr import (
zephyr_add_overlay,
zephyr_add_prj_conf,
zephyr_add_user,
)
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
CONF_ATTENUATION,
@@ -18,10 +11,8 @@ from esphome.const import (
CONF_PIN,
CONF_RAW,
DEVICE_CLASS_VOLTAGE,
PLATFORM_NRF52,
STATE_CLASS_MEASUREMENT,
UNIT_VOLT,
PlatformFramework,
)
from esphome.core import CORE

@@ -69,10 +60,6 @@ ADCSensor = adc_ns.class_(
"ADCSensor", sensor.Sensor, cg.PollingComponent, voltage_sampler.VoltageSampler
)

CONF_NRF_SAADC = "nrf_saadc"

adc_dt_spec = cg.global_ns.class_("adc_dt_spec")

CONFIG_SCHEMA = cv.All(
sensor.sensor_schema(
ADCSensor,
@@ -88,7 +75,6 @@ CONFIG_SCHEMA = cv.All(
cv.SplitDefault(CONF_ATTENUATION, esp32="0db"): cv.All(
cv.only_on_esp32, _attenuation
),
cv.OnlyWith(CONF_NRF_SAADC, PLATFORM_NRF52): cv.declare_id(adc_dt_spec),
cv.Optional(CONF_SAMPLES, default=1): cv.int_range(min=1, max=255),
cv.Optional(CONF_SAMPLING_MODE, default="avg"): _sampling_mode,
}
@@ -97,8 +83,6 @@ CONFIG_SCHEMA = cv.All(
validate_config,
)

CONF_ADC_CHANNEL_ID = "adc_channel_id"


async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
@@ -109,7 +93,7 @@ async def to_code(config):
cg.add_define("USE_ADC_SENSOR_VCC")
elif config[CONF_PIN] == "TEMPERATURE":
cg.add(var.set_is_temperature())
elif not CORE.is_nrf52 or config[CONF_PIN][CONF_NUMBER] not in EXTRA_ADC:
else:
pin = await cg.gpio_pin_expression(config[CONF_PIN])
cg.add(var.set_pin(pin))

@@ -138,59 +122,3 @@ async def to_code(config):
):
chan = ESP32_VARIANT_ADC2_PIN_TO_CHANNEL[variant][pin_num]
cg.add(var.set_channel(adc_unit_t.ADC_UNIT_2, chan))

elif CORE.is_nrf52:
CORE.data.setdefault(CONF_ADC_CHANNEL_ID, 0)
channel_id = CORE.data[CONF_ADC_CHANNEL_ID]
CORE.data[CONF_ADC_CHANNEL_ID] = channel_id + 1
zephyr_add_prj_conf("ADC", True)
nrf_saadc = config[CONF_NRF_SAADC]
rhs = cg.RawExpression(
f"ADC_DT_SPEC_GET_BY_IDX(DT_PATH(zephyr_user), {channel_id})"
)
adc = cg.new_Pvariable(nrf_saadc, rhs)
cg.add(var.set_adc_channel(adc))
gain = "ADC_GAIN_1_6"
pin_number = config[CONF_PIN][CONF_NUMBER]
if pin_number == "VDDHDIV5":
gain = "ADC_GAIN_1_2"
if isinstance(pin_number, int):
GPIO_TO_AIN = {v: k for k, v in AIN_TO_GPIO.items()}
pin_number = GPIO_TO_AIN[pin_number]
zephyr_add_user("io-channels", f"<&adc {channel_id}>")
zephyr_add_overlay(
f"""
&adc {{
#address-cells = <1>;
#size-cells = <0>;

channel@{channel_id} {{
reg = <{channel_id}>;
zephyr,gain = "{gain}";
zephyr,reference = "ADC_REF_INTERNAL";
zephyr,acquisition-time = <ADC_ACQ_TIME_DEFAULT>;
zephyr,input-positive = <NRF_SAADC_{pin_number}>;
zephyr,resolution = <14>;
zephyr,oversampling = <8>;
}};
}};
"""
)


FILTER_SOURCE_FILES = filter_source_files_from_platform(
{
"adc_sensor_esp32.cpp": {
PlatformFramework.ESP32_ARDUINO,
PlatformFramework.ESP32_IDF,
},
"adc_sensor_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
"adc_sensor_rp2040.cpp": {PlatformFramework.RP2040_ARDUINO},
"adc_sensor_libretiny.cpp": {
PlatformFramework.BK72XX_ARDUINO,
PlatformFramework.RTL87XX_ARDUINO,
PlatformFramework.LN882X_ARDUINO,
},
"adc_sensor_zephyr.cpp": {PlatformFramework.NRF52_ZEPHYR},
}
)

@@ -113,7 +113,7 @@ void ADE7880::update() {
if (this->channel_a_ != nullptr) {
auto *chan = this->channel_a_;
this->update_sensor_from_s24zp_register16_(chan->current, AIRMS, [](float val) { return val / 100000.0f; });
this->update_sensor_from_s24zp_register16_(chan->voltage, AVRMS, [](float val) { return val / 10000.0f; });
this->update_sensor_from_s24zp_register16_(chan->voltage, BVRMS, [](float val) { return val / 10000.0f; });
this->update_sensor_from_s24zp_register16_(chan->active_power, AWATT, [](float val) { return val / 100.0f; });
this->update_sensor_from_s24zp_register16_(chan->apparent_power, AVA, [](float val) { return val / 100.0f; });
this->update_sensor_from_s16_register16_(chan->power_factor, APF,

@@ -36,7 +36,6 @@ from esphome.const import (
UNIT_WATT,
UNIT_WATT_HOURS,
)
from esphome.types import ConfigType

DEPENDENCIES = ["i2c"]

@@ -52,20 +51,6 @@ CONF_POWER_GAIN = "power_gain"

CONF_NEUTRAL = "neutral"

# Tuple of power channel phases
POWER_PHASES = (CONF_PHASE_A, CONF_PHASE_B, CONF_PHASE_C)

# Tuple of sensor types that can be configured for power channels
POWER_SENSOR_TYPES = (
CONF_CURRENT,
CONF_VOLTAGE,
CONF_ACTIVE_POWER,
CONF_APPARENT_POWER,
CONF_POWER_FACTOR,
CONF_FORWARD_ACTIVE_ENERGY,
CONF_REVERSE_ACTIVE_ENERGY,
)

NEUTRAL_CHANNEL_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.declare_id(NeutralChannel),
@@ -165,69 +150,12 @@ POWER_CHANNEL_SCHEMA = cv.Schema(
}
)


def prefix_sensor_name(
sensor_conf: ConfigType,
channel_name: str,
channel_config: ConfigType,
sensor_type: str,
) -> None:
"""Helper to prefix sensor name with channel name.

Args:
sensor_conf: The sensor configuration (dict or string)
channel_name: The channel name to prefix with
channel_config: The channel configuration to update
sensor_type: The sensor type key in the channel config
"""
if isinstance(sensor_conf, dict) and CONF_NAME in sensor_conf:
sensor_name = sensor_conf[CONF_NAME]
if sensor_name and not sensor_name.startswith(channel_name):
sensor_conf[CONF_NAME] = f"{channel_name} {sensor_name}"
elif isinstance(sensor_conf, str):
# Simple value case - convert to dict with prefixed name
channel_config[sensor_type] = {CONF_NAME: f"{channel_name} {sensor_conf}"}


def process_channel_sensors(
config: ConfigType, channel_key: str, sensor_types: tuple
) -> None:
"""Process sensors for a channel and prefix their names.

Args:
config: The main configuration
channel_key: The channel key (e.g., CONF_PHASE_A, CONF_NEUTRAL)
sensor_types: Tuple of sensor types to process for this channel
"""
if not (channel_config := config.get(channel_key)) or not (
channel_name := channel_config.get(CONF_NAME)
):
return

for sensor_type in sensor_types:
if sensor_conf := channel_config.get(sensor_type):
prefix_sensor_name(sensor_conf, channel_name, channel_config, sensor_type)


def preprocess_channels(config: ConfigType) -> ConfigType:
"""Preprocess channel configurations to add channel name prefix to sensor names."""
# Process power channels
for channel in POWER_PHASES:
process_channel_sensors(config, channel, POWER_SENSOR_TYPES)

# Process neutral channel
process_channel_sensors(config, CONF_NEUTRAL, (CONF_CURRENT,))

return config


CONFIG_SCHEMA = cv.All(
preprocess_channels,
CONFIG_SCHEMA = (
cv.Schema(
{
cv.GenerateID(): cv.declare_id(ADE7880),
cv.Optional(CONF_FREQUENCY, default="50Hz"): cv.All(
cv.frequency, cv.float_range(min=45.0, max=66.0)
cv.frequency, cv.Range(min=45.0, max=66.0)
),
cv.Optional(CONF_IRQ0_PIN): pins.internal_gpio_input_pin_schema,
cv.Required(CONF_IRQ1_PIN): pins.internal_gpio_input_pin_schema,
@@ -239,7 +167,7 @@ CONFIG_SCHEMA = cv.All(
}
)
.extend(cv.polling_component_schema("60s"))
.extend(i2c.i2c_device_schema(0x38)),
.extend(i2c.i2c_device_schema(0x38))
)


@@ -260,7 +188,15 @@ async def neutral_channel(config):
async def power_channel(config):
var = cg.new_Pvariable(config[CONF_ID])

for sensor_type in POWER_SENSOR_TYPES:
for sensor_type in [
CONF_CURRENT,
CONF_VOLTAGE,
CONF_ACTIVE_POWER,
CONF_APPARENT_POWER,
CONF_POWER_FACTOR,
CONF_FORWARD_ACTIVE_ENERGY,
CONF_REVERSE_ACTIVE_ENERGY,
]:
if conf := config.get(sensor_type):
sens = await sensor.new_sensor(conf)
cg.add(getattr(var, f"set_{sensor_type}")(sens))
@@ -280,6 +216,44 @@ async def power_channel(config):
return var


def final_validate(config):
for channel in [CONF_PHASE_A, CONF_PHASE_B, CONF_PHASE_C]:
if channel := config.get(channel):
channel_name = channel.get(CONF_NAME)

for sensor_type in [
CONF_CURRENT,
CONF_VOLTAGE,
CONF_ACTIVE_POWER,
CONF_APPARENT_POWER,
CONF_POWER_FACTOR,
CONF_FORWARD_ACTIVE_ENERGY,
CONF_REVERSE_ACTIVE_ENERGY,
]:
if conf := channel.get(sensor_type):
sensor_name = conf.get(CONF_NAME)
if (
sensor_name
and channel_name
and not sensor_name.startswith(channel_name)
):
conf[CONF_NAME] = f"{channel_name} {sensor_name}"

if channel := config.get(CONF_NEUTRAL):
channel_name = channel.get(CONF_NAME)
if conf := channel.get(CONF_CURRENT):
sensor_name = conf.get(CONF_NAME)
if (
sensor_name
and channel_name
and not sensor_name.startswith(channel_name)
):
conf[CONF_NAME] = f"{channel_name} {sensor_name}"


FINAL_VALIDATE_SCHEMA = final_validate


async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)

@@ -24,8 +24,6 @@ from esphome.const import (
UNIT_WATT,
)

CODEOWNERS = ["@angelnu"]

CONF_CURRENT_A = "current_a"
CONF_CURRENT_B = "current_b"
CONF_ACTIVE_POWER_A = "active_power_a"

@@ -25,8 +25,7 @@ void ADE7953::setup() {
this->ade_write_8(PGA_V_8, pga_v_);
this->ade_write_8(PGA_IA_8, pga_ia_);
this->ade_write_8(PGA_IB_8, pga_ib_);
this->ade_write_32(AVGAIN_32, avgain_);
this->ade_write_32(BVGAIN_32, bvgain_);
this->ade_write_32(AVGAIN_32, vgain_);
this->ade_write_32(AIGAIN_32, aigain_);
this->ade_write_32(BIGAIN_32, bigain_);
this->ade_write_32(AWGAIN_32, awgain_);
@@ -35,8 +34,7 @@ void ADE7953::setup() {
this->ade_read_8(PGA_V_8, &pga_v_);
this->ade_read_8(PGA_IA_8, &pga_ia_);
this->ade_read_8(PGA_IB_8, &pga_ib_);
this->ade_read_32(AVGAIN_32, &avgain_);
this->ade_read_32(BVGAIN_32, &bvgain_);
this->ade_read_32(AVGAIN_32, &vgain_);
this->ade_read_32(AIGAIN_32, &aigain_);
this->ade_read_32(BIGAIN_32, &bigain_);
this->ade_read_32(AWGAIN_32, &awgain_);
@@ -65,14 +63,13 @@ void ADE7953::dump_config() {
"  PGA_V_8: 0x%X\n"
"  PGA_IA_8: 0x%X\n"
"  PGA_IB_8: 0x%X\n"
"  AVGAIN_32: 0x%08jX\n"
"  BVGAIN_32: 0x%08jX\n"
"  VGAIN_32: 0x%08jX\n"
"  AIGAIN_32: 0x%08jX\n"
"  BIGAIN_32: 0x%08jX\n"
"  AWGAIN_32: 0x%08jX\n"
"  BWGAIN_32: 0x%08jX",
this->use_acc_energy_regs_, pga_v_, pga_ia_, pga_ib_, (uintmax_t) avgain_, (uintmax_t) bvgain_,
(uintmax_t) aigain_, (uintmax_t) bigain_, (uintmax_t) awgain_, (uintmax_t) bwgain_);
this->use_acc_energy_regs_, pga_v_, pga_ia_, pga_ib_, (uintmax_t) vgain_, (uintmax_t) aigain_,
(uintmax_t) bigain_, (uintmax_t) awgain_, (uintmax_t) bwgain_);
}

#define ADE_PUBLISH_(name, val, factor) \

@@ -46,12 +46,7 @@ class ADE7953 : public PollingComponent, public sensor::Sensor {
void set_pga_ib(uint8_t pga_ib) { pga_ib_ = pga_ib; }

// Set input gains
void set_vgain(uint32_t vgain) {
// Datasheet says: "to avoid discrepancies in other registers,
// if AVGAIN is set then BVGAIN should be set to the same value."
avgain_ = vgain;
bvgain_ = vgain;
}
void set_vgain(uint32_t vgain) { vgain_ = vgain; }
void set_aigain(uint32_t aigain) { aigain_ = aigain; }
void set_bigain(uint32_t bigain) { bigain_ = bigain; }
void set_awgain(uint32_t awgain) { awgain_ = awgain; }
@@ -105,8 +100,7 @@ class ADE7953 : public PollingComponent, public sensor::Sensor {
uint8_t pga_v_;
uint8_t pga_ia_;
uint8_t pga_ib_;
uint32_t avgain_;
uint32_t bvgain_;
uint32_t vgain_;
uint32_t aigain_;
uint32_t bigain_;
uint32_t awgain_;

@@ -89,7 +89,7 @@ void AGS10Component::dump_config() {
bool AGS10Component::new_i2c_address(uint8_t newaddress) {
uint8_t rev_newaddress = ~newaddress;
std::array<uint8_t, 5> data{newaddress, rev_newaddress, newaddress, rev_newaddress, 0};
data[4] = crc8(data.data(), 4, 0xFF, 0x31, true);
data[4] = calc_crc8_(data, 4);
if (!this->write_bytes(REG_ADDRESS, data)) {
this->error_code_ = COMMUNICATION_FAILED;
this->status_set_warning();
@@ -109,7 +109,7 @@ bool AGS10Component::set_zero_point_with_current_resistance() { return this->set

bool AGS10Component::set_zero_point_with(uint16_t value) {
std::array<uint8_t, 5> data{0x00, 0x0C, (uint8_t) ((value >> 8) & 0xFF), (uint8_t) (value & 0xFF), 0};
data[4] = crc8(data.data(), 4, 0xFF, 0x31, true);
data[4] = calc_crc8_(data, 4);
if (!this->write_bytes(REG_CALIBRATION, data)) {
this->error_code_ = COMMUNICATION_FAILED;
this->status_set_warning();
@@ -184,7 +184,7 @@ template<size_t N> optional<std::array<uint8_t, N>> AGS10Component::read_and_che
auto res = *data;
auto crc_byte = res[len];

if (crc_byte != crc8(res.data(), len, 0xFF, 0x31, true)) {
if (crc_byte != calc_crc8_(res, len)) {
this->error_code_ = CRC_CHECK_FAILED;
ESP_LOGE(TAG, "Reading AGS10 version failed: crc error!");
return optional<std::array<uint8_t, N>>();
@@ -192,5 +192,20 @@ template<size_t N> optional<std::array<uint8_t, N>> AGS10Component::read_and_che

return data;
}

template<size_t N> uint8_t AGS10Component::calc_crc8_(std::array<uint8_t, N> dat, uint8_t num) {
uint8_t i, byte1, crc = 0xFF;
for (byte1 = 0; byte1 < num; byte1++) {
crc ^= (dat[byte1]);
for (i = 0; i < 8; i++) {
if (crc & 0x80) {
crc = (crc << 1) ^ 0x31;
} else {
crc = (crc << 1);
}
}
}
return crc;
}
}  // namespace ags10
}  // namespace esphome

@@ -1,9 +1,9 @@
#pragma once

#include "esphome/components/i2c/i2c.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/i2c/i2c.h"

namespace esphome {
namespace ags10 {
@@ -99,13 +99,23 @@ class AGS10Component : public PollingComponent, public i2c::I2CDevice {
* Read, checks and returns data from the sensor.
*/
template<size_t N> optional<std::array<uint8_t, N>> read_and_check_(uint8_t a_register);

/**
* Calculates CRC8 value.
*
* CRC8 calculation, initial value: 0xFF, polynomial: 0x31 (x8+ x5+ x4+1)
*
* @param[in] dat the data buffer
* @param num number of bytes in the buffer
*/
template<size_t N> uint8_t calc_crc8_(std::array<uint8_t, N> dat, uint8_t num);
};

template<typename... Ts> class AGS10NewI2cAddressAction : public Action<Ts...>, public Parented<AGS10Component> {
public:
TEMPLATABLE_VALUE(uint8_t, new_address)

void play(const Ts &...x) override { this->parent_->new_i2c_address(this->new_address_.value(x...)); }
void play(Ts... x) override { this->parent_->new_i2c_address(this->new_address_.value(x...)); }
};

enum AGS10SetZeroPointActionMode {
@@ -122,7 +132,7 @@ template<typename... Ts> class AGS10SetZeroPointAction : public Action<Ts...>, p
TEMPLATABLE_VALUE(uint16_t, value)
TEMPLATABLE_VALUE(AGS10SetZeroPointActionMode, mode)

void play(const Ts &...x) override {
void play(Ts... x) override {
switch (this->mode_.value(x...)) {
case FACTORY_DEFAULT:
this->parent_->set_zero_point_with_factory_defaults();

@@ -83,7 +83,7 @@ void AHT10Component::setup() {
|
||||
void AHT10Component::restart_read_() {
|
||||
if (this->read_count_ == AHT10_ATTEMPTS) {
|
||||
this->read_count_ = 0;
|
||||
this->status_set_error(LOG_STR("Reading timed out"));
|
||||
this->status_set_error("Reading timed out");
|
||||
return;
|
||||
}
|
||||
this->read_count_++;
|
||||
@@ -96,7 +96,7 @@ void AHT10Component::read_data_() {
|
||||
ESP_LOGD(TAG, "Read attempt %d at %ums", this->read_count_, (unsigned) (millis() - this->start_time_));
|
||||
}
|
||||
if (this->read(data, 6) != i2c::ERROR_OK) {
|
||||
this->status_set_warning(LOG_STR("Read failed, will retry"));
|
||||
this->status_set_warning("Read failed, will retry");
|
||||
this->restart_read_();
|
||||
return;
|
||||
}
|
||||
@@ -113,7 +113,7 @@ void AHT10Component::read_data_() {
|
||||
} else {
|
||||
ESP_LOGD(TAG, "Invalid humidity, retrying");
|
||||
if (this->write(AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD)) != i2c::ERROR_OK) {
|
||||
this->status_set_warning(LOG_STR(ESP_LOG_MSG_COMM_FAIL));
|
||||
this->status_set_warning(ESP_LOG_MSG_COMM_FAIL);
|
||||
}
|
||||
this->restart_read_();
|
||||
return;
|
||||
@@ -144,7 +144,7 @@ void AHT10Component::update() {
|
||||
return;
|
||||
this->start_time_ = millis();
|
||||
if (this->write(AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD)) != i2c::ERROR_OK) {
|
||||
this->status_set_warning(LOG_STR(ESP_LOG_MSG_COMM_FAIL));
|
||||
this->status_set_warning(ESP_LOG_MSG_COMM_FAIL);
|
||||
return;
|
||||
}
|
||||
this->restart_read_();
|
||||
|
||||
@@ -13,7 +13,7 @@ template<typename... Ts> class SetAutoMuteAction : public Action<Ts...> {
|
||||
|
||||
TEMPLATABLE_VALUE(uint8_t, auto_mute_mode)
|
||||
|
||||
void play(const Ts &...x) override { this->aic3204_->set_auto_mute_mode(this->auto_mute_mode_.value(x...)); }
|
||||
void play(Ts... x) override { this->aic3204_->set_auto_mute_mode(this->auto_mute_mode_.value(x...)); }
|
||||
|
||||
protected:
|
||||
AIC3204 *aic3204_;
|
||||
|
||||
@@ -18,6 +18,6 @@ CONFIG_SCHEMA = cv.Schema(
|
||||
).extend(esp32_ble_tracker.ESP_BLE_DEVICE_SCHEMA)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
await esp32_ble_tracker.register_ble_device(var, config)
|
||||
yield esp32_ble_tracker.register_ble_device(var, config)
|
||||
|
||||
@@ -13,7 +13,7 @@ from esphome.const import (
|
||||
CONF_TRIGGER_ID,
|
||||
CONF_WEB_SERVER,
|
||||
)
|
||||
from esphome.core import CORE, CoroPriority, coroutine_with_priority
|
||||
from esphome.core import CORE, coroutine_with_priority
|
||||
from esphome.core.entity_helpers import entity_duplicate_validator, setup_entity
|
||||
from esphome.cpp_generator import MockObjClass
|
||||
|
||||
@@ -172,6 +172,12 @@ def alarm_control_panel_schema(
|
||||
return _ALARM_CONTROL_PANEL_SCHEMA.extend(schema)
|
||||
|
||||
|
||||
# Remove before 2025.11.0
|
||||
ALARM_CONTROL_PANEL_SCHEMA = alarm_control_panel_schema(AlarmControlPanel)
|
||||
ALARM_CONTROL_PANEL_SCHEMA.add_extra(
|
||||
cv.deprecated_schema_constant("alarm_control_panel")
|
||||
)
|
||||
|
||||
ALARM_CONTROL_PANEL_ACTION_SCHEMA = maybe_simple_id(
|
||||
{
|
||||
cv.GenerateID(): cv.use_id(AlarmControlPanel),
|
||||
@@ -295,7 +301,8 @@ async def alarm_action_disarm_to_code(config, action_id, template_arg, args):
|
||||
)
|
||||
async def alarm_action_pending_to_code(config, action_id, template_arg, args):
|
||||
paren = await cg.get_variable(config[CONF_ID])
|
||||
return cg.new_Pvariable(action_id, template_arg, paren)
|
||||
var = cg.new_Pvariable(action_id, template_arg, paren)
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
@@ -303,7 +310,8 @@ async def alarm_action_pending_to_code(config, action_id, template_arg, args):
|
||||
)
|
||||
async def alarm_action_trigger_to_code(config, action_id, template_arg, args):
|
||||
paren = await cg.get_variable(config[CONF_ID])
|
||||
return cg.new_Pvariable(action_id, template_arg, paren)
|
||||
var = cg.new_Pvariable(action_id, template_arg, paren)
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
@@ -311,7 +319,8 @@ async def alarm_action_trigger_to_code(config, action_id, template_arg, args):
|
||||
)
|
||||
async def alarm_action_chime_to_code(config, action_id, template_arg, args):
|
||||
paren = await cg.get_variable(config[CONF_ID])
|
||||
return cg.new_Pvariable(action_id, template_arg, paren)
|
||||
var = cg.new_Pvariable(action_id, template_arg, paren)
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
@@ -324,7 +333,8 @@ async def alarm_action_chime_to_code(config, action_id, template_arg, args):
|
||||
)
|
||||
async def alarm_action_ready_to_code(config, action_id, template_arg, args):
|
||||
paren = await cg.get_variable(config[CONF_ID])
|
||||
return cg.new_Pvariable(action_id, template_arg, paren)
|
||||
var = cg.new_Pvariable(action_id, template_arg, paren)
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_condition(
|
||||
@@ -339,6 +349,7 @@ async def alarm_control_panel_is_armed_to_code(
|
||||
return cg.new_Pvariable(condition_id, template_arg, paren)
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.CORE)
|
||||
@coroutine_with_priority(100.0)
|
||||
async def to_code(config):
|
||||
cg.add_global(alarm_control_panel_ns.using)
|
||||
cg.add_define("USE_ALARM_CONTROL_PANEL")
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
#include "alarm_control_panel.h"
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/controller_registry.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "alarm_control_panel.h"
|
||||
|
||||
#include "esphome/core/application.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
@@ -35,12 +33,23 @@ void AlarmControlPanel::publish_state(AlarmControlPanelState state) {
|
||||
ESP_LOGD(TAG, "Set state to: %s, previous: %s", LOG_STR_ARG(alarm_control_panel_state_to_string(state)),
|
||||
LOG_STR_ARG(alarm_control_panel_state_to_string(prev_state)));
|
||||
this->current_state_ = state;
|
||||
// Single state callback - triggers check get_state() for specific states
|
||||
this->state_callback_.call();
|
||||
#if defined(USE_ALARM_CONTROL_PANEL) && defined(USE_CONTROLLER_REGISTRY)
|
||||
ControllerRegistry::notify_alarm_control_panel_update(this);
|
||||
#endif
|
||||
// Cleared fires when leaving TRIGGERED state
|
||||
if (state == ACP_STATE_TRIGGERED) {
|
||||
this->triggered_callback_.call();
|
||||
} else if (state == ACP_STATE_ARMING) {
|
||||
this->arming_callback_.call();
|
||||
} else if (state == ACP_STATE_PENDING) {
|
||||
this->pending_callback_.call();
|
||||
} else if (state == ACP_STATE_ARMED_HOME) {
|
||||
this->armed_home_callback_.call();
|
||||
} else if (state == ACP_STATE_ARMED_NIGHT) {
|
||||
this->armed_night_callback_.call();
|
||||
} else if (state == ACP_STATE_ARMED_AWAY) {
|
||||
this->armed_away_callback_.call();
|
||||
} else if (state == ACP_STATE_DISARMED) {
|
||||
this->disarmed_callback_.call();
|
||||
}
|
||||
|
||||
if (prev_state == ACP_STATE_TRIGGERED) {
|
||||
this->cleared_callback_.call();
|
||||
}
|
||||
@@ -55,6 +64,34 @@ void AlarmControlPanel::add_on_state_callback(std::function<void()> &&callback)
|
||||
this->state_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_triggered_callback(std::function<void()> &&callback) {
|
||||
this->triggered_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_arming_callback(std::function<void()> &&callback) {
|
||||
this->arming_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_armed_home_callback(std::function<void()> &&callback) {
|
||||
this->armed_home_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_armed_night_callback(std::function<void()> &&callback) {
|
||||
this->armed_night_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_armed_away_callback(std::function<void()> &&callback) {
|
||||
this->armed_away_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_pending_callback(std::function<void()> &&callback) {
|
||||
this->pending_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_disarmed_callback(std::function<void()> &&callback) {
|
||||
this->disarmed_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
void AlarmControlPanel::add_on_cleared_callback(std::function<void()> &&callback) {
|
||||
this->cleared_callback_.add(std::move(callback));
|
||||
}
|
||||
|
||||
@@ -35,13 +35,54 @@ class AlarmControlPanel : public EntityBase {
|
||||
*/
|
||||
void publish_state(AlarmControlPanelState state);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes.
|
||||
* Triggers can check get_state() to determine the new state.
|
||||
/** Add a callback for when the state of the alarm_control_panel changes
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_state_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to triggered
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_triggered_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to arming
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_arming_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to pending
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_pending_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to armed_home
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_armed_home_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to armed_night
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_armed_night_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to armed_away
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_armed_away_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel changes to disarmed
|
||||
*
|
||||
* @param callback The callback function
|
||||
*/
|
||||
void add_on_disarmed_callback(std::function<void()> &&callback);
|
||||
|
||||
/** Add a callback for when the state of the alarm_control_panel clears from triggered
|
||||
*
|
||||
* @param callback The callback function
|
||||
@@ -131,14 +172,28 @@ class AlarmControlPanel : public EntityBase {
|
||||
uint32_t last_update_;
|
||||
// the call control function
|
||||
virtual void control(const AlarmControlPanelCall &call) = 0;
|
||||
// state callback - triggers check get_state() for specific state
|
||||
LazyCallbackManager<void()> state_callback_{};
|
||||
// clear callback - fires when leaving TRIGGERED state
|
||||
LazyCallbackManager<void()> cleared_callback_{};
|
||||
// state callback
|
||||
CallbackManager<void()> state_callback_{};
|
||||
// trigger callback
|
||||
CallbackManager<void()> triggered_callback_{};
|
||||
// arming callback
|
||||
CallbackManager<void()> arming_callback_{};
|
||||
// pending callback
|
||||
CallbackManager<void()> pending_callback_{};
|
||||
// armed_home callback
|
||||
CallbackManager<void()> armed_home_callback_{};
|
||||
// armed_night callback
|
||||
CallbackManager<void()> armed_night_callback_{};
|
||||
// armed_away callback
|
||||
CallbackManager<void()> armed_away_callback_{};
|
||||
// disarmed callback
|
||||
CallbackManager<void()> disarmed_callback_{};
|
||||
// clear callback
|
||||
CallbackManager<void()> cleared_callback_{};
|
||||
// chime callback
|
||||
LazyCallbackManager<void()> chime_callback_{};
|
||||
CallbackManager<void()> chime_callback_{};
|
||||
// ready callback
|
||||
LazyCallbackManager<void()> ready_callback_{};
|
||||
CallbackManager<void()> ready_callback_{};
|
||||
};
|
||||
|
||||
} // namespace alarm_control_panel
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
namespace esphome {
|
||||
namespace alarm_control_panel {
|
||||
|
||||
/// Trigger on any state change
|
||||
class StateTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit StateTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
@@ -14,30 +13,55 @@ class StateTrigger : public Trigger<> {
|
||||
}
|
||||
};
|
||||
|
||||
/// Template trigger that fires when entering a specific state
|
||||
template<AlarmControlPanelState State> class StateEnterTrigger : public Trigger<> {
|
||||
class TriggeredTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit StateEnterTrigger(AlarmControlPanel *alarm_control_panel) : alarm_control_panel_(alarm_control_panel) {
|
||||
alarm_control_panel->add_on_state_callback([this]() {
|
||||
if (this->alarm_control_panel_->get_state() == State)
|
||||
this->trigger();
|
||||
});
|
||||
explicit TriggeredTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_triggered_callback([this]() { this->trigger(); });
|
||||
}
|
||||
|
||||
protected:
|
||||
AlarmControlPanel *alarm_control_panel_;
|
||||
};
|
||||
|
||||
// Type aliases for state-specific triggers
|
||||
using TriggeredTrigger = StateEnterTrigger<ACP_STATE_TRIGGERED>;
|
||||
using ArmingTrigger = StateEnterTrigger<ACP_STATE_ARMING>;
|
||||
using PendingTrigger = StateEnterTrigger<ACP_STATE_PENDING>;
|
||||
using ArmedHomeTrigger = StateEnterTrigger<ACP_STATE_ARMED_HOME>;
|
||||
using ArmedNightTrigger = StateEnterTrigger<ACP_STATE_ARMED_NIGHT>;
|
||||
using ArmedAwayTrigger = StateEnterTrigger<ACP_STATE_ARMED_AWAY>;
|
||||
using DisarmedTrigger = StateEnterTrigger<ACP_STATE_DISARMED>;
|
||||
class ArmingTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ArmingTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_arming_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
class PendingTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit PendingTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_pending_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
class ArmedHomeTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ArmedHomeTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_armed_home_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
class ArmedNightTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ArmedNightTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_armed_night_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
class ArmedAwayTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ArmedAwayTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_armed_away_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
class DisarmedTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit DisarmedTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
alarm_control_panel->add_on_disarmed_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
/// Trigger when leaving TRIGGERED state (alarm cleared)
|
||||
class ClearedTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ClearedTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
@@ -45,7 +69,6 @@ class ClearedTrigger : public Trigger<> {
|
||||
}
|
||||
};
|
||||
|
||||
/// Trigger on chime event (zone opened while disarmed)
|
||||
class ChimeTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ChimeTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
@@ -53,7 +76,6 @@ class ChimeTrigger : public Trigger<> {
|
||||
}
|
||||
};
|
||||
|
||||
/// Trigger on ready state change
|
||||
class ReadyTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit ReadyTrigger(AlarmControlPanel *alarm_control_panel) {
|
||||
@@ -67,7 +89,7 @@ template<typename... Ts> class ArmAwayAction : public Action<Ts...> {
|
||||
|
||||
TEMPLATABLE_VALUE(std::string, code)
|
||||
|
||||
void play(const Ts &...x) override {
|
||||
void play(Ts... x) override {
|
||||
auto call = this->alarm_control_panel_->make_call();
|
||||
auto code = this->code_.optional_value(x...);
|
||||
if (code.has_value()) {
|
||||
@@ -87,7 +109,7 @@ template<typename... Ts> class ArmHomeAction : public Action<Ts...> {
|
||||
|
||||
TEMPLATABLE_VALUE(std::string, code)
|
||||
|
||||
void play(const Ts &...x) override {
|
||||
void play(Ts... x) override {
|
||||
auto call = this->alarm_control_panel_->make_call();
|
||||
auto code = this->code_.optional_value(x...);
|
||||
if (code.has_value()) {
|
||||
@@ -107,7 +129,7 @@ template<typename... Ts> class ArmNightAction : public Action<Ts...> {
|
||||
|
||||
TEMPLATABLE_VALUE(std::string, code)
|
||||
|
||||
void play(const Ts &...x) override {
|
||||
void play(Ts... x) override {
|
||||
auto call = this->alarm_control_panel_->make_call();
|
||||
auto code = this->code_.optional_value(x...);
|
||||
if (code.has_value()) {
|
||||
@@ -127,7 +149,7 @@ template<typename... Ts> class DisarmAction : public Action<Ts...> {
|
||||
|
||||
TEMPLATABLE_VALUE(std::string, code)
|
||||
|
||||
void play(const Ts &...x) override { this->alarm_control_panel_->disarm(this->code_.optional_value(x...)); }
|
||||
void play(Ts... x) override { this->alarm_control_panel_->disarm(this->code_.optional_value(x...)); }
|
||||
|
||||
protected:
|
||||
AlarmControlPanel *alarm_control_panel_;
|
||||
@@ -137,7 +159,7 @@ template<typename... Ts> class PendingAction : public Action<Ts...> {
|
||||
public:
|
||||
explicit PendingAction(AlarmControlPanel *alarm_control_panel) : alarm_control_panel_(alarm_control_panel) {}
|
||||
|
||||
void play(const Ts &...x) override { this->alarm_control_panel_->make_call().pending().perform(); }
|
||||
void play(Ts... x) override { this->alarm_control_panel_->make_call().pending().perform(); }
|
||||
|
||||
protected:
|
||||
AlarmControlPanel *alarm_control_panel_;
|
||||
@@ -147,7 +169,7 @@ template<typename... Ts> class TriggeredAction : public Action<Ts...> {
|
||||
public:
|
||||
explicit TriggeredAction(AlarmControlPanel *alarm_control_panel) : alarm_control_panel_(alarm_control_panel) {}
|
||||
|
||||
void play(const Ts &...x) override { this->alarm_control_panel_->make_call().triggered().perform(); }
|
||||
void play(Ts... x) override { this->alarm_control_panel_->make_call().triggered().perform(); }
|
||||
|
||||
protected:
|
||||
AlarmControlPanel *alarm_control_panel_;
|
||||
@@ -156,7 +178,7 @@ template<typename... Ts> class TriggeredAction : public Action<Ts...> {
|
||||
template<typename... Ts> class AlarmControlPanelCondition : public Condition<Ts...> {
|
||||
public:
|
||||
AlarmControlPanelCondition(AlarmControlPanel *parent) : parent_(parent) {}
|
||||
bool check(const Ts &...x) override {
|
||||
bool check(Ts... x) override {
|
||||
return this->parent_->is_state_armed(this->parent_->get_state()) ||
|
||||
this->parent_->get_state() == ACP_STATE_PENDING || this->parent_->get_state() == ACP_STATE_TRIGGERED;
|
||||
}
|
||||
|
||||
@@ -56,13 +56,13 @@ bool Alpha3::is_current_response_type_(const uint8_t *response_type) {
|
||||
|
||||
void Alpha3::handle_geni_response_(const uint8_t *response, uint16_t length) {
|
||||
if (this->response_offset_ >= this->response_length_) {
|
||||
ESP_LOGD(TAG, "[%s] GENI response begin", this->parent_->address_str());
|
||||
ESP_LOGD(TAG, "[%s] GENI response begin", this->parent_->address_str().c_str());
|
||||
if (length < GENI_RESPONSE_HEADER_LENGTH) {
|
||||
ESP_LOGW(TAG, "[%s] response too short", this->parent_->address_str());
|
||||
ESP_LOGW(TAG, "[%s] response to short", this->parent_->address_str().c_str());
|
||||
return;
|
||||
}
|
||||
if (response[0] != 36 || response[2] != 248 || response[3] != 231 || response[4] != 10) {
|
||||
ESP_LOGW(TAG, "[%s] response bytes %d %d %d %d %d don't match GENI HEADER", this->parent_->address_str(),
|
||||
ESP_LOGW(TAG, "[%s] response bytes %d %d %d %d %d don't match GENI HEADER", this->parent_->address_str().c_str(),
|
||||
response[0], response[1], response[2], response[3], response[4]);
|
||||
return;
|
||||
}
|
||||
@@ -77,11 +77,11 @@ void Alpha3::handle_geni_response_(const uint8_t *response, uint16_t length) {
|
||||
};
|
||||
|
||||
if (this->is_current_response_type_(GENI_RESPONSE_TYPE_FLOW_HEAD)) {
|
||||
ESP_LOGD(TAG, "[%s] FLOW HEAD Response", this->parent_->address_str());
|
||||
ESP_LOGD(TAG, "[%s] FLOW HEAD Response", this->parent_->address_str().c_str());
|
||||
extract_publish_sensor_value(GENI_RESPONSE_FLOW_OFFSET, this->flow_sensor_, 3600.0F);
|
||||
extract_publish_sensor_value(GENI_RESPONSE_HEAD_OFFSET, this->head_sensor_, .0001F);
|
||||
} else if (this->is_current_response_type_(GENI_RESPONSE_TYPE_POWER)) {
|
||||
ESP_LOGD(TAG, "[%s] POWER Response", this->parent_->address_str());
|
||||
ESP_LOGD(TAG, "[%s] POWER Response", this->parent_->address_str().c_str());
|
||||
extract_publish_sensor_value(GENI_RESPONSE_POWER_OFFSET, this->power_sensor_, 1.0F);
|
||||
extract_publish_sensor_value(GENI_RESPONSE_CURRENT_OFFSET, this->current_sensor_, 1.0F);
|
||||
extract_publish_sensor_value(GENI_RESPONSE_MOTOR_SPEED_OFFSET, this->speed_sensor_, 1.0F);
|
||||
@@ -100,7 +100,7 @@ void Alpha3::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc
|
||||
if (param->open.status == ESP_GATT_OK) {
|
||||
this->response_offset_ = 0;
|
||||
this->response_length_ = 0;
|
||||
ESP_LOGI(TAG, "[%s] connection open", this->parent_->address_str());
|
||||
ESP_LOGI(TAG, "[%s] connection open", this->parent_->address_str().c_str());
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -132,7 +132,7 @@ void Alpha3::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc
|
||||
case ESP_GATTC_SEARCH_CMPL_EVT: {
|
||||
auto *chr = this->parent_->get_characteristic(ALPHA3_GENI_SERVICE_UUID, ALPHA3_GENI_CHARACTERISTIC_UUID);
|
||||
if (chr == nullptr) {
|
||||
ESP_LOGE(TAG, "[%s] No GENI service found at device, not an Alpha3..?", this->parent_->address_str());
|
||||
ESP_LOGE(TAG, "[%s] No GENI service found at device, not an Alpha3..?", this->parent_->address_str().c_str());
|
||||
break;
|
||||
}
|
||||
auto status = esp_ble_gattc_register_for_notify(this->parent_->get_gattc_if(), this->parent_->get_remote_bda(),
|
||||
@@ -164,12 +164,12 @@ void Alpha3::send_request_(uint8_t *request, size_t len) {
|
||||
esp_ble_gattc_write_char(this->parent_->get_gattc_if(), this->parent_->get_conn_id(), this->geni_handle_, len,
|
||||
request, ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status)
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(), status);
|
||||
}
|
||||
|
||||
void Alpha3::update() {
|
||||
if (this->node_state != espbt::ClientState::ESTABLISHED) {
|
||||
ESP_LOGW(TAG, "[%s] Cannot poll, not connected", this->parent_->address_str());
|
||||
ESP_LOGW(TAG, "[%s] Cannot poll, not connected", this->parent_->address_str().c_str());
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -29,6 +29,22 @@ namespace am2315c {
|
||||
|
||||
static const char *const TAG = "am2315c";
|
||||
|
||||
uint8_t AM2315C::crc8_(uint8_t *data, uint8_t len) {
|
||||
uint8_t crc = 0xFF;
|
||||
while (len--) {
|
||||
crc ^= *data++;
|
||||
for (uint8_t i = 0; i < 8; i++) {
|
||||
if (crc & 0x80) {
|
||||
crc <<= 1;
|
||||
crc ^= 0x31;
|
||||
} else {
|
||||
crc <<= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
return crc;
|
||||
}
|
||||
|
||||
bool AM2315C::reset_register_(uint8_t reg) {
|
||||
// code based on demo code sent by www.aosong.com
|
||||
// no further documentation.
|
||||
@@ -70,7 +86,7 @@ bool AM2315C::convert_(uint8_t *data, float &humidity, float &temperature) {
|
||||
humidity = raw * 9.5367431640625e-5;
|
||||
raw = ((data[3] & 0x0F) << 16) | (data[4] << 8) | data[5];
|
||||
temperature = raw * 1.9073486328125e-4 - 50;
|
||||
return crc8(data, 6, 0xFF, 0x31, true) == data[6];
|
||||
return this->crc8_(data, 6) == data[6];
|
||||
}
|
||||
|
||||
void AM2315C::setup() {
|
||||
|
||||
@@ -21,9 +21,9 @@
|
||||
// SOFTWARE.
|
||||
#pragma once
|
||||
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/components/sensor/sensor.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace am2315c {
|
||||
@@ -39,6 +39,7 @@ class AM2315C : public PollingComponent, public i2c::I2CDevice {
|
||||
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }
|
||||
|
||||
protected:
|
||||
uint8_t crc8_(uint8_t *data, uint8_t len);
|
||||
bool convert_(uint8_t *data, float &humidity, float &temperature);
|
||||
bool reset_register_(uint8_t reg);
|
||||
|
||||
|
||||
@@ -44,9 +44,11 @@ void Am43::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_i
|
||||
auto *chr = this->parent_->get_characteristic(AM43_SERVICE_UUID, AM43_CHARACTERISTIC_UUID);
|
||||
if (chr == nullptr) {
|
||||
if (this->parent_->get_characteristic(AM43_TUYA_SERVICE_UUID, AM43_TUYA_CHARACTERISTIC_UUID) != nullptr) {
|
||||
ESP_LOGE(TAG, "[%s] Detected a Tuya AM43 which is not supported, sorry.", this->parent_->address_str());
|
||||
ESP_LOGE(TAG, "[%s] Detected a Tuya AM43 which is not supported, sorry.",
|
||||
this->parent_->address_str().c_str());
|
||||
} else {
|
||||
ESP_LOGE(TAG, "[%s] No control service found at device, not an AM43..?", this->parent_->address_str());
|
||||
ESP_LOGE(TAG, "[%s] No control service found at device, not an AM43..?",
|
||||
this->parent_->address_str().c_str());
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -80,7 +82,8 @@ void Am43::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_i
|
||||
this->char_handle_, packet->length, packet->data,
|
||||
ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(),
|
||||
status);
|
||||
}
|
||||
}
|
||||
this->current_sensor_ = 0;
|
||||
@@ -94,7 +97,7 @@ void Am43::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_i
|
||||
|
||||
void Am43::update() {
|
||||
if (this->node_state != espbt::ClientState::ESTABLISHED) {
|
||||
ESP_LOGW(TAG, "[%s] Cannot poll, not connected", this->parent_->address_str());
|
||||
ESP_LOGW(TAG, "[%s] Cannot poll, not connected", this->parent_->address_str().c_str());
|
||||
return;
|
||||
}
|
||||
if (this->current_sensor_ == 0) {
|
||||
@@ -104,7 +107,7 @@ void Am43::update() {
|
||||
esp_ble_gattc_write_char(this->parent_->get_gattc_if(), this->parent_->get_conn_id(), this->char_handle_,
|
||||
packet->length, packet->data, ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(), status);
|
||||
}
|
||||
}
|
||||
this->current_sensor_++;
|
||||
|
||||
@@ -12,11 +12,10 @@ void AnalogThresholdBinarySensor::setup() {
|
||||
// TRUE state is defined to be when sensor is >= threshold
|
||||
// so when undefined sensor value initialize to FALSE
|
||||
if (std::isnan(sensor_value)) {
|
||||
this->raw_state_ = false;
|
||||
this->publish_initial_state(false);
|
||||
} else {
|
||||
this->raw_state_ = sensor_value >= (this->lower_threshold_.value() + this->upper_threshold_.value()) / 2.0f;
|
||||
this->publish_initial_state(this->raw_state_);
|
||||
this->publish_initial_state(sensor_value >=
|
||||
(this->lower_threshold_.value() + this->upper_threshold_.value()) / 2.0f);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,10 +25,8 @@ void AnalogThresholdBinarySensor::set_sensor(sensor::Sensor *analog_sensor) {
|
||||
this->sensor_->add_on_state_callback([this](float sensor_value) {
|
||||
// if there is an invalid sensor reading, ignore the change and keep the current state
|
||||
if (!std::isnan(sensor_value)) {
|
||||
// Use raw_state_ for hysteresis logic, not this->state which is post-filter
|
||||
this->raw_state_ =
|
||||
sensor_value >= (this->raw_state_ ? this->lower_threshold_.value() : this->upper_threshold_.value());
|
||||
this->publish_state(this->raw_state_);
|
||||
this->publish_state(sensor_value >=
|
||||
(this->state ? this->lower_threshold_.value() : this->upper_threshold_.value()));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -20,7 +20,6 @@ class AnalogThresholdBinarySensor : public Component, public binary_sensor::Bina
|
||||
sensor::Sensor *sensor_{nullptr};
|
||||
TemplatableValue<float> upper_threshold_{};
|
||||
TemplatableValue<float> lower_threshold_{};
|
||||
bool raw_state_{false}; // Pre-filter state for hysteresis logic
|
||||
};
|
||||
|
||||
} // namespace analog_threshold
|
||||
|
||||
@@ -34,20 +34,17 @@ SetFrameAction = animation_ns.class_(
|
||||
"AnimationSetFrameAction", automation.Action, cg.Parented.template(Animation_)
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
espImage.IMAGE_SCHEMA.extend(
|
||||
{
|
||||
cv.Required(CONF_ID): cv.declare_id(Animation_),
|
||||
cv.Optional(CONF_LOOP): cv.All(
|
||||
{
|
||||
cv.Optional(CONF_START_FRAME, default=0): cv.positive_int,
|
||||
cv.Optional(CONF_END_FRAME): cv.positive_int,
|
||||
cv.Optional(CONF_REPEAT): cv.positive_int,
|
||||
}
|
||||
),
|
||||
},
|
||||
),
|
||||
espImage.validate_settings,
|
||||
CONFIG_SCHEMA = espImage.IMAGE_SCHEMA.extend(
|
||||
{
|
||||
cv.Required(CONF_ID): cv.declare_id(Animation_),
|
||||
cv.Optional(CONF_LOOP): cv.All(
|
||||
{
|
||||
cv.Optional(CONF_START_FRAME, default=0): cv.positive_int,
|
||||
cv.Optional(CONF_END_FRAME): cv.positive_int,
|
||||
cv.Optional(CONF_REPEAT): cv.positive_int,
|
||||
}
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -26,12 +26,12 @@ uint32_t Animation::get_animation_frame_count() const { return this->animation_f
|
||||
int Animation::get_current_frame() const { return this->current_frame_; }
|
||||
void Animation::next_frame() {
|
||||
this->current_frame_++;
|
||||
if (loop_count_ && static_cast<uint32_t>(this->current_frame_) == loop_end_frame_ &&
|
||||
if (loop_count_ && this->current_frame_ == loop_end_frame_ &&
|
||||
(this->loop_current_iteration_ < loop_count_ || loop_count_ < 0)) {
|
||||
this->current_frame_ = loop_start_frame_;
|
||||
this->loop_current_iteration_++;
|
||||
}
|
||||
if (static_cast<uint32_t>(this->current_frame_) >= animation_frame_count_) {
|
||||
if (this->current_frame_ >= animation_frame_count_) {
|
||||
this->loop_current_iteration_ = 1;
|
||||
this->current_frame_ = 0;
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ class Animation : public image::Image {
|
||||
template<typename... Ts> class AnimationNextFrameAction : public Action<Ts...> {
|
||||
public:
|
||||
AnimationNextFrameAction(Animation *parent) : parent_(parent) {}
|
||||
void play(const Ts &...x) override { this->parent_->next_frame(); }
|
||||
void play(Ts... x) override { this->parent_->next_frame(); }
|
||||
|
||||
protected:
|
||||
Animation *parent_;
|
||||
@@ -48,7 +48,7 @@ template<typename... Ts> class AnimationNextFrameAction : public Action<Ts...> {
|
||||
template<typename... Ts> class AnimationPrevFrameAction : public Action<Ts...> {
|
||||
public:
|
||||
AnimationPrevFrameAction(Animation *parent) : parent_(parent) {}
|
||||
void play(const Ts &...x) override { this->parent_->prev_frame(); }
|
||||
void play(Ts... x) override { this->parent_->prev_frame(); }
|
||||
|
||||
protected:
|
||||
Animation *parent_;
|
||||
@@ -58,7 +58,7 @@ template<typename... Ts> class AnimationSetFrameAction : public Action<Ts...> {
|
||||
public:
|
||||
AnimationSetFrameAction(Animation *parent) : parent_(parent) {}
|
||||
TEMPLATABLE_VALUE(uint16_t, frame)
|
||||
void play(const Ts &...x) override { this->parent_->set_frame(this->frame_.value(x...)); }
|
||||
void play(Ts... x) override { this->parent_->set_frame(this->frame_.value(x...)); }
|
||||
|
||||
protected:
|
||||
Animation *parent_;
|
||||
|
||||
@@ -42,7 +42,7 @@ void Anova::control(const ClimateCall &call) {
|
||||
esp_ble_gattc_write_char(this->parent_->get_gattc_if(), this->parent_->get_conn_id(), this->char_handle_,
|
||||
pkt->length, pkt->data, ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(), status);
|
||||
}
|
||||
}
|
||||
if (call.get_target_temperature().has_value()) {
|
||||
@@ -51,7 +51,7 @@ void Anova::control(const ClimateCall &call) {
|
||||
esp_ble_gattc_write_char(this->parent_->get_gattc_if(), this->parent_->get_conn_id(), this->char_handle_,
|
||||
pkt->length, pkt->data, ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(), status);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -124,7 +124,8 @@ void Anova::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_
|
||||
esp_ble_gattc_write_char(this->parent_->get_gattc_if(), this->parent_->get_conn_id(), this->char_handle_,
|
||||
pkt->length, pkt->data, ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(),
|
||||
status);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -149,7 +150,7 @@ void Anova::update() {
|
||||
esp_ble_gattc_write_char(this->parent_->get_gattc_if(), this->parent_->get_conn_id(), this->char_handle_,
|
||||
pkt->length, pkt->data, ESP_GATT_WRITE_TYPE_NO_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str(), status);
|
||||
ESP_LOGW(TAG, "[%s] esp_ble_gattc_write_char failed, status=%d", this->parent_->address_str().c_str(), status);
|
||||
}
|
||||
this->current_request_++;
|
||||
}
|
||||
|
||||
@@ -28,7 +28,7 @@ class Anova : public climate::Climate, public esphome::ble_client::BLEClientNode
|
||||
void dump_config() override;
|
||||
climate::ClimateTraits traits() override {
|
||||
auto traits = climate::ClimateTraits();
|
||||
traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
|
||||
traits.set_supports_current_temperature(true);
|
||||
traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::ClimateMode::CLIMATE_MODE_HEAT});
|
||||
traits.set_visual_min_temperature(25.0);
|
||||
traits.set_visual_max_temperature(100.0);
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import base64
|
||||
import logging
|
||||
|
||||
from esphome import automation
|
||||
from esphome.automation import Condition
|
||||
@@ -9,88 +8,53 @@ import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_CAPTURE_RESPONSE,
|
||||
CONF_DATA,
|
||||
CONF_DATA_TEMPLATE,
|
||||
CONF_EVENT,
|
||||
CONF_ID,
|
||||
CONF_KEY,
|
||||
CONF_MAX_CONNECTIONS,
|
||||
CONF_ON_CLIENT_CONNECTED,
|
||||
CONF_ON_CLIENT_DISCONNECTED,
|
||||
CONF_ON_ERROR,
|
||||
CONF_ON_SUCCESS,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_REBOOT_TIMEOUT,
|
||||
CONF_RESPONSE_TEMPLATE,
|
||||
CONF_SERVICE,
|
||||
CONF_SERVICES,
|
||||
CONF_TAG,
|
||||
CONF_THEN,
|
||||
CONF_TRIGGER_ID,
|
||||
CONF_VARIABLES,
|
||||
)
|
||||
from esphome.core import CORE, ID, CoroPriority, EsphomeError, coroutine_with_priority
|
||||
from esphome.cpp_generator import MockObj, TemplateArgsType
|
||||
from esphome.types import ConfigFragmentType, ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from esphome.core import CORE, coroutine_with_priority
|
||||
|
||||
DOMAIN = "api"
|
||||
DEPENDENCIES = ["network"]
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
|
||||
|
||||
def AUTO_LOAD(config: ConfigType) -> list[str]:
|
||||
"""Conditionally auto-load json only when capture_response is used."""
|
||||
base = ["socket"]
|
||||
|
||||
# Check if any homeassistant.action/homeassistant.service has capture_response: true
|
||||
# This flag is set during config validation in _validate_response_config
|
||||
if not config or CORE.data.get(DOMAIN, {}).get(CONF_CAPTURE_RESPONSE, False):
|
||||
return base + ["json"]
|
||||
|
||||
return base
|
||||
|
||||
AUTO_LOAD = ["socket"]
|
||||
CODEOWNERS = ["@OttoWinter"]
|
||||
|
||||
api_ns = cg.esphome_ns.namespace("api")
|
||||
APIServer = api_ns.class_("APIServer", cg.Component, cg.Controller)
|
||||
HomeAssistantServiceCallAction = api_ns.class_(
|
||||
"HomeAssistantServiceCallAction", automation.Action
|
||||
)
|
||||
ActionResponse = api_ns.class_("ActionResponse")
|
||||
HomeAssistantActionResponseTrigger = api_ns.class_(
|
||||
"HomeAssistantActionResponseTrigger", automation.Trigger
|
||||
)
|
||||
APIConnectedCondition = api_ns.class_("APIConnectedCondition", Condition)
|
||||
APIRespondAction = api_ns.class_("APIRespondAction", automation.Action)
|
||||
APIUnregisterServiceCallAction = api_ns.class_(
|
||||
"APIUnregisterServiceCallAction", automation.Action
|
||||
)
|
||||
|
||||
UserServiceTrigger = api_ns.class_("UserServiceTrigger", automation.Trigger)
|
||||
ListEntitiesServicesArgument = api_ns.class_("ListEntitiesServicesArgument")
|
||||
SERVICE_ARG_NATIVE_TYPES: dict[str, MockObj] = {
|
||||
"bool": cg.bool_,
|
||||
SERVICE_ARG_NATIVE_TYPES = {
|
||||
"bool": bool,
|
||||
"int": cg.int32,
|
||||
"float": cg.float_,
|
||||
"float": float,
|
||||
"string": cg.std_string,
|
||||
"bool[]": cg.FixedVector.template(cg.bool_).operator("const").operator("ref"),
|
||||
"int[]": cg.FixedVector.template(cg.int32).operator("const").operator("ref"),
|
||||
"float[]": cg.FixedVector.template(cg.float_).operator("const").operator("ref"),
|
||||
"string[]": cg.FixedVector.template(cg.std_string)
|
||||
.operator("const")
|
||||
.operator("ref"),
|
||||
"bool[]": cg.std_vector.template(bool),
|
||||
"int[]": cg.std_vector.template(cg.int32),
|
||||
"float[]": cg.std_vector.template(float),
|
||||
"string[]": cg.std_vector.template(cg.std_string),
|
||||
}
|
||||
CONF_ENCRYPTION = "encryption"
|
||||
CONF_BATCH_DELAY = "batch_delay"
|
||||
CONF_CUSTOM_SERVICES = "custom_services"
|
||||
CONF_HOMEASSISTANT_SERVICES = "homeassistant_services"
|
||||
CONF_HOMEASSISTANT_STATES = "homeassistant_states"
|
||||
CONF_LISTEN_BACKLOG = "listen_backlog"
|
||||
CONF_MAX_SEND_QUEUE = "max_send_queue"
|
||||
CONF_STATE_SUBSCRIPTION_ONLY = "state_subscription_only"
|
||||
|
||||
|
||||
def validate_encryption_key(value):
|
||||
@@ -107,85 +71,6 @@ def validate_encryption_key(value):
|
||||
return value
|
||||
|
||||
|
||||
CONF_SUPPORTS_RESPONSE = "supports_response"

# Enum values in api::enums namespace
enums_ns = api_ns.namespace("enums")
SUPPORTS_RESPONSE_OPTIONS = {
    "none": enums_ns.SUPPORTS_RESPONSE_NONE,
    "optional": enums_ns.SUPPORTS_RESPONSE_OPTIONAL,
    "only": enums_ns.SUPPORTS_RESPONSE_ONLY,
    "status": enums_ns.SUPPORTS_RESPONSE_STATUS,
}


def _auto_detect_supports_response(config: ConfigType) -> ConfigType:
    """Auto-detect supports_response based on api.respond usage in the action's then block.

    - If api.respond with data found: set to "optional" (unless user explicitly set)
    - If api.respond without data found: set to "status" (unless user explicitly set)
    - If no api.respond found: set to "none" (unless user explicitly set)
    """

    def scan_actions(items: ConfigFragmentType) -> tuple[bool, bool]:
        """Recursively scan actions for api.respond.

        Returns: (found, has_data) tuple - has_data is True if ANY api.respond has data
        """
        found_any = False
        has_data_any = False

        if isinstance(items, list):
            for item in items:
                found, has_data = scan_actions(item)
                if found:
                    found_any = True
                    has_data_any = has_data_any or has_data
        elif isinstance(items, dict):
            # Check if this is an api.respond action
            if "api.respond" in items:
                respond_config = items["api.respond"]
                has_data = isinstance(respond_config, dict) and "data" in respond_config
                return True, has_data
            # Recursively check all values
            for value in items.values():
                found, has_data = scan_actions(value)
                if found:
                    found_any = True
                    has_data_any = has_data_any or has_data

        return found_any, has_data_any

    then = config.get(CONF_THEN, [])
    action_name = config.get(CONF_ACTION)
    found, has_data = scan_actions(then)

    # If user explicitly set supports_response, validate and use that
    if CONF_SUPPORTS_RESPONSE in config:
        user_value = config[CONF_SUPPORTS_RESPONSE]
        # Validate: "only" requires api.respond with data
        if user_value == "only" and not has_data:
            raise cv.Invalid(
                f"Action '{action_name}' has supports_response=only but no api.respond "
                "action with 'data:' was found. Use 'status' for responses without data, "
                "or add 'data:' to your api.respond action."
            )
        return config

    # Auto-detect based on api.respond usage
    if found:
        config[CONF_SUPPORTS_RESPONSE] = "optional" if has_data else "status"
    else:
        config[CONF_SUPPORTS_RESPONSE] = "none"

    return config


def _validate_supports_response(value):
    """Validate supports_response after auto-detection has set the value."""
    return cv.enum(SUPPORTS_RESPONSE_OPTIONS, lower=True)(value)
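# Hand-written illustration (hypothetical action fragment; plain string keys are
# used for brevity instead of the CONF_* constants) of the detection rules above.
_example = {
    "action": "start_wash_cycle",
    "then": [
        {"logger.log": "starting"},
        {"api.respond": {"data": "!lambda return x;"}},
    ],
}
# The inner scan_actions would return (True, True) for _example["then"], so
# _auto_detect_supports_response sets supports_response to "optional"; without
# the "data" key it would pick "status", and with no api.respond at all "none".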
ACTIONS_SCHEMA = automation.validate_automation(
    {
        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(UserServiceTrigger),
@@ -196,20 +81,10 @@ ACTIONS_SCHEMA = automation.validate_automation(
                cv.validate_id_name: cv.one_of(*SERVICE_ARG_NATIVE_TYPES, lower=True),
            }
        ),
        # No default - auto-detected by _auto_detect_supports_response
        cv.Optional(CONF_SUPPORTS_RESPONSE): cv.enum(
            SUPPORTS_RESPONSE_OPTIONS, lower=True
        ),
    },
    cv.All(
        cv.has_exactly_one_key(CONF_SERVICE, CONF_ACTION),
        cv.rename_key(CONF_SERVICE, CONF_ACTION),
        _auto_detect_supports_response,
        # Re-validate supports_response after auto-detection sets it
        cv.Schema(
            {cv.Required(CONF_SUPPORTS_RESPONSE): _validate_supports_response},
            extra=cv.ALLOW_EXTRA,
        ),
    ),
)
|
||||
@@ -226,43 +101,6 @@ def _encryption_schema(config):
    return ENCRYPTION_SCHEMA(config)


def _validate_api_config(config: ConfigType) -> ConfigType:
    """Validate API configuration with mutual exclusivity check and deprecation warning."""
    # Check if both password and encryption are configured
    has_password = CONF_PASSWORD in config and config[CONF_PASSWORD]
    has_encryption = CONF_ENCRYPTION in config

    if has_password and has_encryption:
        raise cv.Invalid(
            "The 'password' and 'encryption' options are mutually exclusive. "
            "The API client only supports one authentication method at a time. "
            "Please remove one of them. "
            "Note: 'password' authentication is deprecated and will be removed in version 2026.1.0. "
            "We strongly recommend using 'encryption' instead for better security."
        )

    # Warn about password deprecation
    if has_password:
        _LOGGER.warning(
            "API 'password' authentication has been deprecated since May 2022 and will be removed in version 2026.1.0. "
            "Please migrate to the 'encryption' configuration. "
            "See https://esphome.io/components/api/#configuration-variables"
        )

    return config
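# Sketch only: a hypothetical config dict that trips the mutual-exclusivity
# check above (CONF_PASSWORD/CONF_ENCRYPTION are this module's constants).
import esphome.config_validation as cv

conflicting = {CONF_PASSWORD: "hunter2", CONF_ENCRYPTION: {"key": "..."}}
try:
    _validate_api_config(conflicting)
except cv.Invalid as err:
    print(err)  # explains that only one authentication method may be configured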
def _consume_api_sockets(config: ConfigType) -> ConfigType:
    """Register socket needs for API component."""
    from esphome.components import socket

    # API needs 1 listening socket + typically 3 concurrent client connections
    # (not max_connections, which is the upper limit rarely reached)
    sockets_needed = 1 + 3
    socket.consume_sockets(sockets_needed, "api")(config)
    return config
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -290,78 +128,27 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.Optional(CONF_ON_CLIENT_DISCONNECTED): automation.validate_automation(
|
||||
single=True
|
||||
),
|
||||
# Connection limits to prevent memory exhaustion on resource-constrained devices
|
||||
# Each connection uses ~500-1000 bytes of RAM plus system resources
|
||||
# Platform defaults based on available RAM and network stack implementation:
|
||||
cv.SplitDefault(
|
||||
CONF_LISTEN_BACKLOG,
|
||||
esp8266=1, # Limited RAM (~40KB free), LWIP raw sockets
|
||||
esp32=4, # More RAM (520KB), BSD sockets
|
||||
rp2040=1, # Limited RAM (264KB), LWIP raw sockets like ESP8266
|
||||
bk72xx=4, # Moderate RAM, BSD-style sockets
|
||||
rtl87xx=4, # Moderate RAM, BSD-style sockets
|
||||
host=4, # Abundant resources
|
||||
ln882x=4, # Moderate RAM
|
||||
): cv.int_range(min=1, max=10),
|
||||
cv.SplitDefault(
|
||||
CONF_MAX_CONNECTIONS,
|
||||
esp8266=4, # ~40KB free RAM, each connection uses ~500-1000 bytes
|
||||
esp32=8, # 520KB RAM available
|
||||
rp2040=4, # 264KB RAM but LWIP constraints
|
||||
bk72xx=8, # Moderate RAM
|
||||
rtl87xx=8, # Moderate RAM
|
||||
host=8, # Abundant resources
|
||||
ln882x=8, # Moderate RAM
|
||||
): cv.int_range(min=1, max=20),
|
||||
# Maximum queued send buffers per connection before dropping connection
|
||||
# Each buffer uses ~8-12 bytes overhead plus actual message size
|
||||
# Platform defaults based on available RAM and typical message rates:
|
||||
cv.SplitDefault(
|
||||
CONF_MAX_SEND_QUEUE,
|
||||
esp8266=5, # Limited RAM, need to fail fast
|
||||
esp32=8, # More RAM, can buffer more
|
||||
rp2040=5, # Limited RAM
|
||||
bk72xx=8, # Moderate RAM
|
||||
nrf52=8, # Moderate RAM
|
||||
rtl87xx=8, # Moderate RAM
|
||||
host=16, # Abundant resources
|
||||
ln882x=8, # Moderate RAM
|
||||
): cv.int_range(min=1, max=64),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA),
|
||||
cv.rename_key(CONF_SERVICES, CONF_ACTIONS),
|
||||
_validate_api_config,
|
||||
_consume_api_sockets,
|
||||
)
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.WEB)
|
||||
async def to_code(config: ConfigType) -> None:
|
||||
@coroutine_with_priority(40.0)
|
||||
async def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
await cg.register_component(var, config)
|
||||
|
||||
# Track controller registration for StaticVector sizing
|
||||
CORE.register_controller()
|
||||
|
||||
cg.add(var.set_port(config[CONF_PORT]))
|
||||
if config[CONF_PASSWORD]:
|
||||
cg.add_define("USE_API_PASSWORD")
|
||||
cg.add(var.set_password(config[CONF_PASSWORD]))
|
||||
cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
|
||||
cg.add(var.set_batch_delay(config[CONF_BATCH_DELAY]))
|
||||
if CONF_LISTEN_BACKLOG in config:
|
||||
cg.add(var.set_listen_backlog(config[CONF_LISTEN_BACKLOG]))
|
||||
if CONF_MAX_CONNECTIONS in config:
|
||||
cg.add(var.set_max_connections(config[CONF_MAX_CONNECTIONS]))
|
||||
cg.add_define("API_MAX_SEND_QUEUE", config[CONF_MAX_SEND_QUEUE])
|
||||
|
||||
# Set USE_API_USER_DEFINED_ACTIONS if any services are enabled
|
||||
# Set USE_API_SERVICES if any services are enabled
|
||||
if config.get(CONF_ACTIONS) or config[CONF_CUSTOM_SERVICES]:
|
||||
cg.add_define("USE_API_USER_DEFINED_ACTIONS")
|
||||
|
||||
# Set USE_API_CUSTOM_SERVICES if external components need dynamic service registration
|
||||
if config[CONF_CUSTOM_SERVICES]:
|
||||
cg.add_define("USE_API_CUSTOM_SERVICES")
|
||||
cg.add_define("USE_API_SERVICES")
|
||||
|
||||
if config[CONF_HOMEASSISTANT_SERVICES]:
|
||||
cg.add_define("USE_API_HOMEASSISTANT_SERVICES")
|
||||
@@ -370,66 +157,21 @@ async def to_code(config: ConfigType) -> None:
|
||||
cg.add_define("USE_API_HOMEASSISTANT_STATES")
|
||||
|
||||
    if actions := config.get(CONF_ACTIONS, []):
        # Collect all triggers first, then register all at once with initializer_list
        triggers: list[cg.Pvariable] = []
        for conf in actions:
            func_args: list[tuple[MockObj, str]] = []
            service_template_args: list[MockObj] = []  # User service argument types

            # Determine supports_response mode
            # cv.enum returns the key with enum_value attribute containing the MockObj
            supports_response_key = conf[CONF_SUPPORTS_RESPONSE]
            supports_response = supports_response_key.enum_value
            is_none = supports_response_key == "none"
            is_optional = supports_response_key == "optional"

            # Add call_id and return_response based on supports_response mode
            # These must match the C++ Trigger template arguments
            # - none: no extra args
            # - status: call_id only (for reporting success/error without data)
            # - only: call_id only (response always expected with data)
            # - optional: call_id + return_response (client decides)
            if not is_none:
                # call_id is present for "optional", "only", and "status"
                func_args.append((cg.uint32, "call_id"))
                # return_response only present for "optional"
                if is_optional:
                    func_args.append((cg.bool_, "return_response"))

            service_arg_names: list[str] = []
            template_args = []
            func_args = []
            service_arg_names = []
            for name, var_ in conf[CONF_VARIABLES].items():
                native = SERVICE_ARG_NATIVE_TYPES[var_]
                service_template_args.append(native)
                template_args.append(native)
                func_args.append((native, name))
                service_arg_names.append(name)
            # Template args: supports_response mode, then user service arg types
            templ = cg.TemplateArguments(supports_response, *service_template_args)
            templ = cg.TemplateArguments(*template_args)
            trigger = cg.new_Pvariable(
                conf[CONF_TRIGGER_ID],
                templ,
                conf[CONF_ACTION],
                service_arg_names,
                conf[CONF_TRIGGER_ID], templ, conf[CONF_ACTION], service_arg_names
            )
            triggers.append(trigger)
            auto = await automation.build_automation(trigger, func_args, conf)

            # For non-none response modes, automatically append unregister action
            # This ensures the call is unregistered after all actions complete (including async ones)
            if not is_none:
                arg_types = [arg[0] for arg in func_args]
                action_templ = cg.TemplateArguments(*arg_types)
                unregister_id = ID(
                    f"{conf[CONF_TRIGGER_ID]}__unregister",
                    is_declaration=True,
                    type=APIUnregisterServiceCallAction.template(action_templ),
                )
                unregister_action = cg.new_Pvariable(
                    unregister_id,
                    var,
                )
                cg.add(auto.add_actions([unregister_action]))
        # Register all services at once - single allocation, no reallocations
        cg.add(var.initialize_user_services(triggers))
            cg.add(var.register_user_service(trigger))
            await automation.build_automation(trigger, func_args, conf)
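# Worked example (hypothetical action named "start_wash_cycle" with one int
# variable) of the argument wiring above, assuming supports_response resolved
# to "optional":
expected_func_args = [
    (cg.uint32, "call_id"),          # added because the mode is not "none"
    (cg.bool_, "return_response"),   # added only for "optional"
    (cg.int32, "duration"),          # from the action's variables: block
]
expected_template_args = [enums_ns.SUPPORTS_RESPONSE_OPTIONAL, cg.int32]  # mode first, then arg types
# The generated automation lambda therefore receives call_id, return_response
# and duration, matching the C++ UserServiceTrigger template arguments.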
|
||||
if CONF_ON_CLIENT_CONNECTED in config:
|
||||
cg.add_define("USE_API_CLIENT_CONNECTED_TRIGGER")
|
||||
@@ -451,7 +193,6 @@ async def to_code(config: ConfigType) -> None:
|
||||
if key := encryption_config.get(CONF_KEY):
|
||||
decoded = base64.b64decode(key)
|
||||
cg.add(var.set_noise_psk(list(decoded)))
|
||||
cg.add_define("USE_API_NOISE_PSK_FROM_YAML")
|
||||
else:
|
||||
# No key provided, but encryption desired
|
||||
# This will allow a plaintext client to provide a noise key,
|
||||
@@ -471,29 +212,6 @@ async def to_code(config: ConfigType) -> None:
|
||||
KEY_VALUE_SCHEMA = cv.Schema({cv.string: cv.templatable(cv.string_strict)})
|
||||
|
||||
|
||||
def _validate_response_config(config: ConfigType) -> ConfigType:
    # Validate dependencies:
    # - response_template requires capture_response: true
    # - capture_response: true requires on_success
    if CONF_RESPONSE_TEMPLATE in config and not config[CONF_CAPTURE_RESPONSE]:
        raise cv.Invalid(
            f"`{CONF_RESPONSE_TEMPLATE}` requires `{CONF_CAPTURE_RESPONSE}: true` to be set.",
            path=[CONF_RESPONSE_TEMPLATE],
        )

    if config[CONF_CAPTURE_RESPONSE] and CONF_ON_SUCCESS not in config:
        raise cv.Invalid(
            f"`{CONF_CAPTURE_RESPONSE}: true` requires `{CONF_ON_SUCCESS}` to be set.",
            path=[CONF_CAPTURE_RESPONSE],
        )

    # Track if any action uses capture_response for AUTO_LOAD
    if config[CONF_CAPTURE_RESPONSE]:
        CORE.data.setdefault(DOMAIN, {})[CONF_CAPTURE_RESPONSE] = True

    return config
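# Quick illustration of the dependency rules above (hypothetical minimal
# fragments; plain string keys stand in for the CONF_* constants):
needs_on_success = {"capture_response": True}                                  # invalid: no on_success
needs_capture = {"response_template": "{{ x }}", "capture_response": False}    # invalid: template without capture
ok = {"capture_response": True, "on_success": {"then": []}}                    # valid, and flags json for AUTO_LOAD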
HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
    cv.Schema(
        {
@@ -509,15 +227,10 @@ HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
            cv.Optional(CONF_VARIABLES, default={}): cv.Schema(
                {cv.string: cv.returning_lambda}
            ),
            cv.Optional(CONF_RESPONSE_TEMPLATE): cv.templatable(cv.string),
            cv.Optional(CONF_CAPTURE_RESPONSE, default=False): cv.boolean,
            cv.Optional(CONF_ON_SUCCESS): automation.validate_automation(single=True),
            cv.Optional(CONF_ON_ERROR): automation.validate_automation(single=True),
        }
    ),
    cv.has_exactly_one_key(CONF_SERVICE, CONF_ACTION),
    cv.rename_key(CONF_SERVICE, CONF_ACTION),
    _validate_response_config,
)
|
||||
|
||||
@@ -531,67 +244,21 @@ HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
|
||||
HomeAssistantServiceCallAction,
|
||||
HOMEASSISTANT_ACTION_ACTION_SCHEMA,
|
||||
)
|
||||
async def homeassistant_service_to_code(
|
||||
config: ConfigType,
|
||||
action_id: ID,
|
||||
template_arg: cg.TemplateArguments,
|
||||
args: TemplateArgsType,
|
||||
):
|
||||
async def homeassistant_service_to_code(config, action_id, template_arg, args):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_SERVICES")
|
||||
serv = await cg.get_variable(config[CONF_ID])
|
||||
var = cg.new_Pvariable(action_id, template_arg, serv, False)
|
||||
templ = await cg.templatable(config[CONF_ACTION], args, None)
|
||||
cg.add(var.set_service(templ))
|
||||
|
||||
# Initialize FixedVectors with exact sizes from config
|
||||
cg.add(var.init_data(len(config[CONF_DATA])))
|
||||
for key, value in config[CONF_DATA].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data(key, templ))
|
||||
|
||||
cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
|
||||
for key, value in config[CONF_DATA_TEMPLATE].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data_template(key, templ))
|
||||
|
||||
cg.add(var.init_variables(len(config[CONF_VARIABLES])))
|
||||
for key, value in config[CONF_VARIABLES].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_variable(key, templ))
|
||||
|
||||
if on_error := config.get(CONF_ON_ERROR):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES")
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES_ERRORS")
|
||||
cg.add(var.set_wants_status())
|
||||
await automation.build_automation(
|
||||
var.get_error_trigger(),
|
||||
[(cg.std_string, "error"), *args],
|
||||
on_error,
|
||||
)
|
||||
|
||||
if on_success := config.get(CONF_ON_SUCCESS):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES")
|
||||
cg.add(var.set_wants_status())
|
||||
if config[CONF_CAPTURE_RESPONSE]:
|
||||
cg.add(var.set_wants_response())
|
||||
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON")
|
||||
await automation.build_automation(
|
||||
var.get_success_trigger_with_response(),
|
||||
[(cg.JsonObjectConst, "response"), *args],
|
||||
on_success,
|
||||
)
|
||||
|
||||
if response_template := config.get(CONF_RESPONSE_TEMPLATE):
|
||||
templ = await cg.templatable(response_template, args, cg.std_string)
|
||||
cg.add(var.set_response_template(templ))
|
||||
|
||||
else:
|
||||
await automation.build_automation(
|
||||
var.get_success_trigger(),
|
||||
args,
|
||||
on_success,
|
||||
)
|
||||
|
||||
return var
|
||||
|
||||
|
||||
@@ -627,23 +294,15 @@ async def homeassistant_event_to_code(config, action_id, template_arg, args):
|
||||
var = cg.new_Pvariable(action_id, template_arg, serv, True)
|
||||
templ = await cg.templatable(config[CONF_EVENT], args, None)
|
||||
cg.add(var.set_service(templ))
|
||||
|
||||
# Initialize FixedVectors with exact sizes from config
|
||||
cg.add(var.init_data(len(config[CONF_DATA])))
|
||||
for key, value in config[CONF_DATA].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data(key, templ))
|
||||
|
||||
cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
|
||||
for key, value in config[CONF_DATA_TEMPLATE].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_data_template(key, templ))
|
||||
|
||||
cg.add(var.init_variables(len(config[CONF_VARIABLES])))
|
||||
for key, value in config[CONF_VARIABLES].items():
|
||||
templ = await cg.templatable(value, args, None)
|
||||
cg.add(var.add_variable(key, templ))
|
||||
|
||||
return var
|
||||
|
||||
|
||||
@@ -662,109 +321,17 @@ HOMEASSISTANT_TAG_SCANNED_ACTION_SCHEMA = cv.maybe_simple_value(
|
||||
HOMEASSISTANT_TAG_SCANNED_ACTION_SCHEMA,
|
||||
)
|
||||
async def homeassistant_tag_scanned_to_code(config, action_id, template_arg, args):
|
||||
cg.add_define("USE_API_HOMEASSISTANT_SERVICES")
|
||||
serv = await cg.get_variable(config[CONF_ID])
|
||||
var = cg.new_Pvariable(action_id, template_arg, serv, True)
|
||||
cg.add(var.set_service("esphome.tag_scanned"))
|
||||
# Initialize FixedVector with exact size (1 data field)
|
||||
cg.add(var.init_data(1))
|
||||
templ = await cg.templatable(config[CONF_TAG], args, cg.std_string)
|
||||
cg.add(var.add_data("tag_id", templ))
|
||||
return var
|
||||
|
||||
|
||||
CONF_SUCCESS = "success"
CONF_ERROR_MESSAGE = "error_message"


def _validate_api_respond_data(config):
    """Set flag during validation so AUTO_LOAD can include json component."""
    if CONF_DATA in config:
        CORE.data.setdefault(DOMAIN, {})[CONF_CAPTURE_RESPONSE] = True
    return config


API_RESPOND_ACTION_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.use_id(APIServer),
            cv.Optional(CONF_SUCCESS, default=True): cv.templatable(cv.boolean),
            cv.Optional(CONF_ERROR_MESSAGE, default=""): cv.templatable(cv.string),
            cv.Optional(CONF_DATA): cv.lambda_,
        }
    ),
    _validate_api_respond_data,
)
|
||||
|
||||
@automation.register_action(
    "api.respond",
    APIRespondAction,
    API_RESPOND_ACTION_SCHEMA,
)
async def api_respond_to_code(
    config: ConfigType,
    action_id: ID,
    template_arg: cg.TemplateArguments,
    args: TemplateArgsType,
) -> MockObj:
    # Validate that api.respond is used inside an API action context.
    # We can't easily validate this at config time since the schema validation
    # doesn't have access to the parent action context. Validating here in to_code
    # is still much better than a cryptic C++ compile error.
    has_call_id = any(name == "call_id" for _, name in args)
    if not has_call_id:
        raise EsphomeError(
            "api.respond can only be used inside an API action's 'then:' block. "
            "The 'call_id' variable is required to send a response."
        )

    cg.add_define("USE_API_USER_DEFINED_ACTION_RESPONSES")
    serv = await cg.get_variable(config[CONF_ID])
    var = cg.new_Pvariable(action_id, template_arg, serv)

    # Check if we're in optional mode (has return_response arg)
    is_optional = any(name == "return_response" for _, name in args)
    if is_optional:
        cg.add(var.set_is_optional_mode(True))

    templ = await cg.templatable(config[CONF_SUCCESS], args, cg.bool_)
    cg.add(var.set_success(templ))

    templ = await cg.templatable(config[CONF_ERROR_MESSAGE], args, cg.std_string)
    cg.add(var.set_error_message(templ))

    if CONF_DATA in config:
        cg.add_define("USE_API_USER_DEFINED_ACTION_RESPONSES_JSON")
        # Lambda populates the JsonObject root - no return value needed
        lambda_ = await cg.process_lambda(
            config[CONF_DATA],
            args + [(cg.JsonObject, "root")],
            return_type=cg.void,
        )
        cg.add(var.set_data(lambda_))

    return var
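# Illustration only (hypothetical args lists): the guard above just looks for a
# "call_id" name among the (type, name) pairs that build_automation passes in.
args_inside_api_action = [(cg.uint32, "call_id"), (cg.bool_, "return_response")]
args_elsewhere = [(cg.std_string, "x")]
assert any(name == "call_id" for _, name in args_inside_api_action)
assert not any(name == "call_id" for _, name in args_elsewhere)  # -> EsphomeError path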
|
||||
|
||||
API_CONNECTED_CONDITION_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.use_id(APIServer),
|
||||
cv.Optional(CONF_STATE_SUBSCRIPTION_ONLY, default=False): cv.templatable(
|
||||
cv.boolean
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@automation.register_condition(
|
||||
"api.connected", APIConnectedCondition, API_CONNECTED_CONDITION_SCHEMA
|
||||
)
|
||||
@automation.register_condition("api.connected", APIConnectedCondition, {})
|
||||
async def api_connected_to_code(config, condition_id, template_arg, args):
|
||||
var = cg.new_Pvariable(condition_id, template_arg)
|
||||
templ = await cg.templatable(config[CONF_STATE_SUBSCRIPTION_ONLY], args, cg.bool_)
|
||||
cg.add(var.set_state_subscription_only(templ))
|
||||
return var
|
||||
return cg.new_Pvariable(condition_id, template_arg)
|
||||
|
||||
|
||||
def FILTER_SOURCE_FILES() -> list[str]:
|
||||
|
||||
@@ -7,7 +7,7 @@ service APIConnection {
|
||||
option (needs_setup_connection) = false;
|
||||
option (needs_authentication) = false;
|
||||
}
|
||||
rpc authenticate (AuthenticationRequest) returns (AuthenticationResponse) {
|
||||
rpc connect (ConnectRequest) returns (ConnectResponse) {
|
||||
option (needs_setup_connection) = false;
|
||||
option (needs_authentication) = false;
|
||||
}
|
||||
@@ -27,6 +27,9 @@ service APIConnection {
|
||||
rpc subscribe_logs (SubscribeLogsRequest) returns (void) {}
|
||||
rpc subscribe_homeassistant_services (SubscribeHomeassistantServicesRequest) returns (void) {}
|
||||
rpc subscribe_home_assistant_states (SubscribeHomeAssistantStatesRequest) returns (void) {}
|
||||
rpc get_time (GetTimeRequest) returns (GetTimeResponse) {
|
||||
option (needs_authentication) = false;
|
||||
}
|
||||
rpc execute_service (ExecuteServiceRequest) returns (void) {}
|
||||
rpc noise_encryption_set_key (NoiseEncryptionSetKeyRequest) returns (NoiseEncryptionSetKeyResponse) {}
|
||||
|
||||
@@ -66,9 +69,6 @@ service APIConnection {
|
||||
rpc voice_assistant_set_configuration(VoiceAssistantSetConfiguration) returns (void) {}
|
||||
|
||||
rpc alarm_control_panel_command (AlarmControlPanelCommandRequest) returns (void) {}
|
||||
|
||||
rpc zwave_proxy_frame(ZWaveProxyFrame) returns (void) {}
|
||||
rpc zwave_proxy_request(ZWaveProxyRequest) returns (void) {}
|
||||
}
|
||||
|
||||
|
||||
@@ -102,7 +102,7 @@ message HelloRequest {
|
||||
// For example "Home Assistant"
|
||||
// Not strictly necessary to send but nice for debugging
|
||||
// purposes.
|
||||
string client_info = 1 [(pointer_to_buffer) = true];
|
||||
string client_info = 1;
|
||||
uint32 api_version_major = 2;
|
||||
uint32 api_version_minor = 3;
|
||||
}
|
||||
@@ -132,23 +132,21 @@ message HelloResponse {
|
||||
|
||||
// Message sent at the beginning of each connection to authenticate the client
|
||||
// Can only be sent by the client and only at the beginning of the connection
|
||||
message AuthenticationRequest {
|
||||
message ConnectRequest {
|
||||
option (id) = 3;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_PASSWORD";
|
||||
|
||||
// The password to log in with
|
||||
string password = 1 [(pointer_to_buffer) = true];
|
||||
string password = 1;
|
||||
}
|
||||
|
||||
// Confirmation of successful connection. After this the connection is available for all traffic.
|
||||
// Can only be sent by the server and only at the beginning of the connection
|
||||
message AuthenticationResponse {
|
||||
message ConnectResponse {
|
||||
option (id) = 4;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_PASSWORD";
|
||||
|
||||
bool invalid_password = 1;
|
||||
}
|
||||
@@ -252,15 +250,11 @@ message DeviceInfoResponse {
|
||||
// Supports receiving and saving api encryption key
|
||||
bool api_encryption_supported = 19 [(field_ifdef) = "USE_API_NOISE"];
|
||||
|
||||
repeated DeviceInfo devices = 20 [(field_ifdef) = "USE_DEVICES", (fixed_array_size_define) = "ESPHOME_DEVICE_COUNT"];
|
||||
repeated AreaInfo areas = 21 [(field_ifdef) = "USE_AREAS", (fixed_array_size_define) = "ESPHOME_AREA_COUNT"];
|
||||
repeated DeviceInfo devices = 20 [(field_ifdef) = "USE_DEVICES"];
|
||||
repeated AreaInfo areas = 21 [(field_ifdef) = "USE_AREAS"];
|
||||
|
||||
// Top-level area info to phase out suggested_area
|
||||
AreaInfo area = 22 [(field_ifdef) = "USE_AREAS"];
|
||||
|
||||
// Indicates if Z-Wave proxy support is available and features supported
|
||||
uint32 zwave_proxy_feature_flags = 23 [(field_ifdef) = "USE_ZWAVE_PROXY"];
|
||||
uint32 zwave_home_id = 24 [(field_ifdef) = "USE_ZWAVE_PROXY"];
|
||||
}
|
||||
|
||||
message ListEntitiesRequest {
|
||||
@@ -425,7 +419,7 @@ message ListEntitiesFanResponse {
|
||||
bool disabled_by_default = 9;
|
||||
string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"];
|
||||
EntityCategory entity_category = 11;
|
||||
repeated string supported_preset_modes = 12 [(container_pointer_no_template) = "std::vector<const char *>"];
|
||||
repeated string supported_preset_modes = 12;
|
||||
uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
// Deprecated in API version 1.6 - only used in deprecated fields
|
||||
@@ -477,7 +471,7 @@ message FanCommandRequest {
|
||||
bool has_speed_level = 10;
|
||||
int32 speed_level = 11;
|
||||
bool has_preset_mode = 12;
|
||||
string preset_mode = 13 [(pointer_to_buffer) = true];
|
||||
string preset_mode = 13;
|
||||
uint32 device_id = 14 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
|
||||
@@ -506,7 +500,7 @@ message ListEntitiesLightResponse {
|
||||
string name = 3;
|
||||
reserved 4; // Deprecated: was string unique_id
|
||||
|
||||
repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"];
|
||||
repeated ColorMode supported_color_modes = 12;
|
||||
// next four supports_* are for legacy clients, newer clients should use color modes
|
||||
// Deprecated in API version 1.6
|
||||
bool legacy_supports_brightness = 5 [deprecated=true];
|
||||
@@ -518,7 +512,7 @@ message ListEntitiesLightResponse {
|
||||
bool legacy_supports_color_temperature = 8 [deprecated=true];
|
||||
float min_mireds = 9;
|
||||
float max_mireds = 10;
|
||||
repeated string effects = 11 [(container_pointer_no_template) = "FixedVector<const char *>"];
|
||||
repeated string effects = 11;
|
||||
bool disabled_by_default = 13;
|
||||
string icon = 14 [(field_ifdef) = "USE_ENTITY_ICON"];
|
||||
EntityCategory entity_category = 15;
|
||||
@@ -579,7 +573,7 @@ message LightCommandRequest {
|
||||
bool has_flash_length = 16;
|
||||
uint32 flash_length = 17;
|
||||
bool has_effect = 18;
|
||||
string effect = 19 [(pointer_to_buffer) = true];
|
||||
string effect = 19;
|
||||
uint32 device_id = 28 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
|
||||
@@ -589,7 +583,6 @@ enum SensorStateClass {
|
||||
STATE_CLASS_MEASUREMENT = 1;
|
||||
STATE_CLASS_TOTAL_INCREASING = 2;
|
||||
STATE_CLASS_TOTAL = 3;
|
||||
STATE_CLASS_MEASUREMENT_ANGLE = 4;
|
||||
}
|
||||
|
||||
// Deprecated in API version 1.5
|
||||
@@ -747,7 +740,7 @@ message NoiseEncryptionSetKeyRequest {
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (ifdef) = "USE_API_NOISE";
|
||||
|
||||
bytes key = 1 [(pointer_to_buffer) = true];
|
||||
bytes key = 1;
|
||||
}
|
||||
|
||||
message NoiseEncryptionSetKeyResponse {
|
||||
@@ -770,33 +763,17 @@ message HomeassistantServiceMap {
|
||||
string value = 2 [(no_zero_copy) = true];
|
||||
}
|
||||
|
||||
message HomeassistantActionRequest {
|
||||
message HomeassistantServiceResponse {
|
||||
option (id) = 35;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_HOMEASSISTANT_SERVICES";
|
||||
|
||||
string service = 1;
|
||||
repeated HomeassistantServiceMap data = 2 [(fixed_vector) = true];
|
||||
repeated HomeassistantServiceMap data_template = 3 [(fixed_vector) = true];
|
||||
repeated HomeassistantServiceMap variables = 4 [(fixed_vector) = true];
|
||||
repeated HomeassistantServiceMap data = 2;
|
||||
repeated HomeassistantServiceMap data_template = 3;
|
||||
repeated HomeassistantServiceMap variables = 4;
|
||||
bool is_event = 5;
|
||||
uint32 call_id = 6 [(field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES"];
|
||||
bool wants_response = 7 [(field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
|
||||
string response_template = 8 [(no_zero_copy) = true, (field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
|
||||
}
|
||||
|
||||
// Message sent by Home Assistant to ESPHome with service call response data
|
||||
message HomeassistantActionResponse {
|
||||
option (id) = 130;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES";
|
||||
|
||||
uint32 call_id = 1; // Matches the call_id from HomeassistantActionRequest
|
||||
bool success = 2; // Whether the service call succeeded
|
||||
string error_message = 3; // Error message if success = false
|
||||
bytes response_data = 4 [(pointer_to_buffer) = true, (field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
|
||||
}
|
||||
|
||||
// ==================== IMPORT HOME ASSISTANT STATES ====================
|
||||
@@ -824,24 +801,23 @@ message HomeAssistantStateResponse {
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_HOMEASSISTANT_STATES";
|
||||
|
||||
string entity_id = 1 [(pointer_to_buffer) = true];
|
||||
string state = 2 [(pointer_to_buffer) = true];
|
||||
string attribute = 3 [(pointer_to_buffer) = true];
|
||||
string entity_id = 1;
|
||||
string state = 2;
|
||||
string attribute = 3;
|
||||
}
|
||||
|
||||
// ==================== IMPORT TIME ====================
|
||||
message GetTimeRequest {
|
||||
option (id) = 36;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (source) = SOURCE_BOTH;
|
||||
}
|
||||
|
||||
message GetTimeResponse {
|
||||
option (id) = 37;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (source) = SOURCE_BOTH;
|
||||
option (no_delay) = true;
|
||||
|
||||
fixed32 epoch_seconds = 1;
|
||||
string timezone = 2 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
// ==================== USER-DEFINES SERVICES ====================
|
||||
@@ -855,65 +831,41 @@ enum ServiceArgType {
|
||||
SERVICE_ARG_TYPE_FLOAT_ARRAY = 6;
|
||||
SERVICE_ARG_TYPE_STRING_ARRAY = 7;
|
||||
}
|
||||
enum SupportsResponseType {
|
||||
SUPPORTS_RESPONSE_NONE = 0;
|
||||
SUPPORTS_RESPONSE_OPTIONAL = 1;
|
||||
SUPPORTS_RESPONSE_ONLY = 2;
|
||||
// Status-only response - reports success/error without data payload
|
||||
// Value is higher to avoid conflicts with future Home Assistant values
|
||||
SUPPORTS_RESPONSE_STATUS = 100;
|
||||
}
|
||||
message ListEntitiesServicesArgument {
|
||||
option (ifdef) = "USE_API_USER_DEFINED_ACTIONS";
|
||||
option (ifdef) = "USE_API_SERVICES";
|
||||
string name = 1;
|
||||
ServiceArgType type = 2;
|
||||
}
|
||||
message ListEntitiesServicesResponse {
|
||||
option (id) = 41;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (ifdef) = "USE_API_USER_DEFINED_ACTIONS";
|
||||
option (ifdef) = "USE_API_SERVICES";
|
||||
|
||||
string name = 1;
|
||||
fixed32 key = 2;
|
||||
repeated ListEntitiesServicesArgument args = 3 [(fixed_vector) = true];
|
||||
SupportsResponseType supports_response = 4;
|
||||
repeated ListEntitiesServicesArgument args = 3;
|
||||
}
|
||||
message ExecuteServiceArgument {
|
||||
option (ifdef) = "USE_API_USER_DEFINED_ACTIONS";
|
||||
option (ifdef) = "USE_API_SERVICES";
|
||||
bool bool_ = 1;
|
||||
int32 legacy_int = 2;
|
||||
float float_ = 3;
|
||||
string string_ = 4;
|
||||
// ESPHome 1.14 (api v1.3) make int a signed value
|
||||
sint32 int_ = 5;
|
||||
repeated bool bool_array = 6 [packed=false, (fixed_vector) = true];
|
||||
repeated sint32 int_array = 7 [packed=false, (fixed_vector) = true];
|
||||
repeated float float_array = 8 [packed=false, (fixed_vector) = true];
|
||||
repeated string string_array = 9 [(fixed_vector) = true];
|
||||
repeated bool bool_array = 6 [packed=false];
|
||||
repeated sint32 int_array = 7 [packed=false];
|
||||
repeated float float_array = 8 [packed=false];
|
||||
repeated string string_array = 9;
|
||||
}
|
||||
message ExecuteServiceRequest {
|
||||
option (id) = 42;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_USER_DEFINED_ACTIONS";
|
||||
option (ifdef) = "USE_API_SERVICES";
|
||||
|
||||
fixed32 key = 1;
|
||||
repeated ExecuteServiceArgument args = 2 [(fixed_vector) = true];
|
||||
uint32 call_id = 3 [(field_ifdef) = "USE_API_USER_DEFINED_ACTION_RESPONSES"];
|
||||
bool return_response = 4 [(field_ifdef) = "USE_API_USER_DEFINED_ACTION_RESPONSES"];
|
||||
}
|
||||
|
||||
// Message sent by ESPHome to Home Assistant with service execution response data
|
||||
message ExecuteServiceResponse {
|
||||
option (id) = 131;
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (no_delay) = true;
|
||||
option (ifdef) = "USE_API_USER_DEFINED_ACTION_RESPONSES";
|
||||
|
||||
uint32 call_id = 1; // Matches the call_id from ExecuteServiceRequest
|
||||
bool success = 2; // Whether the service execution succeeded
|
||||
string error_message = 3; // Error message if success = false
|
||||
bytes response_data = 4 [(pointer_to_buffer) = true, (field_ifdef) = "USE_API_USER_DEFINED_ACTION_RESPONSES_JSON"];
|
||||
repeated ExecuteServiceArgument args = 2;
|
||||
}
|
||||
|
||||
// ==================== CAMERA ====================
|
||||
@@ -1012,9 +964,9 @@ message ListEntitiesClimateResponse {
|
||||
string name = 3;
|
||||
reserved 4; // Deprecated: was string unique_id
|
||||
|
||||
bool supports_current_temperature = 5; // Deprecated: use feature_flags
|
||||
bool supports_two_point_target_temperature = 6; // Deprecated: use feature_flags
|
||||
repeated ClimateMode supported_modes = 7 [(container_pointer_no_template) = "climate::ClimateModeMask"];
|
||||
bool supports_current_temperature = 5;
|
||||
bool supports_two_point_target_temperature = 6;
|
||||
repeated ClimateMode supported_modes = 7;
|
||||
float visual_min_temperature = 8;
|
||||
float visual_max_temperature = 9;
|
||||
float visual_target_temperature_step = 10;
|
||||
@@ -1022,22 +974,21 @@ message ListEntitiesClimateResponse {
|
||||
// is if CLIMATE_PRESET_AWAY exists is supported_presets
|
||||
// Deprecated in API version 1.5
|
||||
bool legacy_supports_away = 11 [deprecated=true];
|
||||
bool supports_action = 12; // Deprecated: use feature_flags
|
||||
repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer_no_template) = "climate::ClimateFanModeMask"];
|
||||
repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer_no_template) = "climate::ClimateSwingModeMask"];
|
||||
repeated string supported_custom_fan_modes = 15 [(container_pointer_no_template) = "std::vector<const char *>"];
|
||||
repeated ClimatePreset supported_presets = 16 [(container_pointer_no_template) = "climate::ClimatePresetMask"];
|
||||
repeated string supported_custom_presets = 17 [(container_pointer_no_template) = "std::vector<const char *>"];
|
||||
bool supports_action = 12;
|
||||
repeated ClimateFanMode supported_fan_modes = 13;
|
||||
repeated ClimateSwingMode supported_swing_modes = 14;
|
||||
repeated string supported_custom_fan_modes = 15;
|
||||
repeated ClimatePreset supported_presets = 16;
|
||||
repeated string supported_custom_presets = 17;
|
||||
bool disabled_by_default = 18;
|
||||
string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"];
|
||||
EntityCategory entity_category = 20;
|
||||
float visual_current_temperature_step = 21;
|
||||
bool supports_current_humidity = 22; // Deprecated: use feature_flags
|
||||
bool supports_target_humidity = 23; // Deprecated: use feature_flags
|
||||
bool supports_current_humidity = 22;
|
||||
bool supports_target_humidity = 23;
|
||||
float visual_min_humidity = 24;
|
||||
float visual_max_humidity = 25;
|
||||
uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
|
||||
uint32 feature_flags = 27;
|
||||
}
|
||||
message ClimateStateResponse {
|
||||
option (id) = 47;
|
||||
@@ -1091,95 +1042,16 @@ message ClimateCommandRequest {
|
||||
bool has_swing_mode = 14;
|
||||
ClimateSwingMode swing_mode = 15;
|
||||
bool has_custom_fan_mode = 16;
|
||||
string custom_fan_mode = 17 [(pointer_to_buffer) = true];
|
||||
string custom_fan_mode = 17;
|
||||
bool has_preset = 18;
|
||||
ClimatePreset preset = 19;
|
||||
bool has_custom_preset = 20;
|
||||
string custom_preset = 21 [(pointer_to_buffer) = true];
|
||||
string custom_preset = 21;
|
||||
bool has_target_humidity = 22;
|
||||
float target_humidity = 23;
|
||||
uint32 device_id = 24 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
|
||||
// ==================== WATER_HEATER ====================
|
||||
enum WaterHeaterMode {
|
||||
WATER_HEATER_MODE_OFF = 0;
|
||||
WATER_HEATER_MODE_ECO = 1;
|
||||
WATER_HEATER_MODE_ELECTRIC = 2;
|
||||
WATER_HEATER_MODE_PERFORMANCE = 3;
|
||||
WATER_HEATER_MODE_HIGH_DEMAND = 4;
|
||||
WATER_HEATER_MODE_HEAT_PUMP = 5;
|
||||
WATER_HEATER_MODE_GAS = 6;
|
||||
}
|
||||
|
||||
message ListEntitiesWaterHeaterResponse {
|
||||
option (id) = 132;
|
||||
option (base_class) = "InfoResponseProtoMessage";
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (ifdef) = "USE_WATER_HEATER";
|
||||
|
||||
string object_id = 1;
|
||||
fixed32 key = 2;
|
||||
string name = 3;
|
||||
string icon = 4 [(field_ifdef) = "USE_ENTITY_ICON"];
|
||||
bool disabled_by_default = 5;
|
||||
EntityCategory entity_category = 6;
|
||||
uint32 device_id = 7 [(field_ifdef) = "USE_DEVICES"];
|
||||
float min_temperature = 8;
|
||||
float max_temperature = 9;
|
||||
float target_temperature_step = 10;
|
||||
repeated WaterHeaterMode supported_modes = 11 [(container_pointer_no_template) = "water_heater::WaterHeaterModeMask"];
|
||||
// Bitmask of WaterHeaterFeature flags
|
||||
uint32 supported_features = 12;
|
||||
}
|
||||
|
||||
message WaterHeaterStateResponse {
|
||||
option (id) = 133;
|
||||
option (base_class) = "StateResponseProtoMessage";
|
||||
option (source) = SOURCE_SERVER;
|
||||
option (ifdef) = "USE_WATER_HEATER";
|
||||
option (no_delay) = true;
|
||||
|
||||
fixed32 key = 1;
|
||||
float current_temperature = 2;
|
||||
float target_temperature = 3;
|
||||
WaterHeaterMode mode = 4;
|
||||
uint32 device_id = 5 [(field_ifdef) = "USE_DEVICES"];
|
||||
// Bitmask of current state flags (bit 0 = away, bit 1 = on)
|
||||
uint32 state = 6;
|
||||
float target_temperature_low = 7;
|
||||
float target_temperature_high = 8;
|
||||
}
|
||||
|
||||
// Bitmask for WaterHeaterCommandRequest.has_fields
|
||||
enum WaterHeaterCommandHasField {
|
||||
WATER_HEATER_COMMAND_HAS_NONE = 0;
|
||||
WATER_HEATER_COMMAND_HAS_MODE = 1;
|
||||
WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE = 2;
|
||||
WATER_HEATER_COMMAND_HAS_STATE = 4;
|
||||
WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_LOW = 8;
|
||||
WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH = 16;
|
||||
}
|
||||
|
||||
message WaterHeaterCommandRequest {
|
||||
option (id) = 134;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (ifdef) = "USE_WATER_HEATER";
|
||||
option (no_delay) = true;
|
||||
option (base_class) = "CommandProtoMessage";
|
||||
|
||||
fixed32 key = 1;
|
||||
// Bitmask of which fields are set (see WaterHeaterCommandHasField)
|
||||
uint32 has_fields = 2;
|
||||
WaterHeaterMode mode = 3;
|
||||
float target_temperature = 4;
|
||||
uint32 device_id = 5 [(field_ifdef) = "USE_DEVICES"];
|
||||
// State flags bitmask (bit 0 = away, bit 1 = on)
|
||||
uint32 state = 6;
|
||||
float target_temperature_low = 7;
|
||||
float target_temperature_high = 8;
|
||||
}
|
||||
|
||||
// ==================== NUMBER ====================
|
||||
enum NumberMode {
|
||||
NUMBER_MODE_AUTO = 0;
|
||||
@@ -1247,7 +1119,7 @@ message ListEntitiesSelectResponse {
|
||||
reserved 4; // Deprecated: was string unique_id
|
||||
|
||||
string icon = 5 [(field_ifdef) = "USE_ENTITY_ICON"];
|
||||
repeated string options = 6 [(container_pointer_no_template) = "FixedVector<const char *>"];
|
||||
repeated string options = 6;
|
||||
bool disabled_by_default = 7;
|
||||
EntityCategory entity_category = 8;
|
||||
uint32 device_id = 9 [(field_ifdef) = "USE_DEVICES"];
|
||||
@@ -1274,7 +1146,7 @@ message SelectCommandRequest {
|
||||
option (base_class) = "CommandProtoMessage";
|
||||
|
||||
fixed32 key = 1;
|
||||
string state = 2 [(pointer_to_buffer) = true];
|
||||
string state = 2;
|
||||
uint32 device_id = 3 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
|
||||
@@ -1425,9 +1297,6 @@ enum MediaPlayerState {
|
||||
MEDIA_PLAYER_STATE_IDLE = 1;
|
||||
MEDIA_PLAYER_STATE_PLAYING = 2;
|
||||
MEDIA_PLAYER_STATE_PAUSED = 3;
|
||||
MEDIA_PLAYER_STATE_ANNOUNCING = 4;
|
||||
MEDIA_PLAYER_STATE_OFF = 5;
|
||||
MEDIA_PLAYER_STATE_ON = 6;
|
||||
}
|
||||
enum MediaPlayerCommand {
|
||||
MEDIA_PLAYER_COMMAND_PLAY = 0;
|
||||
@@ -1435,15 +1304,6 @@ enum MediaPlayerCommand {
|
||||
MEDIA_PLAYER_COMMAND_STOP = 2;
|
||||
MEDIA_PLAYER_COMMAND_MUTE = 3;
|
||||
MEDIA_PLAYER_COMMAND_UNMUTE = 4;
|
||||
MEDIA_PLAYER_COMMAND_TOGGLE = 5;
|
||||
MEDIA_PLAYER_COMMAND_VOLUME_UP = 6;
|
||||
MEDIA_PLAYER_COMMAND_VOLUME_DOWN = 7;
|
||||
MEDIA_PLAYER_COMMAND_ENQUEUE = 8;
|
||||
MEDIA_PLAYER_COMMAND_REPEAT_ONE = 9;
|
||||
MEDIA_PLAYER_COMMAND_REPEAT_OFF = 10;
|
||||
MEDIA_PLAYER_COMMAND_CLEAR_PLAYLIST = 11;
|
||||
MEDIA_PLAYER_COMMAND_TURN_ON = 12;
|
||||
MEDIA_PLAYER_COMMAND_TURN_OFF = 13;
|
||||
}
|
||||
enum MediaPlayerFormatPurpose {
|
||||
MEDIA_PLAYER_FORMAT_PURPOSE_DEFAULT = 0;
|
||||
@@ -1478,8 +1338,6 @@ message ListEntitiesMediaPlayerResponse {
|
||||
repeated MediaPlayerSupportedFormat supported_formats = 9;
|
||||
|
||||
uint32 device_id = 10 [(field_ifdef) = "USE_DEVICES"];
|
||||
|
||||
uint32 feature_flags = 11;
|
||||
}
|
||||
message MediaPlayerStateResponse {
|
||||
option (id) = 64;
|
||||
@@ -1566,11 +1424,11 @@ message BluetoothLERawAdvertisementsResponse {
|
||||
option (ifdef) = "USE_BLUETOOTH_PROXY";
|
||||
option (no_delay) = true;
|
||||
|
||||
repeated BluetoothLERawAdvertisement advertisements = 1 [(fixed_array_with_length_define) = "BLUETOOTH_PROXY_ADVERTISEMENT_BATCH_SIZE"];
|
||||
repeated BluetoothLERawAdvertisement advertisements = 1;
|
||||
}
|
||||
|
||||
enum BluetoothDeviceRequestType {
|
||||
BLUETOOTH_DEVICE_REQUEST_TYPE_CONNECT = 0 [deprecated = true]; // V1 removed, use V3 variants
|
||||
BLUETOOTH_DEVICE_REQUEST_TYPE_CONNECT = 0;
|
||||
BLUETOOTH_DEVICE_REQUEST_TYPE_DISCONNECT = 1;
|
||||
BLUETOOTH_DEVICE_REQUEST_TYPE_PAIR = 2;
|
||||
BLUETOOTH_DEVICE_REQUEST_TYPE_UNPAIR = 3;
|
||||
@@ -1586,7 +1444,7 @@ message BluetoothDeviceRequest {
|
||||
|
||||
uint64 address = 1;
|
||||
BluetoothDeviceRequestType request_type = 2;
|
||||
bool has_address_type = 3; // Deprecated, should be removed in 2027.8 - https://github.com/esphome/esphome/pull/10318
|
||||
bool has_address_type = 3;
|
||||
uint32 address_type = 4;
|
||||
}
|
||||
|
||||
@@ -1610,39 +1468,21 @@ message BluetoothGATTGetServicesRequest {
|
||||
}
|
||||
|
||||
message BluetoothGATTDescriptor {
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2];
|
||||
uint32 handle = 2;
|
||||
|
||||
// New field for efficient UUID (v1.12+)
|
||||
// Only one of uuid or short_uuid will be set.
|
||||
// short_uuid is used for both 16-bit and 32-bit UUIDs with v1.12+ clients.
|
||||
// 128-bit UUIDs always use the uuid field for backwards compatibility.
|
||||
uint32 short_uuid = 3; // 16-bit or 32-bit UUID
|
||||
}
|
||||
|
||||
message BluetoothGATTCharacteristic {
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2];
|
||||
uint32 handle = 2;
|
||||
uint32 properties = 3;
|
||||
repeated BluetoothGATTDescriptor descriptors = 4 [(fixed_vector) = true];
|
||||
|
||||
// New field for efficient UUID (v1.12+)
|
||||
// Only one of uuid or short_uuid will be set.
|
||||
// short_uuid is used for both 16-bit and 32-bit UUIDs with v1.12+ clients.
|
||||
// 128-bit UUIDs always use the uuid field for backwards compatibility.
|
||||
uint32 short_uuid = 5; // 16-bit or 32-bit UUID
|
||||
repeated BluetoothGATTDescriptor descriptors = 4;
|
||||
}
|
||||
|
||||
message BluetoothGATTService {
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
|
||||
repeated uint64 uuid = 1 [(fixed_array_size) = 2];
|
||||
uint32 handle = 2;
|
||||
repeated BluetoothGATTCharacteristic characteristics = 3 [(fixed_vector) = true];
|
||||
|
||||
// New field for efficient UUID (v1.12+)
|
||||
// Only one of uuid or short_uuid will be set.
|
||||
// short_uuid is used for both 16-bit and 32-bit UUIDs with v1.12+ clients.
|
||||
// 128-bit UUIDs always use the uuid field for backwards compatibility.
|
||||
uint32 short_uuid = 4; // 16-bit or 32-bit UUID
|
||||
repeated BluetoothGATTCharacteristic characteristics = 3;
|
||||
}
|
||||
|
||||
message BluetoothGATTGetServicesResponse {
|
||||
@@ -1651,7 +1491,7 @@ message BluetoothGATTGetServicesResponse {
|
||||
option (ifdef) = "USE_BLUETOOTH_PROXY";
|
||||
|
||||
uint64 address = 1;
|
||||
repeated BluetoothGATTService services = 2;
|
||||
repeated BluetoothGATTService services = 2 [(fixed_array_size) = 1];
|
||||
}
|
||||
|
||||
message BluetoothGATTGetServicesDoneResponse {
|
||||
@@ -1692,7 +1532,7 @@ message BluetoothGATTWriteRequest {
|
||||
uint32 handle = 2;
|
||||
bool response = 3;
|
||||
|
||||
bytes data = 4 [(pointer_to_buffer) = true];
|
||||
bytes data = 4;
|
||||
}
|
||||
|
||||
message BluetoothGATTReadDescriptorRequest {
|
||||
@@ -1712,7 +1552,7 @@ message BluetoothGATTWriteDescriptorRequest {
|
||||
uint64 address = 1;
|
||||
uint32 handle = 2;
|
||||
|
||||
bytes data = 3 [(pointer_to_buffer) = true];
|
||||
bytes data = 3;
|
||||
}
|
||||
|
||||
message BluetoothGATTNotifyRequest {
|
||||
@@ -1749,10 +1589,7 @@ message BluetoothConnectionsFreeResponse {
|
||||
|
||||
uint32 free = 1;
|
||||
uint32 limit = 2;
|
||||
repeated uint64 allocated = 3 [
|
||||
(fixed_array_size_define) = "BLUETOOTH_PROXY_MAX_CONNECTIONS",
|
||||
(fixed_array_skip_zero) = true
|
||||
];
|
||||
repeated uint64 allocated = 3;
|
||||
}
|
||||
|
||||
message BluetoothGATTErrorResponse {
|
||||
@@ -1840,7 +1677,6 @@ message BluetoothScannerStateResponse {
|
||||
|
||||
BluetoothScannerState state = 1;
|
||||
BluetoothScannerMode mode = 2;
|
||||
BluetoothScannerMode configured_mode = 3;
|
||||
}
|
||||
|
||||
message BluetoothScannerSetModeRequest {
|
||||
@@ -1986,22 +1822,10 @@ message VoiceAssistantWakeWord {
|
||||
repeated string trained_languages = 3;
|
||||
}
|
||||
|
||||
message VoiceAssistantExternalWakeWord {
|
||||
string id = 1;
|
||||
string wake_word = 2;
|
||||
repeated string trained_languages = 3;
|
||||
string model_type = 4;
|
||||
uint32 model_size = 5;
|
||||
string model_hash = 6;
|
||||
string url = 7;
|
||||
}
|
||||
|
||||
message VoiceAssistantConfigurationRequest {
|
||||
option (id) = 121;
|
||||
option (source) = SOURCE_CLIENT;
|
||||
option (ifdef) = "USE_VOICE_ASSISTANT";
|
||||
|
||||
repeated VoiceAssistantExternalWakeWord external_wake_words = 1;
|
||||
}
|
||||
|
||||
message VoiceAssistantConfigurationResponse {
|
||||
@@ -2010,7 +1834,7 @@ message VoiceAssistantConfigurationResponse {
|
||||
option (ifdef) = "USE_VOICE_ASSISTANT";
|
||||
|
||||
repeated VoiceAssistantWakeWord available_wake_words = 1;
|
||||
repeated string active_wake_words = 2 [(container_pointer) = "std::vector"];
|
||||
repeated string active_wake_words = 2;
|
||||
uint32 max_active_wake_words = 3;
|
||||
}
|
||||
|
||||
@@ -2251,7 +2075,7 @@ message ListEntitiesEventResponse {
|
||||
EntityCategory entity_category = 7;
|
||||
string device_class = 8;
|
||||
|
||||
repeated string event_types = 9 [(container_pointer_no_template) = "FixedVector<const char *>"];
|
||||
repeated string event_types = 9;
|
||||
uint32 device_id = 10 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
message EventResponse {
|
||||
@@ -2416,28 +2240,3 @@ message UpdateCommandRequest {
|
||||
UpdateCommand command = 2;
|
||||
uint32 device_id = 3 [(field_ifdef) = "USE_DEVICES"];
|
||||
}
|
||||
|
||||
// ==================== Z-WAVE ====================
|
||||
|
||||
message ZWaveProxyFrame {
|
||||
option (id) = 128;
|
||||
option (source) = SOURCE_BOTH;
|
||||
option (ifdef) = "USE_ZWAVE_PROXY";
|
||||
option (no_delay) = true;
|
||||
|
||||
bytes data = 1 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
enum ZWaveProxyRequestType {
|
||||
ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE = 0;
|
||||
ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE = 1;
|
||||
ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE = 2;
|
||||
}
|
||||
message ZWaveProxyRequest {
|
||||
option (id) = 129;
|
||||
option (source) = SOURCE_BOTH;
|
||||
option (ifdef) = "USE_ZWAVE_PROXY";
|
||||
|
||||
ZWaveProxyRequestType type = 1;
|
||||
bytes data = 2 [(pointer_to_buffer) = true];
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -10,16 +10,23 @@
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/entity_base.h"
|
||||
|
||||
#include <functional>
|
||||
#include <vector>
|
||||
#include <functional>
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
// Client information structure
struct ClientInfo {
  std::string name;  // Client name from Hello message
  // Note: peername (IP address) is not stored here to save memory.
  // Use helper_->getpeername_to() or helper_->getpeername() when needed.
  std::string name;      // Client name from Hello message
  std::string peername;  // IP:port from socket

  std::string get_combined_info() const {
    if (name == peername) {
      // Before Hello message, both are the same
      return name;
    }
    return name + " (" + peername + ")";
  }
};
|
||||
// Keepalive timeout in milliseconds
|
||||
@@ -37,7 +44,7 @@ static constexpr size_t MAX_PACKETS_PER_BATCH = 64; // ESP32 has 8KB+ stack, HO
|
||||
static constexpr size_t MAX_PACKETS_PER_BATCH = 32; // ESP8266/RP2040/etc have smaller stacks
|
||||
#endif
|
||||
|
||||
class APIConnection final : public APIServerConnection {
|
||||
class APIConnection : public APIServerConnection {
|
||||
public:
|
||||
friend class APIServer;
|
||||
friend class ListEntitiesIterator;
|
||||
@@ -125,15 +132,12 @@ class APIConnection final : public APIServerConnection {
|
||||
#endif
|
||||
bool try_send_log_message(int level, const char *tag, const char *line, size_t message_len);
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void send_homeassistant_action(const HomeassistantActionRequest &call) {
|
||||
void send_homeassistant_service_call(const HomeassistantServiceResponse &call) {
|
||||
if (!this->flags_.service_call_subscription)
|
||||
return;
|
||||
this->send_message(call, HomeassistantActionRequest::MESSAGE_TYPE);
|
||||
this->send_message(call, HomeassistantServiceResponse::MESSAGE_TYPE);
|
||||
}
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
void on_homeassistant_action_response(const HomeassistantActionResponse &msg) override;
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
#endif // USE_API_HOMEASSISTANT_SERVICES
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) override;
|
||||
void unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) override;
|
||||
@@ -167,23 +171,13 @@ class APIConnection final : public APIServerConnection {
|
||||
void voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) override;
|
||||
#endif
|
||||
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void zwave_proxy_frame(const ZWaveProxyFrame &msg) override;
|
||||
void zwave_proxy_request(const ZWaveProxyRequest &msg) override;
|
||||
#endif
|
||||
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
bool send_alarm_control_panel_state(alarm_control_panel::AlarmControlPanel *a_alarm_control_panel);
|
||||
void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) override;
|
||||
#endif
|
||||
|
||||
#ifdef USE_WATER_HEATER
|
||||
bool send_water_heater_state(water_heater::WaterHeater *water_heater);
|
||||
void on_water_heater_command_request(const WaterHeaterCommandRequest &msg) override;
|
||||
#endif
|
||||
|
||||
#ifdef USE_EVENT
|
||||
void send_event(event::Event *event, const char *event_type);
|
||||
void send_event(event::Event *event, const std::string &event_type);
|
||||
#endif
|
||||
|
||||
#ifdef USE_UPDATE
|
||||
@@ -203,20 +197,14 @@ class APIConnection final : public APIServerConnection {
|
||||
void on_get_time_response(const GetTimeResponse &value) override;
|
||||
#endif
|
||||
bool send_hello_response(const HelloRequest &msg) override;
|
||||
#ifdef USE_API_PASSWORD
|
||||
bool send_authenticate_response(const AuthenticationRequest &msg) override;
|
||||
#endif
|
||||
bool send_connect_response(const ConnectRequest &msg) override;
|
||||
bool send_disconnect_response(const DisconnectRequest &msg) override;
|
||||
bool send_ping_response(const PingRequest &msg) override;
|
||||
bool send_device_info_response(const DeviceInfoRequest &msg) override;
|
||||
void list_entities(const ListEntitiesRequest &msg) override { this->begin_iterator_(ActiveIterator::LIST_ENTITIES); }
|
||||
void list_entities(const ListEntitiesRequest &msg) override { this->list_entities_iterator_.begin(); }
|
||||
void subscribe_states(const SubscribeStatesRequest &msg) override {
|
||||
this->flags_.state_subscription = true;
|
||||
// Start initial state iterator only if no iterator is active
|
||||
// If list_entities is running, we'll start initial_state when it completes
|
||||
if (this->active_iterator_ == ActiveIterator::NONE) {
|
||||
this->begin_iterator_(ActiveIterator::INITIAL_STATE);
|
||||
}
|
||||
this->initial_state_iterator_.begin();
|
||||
}
|
||||
void subscribe_logs(const SubscribeLogsRequest &msg) override {
|
||||
this->flags_.log_subscription = msg.level;
|
||||
@@ -231,15 +219,9 @@ class APIConnection final : public APIServerConnection {
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) override;
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
bool send_get_time_response(const GetTimeRequest &msg) override;
|
||||
#ifdef USE_API_SERVICES
|
||||
void execute_service(const ExecuteServiceRequest &msg) override;
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
void send_execute_service_response(uint32_t call_id, bool success, const std::string &error_message);
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
void send_execute_service_response(uint32_t call_id, bool success, const std::string &error_message,
|
||||
const uint8_t *response_data, size_t response_data_len);
|
||||
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
bool send_noise_encryption_set_key_response(const NoiseEncryptionSetKeyRequest &msg) override;
|
||||
@@ -253,13 +235,6 @@ class APIConnection final : public APIServerConnection {
|
||||
this->is_authenticated();
|
||||
}
|
||||
uint8_t get_log_subscription_level() const { return this->flags_.log_subscription; }
|
||||

// Get client API version for feature detection
bool client_supports_api_version(uint16_t major, uint16_t minor) const {
  return this->client_api_version_major_ > major ||
         (this->client_api_version_major_ == major && this->client_api_version_minor_ >= minor);
}
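The comparison above gates client features on a (major, minor) API version pair. A minimal, self-contained sketch of that ordering check; the struct name and version values below are illustrative, not part of the esphome API:

```cpp
#include <cstdint>
#include <cstdio>

struct ClientVersion {
  uint16_t major{0};
  uint16_t minor{0};
  bool supports(uint16_t req_major, uint16_t req_minor) const {
    // Newer major always wins; equal major falls back to the minor comparison.
    return major > req_major || (major == req_major && minor >= req_minor);
  }
};

int main() {
  ClientVersion v{1, 10};
  std::printf("needs 1.9 -> %d\n", v.supports(1, 9));    // 1
  std::printf("needs 1.11 -> %d\n", v.supports(1, 11));  // 0
  std::printf("needs 2.0 -> %d\n", v.supports(2, 0));    // 0
  return 0;
}
```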
|
||||
void on_fatal_error() override;
|
||||
#ifdef USE_API_PASSWORD
|
||||
void on_unauthenticated_access() override;
|
||||
@@ -270,42 +245,59 @@ class APIConnection final : public APIServerConnection {
|
||||
|
||||
// Get header padding size - used for both reserve and insert
|
||||
uint8_t header_padding = this->helper_->frame_header_padding();
|
||||
|
||||
// Get shared buffer from parent server
|
||||
std::vector<uint8_t> &shared_buf = this->parent_->get_shared_buffer_ref();
|
||||
this->prepare_first_message_buffer(shared_buf, header_padding,
|
||||
reserve_size + header_padding + this->helper_->frame_footer_size());
|
||||
return {&shared_buf};
|
||||
}
|
||||
|
||||
void prepare_first_message_buffer(std::vector<uint8_t> &shared_buf, size_t header_padding, size_t total_size) {
|
||||
shared_buf.clear();
|
||||
// Reserve space for header padding + message + footer
|
||||
// - Header padding: space for protocol headers (7 bytes for Noise, 6 for Plaintext)
|
||||
// - Footer: space for MAC (16 bytes for Noise, 0 for Plaintext)
|
||||
shared_buf.reserve(total_size);
|
||||
shared_buf.reserve(reserve_size + header_padding + this->helper_->frame_footer_size());
|
||||
// Resize to add header padding so message encoding starts at the correct position
|
||||
shared_buf.resize(header_padding);
|
||||
return {&shared_buf};
|
||||
}
|
||||

// Prepare buffer for next message in batch
ProtoWriteBuffer prepare_message_buffer(uint16_t message_size, bool is_first_message) {
  // Get reference to shared buffer (it maintains state between batch messages)
  std::vector<uint8_t> &shared_buf = this->parent_->get_shared_buffer_ref();

  if (is_first_message) {
    shared_buf.clear();
  }

  size_t current_size = shared_buf.size();

  // Calculate padding to add:
  // - First message: just header padding
  // - Subsequent messages: footer for previous message + header padding for this message
  size_t padding_to_add = is_first_message
                              ? this->helper_->frame_header_padding()
                              : this->helper_->frame_header_padding() + this->helper_->frame_footer_size();

  // Reserve space for padding + message
  shared_buf.reserve(current_size + padding_to_add + message_size);

  // Resize to add the padding bytes
  shared_buf.resize(current_size + padding_to_add);

  return {&shared_buf};
}
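The padding arithmetic above reserves header bytes in front of each message and leaves room for the previous message's footer between batched messages. A minimal sketch of that resize-then-encode pattern, assuming the 7-byte header / 16-byte footer figures quoted in the comments; `append_message()` is a hypothetical helper, not the real API:

```cpp
#include <cstdint>
#include <vector>

static constexpr size_t HEADER_PADDING = 7;  // e.g. Noise header size (per the comments above)
static constexpr size_t FOOTER_SIZE = 16;    // e.g. Noise MAC size

// Append one message to a shared batch buffer, leaving room for framing.
void append_message(std::vector<uint8_t> &buf, const uint8_t *payload, size_t len, bool first) {
  size_t current = buf.size();
  // The first message only needs its own header padding; later messages also need
  // the previous message's footer in front of their header.
  size_t padding = first ? HEADER_PADDING : HEADER_PADDING + FOOTER_SIZE;
  buf.reserve(current + padding + len);
  buf.resize(current + padding);  // headers/footers are filled in later by the frame helper
  buf.insert(buf.end(), payload, payload + len);
}

int main() {
  std::vector<uint8_t> shared;
  const uint8_t a[] = {1, 2, 3};
  const uint8_t b[] = {4, 5};
  append_message(shared, a, sizeof(a), true);
  append_message(shared, b, sizeof(b), false);
  return static_cast<int>(shared.size());  // 7 + 3 + (16 + 7) + 2 = 35
}
```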
|
||||
bool try_to_clear_buffer(bool log_out_of_space);
|
||||
bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) override;
|
||||
|
||||
const std::string &get_name() const { return this->client_info_.name; }
|
||||
/// Get peer name (IP address) into a stack buffer - avoids heap allocation
|
||||
size_t get_peername_to(std::span<char, socket::PEERNAME_MAX_LEN> buf) const {
|
||||
return this->helper_->getpeername_to(buf);
|
||||
}
|
||||
/// Get peer name as std::string - use sparingly, allocates on heap
|
||||
std::string get_peername() const { return this->helper_->getpeername(); }
|
||||
std::string get_client_combined_info() const { return this->client_info_.get_combined_info(); }
|
||||
|
||||
// Buffer allocator methods for batch processing
|
||||
ProtoWriteBuffer allocate_single_message_buffer(uint16_t size);
|
||||
ProtoWriteBuffer allocate_batch_message_buffer(uint16_t size);
|
||||
|
||||
protected:
|
||||
// Helper function to handle authentication completion
|
||||
void complete_authentication_();
|
||||
|
||||
#ifdef USE_CAMERA
|
||||
void try_send_camera_image_();
|
||||
#endif
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
void process_state_subscriptions_();
|
||||
#endif
|
||||
@@ -329,10 +321,9 @@ class APIConnection final : public APIServerConnection {
|
||||
APIConnection *conn, uint32_t remaining_size, bool is_single) {
|
||||
// Set common fields that are shared by all entity types
|
||||
msg.key = entity->get_object_id_hash();
|
||||
// Get object_id with zero heap allocation
|
||||
// Static case returns direct reference, dynamic case uses buffer
|
||||
char object_id_buf[OBJECT_ID_MAX_LEN];
|
||||
msg.set_object_id(entity->get_object_id_to(object_id_buf));
|
||||
// IMPORTANT: get_object_id() may return a temporary std::string
|
||||
std::string object_id = entity->get_object_id();
|
||||
msg.set_object_id(StringRef(object_id));
|
||||
|
||||
if (entity->has_own_name()) {
|
||||
msg.set_name(entity->get_name());
|
||||
@@ -468,14 +459,8 @@ class APIConnection final : public APIServerConnection {
|
||||
static uint16_t try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
||||
bool is_single);
|
||||
#endif
|
||||
#ifdef USE_WATER_HEATER
|
||||
static uint16_t try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
||||
bool is_single);
|
||||
static uint16_t try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
||||
bool is_single);
|
||||
#endif
|
||||
#ifdef USE_EVENT
|
||||
static uint16_t try_send_event_response(event::Event *event, const char *event_type, APIConnection *conn,
|
||||
static uint16_t try_send_event_response(event::Event *event, const std::string &event_type, APIConnection *conn,
|
||||
uint32_t remaining_size, bool is_single);
|
||||
static uint16_t try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
|
||||
#endif
|
||||
@@ -508,22 +493,10 @@ class APIConnection final : public APIServerConnection {
|
||||
std::unique_ptr<APIFrameHelper> helper_;
|
||||
APIServer *parent_;
|
||||
|
||||
// Group 2: Iterator union (saves ~16 bytes vs separate iterators)
|
||||
// These iterators are never active simultaneously - list_entities runs to completion
|
||||
// before initial_state begins, so we use a union with explicit construction/destruction.
|
||||
enum class ActiveIterator : uint8_t { NONE, LIST_ENTITIES, INITIAL_STATE };
|
||||
|
||||
union IteratorUnion {
|
||||
ListEntitiesIterator list_entities;
|
||||
InitialStateIterator initial_state;
|
||||
// Constructor/destructor do nothing - use placement new/explicit destructor
|
||||
IteratorUnion() {}
|
||||
~IteratorUnion() {}
|
||||
} iterator_storage_;
|
||||
|
||||
// Helper methods for iterator lifecycle management
|
||||
void destroy_active_iterator_();
|
||||
void begin_iterator_(ActiveIterator type);
|
||||
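The iterator union above only works because exactly one member is alive at any time and construction/destruction are done by hand. A reduced sketch of that placement-new lifecycle, using std::string and std::vector as stand-ins for the real iterator types:

```cpp
#include <memory>
#include <new>
#include <string>
#include <vector>

enum class Active : unsigned char { NONE, A, B };

struct Holder {
  union Storage {
    std::string a;
    std::vector<int> b;
    Storage() {}   // members are constructed manually
    ~Storage() {}  // and destroyed manually
  } storage;
  Active active{Active::NONE};

  void destroy() {
    if (active == Active::A) std::destroy_at(&storage.a);
    if (active == Active::B) std::destroy_at(&storage.b);
    active = Active::NONE;
  }
  void begin_a(const char *s) { destroy(); new (&storage.a) std::string(s); active = Active::A; }
  void begin_b() { destroy(); new (&storage.b) std::vector<int>{1, 2, 3}; active = Active::B; }
  ~Holder() { destroy(); }
};

int main() {
  Holder h;
  h.begin_a("list_entities");
  h.begin_b();  // destroys the string, then constructs the vector in place
  return 0;
}
```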
// Group 2: Larger objects (must be 4-byte aligned)
|
||||
// These contain vectors/pointers internally, so putting them early ensures good alignment
|
||||
InitialStateIterator initial_state_iterator_;
|
||||
ListEntitiesIterator list_entities_iterator_;
|
||||
#ifdef USE_CAMERA
|
||||
std::unique_ptr<camera::CameraImageReader> image_reader_;
|
||||
#endif
|
||||
@@ -542,18 +515,51 @@ class APIConnection final : public APIServerConnection {
|
||||
|
||||
class MessageCreator {
|
||||
public:
|
||||
// Constructor for function pointer
|
||||
MessageCreator(MessageCreatorPtr ptr) { data_.function_ptr = ptr; }
|
||||
explicit MessageCreator(const char *str_value) { data_.const_char_ptr = str_value; }
|
||||
|
||||
// Constructor for string state capture
|
||||
explicit MessageCreator(const std::string &str_value) { data_.string_ptr = new std::string(str_value); }
|
||||
|
||||
// No destructor - cleanup must be called explicitly with message_type
|
||||
|
||||
// Delete copy operations - MessageCreator should only be moved
|
||||
MessageCreator(const MessageCreator &other) = delete;
|
||||
MessageCreator &operator=(const MessageCreator &other) = delete;
|
||||
|
||||
// Move constructor
|
||||
MessageCreator(MessageCreator &&other) noexcept : data_(other.data_) { other.data_.function_ptr = nullptr; }
|
||||
|
||||
// Move assignment
|
||||
MessageCreator &operator=(MessageCreator &&other) noexcept {
|
||||
if (this != &other) {
|
||||
// IMPORTANT: Caller must ensure cleanup() was called if this contains a string!
|
||||
// In our usage, this happens in add_item() deduplication and vector::erase()
|
||||
data_ = other.data_;
|
||||
other.data_.function_ptr = nullptr;
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
|
||||
// Call operator - uses message_type to determine union type
|
||||
uint16_t operator()(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single,
|
||||
uint8_t message_type) const;
|
||||
|
||||
// Manual cleanup method - must be called before destruction for string types
|
||||
void cleanup(uint8_t message_type) {
|
||||
#ifdef USE_EVENT
|
||||
if (message_type == EventResponse::MESSAGE_TYPE && data_.string_ptr != nullptr) {
|
||||
delete data_.string_ptr;
|
||||
data_.string_ptr = nullptr;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
private:
|
||||
union Data {
|
||||
MessageCreatorPtr function_ptr;
|
||||
const char *const_char_ptr;
|
||||
} data_; // 4 bytes on 32-bit, 8 bytes on 64-bit
|
||||
std::string *string_ptr;
|
||||
} data_; // 4 bytes on 32-bit, 8 bytes on 64-bit - same as before
|
||||
};
|
||||
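MessageCreator above packs either a function pointer or an owned heap string into one pointer-sized union and relies on an explicit cleanup call. A reduced sketch of the same move-only idea; unlike the original it carries an explicit tag and a real destructor, which is exactly the overhead the real class avoids by deriving the tag from message_type:

```cpp
#include <string>
#include <utility>

using CreatorFn = int (*)(int);

class SmallCreator {
 public:
  explicit SmallCreator(CreatorFn fn) : is_string_(false) { data_.fn = fn; }
  explicit SmallCreator(const std::string &s) : is_string_(true) { data_.str = new std::string(s); }

  SmallCreator(const SmallCreator &) = delete;
  SmallCreator &operator=(const SmallCreator &) = delete;
  SmallCreator(SmallCreator &&other) noexcept : data_(other.data_), is_string_(other.is_string_) {
    other.data_.fn = nullptr;
    other.is_string_ = false;
  }

  // Must be called while the string arm is active, otherwise the heap string leaks.
  void cleanup() {
    if (is_string_) {
      delete data_.str;
      data_.str = nullptr;
      is_string_ = false;
    }
  }
  ~SmallCreator() { cleanup(); }  // the real class omits this and relies on explicit cleanup

 private:
  union Data {
    CreatorFn fn;
    std::string *str;
  } data_;
  bool is_string_;  // illustrative tag; the original keeps only the pointer-sized union
};

int main() {
  SmallCreator a(+[](int x) { return x + 1; });
  SmallCreator b(std::string("button_press"));
  SmallCreator c(std::move(b));  // b is left empty, so no double delete
  c.cleanup();
  return 0;
}
```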
|
||||
// Generic batching mechanism for both state updates and entity info
|
||||
@@ -566,41 +572,52 @@ class APIConnection final : public APIServerConnection {
|
||||
|
||||
// Constructor for creating BatchItem
|
||||
BatchItem(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size)
|
||||
: entity(entity), creator(creator), message_type(message_type), estimated_size(estimated_size) {}
|
||||
: entity(entity), creator(std::move(creator)), message_type(message_type), estimated_size(estimated_size) {}
|
||||
};
|
||||
|
||||
std::vector<BatchItem> items;
|
||||
uint32_t batch_start_time{0};
|
||||
|
||||
// No pre-allocation - log connections never use batching, and for
|
||||
// connections that do, buffers are released after initial sync anyway
|
||||
private:
|
||||
// Helper to clean up items from the beginning of the batch
|
||||
void cleanup_items_(size_t count) {
|
||||
for (size_t i = 0; i < count; i++) {
|
||||
items[i].creator.cleanup(items[i].message_type);
|
||||
}
|
||||
}
|
||||
|
||||
public:
|
||||
DeferredBatch() {
|
||||
// Pre-allocate capacity for typical batch sizes to avoid reallocation
|
||||
items.reserve(8);
|
||||
}
|
||||
|
||||
~DeferredBatch() {
|
||||
// Ensure cleanup of any remaining items
|
||||
clear();
|
||||
}
|
||||
|
||||
// Add item to the batch
|
||||
void add_item(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size);
|
||||
// Add item to the front of the batch (for high priority messages like ping)
|
||||
void add_item_front(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size);
|
||||
|
||||
// Clear all items
|
||||
// Clear all items with proper cleanup
|
||||
void clear() {
|
||||
cleanup_items_(items.size());
|
||||
items.clear();
|
||||
batch_start_time = 0;
|
||||
}
|
||||
|
||||
// Remove processed items from the front
|
||||
void remove_front(size_t count) { items.erase(items.begin(), items.begin() + count); }
|
||||
// Remove processed items from the front with proper cleanup
|
||||
void remove_front(size_t count) {
|
||||
cleanup_items_(count);
|
||||
items.erase(items.begin(), items.begin() + count);
|
||||
}
|
||||
|
||||
bool empty() const { return items.empty(); }
|
||||
size_t size() const { return items.size(); }
|
||||
const BatchItem &operator[](size_t index) const { return items[index]; }
|
||||
// Release excess capacity - only releases if items already empty
|
||||
void release_buffer() {
|
||||
// Safe to call: batch is processed before release_buffer is called,
|
||||
// and if any items remain (partial processing), we must not clear them.
|
||||
// Use swap trick since shrink_to_fit() is non-binding and may be ignored.
|
||||
if (items.empty()) {
|
||||
std::vector<BatchItem>().swap(items);
|
||||
}
|
||||
}
|
||||
};
|
||||
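DeferredBatch above pairs every erase-from-the-front with a per-item cleanup pass and uses the swap idiom to actually return capacity. A small sketch of that pattern with a trivial stand-in item type:

```cpp
#include <cstdio>
#include <vector>

struct Item {
  int id;
  void cleanup() { std::printf("cleanup %d\n", id); }
};

struct Batch {
  std::vector<Item> items;

  void remove_front(size_t count) {
    for (size_t i = 0; i < count; i++)
      items[i].cleanup();                               // release per-item resources first
    items.erase(items.begin(), items.begin() + count);  // then drop them from the queue
  }
  void release_buffer() {
    // shrink_to_fit() is non-binding, so swap with an empty vector instead.
    if (items.empty())
      std::vector<Item>().swap(items);
  }
};

int main() {
  Batch b;
  b.items = {{1}, {2}, {3}};
  b.remove_front(2);   // cleans up 1 and 2, keeps 3
  b.remove_front(1);
  b.release_buffer();  // capacity actually goes back to the allocator
  return 0;
}
```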
|
||||
// DeferredBatch here (16 bytes, 4-byte aligned)
|
||||
@@ -638,9 +655,7 @@ class APIConnection final : public APIServerConnection {
|
||||
// 2-byte types immediately after flags_ (no padding between them)
|
||||
uint16_t client_api_version_major_{0};
|
||||
uint16_t client_api_version_minor_{0};
|
||||
// 1-byte type to fill padding
|
||||
ActiveIterator active_iterator_{ActiveIterator::NONE};
|
||||
// Total: 2 (flags) + 2 + 2 + 1 = 7 bytes, then 1 byte padding to next 4-byte boundary
|
||||
// Total: 2 (flags) + 2 + 2 = 6 bytes, then 2 bytes padding to next 4-byte boundary
|
||||
|
||||
uint32_t get_batch_delay_ms_() const;
|
||||
// Message will use 8 more bytes than the minimum size, and typical
|
||||
@@ -677,30 +692,15 @@ class APIConnection final : public APIServerConnection {
|
||||
}
|
||||
#endif
|
||||
|
||||
// Helper to check if a message type should bypass batching
// Returns true if:
// 1. It's an UpdateStateResponse (always send immediately to handle cases where
//    the main loop is blocked, e.g., during OTA updates)
// 2. It's an EventResponse (events are edge-triggered - every occurrence matters)
// 3. OR: User has opted into immediate sending (should_try_send_immediately = true
//    AND batch_delay = 0)
inline bool should_send_immediately_(uint8_t message_type) const {
  return (
#ifdef USE_UPDATE
      message_type == UpdateStateResponse::MESSAGE_TYPE ||
#endif
#ifdef USE_EVENT
      message_type == EventResponse::MESSAGE_TYPE ||
#endif
      (this->flags_.should_try_send_immediately && this->get_batch_delay_ms_() == 0));
}
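The helper above lets selected message types skip batching entirely. A free-function sketch of the same decision, with made-up message type IDs and without the USE_UPDATE/USE_EVENT compile-time guards:

```cpp
#include <cstdint>

constexpr uint8_t UPDATE_STATE_MSG = 10;  // illustrative IDs, not the real protocol values
constexpr uint8_t EVENT_MSG = 11;

bool should_send_immediately(uint8_t message_type, bool opted_in, uint32_t batch_delay_ms) {
  // Edge-triggered / time-critical types always bypass batching; everything else
  // bypasses it only when the user opted in and disabled the batch delay.
  return message_type == UPDATE_STATE_MSG || message_type == EVENT_MSG ||
         (opted_in && batch_delay_ms == 0);
}

int main() {
  bool a = should_send_immediately(EVENT_MSG, false, 50);  // true: events always go out
  bool b = should_send_immediately(42, true, 0);           // true: opted in, no delay
  bool c = should_send_immediately(42, true, 50);          // false: batched
  return (a && b && !c) ? 0 : 1;
}
```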
|
||||
// Helper method to send a message either immediately or via batching
|
||||
// Tries immediate send if should_send_immediately_() returns true and buffer has space
|
||||
// Falls back to batching if immediate send fails or isn't applicable
|
||||
bool send_message_smart_(EntityBase *entity, MessageCreatorPtr creator, uint8_t message_type,
|
||||
uint8_t estimated_size) {
|
||||
if (this->should_send_immediately_(message_type) && this->helper_->can_write_without_blocking()) {
|
||||
// Try to send immediately if:
|
||||
// 1. We should try to send immediately (should_try_send_immediately = true)
|
||||
// 2. Batch delay is 0 (user has opted in to immediate sending)
|
||||
// 3. Buffer has space available
|
||||
if (this->flags_.should_try_send_immediately && this->get_batch_delay_ms_() == 0 &&
|
||||
this->helper_->can_write_without_blocking()) {
|
||||
// Now actually encode and send
|
||||
if (creator(entity, this, MAX_BATCH_PACKET_SIZE, true) &&
|
||||
this->send_buffer(ProtoWriteBuffer{&this->parent_->get_shared_buffer_ref()}, message_type)) {
|
||||
@@ -718,30 +718,9 @@ class APIConnection final : public APIServerConnection {
|
||||
return this->schedule_message_(entity, creator, message_type, estimated_size);
|
||||
}
|
||||
|
||||
// Overload for MessageCreator (used by events which need to capture event_type)
|
||||
bool send_message_smart_(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size) {
|
||||
// Try to send immediately if message type should bypass batching and buffer has space
|
||||
if (this->should_send_immediately_(message_type) && this->helper_->can_write_without_blocking()) {
|
||||
// Now actually encode and send
|
||||
if (creator(entity, this, MAX_BATCH_PACKET_SIZE, true, message_type) &&
|
||||
this->send_buffer(ProtoWriteBuffer{&this->parent_->get_shared_buffer_ref()}, message_type)) {
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
// Log the message in verbose mode
|
||||
this->log_proto_message_(entity, creator, message_type);
|
||||
#endif
|
||||
return true;
|
||||
}
|
||||
|
||||
// If immediate send failed, fall through to batching
|
||||
}
|
||||
|
||||
// Fall back to scheduled batching
|
||||
return this->schedule_message_(entity, creator, message_type, estimated_size);
|
||||
}
|
||||
|
||||
// Helper function to schedule a deferred message with known message type
|
||||
bool schedule_message_(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size) {
|
||||
this->deferred_batch_.add_item(entity, creator, message_type, estimated_size);
|
||||
this->deferred_batch_.add_item(entity, std::move(creator), message_type, estimated_size);
|
||||
return this->schedule_batch_();
|
||||
}
|
||||
|
||||
@@ -757,16 +736,6 @@ class APIConnection final : public APIServerConnection {
|
||||
this->deferred_batch_.add_item_front(entity, MessageCreator(function_ptr), message_type, estimated_size);
|
||||
return this->schedule_batch_();
|
||||
}
|
||||
|
||||
// Helper function to log client messages with name and peername
|
||||
void log_client_(int level, const LogString *message);
|
||||
// Helper function to log API errors with errno
|
||||
void log_warning_(const LogString *message, APIError err);
|
||||
// Helper to handle fatal errors with logging
|
||||
inline void fatal_error_with_log_(const LogString *message, APIError err) {
|
||||
this->on_fatal_error();
|
||||
this->log_warning_(message, err);
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -13,16 +13,7 @@ namespace esphome::api {
|
||||
|
||||
static const char *const TAG = "api.frame_helper";
|
||||
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
|
||||
#define HELPER_LOG(msg, ...) \
|
||||
do { \
|
||||
char peername__[socket::PEERNAME_MAX_LEN]; \
|
||||
this->socket_->getpeername_to(peername__); \
|
||||
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), peername__, ##__VA_ARGS__); \
|
||||
} while (0)
|
||||
#else
|
||||
#define HELPER_LOG(msg, ...) ((void) 0)
|
||||
#endif
|
||||
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
|
||||
|
||||
#ifdef HELPER_LOG_PACKETS
|
||||
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
|
||||
@@ -32,64 +23,64 @@ static const char *const TAG = "api.frame_helper";
|
||||
#define LOG_PACKET_SENDING(data, len) ((void) 0)
|
||||
#endif
|
||||
|
||||
const LogString *api_error_to_logstr(APIError err) {
|
||||
const char *api_error_to_str(APIError err) {
|
||||
// not using a switch here so the compiler doesn't try to build a jump table out of it
|
||||
if (err == APIError::OK) {
|
||||
return LOG_STR("OK");
|
||||
return "OK";
|
||||
} else if (err == APIError::WOULD_BLOCK) {
|
||||
return LOG_STR("WOULD_BLOCK");
|
||||
return "WOULD_BLOCK";
|
||||
} else if (err == APIError::BAD_INDICATOR) {
|
||||
return LOG_STR("BAD_INDICATOR");
|
||||
return "BAD_INDICATOR";
|
||||
} else if (err == APIError::BAD_DATA_PACKET) {
|
||||
return LOG_STR("BAD_DATA_PACKET");
|
||||
return "BAD_DATA_PACKET";
|
||||
} else if (err == APIError::TCP_NODELAY_FAILED) {
|
||||
return LOG_STR("TCP_NODELAY_FAILED");
|
||||
return "TCP_NODELAY_FAILED";
|
||||
} else if (err == APIError::TCP_NONBLOCKING_FAILED) {
|
||||
return LOG_STR("TCP_NONBLOCKING_FAILED");
|
||||
return "TCP_NONBLOCKING_FAILED";
|
||||
} else if (err == APIError::CLOSE_FAILED) {
|
||||
return LOG_STR("CLOSE_FAILED");
|
||||
return "CLOSE_FAILED";
|
||||
} else if (err == APIError::SHUTDOWN_FAILED) {
|
||||
return LOG_STR("SHUTDOWN_FAILED");
|
||||
return "SHUTDOWN_FAILED";
|
||||
} else if (err == APIError::BAD_STATE) {
|
||||
return LOG_STR("BAD_STATE");
|
||||
return "BAD_STATE";
|
||||
} else if (err == APIError::BAD_ARG) {
|
||||
return LOG_STR("BAD_ARG");
|
||||
return "BAD_ARG";
|
||||
} else if (err == APIError::SOCKET_READ_FAILED) {
|
||||
return LOG_STR("SOCKET_READ_FAILED");
|
||||
return "SOCKET_READ_FAILED";
|
||||
} else if (err == APIError::SOCKET_WRITE_FAILED) {
|
||||
return LOG_STR("SOCKET_WRITE_FAILED");
|
||||
return "SOCKET_WRITE_FAILED";
|
||||
} else if (err == APIError::OUT_OF_MEMORY) {
|
||||
return LOG_STR("OUT_OF_MEMORY");
|
||||
return "OUT_OF_MEMORY";
|
||||
} else if (err == APIError::CONNECTION_CLOSED) {
|
||||
return LOG_STR("CONNECTION_CLOSED");
|
||||
return "CONNECTION_CLOSED";
|
||||
}
|
||||
#ifdef USE_API_NOISE
|
||||
else if (err == APIError::BAD_HANDSHAKE_PACKET_LEN) {
|
||||
return LOG_STR("BAD_HANDSHAKE_PACKET_LEN");
|
||||
return "BAD_HANDSHAKE_PACKET_LEN";
|
||||
} else if (err == APIError::HANDSHAKESTATE_READ_FAILED) {
|
||||
return LOG_STR("HANDSHAKESTATE_READ_FAILED");
|
||||
return "HANDSHAKESTATE_READ_FAILED";
|
||||
} else if (err == APIError::HANDSHAKESTATE_WRITE_FAILED) {
|
||||
return LOG_STR("HANDSHAKESTATE_WRITE_FAILED");
|
||||
return "HANDSHAKESTATE_WRITE_FAILED";
|
||||
} else if (err == APIError::HANDSHAKESTATE_BAD_STATE) {
|
||||
return LOG_STR("HANDSHAKESTATE_BAD_STATE");
|
||||
return "HANDSHAKESTATE_BAD_STATE";
|
||||
} else if (err == APIError::CIPHERSTATE_DECRYPT_FAILED) {
|
||||
return LOG_STR("CIPHERSTATE_DECRYPT_FAILED");
|
||||
return "CIPHERSTATE_DECRYPT_FAILED";
|
||||
} else if (err == APIError::CIPHERSTATE_ENCRYPT_FAILED) {
|
||||
return LOG_STR("CIPHERSTATE_ENCRYPT_FAILED");
|
||||
return "CIPHERSTATE_ENCRYPT_FAILED";
|
||||
} else if (err == APIError::HANDSHAKESTATE_SETUP_FAILED) {
|
||||
return LOG_STR("HANDSHAKESTATE_SETUP_FAILED");
|
||||
return "HANDSHAKESTATE_SETUP_FAILED";
|
||||
} else if (err == APIError::HANDSHAKESTATE_SPLIT_FAILED) {
|
||||
return LOG_STR("HANDSHAKESTATE_SPLIT_FAILED");
|
||||
return "HANDSHAKESTATE_SPLIT_FAILED";
|
||||
} else if (err == APIError::BAD_HANDSHAKE_ERROR_BYTE) {
|
||||
return LOG_STR("BAD_HANDSHAKE_ERROR_BYTE");
|
||||
return "BAD_HANDSHAKE_ERROR_BYTE";
|
||||
}
|
||||
#endif
|
||||
return LOG_STR("UNKNOWN");
|
||||
return "UNKNOWN";
|
||||
}
|
||||
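One side of this diff returns LogString pointers (which can be kept in flash on some targets) instead of plain C strings, but the lookup shape is the same either way. A tiny sketch of that if/else mapping using plain const char*:

```cpp
#include <cstdio>

enum class Err : unsigned { OK = 0, WOULD_BLOCK, BAD_STATE };

const char *err_to_str(Err e) {
  // An if/else chain instead of a switch, mirroring the comment above about
  // not encouraging the compiler to emit a jump table.
  if (e == Err::OK)
    return "OK";
  if (e == Err::WOULD_BLOCK)
    return "WOULD_BLOCK";
  if (e == Err::BAD_STATE)
    return "BAD_STATE";
  return "UNKNOWN";
}

int main() {
  std::printf("%s\n", err_to_str(Err::WOULD_BLOCK));
  return 0;
}
```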
|
||||
// Default implementation for loop - handles sending buffered data
|
||||
APIError APIFrameHelper::loop() {
|
||||
if (this->tx_buf_count_ > 0) {
|
||||
if (!this->tx_buf_.empty()) {
|
||||
APIError err = try_send_tx_buf_();
|
||||
if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
|
||||
return err;
|
||||
@@ -111,20 +102,9 @@ APIError APIFrameHelper::handle_socket_write_error_() {
|
||||
// Helper method to buffer data from IOVs
|
||||
void APIFrameHelper::buffer_data_from_iov_(const struct iovec *iov, int iovcnt, uint16_t total_write_len,
|
||||
uint16_t offset) {
|
||||
// Check if queue is full
|
||||
if (this->tx_buf_count_ >= API_MAX_SEND_QUEUE) {
|
||||
HELPER_LOG("Send queue full (%u buffers), dropping connection", this->tx_buf_count_);
|
||||
this->state_ = State::FAILED;
|
||||
return;
|
||||
}
|
||||
|
||||
uint16_t buffer_size = total_write_len - offset;
|
||||
auto &buffer = this->tx_buf_[this->tx_buf_tail_];
|
||||
buffer = std::make_unique<SendBuffer>(SendBuffer{
|
||||
.data = std::make_unique<uint8_t[]>(buffer_size),
|
||||
.size = buffer_size,
|
||||
.offset = 0,
|
||||
});
|
||||
SendBuffer buffer;
|
||||
buffer.size = total_write_len - offset;
|
||||
buffer.data = std::make_unique<uint8_t[]>(buffer.size);
|
||||
|
||||
uint16_t to_skip = offset;
|
||||
uint16_t write_pos = 0;
|
||||
@@ -137,15 +117,12 @@ void APIFrameHelper::buffer_data_from_iov_(const struct iovec *iov, int iovcnt,
|
||||
// Include this segment (partially or fully)
|
||||
const uint8_t *src = reinterpret_cast<uint8_t *>(iov[i].iov_base) + to_skip;
|
||||
uint16_t len = static_cast<uint16_t>(iov[i].iov_len) - to_skip;
|
||||
std::memcpy(buffer->data.get() + write_pos, src, len);
|
||||
std::memcpy(buffer.data.get() + write_pos, src, len);
|
||||
write_pos += len;
|
||||
to_skip = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Update circular buffer tracking
|
||||
this->tx_buf_tail_ = (this->tx_buf_tail_ + 1) % API_MAX_SEND_QUEUE;
|
||||
this->tx_buf_count_++;
|
||||
this->tx_buf_.push_back(std::move(buffer));
|
||||
}
|
||||
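One side of this hunk replaces the std::deque send queue with a fixed array of owned buffers tracked by head/tail/count. A compact sketch of such a ring; the queue size and Buf layout are illustrative only:

```cpp
#include <array>
#include <cstdint>
#include <memory>

constexpr uint8_t QUEUE_SIZE = 8;

struct Buf {
  std::unique_ptr<uint8_t[]> data;
  uint16_t size{0};
};

struct Ring {
  std::array<std::unique_ptr<Buf>, QUEUE_SIZE> slots;
  uint8_t head{0}, tail{0}, count{0};

  bool push(uint16_t size) {
    if (count >= QUEUE_SIZE)
      return false;  // full: the frame helper drops the connection in this case
    slots[tail] = std::make_unique<Buf>(Buf{std::make_unique<uint8_t[]>(size), size});
    tail = (tail + 1) % QUEUE_SIZE;
    count++;
    return true;
  }
  void pop() {
    slots[head].reset();  // free the buffer that was fully sent
    head = (head + 1) % QUEUE_SIZE;
    count--;
  }
};

int main() {
  Ring ring;
  ring.push(64);
  ring.push(128);
  ring.pop();
  return ring.count;  // 1
}
```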
|
||||
// This method writes data to socket or buffers it
|
||||
@@ -163,7 +140,7 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
|
||||
#endif
|
||||
|
||||
// Try to send any existing buffered data first if there is any
|
||||
if (this->tx_buf_count_ > 0) {
|
||||
if (!this->tx_buf_.empty()) {
|
||||
APIError send_result = try_send_tx_buf_();
|
||||
// If real error occurred (not just WOULD_BLOCK), return it
|
||||
if (send_result != APIError::OK && send_result != APIError::WOULD_BLOCK) {
|
||||
@@ -172,16 +149,14 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
|
||||
|
||||
// If there is still data in the buffer, we can't send, buffer
|
||||
// the new data and return
|
||||
if (this->tx_buf_count_ > 0) {
|
||||
if (!this->tx_buf_.empty()) {
|
||||
this->buffer_data_from_iov_(iov, iovcnt, total_write_len, 0);
|
||||
return APIError::OK; // Success, data buffered
|
||||
}
|
||||
}
|
||||
|
||||
// Try to send directly if no buffered data
|
||||
// Optimize for single iovec case (common for plaintext API)
|
||||
ssize_t sent =
|
||||
(iovcnt == 1) ? this->socket_->write(iov[0].iov_base, iov[0].iov_len) : this->socket_->writev(iov, iovcnt);
|
||||
ssize_t sent = this->socket_->writev(iov, iovcnt);
|
||||
|
||||
if (sent == -1) {
|
||||
APIError err = this->handle_socket_write_error_();
|
||||
@@ -200,31 +175,32 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
|
||||
}
|
||||
|
||||
// Common implementation for trying to send buffered data
|
||||
// IMPORTANT: Caller MUST ensure tx_buf_count_ > 0 before calling this method
|
||||
// IMPORTANT: Caller MUST ensure tx_buf_ is not empty before calling this method
|
||||
APIError APIFrameHelper::try_send_tx_buf_() {
|
||||
// Try to send from tx_buf - we assume it's not empty as it's the caller's responsibility to check
|
||||
while (this->tx_buf_count_ > 0) {
|
||||
bool tx_buf_empty = false;
|
||||
while (!tx_buf_empty) {
|
||||
// Get the first buffer in the queue
|
||||
SendBuffer *front_buffer = this->tx_buf_[this->tx_buf_head_].get();
|
||||
SendBuffer &front_buffer = this->tx_buf_.front();
|
||||
|
||||
// Try to send the remaining data in this buffer
|
||||
ssize_t sent = this->socket_->write(front_buffer->current_data(), front_buffer->remaining());
|
||||
ssize_t sent = this->socket_->write(front_buffer.current_data(), front_buffer.remaining());
|
||||
|
||||
if (sent == -1) {
|
||||
return this->handle_socket_write_error_();
|
||||
} else if (sent == 0) {
|
||||
// Nothing sent but not an error
|
||||
return APIError::WOULD_BLOCK;
|
||||
} else if (static_cast<uint16_t>(sent) < front_buffer->remaining()) {
|
||||
} else if (static_cast<uint16_t>(sent) < front_buffer.remaining()) {
|
||||
// Partially sent, update offset
|
||||
// Cast to ensure no overflow issues with uint16_t
|
||||
front_buffer->offset += static_cast<uint16_t>(sent);
|
||||
front_buffer.offset += static_cast<uint16_t>(sent);
|
||||
return APIError::WOULD_BLOCK; // Stop processing more buffers if we couldn't send a complete buffer
|
||||
} else {
|
||||
// Buffer completely sent, remove it from the queue
|
||||
this->tx_buf_[this->tx_buf_head_].reset();
|
||||
this->tx_buf_head_ = (this->tx_buf_head_ + 1) % API_MAX_SEND_QUEUE;
|
||||
this->tx_buf_count_--;
|
||||
this->tx_buf_.pop_front();
|
||||
// Update empty status for the loop condition
|
||||
tx_buf_empty = this->tx_buf_.empty();
|
||||
// Continue loop to try sending the next buffer
|
||||
}
|
||||
}
|
||||
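try_send_tx_buf_ above has to distinguish "sent nothing", "sent part of the buffer", and "sent it all". A sketch of that partial-write loop against a fake non-blocking socket; the 3-byte write cap is purely illustrative:

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct FakeSocket {
  // Pretend the kernel only accepts up to 3 bytes per call.
  long write(const uint8_t *, size_t len) { return static_cast<long>(std::min<size_t>(len, 3)); }
};

struct PendingBuf {
  std::vector<uint8_t> data;
  size_t offset{0};
  size_t remaining() const { return data.size() - offset; }
};

// Returns true when the buffer is fully flushed, false when it would block.
bool flush(FakeSocket &sock, PendingBuf &buf) {
  while (buf.remaining() > 0) {
    long sent = sock.write(buf.data.data() + buf.offset, buf.remaining());
    if (sent <= 0)
      return false;                           // error / would block: try again later
    buf.offset += static_cast<size_t>(sent);  // partial write: advance the offset and retry
  }
  return true;
}

int main() {
  FakeSocket sock;
  PendingBuf buf{{1, 2, 3, 4, 5, 6, 7}, 0};
  std::printf("flushed: %d\n", flush(sock, buf));  // 1, after three partial writes
  return 0;
}
```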
|
||||
@@ -1,8 +1,7 @@
|
||||
#pragma once
|
||||
#include <array>
|
||||
#include <cstdint>
|
||||
#include <deque>
|
||||
#include <limits>
|
||||
#include <memory>
|
||||
#include <span>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
@@ -18,26 +17,16 @@ namespace esphome::api {
|
||||
// uncomment to log raw packets
|
||||
//#define HELPER_LOG_PACKETS
|
||||
|
||||
// Maximum message size limits to prevent OOM on constrained devices
|
||||
// Handshake messages are limited to a small size for security
|
||||
static constexpr uint16_t MAX_HANDSHAKE_SIZE = 128;
|
||||
|
||||
// Data message limits vary by platform based on available memory
|
||||
#ifdef USE_ESP8266
|
||||
static constexpr uint16_t MAX_MESSAGE_SIZE = 8192; // 8 KiB for ESP8266
|
||||
#else
|
||||
static constexpr uint16_t MAX_MESSAGE_SIZE = 32768; // 32 KiB for ESP32 and other platforms
|
||||
#endif
|
||||
|
||||
// Forward declaration
|
||||
struct ClientInfo;
|
||||
|
||||
class ProtoWriteBuffer;
|
||||
|
||||
struct ReadPacketBuffer {
|
||||
const uint8_t *data; // Points directly into frame helper's rx_buf_ (valid until next read_packet call)
|
||||
uint16_t data_len;
|
||||
std::vector<uint8_t> container;
|
||||
uint16_t type;
|
||||
uint16_t data_offset;
|
||||
uint16_t data_len;
|
||||
};
|
||||
|
||||
// Packed packet info structure to minimize memory usage
|
||||
@@ -77,21 +66,22 @@ enum class APIError : uint16_t {
|
||||
#endif
|
||||
};
|
||||
|
||||
const LogString *api_error_to_logstr(APIError err);
|
||||
const char *api_error_to_str(APIError err);
|
||||
|
||||
class APIFrameHelper {
|
||||
public:
|
||||
APIFrameHelper() = default;
|
||||
explicit APIFrameHelper(std::unique_ptr<socket::Socket> socket, const ClientInfo *client_info)
|
||||
: socket_(std::move(socket)), client_info_(client_info) {}
|
||||
: socket_owned_(std::move(socket)), client_info_(client_info) {
|
||||
socket_ = socket_owned_.get();
|
||||
}
|
||||
virtual ~APIFrameHelper() = default;
|
||||
virtual APIError init() = 0;
|
||||
virtual APIError loop();
|
||||
virtual APIError read_packet(ReadPacketBuffer *buffer) = 0;
|
||||
bool can_write_without_blocking() { return this->state_ == State::DATA && this->tx_buf_count_ == 0; }
|
||||
bool can_write_without_blocking() { return state_ == State::DATA && tx_buf_.empty(); }
|
||||
std::string getpeername() { return socket_->getpeername(); }
|
||||
int getpeername(struct sockaddr *addr, socklen_t *addrlen) { return socket_->getpeername(addr, addrlen); }
|
||||
size_t getpeername_to(std::span<char, socket::PEERNAME_MAX_LEN> buf) { return socket_->getpeername_to(buf); }
|
||||
APIError close() {
|
||||
state_ = State::CLOSED;
|
||||
int err = this->socket_->close();
|
||||
@@ -114,27 +104,11 @@ class APIFrameHelper {
|
||||
// The buffer contains all messages with appropriate padding before each
|
||||
virtual APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) = 0;
|
||||
// Get the frame header padding required by this protocol
|
||||
uint8_t frame_header_padding() const { return frame_header_padding_; }
|
||||
virtual uint8_t frame_header_padding() = 0;
|
||||
// Get the frame footer size required by this protocol
|
||||
uint8_t frame_footer_size() const { return frame_footer_size_; }
|
||||
virtual uint8_t frame_footer_size() = 0;
|
||||
// Check if socket has data ready to read
|
||||
bool is_socket_ready() const { return socket_ != nullptr && socket_->ready(); }
|
||||
// Release excess memory from internal buffers after initial sync
|
||||
void release_buffers() {
|
||||
// rx_buf_: Safe to clear only if no partial read in progress.
|
||||
// rx_buf_len_ tracks bytes read so far; if non-zero, we're mid-frame
|
||||
// and clearing would lose partially received data.
|
||||
if (this->rx_buf_len_ == 0) {
|
||||
// Use swap trick since shrink_to_fit() is non-binding and may be ignored
|
||||
std::vector<uint8_t>().swap(this->rx_buf_);
|
||||
}
|
||||
// reusable_iovs_: Safe to release unconditionally.
|
||||
// Only used within write_protobuf_packets() calls - cleared at start,
|
||||
// populated with pointers, used for writev(), then function returns.
|
||||
// The iovecs contain stale pointers after the call (data was either sent
|
||||
// or copied to tx_buf_), and are cleared on next write_protobuf_packets().
|
||||
std::vector<struct iovec>().swap(this->reusable_iovs_);
|
||||
}
|
||||
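release_buffers() above relies on the swap idiom because shrink_to_fit() is only a request the allocator may ignore. A short demonstration of the observable difference in capacity:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  std::vector<uint8_t> rx_buf(4096, 0);
  rx_buf.clear();
  std::printf("after clear: capacity=%zu\n", rx_buf.capacity());  // typically still ~4096

  std::vector<uint8_t>().swap(rx_buf);  // swap with an empty temporary, which frees on destruction
  std::printf("after swap:  capacity=%zu\n", rx_buf.capacity());  // 0
  return 0;
}
```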
|
||||
protected:
|
||||
// Buffer containing data to be sent
|
||||
@@ -163,8 +137,9 @@ class APIFrameHelper {
|
||||
APIError write_raw_(const struct iovec *iov, int iovcnt, socket::Socket *socket, std::vector<uint8_t> &tx_buf,
|
||||
const std::string &info, StateEnum &state, StateEnum failed_state);
|
||||
|
||||
// Socket ownership (4 bytes on 32-bit, 8 bytes on 64-bit)
|
||||
std::unique_ptr<socket::Socket> socket_;
|
||||
// Pointers first (4 bytes each)
|
||||
socket::Socket *socket_{nullptr};
|
||||
std::unique_ptr<socket::Socket> socket_owned_;
|
||||
|
||||
// Common state enum for all frame helpers
|
||||
// Note: Not all states are used by all implementations
|
||||
@@ -186,7 +161,7 @@ class APIFrameHelper {
|
||||
};
|
||||
|
||||
// Containers (size varies, but typically 12+ bytes on 32-bit)
|
||||
std::array<std::unique_ptr<SendBuffer>, API_MAX_SEND_QUEUE> tx_buf_;
|
||||
std::deque<SendBuffer> tx_buf_;
|
||||
std::vector<struct iovec> reusable_iovs_;
|
||||
std::vector<uint8_t> rx_buf_;
|
||||
|
||||
@@ -199,10 +174,7 @@ class APIFrameHelper {
|
||||
State state_{State::INITIALIZE};
|
||||
uint8_t frame_header_padding_{0};
|
||||
uint8_t frame_footer_size_{0};
|
||||
uint8_t tx_buf_head_{0};
|
||||
uint8_t tx_buf_tail_{0};
|
||||
uint8_t tx_buf_count_{0};
|
||||
// 8 bytes total, 0 bytes padding
|
||||
// 5 bytes total, 3 bytes padding
|
||||
|
||||
// Common initialization for both plaintext and noise protocols
|
||||
APIError init_common_();
|
||||
|
||||
@@ -10,30 +10,13 @@
|
||||
#include <cstring>
|
||||
#include <cinttypes>
|
||||
|
||||
#ifdef USE_ESP8266
|
||||
#include <pgmspace.h>
|
||||
#endif
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
static const char *const TAG = "api.noise";
|
||||
#ifdef USE_ESP8266
|
||||
static const char PROLOGUE_INIT[] PROGMEM = "NoiseAPIInit";
|
||||
#else
|
||||
static const char *const PROLOGUE_INIT = "NoiseAPIInit";
|
||||
#endif
|
||||
static constexpr size_t PROLOGUE_INIT_LEN = 12; // strlen("NoiseAPIInit")
|
||||
|
||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
|
||||
#define HELPER_LOG(msg, ...) \
|
||||
do { \
|
||||
char peername__[socket::PEERNAME_MAX_LEN]; \
|
||||
this->socket_->getpeername_to(peername__); \
|
||||
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), peername__, ##__VA_ARGS__); \
|
||||
} while (0)
|
||||
#else
|
||||
#define HELPER_LOG(msg, ...) ((void) 0)
|
||||
#endif
|
||||
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
|
||||
|
||||
#ifdef HELPER_LOG_PACKETS
|
||||
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
|
||||
@@ -44,42 +27,42 @@ static constexpr size_t PROLOGUE_INIT_LEN = 12; // strlen("NoiseAPIInit")
|
||||
#endif
|
||||
|
||||
/// Convert a noise error code to a readable error
|
||||
const LogString *noise_err_to_logstr(int err) {
|
||||
std::string noise_err_to_str(int err) {
|
||||
if (err == NOISE_ERROR_NO_MEMORY)
|
||||
return LOG_STR("NO_MEMORY");
|
||||
return "NO_MEMORY";
|
||||
if (err == NOISE_ERROR_UNKNOWN_ID)
|
||||
return LOG_STR("UNKNOWN_ID");
|
||||
return "UNKNOWN_ID";
|
||||
if (err == NOISE_ERROR_UNKNOWN_NAME)
|
||||
return LOG_STR("UNKNOWN_NAME");
|
||||
return "UNKNOWN_NAME";
|
||||
if (err == NOISE_ERROR_MAC_FAILURE)
|
||||
return LOG_STR("MAC_FAILURE");
|
||||
return "MAC_FAILURE";
|
||||
if (err == NOISE_ERROR_NOT_APPLICABLE)
|
||||
return LOG_STR("NOT_APPLICABLE");
|
||||
return "NOT_APPLICABLE";
|
||||
if (err == NOISE_ERROR_SYSTEM)
|
||||
return LOG_STR("SYSTEM");
|
||||
return "SYSTEM";
|
||||
if (err == NOISE_ERROR_REMOTE_KEY_REQUIRED)
|
||||
return LOG_STR("REMOTE_KEY_REQUIRED");
|
||||
return "REMOTE_KEY_REQUIRED";
|
||||
if (err == NOISE_ERROR_LOCAL_KEY_REQUIRED)
|
||||
return LOG_STR("LOCAL_KEY_REQUIRED");
|
||||
return "LOCAL_KEY_REQUIRED";
|
||||
if (err == NOISE_ERROR_PSK_REQUIRED)
|
||||
return LOG_STR("PSK_REQUIRED");
|
||||
return "PSK_REQUIRED";
|
||||
if (err == NOISE_ERROR_INVALID_LENGTH)
|
||||
return LOG_STR("INVALID_LENGTH");
|
||||
return "INVALID_LENGTH";
|
||||
if (err == NOISE_ERROR_INVALID_PARAM)
|
||||
return LOG_STR("INVALID_PARAM");
|
||||
return "INVALID_PARAM";
|
||||
if (err == NOISE_ERROR_INVALID_STATE)
|
||||
return LOG_STR("INVALID_STATE");
|
||||
return "INVALID_STATE";
|
||||
if (err == NOISE_ERROR_INVALID_NONCE)
|
||||
return LOG_STR("INVALID_NONCE");
|
||||
return "INVALID_NONCE";
|
||||
if (err == NOISE_ERROR_INVALID_PRIVATE_KEY)
|
||||
return LOG_STR("INVALID_PRIVATE_KEY");
|
||||
return "INVALID_PRIVATE_KEY";
|
||||
if (err == NOISE_ERROR_INVALID_PUBLIC_KEY)
|
||||
return LOG_STR("INVALID_PUBLIC_KEY");
|
||||
return "INVALID_PUBLIC_KEY";
|
||||
if (err == NOISE_ERROR_INVALID_FORMAT)
|
||||
return LOG_STR("INVALID_FORMAT");
|
||||
return "INVALID_FORMAT";
|
||||
if (err == NOISE_ERROR_INVALID_SIGNATURE)
|
||||
return LOG_STR("INVALID_SIGNATURE");
|
||||
return LOG_STR("UNKNOWN");
|
||||
return "INVALID_SIGNATURE";
|
||||
return to_string(err);
|
||||
}
|
||||
|
||||
/// Initialize the frame helper, returns OK if successful.
|
||||
@@ -92,11 +75,7 @@ APIError APINoiseFrameHelper::init() {
|
||||
// init prologue
|
||||
size_t old_size = prologue_.size();
|
||||
prologue_.resize(old_size + PROLOGUE_INIT_LEN);
|
||||
#ifdef USE_ESP8266
|
||||
memcpy_P(prologue_.data() + old_size, PROLOGUE_INIT, PROLOGUE_INIT_LEN);
|
||||
#else
|
||||
std::memcpy(prologue_.data() + old_size, PROLOGUE_INIT, PROLOGUE_INIT_LEN);
|
||||
#endif
|
||||
|
||||
state_ = State::CLIENT_HELLO;
|
||||
return APIError::OK;
|
||||
@@ -104,18 +83,18 @@ APIError APINoiseFrameHelper::init() {
|
||||
// Helper for handling handshake frame errors
|
||||
APIError APINoiseFrameHelper::handle_handshake_frame_error_(APIError aerr) {
|
||||
if (aerr == APIError::BAD_INDICATOR) {
|
||||
send_explicit_handshake_reject_(LOG_STR("Bad indicator byte"));
|
||||
send_explicit_handshake_reject_("Bad indicator byte");
|
||||
} else if (aerr == APIError::BAD_HANDSHAKE_PACKET_LEN) {
|
||||
send_explicit_handshake_reject_(LOG_STR("Bad handshake packet len"));
|
||||
send_explicit_handshake_reject_("Bad handshake packet len");
|
||||
}
|
||||
return aerr;
|
||||
}
|
||||
|
||||
// Helper for handling noise library errors
|
||||
APIError APINoiseFrameHelper::handle_noise_error_(int err, const LogString *func_name, APIError api_err) {
|
||||
APIError APINoiseFrameHelper::handle_noise_error_(int err, const char *func_name, APIError api_err) {
|
||||
if (err != 0) {
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("%s failed: %s", LOG_STR_ARG(func_name), LOG_STR_ARG(noise_err_to_logstr(err)));
|
||||
HELPER_LOG("%s failed: %s", func_name, noise_err_to_str(err).c_str());
|
||||
return api_err;
|
||||
}
|
||||
return APIError::OK;
|
||||
@@ -140,16 +119,26 @@ APIError APINoiseFrameHelper::loop() {
|
||||
return APIFrameHelper::loop();
|
||||
}
|
||||
|
||||
/** Read a packet into the rx_buf_.
|
||||
/** Read a packet into the rx_buf_. If successful, stores frame data in the frame parameter
|
||||
*
|
||||
* @return APIError::OK if a full packet is in rx_buf_
|
||||
* @param frame: The struct to hold the frame information in.
|
||||
* msg_start: points to the start of the payload - this pointer is only valid until the next
|
||||
* try_receive_raw_ call
|
||||
*
|
||||
* @return 0 if a full packet is in rx_buf_
|
||||
* @return -1 if error, check errno.
|
||||
*
|
||||
* errno EWOULDBLOCK: Packet could not be read without blocking. Try again later.
|
||||
* errno ENOMEM: Not enough memory for reading packet.
|
||||
* errno API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
|
||||
* errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
|
||||
*/
|
||||
APIError APINoiseFrameHelper::try_read_frame_() {
|
||||
APIError APINoiseFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
|
||||
if (frame == nullptr) {
|
||||
HELPER_LOG("Bad argument for try_read_frame_");
|
||||
return APIError::BAD_ARG;
|
||||
}
|
||||
|
||||
// read header
|
||||
if (rx_header_buf_len_ < 3) {
|
||||
// no header information yet
|
||||
@@ -176,17 +165,16 @@ APIError APINoiseFrameHelper::try_read_frame_() {
|
||||
// read body
|
||||
uint16_t msg_size = (((uint16_t) rx_header_buf_[1]) << 8) | rx_header_buf_[2];
|
||||
|
||||
// Check against size limits to prevent OOM: MAX_HANDSHAKE_SIZE for handshake, MAX_MESSAGE_SIZE for data
|
||||
uint16_t limit = (state_ == State::DATA) ? MAX_MESSAGE_SIZE : MAX_HANDSHAKE_SIZE;
|
||||
if (msg_size > limit) {
|
||||
if (state_ != State::DATA && msg_size > 128) {
|
||||
// for handshake message only permit up to 128 bytes
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Bad packet: message size %u exceeds maximum %u", msg_size, limit);
|
||||
return (state_ == State::DATA) ? APIError::BAD_DATA_PACKET : APIError::BAD_HANDSHAKE_PACKET_LEN;
|
||||
HELPER_LOG("Bad packet len for handshake: %d", msg_size);
|
||||
return APIError::BAD_HANDSHAKE_PACKET_LEN;
|
||||
}
|
||||
|
||||
// Reserve space for body
|
||||
if (this->rx_buf_.size() != msg_size) {
|
||||
this->rx_buf_.resize(msg_size);
|
||||
// reserve space for body
|
||||
if (rx_buf_.size() != msg_size) {
|
||||
rx_buf_.resize(msg_size);
|
||||
}
|
||||
|
||||
if (rx_buf_len_ < msg_size) {
|
||||
@@ -204,12 +192,12 @@ APIError APINoiseFrameHelper::try_read_frame_() {
|
||||
}
|
||||
}
|
||||
|
||||
LOG_PACKET_RECEIVED(this->rx_buf_);
|
||||
|
||||
// Clear state for next frame (rx_buf_ still contains data for caller)
|
||||
this->rx_buf_len_ = 0;
|
||||
this->rx_header_buf_len_ = 0;
|
||||
|
||||
LOG_PACKET_RECEIVED(rx_buf_);
|
||||
*frame = std::move(rx_buf_);
|
||||
// consume msg
|
||||
rx_buf_ = {};
|
||||
rx_buf_len_ = 0;
|
||||
rx_header_buf_len_ = 0;
|
||||
return APIError::OK;
|
||||
}
|
||||
|
||||
@@ -231,44 +219,45 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
}
|
||||
if (state_ == State::CLIENT_HELLO) {
|
||||
// waiting for client hello
|
||||
aerr = this->try_read_frame_();
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
if (aerr != APIError::OK) {
|
||||
return handle_handshake_frame_error_(aerr);
|
||||
}
|
||||
// ignore the contents; they may be used in the future for flags
|
||||
// Resize for: existing prologue + 2 size bytes + frame data
|
||||
size_t old_size = this->prologue_.size();
|
||||
this->prologue_.resize(old_size + 2 + this->rx_buf_.size());
|
||||
this->prologue_[old_size] = (uint8_t) (this->rx_buf_.size() >> 8);
|
||||
this->prologue_[old_size + 1] = (uint8_t) this->rx_buf_.size();
|
||||
std::memcpy(this->prologue_.data() + old_size + 2, this->rx_buf_.data(), this->rx_buf_.size());
|
||||
size_t old_size = prologue_.size();
|
||||
prologue_.resize(old_size + 2 + frame.size());
|
||||
prologue_[old_size] = (uint8_t) (frame.size() >> 8);
|
||||
prologue_[old_size + 1] = (uint8_t) frame.size();
|
||||
std::memcpy(prologue_.data() + old_size + 2, frame.data(), frame.size());
|
||||
|
||||
state_ = State::SERVER_HELLO;
|
||||
}
|
||||
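The CLIENT_HELLO branch above appends a 16-bit big-endian length prefix followed by the frame bytes to the prologue. A standalone sketch of that append; the initial prologue contents here are placeholders:

```cpp
#include <cstdint>
#include <cstring>
#include <vector>

void append_with_len_prefix(std::vector<uint8_t> &out, const std::vector<uint8_t> &frame) {
  size_t old_size = out.size();
  out.resize(old_size + 2 + frame.size());
  out[old_size] = static_cast<uint8_t>(frame.size() >> 8);  // high byte first (big-endian)
  out[old_size + 1] = static_cast<uint8_t>(frame.size());
  std::memcpy(out.data() + old_size + 2, frame.data(), frame.size());
}

int main() {
  std::vector<uint8_t> prologue = {'N', 'o', 'i', 's', 'e'};  // illustrative prefix only
  append_with_len_prefix(prologue, {0xAA, 0xBB, 0xCC});
  // prologue is now: 'N' 'o' 'i' 's' 'e' 0x00 0x03 0xAA 0xBB 0xCC
  return static_cast<int>(prologue.size());  // 10
}
```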
if (state_ == State::SERVER_HELLO) {
|
||||
// send server hello
|
||||
constexpr size_t mac_len = 13; // 12 hex chars + null terminator
|
||||
const std::string &name = App.get_name();
|
||||
char mac[mac_len];
|
||||
get_mac_address_into_buffer(mac);
|
||||
const std::string &mac = get_mac_address();
|
||||
|
||||
std::vector<uint8_t> msg;
|
||||
// Calculate positions and sizes
|
||||
size_t name_len = name.size() + 1; // including null terminator
|
||||
size_t mac_len = mac.size() + 1; // including null terminator
|
||||
size_t name_offset = 1;
|
||||
size_t mac_offset = name_offset + name_len;
|
||||
size_t total_size = 1 + name_len + mac_len;
|
||||
|
||||
auto msg = std::make_unique<uint8_t[]>(total_size);
|
||||
msg.resize(total_size);
|
||||
|
||||
// chosen proto
|
||||
msg[0] = 0x01;
|
||||
|
||||
// node name, terminated by null byte
|
||||
std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
|
||||
std::memcpy(msg.data() + name_offset, name.c_str(), name_len);
|
||||
// node mac, terminated by null byte
|
||||
std::memcpy(msg.get() + mac_offset, mac, mac_len);
|
||||
std::memcpy(msg.data() + mac_offset, mac.c_str(), mac_len);
|
||||
|
||||
aerr = write_frame_(msg.get(), total_size);
|
||||
aerr = write_frame_(msg.data(), msg.size());
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
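The SERVER_HELLO branch builds a [chosen-proto][name\0][mac\0] payload. A sketch of that layout with placeholder name and MAC strings:

```cpp
#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

std::vector<uint8_t> build_hello(const std::string &name, const std::string &mac) {
  size_t name_len = name.size() + 1;  // include the terminating NUL
  size_t mac_len = mac.size() + 1;
  std::vector<uint8_t> msg(1 + name_len + mac_len);
  msg[0] = 0x01;                                                 // chosen protocol byte
  std::memcpy(msg.data() + 1, name.c_str(), name_len);           // node name, NUL-terminated
  std::memcpy(msg.data() + 1 + name_len, mac.c_str(), mac_len);  // node MAC, NUL-terminated
  return msg;
}

int main() {
  auto msg = build_hello("livingroom-node", "aabbccddeeff");
  return static_cast<int>(msg.size());  // 1 + 16 + 13 = 30
}
```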
|
||||
@@ -283,30 +272,29 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
int action = noise_handshakestate_get_action(handshake_);
|
||||
if (action == NOISE_ACTION_READ_MESSAGE) {
|
||||
// waiting for handshake msg
|
||||
aerr = this->try_read_frame_();
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
if (aerr != APIError::OK) {
|
||||
return handle_handshake_frame_error_(aerr);
|
||||
}
|
||||
|
||||
if (this->rx_buf_.empty()) {
|
||||
send_explicit_handshake_reject_(LOG_STR("Empty handshake message"));
|
||||
if (frame.empty()) {
|
||||
send_explicit_handshake_reject_("Empty handshake message");
|
||||
return APIError::BAD_HANDSHAKE_ERROR_BYTE;
|
||||
} else if (this->rx_buf_[0] != 0x00) {
|
||||
HELPER_LOG("Bad handshake error byte: %u", this->rx_buf_[0]);
|
||||
send_explicit_handshake_reject_(LOG_STR("Bad handshake error byte"));
|
||||
} else if (frame[0] != 0x00) {
|
||||
HELPER_LOG("Bad handshake error byte: %u", frame[0]);
|
||||
send_explicit_handshake_reject_("Bad handshake error byte");
|
||||
return APIError::BAD_HANDSHAKE_ERROR_BYTE;
|
||||
}
|
||||
|
||||
NoiseBuffer mbuf;
|
||||
noise_buffer_init(mbuf);
|
||||
noise_buffer_set_input(mbuf, this->rx_buf_.data() + 1, this->rx_buf_.size() - 1);
|
||||
noise_buffer_set_input(mbuf, frame.data() + 1, frame.size() - 1);
|
||||
err = noise_handshakestate_read_message(handshake_, &mbuf, nullptr);
|
||||
if (err != 0) {
|
||||
// Special handling for MAC failure
|
||||
send_explicit_handshake_reject_(err == NOISE_ERROR_MAC_FAILURE ? LOG_STR("Handshake MAC failure")
|
||||
: LOG_STR("Handshake error"));
|
||||
return handle_noise_error_(err, LOG_STR("noise_handshakestate_read_message"),
|
||||
APIError::HANDSHAKESTATE_READ_FAILED);
|
||||
send_explicit_handshake_reject_(err == NOISE_ERROR_MAC_FAILURE ? "Handshake MAC failure" : "Handshake error");
|
||||
return handle_noise_error_(err, "noise_handshakestate_read_message", APIError::HANDSHAKESTATE_READ_FAILED);
|
||||
}
|
||||
|
||||
aerr = check_handshake_finished_();
|
||||
@@ -319,8 +307,8 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
noise_buffer_set_output(mbuf, buffer + 1, sizeof(buffer) - 1);
|
||||
|
||||
err = noise_handshakestate_write_message(handshake_, &mbuf, nullptr);
|
||||
APIError aerr_write = handle_noise_error_(err, LOG_STR("noise_handshakestate_write_message"),
|
||||
APIError::HANDSHAKESTATE_WRITE_FAILED);
|
||||
APIError aerr_write =
|
||||
handle_noise_error_(err, "noise_handshakestate_write_message", APIError::HANDSHAKESTATE_WRITE_FAILED);
|
||||
if (aerr_write != APIError::OK)
|
||||
return aerr_write;
|
||||
buffer[0] = 0x00; // success
|
||||
@@ -343,66 +331,51 @@ APIError APINoiseFrameHelper::state_action_() {
|
||||
}
|
||||
return APIError::OK;
|
||||
}
|
||||
void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reason) {
|
||||
#ifdef USE_STORE_LOG_STR_IN_FLASH
|
||||
// On ESP8266 with flash strings, we need to use PROGMEM-aware functions
|
||||
size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
|
||||
size_t data_size = reason_len + 1;
|
||||
auto data = std::make_unique<uint8_t[]>(data_size);
|
||||
data[0] = 0x01; // failure
|
||||
|
||||
// Copy error message from PROGMEM
|
||||
if (reason_len > 0) {
|
||||
memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
|
||||
}
|
||||
#else
|
||||
// Normal memory access
|
||||
const char *reason_str = LOG_STR_ARG(reason);
|
||||
size_t reason_len = strlen(reason_str);
|
||||
size_t data_size = reason_len + 1;
|
||||
auto data = std::make_unique<uint8_t[]>(data_size);
|
||||
void APINoiseFrameHelper::send_explicit_handshake_reject_(const std::string &reason) {
|
||||
std::vector<uint8_t> data;
|
||||
data.resize(reason.length() + 1);
|
||||
data[0] = 0x01; // failure
|
||||
|
||||
// Copy error message in bulk
|
||||
if (reason_len > 0) {
|
||||
std::memcpy(data.get() + 1, reason_str, reason_len);
|
||||
if (!reason.empty()) {
|
||||
std::memcpy(data.data() + 1, reason.c_str(), reason.length());
|
||||
}
|
||||
#endif
|
||||
|
||||
// temporarily remove failed state
|
||||
auto orig_state = state_;
|
||||
state_ = State::EXPLICIT_REJECT;
|
||||
write_frame_(data.get(), data_size);
|
||||
write_frame_(data.data(), data.size());
|
||||
state_ = orig_state;
|
||||
}
|
||||
APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
APIError aerr = this->state_action_();
|
||||
int err;
|
||||
APIError aerr;
|
||||
aerr = state_action_();
|
||||
if (aerr != APIError::OK) {
|
||||
return aerr;
|
||||
}
|
||||
|
||||
if (this->state_ != State::DATA) {
|
||||
if (state_ != State::DATA) {
|
||||
return APIError::WOULD_BLOCK;
|
||||
}
|
||||
|
||||
aerr = this->try_read_frame_();
|
||||
std::vector<uint8_t> frame;
|
||||
aerr = try_read_frame_(&frame);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
NoiseBuffer mbuf;
|
||||
noise_buffer_init(mbuf);
|
||||
noise_buffer_set_inout(mbuf, this->rx_buf_.data(), this->rx_buf_.size(), this->rx_buf_.size());
|
||||
int err = noise_cipherstate_decrypt(this->recv_cipher_, &mbuf);
|
||||
APIError decrypt_err =
|
||||
handle_noise_error_(err, LOG_STR("noise_cipherstate_decrypt"), APIError::CIPHERSTATE_DECRYPT_FAILED);
|
||||
if (decrypt_err != APIError::OK) {
|
||||
noise_buffer_set_inout(mbuf, frame.data(), frame.size(), frame.size());
|
||||
err = noise_cipherstate_decrypt(recv_cipher_, &mbuf);
|
||||
APIError decrypt_err = handle_noise_error_(err, "noise_cipherstate_decrypt", APIError::CIPHERSTATE_DECRYPT_FAILED);
|
||||
if (decrypt_err != APIError::OK)
|
||||
return decrypt_err;
|
||||
}
|
||||
|
||||
uint16_t msg_size = mbuf.size;
|
||||
uint8_t *msg_data = this->rx_buf_.data();
|
||||
uint8_t *msg_data = frame.data();
|
||||
if (msg_size < 4) {
|
||||
this->state_ = State::FAILED;
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Bad data packet: size %d too short", msg_size);
|
||||
return APIError::BAD_DATA_PACKET;
|
||||
}
|
||||
@@ -410,12 +383,13 @@ APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
uint16_t type = (((uint16_t) msg_data[0]) << 8) | msg_data[1];
|
||||
uint16_t data_len = (((uint16_t) msg_data[2]) << 8) | msg_data[3];
|
||||
if (data_len > msg_size - 4) {
|
||||
this->state_ = State::FAILED;
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Bad data packet: data_len %u greater than msg_size %u", data_len, msg_size);
|
||||
return APIError::BAD_DATA_PACKET;
|
||||
}
|
||||
|
||||
buffer->data = msg_data + 4; // Skip 4-byte header (type + length)
|
||||
buffer->container = std::move(frame);
|
||||
buffer->data_offset = 4;
|
||||
buffer->data_len = data_len;
|
||||
buffer->type = type;
|
||||
return APIError::OK;
|
||||
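read_packet() above pulls a big-endian message type and payload length out of the first four decrypted bytes and rejects lengths that exceed what was actually received. A standalone sketch of that header parse:

```cpp
#include <cstdint>
#include <cstdio>

struct Parsed {
  uint16_t type;
  uint16_t data_len;
  bool ok;
};

Parsed parse_header(const uint8_t *msg, uint16_t msg_size) {
  if (msg_size < 4)
    return {0, 0, false};  // too short to even hold the header
  uint16_t type = static_cast<uint16_t>((msg[0] << 8) | msg[1]);
  uint16_t data_len = static_cast<uint16_t>((msg[2] << 8) | msg[3]);
  if (data_len > msg_size - 4)
    return {0, 0, false};  // declared payload longer than what was received
  return {type, data_len, true};
}

int main() {
  const uint8_t msg[] = {0x00, 0x2A, 0x00, 0x02, 0xDE, 0xAD};
  Parsed p = parse_header(msg, sizeof(msg));
  std::printf("type=%u len=%u ok=%d\n", p.type, p.data_len, p.ok);  // type=42 len=2 ok=1
  return 0;
}
```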
@@ -442,7 +416,8 @@ APIError APINoiseFrameHelper::write_protobuf_packets(ProtoWriteBuffer buffer, st
|
||||
return APIError::OK;
|
||||
}
|
||||
|
||||
uint8_t *buffer_data = buffer.get_buffer()->data();
|
||||
std::vector<uint8_t> *raw_buffer = buffer.get_buffer();
|
||||
uint8_t *buffer_data = raw_buffer->data(); // Cache buffer pointer
|
||||
|
||||
this->reusable_iovs_.clear();
|
||||
this->reusable_iovs_.reserve(packets.size());
|
||||
@@ -475,8 +450,7 @@ APIError APINoiseFrameHelper::write_protobuf_packets(ProtoWriteBuffer buffer, st
|
||||
4 + packet.payload_size + frame_footer_size_);
|
||||
|
||||
int err = noise_cipherstate_encrypt(send_cipher_, &mbuf);
|
||||
APIError aerr =
|
||||
handle_noise_error_(err, LOG_STR("noise_cipherstate_encrypt"), APIError::CIPHERSTATE_ENCRYPT_FAILED);
|
||||
APIError aerr = handle_noise_error_(err, "noise_cipherstate_encrypt", APIError::CIPHERSTATE_ENCRYPT_FAILED);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
@@ -530,28 +504,25 @@ APIError APINoiseFrameHelper::init_handshake_() {
|
||||
nid_.modifier_ids[0] = NOISE_MODIFIER_PSK0;
|
||||
|
||||
err = noise_handshakestate_new_by_id(&handshake_, &nid_, NOISE_ROLE_RESPONDER);
|
||||
APIError aerr =
|
||||
handle_noise_error_(err, LOG_STR("noise_handshakestate_new_by_id"), APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
APIError aerr = handle_noise_error_(err, "noise_handshakestate_new_by_id", APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
const auto &psk = this->ctx_.get_psk();
|
||||
const auto &psk = ctx_->get_psk();
|
||||
err = noise_handshakestate_set_pre_shared_key(handshake_, psk.data(), psk.size());
|
||||
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_set_pre_shared_key"),
|
||||
APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
aerr = handle_noise_error_(err, "noise_handshakestate_set_pre_shared_key", APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
err = noise_handshakestate_set_prologue(handshake_, prologue_.data(), prologue_.size());
|
||||
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_set_prologue"), APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
aerr = handle_noise_error_(err, "noise_handshakestate_set_prologue", APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
// set_prologue copies it into handshakestate, so we can get rid of it now
|
||||
// Use swap idiom to actually release memory (= {} only clears size, not capacity)
|
||||
std::vector<uint8_t>().swap(prologue_);
|
||||
prologue_ = {};
|
||||
|
||||
err = noise_handshakestate_start(handshake_);
|
||||
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_start"), APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
aerr = handle_noise_error_(err, "noise_handshakestate_start", APIError::HANDSHAKESTATE_SETUP_FAILED);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
return APIError::OK;
|
||||
@@ -569,8 +540,7 @@ APIError APINoiseFrameHelper::check_handshake_finished_() {
|
||||
return APIError::HANDSHAKESTATE_BAD_STATE;
|
||||
}
|
||||
int err = noise_handshakestate_split(handshake_, &send_cipher_, &recv_cipher_);
|
||||
APIError aerr =
|
||||
handle_noise_error_(err, LOG_STR("noise_handshakestate_split"), APIError::HANDSHAKESTATE_SPLIT_FAILED);
|
||||
APIError aerr = handle_noise_error_(err, "noise_handshakestate_split", APIError::HANDSHAKESTATE_SPLIT_FAILED);
|
||||
if (aerr != APIError::OK)
|
||||
return aerr;
|
||||
|
||||
|
||||
@@ -7,10 +7,11 @@

namespace esphome::api {

class APINoiseFrameHelper final : public APIFrameHelper {
class APINoiseFrameHelper : public APIFrameHelper {
 public:
APINoiseFrameHelper(std::unique_ptr<socket::Socket> socket, APINoiseContext &ctx, const ClientInfo *client_info)
: APIFrameHelper(std::move(socket), client_info), ctx_(ctx) {
APINoiseFrameHelper(std::unique_ptr<socket::Socket> socket, std::shared_ptr<APINoiseContext> ctx,
const ClientInfo *client_info)
: APIFrameHelper(std::move(socket), client_info), ctx_(std::move(ctx)) {
// Noise header structure:
// Pos 0: indicator (0x01)
// Pos 1-2: encrypted payload size (16-bit big-endian)
@@ -24,24 +25,28 @@ class APINoiseFrameHelper final : public APIFrameHelper {
APIError read_packet(ReadPacketBuffer *buffer) override;
APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) override;
APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override;
// Get the frame header padding required by this protocol
uint8_t frame_header_padding() override { return frame_header_padding_; }
// Get the frame footer size required by this protocol
uint8_t frame_footer_size() override { return frame_footer_size_; }

 protected:
APIError state_action_();
APIError try_read_frame_();
APIError try_read_frame_(std::vector<uint8_t> *frame);
APIError write_frame_(const uint8_t *data, uint16_t len);
APIError init_handshake_();
APIError check_handshake_finished_();
void send_explicit_handshake_reject_(const LogString *reason);
void send_explicit_handshake_reject_(const std::string &reason);
APIError handle_handshake_frame_error_(APIError aerr);
APIError handle_noise_error_(int err, const LogString *func_name, APIError api_err);
APIError handle_noise_error_(int err, const char *func_name, APIError api_err);

// Pointers first (4 bytes each)
NoiseHandshakeState *handshake_{nullptr};
NoiseCipherState *send_cipher_{nullptr};
NoiseCipherState *recv_cipher_{nullptr};

// Reference to noise context (4 bytes on 32-bit)
APINoiseContext &ctx_;
// Shared pointer (8 bytes on 32-bit = 4 bytes control block pointer + 4 bytes object pointer)
std::shared_ptr<APINoiseContext> ctx_;

// Vector (12 bytes on 32-bit)
std::vector<uint8_t> prologue_;

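For reference, the 3-byte header layout described in the comment above (indicator byte 0x01 followed by a 16-bit big-endian payload size) can be decoded in a few lines. A hedged sketch, illustrative only and not the helper's actual parsing code:

```cpp
#include <cstddef>
#include <cstdint>
#include <optional>

// Decode the noise frame header: [0x01][size hi][size lo].
// Returns the encrypted payload size, or std::nullopt on a bad indicator / short buffer.
std::optional<uint16_t> decode_noise_header(const uint8_t *header, size_t len) {
  if (len < 3 || header[0] != 0x01)
    return std::nullopt;
  return static_cast<uint16_t>((static_cast<uint16_t>(header[1]) << 8) | header[2]);  // big-endian
}
```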
@@ -10,24 +10,11 @@
#include <cstring>
#include <cinttypes>

#ifdef USE_ESP8266
#include <pgmspace.h>
#endif

namespace esphome::api {

static const char *const TAG = "api.plaintext";

#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
#define HELPER_LOG(msg, ...) \
do { \
char peername__[socket::PEERNAME_MAX_LEN]; \
this->socket_->getpeername_to(peername__); \
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), peername__, ##__VA_ARGS__); \
} while (0)
#else
#define HELPER_LOG(msg, ...) ((void) 0)
#endif
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)

#ifdef HELPER_LOG_PACKETS
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
@@ -55,13 +42,21 @@ APIError APIPlaintextFrameHelper::loop() {
return APIFrameHelper::loop();
}

/** Read a packet into the rx_buf_.
/** Read a packet into the rx_buf_. If successful, stores frame data in the frame parameter
 *
 * @param frame: The struct to hold the frame information in.
 * msg: store the parsed frame in that struct
 *
 * @return See APIError
 *
 * error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
 */
APIError APIPlaintextFrameHelper::try_read_frame_() {
APIError APIPlaintextFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
if (frame == nullptr) {
HELPER_LOG("Bad argument for try_read_frame_");
return APIError::BAD_ARG;
}

// read header
while (!rx_header_parsed_) {
// Now that we know when the socket is ready, we can read up to 3 bytes
@@ -123,10 +118,10 @@ APIError APIPlaintextFrameHelper::try_read_frame_() {
continue;
}

if (msg_size_varint->as_uint32() > MAX_MESSAGE_SIZE) {
if (msg_size_varint->as_uint32() > std::numeric_limits<uint16_t>::max()) {
state_ = State::FAILED;
HELPER_LOG("Bad packet: message size %" PRIu32 " exceeds maximum %u", msg_size_varint->as_uint32(),
MAX_MESSAGE_SIZE);
std::numeric_limits<uint16_t>::max());
return APIError::BAD_DATA_PACKET;
}
rx_header_parsed_len_ = msg_size_varint->as_uint16();
@@ -150,9 +145,9 @@ APIError APIPlaintextFrameHelper::try_read_frame_() {
}
// header reading done

// Reserve space for body
if (this->rx_buf_.size() != this->rx_header_parsed_len_) {
this->rx_buf_.resize(this->rx_header_parsed_len_);
// reserve space for body
if (rx_buf_.size() != rx_header_parsed_len_) {
rx_buf_.resize(rx_header_parsed_len_);
}

if (rx_buf_len_ < rx_header_parsed_len_) {
@@ -170,22 +165,24 @@ APIError APIPlaintextFrameHelper::try_read_frame_() {
}
}

LOG_PACKET_RECEIVED(this->rx_buf_);

// Clear state for next frame (rx_buf_ still contains data for caller)
this->rx_buf_len_ = 0;
this->rx_header_buf_pos_ = 0;
this->rx_header_parsed_ = false;

LOG_PACKET_RECEIVED(rx_buf_);
*frame = std::move(rx_buf_);
// consume msg
rx_buf_ = {};
rx_buf_len_ = 0;
rx_header_buf_pos_ = 0;
rx_header_parsed_ = false;
return APIError::OK;
}

APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
if (this->state_ != State::DATA) {
APIError aerr;

if (state_ != State::DATA) {
return APIError::WOULD_BLOCK;
}

APIError aerr = this->try_read_frame_();
std::vector<uint8_t> frame;
aerr = try_read_frame_(&frame);
if (aerr != APIError::OK) {
if (aerr == APIError::BAD_INDICATOR) {
// Make sure to tell the remote that we don't
@@ -200,27 +197,19 @@ APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
// We must send at least 3 bytes to be read, so we add
// a message after the indicator byte to ensures its long
// enough and can aid in debugging.
static constexpr uint8_t INDICATOR_MSG_SIZE = 19;
#ifdef USE_ESP8266
static const char MSG_PROGMEM[] PROGMEM = "\x00"
"Bad indicator byte";
char msg[INDICATOR_MSG_SIZE];
memcpy_P(msg, MSG_PROGMEM, INDICATOR_MSG_SIZE);
const char msg[] = "\x00"
"Bad indicator byte";
iov[0].iov_base = (void *) msg;
#else
static const char MSG[] = "\x00"
"Bad indicator byte";
iov[0].iov_base = (void *) MSG;
#endif
iov[0].iov_len = INDICATOR_MSG_SIZE;
this->write_raw_(iov, 1, INDICATOR_MSG_SIZE);
iov[0].iov_len = 19;
this->write_raw_(iov, 1, 19);
}
return aerr;
}

buffer->data = this->rx_buf_.data();
buffer->data_len = this->rx_header_parsed_len_;
buffer->type = this->rx_header_parsed_type_;
buffer->container = std::move(frame);
buffer->data_offset = 0;
buffer->data_len = rx_header_parsed_len_;
buffer->type = rx_header_parsed_type_;
return APIError::OK;
}
APIError APIPlaintextFrameHelper::write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) {
@@ -237,7 +226,8 @@ APIError APIPlaintextFrameHelper::write_protobuf_packets(ProtoWriteBuffer buffer
return APIError::OK;
}

uint8_t *buffer_data = buffer.get_buffer()->data();
std::vector<uint8_t> *raw_buffer = buffer.get_buffer();
uint8_t *buffer_data = raw_buffer->data(); // Cache buffer pointer

this->reusable_iovs_.clear();
this->reusable_iovs_.reserve(packets.size());

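The error path above hands the helper an iovec plus a total length, and the same scatter-gather idea is what lets `write_protobuf_packets` batch several packets into one write. A standalone POSIX sketch of the pattern (an assumed example, not the helper's actual `write_raw_` implementation):

```cpp
#include <sys/uio.h>   // writev
#include <unistd.h>
#include <cstddef>

// Write a 1-byte indicator followed by a payload in a single writev() call,
// mirroring the "indicator + message" framing used for the error response above.
ssize_t write_frame(int fd, const void *payload, size_t payload_len) {
  const unsigned char indicator = 0x00;
  struct iovec iov[2];
  iov[0].iov_base = (void *) &indicator;
  iov[0].iov_len = 1;
  iov[1].iov_base = (void *) payload;
  iov[1].iov_len = payload_len;
  return writev(fd, iov, 2);  // kernel gathers both buffers; no intermediate copy needed
}
```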
@@ -5,7 +5,7 @@

namespace esphome::api {

class APIPlaintextFrameHelper final : public APIFrameHelper {
class APIPlaintextFrameHelper : public APIFrameHelper {
 public:
APIPlaintextFrameHelper(std::unique_ptr<socket::Socket> socket, const ClientInfo *client_info)
: APIFrameHelper(std::move(socket), client_info) {
@@ -22,9 +22,12 @@ class APIPlaintextFrameHelper final : public APIFrameHelper {
APIError read_packet(ReadPacketBuffer *buffer) override;
APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) override;
APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override;
uint8_t frame_header_padding() override { return frame_header_padding_; }
// Get the frame footer size required by this protocol
uint8_t frame_footer_size() override { return frame_footer_size_; }

 protected:
APIError try_read_frame_();
APIError try_read_frame_(std::vector<uint8_t> *frame);

// Group 2-byte aligned types
uint16_t rx_header_parsed_type_ = 0;

@@ -28,56 +28,4 @@ extend google.protobuf.FieldOptions {
optional string field_ifdef = 1042;
optional uint32 fixed_array_size = 50007;
optional bool no_zero_copy = 50008 [default=false];
optional bool fixed_array_skip_zero = 50009 [default=false];
optional string fixed_array_size_define = 50010;
optional string fixed_array_with_length_define = 50011;

// pointer_to_buffer: Use pointer instead of array for fixed-size byte fields
// When set, the field will be declared as a pointer (const uint8_t *data)
// instead of an array (uint8_t data[N]). This allows zero-copy on decode
// by pointing directly to the protobuf buffer. The buffer must remain valid
// until the message is processed (which is guaranteed for stack-allocated messages).
optional bool pointer_to_buffer = 50012 [default=false];

// container_pointer: Zero-copy optimization for repeated fields.
//
// When container_pointer is set on a repeated field, the generated message will
// store a pointer to an existing container instead of copying the data into the
// message's own repeated field. This eliminates heap allocations and improves performance.
//
// Requirements for safe usage:
// 1. The source container must remain valid until the message is encoded
// 2. Messages must be encoded immediately (which ESPHome does by default)
// 3. The container type must match the field type exactly
//
// Supported container types:
// - "std::vector<T>" for most repeated fields
// - "std::set<T>" for unique/sorted data
// - Full type specification required for enums (e.g., "std::set<climate::ClimateMode>")
//
// Example usage in .proto file:
// repeated string supported_modes = 12 [(container_pointer) = "std::set"];
// repeated ColorMode color_modes = 13 [(container_pointer) = "std::set<light::ColorMode>"];
//
// The corresponding C++ code must provide const reference access to a container
// that matches the specified type and remains valid during message encoding.
// This is typically done through methods returning const T& or special accessor
// methods like get_options() or supported_modes_for_api_().
optional string container_pointer = 50001;

// fixed_vector: Use FixedVector instead of std::vector for repeated fields
// When set, the repeated field will use FixedVector<T> which requires calling
// init(size) before adding elements. This eliminates std::vector template overhead
// and is ideal when the exact size is known before populating the array.
optional bool fixed_vector = 50013 [default=false];

// container_pointer_no_template: Use a non-template container type for repeated fields
// Similar to container_pointer, but for containers that don't take template parameters.
// The container type is used as-is without appending element type.
// The container must have:
// - begin() and end() methods returning iterators
// - empty() method
// Example: [(container_pointer_no_template) = "light::ColorModeMask"]
// generates: const light::ColorModeMask *supported_color_modes{};
optional string container_pointer_no_template = 50014;
}

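To make the two zero-copy options above concrete, here is a hedged sketch of what a message using them might look like from the component side. All type, field, and method names below (`ExampleResponse`, `ExampleComponent`, `get_options()`, ...) are illustrative assumptions, not the actual generated API:

```cpp
#include <cstddef>
#include <cstdint>
#include <set>
#include <string>

// Sketch of a message with one pointer_to_buffer-style field and one
// container_pointer-style field: both hold pointers into caller-owned storage
// instead of copying, so they must be encoded while that storage is still alive.
struct ExampleResponse {
  const uint8_t *payload{nullptr};                // pointer_to_buffer: points into the decode buffer
  size_t payload_len{0};
  const std::set<std::string> *options{nullptr};  // container_pointer = "std::set"
};

struct ExampleComponent {
  std::set<std::string> options_{"low", "medium", "high"};
  // Const reference access to a container of the exact declared type.
  const std::set<std::string> &get_options() const { return options_; }
};

// Fill and (conceptually) encode immediately, while `comp` and `buf` are still valid,
// so neither stored pointer can outlive the storage it refers to.
void fill_message(const ExampleComponent &comp, const uint8_t *buf, size_t len, ExampleResponse &msg) {
  msg.payload = buf;                  // zero-copy: no heap allocation for the bytes
  msg.payload_len = len;
  msg.options = &comp.get_options();  // zero-copy: message borrows the component's container
}
```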
(File diff suppressed because it is too large)
(File diff suppressed because it is too large)
@@ -66,7 +66,7 @@ static void dump_field(std::string &out, const char *field_name, float value, in
|
||||
static void dump_field(std::string &out, const char *field_name, uint64_t value, int indent = 2) {
|
||||
char buffer[64];
|
||||
append_field_prefix(out, field_name, indent);
|
||||
snprintf(buffer, 64, "%" PRIu64, value);
|
||||
snprintf(buffer, 64, "%llu", value);
|
||||
append_with_newline(out, buffer);
|
||||
}
|
||||
|
||||
@@ -88,12 +88,6 @@ static void dump_field(std::string &out, const char *field_name, StringRef value
|
||||
out.append("\n");
|
||||
}
|
||||
|
||||
static void dump_field(std::string &out, const char *field_name, const char *value, int indent = 2) {
|
||||
append_field_prefix(out, field_name, indent);
|
||||
out.append("'").append(value).append("'");
|
||||
out.append("\n");
|
||||
}
|
||||
|
||||
template<typename T> static void dump_field(std::string &out, const char *field_name, T value, int indent = 2) {
|
||||
append_field_prefix(out, field_name, indent);
|
||||
out.append(proto_enum_to_string<T>(value));
|
||||
@@ -179,8 +173,6 @@ template<> const char *proto_enum_to_string<enums::SensorStateClass>(enums::Sens
|
||||
return "STATE_CLASS_TOTAL_INCREASING";
|
||||
case enums::STATE_CLASS_TOTAL:
|
||||
return "STATE_CLASS_TOTAL";
|
||||
case enums::STATE_CLASS_MEASUREMENT_ANGLE:
|
||||
return "STATE_CLASS_MEASUREMENT_ANGLE";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
@@ -208,7 +200,7 @@ template<> const char *proto_enum_to_string<enums::LogLevel>(enums::LogLevel val
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
#ifdef USE_API_SERVICES
|
||||
template<> const char *proto_enum_to_string<enums::ServiceArgType>(enums::ServiceArgType value) {
|
||||
switch (value) {
|
||||
case enums::SERVICE_ARG_TYPE_BOOL:
|
||||
@@ -231,20 +223,6 @@ template<> const char *proto_enum_to_string<enums::ServiceArgType>(enums::Servic
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
template<> const char *proto_enum_to_string<enums::SupportsResponseType>(enums::SupportsResponseType value) {
|
||||
switch (value) {
|
||||
case enums::SUPPORTS_RESPONSE_NONE:
|
||||
return "SUPPORTS_RESPONSE_NONE";
|
||||
case enums::SUPPORTS_RESPONSE_OPTIONAL:
|
||||
return "SUPPORTS_RESPONSE_OPTIONAL";
|
||||
case enums::SUPPORTS_RESPONSE_ONLY:
|
||||
return "SUPPORTS_RESPONSE_ONLY";
|
||||
case enums::SUPPORTS_RESPONSE_STATUS:
|
||||
return "SUPPORTS_RESPONSE_STATUS";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_CLIMATE
|
||||
template<> const char *proto_enum_to_string<enums::ClimateMode>(enums::ClimateMode value) {
|
||||
@@ -348,47 +326,6 @@ template<> const char *proto_enum_to_string<enums::ClimatePreset>(enums::Climate
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_WATER_HEATER
|
||||
template<> const char *proto_enum_to_string<enums::WaterHeaterMode>(enums::WaterHeaterMode value) {
|
||||
switch (value) {
|
||||
case enums::WATER_HEATER_MODE_OFF:
|
||||
return "WATER_HEATER_MODE_OFF";
|
||||
case enums::WATER_HEATER_MODE_ECO:
|
||||
return "WATER_HEATER_MODE_ECO";
|
||||
case enums::WATER_HEATER_MODE_ELECTRIC:
|
||||
return "WATER_HEATER_MODE_ELECTRIC";
|
||||
case enums::WATER_HEATER_MODE_PERFORMANCE:
|
||||
return "WATER_HEATER_MODE_PERFORMANCE";
|
||||
case enums::WATER_HEATER_MODE_HIGH_DEMAND:
|
||||
return "WATER_HEATER_MODE_HIGH_DEMAND";
|
||||
case enums::WATER_HEATER_MODE_HEAT_PUMP:
|
||||
return "WATER_HEATER_MODE_HEAT_PUMP";
|
||||
case enums::WATER_HEATER_MODE_GAS:
|
||||
return "WATER_HEATER_MODE_GAS";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
#endif
|
||||
template<>
|
||||
const char *proto_enum_to_string<enums::WaterHeaterCommandHasField>(enums::WaterHeaterCommandHasField value) {
|
||||
switch (value) {
|
||||
case enums::WATER_HEATER_COMMAND_HAS_NONE:
|
||||
return "WATER_HEATER_COMMAND_HAS_NONE";
|
||||
case enums::WATER_HEATER_COMMAND_HAS_MODE:
|
||||
return "WATER_HEATER_COMMAND_HAS_MODE";
|
||||
case enums::WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE:
|
||||
return "WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE";
|
||||
case enums::WATER_HEATER_COMMAND_HAS_STATE:
|
||||
return "WATER_HEATER_COMMAND_HAS_STATE";
|
||||
case enums::WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_LOW:
|
||||
return "WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_LOW";
|
||||
case enums::WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH:
|
||||
return "WATER_HEATER_COMMAND_HAS_TARGET_TEMPERATURE_HIGH";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
#ifdef USE_NUMBER
|
||||
template<> const char *proto_enum_to_string<enums::NumberMode>(enums::NumberMode value) {
|
||||
switch (value) {
|
||||
@@ -446,12 +383,6 @@ template<> const char *proto_enum_to_string<enums::MediaPlayerState>(enums::Medi
|
||||
return "MEDIA_PLAYER_STATE_PLAYING";
|
||||
case enums::MEDIA_PLAYER_STATE_PAUSED:
|
||||
return "MEDIA_PLAYER_STATE_PAUSED";
|
||||
case enums::MEDIA_PLAYER_STATE_ANNOUNCING:
|
||||
return "MEDIA_PLAYER_STATE_ANNOUNCING";
|
||||
case enums::MEDIA_PLAYER_STATE_OFF:
|
||||
return "MEDIA_PLAYER_STATE_OFF";
|
||||
case enums::MEDIA_PLAYER_STATE_ON:
|
||||
return "MEDIA_PLAYER_STATE_ON";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
@@ -468,24 +399,6 @@ template<> const char *proto_enum_to_string<enums::MediaPlayerCommand>(enums::Me
|
||||
return "MEDIA_PLAYER_COMMAND_MUTE";
|
||||
case enums::MEDIA_PLAYER_COMMAND_UNMUTE:
|
||||
return "MEDIA_PLAYER_COMMAND_UNMUTE";
|
||||
case enums::MEDIA_PLAYER_COMMAND_TOGGLE:
|
||||
return "MEDIA_PLAYER_COMMAND_TOGGLE";
|
||||
case enums::MEDIA_PLAYER_COMMAND_VOLUME_UP:
|
||||
return "MEDIA_PLAYER_COMMAND_VOLUME_UP";
|
||||
case enums::MEDIA_PLAYER_COMMAND_VOLUME_DOWN:
|
||||
return "MEDIA_PLAYER_COMMAND_VOLUME_DOWN";
|
||||
case enums::MEDIA_PLAYER_COMMAND_ENQUEUE:
|
||||
return "MEDIA_PLAYER_COMMAND_ENQUEUE";
|
||||
case enums::MEDIA_PLAYER_COMMAND_REPEAT_ONE:
|
||||
return "MEDIA_PLAYER_COMMAND_REPEAT_ONE";
|
||||
case enums::MEDIA_PLAYER_COMMAND_REPEAT_OFF:
|
||||
return "MEDIA_PLAYER_COMMAND_REPEAT_OFF";
|
||||
case enums::MEDIA_PLAYER_COMMAND_CLEAR_PLAYLIST:
|
||||
return "MEDIA_PLAYER_COMMAND_CLEAR_PLAYLIST";
|
||||
case enums::MEDIA_PLAYER_COMMAND_TURN_ON:
|
||||
return "MEDIA_PLAYER_COMMAND_TURN_ON";
|
||||
case enums::MEDIA_PLAYER_COMMAND_TURN_OFF:
|
||||
return "MEDIA_PLAYER_COMMAND_TURN_OFF";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
@@ -718,26 +631,10 @@ template<> const char *proto_enum_to_string<enums::UpdateCommand>(enums::UpdateC
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
template<> const char *proto_enum_to_string<enums::ZWaveProxyRequestType>(enums::ZWaveProxyRequestType value) {
|
||||
switch (value) {
|
||||
case enums::ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE:
|
||||
return "ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE";
|
||||
case enums::ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE:
|
||||
return "ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE";
|
||||
case enums::ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE:
|
||||
return "ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
void HelloRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HelloRequest");
|
||||
out.append(" client_info: ");
|
||||
out.append(format_hex_pretty(this->client_info, this->client_info_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "client_info", this->client_info);
|
||||
dump_field(out, "api_version_major", this->api_version_major);
|
||||
dump_field(out, "api_version_minor", this->api_version_minor);
|
||||
}
|
||||
@@ -748,18 +645,8 @@ void HelloResponse::dump_to(std::string &out) const {
|
||||
dump_field(out, "server_info", this->server_info_ref_);
|
||||
dump_field(out, "name", this->name_ref_);
|
||||
}
|
||||
#ifdef USE_API_PASSWORD
|
||||
void AuthenticationRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "AuthenticationRequest");
|
||||
out.append(" password: ");
|
||||
out.append(format_hex_pretty(this->password, this->password_len));
|
||||
out.append("\n");
|
||||
}
|
||||
void AuthenticationResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "AuthenticationResponse");
|
||||
dump_field(out, "invalid_password", this->invalid_password);
|
||||
}
|
||||
#endif
|
||||
void ConnectRequest::dump_to(std::string &out) const { dump_field(out, "password", this->password); }
|
||||
void ConnectResponse::dump_to(std::string &out) const { dump_field(out, "invalid_password", this->invalid_password); }
|
||||
void DisconnectRequest::dump_to(std::string &out) const { out.append("DisconnectRequest {}"); }
|
||||
void DisconnectResponse::dump_to(std::string &out) const { out.append("DisconnectResponse {}"); }
|
||||
void PingRequest::dump_to(std::string &out) const { out.append("PingRequest {}"); }
|
||||
@@ -838,12 +725,6 @@ void DeviceInfoResponse::dump_to(std::string &out) const {
|
||||
this->area.dump_to(out);
|
||||
out.append("\n");
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
dump_field(out, "zwave_proxy_feature_flags", this->zwave_proxy_feature_flags);
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
dump_field(out, "zwave_home_id", this->zwave_home_id);
|
||||
#endif
|
||||
}
|
||||
void ListEntitiesRequest::dump_to(std::string &out) const { out.append("ListEntitiesRequest {}"); }
|
||||
void ListEntitiesDoneResponse::dump_to(std::string &out) const { out.append("ListEntitiesDoneResponse {}"); }
|
||||
@@ -933,7 +814,7 @@ void ListEntitiesFanResponse::dump_to(std::string &out) const {
|
||||
dump_field(out, "icon", this->icon_ref_);
|
||||
#endif
|
||||
dump_field(out, "entity_category", static_cast<enums::EntityCategory>(this->entity_category));
|
||||
for (const auto &it : *this->supported_preset_modes) {
|
||||
for (const auto &it : this->supported_preset_modes) {
|
||||
dump_field(out, "supported_preset_modes", it, 4);
|
||||
}
|
||||
#ifdef USE_DEVICES
|
||||
@@ -964,9 +845,7 @@ void FanCommandRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "has_speed_level", this->has_speed_level);
|
||||
dump_field(out, "speed_level", this->speed_level);
|
||||
dump_field(out, "has_preset_mode", this->has_preset_mode);
|
||||
out.append(" preset_mode: ");
|
||||
out.append(format_hex_pretty(this->preset_mode, this->preset_mode_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "preset_mode", this->preset_mode);
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
@@ -978,12 +857,12 @@ void ListEntitiesLightResponse::dump_to(std::string &out) const {
|
||||
dump_field(out, "object_id", this->object_id_ref_);
|
||||
dump_field(out, "key", this->key);
|
||||
dump_field(out, "name", this->name_ref_);
|
||||
for (const auto &it : *this->supported_color_modes) {
|
||||
for (const auto &it : this->supported_color_modes) {
|
||||
dump_field(out, "supported_color_modes", static_cast<enums::ColorMode>(it), 4);
|
||||
}
|
||||
dump_field(out, "min_mireds", this->min_mireds);
|
||||
dump_field(out, "max_mireds", this->max_mireds);
|
||||
for (const auto &it : *this->effects) {
|
||||
for (const auto &it : this->effects) {
|
||||
dump_field(out, "effects", it, 4);
|
||||
}
|
||||
dump_field(out, "disabled_by_default", this->disabled_by_default);
|
||||
@@ -1042,9 +921,7 @@ void LightCommandRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "has_flash_length", this->has_flash_length);
|
||||
dump_field(out, "flash_length", this->flash_length);
|
||||
dump_field(out, "has_effect", this->has_effect);
|
||||
out.append(" effect: ");
|
||||
out.append(format_hex_pretty(this->effect, this->effect_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "effect", this->effect);
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
@@ -1156,7 +1033,7 @@ void SubscribeLogsResponse::dump_to(std::string &out) const {
|
||||
void NoiseEncryptionSetKeyRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "NoiseEncryptionSetKeyRequest");
|
||||
out.append(" key: ");
|
||||
out.append(format_hex_pretty(this->key, this->key_len));
|
||||
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->key.data()), this->key.size()));
|
||||
out.append("\n");
|
||||
}
|
||||
void NoiseEncryptionSetKeyResponse::dump_to(std::string &out) const { dump_field(out, "success", this->success); }
|
||||
@@ -1170,8 +1047,8 @@ void HomeassistantServiceMap::dump_to(std::string &out) const {
|
||||
dump_field(out, "key", this->key_ref_);
|
||||
dump_field(out, "value", this->value);
|
||||
}
|
||||
void HomeassistantActionRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeassistantActionRequest");
|
||||
void HomeassistantServiceResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeassistantServiceResponse");
|
||||
dump_field(out, "service", this->service_ref_);
|
||||
for (const auto &it : this->data) {
|
||||
out.append(" data: ");
|
||||
@@ -1189,28 +1066,6 @@ void HomeassistantActionRequest::dump_to(std::string &out) const {
|
||||
out.append("\n");
|
||||
}
|
||||
dump_field(out, "is_event", this->is_event);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
dump_field(out, "call_id", this->call_id);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
dump_field(out, "wants_response", this->wants_response);
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
dump_field(out, "response_template", this->response_template);
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
void HomeassistantActionResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeassistantActionResponse");
|
||||
dump_field(out, "call_id", this->call_id);
|
||||
dump_field(out, "success", this->success);
|
||||
dump_field(out, "error_message", this->error_message);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
out.append(" response_data: ");
|
||||
out.append(format_hex_pretty(this->response_data, this->response_data_len));
|
||||
out.append("\n");
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
@@ -1225,26 +1080,14 @@ void SubscribeHomeAssistantStateResponse::dump_to(std::string &out) const {
|
||||
}
|
||||
void HomeAssistantStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "HomeAssistantStateResponse");
|
||||
out.append(" entity_id: ");
|
||||
out.append(format_hex_pretty(this->entity_id, this->entity_id_len));
|
||||
out.append("\n");
|
||||
out.append(" state: ");
|
||||
out.append(format_hex_pretty(this->state, this->state_len));
|
||||
out.append("\n");
|
||||
out.append(" attribute: ");
|
||||
out.append(format_hex_pretty(this->attribute, this->attribute_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "entity_id", this->entity_id);
|
||||
dump_field(out, "state", this->state);
|
||||
dump_field(out, "attribute", this->attribute);
|
||||
}
|
||||
#endif
|
||||
void GetTimeRequest::dump_to(std::string &out) const { out.append("GetTimeRequest {}"); }
|
||||
void GetTimeResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "GetTimeResponse");
|
||||
dump_field(out, "epoch_seconds", this->epoch_seconds);
|
||||
out.append(" timezone: ");
|
||||
out.append(format_hex_pretty(this->timezone, this->timezone_len));
|
||||
out.append("\n");
|
||||
}
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
void GetTimeResponse::dump_to(std::string &out) const { dump_field(out, "epoch_seconds", this->epoch_seconds); }
|
||||
#ifdef USE_API_SERVICES
|
||||
void ListEntitiesServicesArgument::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ListEntitiesServicesArgument");
|
||||
dump_field(out, "name", this->name_ref_);
|
||||
@@ -1259,7 +1102,6 @@ void ListEntitiesServicesResponse::dump_to(std::string &out) const {
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
dump_field(out, "supports_response", static_cast<enums::SupportsResponseType>(this->supports_response));
|
||||
}
|
||||
void ExecuteServiceArgument::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ExecuteServiceArgument");
|
||||
@@ -1269,7 +1111,7 @@ void ExecuteServiceArgument::dump_to(std::string &out) const {
|
||||
dump_field(out, "string_", this->string_);
|
||||
dump_field(out, "int_", this->int_);
|
||||
for (const auto it : this->bool_array) {
|
||||
dump_field(out, "bool_array", static_cast<bool>(it), 4);
|
||||
dump_field(out, "bool_array", it, 4);
|
||||
}
|
||||
for (const auto &it : this->int_array) {
|
||||
dump_field(out, "int_array", it, 4);
|
||||
@@ -1289,25 +1131,6 @@ void ExecuteServiceRequest::dump_to(std::string &out) const {
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
dump_field(out, "call_id", this->call_id);
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
dump_field(out, "return_response", this->return_response);
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
void ExecuteServiceResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ExecuteServiceResponse");
|
||||
dump_field(out, "call_id", this->call_id);
|
||||
dump_field(out, "success", this->success);
|
||||
dump_field(out, "error_message", this->error_message_ref_);
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
out.append(" response_data: ");
|
||||
out.append(format_hex_pretty(this->response_data, this->response_data_len));
|
||||
out.append("\n");
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
@@ -1350,26 +1173,26 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
|
||||
dump_field(out, "name", this->name_ref_);
|
||||
dump_field(out, "supports_current_temperature", this->supports_current_temperature);
|
||||
dump_field(out, "supports_two_point_target_temperature", this->supports_two_point_target_temperature);
|
||||
for (const auto &it : *this->supported_modes) {
|
||||
for (const auto &it : this->supported_modes) {
|
||||
dump_field(out, "supported_modes", static_cast<enums::ClimateMode>(it), 4);
|
||||
}
|
||||
dump_field(out, "visual_min_temperature", this->visual_min_temperature);
|
||||
dump_field(out, "visual_max_temperature", this->visual_max_temperature);
|
||||
dump_field(out, "visual_target_temperature_step", this->visual_target_temperature_step);
|
||||
dump_field(out, "supports_action", this->supports_action);
|
||||
for (const auto &it : *this->supported_fan_modes) {
|
||||
for (const auto &it : this->supported_fan_modes) {
|
||||
dump_field(out, "supported_fan_modes", static_cast<enums::ClimateFanMode>(it), 4);
|
||||
}
|
||||
for (const auto &it : *this->supported_swing_modes) {
|
||||
for (const auto &it : this->supported_swing_modes) {
|
||||
dump_field(out, "supported_swing_modes", static_cast<enums::ClimateSwingMode>(it), 4);
|
||||
}
|
||||
for (const auto &it : *this->supported_custom_fan_modes) {
|
||||
for (const auto &it : this->supported_custom_fan_modes) {
|
||||
dump_field(out, "supported_custom_fan_modes", it, 4);
|
||||
}
|
||||
for (const auto &it : *this->supported_presets) {
|
||||
for (const auto &it : this->supported_presets) {
|
||||
dump_field(out, "supported_presets", static_cast<enums::ClimatePreset>(it), 4);
|
||||
}
|
||||
for (const auto &it : *this->supported_custom_presets) {
|
||||
for (const auto &it : this->supported_custom_presets) {
|
||||
dump_field(out, "supported_custom_presets", it, 4);
|
||||
}
|
||||
dump_field(out, "disabled_by_default", this->disabled_by_default);
|
||||
@@ -1385,7 +1208,6 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "feature_flags", this->feature_flags);
|
||||
}
|
||||
void ClimateStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ClimateStateResponse");
|
||||
@@ -1423,15 +1245,11 @@ void ClimateCommandRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "has_swing_mode", this->has_swing_mode);
|
||||
dump_field(out, "swing_mode", static_cast<enums::ClimateSwingMode>(this->swing_mode));
|
||||
dump_field(out, "has_custom_fan_mode", this->has_custom_fan_mode);
|
||||
out.append(" custom_fan_mode: ");
|
||||
out.append(format_hex_pretty(this->custom_fan_mode, this->custom_fan_mode_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "custom_fan_mode", this->custom_fan_mode);
|
||||
dump_field(out, "has_preset", this->has_preset);
|
||||
dump_field(out, "preset", static_cast<enums::ClimatePreset>(this->preset));
|
||||
dump_field(out, "has_custom_preset", this->has_custom_preset);
|
||||
out.append(" custom_preset: ");
|
||||
out.append(format_hex_pretty(this->custom_preset, this->custom_preset_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "custom_preset", this->custom_preset);
|
||||
dump_field(out, "has_target_humidity", this->has_target_humidity);
|
||||
dump_field(out, "target_humidity", this->target_humidity);
|
||||
#ifdef USE_DEVICES
|
||||
@@ -1439,55 +1257,6 @@ void ClimateCommandRequest::dump_to(std::string &out) const {
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_WATER_HEATER
|
||||
void ListEntitiesWaterHeaterResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ListEntitiesWaterHeaterResponse");
|
||||
dump_field(out, "object_id", this->object_id_ref_);
|
||||
dump_field(out, "key", this->key);
|
||||
dump_field(out, "name", this->name_ref_);
|
||||
#ifdef USE_ENTITY_ICON
|
||||
dump_field(out, "icon", this->icon_ref_);
|
||||
#endif
|
||||
dump_field(out, "disabled_by_default", this->disabled_by_default);
|
||||
dump_field(out, "entity_category", static_cast<enums::EntityCategory>(this->entity_category));
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "min_temperature", this->min_temperature);
|
||||
dump_field(out, "max_temperature", this->max_temperature);
|
||||
dump_field(out, "target_temperature_step", this->target_temperature_step);
|
||||
for (const auto &it : *this->supported_modes) {
|
||||
dump_field(out, "supported_modes", static_cast<enums::WaterHeaterMode>(it), 4);
|
||||
}
|
||||
dump_field(out, "supported_features", this->supported_features);
|
||||
}
|
||||
void WaterHeaterStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "WaterHeaterStateResponse");
|
||||
dump_field(out, "key", this->key);
|
||||
dump_field(out, "current_temperature", this->current_temperature);
|
||||
dump_field(out, "target_temperature", this->target_temperature);
|
||||
dump_field(out, "mode", static_cast<enums::WaterHeaterMode>(this->mode));
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "state", this->state);
|
||||
dump_field(out, "target_temperature_low", this->target_temperature_low);
|
||||
dump_field(out, "target_temperature_high", this->target_temperature_high);
|
||||
}
|
||||
void WaterHeaterCommandRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "WaterHeaterCommandRequest");
|
||||
dump_field(out, "key", this->key);
|
||||
dump_field(out, "has_fields", this->has_fields);
|
||||
dump_field(out, "mode", static_cast<enums::WaterHeaterMode>(this->mode));
|
||||
dump_field(out, "target_temperature", this->target_temperature);
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "state", this->state);
|
||||
dump_field(out, "target_temperature_low", this->target_temperature_low);
|
||||
dump_field(out, "target_temperature_high", this->target_temperature_high);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_NUMBER
|
||||
void ListEntitiesNumberResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ListEntitiesNumberResponse");
|
||||
@@ -1536,7 +1305,7 @@ void ListEntitiesSelectResponse::dump_to(std::string &out) const {
|
||||
#ifdef USE_ENTITY_ICON
|
||||
dump_field(out, "icon", this->icon_ref_);
|
||||
#endif
|
||||
for (const auto &it : *this->options) {
|
||||
for (const auto &it : this->options) {
|
||||
dump_field(out, "options", it, 4);
|
||||
}
|
||||
dump_field(out, "disabled_by_default", this->disabled_by_default);
|
||||
@@ -1557,9 +1326,7 @@ void SelectStateResponse::dump_to(std::string &out) const {
|
||||
void SelectCommandRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "SelectCommandRequest");
|
||||
dump_field(out, "key", this->key);
|
||||
out.append(" state: ");
|
||||
out.append(format_hex_pretty(this->state, this->state_len));
|
||||
out.append("\n");
|
||||
dump_field(out, "state", this->state);
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
@@ -1699,7 +1466,6 @@ void ListEntitiesMediaPlayerResponse::dump_to(std::string &out) const {
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "feature_flags", this->feature_flags);
|
||||
}
|
||||
void MediaPlayerStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "MediaPlayerStateResponse");
|
||||
@@ -1743,9 +1509,9 @@ void BluetoothLERawAdvertisement::dump_to(std::string &out) const {
|
||||
}
|
||||
void BluetoothLERawAdvertisementsResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "BluetoothLERawAdvertisementsResponse");
|
||||
for (uint16_t i = 0; i < this->advertisements_len; i++) {
|
||||
for (const auto &it : this->advertisements) {
|
||||
out.append(" advertisements: ");
|
||||
this->advertisements[i].dump_to(out);
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
}
|
||||
@@ -1770,7 +1536,6 @@ void BluetoothGATTDescriptor::dump_to(std::string &out) const {
|
||||
dump_field(out, "uuid", it, 4);
|
||||
}
|
||||
dump_field(out, "handle", this->handle);
|
||||
dump_field(out, "short_uuid", this->short_uuid);
|
||||
}
|
||||
void BluetoothGATTCharacteristic::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "BluetoothGATTCharacteristic");
|
||||
@@ -1784,7 +1549,6 @@ void BluetoothGATTCharacteristic::dump_to(std::string &out) const {
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
dump_field(out, "short_uuid", this->short_uuid);
|
||||
}
|
||||
void BluetoothGATTService::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "BluetoothGATTService");
|
||||
@@ -1797,7 +1561,6 @@ void BluetoothGATTService::dump_to(std::string &out) const {
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
dump_field(out, "short_uuid", this->short_uuid);
|
||||
}
|
||||
void BluetoothGATTGetServicesResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "BluetoothGATTGetServicesResponse");
|
||||
@@ -1831,7 +1594,7 @@ void BluetoothGATTWriteRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "handle", this->handle);
|
||||
dump_field(out, "response", this->response);
|
||||
out.append(" data: ");
|
||||
out.append(format_hex_pretty(this->data, this->data_len));
|
||||
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()));
|
||||
out.append("\n");
|
||||
}
|
||||
void BluetoothGATTReadDescriptorRequest::dump_to(std::string &out) const {
|
||||
@@ -1844,7 +1607,7 @@ void BluetoothGATTWriteDescriptorRequest::dump_to(std::string &out) const {
|
||||
dump_field(out, "address", this->address);
|
||||
dump_field(out, "handle", this->handle);
|
||||
out.append(" data: ");
|
||||
out.append(format_hex_pretty(this->data, this->data_len));
|
||||
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()));
|
||||
out.append("\n");
|
||||
}
|
||||
void BluetoothGATTNotifyRequest::dump_to(std::string &out) const {
|
||||
@@ -1913,7 +1676,6 @@ void BluetoothScannerStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "BluetoothScannerStateResponse");
|
||||
dump_field(out, "state", static_cast<enums::BluetoothScannerState>(this->state));
|
||||
dump_field(out, "mode", static_cast<enums::BluetoothScannerMode>(this->mode));
|
||||
dump_field(out, "configured_mode", static_cast<enums::BluetoothScannerMode>(this->configured_mode));
|
||||
}
|
||||
void BluetoothScannerSetModeRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "BluetoothScannerSetModeRequest");
|
||||
@@ -1997,25 +1759,8 @@ void VoiceAssistantWakeWord::dump_to(std::string &out) const {
|
||||
dump_field(out, "trained_languages", it, 4);
|
||||
}
|
||||
}
|
||||
void VoiceAssistantExternalWakeWord::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "VoiceAssistantExternalWakeWord");
|
||||
dump_field(out, "id", this->id);
|
||||
dump_field(out, "wake_word", this->wake_word);
|
||||
for (const auto &it : this->trained_languages) {
|
||||
dump_field(out, "trained_languages", it, 4);
|
||||
}
|
||||
dump_field(out, "model_type", this->model_type);
|
||||
dump_field(out, "model_size", this->model_size);
|
||||
dump_field(out, "model_hash", this->model_hash);
|
||||
dump_field(out, "url", this->url);
|
||||
}
|
||||
void VoiceAssistantConfigurationRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "VoiceAssistantConfigurationRequest");
|
||||
for (const auto &it : this->external_wake_words) {
|
||||
out.append(" external_wake_words: ");
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
out.append("VoiceAssistantConfigurationRequest {}");
|
||||
}
|
||||
void VoiceAssistantConfigurationResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "VoiceAssistantConfigurationResponse");
|
||||
@@ -2024,7 +1769,7 @@ void VoiceAssistantConfigurationResponse::dump_to(std::string &out) const {
|
||||
it.dump_to(out);
|
||||
out.append("\n");
|
||||
}
|
||||
for (const auto &it : *this->active_wake_words) {
|
||||
for (const auto &it : this->active_wake_words) {
|
||||
dump_field(out, "active_wake_words", it, 4);
|
||||
}
|
||||
dump_field(out, "max_active_wake_words", this->max_active_wake_words);
|
||||
@@ -2195,7 +1940,7 @@ void ListEntitiesEventResponse::dump_to(std::string &out) const {
|
||||
dump_field(out, "disabled_by_default", this->disabled_by_default);
|
||||
dump_field(out, "entity_category", static_cast<enums::EntityCategory>(this->entity_category));
|
||||
dump_field(out, "device_class", this->device_class_ref_);
|
||||
for (const auto &it : *this->event_types) {
|
||||
for (const auto &it : this->event_types) {
|
||||
dump_field(out, "event_types", it, 4);
|
||||
}
|
||||
#ifdef USE_DEVICES
|
||||
@@ -2324,21 +2069,6 @@ void UpdateCommandRequest::dump_to(std::string &out) const {
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void ZWaveProxyFrame::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ZWaveProxyFrame");
|
||||
out.append(" data: ");
|
||||
out.append(format_hex_pretty(this->data, this->data_len));
|
||||
out.append("\n");
|
||||
}
|
||||
void ZWaveProxyRequest::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ZWaveProxyRequest");
|
||||
dump_field(out, "type", static_cast<enums::ZWaveProxyRequestType>(this->type));
|
||||
out.append(" data: ");
|
||||
out.append(format_hex_pretty(this->data, this->data_len));
|
||||
out.append("\n");
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
|
||||
@@ -1,38 +0,0 @@
#pragma once

#include "esphome/core/defines.h"

// This file provides includes needed by the generated protobuf code
// when using pointer optimizations for component-specific types

#ifdef USE_CLIMATE
#include "esphome/components/climate/climate_mode.h"
#include "esphome/components/climate/climate_traits.h"
#endif

#ifdef USE_WATER_HEATER
#include "esphome/components/water_heater/water_heater.h"
#endif

#ifdef USE_LIGHT
#include "esphome/components/light/light_traits.h"
#endif

#ifdef USE_FAN
#include "esphome/components/fan/fan_traits.h"
#endif

#ifdef USE_SELECT
#include "esphome/components/select/select_traits.h"
#endif

// Standard library includes that might be needed
#include <set>
#include <vector>
#include <string>

namespace esphome::api {

// This file only provides includes, no actual code

} // namespace esphome::api
@@ -13,7 +13,7 @@ void APIServerConnectionBase::log_send_message_(const char *name, const std::str
|
||||
}
|
||||
#endif
|
||||
|
||||
void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) {
|
||||
void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) {
|
||||
switch (msg_type) {
|
||||
case HelloRequest::MESSAGE_TYPE: {
|
||||
HelloRequest msg;
|
||||
@@ -24,20 +24,18 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
this->on_hello_request(msg);
|
||||
break;
|
||||
}
|
||||
#ifdef USE_API_PASSWORD
|
||||
case AuthenticationRequest::MESSAGE_TYPE: {
|
||||
AuthenticationRequest msg;
|
||||
case ConnectRequest::MESSAGE_TYPE: {
|
||||
ConnectRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_authentication_request: %s", msg.dump().c_str());
|
||||
ESP_LOGVV(TAG, "on_connect_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_authentication_request(msg);
|
||||
this->on_connect_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
case DisconnectRequest::MESSAGE_TYPE: {
|
||||
DisconnectRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_disconnect_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -46,7 +44,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
}
|
||||
case DisconnectResponse::MESSAGE_TYPE: {
|
||||
DisconnectResponse msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_disconnect_response: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -55,7 +53,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
}
|
||||
case PingRequest::MESSAGE_TYPE: {
|
||||
PingRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_ping_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -64,7 +62,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
}
|
||||
case PingResponse::MESSAGE_TYPE: {
|
||||
PingResponse msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_ping_response: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -73,7 +71,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
}
|
||||
case DeviceInfoRequest::MESSAGE_TYPE: {
|
||||
DeviceInfoRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_device_info_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -82,7 +80,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
}
|
||||
case ListEntitiesRequest::MESSAGE_TYPE: {
|
||||
ListEntitiesRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_list_entities_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -91,7 +89,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
}
|
||||
case SubscribeStatesRequest::MESSAGE_TYPE: {
|
||||
SubscribeStatesRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_subscribe_states_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -154,7 +152,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
case SubscribeHomeassistantServicesRequest::MESSAGE_TYPE: {
|
||||
SubscribeHomeassistantServicesRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_subscribe_homeassistant_services_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -162,6 +160,15 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
case GetTimeRequest::MESSAGE_TYPE: {
|
||||
GetTimeRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_get_time_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_get_time_request(msg);
|
||||
break;
|
||||
}
|
||||
case GetTimeResponse::MESSAGE_TYPE: {
|
||||
GetTimeResponse msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
@@ -174,7 +181,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
case SubscribeHomeAssistantStatesRequest::MESSAGE_TYPE: {
|
||||
SubscribeHomeAssistantStatesRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_subscribe_home_assistant_states_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -193,7 +200,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
#ifdef USE_API_SERVICES
|
||||
case ExecuteServiceRequest::MESSAGE_TYPE: {
|
||||
ExecuteServiceRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
@@ -383,7 +390,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
case SubscribeBluetoothConnectionsFreeRequest::MESSAGE_TYPE: {
|
||||
SubscribeBluetoothConnectionsFreeRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_subscribe_bluetooth_connections_free_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -394,7 +401,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
case UnsubscribeBluetoothLEAdvertisementsRequest::MESSAGE_TYPE: {
|
||||
UnsubscribeBluetoothLEAdvertisementsRequest msg;
|
||||
// Empty message: no decode needed
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_unsubscribe_bluetooth_le_advertisements_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
@@ -588,50 +595,6 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
|
||||
this->on_bluetooth_scanner_set_mode_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
case ZWaveProxyFrame::MESSAGE_TYPE: {
|
||||
ZWaveProxyFrame msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_z_wave_proxy_frame: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_z_wave_proxy_frame(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
case ZWaveProxyRequest::MESSAGE_TYPE: {
|
||||
ZWaveProxyRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_z_wave_proxy_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_z_wave_proxy_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
case HomeassistantActionResponse::MESSAGE_TYPE: {
|
||||
HomeassistantActionResponse msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_homeassistant_action_response: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_homeassistant_action_response(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_WATER_HEATER
|
||||
case WaterHeaterCommandRequest::MESSAGE_TYPE: {
|
||||
WaterHeaterCommandRequest msg;
|
||||
msg.decode(msg_data, msg_size);
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
ESP_LOGVV(TAG, "on_water_heater_command_request: %s", msg.dump().c_str());
|
||||
#endif
|
||||
this->on_water_heater_command_request(msg);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
default:
|
||||
break;
|
||||
@@ -643,13 +606,11 @@ void APIServerConnection::on_hello_request(const HelloRequest &msg) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#ifdef USE_API_PASSWORD
|
||||
void APIServerConnection::on_authentication_request(const AuthenticationRequest &msg) {
|
||||
if (!this->send_authenticate_response(msg)) {
|
||||
void APIServerConnection::on_connect_request(const ConnectRequest &msg) {
|
||||
if (!this->send_connect_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
void APIServerConnection::on_disconnect_request(const DisconnectRequest &msg) {
|
||||
if (!this->send_disconnect_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
@@ -661,139 +622,246 @@ void APIServerConnection::on_ping_request(const PingRequest &msg) {
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_device_info_request(const DeviceInfoRequest &msg) {
|
||||
if (!this->send_device_info_response(msg)) {
|
||||
if (this->check_connection_setup_() && !this->send_device_info_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) { this->list_entities(msg); }
|
||||
void APIServerConnection::on_subscribe_states_request(const SubscribeStatesRequest &msg) {
|
||||
this->subscribe_states(msg);
|
||||
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->list_entities(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_subscribe_states_request(const SubscribeStatesRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_states(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_logs(msg);
|
||||
}
|
||||
}
|
||||
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) { this->subscribe_logs(msg); }
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void APIServerConnection::on_subscribe_homeassistant_services_request(
|
||||
const SubscribeHomeassistantServicesRequest &msg) {
|
||||
this->subscribe_homeassistant_services(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_homeassistant_services(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
void APIServerConnection::on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) {
|
||||
this->subscribe_home_assistant_states(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_home_assistant_states(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) { this->execute_service(msg); }
|
||||
void APIServerConnection::on_get_time_request(const GetTimeRequest &msg) {
|
||||
if (this->check_connection_setup_() && !this->send_get_time_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#ifdef USE_API_SERVICES
|
||||
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->execute_service(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
void APIServerConnection::on_noise_encryption_set_key_request(const NoiseEncryptionSetKeyRequest &msg) {
|
||||
if (!this->send_noise_encryption_set_key_response(msg)) {
|
||||
if (this->check_authenticated_() && !this->send_noise_encryption_set_key_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BUTTON
|
||||
void APIServerConnection::on_button_command_request(const ButtonCommandRequest &msg) { this->button_command(msg); }
|
||||
void APIServerConnection::on_button_command_request(const ButtonCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->button_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
void APIServerConnection::on_camera_image_request(const CameraImageRequest &msg) { this->camera_image(msg); }
|
||||
void APIServerConnection::on_camera_image_request(const CameraImageRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->camera_image(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_CLIMATE
|
||||
void APIServerConnection::on_climate_command_request(const ClimateCommandRequest &msg) { this->climate_command(msg); }
|
||||
void APIServerConnection::on_climate_command_request(const ClimateCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->climate_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_COVER
|
||||
void APIServerConnection::on_cover_command_request(const CoverCommandRequest &msg) { this->cover_command(msg); }
|
||||
void APIServerConnection::on_cover_command_request(const CoverCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->cover_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_DATETIME_DATE
|
||||
void APIServerConnection::on_date_command_request(const DateCommandRequest &msg) { this->date_command(msg); }
|
||||
void APIServerConnection::on_date_command_request(const DateCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->date_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_DATETIME_DATETIME
|
||||
void APIServerConnection::on_date_time_command_request(const DateTimeCommandRequest &msg) {
|
||||
this->datetime_command(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->datetime_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_FAN
|
||||
void APIServerConnection::on_fan_command_request(const FanCommandRequest &msg) { this->fan_command(msg); }
|
||||
void APIServerConnection::on_fan_command_request(const FanCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->fan_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_LIGHT
|
||||
void APIServerConnection::on_light_command_request(const LightCommandRequest &msg) { this->light_command(msg); }
|
||||
void APIServerConnection::on_light_command_request(const LightCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->light_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_LOCK
|
||||
void APIServerConnection::on_lock_command_request(const LockCommandRequest &msg) { this->lock_command(msg); }
|
||||
void APIServerConnection::on_lock_command_request(const LockCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->lock_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_MEDIA_PLAYER
|
||||
void APIServerConnection::on_media_player_command_request(const MediaPlayerCommandRequest &msg) {
|
||||
this->media_player_command(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->media_player_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_NUMBER
|
||||
void APIServerConnection::on_number_command_request(const NumberCommandRequest &msg) { this->number_command(msg); }
|
||||
void APIServerConnection::on_number_command_request(const NumberCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->number_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_SELECT
|
||||
void APIServerConnection::on_select_command_request(const SelectCommandRequest &msg) { this->select_command(msg); }
|
||||
void APIServerConnection::on_select_command_request(const SelectCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->select_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_SIREN
|
||||
void APIServerConnection::on_siren_command_request(const SirenCommandRequest &msg) { this->siren_command(msg); }
|
||||
void APIServerConnection::on_siren_command_request(const SirenCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->siren_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_SWITCH
|
||||
void APIServerConnection::on_switch_command_request(const SwitchCommandRequest &msg) { this->switch_command(msg); }
|
||||
void APIServerConnection::on_switch_command_request(const SwitchCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->switch_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_TEXT
|
||||
void APIServerConnection::on_text_command_request(const TextCommandRequest &msg) { this->text_command(msg); }
|
||||
void APIServerConnection::on_text_command_request(const TextCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->text_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_DATETIME_TIME
|
||||
void APIServerConnection::on_time_command_request(const TimeCommandRequest &msg) { this->time_command(msg); }
|
||||
void APIServerConnection::on_time_command_request(const TimeCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->time_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_UPDATE
|
||||
void APIServerConnection::on_update_command_request(const UpdateCommandRequest &msg) { this->update_command(msg); }
|
||||
void APIServerConnection::on_update_command_request(const UpdateCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->update_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VALVE
|
||||
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) { this->valve_command(msg); }
|
||||
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) {
|
||||
if (this->check_authenticated_()) {
|
||||
this->valve_command(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_subscribe_bluetooth_le_advertisements_request(
|
||||
const SubscribeBluetoothLEAdvertisementsRequest &msg) {
|
||||
this->subscribe_bluetooth_le_advertisements(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_bluetooth_le_advertisements(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_device_request(const BluetoothDeviceRequest &msg) {
|
||||
this->bluetooth_device_request(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_device_request(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_get_services_request(const BluetoothGATTGetServicesRequest &msg) {
|
||||
this->bluetooth_gatt_get_services(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_get_services(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_read_request(const BluetoothGATTReadRequest &msg) {
|
||||
this->bluetooth_gatt_read(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_read(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_write_request(const BluetoothGATTWriteRequest &msg) {
|
||||
this->bluetooth_gatt_write(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_write(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_read_descriptor_request(const BluetoothGATTReadDescriptorRequest &msg) {
|
||||
this->bluetooth_gatt_read_descriptor(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_read_descriptor(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_write_descriptor_request(const BluetoothGATTWriteDescriptorRequest &msg) {
|
||||
this->bluetooth_gatt_write_descriptor(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_write_descriptor(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_gatt_notify_request(const BluetoothGATTNotifyRequest &msg) {
|
||||
this->bluetooth_gatt_notify(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_gatt_notify(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
|
||||
const SubscribeBluetoothConnectionsFreeRequest &msg) {
|
||||
if (!this->send_subscribe_bluetooth_connections_free_response(msg)) {
|
||||
if (this->check_authenticated_() && !this->send_subscribe_bluetooth_connections_free_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
@@ -801,68 +869,45 @@ void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_unsubscribe_bluetooth_le_advertisements_request(
|
||||
const UnsubscribeBluetoothLEAdvertisementsRequest &msg) {
|
||||
this->unsubscribe_bluetooth_le_advertisements(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->unsubscribe_bluetooth_le_advertisements(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_BLUETOOTH_PROXY
|
||||
void APIServerConnection::on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &msg) {
|
||||
this->bluetooth_scanner_set_mode(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->bluetooth_scanner_set_mode(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
void APIServerConnection::on_subscribe_voice_assistant_request(const SubscribeVoiceAssistantRequest &msg) {
|
||||
this->subscribe_voice_assistant(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->subscribe_voice_assistant(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
void APIServerConnection::on_voice_assistant_configuration_request(const VoiceAssistantConfigurationRequest &msg) {
|
||||
if (!this->send_voice_assistant_get_configuration_response(msg)) {
|
||||
if (this->check_authenticated_() && !this->send_voice_assistant_get_configuration_response(msg)) {
|
||||
this->on_fatal_error();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_VOICE_ASSISTANT
|
||||
void APIServerConnection::on_voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) {
|
||||
this->voice_assistant_set_configuration(msg);
|
||||
if (this->check_authenticated_()) {
|
||||
this->voice_assistant_set_configuration(msg);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
void APIServerConnection::on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) {
|
||||
this->alarm_control_panel_command(msg);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void APIServerConnection::on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) { this->zwave_proxy_frame(msg); }
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void APIServerConnection::on_z_wave_proxy_request(const ZWaveProxyRequest &msg) { this->zwave_proxy_request(msg); }
|
||||
#endif
|
||||
|
||||
void APIServerConnection::read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) {
  // Check authentication/connection requirements for messages
  switch (msg_type) {
    case HelloRequest::MESSAGE_TYPE:  // No setup required
#ifdef USE_API_PASSWORD
    case AuthenticationRequest::MESSAGE_TYPE:  // No setup required
#endif
    case DisconnectRequest::MESSAGE_TYPE:  // No setup required
    case PingRequest::MESSAGE_TYPE:  // No setup required
      break;  // Skip all checks for these messages
    case DeviceInfoRequest::MESSAGE_TYPE:  // Connection setup only
      if (!this->check_connection_setup_()) {
        return;  // Connection not setup
      }
      break;
    default:
      // All other messages require authentication (which includes connection check)
      if (!this->check_authenticated_()) {
        return;  // Authentication failed
      }
      break;
  if (this->check_authenticated_()) {
    this->alarm_control_panel_command(msg);
  }

  // Call base implementation to process the message
  APIServerConnectionBase::read_message(msg_size, msg_type, msg_data);
}
#endif

} // namespace esphome::api

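One side of the diff above performs the connection-setup and authentication checks once at dispatch time in read_message(), so the individual handlers no longer repeat them. A minimal standalone sketch of that gating pattern (all type names, constants, and methods below are illustrative assumptions, not the ESPHome implementation):

```cpp
// Sketch: gate all message handlers behind one dispatch-time check instead of
// repeating the check inside every handler. Names are illustrative only.
#include <cstdint>
#include <iostream>

enum MsgType : uint32_t { HELLO = 1, DISCONNECT = 2, PING = 3, DEVICE_INFO = 4, LIGHT_COMMAND = 5 };

class Connection {
 public:
  void read_message(uint32_t msg_type) {
    switch (msg_type) {
      case HELLO:
      case DISCONNECT:
      case PING:
        break;  // no checks for handshake/keepalive messages
      case DEVICE_INFO:
        if (!this->connection_setup_)
          return;  // connection not set up yet
        break;
      default:
        if (!this->authenticated_)
          return;  // everything else requires authentication
        break;
    }
    this->dispatch_(msg_type);  // handlers no longer repeat the checks
  }
  void set_state(bool setup, bool authenticated) {
    this->connection_setup_ = setup;
    this->authenticated_ = authenticated;
  }

 private:
  void dispatch_(uint32_t msg_type) { std::cout << "handled message type " << msg_type << "\n"; }
  bool connection_setup_{false};
  bool authenticated_{false};
};

int main() {
  Connection conn;
  conn.read_message(LIGHT_COMMAND);  // dropped: not authenticated
  conn.set_state(true, true);
  conn.read_message(LIGHT_COMMAND);  // handled
}
```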
@@ -26,9 +26,7 @@ class APIServerConnectionBase : public ProtoService {
|
||||
|
||||
virtual void on_hello_request(const HelloRequest &value){};
|
||||
|
||||
#ifdef USE_API_PASSWORD
|
||||
virtual void on_authentication_request(const AuthenticationRequest &value){};
|
||||
#endif
|
||||
virtual void on_connect_request(const ConnectRequest &value){};
|
||||
|
||||
virtual void on_disconnect_request(const DisconnectRequest &value){};
|
||||
virtual void on_disconnect_response(const DisconnectResponse &value){};
|
||||
@@ -66,9 +64,6 @@ class APIServerConnectionBase : public ProtoService {
|
||||
virtual void on_subscribe_homeassistant_services_request(const SubscribeHomeassistantServicesRequest &value){};
|
||||
#endif
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
virtual void on_homeassistant_action_response(const HomeassistantActionResponse &value){};
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
virtual void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &value){};
|
||||
#endif
|
||||
@@ -76,10 +71,10 @@ class APIServerConnectionBase : public ProtoService {
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
virtual void on_home_assistant_state_response(const HomeAssistantStateResponse &value){};
|
||||
#endif
|
||||
|
||||
virtual void on_get_time_request(const GetTimeRequest &value){};
|
||||
virtual void on_get_time_response(const GetTimeResponse &value){};
|
||||
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
#ifdef USE_API_SERVICES
|
||||
virtual void on_execute_service_request(const ExecuteServiceRequest &value){};
|
||||
#endif
|
||||
|
||||
@@ -91,10 +86,6 @@ class APIServerConnectionBase : public ProtoService {
|
||||
virtual void on_climate_command_request(const ClimateCommandRequest &value){};
|
||||
#endif
|
||||
|
||||
#ifdef USE_WATER_HEATER
|
||||
virtual void on_water_heater_command_request(const WaterHeaterCommandRequest &value){};
|
||||
#endif
|
||||
|
||||
#ifdef USE_NUMBER
|
||||
virtual void on_number_command_request(const NumberCommandRequest &value){};
|
||||
#endif
|
||||
@@ -214,23 +205,15 @@ class APIServerConnectionBase : public ProtoService {
|
||||
|
||||
#ifdef USE_UPDATE
|
||||
virtual void on_update_command_request(const UpdateCommandRequest &value){};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void on_z_wave_proxy_frame(const ZWaveProxyFrame &value){};
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void on_z_wave_proxy_request(const ZWaveProxyRequest &value){};
|
||||
#endif
|
||||
protected:
|
||||
void read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) override;
|
||||
void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;
|
||||
};
|
||||
|
||||
class APIServerConnection : public APIServerConnectionBase {
|
||||
public:
|
||||
virtual bool send_hello_response(const HelloRequest &msg) = 0;
|
||||
#ifdef USE_API_PASSWORD
|
||||
virtual bool send_authenticate_response(const AuthenticationRequest &msg) = 0;
|
||||
#endif
|
||||
virtual bool send_connect_response(const ConnectRequest &msg) = 0;
|
||||
virtual bool send_disconnect_response(const DisconnectRequest &msg) = 0;
|
||||
virtual bool send_ping_response(const PingRequest &msg) = 0;
|
||||
virtual bool send_device_info_response(const DeviceInfoRequest &msg) = 0;
|
||||
@@ -243,7 +226,8 @@ class APIServerConnection : public APIServerConnectionBase {
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
virtual void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) = 0;
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
virtual bool send_get_time_response(const GetTimeRequest &msg) = 0;
|
||||
#ifdef USE_API_SERVICES
|
||||
virtual void execute_service(const ExecuteServiceRequest &msg) = 0;
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
@@ -348,18 +332,10 @@ class APIServerConnection : public APIServerConnectionBase {
|
||||
#endif
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
virtual void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) = 0;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void zwave_proxy_frame(const ZWaveProxyFrame &msg) = 0;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
virtual void zwave_proxy_request(const ZWaveProxyRequest &msg) = 0;
|
||||
#endif
|
||||
protected:
|
||||
void on_hello_request(const HelloRequest &msg) override;
|
||||
#ifdef USE_API_PASSWORD
|
||||
void on_authentication_request(const AuthenticationRequest &msg) override;
|
||||
#endif
|
||||
void on_connect_request(const ConnectRequest &msg) override;
|
||||
void on_disconnect_request(const DisconnectRequest &msg) override;
|
||||
void on_ping_request(const PingRequest &msg) override;
|
||||
void on_device_info_request(const DeviceInfoRequest &msg) override;
|
||||
@@ -372,7 +348,8 @@ class APIServerConnection : public APIServerConnectionBase {
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) override;
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
void on_get_time_request(const GetTimeRequest &msg) override;
|
||||
#ifdef USE_API_SERVICES
|
||||
void on_execute_service_request(const ExecuteServiceRequest &msg) override;
|
||||
#endif
|
||||
#ifdef USE_API_NOISE
|
||||
@@ -478,13 +455,6 @@ class APIServerConnection : public APIServerConnectionBase {
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
void on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) override;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) override;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void on_z_wave_proxy_request(const ZWaveProxyRequest &msg) override;
|
||||
#endif
|
||||
void read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) override;
|
||||
};
|
||||
|
||||
} // namespace esphome::api
|
||||
|
||||
@@ -4,22 +4,17 @@
#include "api_connection.h"
#include "esphome/components/network/util.h"
#include "esphome/core/application.h"
#include "esphome/core/controller_registry.h"
#include "esphome/core/defines.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/util.h"
#include "esphome/core/version.h"
#ifdef USE_API_HOMEASSISTANT_SERVICES
#include "homeassistant_service.h"
#endif

#ifdef USE_LOGGER
#include "esphome/components/logger/logger.h"
#endif

#include <algorithm>
#include <utility>

namespace esphome::api {

@@ -35,22 +30,25 @@ APIServer::APIServer() {
}

void APIServer::setup() {
  ControllerRegistry::register_controller(this);
  this->setup_controller();

#ifdef USE_API_NOISE
  uint32_t hash = 88491486UL;

  this->noise_pref_ = global_preferences->make_preference<SavedNoisePsk>(hash, true);

#ifndef USE_API_NOISE_PSK_FROM_YAML
  // Only load saved PSK if not set from YAML
  SavedNoisePsk noise_pref_saved{};
  if (this->noise_pref_.load(&noise_pref_saved)) {
    ESP_LOGD(TAG, "Loaded saved Noise PSK");

    this->set_noise_psk(noise_pref_saved.psk);
  }
#endif
#endif

  // Schedule reboot if no clients connect within timeout
  if (this->reboot_timeout_ != 0) {
    this->schedule_reboot_timeout_();
  }

  this->socket_ = socket::socket_ip_loop_monitored(SOCK_STREAM, 0);  // monitored for incoming connections
  if (this->socket_ == nullptr) {
@@ -87,7 +85,7 @@ void APIServer::setup() {
    return;
  }

  err = this->socket_->listen(this->listen_backlog_);
  err = this->socket_->listen(4);
  if (err != 0) {
    ESP_LOGW(TAG, "Socket unable to listen: errno %d", errno);
    this->mark_failed();
@@ -96,22 +94,42 @@ void APIServer::setup() {

#ifdef USE_LOGGER
  if (logger::global_logger != nullptr) {
    logger::global_logger->add_log_listener(this);
    logger::global_logger->add_on_log_callback(
        [this](int level, const char *tag, const char *message, size_t message_len) {
          if (this->shutting_down_) {
            // Don't try to send logs during shutdown
            // as it could result in a recursion and
            // we would be filling a buffer we are trying to clear
            return;
          }
          for (auto &c : this->clients_) {
            if (!c->flags_.remove && c->get_log_subscription_level() >= level)
              c->try_send_log_message(level, tag, message, message_len);
          }
        });
  }
#endif

#ifdef USE_CAMERA
  if (camera::Camera::instance() != nullptr && !camera::Camera::instance()->is_internal()) {
    camera::Camera::instance()->add_listener(this);
    camera::Camera::instance()->add_image_callback([this](const std::shared_ptr<camera::CameraImage> &image) {
      for (auto &c : this->clients_) {
        if (!c->flags_.remove)
          c->set_camera_state(image);
      }
    });
  }
#endif
}

  // Initialize last_connected_ for reboot timeout tracking
  this->last_connected_ = App.get_loop_component_start_time();
  // Set warning status if reboot timeout is enabled
  if (this->reboot_timeout_ != 0) {
    this->status_set_warning();
  }
void APIServer::schedule_reboot_timeout_() {
  this->status_set_warning();
  this->set_timeout("api_reboot", this->reboot_timeout_, []() {
    if (!global_api_server->is_connected()) {
      ESP_LOGE(TAG, "No clients; rebooting");
      App.reboot();
    }
  });
}

void APIServer::loop() {
@@ -120,46 +138,24 @@ void APIServer::loop() {
  while (true) {
    struct sockaddr_storage source_addr;
    socklen_t addr_len = sizeof(source_addr);

    auto sock = this->socket_->accept_loop_monitored((struct sockaddr *) &source_addr, &addr_len);
    if (!sock)
      break;

    char peername[socket::PEERNAME_MAX_LEN];
    sock->getpeername_to(peername);

    // Check if we're at the connection limit
    if (this->clients_.size() >= this->max_connections_) {
      ESP_LOGW(TAG, "Max connections (%d), rejecting %s", this->max_connections_, peername);
      // Immediately close - socket destructor will handle cleanup
      sock.reset();
      continue;
    }

    ESP_LOGD(TAG, "Accept %s", peername);
    ESP_LOGD(TAG, "Accept %s", sock->getpeername().c_str());

    auto *conn = new APIConnection(std::move(sock), this);
    this->clients_.emplace_back(conn);
    conn->start();

    // First client connected - clear warning and update timestamp
    // Clear warning status and cancel reboot when first client connects
    if (this->clients_.size() == 1 && this->reboot_timeout_ != 0) {
      this->status_clear_warning();
      this->last_connected_ = App.get_loop_component_start_time();
      this->cancel_timeout("api_reboot");
    }
  }
}

  if (this->clients_.empty()) {
    // Check reboot timeout - done in loop to avoid scheduler heap churn
    // (cancelled scheduler items sit in heap memory until their scheduled time)
    if (this->reboot_timeout_ != 0) {
      const uint32_t now = App.get_loop_component_start_time();
      if (now - this->last_connected_ > this->reboot_timeout_) {
        ESP_LOGE(TAG, "No clients; rebooting");
        App.reboot();
      }
    }
    return;
  }

@@ -169,7 +165,7 @@ void APIServer::loop() {
    // Network is down - disconnect all clients
    for (auto &client : this->clients_) {
      client->on_fatal_error();
      client->log_client_(ESPHOME_LOG_LEVEL_WARN, LOG_STR("Network down; disconnect"));
      ESP_LOGW(TAG, "%s: Network down; disconnect", client->get_client_combined_info().c_str());
    }
    // Continue to process and clean up the clients below
  }
@@ -187,11 +183,7 @@ void APIServer::loop() {

      // Rare case: handle disconnection
#ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
      // Trigger expects std::string, get fresh peername from socket
      this->client_disconnected_trigger_->trigger(client->client_info_.name, client->get_peername());
#endif
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
      this->unregister_active_action_calls_for_connection(client.get());
      this->client_disconnected_trigger_->trigger(client->client_info_.name, client->client_info_.peername);
#endif
      ESP_LOGV(TAG, "Remove connection %s", client->client_info_.name.c_str());

@@ -201,10 +193,9 @@ void APIServer::loop() {
      }
      this->clients_.pop_back();

      // Last client disconnected - set warning and start tracking for reboot timeout
      // Schedule reboot when last client disconnects
      if (this->clients_.empty() && this->reboot_timeout_ != 0) {
        this->status_set_warning();
        this->last_connected_ = App.get_loop_component_start_time();
        this->schedule_reboot_timeout_();
      }
      // Don't increment client_index since we need to process the swapped element
    }
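One side of the loop() hunk above checks the no-client reboot timeout directly against a stored timestamp in the main loop rather than scheduling and cancelling a timer on every connect/disconnect. A small sketch of that pattern (struct and field names are assumptions for illustration):

```cpp
// Sketch: track the last time a client was connected and compare elapsed time
// in the loop instead of juggling scheduler timeouts. Names are illustrative.
#include <cstdint>
#include <cstdio>

struct RebootGuard {
  uint32_t reboot_timeout_ms;  // 0 disables the check
  uint32_t last_connected_ms;  // refreshed whenever a client is present

  // Call once per loop iteration with the current time and client count.
  bool should_reboot(uint32_t now_ms, size_t client_count) {
    if (client_count > 0) {
      this->last_connected_ms = now_ms;  // clients present: reset the window
      return false;
    }
    return this->reboot_timeout_ms != 0 && (now_ms - this->last_connected_ms) > this->reboot_timeout_ms;
  }
};

int main() {
  RebootGuard guard{300000, 0};  // assumed 5-minute timeout
  std::printf("%d\n", guard.should_reboot(10000, 1));   // 0: a client is connected
  std::printf("%d\n", guard.should_reboot(400000, 0));  // 1: no client for longer than the timeout
}
```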
@@ -213,13 +204,11 @@
void APIServer::dump_config() {
  ESP_LOGCONFIG(TAG,
                "Server:\n"
                " Address: %s:%u\n"
                " Listen backlog: %u\n"
                " Max connections: %u",
                network::get_use_address(), this->port_, this->listen_backlog_, this->max_connections_);
                " Address: %s:%u",
                network::get_use_address().c_str(), this->port_);
#ifdef USE_API_NOISE
  ESP_LOGCONFIG(TAG, " Noise encryption: %s", YESNO(this->noise_ctx_.has_psk()));
  if (!this->noise_ctx_.has_psk()) {
  ESP_LOGCONFIG(TAG, " Noise encryption: %s", YESNO(this->noise_ctx_->has_psk()));
  if (!this->noise_ctx_->has_psk()) {
    ESP_LOGCONFIG(TAG, " Supports encryption: YES");
  }
#else
@@ -228,12 +217,12 @@ void APIServer::dump_config() {
}

#ifdef USE_API_PASSWORD
bool APIServer::check_password(const uint8_t *password_data, size_t password_len) const {
bool APIServer::check_password(const std::string &password) const {
  // depend only on input password length
  const char *a = this->password_.c_str();
  uint32_t len_a = this->password_.length();
  const char *b = reinterpret_cast<const char *>(password_data);
  uint32_t len_b = password_len;
  const char *b = password.c_str();
  uint32_t len_b = password.length();

  // disable optimization with volatile
  volatile uint32_t length = len_b;
@@ -256,12 +245,11 @@ bool APIServer::check_password(const uint8_t *password_data, size_t password_len

  return result == 0;
}

#endif

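For reference, the constant-time comparison idea behind check_password() above (walk the whole input and accumulate differences so timing depends only on the input length) can be sketched in isolation as follows. This is a simplified illustration, not the exact ESPHome routine:

```cpp
// Sketch of a comparison whose timing depends only on the attacker-supplied
// input length, never on where the first mismatching byte occurs.
#include <cstdint>
#include <cstdio>
#include <cstring>

static bool constant_time_equals(const char *expected, size_t expected_len, const char *input, size_t input_len) {
  volatile uint32_t result = static_cast<uint32_t>(expected_len ^ input_len);  // differing lengths already fail
  for (size_t i = 0; i < input_len; i++) {
    // Index the expected string modulo its length so there is no early exit on mismatch.
    char e = expected_len ? expected[i % expected_len] : 0;
    result |= static_cast<uint32_t>(e ^ input[i]);
  }
  return result == 0;
}

int main() {
  const char *secret = "hunter2";
  std::printf("%d\n", constant_time_equals(secret, std::strlen(secret), "hunter2", 7));  // 1
  std::printf("%d\n", constant_time_equals(secret, std::strlen(secret), "hunter3", 7));  // 0
}
```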
void APIServer::handle_disconnect(APIConnection *conn) {}
|
||||
|
||||
// Macro for controller update dispatch
|
||||
// Macro for entities without extra parameters
|
||||
#define API_DISPATCH_UPDATE(entity_type, entity_name) \
|
||||
void APIServer::on_##entity_name##_update(entity_type *obj) { /* NOLINT(bugprone-macro-parentheses) */ \
|
||||
if (obj->is_internal()) \
|
||||
@@ -270,6 +258,15 @@ void APIServer::handle_disconnect(APIConnection *conn) {}
|
||||
c->send_##entity_name##_state(obj); \
|
||||
}
|
||||
|
||||
// Macro for entities with extra parameters (but parameters not used in send)
|
||||
#define API_DISPATCH_UPDATE_IGNORE_PARAMS(entity_type, entity_name, ...) \
|
||||
void APIServer::on_##entity_name##_update(entity_type *obj, __VA_ARGS__) { /* NOLINT(bugprone-macro-parentheses) */ \
|
||||
if (obj->is_internal()) \
|
||||
return; \
|
||||
for (auto &c : this->clients_) \
|
||||
c->send_##entity_name##_state(obj); \
|
||||
}
|
||||
|
||||
#ifdef USE_BINARY_SENSOR
|
||||
API_DISPATCH_UPDATE(binary_sensor::BinarySensor, binary_sensor)
|
||||
#endif
|
||||
@@ -287,15 +284,15 @@ API_DISPATCH_UPDATE(light::LightState, light)
|
||||
#endif
|
||||
|
||||
#ifdef USE_SENSOR
|
||||
API_DISPATCH_UPDATE(sensor::Sensor, sensor)
|
||||
API_DISPATCH_UPDATE_IGNORE_PARAMS(sensor::Sensor, sensor, float state)
|
||||
#endif
|
||||
|
||||
#ifdef USE_SWITCH
|
||||
API_DISPATCH_UPDATE(switch_::Switch, switch)
|
||||
API_DISPATCH_UPDATE_IGNORE_PARAMS(switch_::Switch, switch, bool state)
|
||||
#endif
|
||||
|
||||
#ifdef USE_TEXT_SENSOR
|
||||
API_DISPATCH_UPDATE(text_sensor::TextSensor, text_sensor)
|
||||
API_DISPATCH_UPDATE_IGNORE_PARAMS(text_sensor::TextSensor, text_sensor, const std::string &state)
|
||||
#endif
|
||||
|
||||
#ifdef USE_CLIMATE
|
||||
@@ -303,7 +300,7 @@ API_DISPATCH_UPDATE(climate::Climate, climate)
|
||||
#endif
|
||||
|
||||
#ifdef USE_NUMBER
|
||||
API_DISPATCH_UPDATE(number::Number, number)
|
||||
API_DISPATCH_UPDATE_IGNORE_PARAMS(number::Number, number, float state)
|
||||
#endif
|
||||
|
||||
#ifdef USE_DATETIME_DATE
|
||||
@@ -319,11 +316,11 @@ API_DISPATCH_UPDATE(datetime::DateTimeEntity, datetime)
|
||||
#endif
|
||||
|
||||
#ifdef USE_TEXT
|
||||
API_DISPATCH_UPDATE(text::Text, text)
|
||||
API_DISPATCH_UPDATE_IGNORE_PARAMS(text::Text, text, const std::string &state)
|
||||
#endif
|
||||
|
||||
#ifdef USE_SELECT
|
||||
API_DISPATCH_UPDATE(select::Select, select)
|
||||
API_DISPATCH_UPDATE_IGNORE_PARAMS(select::Select, select, const std::string &state, size_t index)
|
||||
#endif
|
||||
|
||||
#ifdef USE_LOCK
|
||||
@@ -338,18 +335,13 @@ API_DISPATCH_UPDATE(valve::Valve, valve)
|
||||
API_DISPATCH_UPDATE(media_player::MediaPlayer, media_player)
|
||||
#endif
|
||||
|
||||
#ifdef USE_WATER_HEATER
|
||||
API_DISPATCH_UPDATE(water_heater::WaterHeater, water_heater)
|
||||
#endif
|
||||
|
||||
#ifdef USE_EVENT
|
||||
// Event is a special case - unlike other entities with simple state fields,
|
||||
// events store their state in a member accessed via obj->get_last_event_type()
|
||||
void APIServer::on_event(event::Event *obj) {
|
||||
// Event is a special case - it's the only entity that passes extra parameters to the send method
|
||||
void APIServer::on_event(event::Event *obj, const std::string &event_type) {
|
||||
if (obj->is_internal())
|
||||
return;
|
||||
for (auto &c : this->clients_)
|
||||
c->send_event(obj, obj->get_last_event_type());
|
||||
c->send_event(obj, event_type);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -363,15 +355,6 @@ void APIServer::on_update(update::UpdateEntity *obj) {
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void APIServer::on_zwave_proxy_request(const esphome::api::ProtoMessage &msg) {
|
||||
// We could add code to manage a second subscription type, but, since this message type is
|
||||
// very infrequent and small, we simply send it to all clients
|
||||
for (auto &c : this->clients_)
|
||||
c->send_message(msg, api::ZWaveProxyRequest::MESSAGE_TYPE);
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_ALARM_CONTROL_PANEL
|
||||
API_DISPATCH_UPDATE(alarm_control_panel::AlarmControlPanel, alarm_control_panel)
|
||||
#endif
|
||||
@@ -387,95 +370,33 @@ void APIServer::set_password(const std::string &password) { this->password_ = pa
|
||||
void APIServer::set_batch_delay(uint16_t batch_delay) { this->batch_delay_ = batch_delay; }
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void APIServer::send_homeassistant_action(const HomeassistantActionRequest &call) {
|
||||
void APIServer::send_homeassistant_service_call(const HomeassistantServiceResponse &call) {
|
||||
for (auto &client : this->clients_) {
|
||||
client->send_homeassistant_action(call);
|
||||
client->send_homeassistant_service_call(call);
|
||||
}
|
||||
}
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
void APIServer::register_action_response_callback(uint32_t call_id, ActionResponseCallback callback) {
|
||||
this->action_response_callbacks_.push_back({call_id, std::move(callback)});
|
||||
}
|
||||
|
||||
void APIServer::handle_action_response(uint32_t call_id, bool success, const std::string &error_message) {
|
||||
for (auto it = this->action_response_callbacks_.begin(); it != this->action_response_callbacks_.end(); ++it) {
|
||||
if (it->call_id == call_id) {
|
||||
auto callback = std::move(it->callback);
|
||||
this->action_response_callbacks_.erase(it);
|
||||
ActionResponse response(success, error_message);
|
||||
callback(response);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
void APIServer::handle_action_response(uint32_t call_id, bool success, const std::string &error_message,
|
||||
const uint8_t *response_data, size_t response_data_len) {
|
||||
for (auto it = this->action_response_callbacks_.begin(); it != this->action_response_callbacks_.end(); ++it) {
|
||||
if (it->call_id == call_id) {
|
||||
auto callback = std::move(it->callback);
|
||||
this->action_response_callbacks_.erase(it);
|
||||
ActionResponse response(success, error_message, response_data, response_data_len);
|
||||
callback(response);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
#endif // USE_API_HOMEASSISTANT_SERVICES
|
||||
#endif
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
// Helper to add subscription (reduces duplication)
|
||||
void APIServer::add_state_subscription_(const char *entity_id, const char *attribute,
|
||||
std::function<void(const std::string &)> f, bool once) {
|
||||
void APIServer::subscribe_home_assistant_state(std::string entity_id, optional<std::string> attribute,
|
||||
std::function<void(std::string)> f) {
|
||||
this->state_subs_.push_back(HomeAssistantStateSubscription{
|
||||
.entity_id = entity_id, .attribute = attribute, .callback = std::move(f), .once = once,
|
||||
// entity_id_dynamic_storage and attribute_dynamic_storage remain nullptr (no heap allocation)
|
||||
.entity_id = std::move(entity_id),
|
||||
.attribute = std::move(attribute),
|
||||
.callback = std::move(f),
|
||||
.once = false,
|
||||
});
|
||||
}
|
||||
|
||||
// Helper to add subscription with heap-allocated strings (reduces duplication)
|
||||
void APIServer::add_state_subscription_(std::string entity_id, optional<std::string> attribute,
|
||||
std::function<void(const std::string &)> f, bool once) {
|
||||
HomeAssistantStateSubscription sub;
|
||||
// Allocate heap storage for the strings
|
||||
sub.entity_id_dynamic_storage = std::make_unique<std::string>(std::move(entity_id));
|
||||
sub.entity_id = sub.entity_id_dynamic_storage->c_str();
|
||||
|
||||
if (attribute.has_value()) {
|
||||
sub.attribute_dynamic_storage = std::make_unique<std::string>(std::move(attribute.value()));
|
||||
sub.attribute = sub.attribute_dynamic_storage->c_str();
|
||||
} else {
|
||||
sub.attribute = nullptr;
|
||||
}
|
||||
|
||||
sub.callback = std::move(f);
|
||||
sub.once = once;
|
||||
this->state_subs_.push_back(std::move(sub));
|
||||
}
|
||||
|
||||
// New const char* overload (for internal components - zero allocation)
|
||||
void APIServer::subscribe_home_assistant_state(const char *entity_id, const char *attribute,
|
||||
std::function<void(const std::string &)> f) {
|
||||
this->add_state_subscription_(entity_id, attribute, std::move(f), false);
|
||||
}
|
||||
|
||||
void APIServer::get_home_assistant_state(const char *entity_id, const char *attribute,
|
||||
std::function<void(const std::string &)> f) {
|
||||
this->add_state_subscription_(entity_id, attribute, std::move(f), true);
|
||||
}
|
||||
|
||||
// Existing std::string overload (for custom_api_device.h - heap allocation)
|
||||
void APIServer::subscribe_home_assistant_state(std::string entity_id, optional<std::string> attribute,
|
||||
std::function<void(const std::string &)> f) {
|
||||
this->add_state_subscription_(std::move(entity_id), std::move(attribute), std::move(f), false);
|
||||
}
|
||||
|
||||
void APIServer::get_home_assistant_state(std::string entity_id, optional<std::string> attribute,
|
||||
std::function<void(const std::string &)> f) {
|
||||
this->add_state_subscription_(std::move(entity_id), std::move(attribute), std::move(f), true);
|
||||
}
|
||||
std::function<void(std::string)> f) {
|
||||
this->state_subs_.push_back(HomeAssistantStateSubscription{
|
||||
.entity_id = std::move(entity_id),
|
||||
.attribute = std::move(attribute),
|
||||
.callback = std::move(f),
|
||||
.once = true,
|
||||
});
|
||||
};
|
||||
|
||||
const std::vector<APIServer::HomeAssistantStateSubscription> &APIServer::get_state_subs() const {
|
||||
return this->state_subs_;
|
||||
@@ -487,10 +408,16 @@ uint16_t APIServer::get_port() const { return this->port_; }
|
||||
void APIServer::set_reboot_timeout(uint32_t reboot_timeout) { this->reboot_timeout_ = reboot_timeout; }
|
||||
|
||||
#ifdef USE_API_NOISE
|
||||
bool APIServer::update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg,
|
||||
const LogString *fail_log_msg, const psk_t &active_psk, bool make_active) {
|
||||
if (!this->noise_pref_.save(&new_psk)) {
|
||||
ESP_LOGW(TAG, "%s", LOG_STR_ARG(fail_log_msg));
|
||||
bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
|
||||
auto &old_psk = this->noise_ctx_->get_psk();
|
||||
if (std::equal(old_psk.begin(), old_psk.end(), psk.begin())) {
|
||||
ESP_LOGW(TAG, "New PSK matches old");
|
||||
return true;
|
||||
}
|
||||
|
||||
SavedNoisePsk new_saved_psk{psk};
|
||||
if (!this->noise_pref_.save(&new_saved_psk)) {
|
||||
ESP_LOGW(TAG, "Failed to save Noise PSK");
|
||||
return false;
|
||||
}
|
||||
// ensure it's written immediately
|
||||
@@ -498,11 +425,11 @@ bool APIServer::update_noise_psk_(const SavedNoisePsk &new_psk, const LogString
|
||||
ESP_LOGW(TAG, "Failed to sync preferences");
|
||||
return false;
|
||||
}
|
||||
ESP_LOGD(TAG, "%s", LOG_STR_ARG(save_log_msg));
|
||||
ESP_LOGD(TAG, "Noise PSK saved");
|
||||
if (make_active) {
|
||||
this->set_timeout(100, [this, active_psk]() {
|
||||
this->set_timeout(100, [this, psk]() {
|
||||
ESP_LOGW(TAG, "Disconnecting all clients to reset PSK");
|
||||
this->set_noise_psk(active_psk);
|
||||
this->set_noise_psk(psk);
|
||||
for (auto &c : this->clients_) {
|
||||
DisconnectRequest req;
|
||||
c->send_message(req, DisconnectRequest::MESSAGE_TYPE);
|
||||
@@ -511,38 +438,6 @@ bool APIServer::update_noise_psk_(const SavedNoisePsk &new_psk, const LogString
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
|
||||
#ifdef USE_API_NOISE_PSK_FROM_YAML
|
||||
// When PSK is set from YAML, this function should never be called
|
||||
// but if it is, reject the change
|
||||
ESP_LOGW(TAG, "Key set in YAML");
|
||||
return false;
|
||||
#else
|
||||
auto &old_psk = this->noise_ctx_.get_psk();
|
||||
if (std::equal(old_psk.begin(), old_psk.end(), psk.begin())) {
|
||||
ESP_LOGW(TAG, "New PSK matches old");
|
||||
return true;
|
||||
}
|
||||
|
||||
SavedNoisePsk new_saved_psk{psk};
|
||||
return this->update_noise_psk_(new_saved_psk, LOG_STR("Noise PSK saved"), LOG_STR("Failed to save Noise PSK"), psk,
|
||||
make_active);
|
||||
#endif
|
||||
}
|
||||
bool APIServer::clear_noise_psk(bool make_active) {
|
||||
#ifdef USE_API_NOISE_PSK_FROM_YAML
|
||||
// When PSK is set from YAML, this function should never be called
|
||||
// but if it is, reject the change
|
||||
ESP_LOGW(TAG, "Key set in YAML");
|
||||
return false;
|
||||
#else
|
||||
SavedNoisePsk empty_psk{};
|
||||
psk_t empty{};
|
||||
return this->update_noise_psk_(empty_psk, LOG_STR("Noise PSK cleared"), LOG_STR("Failed to clear Noise PSK"), empty,
|
||||
make_active);
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_HOMEASSISTANT_TIME
|
||||
@@ -554,42 +449,7 @@ void APIServer::request_time() {
|
||||
}
|
||||
#endif
|
||||
|
||||
bool APIServer::is_connected(bool state_subscription_only) const {
|
||||
if (!state_subscription_only) {
|
||||
return !this->clients_.empty();
|
||||
}
|
||||
|
||||
for (const auto &client : this->clients_) {
|
||||
if (client->flags_.state_subscription) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
#ifdef USE_LOGGER
|
||||
void APIServer::on_log(uint8_t level, const char *tag, const char *message, size_t message_len) {
|
||||
if (this->shutting_down_) {
|
||||
// Don't try to send logs during shutdown
|
||||
// as it could result in a recursion and
|
||||
// we would be filling a buffer we are trying to clear
|
||||
return;
|
||||
}
|
||||
for (auto &c : this->clients_) {
|
||||
if (!c->flags_.remove && c->get_log_subscription_level() >= level)
|
||||
c->try_send_log_message(level, tag, message, message_len);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifdef USE_CAMERA
|
||||
void APIServer::on_camera_image(const std::shared_ptr<camera::CameraImage> &image) {
|
||||
for (auto &c : this->clients_) {
|
||||
if (!c->flags_.remove)
|
||||
c->set_camera_state(image);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
bool APIServer::is_connected() const { return !this->clients_.empty(); }
|
||||
|
||||
void APIServer::on_shutdown() {
|
||||
this->shutting_down_ = true;
|
||||
@@ -626,84 +486,5 @@ bool APIServer::teardown() {
|
||||
return this->clients_.empty();
|
||||
}
|
||||
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
// Timeout for action calls - matches aioesphomeapi client timeout (default 30s)
// Can be overridden via USE_API_ACTION_CALL_TIMEOUT_MS define for testing
#ifndef USE_API_ACTION_CALL_TIMEOUT_MS
#define USE_API_ACTION_CALL_TIMEOUT_MS 30000  // NOLINT
#endif

uint32_t APIServer::register_active_action_call(uint32_t client_call_id, APIConnection *conn) {
  uint32_t action_call_id = this->next_action_call_id_++;
  // Handle wraparound (skip 0 as it means "no call")
  if (this->next_action_call_id_ == 0) {
    this->next_action_call_id_ = 1;
  }
  this->active_action_calls_.push_back({action_call_id, client_call_id, conn});

  // Schedule automatic cleanup after timeout (client will have given up by then)
  this->set_timeout(str_sprintf("action_call_%u", action_call_id), USE_API_ACTION_CALL_TIMEOUT_MS,
                    [this, action_call_id]() {
                      ESP_LOGD(TAG, "Action call %u timed out", action_call_id);
                      this->unregister_active_action_call(action_call_id);
                    });

  return action_call_id;
}

void APIServer::unregister_active_action_call(uint32_t action_call_id) {
  // Cancel the timeout for this action call
  this->cancel_timeout(str_sprintf("action_call_%u", action_call_id));

  // Swap-and-pop is more efficient than remove_if for unordered vectors
  for (size_t i = 0; i < this->active_action_calls_.size(); i++) {
    if (this->active_action_calls_[i].action_call_id == action_call_id) {
      std::swap(this->active_action_calls_[i], this->active_action_calls_.back());
      this->active_action_calls_.pop_back();
      return;
    }
  }
}

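The comment in the hunk above calls out swap-and-pop; as a generic illustration of that removal pattern, independent of the APIServer types:

```cpp
// Sketch of swap-and-pop removal from an unordered std::vector: O(1) per
// erase instead of shifting the tail, at the cost of reordering elements.
#include <cstdio>
#include <utility>
#include <vector>

struct Call {
  unsigned id;
};

static void erase_by_id(std::vector<Call> &calls, unsigned id) {
  for (size_t i = 0; i < calls.size(); i++) {
    if (calls[i].id == id) {
      std::swap(calls[i], calls.back());  // move the last element into the hole
      calls.pop_back();                   // drop the now-duplicated tail
      return;
    }
  }
}

int main() {
  std::vector<Call> calls{{1}, {2}, {3}, {4}};
  erase_by_id(calls, 2);
  for (const auto &c : calls)
    std::printf("%u ", c.id);  // prints: 1 4 3
  std::printf("\n");
}
```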
void APIServer::unregister_active_action_calls_for_connection(APIConnection *conn) {
|
||||
// Remove all active action calls for disconnected connection using swap-and-pop
|
||||
for (size_t i = 0; i < this->active_action_calls_.size();) {
|
||||
if (this->active_action_calls_[i].connection == conn) {
|
||||
// Cancel the timeout for this action call
|
||||
this->cancel_timeout(str_sprintf("action_call_%u", this->active_action_calls_[i].action_call_id));
|
||||
|
||||
std::swap(this->active_action_calls_[i], this->active_action_calls_.back());
|
||||
this->active_action_calls_.pop_back();
|
||||
// Don't increment i - need to check the swapped element
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void APIServer::send_action_response(uint32_t action_call_id, bool success, const std::string &error_message) {
|
||||
for (auto &call : this->active_action_calls_) {
|
||||
if (call.action_call_id == action_call_id) {
|
||||
call.connection->send_execute_service_response(call.client_call_id, success, error_message);
|
||||
return;
|
||||
}
|
||||
}
|
||||
ESP_LOGW(TAG, "Cannot send response: no active call found for action_call_id %u", action_call_id);
|
||||
}
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
void APIServer::send_action_response(uint32_t action_call_id, bool success, const std::string &error_message,
|
||||
const uint8_t *response_data, size_t response_data_len) {
|
||||
for (auto &call : this->active_action_calls_) {
|
||||
if (call.action_call_id == action_call_id) {
|
||||
call.connection->send_execute_service_response(call.client_call_id, success, error_message, response_data,
|
||||
response_data_len);
|
||||
return;
|
||||
}
|
||||
}
|
||||
ESP_LOGW(TAG, "Cannot send response: no active call found for action_call_id %u", action_call_id);
|
||||
}
|
||||
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
|
||||
} // namespace esphome::api
|
||||
#endif
|
||||
|
||||
@@ -12,39 +12,21 @@
|
||||
#include "esphome/core/log.h"
|
||||
#include "list_entities.h"
|
||||
#include "subscribe_state.h"
|
||||
#ifdef USE_LOGGER
|
||||
#include "esphome/components/logger/logger.h"
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
#include "esphome/components/camera/camera.h"
|
||||
#ifdef USE_API_SERVICES
|
||||
#include "user_services.h"
|
||||
#endif
|
||||
|
||||
#include <vector>
|
||||
|
||||
namespace esphome::api {
|
||||
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
// Forward declaration - full definition in user_services.h
|
||||
class UserServiceDescriptor;
|
||||
#endif
|
||||
|
||||
#ifdef USE_API_NOISE
|
||||
struct SavedNoisePsk {
|
||||
psk_t psk;
|
||||
} PACKED; // NOLINT
|
||||
#endif
|
||||
|
||||
class APIServer : public Component,
|
||||
public Controller
|
||||
#ifdef USE_LOGGER
|
||||
,
|
||||
public logger::LogListener
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
,
|
||||
public camera::CameraListener
|
||||
#endif
|
||||
{
|
||||
class APIServer : public Component, public Controller {
|
||||
public:
|
||||
APIServer();
|
||||
void setup() override;
|
||||
@@ -54,31 +36,22 @@ class APIServer : public Component,
|
||||
void dump_config() override;
|
||||
void on_shutdown() override;
|
||||
bool teardown() override;
|
||||
#ifdef USE_LOGGER
|
||||
void on_log(uint8_t level, const char *tag, const char *message, size_t message_len) override;
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
void on_camera_image(const std::shared_ptr<camera::CameraImage> &image) override;
|
||||
#endif
|
||||
#ifdef USE_API_PASSWORD
|
||||
bool check_password(const uint8_t *password_data, size_t password_len) const;
|
||||
bool check_password(const std::string &password) const;
|
||||
void set_password(const std::string &password);
|
||||
#endif
|
||||
void set_port(uint16_t port);
|
||||
void set_reboot_timeout(uint32_t reboot_timeout);
|
||||
void set_batch_delay(uint16_t batch_delay);
|
||||
uint16_t get_batch_delay() const { return batch_delay_; }
|
||||
void set_listen_backlog(uint8_t listen_backlog) { this->listen_backlog_ = listen_backlog; }
|
||||
void set_max_connections(uint8_t max_connections) { this->max_connections_ = max_connections; }
|
||||
|
||||
// Get reference to shared buffer for API connections
|
||||
std::vector<uint8_t> &get_shared_buffer_ref() { return shared_write_buffer_; }
|
||||
|
||||
#ifdef USE_API_NOISE
|
||||
bool save_noise_psk(psk_t psk, bool make_active = true);
|
||||
bool clear_noise_psk(bool make_active = true);
|
||||
void set_noise_psk(psk_t psk) { this->noise_ctx_.set_psk(psk); }
|
||||
APINoiseContext &get_noise_ctx() { return this->noise_ctx_; }
|
||||
void set_noise_psk(psk_t psk) { noise_ctx_->set_psk(psk); }
|
||||
std::shared_ptr<APINoiseContext> get_noise_ctx() { return noise_ctx_; }
|
||||
#endif // USE_API_NOISE
|
||||
|
||||
void handle_disconnect(APIConnection *conn);
|
||||
@@ -95,19 +68,19 @@ class APIServer : public Component,
|
||||
void on_light_update(light::LightState *obj) override;
|
||||
#endif
|
||||
#ifdef USE_SENSOR
|
||||
void on_sensor_update(sensor::Sensor *obj) override;
|
||||
void on_sensor_update(sensor::Sensor *obj, float state) override;
|
||||
#endif
|
||||
#ifdef USE_SWITCH
|
||||
void on_switch_update(switch_::Switch *obj) override;
|
||||
void on_switch_update(switch_::Switch *obj, bool state) override;
|
||||
#endif
|
||||
#ifdef USE_TEXT_SENSOR
|
||||
void on_text_sensor_update(text_sensor::TextSensor *obj) override;
|
||||
void on_text_sensor_update(text_sensor::TextSensor *obj, const std::string &state) override;
|
||||
#endif
|
||||
#ifdef USE_CLIMATE
|
||||
void on_climate_update(climate::Climate *obj) override;
|
||||
#endif
|
||||
#ifdef USE_NUMBER
|
||||
void on_number_update(number::Number *obj) override;
|
||||
void on_number_update(number::Number *obj, float state) override;
|
||||
#endif
|
||||
#ifdef USE_DATETIME_DATE
|
||||
void on_date_update(datetime::DateEntity *obj) override;
|
||||
@@ -119,10 +92,10 @@ class APIServer : public Component,
|
||||
void on_datetime_update(datetime::DateTimeEntity *obj) override;
|
||||
#endif
|
||||
#ifdef USE_TEXT
|
||||
void on_text_update(text::Text *obj) override;
|
||||
void on_text_update(text::Text *obj, const std::string &state) override;
|
||||
#endif
|
||||
#ifdef USE_SELECT
|
||||
void on_select_update(select::Select *obj) override;
|
||||
void on_select_update(select::Select *obj, const std::string &state, size_t index) override;
|
||||
#endif
|
||||
#ifdef USE_LOCK
|
||||
void on_lock_update(lock::Lock *obj) override;
|
||||
@@ -133,44 +106,11 @@ class APIServer : public Component,
|
||||
#ifdef USE_MEDIA_PLAYER
|
||||
void on_media_player_update(media_player::MediaPlayer *obj) override;
|
||||
#endif
|
||||
#ifdef USE_WATER_HEATER
|
||||
void on_water_heater_update(water_heater::WaterHeater *obj) override;
|
||||
#endif
|
||||
#ifdef USE_API_HOMEASSISTANT_SERVICES
|
||||
void send_homeassistant_action(const HomeassistantActionRequest &call);
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
// Action response handling
|
||||
using ActionResponseCallback = std::function<void(const class ActionResponse &)>;
|
||||
void register_action_response_callback(uint32_t call_id, ActionResponseCallback callback);
|
||||
void handle_action_response(uint32_t call_id, bool success, const std::string &error_message);
|
||||
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
void handle_action_response(uint32_t call_id, bool success, const std::string &error_message,
|
||||
const uint8_t *response_data, size_t response_data_len);
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
|
||||
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
|
||||
#endif // USE_API_HOMEASSISTANT_SERVICES
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
void initialize_user_services(std::initializer_list<UserServiceDescriptor *> services) {
|
||||
this->user_services_.assign(services);
|
||||
}
|
||||
#ifdef USE_API_CUSTOM_SERVICES
|
||||
// Only compile push_back method when custom_services: true (external components)
|
||||
void register_user_service(UserServiceDescriptor *descriptor) { this->user_services_.push_back(descriptor); }
|
||||
void send_homeassistant_service_call(const HomeassistantServiceResponse &call);
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
// Action call context management - supports concurrent calls from multiple clients
|
||||
// Returns server-generated action_call_id to avoid collisions when clients use same call_id
|
||||
uint32_t register_active_action_call(uint32_t client_call_id, APIConnection *conn);
|
||||
void unregister_active_action_call(uint32_t action_call_id);
|
||||
void unregister_active_action_calls_for_connection(APIConnection *conn);
|
||||
// Send response for a specific action call (uses action_call_id, sends client_call_id in response)
|
||||
void send_action_response(uint32_t action_call_id, bool success, const std::string &error_message);
|
||||
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
void send_action_response(uint32_t action_call_id, bool success, const std::string &error_message,
|
||||
const uint8_t *response_data, size_t response_data_len);
|
||||
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES_JSON
|
||||
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES
|
||||
#ifdef USE_API_SERVICES
|
||||
void register_user_service(UserServiceDescriptor *descriptor) { this->user_services_.push_back(descriptor); }
|
||||
#endif
|
||||
#ifdef USE_HOMEASSISTANT_TIME
|
||||
void request_time();
|
||||
@@ -180,45 +120,29 @@ class APIServer : public Component,
|
||||
void on_alarm_control_panel_update(alarm_control_panel::AlarmControlPanel *obj) override;
|
||||
#endif
|
||||
#ifdef USE_EVENT
|
||||
void on_event(event::Event *obj) override;
|
||||
void on_event(event::Event *obj, const std::string &event_type) override;
|
||||
#endif
|
||||
#ifdef USE_UPDATE
|
||||
void on_update(update::UpdateEntity *obj) override;
|
||||
#endif
|
||||
#ifdef USE_ZWAVE_PROXY
|
||||
void on_zwave_proxy_request(const esphome::api::ProtoMessage &msg);
|
||||
#endif
|
||||
|
||||
bool is_connected(bool state_subscription_only = false) const;
|
||||
bool is_connected() const;
|
||||
|
||||
#ifdef USE_API_HOMEASSISTANT_STATES
|
||||
struct HomeAssistantStateSubscription {
|
||||
const char *entity_id; // Pointer to flash (internal) or heap (external)
|
||||
const char *attribute; // Pointer to flash or nullptr (nullptr means no attribute)
|
||||
std::function<void(const std::string &)> callback;
|
||||
std::string entity_id;
|
||||
optional<std::string> attribute;
|
||||
std::function<void(std::string)> callback;
|
||||
bool once;
|
||||
|
||||
// Dynamic storage for external components using std::string API (custom_api_device.h)
|
||||
// These are only allocated when using the std::string overload (nullptr for const char* overload)
|
||||
std::unique_ptr<std::string> entity_id_dynamic_storage;
|
||||
std::unique_ptr<std::string> attribute_dynamic_storage;
|
||||
};
|
||||
|
||||
// New const char* overload (for internal components - zero allocation)
|
||||
void subscribe_home_assistant_state(const char *entity_id, const char *attribute,
|
||||
std::function<void(const std::string &)> f);
|
||||
void get_home_assistant_state(const char *entity_id, const char *attribute,
|
||||
std::function<void(const std::string &)> f);
|
||||
|
||||
// Existing std::string overload (for custom_api_device.h - heap allocation)
|
||||
void subscribe_home_assistant_state(std::string entity_id, optional<std::string> attribute,
|
||||
std::function<void(const std::string &)> f);
|
||||
std::function<void(std::string)> f);
|
||||
void get_home_assistant_state(std::string entity_id, optional<std::string> attribute,
|
||||
std::function<void(const std::string &)> f);
|
||||
|
||||
std::function<void(std::string)> f);
|
||||
const std::vector<HomeAssistantStateSubscription> &get_state_subs() const;
|
||||
#endif
|
||||
#ifdef USE_API_USER_DEFINED_ACTIONS
|
||||
#ifdef USE_API_SERVICES
|
||||
const std::vector<UserServiceDescriptor *> &get_user_services() const { return this->user_services_; }
|
||||
#endif
|
||||
|
@@ -232,17 +156,7 @@ class APIServer : public Component,
#endif

 protected:
#ifdef USE_API_NOISE
  bool update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg, const LogString *fail_log_msg,
                         const psk_t &active_psk, bool make_active);
#endif // USE_API_NOISE
#ifdef USE_API_HOMEASSISTANT_STATES
  // Helper methods to reduce code duplication
  void add_state_subscription_(const char *entity_id, const char *attribute, std::function<void(const std::string &)> f,
                               bool once);
  void add_state_subscription_(std::string entity_id, optional<std::string> attribute,
                               std::function<void(const std::string &)> f, bool once);
#endif // USE_API_HOMEASSISTANT_STATES
  void schedule_reboot_timeout_();
  // Pointers and pointer-like types first (4 bytes each)
  std::unique_ptr<socket::Socket> socket_ = nullptr;
#ifdef USE_API_CLIENT_CONNECTED_TRIGGER
@@ -254,7 +168,6 @@ class APIServer : public Component,

  // 4-byte aligned types
  uint32_t reboot_timeout_{300000};
  uint32_t last_connected_{0};

  // Vectors and strings (12 bytes each on 32-bit)
  std::vector<std::unique_ptr<APIConnection>> clients_;
@@ -265,40 +178,18 @@ class APIServer : public Component,
#ifdef USE_API_HOMEASSISTANT_STATES
  std::vector<HomeAssistantStateSubscription> state_subs_;
#endif
#ifdef USE_API_USER_DEFINED_ACTIONS
#ifdef USE_API_SERVICES
  std::vector<UserServiceDescriptor *> user_services_;
#ifdef USE_API_USER_DEFINED_ACTION_RESPONSES
  // Active action calls - supports concurrent calls from multiple clients
  // Uses server-generated action_call_id to avoid collisions when multiple clients use same call_id
  struct ActiveActionCall {
    uint32_t action_call_id; // Server-generated unique ID (passed to actions)
    uint32_t client_call_id; // Client's original call_id (used in response)
    APIConnection *connection;
  };
  std::vector<ActiveActionCall> active_action_calls_;
  uint32_t next_action_call_id_{1}; // Counter for generating unique action_call_ids
#endif // USE_API_USER_DEFINED_ACTION_RESPONSES
#endif
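The `ActiveActionCall` bookkeeping above exists because two clients can legitimately send the same `call_id`; the server therefore hands each in-flight call its own id and keeps the mapping needed to route the response back. A reduced sketch of that idea, where `ActionRegistry`, `Connection`, and `ActiveCall` are illustrative stand-ins rather than ESPHome classes:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

struct Connection {
  int fd;  // stand-in for a client connection
};

struct ActiveCall {
  uint32_t server_id;       // server-generated, unique per in-flight call
  uint32_t client_call_id;  // the client's id, echoed back in the response
  Connection *connection;   // where the response must go
};

class ActionRegistry {
 public:
  // Assign a fresh server-side id; this is what gets handed to the action.
  uint32_t start_call(uint32_t client_call_id, Connection *conn) {
    uint32_t id = this->next_id_++;
    this->active_.push_back({id, client_call_id, conn});
    return id;
  }

  // When the action finishes, look the call up by server id and route the response.
  void finish_call(uint32_t server_id) {
    for (auto it = this->active_.begin(); it != this->active_.end(); ++it) {
      if (it->server_id == server_id) {
        std::printf("respond on fd=%d with call_id=%u\n", it->connection->fd,
                    (unsigned) it->client_call_id);
        this->active_.erase(it);
        return;
      }
    }
  }

 private:
  std::vector<ActiveCall> active_;
  uint32_t next_id_{1};
};

int main() {
  Connection a{3}, b{4};
  ActionRegistry reg;
  uint32_t first = reg.start_call(1, &a);   // both clients happened to pick call_id 1
  uint32_t second = reg.start_call(1, &b);  // no collision: server ids differ
  reg.finish_call(second);
  reg.finish_call(first);
  return 0;
}
```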
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
  struct PendingActionResponse {
    uint32_t call_id;
    ActionResponseCallback callback;
  };
  std::vector<PendingActionResponse> action_response_callbacks_;
#endif

  // Group smaller types together
  uint16_t port_{6053};
  uint16_t batch_delay_{100};
  // Connection limits - these defaults will be overridden by config values
  // from cv.SplitDefault in __init__.py which sets platform-specific defaults
  uint8_t listen_backlog_{4};
  uint8_t max_connections_{8};
  bool shutting_down_ = false;
  // 7 bytes used, 1 byte padding
  // 5 bytes used, 3 bytes padding

#ifdef USE_API_NOISE
  APINoiseContext noise_ctx_;
  std::shared_ptr<APINoiseContext> noise_ctx_ = std::make_shared<APINoiseContext>();
  ESPPreferenceObject noise_pref_;
#endif // USE_API_NOISE
};
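The layout comments above (pointers first, then 4-byte members, vectors and strings next, small members grouped at the end with explicit notes about leftover padding) are about keeping the class small on 32-bit targets. A self-contained illustration of why the ordering matters; the member names below are hypothetical mirrors of the real fields, and sizes are printed rather than asserted because they are target-dependent:

```cpp
#include <cstdint>
#include <cstdio>

// Poorly ordered: alignment requirements force padding holes between members
// (figures in the comments assume a typical 32-bit target with 4-byte pointers).
struct Interleaved {
  bool shutting_down;       // 1 byte + 3 bytes padding to align the pointer
  void *socket;             // 4 bytes
  uint8_t max_connections;  // 1 byte + 1 byte padding to align the uint16_t
  uint16_t port;            // 2 bytes
  uint8_t listen_backlog;   // 1 byte + 3 bytes padding to align the uint32_t
  uint32_t reboot_timeout;  // 4 bytes
};

// Ordered by the rule used in the diff: only a small tail pad remains.
struct Grouped {
  void *socket;             // pointers first
  uint32_t reboot_timeout;  // then 4-byte members
  uint16_t port;            // then 2-byte members
  uint8_t listen_backlog;   // then 1-byte members packed together
  uint8_t max_connections;
  bool shutting_down;
};

int main() {
  std::printf("interleaved=%zu grouped=%zu\n", sizeof(Interleaved), sizeof(Grouped));
  return 0;
}
```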
@@ -306,11 +197,8 @@ class APIServer : public Component,
extern APIServer *global_api_server; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables)

template<typename... Ts> class APIConnectedCondition : public Condition<Ts...> {
  TEMPLATABLE_VALUE(bool, state_subscription_only)
 public:
  bool check(const Ts &...x) override {
    return global_api_server->is_connected(this->state_subscription_only_.value(x...));
  }
  bool check(Ts... x) override { return global_api_server->is_connected(); }
};

} // namespace esphome::api

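The `APIConnectedCondition` change above threads a templatable `state_subscription_only` flag through to `is_connected()`. A toy reduction of that control flow; the `Server` semantics here (fields `has_client` and `client_subscribed`) are an assumption for illustration, not ESPHome members:

```cpp
#include <cstdio>

struct Server {
  bool has_client{true};
  bool client_subscribed{false};
  // Assumed semantics: with state_subscription_only set, only clients that
  // subscribed to state updates count as "connected".
  bool is_connected(bool state_subscription_only = false) const {
    return state_subscription_only ? (has_client && client_subscribed) : has_client;
  }
};

int main() {
  Server s;
  std::printf("any client connected: %d\n", s.is_connected());
  std::printf("state subscribers connected: %d\n", s.is_connected(true));
  return 0;
}
```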
@@ -30,7 +30,7 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)


async def async_run_logs(config: dict[str, Any], addresses: list[str]) -> None:
async def async_run_logs(config: dict[str, Any], address: str) -> None:
    """Run the logs command in the event loop."""
    conf = config["api"]
    name = config["esphome"]["name"]
@@ -39,21 +39,13 @@ async def async_run_logs(config: dict[str, Any], addresses: list[str]) -> None:
    noise_psk: str | None = None
    if (encryption := conf.get(CONF_ENCRYPTION)) and (key := encryption.get(CONF_KEY)):
        noise_psk = key

    if len(addresses) == 1:
        _LOGGER.info("Starting log output from %s using esphome API", addresses[0])
    else:
        _LOGGER.info(
            "Starting log output from %s using esphome API", " or ".join(addresses)
        )

    _LOGGER.info("Starting log output from %s using esphome API", address)
    cli = APIClient(
        addresses[0],  # Primary address for compatibility
        address,
        port,
        password,
        client_info=f"ESPHome Logs {__version__}",
        noise_psk=noise_psk,
        addresses=addresses,  # Pass all addresses for automatic retry
    )
    dashboard = CORE.dashboard

@@ -62,11 +54,9 @@ async def async_run_logs(config: dict[str, Any], addresses: list[str]) -> None:
        time_ = datetime.now()
        message: bytes = msg.message
        text = message.decode("utf8", "backslashreplace")
        nanoseconds = time_.microsecond // 1000
        timestamp = (
            f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}.{nanoseconds:03}]"
        )
        for parsed_msg in parse_log_message(text, timestamp):
        for parsed_msg in parse_log_message(
            text, f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}]"
        ):
            print(parsed_msg.replace("\033", "\\033") if dashboard else parsed_msg)

    stop = await async_run(cli, on_log, name=name)
@@ -76,7 +66,7 @@ async def async_run_logs(config: dict[str, Any], addresses: list[str]) -> None:
        await stop()


def run_logs(config: dict[str, Any], addresses: list[str]) -> None:
def run_logs(config: dict[str, Any], address: str) -> None:
    """Run the logs command."""
    with contextlib.suppress(KeyboardInterrupt):
        asyncio.run(async_run_logs(config, addresses))
        asyncio.run(async_run_logs(config, address))

@@ -3,28 +3,25 @@
#include <map>
#include "api_server.h"
#ifdef USE_API
#ifdef USE_API_USER_DEFINED_ACTIONS
#ifdef USE_API_SERVICES
#include "user_services.h"
#endif
namespace esphome::api {

#ifdef USE_API_USER_DEFINED_ACTIONS
template<typename T, typename... Ts> class CustomAPIDeviceService : public UserServiceDynamic<Ts...> {
#ifdef USE_API_SERVICES
template<typename T, typename... Ts> class CustomAPIDeviceService : public UserServiceBase<Ts...> {
 public:
  CustomAPIDeviceService(const std::string &name, const std::array<std::string, sizeof...(Ts)> &arg_names, T *obj,
                         void (T::*callback)(Ts...))
      : UserServiceDynamic<Ts...>(name, arg_names), obj_(obj), callback_(callback) {}
      : UserServiceBase<Ts...>(name, arg_names), obj_(obj), callback_(callback) {}

 protected:
  // CustomAPIDevice services don't support action responses - ignore call_id and return_response
  void execute(uint32_t /*call_id*/, bool /*return_response*/, Ts... x) override {
    (this->obj_->*this->callback_)(x...); // NOLINT
  }
  void execute(Ts... x) override { (this->obj_->*this->callback_)(x...); } // NOLINT

  T *obj_;
  void (T::*callback_)(Ts...);
};
#endif // USE_API_USER_DEFINED_ACTIONS
#endif // USE_API_SERVICES

class CustomAPIDevice {
 public:
@@ -52,26 +49,12 @@ class CustomAPIDevice {
   * @param name The name of the service to register.
   * @param arg_names The name of the arguments for the service, must match the arguments of the function.
   */
#ifdef USE_API_USER_DEFINED_ACTIONS
#ifdef USE_API_SERVICES
  template<typename T, typename... Ts>
  void register_service(void (T::*callback)(Ts...), const std::string &name,
                        const std::array<std::string, sizeof...(Ts)> &arg_names) {
#ifdef USE_API_CUSTOM_SERVICES
    auto *service = new CustomAPIDeviceService<T, Ts...>(name, arg_names, (T *) this, callback); // NOLINT
    global_api_server->register_user_service(service);
#else
    static_assert(
        sizeof(T) == 0,
        "register_service() requires 'custom_services: true' in the 'api:' section of your YAML configuration");
#endif
  }
#else
  template<typename T, typename... Ts>
  void register_service(void (T::*callback)(Ts...), const std::string &name,
                        const std::array<std::string, sizeof...(Ts)> &arg_names) {
    static_assert(
        sizeof(T) == 0,
        "register_service() requires 'custom_services: true' in the 'api:' section of your YAML configuration");
  }
#endif

@@ -93,22 +76,10 @@ class CustomAPIDevice {
   * @param callback The member function to call when the service is triggered.
   * @param name The name of the arguments for the service, must match the arguments of the function.
   */
#ifdef USE_API_USER_DEFINED_ACTIONS
#ifdef USE_API_SERVICES
  template<typename T> void register_service(void (T::*callback)(), const std::string &name) {
#ifdef USE_API_CUSTOM_SERVICES
    auto *service = new CustomAPIDeviceService<T>(name, {}, (T *) this, callback); // NOLINT
    global_api_server->register_user_service(service);
#else
    static_assert(
        sizeof(T) == 0,
        "register_service() requires 'custom_services: true' in the 'api:' section of your YAML configuration");
#endif
  }
#else
  template<typename T> void register_service(void (T::*callback)(), const std::string &name) {
    static_assert(
        sizeof(T) == 0,
        "register_service() requires 'custom_services: true' in the 'api:' section of your YAML configuration");
  }
#endif

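As the static_asserts above spell out, `register_service()` only compiles when the matching option is enabled in the `api:` section. A hedged usage sketch for the two overloads shown in this diff; the component class, method names, namespace, and log tag are hypothetical, and this is an in-framework snippet meant to be built by ESPHome rather than a standalone program:

```cpp
#include "esphome/components/api/custom_api_device.h"
#include "esphome/core/component.h"
#include "esphome/core/log.h"

namespace my_component {

class MyCustomComponent : public esphome::Component, public esphome::api::CustomAPIDevice {
 public:
  void setup() override {
    // Exposed through the native API with two typed arguments.
    register_service(&MyCustomComponent::on_set_brightness, "set_brightness", {"channel", "brightness"});
    // Argument-less variant uses the single-parameter overload shown above.
    register_service(&MyCustomComponent::on_reset, "reset");
  }

  void on_set_brightness(int channel, float brightness) {
    ESP_LOGD("my_component", "channel=%d brightness=%.2f", channel, brightness);
  }
  void on_reset() { ESP_LOGD("my_component", "reset requested"); }
};

}  // namespace my_component
```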
@@ -122,7 +93,7 @@ class CustomAPIDevice {
   * subscribe_homeassistant_state(&CustomNativeAPI::on_state_changed, "climate.kitchen", "current_temperature");
   * }
   *
   * void on_state_changed(const std::string &state) {
   * void on_state_changed(std::string state) {
   *   // State of sensor.weather_forecast is `state`
   * }
   * ```
@@ -133,7 +104,7 @@ class CustomAPIDevice {
   * @param attribute The entity state attribute to track.
   */
  template<typename T>
  void subscribe_homeassistant_state(void (T::*callback)(const std::string &), const std::string &entity_id,
  void subscribe_homeassistant_state(void (T::*callback)(std::string), const std::string &entity_id,
                                     const std::string &attribute = "") {
    auto f = std::bind(callback, (T *) this, std::placeholders::_1);
    global_api_server->subscribe_home_assistant_state(entity_id, optional<std::string>(attribute), f);
@@ -148,7 +119,7 @@ class CustomAPIDevice {
   * subscribe_homeassistant_state(&CustomNativeAPI::on_state_changed, "sensor.weather_forecast");
   * }
   *
   * void on_state_changed(const std::string &entity_id, const std::string &state) {
   * void on_state_changed(std::string entity_id, std::string state) {
   *   // State of `entity_id` is `state`
   * }
   * ```
@@ -159,27 +130,11 @@ class CustomAPIDevice {
   * @param attribute The entity state attribute to track.
   */
  template<typename T>
  void subscribe_homeassistant_state(void (T::*callback)(const std::string &, const std::string &),
                                     const std::string &entity_id, const std::string &attribute = "") {
  void subscribe_homeassistant_state(void (T::*callback)(std::string, std::string), const std::string &entity_id,
                                     const std::string &attribute = "") {
    auto f = std::bind(callback, (T *) this, entity_id, std::placeholders::_1);
    global_api_server->subscribe_home_assistant_state(entity_id, optional<std::string>(attribute), f);
  }
#else
  template<typename T>
  void subscribe_homeassistant_state(void (T::*callback)(const std::string &), const std::string &entity_id,
                                     const std::string &attribute = "") {
    static_assert(sizeof(T) == 0,
                  "subscribe_homeassistant_state() requires 'homeassistant_states: true' in the 'api:' section "
                  "of your YAML configuration");
  }

  template<typename T>
  void subscribe_homeassistant_state(void (T::*callback)(const std::string &, const std::string &),
                                     const std::string &entity_id, const std::string &attribute = "") {
    static_assert(sizeof(T) == 0,
                  "subscribe_homeassistant_state() requires 'homeassistant_states: true' in the 'api:' section "
                  "of your YAML configuration");
  }
#endif

#ifdef USE_API_HOMEASSISTANT_SERVICES
@@ -194,9 +149,9 @@ class CustomAPIDevice {
   * @param service_name The service to call.
   */
  void call_homeassistant_service(const std::string &service_name) {
    HomeassistantActionRequest resp;
    HomeassistantServiceResponse resp;
    resp.set_service(StringRef(service_name));
    global_api_server->send_homeassistant_action(resp);
    global_api_server->send_homeassistant_service_call(resp);
  }

  /** Call a Home Assistant service from ESPHome.
@@ -214,15 +169,15 @@ class CustomAPIDevice {
   * @param data The data for the service call, mapping from string to string.
   */
  void call_homeassistant_service(const std::string &service_name, const std::map<std::string, std::string> &data) {
    HomeassistantActionRequest resp;
    HomeassistantServiceResponse resp;
    resp.set_service(StringRef(service_name));
    resp.data.init(data.size());
    for (auto &it : data) {
      auto &kv = resp.data.emplace_back();
      resp.data.emplace_back();
      auto &kv = resp.data.back();
      kv.set_key(StringRef(it.first));
      kv.value = it.second;
    }
    global_api_server->send_homeassistant_action(resp);
    global_api_server->send_homeassistant_service_call(resp);
  }

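For the `call_homeassistant_service()` overloads above, a hedged usage sketch from a custom component: the class, namespace, trigger method, and service payload are hypothetical, the build needs the `api:` option named in the static_asserts of this diff, and the snippet is meant to be compiled inside an ESPHome build rather than on its own.

```cpp
#include <map>
#include <string>

#include "esphome/components/api/custom_api_device.h"
#include "esphome/core/component.h"

namespace my_component {

class DoorbellNotifier : public esphome::Component, public esphome::api::CustomAPIDevice {
 public:
  // Hypothetical hook; a real component would call this from a sensor trigger.
  void notify_pressed() {
    // Calls the Home Assistant service notify.notify with a key/value payload.
    call_homeassistant_service("notify.notify",
                               {{"title", "Doorbell"}, {"message", "Someone pressed the doorbell"}});
    // Events use the same plumbing, minus the service semantics.
    fire_homeassistant_event("esphome.doorbell_pressed");
  }
};

}  // namespace my_component
```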
  /** Fire an ESPHome event in Home Assistant.
@@ -236,10 +191,10 @@ class CustomAPIDevice {
   * @param event_name The event to fire.
   */
  void fire_homeassistant_event(const std::string &event_name) {
    HomeassistantActionRequest resp;
    HomeassistantServiceResponse resp;
    resp.set_service(StringRef(event_name));
    resp.is_event = true;
    global_api_server->send_homeassistant_action(resp);
    global_api_server->send_homeassistant_service_call(resp);
  }

  /** Fire an ESPHome event in Home Assistant.
@@ -256,38 +211,16 @@ class CustomAPIDevice {
   * @param data The data for the event, mapping from string to string.
   */
  void fire_homeassistant_event(const std::string &service_name, const std::map<std::string, std::string> &data) {
    HomeassistantActionRequest resp;
    HomeassistantServiceResponse resp;
    resp.set_service(StringRef(service_name));
    resp.is_event = true;
    resp.data.init(data.size());
    for (auto &it : data) {
      auto &kv = resp.data.emplace_back();
      resp.data.emplace_back();
      auto &kv = resp.data.back();
      kv.set_key(StringRef(it.first));
      kv.value = it.second;
    }
    global_api_server->send_homeassistant_action(resp);
  }
#else
  template<typename T = void> void call_homeassistant_service(const std::string &service_name) {
    static_assert(sizeof(T) == 0, "call_homeassistant_service() requires 'homeassistant_services: true' in the 'api:' "
                                  "section of your YAML configuration");
  }

  template<typename T = void>
  void call_homeassistant_service(const std::string &service_name, const std::map<std::string, std::string> &data) {
    static_assert(sizeof(T) == 0, "call_homeassistant_service() requires 'homeassistant_services: true' in the 'api:' "
                                  "section of your YAML configuration");
  }

  template<typename T = void> void fire_homeassistant_event(const std::string &event_name) {
    static_assert(sizeof(T) == 0, "fire_homeassistant_event() requires 'homeassistant_services: true' in the 'api:' "
                                  "section of your YAML configuration");
  }

  template<typename T = void>
  void fire_homeassistant_event(const std::string &service_name, const std::map<std::string, std::string> &data) {
    static_assert(sizeof(T) == 0, "fire_homeassistant_event() requires 'homeassistant_services: true' in the 'api:' "
                                  "section of your YAML configuration");
    global_api_server->send_homeassistant_service_call(resp);
  }
#endif
};