Compare commits

8 Commits

Author SHA1 Message Date
J. Nick Koston
6ca1b90752 Address Copilot review feedback
- Fix regex to actually match std::to_string() by using alternation
  (the : in the lookbehind was preventing matches)
- Update error message to mention both std::to_string() and
  unqualified to_string() forms
- Correct buffer sizes for signed integer types:
  - int8_t: 5 chars (not 4) for "-128\0"
  - int16_t: 7 chars (not 6) for "-32768\0"
  - int32_t: 12 chars (not 11) for "-2147483648\0"
2026-01-28 18:14:16 -10:00
J. Nick Koston
fe1aa7e9ba Merge branch 'dev' into no_new_to_string 2026-01-28 17:42:57 -10:00
J. Nick Koston
a5f60750c2 [tx20] Eliminate heap allocations in wind sensor (#13298) 2026-01-29 16:07:41 +13:00
Clyde Stubbs
a382383d83 [workflows] Add deprecation check (#13584) 2026-01-29 12:08:45 +13:00
J. Nick Koston
8d51e2f580 Merge remote-tracking branch 'upstream/dev' into no_new_to_string
# Conflicts:
#	script/ci-custom.py
2026-01-21 19:50:39 -10:00
J. Nick Koston
11fb46ad11 Apply suggestions from code review 2026-01-19 17:44:25 -10:00
J. Nick Koston
9245c691d0 Merge branch 'dev' into no_new_to_string 2026-01-19 17:43:46 -10:00
J. Nick Koston
971a1a3e00 [ci] Block new std::to_string() usage, suggest snprintf alternatives 2026-01-19 08:49:31 -10:00
12 changed files with 202 additions and 40 deletions

View File

@@ -3,6 +3,7 @@ module.exports = {
BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
CODEOWNERS_MARKER: '<!-- codeowners-request -->',
TOO_BIG_MARKER: '<!-- too-big-request -->',
DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',
MANAGED_LABELS: [
'new-component',
@@ -27,6 +28,7 @@ module.exports = {
'breaking-change',
'developer-breaking-change',
'code-quality',
'deprecated-component'
],
DOCS_PR_PATTERNS: [

View File

@@ -251,6 +251,76 @@ async function detectPRTemplateCheckboxes(context) {
return labels;
}
// Strategy: Deprecated component detection
async function detectDeprecatedComponents(github, context, changedFiles) {
const labels = new Set();
const deprecatedInfo = [];
const { owner, repo } = context.repo;
// Compile regex once for better performance
const componentFileRegex = /^esphome\/components\/([^\/]+)\//;
// Get files that are modified or added in components directory
const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));
if (componentFiles.length === 0) {
return { labels, deprecatedInfo };
}
// Extract unique component names using the same regex
const components = new Set();
for (const file of componentFiles) {
const match = file.match(componentFileRegex);
if (match) {
components.add(match[1]);
}
}
// Get PR head to fetch files from the PR branch
const prNumber = context.payload.pull_request.number;
// Check each component's __init__.py for DEPRECATED_COMPONENT constant
for (const component of components) {
const initFile = `esphome/components/${component}/__init__.py`;
try {
// Fetch file content from PR head using GitHub API
const { data: fileData } = await github.rest.repos.getContent({
owner,
repo,
path: initFile,
ref: `refs/pull/${prNumber}/head`
});
// Decode base64 content
const content = Buffer.from(fileData.content, 'base64').toString('utf8');
// Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
// Support single quotes, double quotes, and triple quotes (for multiline)
const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;
if (deprecatedMatch) {
labels.add('deprecated-component');
deprecatedInfo.push({
component: component,
message: deprecatedMatch[1].trim()
});
console.log(`Found deprecated component: ${component}`);
}
} catch (error) {
// Only log if it's not a simple "file not found" error (404)
if (error.status !== 404) {
console.log(`Error reading ${initFile}:`, error.message);
}
}
}
return { labels, deprecatedInfo };
}
// Strategy: Requirements detection
async function detectRequirements(allLabels, prFiles, context) {
const labels = new Set();
@@ -298,5 +368,6 @@ module.exports = {
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
};

View File

@@ -11,6 +11,7 @@ const {
detectCodeOwner,
detectTests,
detectPRTemplateCheckboxes,
detectDeprecatedComponents,
detectRequirements
} = require('./detectors');
const { handleReviews } = require('./reviews');
@@ -112,6 +113,7 @@ module.exports = async ({ github, context }) => {
codeOwnerLabels,
testLabels,
checkboxLabels,
deprecatedResult
] = await Promise.all([
detectMergeBranch(context),
detectComponentPlatforms(changedFiles, apiData),
@@ -124,8 +126,13 @@ module.exports = async ({ github, context }) => {
detectCodeOwner(github, context, changedFiles),
detectTests(changedFiles),
detectPRTemplateCheckboxes(context),
detectDeprecatedComponents(github, context, changedFiles)
]);
// Extract deprecated component info
const deprecatedLabels = deprecatedResult.labels;
const deprecatedInfo = deprecatedResult.deprecatedInfo;
// Combine all labels
const allLabels = new Set([
...branchLabels,
@@ -139,6 +146,7 @@ module.exports = async ({ github, context }) => {
...codeOwnerLabels,
...testLabels,
...checkboxLabels,
...deprecatedLabels
]);
// Detect requirements based on all other labels
@@ -169,7 +177,7 @@ module.exports = async ({ github, context }) => {
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(github, context, finalLabels, originalLabelCount, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);
await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);
// Apply labels
await applyLabels(github, context, finalLabels);

View File

@@ -2,12 +2,29 @@ const {
BOT_COMMENT_MARKER,
CODEOWNERS_MARKER,
TOO_BIG_MARKER,
DEPRECATED_COMPONENT_MARKER
} = require('./constants');
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
const messages = [];
// Deprecated component message
if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
message += `Hey there @${prAuthor},\n`;
message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;
for (const info of deprecatedInfo) {
message += `#### Component: \`${info.component}\`\n`;
message += `${info.message}\n\n`;
}
message += `Consider migrating to the recommended alternative if applicable.`;
messages.push(message);
}
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
@@ -54,14 +71,14 @@ function generateReviewMessages(finalLabels, originalLabelCount, prFiles, totalA
}
// Handle reviews
async function handleReviews(github, context, finalLabels, originalLabelCount, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
const prAuthor = context.payload.pull_request.user.login;
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({

View File

@@ -25,7 +25,9 @@ template<typename... X> class TemplatableStringValue : public TemplatableValue<s
private:
// Helper to convert value to string - handles the case where value is already a string
template<typename T> static std::string value_to_string(T &&val) { return to_string(std::forward<T>(val)); }
template<typename T> static std::string value_to_string(T &&val) {
return to_string(std::forward<T>(val)); // NOLINT
}
// Overloads for string types - needed because std::to_string doesn't support them
static std::string value_to_string(char *val) {
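
For context, a minimal sketch of why the unqualified call above carries a NOLINT (assuming the `using std::to_string;` declaration in esphome/core/helpers.h that the lint check later in this PR refers to): inside the esphome namespace, an unqualified `to_string(val)` for arithmetic types resolves to `std::to_string`, which the new CI check would otherwise flag.

```cpp
#include <string>
#include <utility>

namespace esphome {
using std::to_string;  // provided by esphome/core/helpers.h, per the lint check's comments

// Hypothetical standalone version of the helper above: for arithmetic types the
// unqualified call resolves to std::to_string, hence the NOLINT on the real code.
template<typename T> std::string value_to_string(T &&val) { return to_string(std::forward<T>(val)); }
}  // namespace esphome
```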

View File

@@ -48,7 +48,7 @@ class ESPBTUUID {
// Remove before 2026.8.0
ESPDEPRECATED("Use to_str() instead. Removed in 2026.8.0", "2026.2.0")
std::string to_string() const;
std::string to_string() const; // NOLINT
const char *to_str(std::span<char, UUID_STR_LEN> output) const;
protected:
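
For illustration, a hedged sketch of migrating a caller from the deprecated to_string() to the new to_str() with a stack buffer (assumes ESPBTUUID and UUID_STR_LEN as declared above; the function names here are hypothetical):

```cpp
// Before: deprecated, allocates a std::string on the heap.
std::string uuid_text_old(const ESPBTUUID &uuid) { return uuid.to_string(); }

// After: format into the caller's fixed stack buffer via the std::span-based API;
// a char[UUID_STR_LEN] converts implicitly to std::span<char, UUID_STR_LEN>.
const char *uuid_text_new(const ESPBTUUID &uuid, char (&buf)[UUID_STR_LEN]) {
  return uuid.to_str(buf);
}
```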

View File

@@ -90,14 +90,16 @@ void HttpRequestUpdate::update_task(void *params) {
UPDATE_RETURN;
}
size_t read_index = container->get_bytes_read();
size_t content_length = container->content_length;
container->end();
container.reset(); // Release ownership of the container's shared_ptr
bool valid = false;
{ // Scope to ensure JsonDocument is destroyed before deallocating buffer
valid = json::parse_json(data, read_index, [this_update](JsonObject root) -> bool {
{ // Ensures the response string falls out of scope and deallocates before the task ends
std::string response((char *) data, read_index);
allocator.deallocate(data, container->content_length);
container->end();
container.reset(); // Release ownership of the container's shared_ptr
valid = json::parse_json(response, [this_update](JsonObject root) -> bool {
if (!root[ESPHOME_F("name")].is<const char *>() || !root[ESPHOME_F("version")].is<const char *>() ||
!root[ESPHOME_F("builds")].is<JsonArray>()) {
ESP_LOGE(TAG, "Manifest does not contain required fields");
@@ -135,7 +137,6 @@ void HttpRequestUpdate::update_task(void *params) {
return false;
});
}
allocator.deallocate(data, content_length);
if (!valid) {
ESP_LOGE(TAG, "Failed to parse JSON from %s", this_update->source_url_.c_str());
@@ -156,12 +157,17 @@ void HttpRequestUpdate::update_task(void *params) {
}
}
{ // Ensures the current version string falls out of scope and deallocates before the task ends
std::string current_version;
#ifdef ESPHOME_PROJECT_VERSION
this_update->update_info_.current_version = ESPHOME_PROJECT_VERSION;
current_version = ESPHOME_PROJECT_VERSION;
#else
this_update->update_info_.current_version = ESPHOME_VERSION;
current_version = ESPHOME_VERSION;
#endif
this_update->update_info_.current_version = current_version;
}
bool trigger_update_available = false;
if (this_update->update_info_.latest_version.empty() ||
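
As a side note, both hunks above rely on the same pattern: a block scope bounds the lifetime of a temporary std::string so its heap storage is released before the update task finishes. A minimal standalone sketch (names hypothetical, not from the PR):

```cpp
#include <cstddef>
#include <string>

void example_step(const char *data, size_t len) {
  {  // temporary needed only for this step
    std::string response(data, len);
    // ... parse or copy out what is needed from `response` ...
  }  // response's heap storage is freed here, before the surrounding task continues
}
```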

View File

@@ -25,13 +25,8 @@ std::string build_json(const json_build_t &f) {
}
bool parse_json(const std::string &data, const json_parse_t &f) {
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
return parse_json(reinterpret_cast<const uint8_t *>(data.c_str()), data.size(), f);
}
bool parse_json(const uint8_t *data, size_t len, const json_parse_t &f) {
// NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
JsonDocument doc = parse_json(data, len);
JsonDocument doc = parse_json(reinterpret_cast<const uint8_t *>(data.c_str()), data.size());
if (doc.overflowed() || doc.isNull())
return false;
return f(doc.as<JsonObject>());
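
For reference, a hedged usage sketch of the std::string overload kept above (assumes esphome::json::parse_json and the bool(JsonObject) callback shape shown in the diff; requires json_util.h / ArduinoJson in a real build):

```cpp
#include <string>

// Hypothetical caller: parse a manifest-style document and validate one field.
bool has_name(const std::string &payload) {
  return esphome::json::parse_json(payload, [](JsonObject root) -> bool {
    return root["name"].is<const char *>();
  });
}
```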

View File

@@ -50,8 +50,6 @@ std::string build_json(const json_build_t &f);
/// Parse a JSON string and run the provided json parse function if it's valid.
bool parse_json(const std::string &data, const json_parse_t &f);
/// Parse JSON from raw bytes and run the provided json parse function if it's valid.
bool parse_json(const uint8_t *data, size_t len, const json_parse_t &f);
/// Parse a JSON string and return the root JsonDocument (or an unbound object on error)
JsonDocument parse_json(const uint8_t *data, size_t len);

View File

@@ -2,7 +2,7 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"
#include <vector>
#include <array>
namespace esphome {
namespace tx20 {
@@ -45,25 +45,25 @@ std::string Tx20Component::get_wind_cardinal_direction() const { return this->wi
void Tx20Component::decode_and_publish_() {
ESP_LOGVV(TAG, "Decode Tx20");
std::string string_buffer;
std::string string_buffer_2;
std::vector<bool> bit_buffer;
std::array<bool, MAX_BUFFER_SIZE> bit_buffer{};
size_t bit_pos = 0;
bool current_bit = true;
// Cap at MAX_BUFFER_SIZE - 1 to prevent out-of-bounds access (buffer_index can exceed MAX_BUFFER_SIZE in ISR)
const int max_buffer_index =
std::min(static_cast<int>(this->store_.buffer_index), static_cast<int>(MAX_BUFFER_SIZE - 1));
for (int i = 1; i <= this->store_.buffer_index; i++) {
string_buffer_2 += to_string(this->store_.buffer[i]) + ", ";
for (int i = 1; i <= max_buffer_index; i++) {
uint8_t repeat = this->store_.buffer[i] / TX20_BIT_TIME;
// ignore segments at the end that were too short
string_buffer.append(repeat, current_bit ? '1' : '0');
bit_buffer.insert(bit_buffer.end(), repeat, current_bit);
for (uint8_t j = 0; j < repeat && bit_pos < MAX_BUFFER_SIZE; j++) {
bit_buffer[bit_pos++] = current_bit;
}
current_bit = !current_bit;
}
current_bit = !current_bit;
if (string_buffer.length() < MAX_BUFFER_SIZE) {
uint8_t remain = MAX_BUFFER_SIZE - string_buffer.length();
string_buffer_2 += to_string(remain) + ", ";
string_buffer.append(remain, current_bit ? '1' : '0');
bit_buffer.insert(bit_buffer.end(), remain, current_bit);
size_t bits_before_padding = bit_pos;
while (bit_pos < MAX_BUFFER_SIZE) {
bit_buffer[bit_pos++] = current_bit;
}
uint8_t tx20_sa = 0;
@@ -108,8 +108,24 @@ void Tx20Component::decode_and_publish_() {
// 2. Check received checksum matches calculated checksum
// 3. Check that Wind Direction matches Wind Direction (Inverted)
// 4. Check that Wind Speed matches Wind Speed (Inverted)
ESP_LOGVV(TAG, "BUFFER %s", string_buffer_2.c_str());
ESP_LOGVV(TAG, "Decoded bits %s", string_buffer.c_str());
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERY_VERBOSE
// Build debug strings from completed data
char debug_buf[320]; // buffer values: max 40 entries * 7 chars each
size_t debug_pos = 0;
for (int i = 1; i <= max_buffer_index; i++) {
debug_pos = buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%u, ", this->store_.buffer[i]);
}
if (bits_before_padding < MAX_BUFFER_SIZE) {
buf_append_printf(debug_buf, sizeof(debug_buf), debug_pos, "%zu, ", MAX_BUFFER_SIZE - bits_before_padding);
}
char bits_buf[MAX_BUFFER_SIZE + 1];
for (size_t i = 0; i < MAX_BUFFER_SIZE; i++) {
bits_buf[i] = bit_buffer[i] ? '1' : '0';
}
bits_buf[MAX_BUFFER_SIZE] = '\0';
ESP_LOGVV(TAG, "BUFFER %s", debug_buf);
ESP_LOGVV(TAG, "Decoded bits %s", bits_buf);
#endif
if (tx20_sa == 4) {
if (chk == tx20_sd) {
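
For illustration, a standalone sketch of the append-into-a-stack-buffer pattern the new debug code uses, independent of ESPHome's buf_append_printf helper (the function name and signature here are hypothetical):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Append one formatted entry to a fixed buffer, returning the new write position.
static size_t append_u16(char *buf, size_t buf_size, size_t pos, uint16_t value) {
  if (pos >= buf_size)
    return pos;  // buffer already full; drop further entries
  int n = snprintf(buf + pos, buf_size - pos, "%u, ", static_cast<unsigned>(value));
  if (n < 0)
    return pos;
  size_t next = pos + static_cast<size_t>(n);
  return next < buf_size ? next : buf_size;  // clamp on truncation
}
```

Calling this in a loop and threading the returned position through each call mirrors how debug_pos is threaded through buf_append_printf in the hunk above.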

View File

@@ -83,7 +83,7 @@ struct Timer {
}
// Remove before 2026.8.0
ESPDEPRECATED("Use to_str() instead. Removed in 2026.8.0", "2026.2.0")
std::string to_string() const {
std::string to_string() const { // NOLINT
char buffer[TO_STR_BUFFER_SIZE];
return this->to_str(buffer);
}

View File

@@ -756,6 +756,53 @@ def lint_no_sprintf(fname, match):
)
@lint_re_check(
# Match std::to_string() or unqualified to_string() calls
# The esphome namespace has "using std::to_string;" so unqualified calls resolve to std::to_string
# Use negative lookbehind for unqualified calls to avoid matching:
# - Function definitions: "const char *to_string(" or "std::string to_string("
# - Method definitions: "Class::to_string("
# - Method calls: ".to_string(" or "->to_string("
# - Other identifiers: "_to_string("
# Also explicitly match std::to_string since : is in the lookbehind
r"(?:(?<![*&.\w>:])to_string|std\s*::\s*to_string)\s*\(" + CPP_RE_EOL,
include=cpp_include,
exclude=[
# Vendored library
"esphome/components/http_request/httplib.h",
# Deprecated helpers that return std::string
"esphome/core/helpers.cpp",
# The using declaration itself
"esphome/core/helpers.h",
# Test fixtures - not production embedded code
"tests/integration/fixtures/*",
],
)
def lint_no_std_to_string(fname, match):
return (
f"{highlight('std::to_string()')} (including unqualified {highlight('to_string()')}) "
f"allocates heap memory. On long-running embedded devices, repeated heap allocations "
f"fragment memory over time.\n"
f"Please use {highlight('snprintf()')} with a stack buffer instead.\n"
f"\n"
f"Buffer sizes and format specifiers (sizes include sign and null terminator):\n"
f" uint8_t: 4 chars - %u (or PRIu8)\n"
f" int8_t: 5 chars - %d (or PRId8)\n"
f" uint16_t: 6 chars - %u (or PRIu16)\n"
f" int16_t: 7 chars - %d (or PRId16)\n"
f" uint32_t: 11 chars - %" + "PRIu32\n"
" int32_t: 12 chars - %" + "PRId32\n"
" uint64_t: 21 chars - %" + "PRIu64\n"
" int64_t: 21 chars - %" + "PRId64\n"
f" float/double: 24 chars - %.8g (15 digits + sign + decimal + e+XXX)\n"
f" 317 chars - %f (for DBL_MAX: 309 int digits + decimal + 6 frac + sign)\n"
f"\n"
f"For sensor values, use value_accuracy_to_buf() from helpers.h.\n"
f'Example: char buf[11]; snprintf(buf, sizeof(buf), "%" PRIu32, value);\n'
f"(If strictly necessary, add `{highlight('// NOLINT')}` to the end of the line)"
)
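
To make the suggested migration concrete, a hedged before/after sketch in the spirit of the lint message above (the function names are hypothetical, not from the PR):

```cpp
#include <cinttypes>
#include <cstdio>
#include <string>

// Before: flagged by the new lint; std::to_string() allocates a temporary heap string.
std::string format_id_old(uint32_t id) { return std::to_string(id); }

// After: format into a caller-provided stack buffer; 11 chars hold "4294967295" plus '\0'.
void format_id_new(uint32_t id, char (&buf)[11]) { snprintf(buf, sizeof(buf), "%" PRIu32, id); }
```

A caller declares `char buf[11];` on the stack and passes it in, matching the example line in the lint message.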
@lint_content_find_check(
"ESP_LOG",
include=["*.h", "*.tcc"],