Compare commits

34 Commits

| SHA1 |
|---|
| 0dec7cfbf8 |
| 30e7797577 |
| 0e5cabadc1 |
| 58f4fa53d0 |
| 04dc848620 |
| 8203b8fcd3 |
| 0ad61f4a95 |
| 9fd4076ab8 |
| a9e799cb06 |
| 3ec931ffa4 |
| e3094d9689 |
| 3594779401 |
| 94978d0063 |
| 415e12309b |
| aa5f887ff3 |
| 28561ea6a4 |
| 3b3ff4fea9 |
| 6b8125f5f2 |
| ab43390983 |
| 611592170b |
| 8a58ff91c3 |
| 27b86d89b0 |
| 36da3b85d5 |
| 4e4ffc3a24 |
| 4dce7fa103 |
| 467ef9902f |
| 486174073b |
| e9d9de448e |
| 08c16020c6 |
| 0ade9baf65 |
| 2fab7e73b9 |
| af4e2bf61d |
| 74fefea5bb |
| 21c22fe04a |
.gitlab-ci.yml (179 changes)

```
@@ -61,44 +61,84 @@ test2:
<<: *docker-builder
stage: build
script:
- docker run --rm --privileged hassioaddons/qemu-user-static:latest
- BUILD_FROM=homeassistant/${ADDON_ARCH}-base-ubuntu:latest
- ADDON_VERSION="${CI_COMMIT_TAG#v}"
- ADDON_VERSION="${ADDON_VERSION:-${CI_COMMIT_SHA:0:7}}"
- ESPHOMELIB_VERSION="${ESPHOMELIB_VERSION:-dev}"
- echo "Build from ${BUILD_FROM}"
- echo "Add-on version ${ADDON_VERSION}"
- echo "Esphomelib version ${ESPHOMELIB_VERSION}"
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev"
- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
- |
hassio-builder.sh \
-t . \
-i ottowinter/esphomeyaml-hassio-${ADDON_ARCH} \
-d "$CI_REGISTRY" \
--${ADDON_ARCH}
docker build \
--build-arg "BUILD_FROM=${BUILD_FROM}" \
--build-arg "ADDON_ARCH=${ADDON_ARCH}" \
--build-arg "ADDON_VERSION=${ADDON_VERSION}" \
--build-arg "ESPHOMELIB_VERSION=${ESPHOMELIB_VERSION}" \
--tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev" \
--tag "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
--file "docker/Dockerfile.hassio" \
.
- |
docker tag \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:dev" \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
- docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
- docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:dev"
retry: 2
if [ "${DO_PUSH:-true}" = true ]; then
echo "Pushing to CI registry"
docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}
docker push ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:dev
fi

# Generic deploy template
.deploy-release: &deploy-release
<<: *docker-builder
stage: deploy
script:
- version=${CI_COMMIT_TAG:1}
- version="${CI_COMMIT_TAG#v}"
- echo "Publishing release version ${version}"
- docker pull "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
- docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
- docker pull "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"

- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- |
docker tag \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
- |
docker tag \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:latest"

- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- |
docker tag \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"

- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- |
docker tag \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
- |
docker tag \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
- docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:latest"

- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- |
docker tag \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
only:
- /^v\d+\.\d+\.\d+$/
except:
@@ -108,25 +148,38 @@ test2:
<<: *docker-builder
stage: deploy
script:
- version=${CI_COMMIT_TAG:1}
- version="${CI_COMMIT_TAG#v}"
- echo "Publishing beta version ${version}"
- docker pull "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"
- docker login -u "$DOCKER_USER" -p "$DOCKER_PASSWORD"
- docker pull "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}"

- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- |
docker tag \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:beta"
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

- echo "Tag ${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- |
docker tag \
"${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- docker push "${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:rc"

- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- |
docker tag \
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"

- echo "Tag ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- |
docker tag \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}" \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:beta"
- docker push "${CI_REGISTRY}/ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:beta"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:${version}"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:beta"
"${CI_REGISTRY}/esphomeyaml-hassio-${ADDON_ARCH}:${CI_COMMIT_SHA}" \
"ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
- docker push "ottowinter/esphomeyaml-hassio-${ADDON_ARCH}:rc"
only:
- /^v\d+\.\d+\.\d+b\d+$/
except:
@@ -137,33 +190,67 @@ build:normal:
<<: *docker-builder
stage: build
script:
- docker build -t "${CI_REGISTRY}/ottowinter/esphomeyaml:dev" .
- |
docker tag \
"${CI_REGISTRY}/ottowinter/esphomeyaml:dev" \
"${CI_REGISTRY}/ottowinter/esphomeyaml:${CI_COMMIT_SHA}"
- docker push "${CI_REGISTRY}/ottowinter/esphomeyaml:${CI_COMMIT_SHA}"
- docker push "${CI_REGISTRY}/ottowinter/esphomeyaml:dev"
- docker build -t "${CI_REGISTRY}/esphomeyaml:dev" .

build:armhf:
.build-hassio-edge: &build-hassio-edge
<<: *build-hassio
except:
- /^v\d+\.\d+\.\d+$/
- /^v\d+\.\d+\.\d+b\d+$/

.build-hassio-release: &build-hassio-release
<<: *build-hassio
only:
- /^v\d+\.\d+\.\d+$/
- /^v\d+\.\d+\.\d+b\d+$/

build:hassio-armhf-edge:
<<: *build-hassio-edge
variables:
ADDON_ARCH: armhf
DO_PUSH: "false"

#build:aarch64:
# <<: *build
# variables:
# ADDON_ARCH: aarch64
build:hassio-armhf:
<<: *build-hassio-release
variables:
ADDON_ARCH: armhf
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"

build:i386:
<<: *build-hassio
#build:hassio-aarch64-edge:
# <<: *build-hassio-edge
# variables:
# ADDON_ARCH: aarch64
# DO_PUSH: "false"

#build:hassio-aarch64:
# <<: *build-hassio-release
# variables:
# ADDON_ARCH: aarch64
# ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"

build:hassio-i386-edge:
<<: *build-hassio-edge
variables:
ADDON_ARCH: i386
DO_PUSH: "false"

build:amd64:
<<: *build-hassio
build:hassio-i386:
<<: *build-hassio-release
variables:
ADDON_ARCH: i386
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"

build:hassio-amd64-edge:
<<: *build-hassio-edge
variables:
ADDON_ARCH: amd64
DO_PUSH: "false"

build:hassio-amd64:
<<: *build-hassio-release
variables:
ADDON_ARCH: amd64
ESPHOMELIB_VERSION: "${CI_COMMIT_TAG}"

# Deploy jobs
deploy-release:armhf:
@@ -180,7 +267,7 @@ deploy-beta:armhf:
# <<: *deploy-release
# variables:
# ADDON_ARCH: aarch64

#
#deploy-beta:aarch64:
# <<: *deploy-beta
# variables:
```
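The build job above derives the add-on version from the Git tag with `${CI_COMMIT_TAG#v}` (replacing the older `${CI_COMMIT_TAG:1}`) and falls back to a short commit SHA on untagged pipelines. A minimal sketch of that parameter expansion, with the CI-provided variables set to illustrative placeholder values:

```bash
#!/usr/bin/env bash
# CI_COMMIT_TAG and CI_COMMIT_SHA are normally provided by GitLab CI;
# the values below are placeholders for illustration only.
CI_COMMIT_TAG="v1.9.0b3"
CI_COMMIT_SHA="0123456789abcdef0123456789abcdef01234567"

# "${CI_COMMIT_TAG#v}" strips only a leading "v"; the old "${CI_COMMIT_TAG:1}"
# dropped the first character unconditionally, whatever it was.
ADDON_VERSION="${CI_COMMIT_TAG#v}"
# On untagged pipelines CI_COMMIT_TAG is empty, so fall back to the short SHA.
ADDON_VERSION="${ADDON_VERSION:-${CI_COMMIT_SHA:0:7}}"
ESPHOMELIB_VERSION="${ESPHOMELIB_VERSION:-dev}"

echo "Add-on version ${ADDON_VERSION}"           # -> 1.9.0b3
echo "Esphomelib version ${ESPHOMELIB_VERSION}"  # -> dev
```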
Dockerfile (23 changes)

```
@@ -1,25 +1,28 @@
FROM python:2.7
ARG BUILD_FROM=python:2.7
FROM ${BUILD_FROM}
MAINTAINER Otto Winter <contact@otto-winter.com>

RUN apt-get update && apt-get install -y \
python-pil \
&& rm -rf /var/lib/apt/lists/*
git \
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
pip install --no-cache-dir --no-binary :all: platformio && \
platformio settings set enable_telemetry No && \
platformio settings set check_libraries_interval 1000000 && \
platformio settings set check_platformio_interval 1000000 && \
platformio settings set check_platforms_interval 1000000

ENV ESPHOMEYAML_OTA_HOST_PORT=6123
EXPOSE 6123
VOLUME /config
WORKDIR /usr/src/app

RUN pip install --no-cache-dir --no-binary :all: platformio && \
platformio settings set enable_telemetry No

COPY docker/platformio.ini /usr/src/app/
RUN platformio settings set enable_telemetry No && \
platformio run -e espressif32 -e espressif8266; exit 0
COPY docker/platformio.ini /pio/platformio.ini
RUN platformio run -d /pio; rm -rf /pio

COPY . .
RUN pip install --no-cache-dir -e . && \
pip install --no-cache-dir tzlocal pillow
RUN pip install --no-cache-dir --no-binary :all: -e . && \
pip install --no-cache-dir --no-binary :all: tzlocal

WORKDIR /config
ENTRYPOINT ["esphomeyaml"]
```
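The rewritten Dockerfile turns the base image into a `BUILD_FROM` build argument (defaulting to `python:2.7`) and pre-builds the PlatformIO platforms from `docker/platformio.ini` before the source is copied in. A hedged example of a local build; the tag name is illustrative and not one used by the CI configuration:

```bash
# Build the main image locally, overriding the base image via BUILD_FROM.
# The tag "esphomeyaml:local-dev" is an illustrative name, not a CI tag.
docker build \
  --build-arg "BUILD_FROM=python:2.7" \
  --tag "esphomeyaml:local-dev" \
  .

# The entrypoint is the esphomeyaml CLI, so extra arguments are passed to it.
docker run --rm esphomeyaml:local-dev --help
```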
@@ -1,21 +0,0 @@
|
||||
# Dockerfile for aarch64 version of HassIO add-on
|
||||
FROM arm64v8/ubuntu:bionic
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python \
|
||||
python-pip \
|
||||
python-setuptools \
|
||||
python-pil \
|
||||
git \
|
||||
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/*rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No
|
||||
|
||||
COPY docker/platformio.ini /pio/platformio.ini
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
|
||||
COPY . .
|
||||
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||
pip install --no-cache-dir --no-binary :all: tzlocal
|
||||
|
||||
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||
@@ -1,21 +0,0 @@
|
||||
# Dockerfile for amd64 version of HassIO add-on
|
||||
FROM ubuntu:bionic
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python \
|
||||
python-pip \
|
||||
python-setuptools \
|
||||
python-pil \
|
||||
git \
|
||||
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/*rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No
|
||||
|
||||
COPY docker/platformio.ini /pio/platformio.ini
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
|
||||
COPY . .
|
||||
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||
pip install --no-cache-dir --no-binary :all: tzlocal
|
||||
|
||||
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||
@@ -1,31 +0,0 @@
|
||||
# Dockerfile for armhf version of HassIO add-on
|
||||
FROM homeassistant/armhf-base:latest
|
||||
|
||||
RUN apk add --no-cache \
|
||||
python2 \
|
||||
python2-dev \
|
||||
py2-pip \
|
||||
git \
|
||||
gcc \
|
||||
openssh \
|
||||
libc6-compat \
|
||||
jpeg-dev \
|
||||
zlib-dev \
|
||||
freetype-dev \
|
||||
lcms2-dev \
|
||||
openjpeg-dev \
|
||||
tiff-dev \
|
||||
libc-dev \
|
||||
linux-headers \
|
||||
&& \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No
|
||||
|
||||
COPY docker/platformio-esp8266.ini /pio/platformio.ini
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
|
||||
COPY . .
|
||||
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||
pip install --no-cache-dir pillow tzlocal
|
||||
|
||||
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||
Builder image Dockerfile hunk:

```
@@ -27,6 +27,4 @@ RUN apt-get update && apt-get install -y \
binfmt-support \
&& rm -rf /var/lib/apt/lists/*

COPY docker/hassio-builder.sh /usr/bin/

WORKDIR /data
```
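Cross-building the armhf images relies on QEMU binfmt registration (the `binfmt-support` package here, and the `hassioaddons/qemu-user-static` container in the new CI build job). A hedged sketch of doing the same on a local machine before cross-building the armhf add-on image; the output tag is illustrative:

```bash
# Register QEMU binfmt handlers on the host, as the CI build job does.
docker run --rm --privileged hassioaddons/qemu-user-static:latest

# Cross-build the armhf add-on image against the armhf ubuntu base.
# "esphomeyaml-hassio-armhf:dev" is an illustrative local tag.
docker build \
  --build-arg "BUILD_FROM=homeassistant/armhf-base-ubuntu:latest" \
  --build-arg "ADDON_ARCH=armhf" \
  --file docker/Dockerfile.hassio \
  --tag esphomeyaml-hassio-armhf:dev \
  .
```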
docker/Dockerfile.hassio (new file, 42 lines)

```
@@ -0,0 +1,42 @@
# Dockerfile for HassIO add-on
ARG BUILD_FROM=homeassistant/amd64-base-ubuntu:latest
FROM ${BUILD_FROM}

RUN apt-get update && apt-get install -y --no-install-recommends \
python \
python-pip \
python-setuptools \
python-pil \
git \
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
pip install --no-cache-dir --no-binary :all: platformio && \
platformio settings set enable_telemetry No && \
platformio settings set check_libraries_interval 1000000 && \
platformio settings set check_platformio_interval 1000000 && \
platformio settings set check_platforms_interval 1000000

COPY docker/platformio.ini /pio/platformio.ini
RUN platformio run -d /pio; rm -rf /pio

ARG ESPHOMELIB_VERSION="dev"
RUN platformio lib -g install "https://github.com/OttoWinter/esphomelib.git#${ESPHOMELIB_VERSION}"

COPY . .
RUN pip install --no-cache-dir --no-binary :all: -e . && \
pip install --no-cache-dir --no-binary :all: tzlocal

CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]

# Build arugments
ARG ADDON_ARCH
ARG ADDON_VERSION

# Labels
LABEL \
io.hass.name="esphomeyaml" \
io.hass.description="esphomeyaml HassIO add-on for intelligently managing all your ESP8266/ESP32 devices." \
io.hass.arch="${ADDON_ARCH}" \
io.hass.type="addon" \
io.hass.version="${ADDON_VERSION}" \
io.hass.url="https://esphomelib.com/esphomeyaml/index.html" \
maintainer="Otto Winter <contact@otto-winter.com>"
```
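The new Dockerfile.hassio accepts the same build arguments the CI job passes (`BUILD_FROM`, `ADDON_ARCH`, `ADDON_VERSION`, `ESPHOMELIB_VERSION`) and bakes the version into the `io.hass.version` label. A hedged example of a manual amd64 build plus a label check; the version and tag values are illustrative:

```bash
# Manual amd64 build with the same build arguments the CI job passes.
docker build \
  --build-arg "BUILD_FROM=homeassistant/amd64-base-ubuntu:latest" \
  --build-arg "ADDON_ARCH=amd64" \
  --build-arg "ADDON_VERSION=1.9.0b3" \
  --build-arg "ESPHOMELIB_VERSION=dev" \
  --file docker/Dockerfile.hassio \
  --tag esphomeyaml-hassio-amd64:dev \
  .

# Verify that the version made it into the HassIO label.
docker inspect --format '{{ index .Config.Labels "io.hass.version" }}' \
  esphomeyaml-hassio-amd64:dev
```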
@@ -1,21 +0,0 @@
|
||||
# Dockerfile for i386 version of HassIO add-on
|
||||
FROM i386/ubuntu:bionic
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python \
|
||||
python-pip \
|
||||
python-setuptools \
|
||||
python-pil \
|
||||
git \
|
||||
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/*rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No
|
||||
|
||||
COPY docker/platformio.ini /pio/platformio.ini
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
|
||||
COPY . .
|
||||
RUN pip install --no-cache-dir --no-binary :all: -e . && \
|
||||
pip install --no-cache-dir --no-binary :all: tzlocal
|
||||
|
||||
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||
@@ -1,318 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Based on Home Assistant's docker builder
|
||||
######################
|
||||
# Hass.io Build-env
|
||||
######################
|
||||
set -e
|
||||
|
||||
echo -- "$@"
|
||||
|
||||
#### Variable ####
|
||||
|
||||
DOCKER_TIMEOUT=20
|
||||
DOCKER_PID=-1
|
||||
DOCKER_HUB=""
|
||||
DOCKER_CACHE="true"
|
||||
DOCKER_LOCAL="false"
|
||||
TARGET=""
|
||||
IMAGE=""
|
||||
BUILD_LIST=()
|
||||
BUILD_TASKS=()
|
||||
|
||||
#### Misc functions ####
|
||||
|
||||
function print_help() {
|
||||
cat << EOF
|
||||
Hass.io build-env for ecosystem:
|
||||
docker run --rm homeassistant/{arch}-builder:latest [options]
|
||||
|
||||
Options:
|
||||
-h, --help
|
||||
Display this help and exit.
|
||||
|
||||
Repository / Data
|
||||
-t, --target <PATH_TO_BUILD>
|
||||
Set local folder or path inside repository for build.
|
||||
|
||||
Version/Image handling
|
||||
-i, --image <IMAGE_NAME>
|
||||
Overwrite image name of build / support {arch}
|
||||
|
||||
Architecture
|
||||
--armhf
|
||||
Build for arm.
|
||||
--amd64
|
||||
Build for intel/amd 64bit.
|
||||
--aarch64
|
||||
Build for arm 64bit.
|
||||
--i386
|
||||
Build for intel/amd 32bit.
|
||||
--all
|
||||
Build all architecture.
|
||||
|
||||
Build handling
|
||||
--no-cache
|
||||
Disable cache for the build (from latest).
|
||||
-d, --docker-hub <DOCKER_REPOSITORY>
|
||||
Set or overwrite the docker repository.
|
||||
|
||||
Use the host docker socket if mapped into container:
|
||||
/var/run/docker.sock
|
||||
|
||||
EOF
|
||||
|
||||
exit 1
|
||||
}
|
||||
|
||||
#### Docker functions ####
|
||||
|
||||
function start_docker() {
|
||||
local starttime
|
||||
local endtime
|
||||
|
||||
if [ -S "/var/run/docker.sock" ]; then
|
||||
echo "[INFO] Use host docker setup with '/var/run/docker.sock'"
|
||||
DOCKER_LOCAL="true"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "[INFO] Starting docker."
|
||||
dockerd 2> /dev/null &
|
||||
DOCKER_PID=$!
|
||||
|
||||
echo "[INFO] Waiting for docker to initialize..."
|
||||
starttime="$(date +%s)"
|
||||
endtime="$(date +%s)"
|
||||
until docker info >/dev/null 2>&1; do
|
||||
if [ $((endtime - starttime)) -le ${DOCKER_TIMEOUT} ]; then
|
||||
sleep 1
|
||||
endtime=$(date +%s)
|
||||
else
|
||||
echo "[ERROR] Timeout while waiting for docker to come up"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
echo "[INFO] Docker was initialized"
|
||||
}
|
||||
|
||||
|
||||
function stop_docker() {
|
||||
local starttime
|
||||
local endtime
|
||||
|
||||
if [ "$DOCKER_LOCAL" == "true" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "[INFO] Stopping in container docker..."
|
||||
if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
|
||||
starttime="$(date +%s)"
|
||||
endtime="$(date +%s)"
|
||||
|
||||
# Now wait for it to die
|
||||
kill "$DOCKER_PID"
|
||||
while kill -0 "$DOCKER_PID" 2> /dev/null; do
|
||||
if [ $((endtime - starttime)) -le ${DOCKER_TIMEOUT} ]; then
|
||||
sleep 1
|
||||
endtime=$(date +%s)
|
||||
else
|
||||
echo "[ERROR] Timeout while waiting for container docker to die"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
else
|
||||
echo "[WARN] Your host might have been left with unreleased resources"
|
||||
fi
|
||||
}
|
||||
|
||||
function run_build() {
|
||||
local build_dir=$1
|
||||
local repository=$2
|
||||
local image=$3
|
||||
local version=$4
|
||||
local build_arch=$5
|
||||
local docker_cli=("${!6}")
|
||||
|
||||
local push_images=()
|
||||
|
||||
# Overwrites
|
||||
if [ ! -z "$DOCKER_HUB" ]; then repository="$DOCKER_HUB"; fi
|
||||
if [ ! -z "$IMAGE" ]; then image="$IMAGE"; fi
|
||||
|
||||
# Init Cache
|
||||
if [ "$DOCKER_CACHE" == "true" ]; then
|
||||
echo "[INFO] Init cache for $repository/$image:$version"
|
||||
if docker pull "$repository/$image:latest" > /dev/null 2>&1; then
|
||||
docker_cli+=("--cache-from" "$repository/$image:latest")
|
||||
else
|
||||
docker_cli+=("--no-cache")
|
||||
echo "[WARN] No cache image found. Cache is disabled for build"
|
||||
fi
|
||||
else
|
||||
docker_cli+=("--no-cache")
|
||||
fi
|
||||
|
||||
# Build image
|
||||
echo "[INFO] Run build for $repository/$image:$version"
|
||||
docker build --pull -t "$repository/$image:$version" \
|
||||
--label "io.hass.version=$version" \
|
||||
--label "io.hass.arch=$build_arch" \
|
||||
-f "$TARGET/docker/Dockerfile.$build_arch" \
|
||||
"${docker_cli[@]}" \
|
||||
"$build_dir"
|
||||
|
||||
echo "[INFO] Finish build for $repository/$image:$version"
|
||||
docker tag "$repository/$image:$version" "$repository/$image:dev"
|
||||
}
|
||||
|
||||
|
||||
#### HassIO functions ####
|
||||
|
||||
function build_addon() {
|
||||
local build_arch=$1
|
||||
|
||||
local docker_cli=()
|
||||
local image=""
|
||||
local repository=""
|
||||
local raw_image=""
|
||||
local name=""
|
||||
local description=""
|
||||
local url=""
|
||||
local args=""
|
||||
|
||||
# Read addon config.json
|
||||
name="$(jq --raw-output '.name // empty' "$TARGET/esphomeyaml/config.json" | sed "s/'//g")"
|
||||
description="$(jq --raw-output '.description // empty' "$TARGET/esphomeyaml/config.json" | sed "s/'//g")"
|
||||
url="$(jq --raw-output '.url // empty' "$TARGET/esphomeyaml/config.json")"
|
||||
version="$(jq --raw-output '.version' "$TARGET/esphomeyaml/config.json")"
|
||||
raw_image="$(jq --raw-output '.image // empty' "$TARGET/esphomeyaml/config.json")"
|
||||
|
||||
# Read data from image
|
||||
if [ ! -z "$raw_image" ]; then
|
||||
repository="$(echo "$raw_image" | cut -f 1 -d '/')"
|
||||
image="$(echo "$raw_image" | cut -f 2 -d '/')"
|
||||
fi
|
||||
|
||||
# Set additional labels
|
||||
docker_cli+=("--label" "io.hass.name=$name")
|
||||
docker_cli+=("--label" "io.hass.description=$description")
|
||||
docker_cli+=("--label" "io.hass.type=addon")
|
||||
|
||||
if [ ! -z "$url" ]; then
|
||||
docker_cli+=("--label" "io.hass.url=$url")
|
||||
fi
|
||||
|
||||
# Start build
|
||||
run_build "$TARGET" "$repository" "$image" "$version" \
|
||||
"$build_arch" docker_cli[@]
|
||||
}
|
||||
|
||||
#### initialized cross-build ####
|
||||
|
||||
function init_crosscompile() {
|
||||
echo "[INFO] Setup crosscompiling feature"
|
||||
(
|
||||
mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
|
||||
update-binfmts --enable qemu-arm
|
||||
update-binfmts --enable qemu-aarch64
|
||||
) > /dev/null 2>&1 || echo "[WARN] Can't enable crosscompiling feature"
|
||||
}
|
||||
|
||||
|
||||
function clean_crosscompile() {
|
||||
echo "[INFO] Clean crosscompiling feature"
|
||||
if [ -f /proc/sys/fs/binfmt_misc ]; then
|
||||
umount /proc/sys/fs/binfmt_misc || true
|
||||
fi
|
||||
|
||||
(
|
||||
update-binfmts --disable qemu-arm
|
||||
update-binfmts --disable qemu-aarch64
|
||||
) > /dev/null 2>&1 || echo "[WARN] No crosscompiling feature found for cleanup"
|
||||
}
|
||||
|
||||
#### Error handling ####
|
||||
|
||||
function error_handling() {
|
||||
stop_docker
|
||||
clean_crosscompile
|
||||
|
||||
exit 1
|
||||
}
|
||||
trap 'error_handling' SIGINT SIGTERM
|
||||
|
||||
#### Parse arguments ####
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
key=$1
|
||||
case ${key} in
|
||||
-h|--help)
|
||||
print_help
|
||||
;;
|
||||
-t|--target)
|
||||
TARGET=$2
|
||||
shift
|
||||
;;
|
||||
-i|--image)
|
||||
IMAGE=$2
|
||||
shift
|
||||
;;
|
||||
--no-cache)
|
||||
DOCKER_CACHE="false"
|
||||
;;
|
||||
-d|--docker-hub)
|
||||
DOCKER_HUB=$2
|
||||
shift
|
||||
;;
|
||||
--armhf)
|
||||
BUILD_LIST+=("armhf")
|
||||
;;
|
||||
--amd64)
|
||||
BUILD_LIST+=("amd64")
|
||||
;;
|
||||
--i386)
|
||||
BUILD_LIST+=("i386")
|
||||
;;
|
||||
--aarch64)
|
||||
BUILD_LIST+=("aarch64")
|
||||
;;
|
||||
--all)
|
||||
BUILD_LIST=("armhf" "amd64" "i386" "aarch64")
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "[WARN] $0 : Argument '$1' unknown will be Ignoring"
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Check if an architecture is available
|
||||
if [ "${#BUILD_LIST[@]}" -eq 0 ]; then
|
||||
echo "[ERROR] You need select an architecture for build!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
#### Main ####
|
||||
|
||||
mkdir -p /data
|
||||
|
||||
# Setup docker env
|
||||
init_crosscompile
|
||||
start_docker
|
||||
|
||||
# Select arch build
|
||||
for arch in "${BUILD_LIST[@]}"; do
|
||||
(build_addon "$arch") &
|
||||
BUILD_TASKS+=($!)
|
||||
done
|
||||
|
||||
# Wait until all build jobs are done
|
||||
wait "${BUILD_TASKS[@]}"
|
||||
|
||||
# Cleanup docker env
|
||||
clean_crosscompile
|
||||
stop_docker
|
||||
|
||||
exit 0
|
||||
Deleted PlatformIO project stub:

```
@@ -1,7 +0,0 @@
; This file allows the docker build file to install the required platformio
; platforms

[env:espressif8266]
platform = espressif8266
board = nodemcuv2
framework = arduino
```
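This deleted stub only declared an ESP8266 environment, while the rewritten Dockerfiles run `platformio run -e espressif32 -e espressif8266` against a throwaway project in `/pio` so the toolchains end up cached in the image. A sketch of reproducing that pre-warm step by hand; the ESP32 board id (`nodemcu-32s`) is an assumption, not taken from this repository:

```bash
# Write a throwaway PlatformIO project, build it once so the platforms and
# toolchains get downloaded into the PlatformIO cache, then delete it.
mkdir -p /pio
cat > /pio/platformio.ini <<'EOF'
[env:espressif8266]
platform = espressif8266
board = nodemcuv2
framework = arduino

[env:espressif32]
platform = espressif32
board = nodemcu-32s
framework = arduino
EOF
platformio run -d /pio; rm -rf /pio
```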
esphomeyaml-beta add-on configuration (version bump and beta documentation URL):

```
@@ -1,9 +1,9 @@
{
"name": "esphomeyaml-beta",
"version": "1.9.0b1",
"version": "1.9.0b3",
"slug": "esphomeyaml-beta",
"description": "Beta version of esphomeyaml HassIO add-on.",
"url": "https://esphomelib.com/esphomeyaml/index.html",
"url": "https://beta.esphomelib.com/esphomeyaml/index.html",
"startup": "application",
"webui": "http://[HOST]:[PORT:6052]",
"boot": "auto",
```
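These `name`/`version`/`url` fields are what the (now removed) hassio-builder.sh script read with jq to label the image. A hedged example of pulling them out the same way; the config.json path is an assumption for illustration:

```bash
# Read the add-on metadata the same way the builder script did.
# "esphomeyaml-beta/config.json" is an assumed path for illustration.
name="$(jq --raw-output '.name // empty' esphomeyaml-beta/config.json)"
version="$(jq --raw-output '.version' esphomeyaml-beta/config.json)"
url="$(jq --raw-output '.url // empty' esphomeyaml-beta/config.json)"
echo "${name} ${version} (${url})"
```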
@@ -1,59 +1,24 @@
|
||||
# Dockerfile for HassIO add-on
|
||||
ARG BUILD_FROM=ubuntu:bionic
|
||||
# Dockerfile for HassIO edge add-on
|
||||
ARG BUILD_FROM=homeassistant/amd64-base-ubuntu:latest
|
||||
FROM ${BUILD_FROM}
|
||||
|
||||
# Re-declare BUILD_FROM to fix weird docker issue
|
||||
ARG BUILD_FROM
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python \
|
||||
python-pip \
|
||||
python-setuptools \
|
||||
python-pil \
|
||||
git \
|
||||
&& apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* && \
|
||||
pip install --no-cache-dir --no-binary :all: platformio && \
|
||||
platformio settings set enable_telemetry No && \
|
||||
platformio settings set check_libraries_interval 1000000 && \
|
||||
platformio settings set check_platformio_interval 1000000 && \
|
||||
platformio settings set check_platforms_interval 1000000
|
||||
|
||||
# On amd64 and alike, using ubuntu as the base is better as building
|
||||
# for the ESP32 only works with glibc (and ubuntu). However, on armhf
|
||||
# the build toolchain frequently procudes segfaults under ubuntu.
|
||||
# -> Use ubuntu for most architectures, except alpine for armhf
|
||||
#
|
||||
# * python and related required because this is a python project
|
||||
# * git required for platformio library dependencies downloads
|
||||
# * libc6-compat and openssh required on alpine for weird reasons
|
||||
# * disable platformio telemetry on install
|
||||
RUN /bin/bash -c "if [[ '$BUILD_FROM' = *\"ubuntu\"* ]]; then \
|
||||
apt-get update && apt-get install -y --no-install-recommends \
|
||||
python python-pip python-setuptools python-pil git && \
|
||||
rm -rf /var/lib/apt/lists/* /tmp/*; \
|
||||
else \
|
||||
apk add --no-cache \
|
||||
python2 \
|
||||
python2-dev \
|
||||
py2-pip \
|
||||
git \
|
||||
gcc \
|
||||
openssh \
|
||||
libc6-compat \
|
||||
jpeg-dev \
|
||||
zlib-dev \
|
||||
freetype-dev \
|
||||
lcms2-dev \
|
||||
openjpeg-dev \
|
||||
tiff-dev \
|
||||
libc-dev \
|
||||
linux-headers; \
|
||||
fi" && \
|
||||
pip install --no-cache-dir platformio && \
|
||||
platformio settings set enable_telemetry No
|
||||
|
||||
|
||||
# Create fake project to make platformio install all depdencies.
|
||||
# * Ignore build errors from platformio - empty project
|
||||
# * On alpine, only install ESP8266 toolchain
|
||||
COPY platformio.ini /pio/platformio.ini
|
||||
RUN /bin/bash -c "if [[ '$BUILD_FROM' = *\"ubuntu\"* ]]; then \
|
||||
platformio run -e espressif32 -e espressif8266 -d /pio; exit 0; \
|
||||
else \
|
||||
echo \"\$(head -8 /pio/platformio.ini)\" >/pio/platformio.ini; \
|
||||
platformio run -e espressif8266 -d /pio; exit 0; \
|
||||
fi"
|
||||
RUN platformio run -d /pio; rm -rf /pio
|
||||
|
||||
# Install latest esphomeyaml from git
|
||||
RUN pip install --no-cache-dir \
|
||||
git+git://github.com/OttoWinter/esphomeyaml.git && \
|
||||
RUN pip install --no-cache-dir git+https://github.com/OttoWinter/esphomeyaml.git@dev#egg=esphomeyaml && \
|
||||
pip install --no-cache-dir pillow tzlocal
|
||||
|
||||
CMD ["esphomeyaml", "/config/esphomeyaml", "dashboard"]
|
||||
|
||||
Add-on build configuration (per-architecture base images switched to the homeassistant/*-base-ubuntu images):

```
@@ -1,10 +1,10 @@
{
"squash": false,
"build_from": {
"aarch64": "arm64v8/ubuntu:bionic",
"amd64": "ubuntu:bionic",
"armhf": "homeassistant/armhf-base:latest",
"i386": "i386/ubuntu:bionic"
"aarch64": "homeassistant/aarch64-base-ubuntu:latest",
"amd64": "homeassistant/amd64-base-ubuntu:latest",
"armhf": "homeassistant/armhf-base-ubuntu:latest",
"i386": "homeassistant/i386-base-ubuntu:latest"
},
"args": {}
}
```
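The `build_from` map pairs each architecture with its new ubuntu base image, mirroring the `BUILD_FROM=homeassistant/${ADDON_ARCH}-base-ubuntu:latest` line in the CI job. A speculative sketch of resolving the base image from this file with jq; the file path is an assumption:

```bash
# Resolve the per-architecture base image from the build configuration above.
# "esphomeyaml/build.json" is an assumed path for illustration.
ADDON_ARCH="armhf"
BUILD_FROM="$(jq --raw-output ".build_from.${ADDON_ARCH}" esphomeyaml/build.json)"
echo "Build from ${BUILD_FROM}"   # -> homeassistant/armhf-base-ubuntu:latest
```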
@@ -1,22 +1,22 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
from esphomeyaml import const, core, core_config, mqtt, wizard, writer, yaml_util
|
||||
from esphomeyaml import const, core, core_config, mqtt, wizard, writer, yaml_util, platformio_api
|
||||
from esphomeyaml.config import get_component, iter_components, read_config
|
||||
from esphomeyaml.const import CONF_BAUD_RATE, CONF_BUILD_PATH, CONF_DOMAIN, CONF_ESPHOMEYAML, \
|
||||
CONF_HOSTNAME, CONF_LOGGER, CONF_MANUAL_IP, CONF_NAME, CONF_STATIC_IP, CONF_USE_CUSTOM_CODE, \
|
||||
CONF_WIFI, ESP_PLATFORM_ESP8266
|
||||
from esphomeyaml.core import ESPHomeYAMLError
|
||||
from esphomeyaml.helpers import AssignmentExpression, Expression, RawStatement, \
|
||||
_EXPRESSIONS, add, \
|
||||
add_job, color, flush_tasks, indent, quote, statement, relative_path
|
||||
from esphomeyaml.util import safe_print
|
||||
_EXPRESSIONS, add, add_job, color, flush_tasks, indent, statement, relative_path
|
||||
from esphomeyaml.util import safe_print, run_external_command
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -62,34 +62,6 @@ def choose_serial_port(config):
|
||||
return result[opt][0]
|
||||
|
||||
|
||||
def run_platformio(*cmd, **kwargs):
|
||||
def mock_exit(return_code):
|
||||
raise SystemExit(return_code)
|
||||
|
||||
orig_argv = sys.argv
|
||||
orig_exit = sys.exit # mock sys.exit
|
||||
full_cmd = u' '.join(quote(x) for x in cmd)
|
||||
_LOGGER.info(u"Running: %s", full_cmd)
|
||||
try:
|
||||
func = kwargs.get('main')
|
||||
if func is None:
|
||||
import platformio.__main__
|
||||
func = platformio.__main__.main
|
||||
sys.argv = list(cmd)
|
||||
sys.exit = mock_exit
|
||||
return func() or 0
|
||||
except KeyboardInterrupt:
|
||||
return 1
|
||||
except SystemExit as err:
|
||||
return err.args[0]
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
_LOGGER.error(u"Running platformio failed: %s", err)
|
||||
_LOGGER.error(u"Please try running %s locally.", full_cmd)
|
||||
finally:
|
||||
sys.argv = orig_argv
|
||||
sys.exit = orig_exit
|
||||
|
||||
|
||||
def run_miniterm(config, port, escape=False):
|
||||
import serial
|
||||
if CONF_LOGGER not in config:
|
||||
@@ -100,6 +72,7 @@ def run_miniterm(config, port, escape=False):
|
||||
_LOGGER.info("UART logging is disabled (baud_rate=0). Not starting UART logs.")
|
||||
_LOGGER.info("Starting log output from %s with baud rate %s", port, baud_rate)
|
||||
|
||||
backtrace_state = False
|
||||
with serial.Serial(port, baudrate=baud_rate) as ser:
|
||||
while True:
|
||||
try:
|
||||
@@ -114,6 +87,9 @@ def run_miniterm(config, port, escape=False):
|
||||
message = message.replace('\033', '\\033')
|
||||
safe_print(message)
|
||||
|
||||
backtrace_state = platformio_api.process_stacktrace(
|
||||
config, line, backtrace_state=backtrace_state)
|
||||
|
||||
|
||||
def write_cpp(config):
|
||||
_LOGGER.info("Generating C++ source...")
|
||||
@@ -154,11 +130,7 @@ def write_cpp(config):
|
||||
|
||||
def compile_program(args, config):
|
||||
_LOGGER.info("Compiling app...")
|
||||
build_path = relative_path(config[CONF_ESPHOMEYAML][CONF_BUILD_PATH])
|
||||
command = ['platformio', 'run', '-d', build_path]
|
||||
if args.verbose:
|
||||
command.append('-v')
|
||||
return run_platformio(*command)
|
||||
return platformio_api.run_compile(config, args.verbose)
|
||||
|
||||
|
||||
def get_upload_host(config):
|
||||
@@ -176,10 +148,10 @@ def upload_using_esptool(config, port):
|
||||
|
||||
build_path = relative_path(config[CONF_ESPHOMEYAML][CONF_BUILD_PATH])
|
||||
path = os.path.join(build_path, '.pioenvs', core.NAME, 'firmware.bin')
|
||||
cmd = ['esptool.py', '--before', 'default_reset', '--after', 'hard_reset',
|
||||
'--chip', 'esp8266', '--port', port, 'write_flash', '0x0', path]
|
||||
# pylint: disable=protected-access
|
||||
return run_platformio('esptool.py', '--before', 'default_reset', '--after', 'hard_reset',
|
||||
'--chip', 'esp8266', '--port', port, 'write_flash', '0x0',
|
||||
path, main=esptool._main)
|
||||
return run_external_command(esptool._main, *cmd)
|
||||
|
||||
|
||||
def upload_program(config, args, port):
|
||||
@@ -190,11 +162,7 @@ def upload_program(config, args, port):
|
||||
if port != 'OTA' and serial_port:
|
||||
if core.ESP_PLATFORM == ESP_PLATFORM_ESP8266 and args.use_esptoolpy:
|
||||
return upload_using_esptool(config, port)
|
||||
command = ['platformio', 'run', '-d', build_path,
|
||||
'-t', 'upload', '--upload-port', port]
|
||||
if args.verbose:
|
||||
command.append('-v')
|
||||
return run_platformio(*command)
|
||||
return platformio_api.run_upload(config, args.verbose, port)
|
||||
|
||||
if 'ota' not in config:
|
||||
_LOGGER.error("No serial port found and OTA not enabled. Can't upload!")
|
||||
@@ -243,7 +211,7 @@ def clean_mqtt(config, args):
|
||||
def setup_log(debug=False):
|
||||
log_level = logging.DEBUG if debug else logging.INFO
|
||||
logging.basicConfig(level=log_level)
|
||||
fmt = "%(levelname)s [%(name)s] %(message)s"
|
||||
fmt = "%(levelname)s %(message)s"
|
||||
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
|
||||
datefmt = '%H:%M:%S'
|
||||
|
||||
@@ -367,6 +335,28 @@ def command_clean(args, config):
|
||||
return 0
|
||||
|
||||
|
||||
def command_hass_config(args, config):
|
||||
from esphomeyaml.components import mqtt as mqtt_component
|
||||
|
||||
_LOGGER.info("This is what you should put in your Home Assistant YAML configuration.")
|
||||
_LOGGER.info("Please note this is only necessary if you're not using MQTT discovery.")
|
||||
data = mqtt_component.GenerateHassConfigData(config)
|
||||
hass_config = OrderedDict()
|
||||
for domain, component, conf in iter_components(config):
|
||||
if not hasattr(component, 'to_hass_config'):
|
||||
continue
|
||||
func = getattr(component, 'to_hass_config')
|
||||
ret = func(data, conf)
|
||||
if not isinstance(ret, (list, tuple)):
|
||||
ret = [ret]
|
||||
ret = [x for x in ret if x is not None]
|
||||
domain_conf = hass_config.setdefault(domain.split('.')[0], [])
|
||||
domain_conf += ret
|
||||
|
||||
safe_print(yaml_util.dump(hass_config))
|
||||
return 0
|
||||
|
||||
|
||||
def command_dashboard(args):
|
||||
from esphomeyaml.dashboard import dashboard
|
||||
|
||||
@@ -388,6 +378,7 @@ POST_CONFIG_ACTIONS = {
|
||||
'clean-mqtt': command_clean_mqtt,
|
||||
'mqtt-fingerprint': command_mqtt_fingerprint,
|
||||
'clean': command_clean,
|
||||
'hass-config': command_hass_config,
|
||||
}
|
||||
|
||||
|
||||
@@ -469,6 +460,9 @@ def parse_args(argv):
|
||||
dashboard.add_argument("--open-ui", help="Open the dashboard UI in a browser.",
|
||||
action='store_true')
|
||||
|
||||
subparsers.add_parser('hass-config', help="Dump the configuration entries that should be added"
|
||||
"to Home Assistant when not using MQTT discovery.")
|
||||
|
||||
return parser.parse_args(argv[1:])
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import copy
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from esphomeyaml import core
|
||||
@@ -28,8 +30,9 @@ def validate_recursive_condition(value):
|
||||
|
||||
|
||||
def validate_recursive_action(value):
|
||||
value = cv.ensure_list(value)
|
||||
value = cv.ensure_list(value)[:]
|
||||
for i, item in enumerate(value):
|
||||
item = copy.deepcopy(item)
|
||||
if not isinstance(item, dict):
|
||||
raise vol.Invalid(u"Action must consist of key-value mapping! Got {}".format(item))
|
||||
key = next((x for x in item if x != CONF_ACTION_ID), None)
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import voluptuous as vol
|
||||
|
||||
from esphomeyaml import automation, core
|
||||
from esphomeyaml.components import mqtt
|
||||
import esphomeyaml.config_validation as cv
|
||||
from esphomeyaml import automation
|
||||
from esphomeyaml.const import CONF_DEVICE_CLASS, CONF_ID, CONF_INTERNAL, CONF_INVERTED, \
|
||||
CONF_MAX_LENGTH, CONF_MIN_LENGTH, CONF_MQTT_ID, CONF_ON_CLICK, CONF_ON_DOUBLE_CLICK, \
|
||||
CONF_ON_PRESS, CONF_ON_RELEASE, CONF_TRIGGER_ID, CONF_FILTERS, CONF_INVERT, CONF_DELAYED_ON, \
|
||||
CONF_DELAYED_OFF, CONF_LAMBDA, CONF_HEARTBEAT
|
||||
from esphomeyaml.helpers import App, NoArg, Pvariable, add, add_job, esphomelib_ns, \
|
||||
setup_mqtt_component, bool_, process_lambda, ArrayInitializer
|
||||
from esphomeyaml.const import CONF_DELAYED_OFF, CONF_DELAYED_ON, CONF_DEVICE_CLASS, CONF_FILTERS, \
|
||||
CONF_HEARTBEAT, CONF_ID, CONF_INTERNAL, CONF_INVALID_COOLDOWN, CONF_INVERT, CONF_INVERTED, \
|
||||
CONF_LAMBDA, CONF_MAX_LENGTH, CONF_MIN_LENGTH, CONF_MQTT_ID, CONF_ON_CLICK, \
|
||||
CONF_ON_DOUBLE_CLICK, CONF_ON_MULTI_CLICK, CONF_ON_PRESS, CONF_ON_RELEASE, CONF_STATE, \
|
||||
CONF_TIMING, CONF_TRIGGER_ID
|
||||
from esphomeyaml.helpers import App, ArrayInitializer, NoArg, Pvariable, StructInitializer, add, \
|
||||
add_job, bool_, esphomelib_ns, process_lambda, setup_mqtt_component
|
||||
|
||||
DEVICE_CLASSES = [
|
||||
'', 'battery', 'cold', 'connectivity', 'door', 'garage_door', 'gas',
|
||||
@@ -25,6 +27,8 @@ PressTrigger = binary_sensor_ns.PressTrigger
|
||||
ReleaseTrigger = binary_sensor_ns.ReleaseTrigger
|
||||
ClickTrigger = binary_sensor_ns.ClickTrigger
|
||||
DoubleClickTrigger = binary_sensor_ns.DoubleClickTrigger
|
||||
MultiClickTrigger = binary_sensor_ns.MultiClickTrigger
|
||||
MultiClickTriggerEvent = binary_sensor_ns.MultiClickTriggerEvent
|
||||
BinarySensor = binary_sensor_ns.BinarySensor
|
||||
InvertFilter = binary_sensor_ns.InvertFilter
|
||||
LambdaFilter = binary_sensor_ns.LambdaFilter
|
||||
@@ -43,6 +47,99 @@ FILTERS_SCHEMA = vol.All(cv.ensure_list, [vol.All({
|
||||
vol.Optional(CONF_LAMBDA): cv.lambda_,
|
||||
}, cv.has_exactly_one_key(*FILTER_KEYS))])
|
||||
|
||||
MULTI_CLICK_TIMING_SCHEMA = vol.Schema({
|
||||
vol.Optional(CONF_STATE): cv.boolean,
|
||||
vol.Optional(CONF_MIN_LENGTH): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_MAX_LENGTH): cv.positive_time_period_milliseconds,
|
||||
})
|
||||
|
||||
|
||||
def parse_multi_click_timing_str(value):
|
||||
if not isinstance(value, basestring):
|
||||
return value
|
||||
|
||||
parts = value.lower().split(' ')
|
||||
if len(parts) != 5:
|
||||
raise vol.Invalid("Multi click timing grammar consists of exactly 5 words, not {}"
|
||||
"".format(len(parts)))
|
||||
try:
|
||||
state = cv.boolean(parts[0])
|
||||
except vol.Invalid:
|
||||
raise vol.Invalid(u"First word must either be ON or OFF, not {}".format(parts[0]))
|
||||
|
||||
if parts[1] != 'for':
|
||||
raise vol.Invalid(u"Second word must be 'for', got {}".format(parts[1]))
|
||||
|
||||
if parts[2] == 'at':
|
||||
if parts[3] == 'least':
|
||||
key = CONF_MIN_LENGTH
|
||||
elif parts[3] == 'most':
|
||||
key = CONF_MAX_LENGTH
|
||||
else:
|
||||
raise vol.Invalid(u"Third word after at must either be 'least' or 'most', got {}"
|
||||
u"".format(parts[3]))
|
||||
try:
|
||||
length = cv.positive_time_period_milliseconds(parts[4])
|
||||
except vol.Invalid as err:
|
||||
raise vol.Invalid(u"Multi Click Grammar Parsing length failed: {}".format(err))
|
||||
return {
|
||||
CONF_STATE: state,
|
||||
key: str(length)
|
||||
}
|
||||
|
||||
if parts[3] != 'to':
|
||||
raise vol.Invalid("Multi click grammar: 4th word must be 'to'")
|
||||
|
||||
try:
|
||||
min_length = cv.positive_time_period_milliseconds(parts[2])
|
||||
except vol.Invalid as err:
|
||||
raise vol.Invalid(u"Multi Click Grammar Parsing minimum length failed: {}".format(err))
|
||||
|
||||
try:
|
||||
max_length = cv.positive_time_period_milliseconds(parts[4])
|
||||
except vol.Invalid as err:
|
||||
raise vol.Invalid(u"Multi Click Grammar Parsing minimum length failed: {}".format(err))
|
||||
|
||||
return {
|
||||
CONF_STATE: state,
|
||||
CONF_MIN_LENGTH: str(min_length),
|
||||
CONF_MAX_LENGTH: str(max_length)
|
||||
}
|
||||
|
||||
|
||||
def validate_multi_click_timing(value):
|
||||
if not isinstance(value, list):
|
||||
raise vol.Invalid("Timing option must be a *list* of times!")
|
||||
timings = []
|
||||
state = None
|
||||
for i, v_ in enumerate(value):
|
||||
v_ = MULTI_CLICK_TIMING_SCHEMA(v_)
|
||||
min_length = v_.get(CONF_MIN_LENGTH)
|
||||
max_length = v_.get(CONF_MAX_LENGTH)
|
||||
if min_length is None and max_length is None:
|
||||
raise vol.Invalid("At least one of min_length and max_length is required!")
|
||||
if min_length is None and max_length is not None:
|
||||
min_length = core.TimePeriodMilliseconds(milliseconds=0)
|
||||
|
||||
new_state = v_.get(CONF_STATE, not state)
|
||||
if new_state == state:
|
||||
raise vol.Invalid("Timings must have alternating state. Indices {} and {} have "
|
||||
"the same state {}".format(i, i + 1, state))
|
||||
if max_length is not None and max_length < min_length:
|
||||
raise vol.Invalid("Max length ({}) must be larger than min length ({})."
|
||||
"".format(max_length, min_length))
|
||||
|
||||
state = new_state
|
||||
tim = {
|
||||
CONF_STATE: new_state,
|
||||
CONF_MIN_LENGTH: min_length,
|
||||
}
|
||||
if max_length is not None:
|
||||
tim[CONF_MAX_LENGTH] = max_length
|
||||
timings.append(tim)
|
||||
return timings
|
||||
|
||||
|
||||
BINARY_SENSOR_SCHEMA = cv.MQTT_COMPONENT_SCHEMA.extend({
|
||||
cv.GenerateID(CONF_MQTT_ID): cv.declare_variable_id(MQTTBinarySensorComponent),
|
||||
cv.GenerateID(): cv.declare_variable_id(BinarySensor),
|
||||
@@ -65,6 +162,12 @@ BINARY_SENSOR_SCHEMA = cv.MQTT_COMPONENT_SCHEMA.extend({
|
||||
vol.Optional(CONF_MIN_LENGTH, default='50ms'): cv.positive_time_period_milliseconds,
|
||||
vol.Optional(CONF_MAX_LENGTH, default='350ms'): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
vol.Optional(CONF_ON_MULTI_CLICK): automation.validate_automation({
|
||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(MultiClickTrigger),
|
||||
vol.Required(CONF_TIMING): vol.All([parse_multi_click_timing_str],
|
||||
validate_multi_click_timing),
|
||||
vol.Optional(CONF_INVALID_COOLDOWN): cv.positive_time_period_milliseconds,
|
||||
}),
|
||||
|
||||
vol.Optional(CONF_INVERTED): cv.invalid(
|
||||
"The inverted binary_sensor property has been replaced by the "
|
||||
@@ -136,6 +239,22 @@ def setup_binary_sensor_core_(binary_sensor_var, mqtt_var, config):
|
||||
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
|
||||
automation.build_automation(trigger, NoArg, conf)
|
||||
|
||||
for conf in config.get(CONF_ON_MULTI_CLICK, []):
|
||||
timings = []
|
||||
for tim in conf[CONF_TIMING]:
|
||||
timings.append(StructInitializer(
|
||||
MultiClickTriggerEvent,
|
||||
('state', tim[CONF_STATE]),
|
||||
('min_length', tim[CONF_MIN_LENGTH]),
|
||||
('max_length', tim.get(CONF_MAX_LENGTH, 4294967294)),
|
||||
))
|
||||
timings = ArrayInitializer(*timings, multiline=False)
|
||||
rhs = App.register_component(binary_sensor_var.make_multi_click_trigger(timings))
|
||||
trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
|
||||
if CONF_INVALID_COOLDOWN in conf:
|
||||
add(trigger.set_invalid_cooldown(conf[CONF_INVALID_COOLDOWN]))
|
||||
automation.build_automation(trigger, NoArg, conf)
|
||||
|
||||
setup_mqtt_component(mqtt_var, config)
|
||||
|
||||
|
||||
@@ -154,4 +273,14 @@ def register_binary_sensor(var, config):
|
||||
add_job(setup_binary_sensor_core_, binary_sensor_var, mqtt_var, config)
|
||||
|
||||
|
||||
def core_to_hass_config(data, config):
|
||||
ret = mqtt.build_hass_config(data, 'binary_sensor', config,
|
||||
include_state=True, include_command=False)
|
||||
if ret is None:
|
||||
return None
|
||||
if CONF_DEVICE_CLASS in config:
|
||||
ret['device_class'] = config[CONF_DEVICE_CLASS]
|
||||
return ret
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_BINARY_SENSOR'
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
import voluptuous as vol
|
||||
|
||||
import esphomeyaml.config_validation as cv
|
||||
from esphomeyaml.components import binary_sensor
|
||||
from esphomeyaml.components.esp32_ble_tracker import CONF_ESP32_BLE_ID, ESP32BLETracker, \
|
||||
make_address_array
|
||||
import esphomeyaml.config_validation as cv
|
||||
from esphomeyaml.const import CONF_MAC_ADDRESS, CONF_NAME
|
||||
from esphomeyaml.helpers import get_variable
|
||||
from esphomeyaml.helpers import esphomelib_ns, get_variable
|
||||
|
||||
DEPENDENCIES = ['esp32_ble_tracker']
|
||||
ESP32BLEPresenceDevice = esphomelib_ns.ESP32BLEPresenceDevice
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(ESP32BLEPresenceDevice),
|
||||
vol.Required(CONF_MAC_ADDRESS): cv.mac_address,
|
||||
cv.GenerateID(CONF_ESP32_BLE_ID): cv.use_variable_id(ESP32BLETracker)
|
||||
}))
|
||||
@@ -21,3 +23,7 @@ def to_code(config):
|
||||
yield
|
||||
rhs = hub.make_presence_sensor(config[CONF_NAME], make_address_array(config[CONF_MAC_ADDRESS]))
|
||||
binary_sensor.register_binary_sensor(rhs, config)
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -34,7 +34,10 @@ def validate_touch_pad(value):
|
||||
return value
|
||||
|
||||
|
||||
ESP32TouchBinarySensor = binary_sensor.binary_sensor_ns.ESP32TouchBinarySensor
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(ESP32TouchBinarySensor),
|
||||
vol.Required(CONF_PIN): validate_touch_pad,
|
||||
vol.Required(CONF_THRESHOLD): cv.uint16_t,
|
||||
cv.GenerateID(CONF_ESP32_TOUCH_ID): cv.use_variable_id(ESP32TouchComponent),
|
||||
@@ -51,3 +54,7 @@ def to_code(config):
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_ESP32_TOUCH_BINARY_SENSOR'
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -7,8 +7,10 @@ from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_PIN
|
||||
from esphomeyaml.helpers import App, gpio_input_pin_expression, variable, Application
|
||||
|
||||
MakeGPIOBinarySensor = Application.MakeGPIOBinarySensor
|
||||
GPIOBinarySensorComponent = binary_sensor.binary_sensor_ns.GPIOBinarySensorComponent
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(GPIOBinarySensorComponent),
|
||||
cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeGPIOBinarySensor),
|
||||
vol.Required(CONF_PIN): pins.gpio_input_pin_schema
|
||||
}))
|
||||
@@ -24,3 +26,7 @@ def to_code(config):
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_GPIO_BINARY_SENSOR'
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import voluptuous as vol
|
||||
|
||||
import esphomeyaml.config_validation as cv
|
||||
from esphomeyaml.components import binary_sensor
|
||||
from esphomeyaml.components import binary_sensor, display
|
||||
from esphomeyaml.components.display.nextion import Nextion
|
||||
import esphomeyaml.config_validation as cv
|
||||
from esphomeyaml.const import CONF_COMPONENT_ID, CONF_NAME, CONF_PAGE_ID
|
||||
from esphomeyaml.helpers import get_variable
|
||||
|
||||
@@ -10,7 +10,10 @@ DEPENDENCIES = ['display']
|
||||
|
||||
CONF_NEXTION_ID = 'nextion_id'
|
||||
|
||||
NextionTouchComponent = display.display_ns.NextionTouchComponent
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(NextionTouchComponent),
|
||||
vol.Required(CONF_PAGE_ID): cv.uint8_t,
|
||||
vol.Required(CONF_COMPONENT_ID): cv.uint8_t,
|
||||
cv.GenerateID(CONF_NEXTION_ID): cv.use_variable_id(Nextion)
|
||||
@@ -24,3 +27,7 @@ def to_code(config):
|
||||
rhs = hub.make_touch_component(config[CONF_NAME], config[CONF_PAGE_ID],
|
||||
config[CONF_COMPONENT_ID])
|
||||
binary_sensor.register_binary_sensor(rhs, config)
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -27,7 +27,10 @@ def validate_uid(value):
|
||||
return value
|
||||
|
||||
|
||||
PN532BinarySensor = binary_sensor.binary_sensor_ns.PN532BinarySensor
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(PN532BinarySensor),
|
||||
vol.Required(CONF_UID): validate_uid,
|
||||
cv.GenerateID(CONF_PN532_ID): cv.use_variable_id(PN532Component)
|
||||
}))
|
||||
@@ -40,3 +43,7 @@ def to_code(config):
|
||||
addr = [HexInt(int(x, 16)) for x in config[CONF_UID].split('-')]
|
||||
rhs = hub.make_tag(config[CONF_NAME], ArrayInitializer(*addr, multiline=False))
|
||||
binary_sensor.register_binary_sensor(rhs, config)
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -9,7 +9,10 @@ DEPENDENCIES = ['rdm6300']
|
||||
|
||||
CONF_RDM6300_ID = 'rdm6300_id'
|
||||
|
||||
RDM6300BinarySensor = binary_sensor.binary_sensor_ns.RDM6300BinarySensor
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(RDM6300BinarySensor),
|
||||
vol.Required(CONF_UID): cv.uint32_t,
|
||||
cv.GenerateID(CONF_RDM6300_ID): cv.use_variable_id(rdm6300.RDM6300Component)
|
||||
}))
|
||||
@@ -21,3 +24,7 @@ def to_code(config):
|
||||
yield
|
||||
rhs = hub.make_card(config[CONF_NAME], config[CONF_UID])
|
||||
binary_sensor.register_binary_sensor(rhs, config)
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -36,6 +36,7 @@ RCSwitchTypeCReceiver = remote_ns.RCSwitchTypeCReceiver
|
||||
RCSwitchTypeDReceiver = remote_ns.RCSwitchTypeDReceiver
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(RemoteReceiver),
|
||||
vol.Optional(CONF_LG): vol.Schema({
|
||||
vol.Required(CONF_DATA): cv.hex_uint32_t,
|
||||
vol.Optional(CONF_NBITS, default=28): vol.All(vol.Coerce(int), cv.one_of(28, 32)),
|
||||
@@ -119,3 +120,7 @@ def to_code(config):
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_REMOTE_RECEIVER'
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return binary_sensor.core_to_hass_config(data, config)
|
||||
|
||||
@@ -6,9 +6,11 @@ from esphomeyaml.helpers import App, Application, variable
|
||||
DEPENDENCIES = ['mqtt']
|
||||
|
||||
MakeStatusBinarySensor = Application.MakeStatusBinarySensor
|
||||
StatusBinarySensor = binary_sensor.binary_sensor_ns.StatusBinarySensor
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeStatusBinarySensor),
    cv.GenerateID(): cv.declare_variable_id(StatusBinarySensor),
}))


@@ -19,3 +21,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_STATUS_BINARY_SENSOR'


def to_hass_config(data, config):
    return binary_sensor.core_to_hass_config(data, config)

@@ -1,13 +1,15 @@
import voluptuous as vol

import esphomeyaml.config_validation as cv
from esphomeyaml.components import binary_sensor
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_LAMBDA, CONF_MAKE_ID, CONF_NAME
from esphomeyaml.helpers import App, Application, process_lambda, variable, optional, bool_, add
from esphomeyaml.helpers import App, Application, add, bool_, optional, process_lambda, variable

MakeTemplateBinarySensor = Application.MakeTemplateBinarySensor
TemplateBinarySensor = binary_sensor.binary_sensor_ns.TemplateBinarySensor

PLATFORM_SCHEMA = cv.nameable(binary_sensor.BINARY_SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(TemplateBinarySensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeTemplateBinarySensor),
    vol.Required(CONF_LAMBDA): cv.lambda_,
}))
@@ -26,3 +28,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_TEMPLATE_BINARY_SENSOR'


def to_hass_config(data, config):
    return binary_sensor.core_to_hass_config(data, config)

@@ -1,6 +1,7 @@
import voluptuous as vol

from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
from esphomeyaml.components import mqtt
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ID, CONF_MQTT_ID, CONF_INTERNAL
from esphomeyaml.helpers import Pvariable, esphomelib_ns, setup_mqtt_component, add, \
@@ -88,3 +89,10 @@ def cover_stop_to_code(config, action_id, arg_type):
    rhs = var.make_stop_action(template_arg)
    type = StopAction.template(arg_type)
    yield Pvariable(action_id, rhs, type=type)


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'cover', config, include_state=True, include_command=True)
    if ret is None:
        return None
    return ret

@@ -45,3 +45,12 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_TEMPLATE_COVER'


def to_hass_config(data, config):
    ret = cover.core_to_hass_config(data, config)
    if ret is None:
        return None
    if CONF_OPTIMISTIC in config:
        ret['optimistic'] = config[CONF_OPTIMISTIC]
    return ret

@@ -1,10 +1,11 @@
import voluptuous as vol

from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
from esphomeyaml.components import mqtt
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ID, CONF_MQTT_ID, CONF_OSCILLATION_COMMAND_TOPIC, \
    CONF_OSCILLATION_STATE_TOPIC, CONF_SPEED_COMMAND_TOPIC, CONF_SPEED_STATE_TOPIC, CONF_INTERNAL, \
    CONF_SPEED, CONF_OSCILLATING
    CONF_SPEED, CONF_OSCILLATING, CONF_OSCILLATION_OUTPUT, CONF_NAME
from esphomeyaml.helpers import Application, Pvariable, add, esphomelib_ns, setup_mqtt_component, \
    TemplateArguments, get_variable, templatable, bool_

@@ -128,3 +129,15 @@ def fan_turn_on_to_code(config, action_id, arg_type):
        yield None
    add(action.set_speed(template_))
    yield action


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'fan', config, include_state=True, include_command=True)
    if ret is None:
        return None
    if CONF_OSCILLATION_OUTPUT in config:
        default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'oscillation/state')
        ret['oscillation_state_topic'] = config.get(CONF_OSCILLATION_STATE_TOPIC, default)
        default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'oscillation/command')
        ret['oscillation_command__topic'] = config.get(CONF_OSCILLATION_COMMAND_TOPIC, default)
    return ret

@@ -27,3 +27,7 @@ def to_code(config):
        add(fan_struct.Poutput.set_oscillation(oscillation_output))

    fan.setup_fan(fan_struct.Pstate, fan_struct.Pmqtt, config)


def to_hass_config(data, config):
    return fan.core_to_hass_config(data, config)

@@ -1,7 +1,7 @@
import voluptuous as vol

import esphomeyaml.config_validation as cv
from esphomeyaml.components import fan
from esphomeyaml.components import fan, mqtt
from esphomeyaml.const import CONF_HIGH, CONF_LOW, CONF_MAKE_ID, CONF_MEDIUM, CONF_NAME, \
    CONF_OSCILLATION_OUTPUT, CONF_OUTPUT, CONF_SPEED, CONF_SPEED_COMMAND_TOPIC, \
    CONF_SPEED_STATE_TOPIC
@@ -43,3 +43,14 @@ def to_code(config):
        add(fan_struct.Poutput.set_oscillation(oscillation_output))

    fan.setup_fan(fan_struct.Pstate, fan_struct.Pmqtt, config)


def to_hass_config(data, config):
    ret = fan.core_to_hass_config(data, config)
    if ret is None:
        return None
    default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'speed/state')
    ret['speed_state_topic'] = config.get(CONF_SPEED_STATE_TOPIC, default)
    default = mqtt.get_default_topic_for(data, 'fan', config[CONF_NAME], 'speed/command')
    ret['speed_command__topic'] = config.get(CONF_SPEED_COMMAND_TOPIC, default)
    return ret

@@ -1,6 +1,7 @@
import voluptuous as vol

from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
from esphomeyaml.components import mqtt
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ALPHA, CONF_BLUE, CONF_BRIGHTNESS, CONF_COLORS, \
    CONF_DEFAULT_TRANSITION_LENGTH, CONF_DURATION, CONF_EFFECTS, CONF_EFFECT_ID, \
@@ -448,3 +449,24 @@ def light_turn_on_to_code(config, action_id, arg_type):
        yield None
    add(action.set_effect(template_))
    yield action


def core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=True,
                        white_value=True):
    ret = mqtt.build_hass_config(data, 'light', config, include_state=True, include_command=True,
                                 platform='mqtt_json')
    if ret is None:
        return None
    if brightness:
        ret['brightness'] = True
    if rgb:
        ret['rgb'] = True
    if color_temp:
        ret['color_temp'] = True
    if white_value:
        ret['white_value'] = True
    for effect in config.get(CONF_EFFECTS, []):
        ret["effect"] = True
        effects = ret.setdefault("effect_list", [])
        effects.append(next(x for x in effect.values())[CONF_NAME])
    return ret

@@ -19,3 +19,8 @@ def to_code(config):
    rhs = App.make_binary_light(config[CONF_NAME], output)
    light_struct = variable(config[CONF_MAKE_ID], rhs)
    light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=False, rgb=False, color_temp=False,
                                     white_value=False)

@@ -32,3 +32,8 @@ def to_code(config):
                                     cold_white, warm_white)
    light_struct = variable(config[CONF_MAKE_ID], rhs)
    light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=False, color_temp=True,
                                     white_value=False)

@@ -102,3 +102,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_FAST_LED_LIGHT'


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
                                     white_value=False)

@@ -82,3 +82,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_FAST_LED_LIGHT'


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
                                     white_value=False)

@@ -22,3 +22,8 @@ def to_code(config):
    rhs = App.make_monochromatic_light(config[CONF_NAME], output)
    light_struct = variable(config[CONF_MAKE_ID], rhs)
    light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=False, color_temp=False,
                                     white_value=False)

@@ -30,3 +30,8 @@ def to_code(config):
    rhs = App.make_rgb_light(config[CONF_NAME], red, green, blue)
    light_struct = variable(config[CONF_MAKE_ID], rhs)
    light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
                                     white_value=False)

@@ -34,3 +34,8 @@ def to_code(config):
    rhs = App.make_rgbw_light(config[CONF_NAME], red, green, blue, white)
    light_struct = variable(config[CONF_MAKE_ID], rhs)
    light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=False,
                                     white_value=True)

@@ -60,3 +60,8 @@ def to_code(config):
                                  red, green, blue, cold_white, warm_white)
    light_struct = variable(config[CONF_MAKE_ID], rhs)
    light.setup_light(light_struct.Pstate, light_struct.Pmqtt, config)


def to_hass_config(data, config):
    return light.core_to_hass_config(data, config, brightness=True, rgb=True, color_temp=True,
                                     white_value=True)

@@ -1,3 +1,4 @@
from collections import OrderedDict
import re

import voluptuous as vol
@@ -10,7 +11,10 @@ from esphomeyaml.const import CONF_BIRTH_MESSAGE, CONF_BROKER, CONF_CLIENT_ID, C
    CONF_DISCOVERY_PREFIX, CONF_DISCOVERY_RETAIN, CONF_ID, CONF_KEEPALIVE, CONF_LEVEL, \
    CONF_LOG_TOPIC, CONF_ON_MESSAGE, CONF_PASSWORD, CONF_PAYLOAD, CONF_PORT, CONF_QOS, \
    CONF_REBOOT_TIMEOUT, CONF_RETAIN, CONF_SHUTDOWN_MESSAGE, CONF_SSL_FINGERPRINTS, CONF_TOPIC, \
    CONF_TOPIC_PREFIX, CONF_TRIGGER_ID, CONF_USERNAME, CONF_WILL_MESSAGE, CONF_ON_JSON_MESSAGE
    CONF_TOPIC_PREFIX, CONF_TRIGGER_ID, CONF_USERNAME, CONF_WILL_MESSAGE, CONF_ON_JSON_MESSAGE, \
    CONF_STATE_TOPIC, CONF_MQTT, CONF_ESPHOMEYAML, CONF_NAME, CONF_AVAILABILITY, \
    CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_INTERNAL
from esphomeyaml.core import ESPHomeYAMLError
from esphomeyaml.helpers import App, ArrayInitializer, Pvariable, RawExpression, \
    StructInitializer, TemplateArguments, add, esphomelib_ns, optional, std_string, templatable, \
    uint8, bool_, JsonObjectRef, process_lambda, JsonObjectConstRef
@@ -239,3 +243,66 @@ def required_build_flags(config):
    if CONF_SSL_FINGERPRINTS in config:
        return '-DASYNC_TCP_SSL_ENABLED=1'
    return None


def get_default_topic_for(data, component_type, name, suffix):
    whitelist = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_'
    sanitized_name = ''.join(x for x in name.lower().replace(' ', '_') if x in whitelist)
    return '{}/{}/{}/{}'.format(data.topic_prefix, component_type,
                                sanitized_name, suffix)


def build_hass_config(data, component_type, config, include_state=True, include_command=True,
                      platform='mqtt'):
    if config.get(CONF_INTERNAL, False):
        return None
    ret = OrderedDict()
    ret['platform'] = platform
    ret['name'] = config[CONF_NAME]
    if include_state:
        default = get_default_topic_for(data, component_type, config[CONF_NAME], 'state')
        ret['state_topic'] = config.get(CONF_STATE_TOPIC, default)
    if include_command:
        default = get_default_topic_for(data, component_type, config[CONF_NAME], 'command')
        ret['command_topic'] = config.get(CONF_STATE_TOPIC, default)
    avail = config.get(CONF_AVAILABILITY, data.availability)
    if avail:
        ret['availability_topic'] = avail[CONF_TOPIC]
        payload_available = avail[CONF_PAYLOAD_AVAILABLE]
        if payload_available != 'online':
            ret['payload_available'] = payload_available
        payload_not_available = avail[CONF_PAYLOAD_NOT_AVAILABLE]
        if payload_not_available != 'offline':
            ret['payload_not_available'] = payload_not_available
    return ret


class GenerateHassConfigData(object):
    def __init__(self, config):
        if 'mqtt' not in config:
            raise ESPHomeYAMLError("Cannot generate Home Assistant MQTT config if MQTT is not "
                                   "used!")
        mqtt = config[CONF_MQTT]
        self.topic_prefix = mqtt.get(CONF_TOPIC_PREFIX, config[CONF_ESPHOMEYAML][CONF_NAME])
        birth_message = mqtt.get(CONF_BIRTH_MESSAGE)
        if CONF_BIRTH_MESSAGE not in mqtt:
            birth_message = {
                CONF_TOPIC: self.topic_prefix + '/status',
                CONF_PAYLOAD: 'online',
            }
        will_message = mqtt.get(CONF_WILL_MESSAGE)
        if CONF_WILL_MESSAGE not in mqtt:
            will_message = {
                CONF_TOPIC: self.topic_prefix + '/status',
                CONF_PAYLOAD: 'offline'
            }
        if not birth_message or not will_message:
            self.availability = None
        elif birth_message[CONF_TOPIC] != will_message[CONF_TOPIC]:
            self.availability = None
        else:
            self.availability = {
                CONF_TOPIC: birth_message[CONF_TOPIC],
                CONF_PAYLOAD_AVAILABLE: birth_message[CONF_PAYLOAD],
                CONF_PAYLOAD_NOT_AVAILABLE: will_message[CONF_PAYLOAD],
            }

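The two helpers above, get_default_topic_for() and build_hass_config(), are what every new to_hass_config() hook in this changeset ultimately feeds into. A minimal, hypothetical sketch of the discovery entry they would assemble for a sensor named "Living Room Temperature" on a node whose topic prefix is "livingroom" (names invented for illustration, availability keys omitted):

# Illustrative sketch only - not part of the diff above.
def sanitize(name):
    whitelist = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_'
    return ''.join(x for x in name.lower().replace(' ', '_') if x in whitelist)

topic_prefix = 'livingroom'
name = 'Living Room Temperature'
state_topic = '{}/{}/{}/{}'.format(topic_prefix, 'sensor', sanitize(name), 'state')
# -> 'livingroom/sensor/living_room_temperature/state'

hass_entry = {
    'platform': 'mqtt',
    'name': name,
    'state_topic': state_topic,
}
print(hass_entry)
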
@@ -5,7 +5,7 @@ import esphomeyaml.config_validation as cv
from esphomeyaml.components.power_supply import PowerSupplyComponent
from esphomeyaml.const import CONF_INVERTED, CONF_MAX_POWER, CONF_POWER_SUPPLY, CONF_ID, CONF_LEVEL
from esphomeyaml.helpers import add, esphomelib_ns, get_variable, TemplateArguments, Pvariable, \
    templatable, bool_
    templatable, float_, add_job

PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({

@@ -43,37 +43,20 @@ def setup_output_platform_(obj, config, skip_power_supply=False):


def setup_output_platform(obj, config, skip_power_supply=False):
    for _ in setup_output_platform_(obj, config, skip_power_supply):
        yield
    add_job(setup_output_platform_, obj, config, skip_power_supply)


BUILD_FLAGS = '-DUSE_OUTPUT'


CONF_OUTPUT_TURN_ON = 'output.turn_on'
OUTPUT_TURN_OFF_ACTION = maybe_simple_id({
OUTPUT_TURN_ON_ACTION = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(None),
})


@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_ON, OUTPUT_TURN_OFF_ACTION)
@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_ON, OUTPUT_TURN_ON_ACTION)
def output_turn_on_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_off_action(template_arg)
    type = TurnOffAction.template(arg_type)
    yield Pvariable(action_id, rhs, type=type)


CONF_OUTPUT_TURN_OFF = 'output.turn_off'
OUTPUT_TURN_ON_ACTION = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(None)
})


@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_OFF, OUTPUT_TURN_ON_ACTION)
def output_turn_off_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
    for var in get_variable(config[CONF_ID]):
        yield None
@@ -82,10 +65,26 @@ def output_turn_off_to_code(config, action_id, arg_type):
    yield Pvariable(action_id, rhs, type=type)


CONF_OUTPUT_TURN_OFF = 'output.turn_off'
OUTPUT_TURN_OFF_ACTION = maybe_simple_id({
    vol.Required(CONF_ID): cv.use_variable_id(None)
})


@ACTION_REGISTRY.register(CONF_OUTPUT_TURN_OFF, OUTPUT_TURN_OFF_ACTION)
def output_turn_off_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_turn_off_action(template_arg)
    type = TurnOffAction.template(arg_type)
    yield Pvariable(action_id, rhs, type=type)


CONF_OUTPUT_SET_LEVEL = 'output.set_level'
OUTPUT_SET_LEVEL_ACTION = vol.Schema({
    vol.Required(CONF_ID): cv.use_variable_id(None),
    vol.Required(CONF_LEVEL): cv.percentage,
    vol.Required(CONF_LEVEL): cv.templatable(cv.percentage),
})


@@ -97,7 +96,7 @@ def output_set_level_to_code(config, action_id, arg_type):
    rhs = var.make_set_level_action(template_arg)
    type = SetLevelAction.template(arg_type)
    action = Pvariable(action_id, rhs, type=type)
    for template_ in templatable(config[CONF_LEVEL], arg_type, bool_):
    for template_ in templatable(config[CONF_LEVEL], arg_type, float_):
        yield None
    add(action.set_level(template_))
    yield action

@@ -1,5 +1,6 @@
import voluptuous as vol

from esphomeyaml.components import mqtt
import esphomeyaml.config_validation as cv
from esphomeyaml import automation
from esphomeyaml.const import CONF_ABOVE, CONF_ACCURACY_DECIMALS, CONF_ALPHA, CONF_BELOW, \
@@ -7,7 +8,8 @@ from esphomeyaml.const import CONF_ABOVE, CONF_ACCURACY_DECIMALS, CONF_ALPHA, CO
    CONF_FILTER_NAN, CONF_FILTER_OUT, CONF_HEARTBEAT, CONF_ICON, CONF_ID, CONF_INTERNAL, \
    CONF_LAMBDA, CONF_MQTT_ID, CONF_MULTIPLY, CONF_OFFSET, CONF_ON_RAW_VALUE, CONF_ON_VALUE, \
    CONF_ON_VALUE_RANGE, CONF_OR, CONF_SEND_EVERY, CONF_SLIDING_WINDOW_MOVING_AVERAGE, \
    CONF_THROTTLE, CONF_TRIGGER_ID, CONF_UNIQUE, CONF_UNIT_OF_MEASUREMENT, CONF_WINDOW_SIZE
    CONF_THROTTLE, CONF_TRIGGER_ID, CONF_UNIQUE, CONF_UNIT_OF_MEASUREMENT, CONF_WINDOW_SIZE, \
    CONF_SEND_FIRST_AT
from esphomeyaml.helpers import App, ArrayInitializer, Pvariable, add, add_job, esphomelib_ns, \
    float_, process_lambda, setup_mqtt_component, templatable

@@ -20,6 +22,15 @@ def validate_recursive_filter(value):
    return FILTERS_SCHEMA(value)


def validate_send_first_at(value):
    send_first_at = value.get(CONF_SEND_FIRST_AT)
    send_every = value[CONF_SEND_EVERY]
    if send_first_at is not None and send_first_at > send_every:
        raise vol.Invalid("send_first_at must be smaller than or equal to send_every! {} <= {}"
                          "".format(send_first_at, send_every))
    return value


FILTER_KEYS = [CONF_OFFSET, CONF_MULTIPLY, CONF_FILTER_OUT, CONF_FILTER_NAN,
               CONF_SLIDING_WINDOW_MOVING_AVERAGE, CONF_EXPONENTIAL_MOVING_AVERAGE, CONF_LAMBDA,
               CONF_THROTTLE, CONF_DELTA, CONF_UNIQUE, CONF_HEARTBEAT, CONF_DEBOUNCE, CONF_OR]
@@ -29,10 +40,11 @@ FILTERS_SCHEMA = vol.All(cv.ensure_list, [vol.All({
    vol.Optional(CONF_MULTIPLY): vol.Coerce(float),
    vol.Optional(CONF_FILTER_OUT): vol.Coerce(float),
    vol.Optional(CONF_FILTER_NAN): None,
    vol.Optional(CONF_SLIDING_WINDOW_MOVING_AVERAGE): vol.Schema({
    vol.Optional(CONF_SLIDING_WINDOW_MOVING_AVERAGE): vol.All(vol.Schema({
        vol.Required(CONF_WINDOW_SIZE): cv.positive_not_null_int,
        vol.Required(CONF_SEND_EVERY): cv.positive_not_null_int,
    }),
        vol.Optional(CONF_SEND_FIRST_AT): cv.positive_not_null_int,
    }), validate_send_first_at),
    vol.Optional(CONF_EXPONENTIAL_MOVING_AVERAGE): vol.Schema({
        vol.Required(CONF_ALPHA): cv.positive_float,
        vol.Required(CONF_SEND_EVERY): cv.positive_not_null_int,
@@ -103,7 +115,8 @@ def setup_filter(config):
        yield FilterOutNANFilter.new()
    elif CONF_SLIDING_WINDOW_MOVING_AVERAGE in config:
        conf = config[CONF_SLIDING_WINDOW_MOVING_AVERAGE]
        yield SlidingWindowMovingAverageFilter.new(conf[CONF_WINDOW_SIZE], conf[CONF_SEND_EVERY])
        yield SlidingWindowMovingAverageFilter.new(conf[CONF_WINDOW_SIZE], conf[CONF_SEND_EVERY],
                                                   conf.get(CONF_SEND_FIRST_AT))
    elif CONF_EXPONENTIAL_MOVING_AVERAGE in config:
        conf = config[CONF_EXPONENTIAL_MOVING_AVERAGE]
        yield ExponentialMovingAverageFilter.new(conf[CONF_ALPHA], conf[CONF_SEND_EVERY])
@@ -199,3 +212,18 @@ def register_sensor(var, config):


BUILD_FLAGS = '-DUSE_SENSOR'


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'sensor', config, include_state=True, include_command=False)
    if ret is None:
        return None
    if CONF_UNIT_OF_MEASUREMENT in config:
        ret['unit_of_measurement'] = config[CONF_UNIT_OF_MEASUREMENT]
    if CONF_EXPIRE_AFTER in config:
        expire = config[CONF_EXPIRE_AFTER]
        if expire is not None:
            ret['expire_after'] = expire.total_seconds
    if CONF_ICON in config:
        ret['icon'] = config[CONF_ICON]
    return ret

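The new send_first_at option above only gets a validator in this file; the filtering itself happens in esphomelib. A rough, standalone sketch of the assumed behaviour - the first averaged value is reported after send_first_at raw samples instead of after a full send_every batch:

# Illustrative sketch only; assumed semantics of send_first_at.
from collections import deque

def sliding_window(values, window_size, send_every, send_first_at=None):
    send_first_at = send_first_at or send_every
    window = deque(maxlen=window_size)
    sent, count = [], 0
    for value in values:
        window.append(value)
        count += 1
        first = count == send_first_at
        later = count > send_first_at and (count - send_first_at) % send_every == 0
        if first or later:
            sent.append(sum(window) / len(window))
    return sent

# With send_every=10 and send_first_at=1 a value is reported right after the
# first sample instead of only after ten samples have arrived.
print(sliding_window(range(20), window_size=10, send_every=10, send_first_at=1))
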
@@ -23,8 +23,10 @@ def validate_adc_pin(value):


MakeADCSensor = Application.MakeADCSensor
ADCSensorComponent = sensor.sensor_ns.ADCSensorComponent

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(ADCSensorComponent),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeADCSensor),
    vol.Required(CONF_PIN): validate_adc_pin,
    vol.Optional(CONF_ATTENUATION): vol.All(cv.only_on_esp32, cv.one_of(*ATTENUATION_MODES)),
@@ -52,3 +54,7 @@ def required_build_flags(config):
    if config[CONF_PIN] == 'VCC':
        return '-DUSE_ADC_SENSOR_VCC'
    return None


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -45,7 +45,10 @@ def validate_mux(value):
    return cv.one_of(*MUX)(value)


ADS1115Sensor = sensor.sensor_ns.ADS1115Sensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(ADS1115Sensor),
    vol.Required(CONF_MULTIPLEXER): validate_mux,
    vol.Required(CONF_GAIN): validate_gain,
    cv.GenerateID(CONF_ADS1115_ID): cv.use_variable_id(ADS1115Component),
@@ -65,3 +68,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_ADS1115_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -15,8 +15,10 @@ BH1750_RESOLUTIONS = {
}

MakeBH1750Sensor = Application.MakeBH1750Sensor
BH1750Sensor = sensor.sensor_ns.BH1750Sensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(BH1750Sensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeBH1750Sensor),
    vol.Optional(CONF_ADDRESS, default=0x23): cv.i2c_address,
    vol.Optional(CONF_RESOLUTION): vol.All(cv.positive_float, cv.one_of(*BH1750_RESOLUTIONS)),
@@ -35,3 +37,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_BH1750'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -5,11 +5,14 @@ from esphomeyaml.components import sensor
from esphomeyaml.components.esp32_ble_tracker import CONF_ESP32_BLE_ID, ESP32BLETracker, \
    make_address_array
from esphomeyaml.const import CONF_MAC_ADDRESS, CONF_NAME
from esphomeyaml.helpers import get_variable
from esphomeyaml.helpers import get_variable, esphomelib_ns

DEPENDENCIES = ['esp32_ble_tracker']

ESP32BLERSSISensor = esphomelib_ns.ESP32BLERSSISensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(ESP32BLERSSISensor),
    vol.Required(CONF_MAC_ADDRESS): cv.mac_address,
    cv.GenerateID(CONF_ESP32_BLE_ID): cv.use_variable_id(ESP32BLETracker)
}))
@@ -21,3 +24,7 @@ def to_code(config):
        yield
    rhs = hub.make_rssi_sensor(config[CONF_NAME], make_address_array(config[CONF_MAC_ADDRESS]))
    sensor.register_sensor(rhs, config)


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -72,3 +72,9 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_BME280'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_PRESSURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]

@@ -92,3 +92,10 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_BME680'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_PRESSURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY]),
            sensor.core_to_hass_config(data, config[CONF_GAS_RESISTANCE])]

@@ -34,3 +34,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_BMP085_SENSOR'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_PRESSURE])]

@@ -65,3 +65,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_BMP280'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_PRESSURE])]

@@ -40,3 +40,11 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_CSE7766'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_VOLTAGE, CONF_CURRENT, CONF_POWER):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -7,7 +7,10 @@ from esphomeyaml.const import CONF_ADDRESS, CONF_DALLAS_ID, CONF_INDEX, CONF_NAM
    CONF_RESOLUTION
from esphomeyaml.helpers import HexIntLiteral, get_variable

DallasTemperatureSensor = sensor.sensor_ns.DallasTemperatureSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(DallasTemperatureSensor),
    vol.Exclusive(CONF_ADDRESS, 'dallas'): cv.hex_int,
    vol.Exclusive(CONF_INDEX, 'dallas'): cv.positive_int,
    cv.GenerateID(CONF_DALLAS_ID): cv.use_variable_id(DallasComponent),
@@ -29,3 +32,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_DALLAS_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -46,3 +46,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_DHT_SENSOR'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]

@@ -31,3 +31,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_DHT12_SENSOR'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]

@@ -7,8 +7,10 @@ from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_PIN, CONF_UPDATE_INT
from esphomeyaml.helpers import App, Application, gpio_input_pin_expression, variable

MakeDutyCycleSensor = Application.MakeDutyCycleSensor
DutyCycleSensor = sensor.sensor_ns.DutyCycleSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(DutyCycleSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeDutyCycleSensor),
    vol.Required(CONF_PIN): pins.internal_gpio_input_pin_schema,
    vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
@@ -26,3 +28,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_DUTY_CYCLE_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -8,8 +8,10 @@ from esphomeyaml.helpers import App, Application, variable
ESP_PLATFORMS = [ESP_PLATFORM_ESP32]

MakeESP32HallSensor = Application.MakeESP32HallSensor
ESP32HallSensor = sensor.sensor_ns.ESP32HallSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(ESP32HallSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeESP32HallSensor),
    vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
}))
@@ -22,3 +24,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_ESP32_HALL_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -32,3 +32,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_HDC1080_SENSOR'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]

@@ -50,8 +50,18 @@ def to_code(config):
        sensor.register_sensor(hlw.make_power_sensor(conf[CONF_NAME]), conf)
    if CONF_CURRENT_RESISTOR in config:
        add(hlw.set_current_resistor(config[CONF_CURRENT_RESISTOR]))
    if CONF_VOLTAGE_DIVIDER in config:
        add(hlw.set_voltage_divider(config[CONF_VOLTAGE_DIVIDER]))
    if CONF_CHANGE_MODE_EVERY in config:
        add(hlw.set_change_mode_every(config[CONF_CHANGE_MODE_EVERY]))


BUILD_FLAGS = '-DUSE_HLW8012'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_VOLTAGE, CONF_CURRENT, CONF_POWER):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -71,3 +71,11 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_HMC5883L'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_FIELD_STRENGTH_X, CONF_FIELD_STRENGTH_Y, CONF_FIELD_STRENGTH_Z, CONF_HEADING):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -30,3 +30,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_HTU21D_SENSOR'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]

@@ -44,3 +44,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_HX711'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -51,3 +51,11 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_INA219'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_BUS_VOLTAGE, CONF_SHUNT_VOLTAGE, CONF_CURRENT, CONF_POWER):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -62,3 +62,15 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_INA3221'


def to_hass_config(data, config):
    ret = []
    for channel in (CONF_CHANNEL_1, CONF_CHANNEL_2, CONF_CHANNEL_3):
        if channel not in config:
            continue
        conf = config[channel]
        for key in (CONF_BUS_VOLTAGE, CONF_SHUNT_VOLTAGE, CONF_CURRENT, CONF_POWER):
            if key in conf:
                ret.append(sensor.core_to_hass_config(data, conf[key]))
    return ret

@@ -9,8 +9,10 @@ from esphomeyaml.const import CONF_CS_PIN, CONF_MAKE_ID, CONF_NAME, CONF_SPI_ID,
from esphomeyaml.helpers import App, Application, get_variable, gpio_output_pin_expression, variable

MakeMAX6675Sensor = Application.MakeMAX6675Sensor
MAX6675Sensor = sensor.sensor_ns.MAX6675Sensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(MAX6675Sensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeMAX6675Sensor),
    cv.GenerateID(CONF_SPI_ID): cv.use_variable_id(SPIComponent),
    vol.Required(CONF_CS_PIN): pins.gpio_output_pin_schema,
@@ -32,3 +34,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_MAX6675_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -36,3 +36,11 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_MHZ19'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_CO2, CONF_TEMPERATURE):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -69,3 +69,12 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_MPU6050'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_ACCEL_X, CONF_ACCEL_Y, CONF_ACCEL_Z, CONF_GYRO_X, CONF_GYRO_Y, CONF_GYRO_Z,
                CONF_TEMPERATURE):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -8,8 +8,10 @@ from esphomeyaml.helpers import App, Application, add, variable
DEPENDENCIES = ['mqtt']

MakeMQTTSubscribeSensor = Application.MakeMQTTSubscribeSensor
MQTTSubscribeSensor = sensor.sensor_ns.MQTTSubscribeSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(MQTTSubscribeSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeMQTTSubscribeSensor),
    vol.Required(CONF_TOPIC): cv.subscribe_topic,
    vol.Optional(CONF_QOS): cv.mqtt_qos,
@@ -27,3 +29,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_MQTT_SUBSCRIBE_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -35,3 +35,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_MS5611'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_PRESSURE])]

@@ -85,3 +85,12 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_PMSX003'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_PM_1_0, CONF_PM_2_5, CONF_PM_10_0, CONF_TEMPERATURE, CONF_HUMIDITY,
                CONF_FORMALDEHYDE):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -17,6 +17,7 @@ COUNT_MODES = {
COUNT_MODE_SCHEMA = vol.All(vol.Upper, cv.one_of(*COUNT_MODES))

MakePulseCounterSensor = Application.MakePulseCounterSensor
PulseCounterSensorComponent = sensor.sensor_ns.PulseCounterSensorComponent


def validate_internal_filter(value):
@@ -33,6 +34,7 @@ def validate_internal_filter(value):


PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(PulseCounterSensorComponent),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakePulseCounterSensor),
    vol.Required(CONF_PIN): pins.internal_gpio_input_pin_schema,
    vol.Optional(CONF_COUNT_MODE): vol.Schema({
@@ -65,3 +67,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_PULSE_COUNTER_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -17,8 +17,10 @@ CONF_PIN_B = 'pin_b'
CONF_PIN_RESET = 'pin_reset'

MakeRotaryEncoderSensor = Application.MakeRotaryEncoderSensor
RotaryEncoderSensor = sensor.sensor_ns.RotaryEncoderSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(RotaryEncoderSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeRotaryEncoderSensor),
    vol.Required(CONF_PIN_A): pins.internal_gpio_input_pin_schema,
    vol.Required(CONF_PIN_B): pins.internal_gpio_input_pin_schema,
@@ -49,3 +51,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_ROTARY_ENCODER_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -36,3 +36,8 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_SHT3XD'


def to_hass_config(data, config):
    return [sensor.core_to_hass_config(data, config[CONF_TEMPERATURE]),
            sensor.core_to_hass_config(data, config[CONF_HUMIDITY])]

@@ -78,3 +78,12 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_TCS34725'


def to_hass_config(data, config):
    ret = []
    for key in (CONF_RED_CHANNEL, CONF_GREEN_CHANNEL, CONF_BLUE_CHANNEL, CONF_CLEAR_CHANNEL,
                CONF_ILLUMINANCE, CONF_COLOR_TEMPERATURE):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -6,8 +6,10 @@ from esphomeyaml.const import CONF_LAMBDA, CONF_MAKE_ID, CONF_NAME, CONF_UPDATE_
from esphomeyaml.helpers import App, process_lambda, variable, Application, float_, optional, add

MakeTemplateSensor = Application.MakeTemplateSensor
TemplateSensor = sensor.sensor_ns.TemplateSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(TemplateSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeTemplateSensor),
    vol.Required(CONF_LAMBDA): cv.lambda_,
    vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
@@ -27,3 +29,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_TEMPLATE_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

37 esphomeyaml/components/sensor/total_daily_energy.py Normal file
@@ -0,0 +1,37 @@
import voluptuous as vol

from esphomeyaml.components import sensor
from esphomeyaml.components.time import sntp
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_TIME_ID
from esphomeyaml.helpers import App, Application, get_variable, variable

DEPENDENCIES = ['time']

CONF_POWER_ID = 'power_id'
MakeTotalDailyEnergySensor = Application.MakeTotalDailyEnergySensor
TotalDailyEnergy = sensor.sensor_ns.TotalDailyEnergy

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(TotalDailyEnergy),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeTotalDailyEnergySensor),
    cv.GenerateID(CONF_TIME_ID): cv.use_variable_id(sntp.SNTPComponent),
    vol.Required(CONF_POWER_ID): cv.use_variable_id(None),
}))


def to_code(config):
    for time in get_variable(config[CONF_TIME_ID]):
        yield
    for sens in get_variable(config[CONF_POWER_ID]):
        yield
    rhs = App.make_total_daily_energy_sensor(config[CONF_NAME], time, sens)
    make = variable(config[CONF_MAKE_ID], rhs)
    sensor.setup_sensor(make.Ptotal_energy, make.Pmqtt, config)


BUILD_FLAGS = '-DUSE_TOTAL_DAILY_ENERGY_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)
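The C++ side of this new platform lives in esphomelib; judging by the name, TotalDailyEnergy presumably integrates the linked power sensor's readings (W) into a running daily total (Wh). A tiny illustrative sketch of that idea, assumed behaviour only:

# Illustrative sketch only; not code from the repository.
def integrate(total_wh, power_w, dt_seconds):
    return total_wh + power_w * dt_seconds / 3600.0

total = 0.0
for _ in range(60):                       # one hour of minute-by-minute samples
    total = integrate(total, 120.0, 60)   # a constant 120 W load
print(total)                              # -> 120.0 Wh after one hour
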
@@ -29,8 +29,10 @@ def validate_integration_time(value):


MakeTSL2561Sensor = Application.MakeTSL2561Sensor
TSL2561Sensor = Application.TSL2561Sensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(TSL2561Sensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeTSL2561Sensor),
    vol.Optional(CONF_ADDRESS, default=0x39): cv.i2c_address,
    vol.Optional(CONF_INTEGRATION_TIME): validate_integration_time,
@@ -55,3 +57,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_TSL2561'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -9,8 +9,10 @@ from esphomeyaml.helpers import App, Application, add, gpio_input_pin_expression
    gpio_output_pin_expression, variable

MakeUltrasonicSensor = Application.MakeUltrasonicSensor
UltrasonicSensorComponent = sensor.sensor_ns.UltrasonicSensorComponent

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(UltrasonicSensorComponent),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeUltrasonicSensor),
    vol.Required(CONF_TRIGGER_PIN): pins.gpio_output_pin_schema,
    vol.Required(CONF_ECHO_PIN): pins.internal_gpio_input_pin_schema,
@@ -39,3 +41,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_ULTRASONIC_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -6,8 +6,10 @@ from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_UPDATE_INTERVAL
from esphomeyaml.helpers import App, Application, variable

MakeUptimeSensor = Application.MakeUptimeSensor
UptimeSensor = sensor.sensor_ns.UptimeSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(UptimeSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeUptimeSensor),
    vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
}))
@@ -20,3 +22,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_UPTIME_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -6,8 +6,10 @@ from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_UPDATE_INTERVAL
from esphomeyaml.helpers import App, Application, variable

MakeWiFiSignalSensor = Application.MakeWiFiSignalSensor
WiFiSignalSensor = sensor.sensor_ns.WiFiSignalSensor

PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(WiFiSignalSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeWiFiSignalSensor),
    vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
}))
@@ -20,3 +22,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_WIFI_SIGNAL_SENSOR'


def to_hass_config(data, config):
    return sensor.core_to_hass_config(data, config)

@@ -45,3 +45,12 @@ def to_code(config):
    if CONF_BATTERY_LEVEL in config:
        conf = config[CONF_BATTERY_LEVEL]
        sensor.register_sensor(dev.Pmake_battery_level_sensor(conf[CONF_NAME]), conf)


def to_hass_config(data, config):
    ret = []
    for key in (CONF_TEMPERATURE, CONF_MOISTURE, CONF_ILLUMINANCE, CONF_CONDUCTIVITY,
                CONF_BATTERY_LEVEL):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

@@ -37,3 +37,11 @@ def to_code(config):
    if CONF_BATTERY_LEVEL in config:
        conf = config[CONF_BATTERY_LEVEL]
        sensor.register_sensor(dev.Pmake_battery_level_sensor(conf[CONF_NAME]), conf)


def to_hass_config(data, config):
    ret = []
    for key in (CONF_TEMPERATURE, CONF_HUMIDITY, CONF_BATTERY_LEVEL):
        if key in config:
            ret.append(sensor.core_to_hass_config(data, config[key]))
    return ret

126 esphomeyaml/components/stepper/__init__.py Normal file
@@ -0,0 +1,126 @@
import voluptuous as vol

from esphomeyaml.automation import ACTION_REGISTRY
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ACCELERATION, CONF_DECELERATION, CONF_ID, CONF_MAX_SPEED, \
    CONF_POSITION, CONF_TARGET
from esphomeyaml.helpers import Pvariable, TemplateArguments, add, add_job, esphomelib_ns, \
    get_variable, int32, templatable

PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({

})

# pylint: disable=invalid-name
stepper_ns = esphomelib_ns.namespace('stepper')
Stepper = stepper_ns.Stepper

SetTargetAction = stepper_ns.SetTargetAction
ReportPositionAction = stepper_ns.ReportPositionAction


def validate_acceleration(value):
    value = cv.string(value)
    for suffix in ('steps/s^2', 'steps/s*s', 'steps/s/s', 'steps/ss', 'steps/(s*s)'):
        if value.endswith(suffix):
            value = value[:-len(suffix)]

    if value == 'inf':
        return 1e6

    try:
        value = float(value)
    except ValueError:
        raise vol.Invalid("Expected acceleration as floating point number, got {}".format(value))

    if value <= 0:
        raise vol.Invalid("Acceleration must be larger than 0 steps/s^2!")

    return value


def validate_speed(value):
    value = cv.string(value)
    for suffix in ('steps/s', 'steps/s'):
        if value.endswith(suffix):
            value = value[:-len(suffix)]

    if value == 'inf':
        return 1e6

    try:
        value = float(value)
    except ValueError:
        raise vol.Invalid("Expected speed as floating point number, got {}".format(value))

    if value <= 0:
        raise vol.Invalid("Speed must be larger than 0 steps/s!")

    return value


STEPPER_SCHEMA = vol.Schema({
    cv.GenerateID(): cv.declare_variable_id(Stepper),
    vol.Required(CONF_MAX_SPEED): validate_speed,
    vol.Optional(CONF_ACCELERATION): validate_acceleration,
    vol.Optional(CONF_DECELERATION): validate_acceleration,
})

STEPPER_PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(STEPPER_SCHEMA.schema)


def setup_stepper_core_(stepper_var, config):
    if CONF_ACCELERATION in config:
        add(stepper_var.set_acceleration(config[CONF_ACCELERATION]))
    if CONF_DECELERATION in config:
        add(stepper_var.set_deceleration(config[CONF_DECELERATION]))
    if CONF_MAX_SPEED in config:
        add(stepper_var.set_max_speed(config[CONF_MAX_SPEED]))


def setup_stepper(stepper_var, config):
    add_job(setup_stepper_core_, stepper_var, config)


BUILD_FLAGS = '-DUSE_STEPPER'

CONF_STEPPER_SET_TARGET = 'stepper.set_target'
STEPPER_SET_TARGET_ACTION_SCHEMA = vol.Schema({
    vol.Required(CONF_ID): cv.use_variable_id(None),
    vol.Required(CONF_TARGET): cv.templatable(cv.int_),
})


@ACTION_REGISTRY.register(CONF_STEPPER_SET_TARGET, STEPPER_SET_TARGET_ACTION_SCHEMA)
def stepper_set_target_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_set_target_action(template_arg)
    type = SetTargetAction.template(arg_type)
    action = Pvariable(action_id, rhs, type=type)
    for template_ in templatable(config[CONF_TARGET], arg_type, int32):
        yield None
    add(action.set_target(template_))
    yield action


CONF_STEPPER_REPORT_POSITION = 'stepper.report_position'
STEPPER_REPORT_POSITION_ACTION_SCHEMA = vol.Schema({
    vol.Required(CONF_ID): cv.use_variable_id(None),
    vol.Required(CONF_POSITION): cv.templatable(cv.int_),
})


@ACTION_REGISTRY.register(CONF_STEPPER_REPORT_POSITION, STEPPER_REPORT_POSITION_ACTION_SCHEMA)
def stepper_report_position_to_code(config, action_id, arg_type):
    template_arg = TemplateArguments(arg_type)
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_report_position_action(template_arg)
    type = ReportPositionAction.template(arg_type)
    action = Pvariable(action_id, rhs, type=type)
    for template_ in templatable(config[CONF_POSITION], arg_type, int32):
        yield None
    add(action.set_target(template_))
    yield action
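The two validators above accept values such as '250 steps/s' or '1000 steps/s^2' plus the special token 'inf'. A small standalone re-implementation of just the suffix stripping and 'inf' handling, for illustration only:

# Illustrative sketch only - mirrors validate_speed()/validate_acceleration().
def parse_with_suffixes(value, suffixes):
    for suffix in suffixes:
        if value.endswith(suffix):
            value = value[:-len(suffix)]
    return 1e6 if value == 'inf' else float(value)

print(parse_with_suffixes('250 steps/s', ('steps/s',)))        # -> 250.0
print(parse_with_suffixes('1000 steps/s^2', ('steps/s^2',)))   # -> 1000.0
print(parse_with_suffixes('inf', ('steps/s',)))                # -> 1000000.0
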
35 esphomeyaml/components/stepper/a4988.py Normal file
@@ -0,0 +1,35 @@
import voluptuous as vol

from esphomeyaml import pins
from esphomeyaml.components import stepper
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_DIR_PIN, CONF_ID, CONF_SLEEP_PIN, CONF_STEP_PIN
from esphomeyaml.helpers import App, Pvariable, add, gpio_output_pin_expression

A4988 = stepper.stepper_ns.A4988

PLATFORM_SCHEMA = stepper.STEPPER_PLATFORM_SCHEMA.extend({
    vol.Required(CONF_ID): cv.declare_variable_id(A4988),
    vol.Required(CONF_STEP_PIN): pins.gpio_output_pin_schema,
    vol.Required(CONF_DIR_PIN): pins.gpio_output_pin_schema,
    vol.Optional(CONF_SLEEP_PIN): pins.gpio_output_pin_schema,
})


def to_code(config):
    for step_pin in gpio_output_pin_expression(config[CONF_STEP_PIN]):
        yield
    for dir_pin in gpio_output_pin_expression(config[CONF_DIR_PIN]):
        yield
    rhs = App.make_a4988(step_pin, dir_pin)
    a4988 = Pvariable(config[CONF_ID], rhs)

    if CONF_SLEEP_PIN in config:
        for sleep_pin in gpio_output_pin_expression(config[CONF_SLEEP_PIN]):
            yield
        add(a4988.set_sleep_pin(sleep_pin))

    stepper.setup_stepper(a4988, config)


BUILD_FLAGS = '-DUSE_A4988'
@@ -1,8 +1,10 @@
import voluptuous as vol

from esphomeyaml.automation import maybe_simple_id, ACTION_REGISTRY
from esphomeyaml.components import mqtt
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ICON, CONF_ID, CONF_INVERTED, CONF_MQTT_ID, CONF_INTERNAL
from esphomeyaml.const import CONF_ICON, CONF_ID, CONF_INVERTED, CONF_MQTT_ID, CONF_INTERNAL, \
    CONF_OPTIMISTIC
from esphomeyaml.helpers import App, Pvariable, add, esphomelib_ns, setup_mqtt_component, \
    TemplateArguments, get_variable

@@ -100,3 +102,14 @@ def switch_turn_on_to_code(config, action_id, arg_type):
    rhs = var.make_turn_on_action(template_arg)
    type = TurnOnAction.template(arg_type)
    yield Pvariable(action_id, rhs, type=type)


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'switch', config, include_state=True, include_command=True)
    if ret is None:
        return None
    if CONF_ICON in config:
        ret['icon'] = config[CONF_ICON]
    if CONF_OPTIMISTIC in config:
        ret['optimistic'] = config[CONF_OPTIMISTIC]
    return ret

@@ -1,17 +1,18 @@
import voluptuous as vol

import esphomeyaml.config_validation as cv
from esphomeyaml import pins
from esphomeyaml.components import switch
from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_PIN, CONF_POWER_ON_VALUE
from esphomeyaml.helpers import App, Application, gpio_output_pin_expression, variable, add
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_PIN
from esphomeyaml.helpers import App, Application, gpio_output_pin_expression, variable

MakeGPIOSwitch = Application.MakeGPIOSwitch
GPIOSwitch = switch.switch_ns.GPIOSwitch

PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(GPIOSwitch),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeGPIOSwitch),
    vol.Required(CONF_PIN): pins.gpio_output_pin_schema,
    vol.Optional(CONF_POWER_ON_VALUE): cv.boolean,
}))


@@ -22,10 +23,11 @@ def to_code(config):
    rhs = App.make_gpio_switch(config[CONF_NAME], pin)
    gpio = variable(config[CONF_MAKE_ID], rhs)

    if CONF_POWER_ON_VALUE in config:
        add(gpio.Pswitch_.set_power_on_value(config[CONF_POWER_ON_VALUE]))

    switch.setup_switch(gpio.Pswitch_, gpio.Pmqtt, config)


BUILD_FLAGS = '-DUSE_GPIO_SWITCH'


def to_hass_config(data, config):
    return switch.core_to_hass_config(data, config)

@@ -6,8 +6,10 @@ from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME, CONF_OUTPUT
from esphomeyaml.helpers import App, Application, get_variable, variable

MakeOutputSwitch = Application.MakeOutputSwitch
OutputSwitch = switch.switch_ns.OutputSwitch

PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(OutputSwitch),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeOutputSwitch),
    vol.Required(CONF_OUTPUT): cv.use_variable_id(None),
}))
@@ -23,3 +25,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_OUTPUT_SWITCH'


def to_hass_config(data, config):
    return switch.core_to_hass_config(data, config)

@@ -1,18 +1,18 @@
import voluptuous as vol

import esphomeyaml.config_validation as cv
from esphomeyaml.components import switch
from esphomeyaml.components.remote_transmitter import RC_SWITCH_RAW_SCHEMA, \
    RC_SWITCH_TYPE_A_SCHEMA, RC_SWITCH_TYPE_B_SCHEMA, RC_SWITCH_TYPE_C_SCHEMA, \
    RC_SWITCH_TYPE_D_SCHEMA, RemoteTransmitterComponent, binary_code, build_rc_switch_protocol, \
    remote_ns
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ADDRESS, CONF_CARRIER_FREQUENCY, CONF_CHANNEL, CONF_CODE, \
    CONF_COMMAND, CONF_DATA, CONF_DEVICE, CONF_FAMILY, CONF_GROUP, CONF_INVERTED, CONF_LG, \
    CONF_NAME, CONF_NBITS, CONF_NEC, CONF_PANASONIC, CONF_PROTOCOL, CONF_RAW, CONF_RC_SWITCH_RAW, \
    CONF_RC_SWITCH_TYPE_A, CONF_RC_SWITCH_TYPE_B, CONF_RC_SWITCH_TYPE_C, CONF_RC_SWITCH_TYPE_D, \
    CONF_REPEAT, CONF_SAMSUNG, CONF_SONY, CONF_STATE, CONF_TIMES, \
    CONF_WAIT_TIME
from esphomeyaml.helpers import App, ArrayInitializer, Pvariable, add, get_variable
from esphomeyaml.helpers import ArrayInitializer, Pvariable, add, get_variable

DEPENDENCIES = ['remote_transmitter']

@@ -39,6 +39,7 @@ RCSwitchTypeDTransmitter = remote_ns.RCSwitchTypeDTransmitter
validate_raw_data = [vol.Any(vol.Coerce(int), cv.time_period_microseconds)]

PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(RemoteTransmitter),
    vol.Optional(CONF_LG): vol.Schema({
        vol.Required(CONF_DATA): cv.hex_uint32_t,
        vol.Optional(CONF_NBITS, default=28): vol.All(vol.Coerce(int), cv.one_of(28, 32)),
@@ -130,7 +131,7 @@ def to_code(config):
    remote = None
    for remote in get_variable(config[CONF_REMOTE_TRANSMITTER_ID]):
        yield
    rhs = App.register_component(transmitter_base(config))
    rhs = transmitter_base(config)
    transmitter = Pvariable(config[CONF_TRANSMITTER_ID], rhs)

    if CONF_REPEAT in config:
@@ -146,3 +147,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_REMOTE_TRANSMITTER'


def to_hass_config(data, config):
    return switch.core_to_hass_config(data, config)

@@ -6,8 +6,10 @@ from esphomeyaml.const import CONF_INVERTED, CONF_MAKE_ID, CONF_NAME
|
||||
from esphomeyaml.helpers import App, Application, variable
|
||||
|
||||
MakeRestartSwitch = Application.MakeRestartSwitch
|
||||
RestartSwitch = switch.switch_ns.RestartSwitch
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(RestartSwitch),
|
||||
cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeRestartSwitch),
|
||||
vol.Optional(CONF_INVERTED): cv.invalid("Restart switches do not support inverted mode!"),
|
||||
}))
|
||||
@@ -20,3 +22,7 @@ def to_code(config):
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_RESTART_SWITCH'
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return switch.core_to_hass_config(data, config)
|
||||
|
||||
@@ -6,8 +6,10 @@ from esphomeyaml.const import CONF_INVERTED, CONF_MAKE_ID, CONF_NAME
|
||||
from esphomeyaml.helpers import App, Application, variable
|
||||
|
||||
MakeShutdownSwitch = Application.MakeShutdownSwitch
|
||||
ShutdownSwitch = switch.switch_ns.ShutdownSwitch
|
||||
|
||||
PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
|
||||
cv.GenerateID(): cv.declare_variable_id(ShutdownSwitch),
|
||||
cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeShutdownSwitch),
|
||||
vol.Optional(CONF_INVERTED): cv.invalid("Shutdown switches do not support inverted mode!"),
|
||||
}))
|
||||
@@ -20,3 +22,7 @@ def to_code(config):
|
||||
|
||||
|
||||
BUILD_FLAGS = '-DUSE_SHUTDOWN_SWITCH'
|
||||
|
||||
|
||||
def to_hass_config(data, config):
|
||||
return switch.core_to_hass_config(data, config)
|
||||
|
||||
@@ -4,18 +4,21 @@ import esphomeyaml.config_validation as cv
from esphomeyaml import automation
from esphomeyaml.components import switch
from esphomeyaml.const import CONF_LAMBDA, CONF_MAKE_ID, CONF_NAME, CONF_TURN_OFF_ACTION, \
    CONF_TURN_ON_ACTION, CONF_OPTIMISTIC
    CONF_TURN_ON_ACTION, CONF_OPTIMISTIC, CONF_RESTORE_STATE
from esphomeyaml.helpers import App, Application, process_lambda, variable, NoArg, add, bool_, \
    optional

MakeTemplateSwitch = Application.MakeTemplateSwitch
TemplateSwitch = switch.switch_ns.TemplateSwitch

PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(TemplateSwitch),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeTemplateSwitch),
    vol.Optional(CONF_LAMBDA): cv.lambda_,
    vol.Optional(CONF_OPTIMISTIC): cv.boolean,
    vol.Optional(CONF_TURN_OFF_ACTION): automation.validate_automation(single=True),
    vol.Optional(CONF_TURN_ON_ACTION): automation.validate_automation(single=True),
    vol.Optional(CONF_RESTORE_STATE): cv.boolean,
}), cv.has_at_least_one_key(CONF_LAMBDA, CONF_OPTIMISTIC))
@@ -40,5 +43,12 @@ def to_code(config):
    if CONF_OPTIMISTIC in config:
        add(make.Ptemplate_.set_optimistic(config[CONF_OPTIMISTIC]))

    if CONF_RESTORE_STATE in config:
        add(make.Ptemplate_.set_restore_state(config[CONF_RESTORE_STATE]))


BUILD_FLAGS = '-DUSE_TEMPLATE_SWITCH'


def to_hass_config(data, config):
    return switch.core_to_hass_config(data, config)
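The template switch schema above wraps the per-key validators in cv.has_at_least_one_key(CONF_LAMBDA, CONF_OPTIMISTIC), so the new restore_state flag stays optional while at least one of lambda/optimistic remains mandatory. A rough standalone sketch of that composition with plain voluptuous; the keys are spelled as literal strings and has_at_least_one_key is re-implemented here purely for illustration:

import voluptuous as vol


def has_at_least_one_key(*keys):
    # Stand-in for cv.has_at_least_one_key: accept the dict only if one of `keys` is present.
    def validator(obj):
        if not any(k in obj for k in keys):
            raise vol.Invalid("must contain at least one of {}".format(', '.join(keys)))
        return obj
    return validator


SCHEMA = vol.All(
    vol.Schema({
        vol.Optional('lambda'): str,
        vol.Optional('optimistic'): bool,
        vol.Optional('restore_state'): bool,
    }),
    has_at_least_one_key('lambda', 'optimistic'),
)

SCHEMA({'optimistic': True, 'restore_state': True})  # accepted
# SCHEMA({'restore_state': True})                    # rejected: neither lambda nor optimistic set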
@@ -10,6 +10,7 @@ from esphomeyaml.helpers import App, Application, ArrayInitializer, get_variable
DEPENDENCIES = ['uart']

MakeUARTSwitch = Application.MakeUARTSwitch
UARTSwitch = switch.switch_ns.UARTSwitch


def validate_data(value):
@@ -23,6 +24,7 @@ def validate_data(value):


PLATFORM_SCHEMA = cv.nameable(switch.SWITCH_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(UARTSwitch),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeUARTSwitch),
    cv.GenerateID(CONF_UART_ID): cv.use_variable_id(UARTComponent),
    vol.Required(CONF_DATA): validate_data,
@@ -43,3 +45,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_UART_SWITCH'


def to_hass_config(data, config):
    return switch.core_to_hass_config(data, config)
@@ -1,6 +1,7 @@
import voluptuous as vol

from esphomeyaml import automation
from esphomeyaml.components import mqtt
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_ICON, CONF_ID, CONF_INTERNAL, CONF_MQTT_ID, CONF_ON_VALUE, \
    CONF_TRIGGER_ID
@@ -58,3 +59,12 @@ def register_text_sensor(var, config):


BUILD_FLAGS = '-DUSE_TEXT_SENSOR'


def core_to_hass_config(data, config):
    ret = mqtt.build_hass_config(data, 'sensor', config, include_state=True, include_command=False)
    if ret is None:
        return None
    if CONF_ICON in config:
        ret['icon'] = config[CONF_ICON]
    return ret
@@ -8,8 +8,10 @@ from esphomeyaml.helpers import App, Application, add, variable
DEPENDENCIES = ['mqtt']

MakeMQTTSubscribeTextSensor = Application.MakeMQTTSubscribeTextSensor
MQTTSubscribeTextSensor = text_sensor.text_sensor_ns.MQTTSubscribeTextSensor

PLATFORM_SCHEMA = cv.nameable(text_sensor.TEXT_SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(MQTTSubscribeTextSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeMQTTSubscribeTextSensor),
    vol.Required(CONF_TOPIC): cv.subscribe_topic,
    vol.Optional(CONF_QOS): cv.mqtt_qos,
@@ -25,3 +27,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_MQTT_SUBSCRIBE_TEXT_SENSOR'


def to_hass_config(data, config):
    return text_sensor.core_to_hass_config(data, config)
@@ -7,8 +7,10 @@ from esphomeyaml.helpers import App, Application, add, optional, process_lambda,
    variable

MakeTemplateTextSensor = Application.MakeTemplateTextSensor
TemplateTextSensor = text_sensor.text_sensor_ns.TemplateTextSensor

PLATFORM_SCHEMA = cv.nameable(text_sensor.TEXT_SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(TemplateTextSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeTemplateTextSensor),
    vol.Required(CONF_LAMBDA): cv.lambda_,
    vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
@@ -28,3 +30,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_TEMPLATE_TEXT_SENSOR'


def to_hass_config(data, config):
    return text_sensor.core_to_hass_config(data, config)
@@ -4,8 +4,10 @@ from esphomeyaml.const import CONF_MAKE_ID, CONF_NAME
from esphomeyaml.helpers import App, Application, variable

MakeVersionTextSensor = Application.MakeVersionTextSensor
VersionTextSensor = text_sensor.text_sensor_ns.VersionTextSensor

PLATFORM_SCHEMA = cv.nameable(text_sensor.TEXT_SENSOR_PLATFORM_SCHEMA.extend({
    cv.GenerateID(): cv.declare_variable_id(VersionTextSensor),
    cv.GenerateID(CONF_MAKE_ID): cv.declare_variable_id(MakeVersionTextSensor),
}))
@@ -17,3 +19,7 @@ def to_code(config):


BUILD_FLAGS = '-DUSE_VERSION_TEXT_SENSOR'


def to_hass_config(data, config):
    return text_sensor.core_to_hass_config(data, config)
@@ -8,7 +8,7 @@ import esphomeyaml.config_validation as cv
from esphomeyaml import automation
from esphomeyaml.const import CONF_CRON, CONF_DAYS_OF_MONTH, CONF_DAYS_OF_WEEK, CONF_HOURS, \
    CONF_MINUTES, CONF_MONTHS, CONF_ON_TIME, CONF_SECONDS, CONF_TIMEZONE, CONF_TRIGGER_ID
from esphomeyaml.helpers import App, NoArg, Pvariable, add, add_job, esphomelib_ns
from esphomeyaml.helpers import App, NoArg, Pvariable, add, add_job, esphomelib_ns, ArrayInitializer

_LOGGER = logging.getLogger(__name__)

@@ -47,18 +47,8 @@ def _tz_dst_str(dt):
                       _tz_timedelta(td))


def detect_tz():
    try:
        import tzlocal
        import pytz
    except ImportError:
        raise vol.Invalid("No timezone specified and 'tzlocal' not installed. To automatically "
                          "detect the timezone please install tzlocal (pip2 install tzlocal)")
    try:
        tz = tzlocal.get_localzone()
    except pytz.exceptions.UnknownTimeZoneError:
        _LOGGER.warning("Could not auto-detect timezone. Using UTC...")
        return 'UTC'
def convert_tz(pytz_obj):
    tz = pytz_obj

    def _dst(dt, is_dst):
        try:
@@ -107,18 +97,34 @@ def detect_tz():
    tzbase = '{}{}'.format(norm_tzname, _tz_timedelta(-1 * norm_utcoffset))
    if dst_begins is None:
        # No DST in this timezone
        _LOGGER.info("Auto-detected timezone '%s' with UTC offset %s",
        _LOGGER.info("Detected timezone '%s' with UTC offset %s",
                     norm_tzname, _tz_timedelta(norm_utcoffset))
        return tzbase
    tzext = '{}{},{},{}'.format(dst_tzname, _tz_timedelta(-1 * dst_utcoffset),
                                _tz_dst_str(dst_begins), _tz_dst_str(dst_ends))
    _LOGGER.info("Auto-detected timezone '%s' with UTC offset %s and daylight savings time from "
    _LOGGER.info("Detected timezone '%s' with UTC offset %s and daylight savings time from "
                 "%s to %s",
                 norm_tzname, _tz_timedelta(norm_utcoffset), dst_begins.strftime("%x %X"),
                 dst_ends.strftime("%x %X"))
    return tzbase + tzext


def detect_tz():
    try:
        import tzlocal
        import pytz
    except ImportError:
        raise vol.Invalid("No timezone specified and 'tzlocal' not installed. To automatically "
                          "detect the timezone please install tzlocal (pip2 install tzlocal)")
    try:
        tz = tzlocal.get_localzone()
    except pytz.exceptions.UnknownTimeZoneError:
        _LOGGER.warning("Could not auto-detect timezone. Using UTC...")
        return 'UTC'

    return convert_tz(tz)
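convert_tz above boils a pytz timezone object down to the UTC offset, abbreviation and DST transition data that end up in a single TZ string, while detect_tz now only locates the local zone and delegates the conversion. A small illustration of the raw values it works from, using only pytz; the zone name and dates below are arbitrary examples, not taken from this diff:

from datetime import datetime

import pytz

tz = pytz.timezone('Europe/Vienna')  # example zone

winter = datetime(2018, 1, 15)
summer = datetime(2018, 7, 15)

# Standard time vs. DST: the abbreviation and UTC offset differ, and these are
# the pieces convert_tz encodes into the generated TZ string.
print(tz.tzname(winter), tz.utcoffset(winter))  # e.g. CET 1:00:00
print(tz.tzname(summer), tz.utcoffset(summer))  # e.g. CEST 2:00:00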
def _parse_cron_int(value, special_mapping, message):
    special_mapping = special_mapping or {}
    if isinstance(value, (str, unicode)) and value in special_mapping:
@@ -233,8 +239,19 @@ def validate_cron_keys(value):
    return cv.has_at_least_one_key(*CRON_KEYS)(value)


def validate_tz(value):
    value = cv.string_strict(value)

    try:
        import pytz

        return convert_tz(pytz.timezone(value))
    except Exception:  # pylint: disable=broad-except
        return value


TIME_PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_TIMEZONE, default=detect_tz): cv.string,
    vol.Optional(CONF_TIMEZONE, default=detect_tz): validate_tz,
    vol.Optional(CONF_ON_TIME): automation.validate_automation({
        cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(CronTrigger),
        vol.Optional(CONF_SECONDS): validate_cron_seconds,
@@ -254,18 +271,25 @@ def setup_time_core_(time_var, config):
    for conf in config.get(CONF_ON_TIME, []):
        rhs = App.register_component(time_var.Pmake_cron_trigger())
        trigger = Pvariable(conf[CONF_TRIGGER_ID], rhs)
        for second in conf.get(CONF_SECONDS, [x for x in range(0, 61)]):
            add(trigger.add_second(second))
        for minute in conf.get(CONF_MINUTES, [x for x in range(0, 60)]):
            add(trigger.add_minute(minute))
        for hour in conf.get(CONF_HOURS, [x for x in range(0, 24)]):
            add(trigger.add_hour(hour))
        for day_of_month in conf.get(CONF_DAYS_OF_MONTH, [x for x in range(1, 32)]):
            add(trigger.add_day_of_month(day_of_month))
        for month in conf.get(CONF_MONTHS, [x for x in range(1, 13)]):
            add(trigger.add_month(month))
        for day_of_week in conf.get(CONF_DAYS_OF_WEEK, [x for x in range(1, 8)]):
            add(trigger.add_day_of_week(day_of_week))

        seconds = conf.get(CONF_SECONDS, [x for x in range(0, 61)])
        add(trigger.add_seconds(ArrayInitializer(*seconds, multiline=False)))

        minutes = conf.get(CONF_MINUTES, [x for x in range(0, 60)])
        add(trigger.add_minutes(ArrayInitializer(*minutes, multiline=False)))

        hours = conf.get(CONF_HOURS, [x for x in range(0, 24)])
        add(trigger.add_hours(ArrayInitializer(*hours, multiline=False)))

        days_of_month = conf.get(CONF_DAYS_OF_MONTH, [x for x in range(1, 32)])
        add(trigger.add_days_of_month(ArrayInitializer(*days_of_month, multiline=False)))

        months = conf.get(CONF_MONTHS, [x for x in range(1, 13)])
        add(trigger.add_months(ArrayInitializer(*months, multiline=False)))

        days_of_week = conf.get(CONF_DAYS_OF_WEEK, [x for x in range(1, 8)])
        add(trigger.add_days_of_week(ArrayInitializer(*days_of_week, multiline=False)))

        automation.build_automation(trigger, NoArg, conf)
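The rewritten setup_time_core_ above collects each cron field into a list and emits a single add_seconds/add_minutes/... call with an ArrayInitializer, instead of one add_second(...) call per value. The conf.get(...) fallbacks spell out the full ranges; as a plain-Python reminder of what those defaults expand to (no code-generation helpers involved):

# Default value lists used when a field is omitted in an on_time trigger,
# exactly as written in the conf.get(...) fallbacks above.
CRON_DEFAULTS = {
    'seconds': list(range(0, 61)),        # 0-60
    'minutes': list(range(0, 60)),        # 0-59
    'hours': list(range(0, 24)),          # 0-23
    'days_of_month': list(range(1, 32)),  # 1-31
    'months': list(range(1, 13)),         # 1-12
    'days_of_week': list(range(1, 8)),    # 1-7
}

for field, values in CRON_DEFAULTS.items():
    print(field, '->', len(values), 'values')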
@@ -3,7 +3,7 @@ import voluptuous as vol
import esphomeyaml.config_validation as cv
from esphomeyaml.components import time as time_
from esphomeyaml.const import CONF_ID, CONF_LAMBDA, CONF_SERVERS
from esphomeyaml.helpers import App, Pvariable
from esphomeyaml.helpers import App, Pvariable, add

SNTPComponent = time_.time_ns.SNTPComponent

@@ -15,8 +15,10 @@ PLATFORM_SCHEMA = time_.TIME_PLATFORM_SCHEMA.extend({


def to_code(config):
    rhs = App.make_sntp_component(*config.get(CONF_SERVERS, []))
    rhs = App.make_sntp_component()
    sntp = Pvariable(config[CONF_ID], rhs)
    if CONF_SERVERS in config:
        add(sntp.set_servers(*config[CONF_SERVERS]))

    time_.setup_time(sntp, config)
@@ -296,7 +296,8 @@ def time_period_str_colon(value):
def time_period_str_unit(value):
    """Validate and transform time period with time unit and integer value."""
    if isinstance(value, int):
        value = str(value)
        raise vol.Invalid("Don't know what '{}' means as it has no time *unit*! Did you mean "
                          "'{}s'?".format(value, value))
    elif not isinstance(value, (str, unicode)):
        raise vol.Invalid("Expected string for time period with unit.")
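With the change above, a bare integer such as 60 is no longer silently coerced to a string; the validator now fails fast and suggests appending a unit. A simplified, self-contained sketch of that behaviour, covering only the branches shown above and dropping the Python-2 unicode check:

import voluptuous as vol


def time_period_str_unit(value):
    # Simplified: reject bare integers with a hint, require a string otherwise.
    if isinstance(value, int):
        raise vol.Invalid("Don't know what '{}' means as it has no time *unit*! Did you mean "
                          "'{}s'?".format(value, value))
    if not isinstance(value, str):
        raise vol.Invalid("Expected string for time period with unit.")
    return value


print(time_period_str_unit('60s'))  # accepted
# time_period_str_unit(60)          # raises: Did you mean '60s'?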
@@ -555,8 +556,14 @@ i2c_address = hex_uint8_t


def percentage(value):
    if isinstance(value, (str, unicode)) and value.endswith('%'):
    has_percent_sign = isinstance(value, (str, unicode)) and value.endswith('%')
    if has_percent_sign:
        value = float(value[:-1].rstrip()) / 100.0
    if value > 1:
        msg = "Percentage must not be higher than 100%."
        if not has_percent_sign:
            msg += " Please put a percent sign after the number!"
        raise vol.Invalid(msg)
    return zero_to_one_float(value)
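The reworked percentage validator above remembers whether the raw value carried a percent sign, so the hint about adding one only appears when it was actually missing. A self-contained sketch of the resulting behaviour; zero_to_one_float is replaced by a plain float coercion here and the Python-2 unicode check is dropped:

import voluptuous as vol


def percentage(value):
    # Simplified version of the validator above.
    has_percent_sign = isinstance(value, str) and value.endswith('%')
    if has_percent_sign:
        value = float(value[:-1].rstrip()) / 100.0
    value = float(value)  # stands in for zero_to_one_float's coercion
    if value > 1:
        msg = "Percentage must not be higher than 100%."
        if not has_percent_sign:
            msg += " Please put a percent sign after the number!"
        raise vol.Invalid(msg)
    return value


print(percentage('75%'))  # 0.75
print(percentage(0.3))    # 0.3
# percentage(75)          # rejected, with the percent-sign hint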
@@ -2,10 +2,10 @@

MAJOR_VERSION = 1
MINOR_VERSION = 9
PATCH_VERSION = '0b1'
PATCH_VERSION = '0b4'
__short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
__version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
ESPHOMELIB_VERSION = '1.9.0b1'
ESPHOMELIB_VERSION = '1.9.0b4'

ESP_PLATFORM_ESP32 = 'ESP32'
ESP_PLATFORM_ESP8266 = 'ESP8266'
@@ -222,6 +222,7 @@ CONF_ON_PRESS = 'on_press'
CONF_ON_RELEASE = 'on_release'
CONF_ON_CLICK = 'on_click'
CONF_ON_DOUBLE_CLICK = 'on_double_click'
CONF_ON_MULTI_CLICK = 'on_multi_click'
CONF_MIN_LENGTH = 'min_length'
CONF_MAX_LENGTH = 'max_length'
CONF_ON_VALUE = 'on_value'
@@ -350,6 +351,20 @@ CONF_ARGS = 'args'
CONF_FORMAT = 'format'
CONF_COLOR_CORRECT = 'color_correct'
CONF_ON_JSON_MESSAGE = 'on_json_message'
CONF_ACCELERATION = 'acceleration'
CONF_DECELERATION = 'deceleration'
CONF_MAX_SPEED = 'max_speed'
CONF_TARGET = 'target'
CONF_POSITION = 'position'
CONF_STEP_PIN = 'step_pin'
CONF_DIR_PIN = 'dir_pin'
CONF_SLEEP_PIN = 'sleep_pin'
CONF_SEND_FIRST_AT = 'send_first_at'
CONF_TIME_ID = 'time_id'
CONF_RESTORE_STATE = 'restore_state'
CONF_TIMING = 'timing'
CONF_INVALID_COOLDOWN = 'invalid_cooldown'

ALLOWED_NAME_CHARS = u'abcdefghijklmnopqrstuvwxyz0123456789_'
ARDUINO_VERSION_ESP32_DEV = 'https://github.com/platformio/platform-espressif32.git#feature/stage'
@@ -46,6 +46,8 @@ def validate_board(value):
def validate_simple_esphomelib_version(value):
    value = cv.string_strict(value)
    if value.upper() == 'LATEST':
        if ESPHOMELIB_VERSION == 'dev':
            return validate_simple_esphomelib_version('dev')
        return {
            CONF_REPOSITORY: LIBRARY_URI_REPO,
            CONF_TAG: 'v' + ESPHOMELIB_VERSION,
@@ -53,7 +55,7 @@ def validate_simple_esphomelib_version(value):
    elif value.upper() == 'DEV':
        return {
            CONF_REPOSITORY: LIBRARY_URI_REPO,
            CONF_BRANCH: 'master'
            CONF_BRANCH: 'dev'
        }
    elif VERSION_REGEX.match(value) is not None:
        return {
Some files were not shown because too many files have changed in this diff.