Compare commits

...

37 Commits

Author SHA1 Message Date
10bceb55b8 chore: cargo lock 2025-05-03 21:08:59 -04:00
e219edb64b chore: move some dependencies to the workspace 2025-05-03 21:08:50 -04:00
da321db40b chore: remove unnecessary entries from .gitignore 2025-05-03 21:08:36 -04:00
cff48691ef feat(python-utils): a type that validates as the None Python type 2025-05-03 21:08:10 -04:00
089e96b99f chore(home-assistant): implement the revised light protocol 2025-05-03 21:07:08 -04:00
50e9ee43f7 feat(entrypoint): take arguments in preparation for a new persisted crate (featuring a Signal that serializes with postcard to a fjall database) 2025-05-03 21:05:49 -04:00
c0b27dc5f0 chore(emitter-and-signal): ext-trait is moving to a workspace dependency 2025-05-03 21:04:12 -04:00
277182a93e feat+chore(driver/kasa)!: implement the revised light protocol and reorganize that into its own module file 2025-05-03 21:03:52 -04:00
d6515521a4 chore(protocol)!: clean up traits (including making extension traits pub), trying auto implementing Toggle for GetState and SetState - we'll see how that goes 2025-05-03 20:57:47 -04:00
472ca50ec0 feat!: overhaul the light protocol (i.e. collection of traits) 2025-05-02 17:33:21 -04:00
e680f10be8 docs: explain licenses of dependencies 2025-04-22 20:48:06 -04:00
de3ab27414 docs: readme and pull request template outlining how to contribute to this project while upholding the Unlicense 2025-04-22 16:34:12 -04:00
c95d2f8d99 chore(home-assistant): no longer depend on chrono-tz 2025-04-22 01:52:56 -04:00
6e366a9c51 fix(home-assistant): activate arbitrary-value's needed pyo3 feature 2025-04-22 01:52:36 -04:00
d1daa0bc01 chore(entrypoint): no longer depend on chrono and chrono-tz 2025-04-22 01:51:45 -04:00
cc51a262ae chore: Unlicense 2025-04-22 01:32:41 -04:00
78e4be3fd9 chore: allow the Unlicense 2025-04-22 01:28:02 -04:00
fb0ad50954 chore: allow some licenses 2025-04-22 01:20:16 -04:00
fb8fb38611 chore: init for cargo-deny 2025-04-22 01:08:36 -04:00
61bb3519ca chore: resolve advisory that backoff is unmaintained 2025-04-22 01:04:18 -04:00
536743a15d chore: depend on fewer tokio features because the dependencies that need them are declaring them themselves 2025-04-22 00:49:46 -04:00
2c8ece1168 fix: make the light protocols return a Send future, resolving the warning that async fn in trait definitions is discouraged 2025-04-22 00:48:48 -04:00
18a0776edb chore: update dependencies 2025-04-21 21:26:52 -04:00
1ca8acd21e style(arbitrary-value): rearrange dependencies 2025-04-21 21:26:44 -04:00
f8b269b6ce chore: extract python_utils and home-assistant to their own crates 2025-04-21 21:26:14 -04:00
e4fd9844cc chore: Cargo.lock 2025-04-21 16:43:51 -04:00
54e0997799 chore: remove unnecessary pyproject.toml and usage.py 2025-04-21 16:43:39 -04:00
a97cf73061 chore: extract Emitter and Signal to their own crate 2025-04-21 16:43:17 -04:00
f422888d37 feat: early stage of defining protocols that can be implemented by drivers 2025-04-21 16:42:54 -04:00
f884bc7675 feat: early stages of a TP-Link Kasa driver for our smart lights 2025-04-21 16:42:14 -04:00
38e89f31f4 chore: convert into a workspace 2025-04-21 16:41:34 -04:00
ea7e9e3c53 chore: extract arbitrary into its own crate arbitrary-value 2025-04-21 16:40:27 -04:00
325cb60aa1 ci: delete maturin's autogenerated GitHub Actions workflows because I don't want them 2025-04-02 16:44:21 -04:00
ead3c6e4a9 feat: implement IntoPyObject for HomeAssistant 2025-03-22 16:53:38 -04:00
04f9aa24cf feat: implement IntoPyObject for EntityId 2025-03-22 16:53:26 -04:00
70dda580ee chore: take Python<'_> as a parameter instead of &Python 2025-03-22 16:49:45 -04:00
7b2ebc5fe9 feat: initial store implementation 2025-03-19 20:51:07 -04:00
73 changed files with 3909 additions and 546 deletions


@@ -1,48 +0,0 @@
# This file is autogenerated by maturin v1.2.3
# To update, run
#
#    maturin generate-ci github
#
name: CI
on:
  push:
    branches:
      - main
      - master
    tags:
      - '*'
  pull_request:
  workflow_dispatch:
permissions:
  contents: read
jobs:
  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [x86_64, x86, aarch64, armv7, s390x, ppc64le]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Build wheels
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist --find-interpreter
          sccache: 'true'
          manylinux: auto
  sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Build sdist
        uses: PyO3/maturin-action@v1
        with:
          command: sdist
          args: --out dist

71
.gitignore vendored

@@ -1,72 +1 @@
/target
# Byte-compiled / optimized / DLL files
__pycache__/
.pytest_cache/
*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
.venv/
env/
bin/
build/
develop-eggs/
dist/
eggs/
lib/
lib64/
parts/
sdist/
var/
include/
man/
venv/
*.egg-info/
.installed.cfg
*.egg
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
pip-selfcheck.json
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Rope
.ropeproject
# Django stuff:
*.log
*.pot
.DS_Store
# Sphinx documentation
docs/_build/
# PyCharm
.idea/
# VSCode
.vscode/
# Pyenv
.python-version

1177
Cargo.lock generated

File diff suppressed because it is too large.


@@ -1,40 +1,30 @@
[package]
name = "smart-home-in-rust-with-home-assistant"
version = "0.2.0"
edition = "2021"
[workspace]
members = [
"arbitrary-value",
"driver/kasa",
"emitter-and-signal",
"entrypoint",
"home-assistant",
"protocol",
"python-utils",
]
resolver = "2"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "smart_home_in_rust_with_home_assistant"
crate-type = ["cdylib"]
[workspace.package]
license = "Unlicense"
[dependencies]
[workspace.dependencies]
backon = "1.5"
chrono = "0.4.40"
chrono-tz = "0.10.1"
derive_more = { version = "2.0.1", features = [
"display",
"from",
"into",
"try_from",
"try_into",
] }
ijson = "0.1.4"
itertools = "0.14.0"
pyo3 = { version = "0.24.0", features = [
"auto-initialize",
"chrono",
"chrono-tz",
] }
pyo3-async-runtimes = { version = "0.24.0", features = ["tokio-runtime"] }
serde_json = "1.0.140"
shadow-rs = { version = "1.0.1", default-features = false }
deranged = "0.4"
derive_more = "2.0.1"
ext-trait = "2.0.0"
palette = "0.7"
pyo3 = "0.24.0"
pyo3-async-runtimes = "0.24.0"
serde = "1.0.219"
snafu = "0.8.5"
strum = { version = "0.27.1", features = ["derive"] }
tokio = { version = "1.32.0", features = ["rt", "rt-multi-thread", "time"] }
strum = "0.27.1"
tokio = "1.32.0"
tracing = "0.1.37"
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.17"
ulid = "1.2.0"
[build-dependencies]
shadow-rs = "1.0.1"

41
PULL_REQUEST_TEMPLATE Normal file

@@ -0,0 +1,41 @@
# Prior Issue
<!--
Please identify the tracked issue relating to this matter, or create one first if it does not yet exist.
Write it like, e.g.,
* `closes #567` (so that the issue is linked by keyword https://docs.github.com/en/issues/tracking-your-work-with-issues/using-issues/linking-a-pull-request-to-an-issue),
* `partially addresses #12 and #345`
-->
# Future Improvements
<!--
Do you believe there's any more work to be done to round out this contribution? Perhaps documentation or testing?
Any limitations in the feature or solution offered in this code? Anything else you want to say about the contribution?
Or, if this is a trivial change, you can of course just say succinctly that this is everything.
-->
---
<!--
Read, ensure you agree with, and affirm the copyright waiver below by keeping it in this pull request description.
If you do not agree, or cannot or will not fulfill the requirements described, then you must not include it in your contribution. In that case, you are advised not to submit the contribution to this project at all.
-->
# Copyright waiver for <https://gitea.katniss.top/jacob/smart-home-in-rust-with-home-assistant> (mirrored at <https://github.com/babichjacob/smart-home-in-rust-with-home-assistant>)
I dedicate any and all copyright interest in this software to the
public domain. I make this dedication for the benefit of the public at
large and to the detriment of my heirs and successors. I intend this
dedication to be an overt act of relinquishment in perpetuity of all
present and future rights to this software under copyright law.
To the best of my knowledge and belief, my contributions are either
originally authored by me or are derived from prior works which I have
verified are also in the public domain and are not subject to claims
of copyright by other parties.
To the best of my knowledge and belief, no individual, business,
organization, government, or other entity has any copyright interest
in my contributions, and I affirm that I will not make contributions
that are otherwise encumbered.

17
README.md Normal file

@@ -0,0 +1,17 @@
# smart home in Rust with Home Assistant
You probably don't want to use this if you're not me.
## Unlicense & Contributing
The contents of this repository are released under the [Unlicense](UNLICENSE). Cargo-based dependencies of this project use free software licenses listed under `allow` in [the `cargo-deny` configuration](deny.toml). [Home Assistant itself is Apache 2.0](https://www.home-assistant.io/developers/license/), and the libraries it uses may be licensed differently and cannot be trivially tracked from here.
Please create an issue before working on a pull request. That way you'll know up front whether the idea you have in mind will actually be incorporated into the project, without first having to acquaint yourself with the project internals. It also opens the floor for someone else to do the implementation work for you.
Some [existing issues are labeled straightforward](https://gitea.katniss.top/jacob/smart-home-in-rust-with-home-assistant/issues?labels=42) and expected to be the easiest to work on, if you'd like to try.
Any pull request you make to this repository must
1. contain exclusively commits that are [cryptographically verified](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification) to have been authored by you.
2. be explicitly dedicated to the public domain. You can do this by retaining the copyright waiver in [the pull request template](PULL_REQUEST_TEMPLATE).
Your contribution will be declined if it does not ensure this project remains completely free and unencumbered by anyone's copyright monopoly.

24
UNLICENSE Normal file

@@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <https://unlicense.org/>


@@ -0,0 +1,17 @@
[package]
name = "arbitrary-value"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
[features]
pyo3 = ["dep:pyo3"]
[dependencies]
chrono = { workspace = true }
chrono-tz = { workspace = true }
derive_more = { workspace = true }
ijson = "0.1.4"
itertools = "0.14.0"
pyo3 = { workspace = true, optional = true, features = ["chrono", "chrono-tz"] }
snafu = { workspace = true }


@@ -1,6 +1,7 @@
use chrono::DateTime;
use chrono_tz::Tz;
use ijson::{IArray, INumber, IObject, IString, IValue};
#[cfg(feature = "pyo3")]
use pyo3::{
exceptions::{PyTypeError, PyValueError},
prelude::*,
@@ -71,6 +72,7 @@ impl From<Arbitrary> for IValue {
}
}
#[cfg(feature = "pyo3")]
impl<'py> FromPyObject<'py> for Arbitrary {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
if let Ok(map_key) = ob.extract::<MapKey>() {
@@ -91,6 +93,7 @@ impl<'py> FromPyObject<'py> for Arbitrary {
}
}
#[cfg(feature = "pyo3")]
impl<'py> IntoPyObject<'py> for Arbitrary {
type Target = PyAny;


@@ -1,7 +1,7 @@
use pyo3::IntoPyObject;
use snafu::Snafu;
#[derive(Debug, Clone, derive_more::Into, IntoPyObject)]
#[cfg_attr(feature = "pyo3", derive(pyo3::IntoPyObject))]
#[derive(Debug, Clone, derive_more::Into)]
pub struct FiniteF64(f64);
#[derive(Debug, Snafu)]


@@ -2,3 +2,5 @@ pub mod arbitrary;
pub mod finite_f64;
pub mod map;
pub mod map_key;
pub use arbitrary::*;


@@ -0,0 +1,7 @@
use std::collections::BTreeMap;
use super::{arbitrary::Arbitrary, map_key::MapKey};
#[cfg_attr(feature = "pyo3", derive(pyo3::FromPyObject, pyo3::IntoPyObject))]
#[derive(Debug, Clone, Default, derive_more::From, derive_more::Into)]
pub struct Map(pub BTreeMap<MapKey, Arbitrary>);


@@ -4,6 +4,7 @@ use chrono::DateTime;
use chrono_tz::Tz;
use ijson::IString;
use itertools::Itertools;
#[cfg(feature = "pyo3")]
use pyo3::{
exceptions::PyTypeError,
prelude::*,
@@ -41,6 +42,7 @@ impl Display for MapKey {
}
}
#[cfg(feature = "pyo3")]
impl<'py> FromPyObject<'py> for MapKey {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
if let Ok(_none) = ob.downcast::<PyNone>() {
@@ -62,6 +64,7 @@ impl<'py> FromPyObject<'py> for MapKey {
}
}
#[cfg(feature = "pyo3")]
impl<'py> IntoPyObject<'py> for MapKey {
type Target = PyAny;

240
deny.toml Normal file

@@ -0,0 +1,240 @@
# This template contains all of the possible sections and their default values
# Note that all fields that take a lint level have these possible values:
# * deny - An error will be produced and the check will fail
# * warn - A warning will be produced, but the check will not fail
# * allow - No warning or error will be produced, though in some cases a note
# will be
# The values provided in this template are the default values that will be used
# when any section or field is not specified in your own configuration
# Root options
# The graph table configures how the dependency graph is constructed and thus
# which crates the checks are performed against
[graph]
# If 1 or more target triples (and optionally, target_features) are specified,
# only the specified targets will be checked when running `cargo deny check`.
# This means, if a particular package is only ever used as a target specific
# dependency, such as, for example, the `nix` crate only being used via the
# `target_family = "unix"` configuration, that only having windows targets in
# this list would mean the nix crate, as well as any of its exclusive
# dependencies not shared by any other crates, would be ignored, as the target
# list here is effectively saying which targets you are building for.
targets = [
# The triple can be any string, but only the target triples built in to
# rustc (as of 1.40) can be checked against actual config expressions
#"x86_64-unknown-linux-musl",
# You can also specify which target_features you promise are enabled for a
# particular target. target_features are currently not validated against
# the actual valid features supported by the target architecture.
#{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
]
# When creating the dependency graph used as the source of truth when checks are
# executed, this field can be used to prune crates from the graph, removing them
# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate
# is pruned from the graph, all of its dependencies will also be pruned unless
# they are connected to another crate in the graph that hasn't been pruned,
# so it should be used with care. The identifiers are [Package ID Specifications]
# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html)
#exclude = []
# If true, metadata will be collected with `--all-features`. Note that this can't
# be toggled off if true, if you want to conditionally enable `--all-features` it
# is recommended to pass `--all-features` on the cmd line instead
all-features = false
# If true, metadata will be collected with `--no-default-features`. The same
# caveat with `all-features` applies
no-default-features = false
# If set, these feature will be enabled when collecting metadata. If `--features`
# is specified on the cmd line they will take precedence over this option.
#features = []
# The output table provides options for how/if diagnostics are outputted
[output]
# When outputting inclusion graphs in diagnostics that include features, this
# option can be used to specify the depth at which feature edges will be added.
# This option is included since the graphs can be quite large and the addition
# of features from the crate(s) to all of the graph roots can be far too verbose.
# This option can be overridden via `--feature-depth` on the cmd line
feature-depth = 1
# This section is considered when running `cargo deny check advisories`
# More documentation for the advisories section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
[advisories]
# The path where the advisory databases are cloned/fetched into
#db-path = "$CARGO_HOME/advisory-dbs"
# The url(s) of the advisory databases to use
#db-urls = ["https://github.com/rustsec/advisory-db"]
# A list of advisory IDs to ignore. Note that ignored advisories will still
# output a note when they are encountered.
ignore = [
#"RUSTSEC-0000-0000",
#{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" },
#"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish
#{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" },
]
# If this is true, then cargo deny will use the git executable to fetch advisory database.
# If this is false, then it uses a built-in git library.
# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
# See Git Authentication for more information about setting up git authentication.
#git-fetch-with-cli = true
# This section is considered when running `cargo deny check licenses`
# More documentation for the licenses section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
[licenses]
# List of explicitly allowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
allow = [
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
"BSD-3-Clause",
"MIT",
"MPL-2.0",
"Unicode-3.0",
"Unlicense",
"Zlib",
]
# The confidence threshold for detecting a license from license text.
# The higher the value, the more closely the license text must be to the
# canonical license text of a valid SPDX license file.
# [possible values: any between 0.0 and 1.0].
confidence-threshold = 0.8
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
# aren't accepted for every possible crate as with the normal allow list
exceptions = [
# Each entry is the crate and version constraint, and its specific allow
# list
#{ allow = ["Zlib"], crate = "adler32" },
]
# Some crates don't have (easily) machine readable licensing information,
# adding a clarification entry for it allows you to manually specify the
# licensing information
#[[licenses.clarify]]
# The package spec the clarification applies to
#crate = "ring"
# The SPDX expression for the license requirements of the crate
#expression = "MIT AND ISC AND OpenSSL"
# One or more files in the crate's source used as the "source of truth" for
# the license expression. If the contents match, the clarification will be used
# when running the license check, otherwise the clarification will be ignored
# and the crate will be checked normally, which may produce warnings or errors
# depending on the rest of your configuration
#license-files = [
# Each entry is a crate relative path, and the (opaque) hash of its contents
#{ path = "LICENSE", hash = 0xbd0eed23 }
#]
[licenses.private]
# If true, ignores workspace crates that aren't published, or are only
# published to private registries.
# To see how to mark a crate as unpublished (to the official registry),
# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
ignore = false
# One or more private registries that you might publish crates to, if a crate
# is only published to private registries, and ignore is true, the crate will
# not have its license(s) checked
registries = [
#"https://sekretz.com/registry
]
# This section is considered when running `cargo deny check bans`.
# More documentation about the 'bans' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
[bans]
# Lint level for when multiple versions of the same crate are detected
multiple-versions = "warn"
# Lint level for when a crate version requirement is `*`
wildcards = "allow"
# The graph highlighting used when creating dotgraphs for crates
# with multiple versions
# * lowest-version - The path to the lowest versioned duplicate is highlighted
# * simplest-path - The path to the version with the fewest edges is highlighted
# * all - Both lowest-version and simplest-path are used
highlight = "all"
# The default lint level for `default` features for crates that are members of
# the workspace that is being checked. This can be overridden by allowing/denying
# `default` on a crate-by-crate basis if desired.
workspace-default-features = "allow"
# The default lint level for `default` features for external crates that are not
# members of the workspace. This can be overridden by allowing/denying `default`
# on a crate-by-crate basis if desired.
external-default-features = "allow"
# List of crates that are allowed. Use with care!
allow = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" },
]
# List of crates to deny
deny = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" },
# Wrapper crates can optionally be specified to allow the crate when it
# is a direct dependency of the otherwise banned crate
#{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] },
]
# List of features to allow/deny
# Each entry the name of a crate and a version range. If version is
# not specified, all versions will be matched.
#[[bans.features]]
#crate = "reqwest"
# Features to not allow
#deny = ["json"]
# Features to allow
#allow = [
# "rustls",
# "__rustls",
# "__tls",
# "hyper-rustls",
# "rustls",
# "rustls-pemfile",
# "rustls-tls-webpki-roots",
# "tokio-rustls",
# "webpki-roots",
#]
# If true, the allowed features must exactly match the enabled feature set. If
# this is set there is no point setting `deny`
#exact = true
# Certain crates/versions that will be skipped when doing duplicate detection.
skip = [
#"ansi_term@0.11.0",
#{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" },
]
# Similarly to `skip` allows you to skip certain crates during duplicate
# detection. Unlike skip, it also includes the entire tree of transitive
# dependencies starting at the specified crate, up to a certain depth, which is
# by default infinite.
skip-tree = [
#"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies
#{ crate = "ansi_term@0.11.0", depth = 20 },
]
# This section is considered when running `cargo deny check sources`.
# More documentation about the 'sources' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
[sources]
# Lint level for what to happen when a crate from a crate registry that is not
# in the allow list is encountered
unknown-registry = "warn"
# Lint level for what to happen when a crate from a git repository that is not
# in the allow list is encountered
unknown-git = "warn"
# List of URLs for allowed crate registries. Defaults to the crates.io index
# if not specified. If it is specified but empty, no registries are allowed.
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
# List of URLs for allowed Git repositories
allow-git = []
[sources.allow-org]
# github.com organizations to allow git sources for
github = []
# gitlab.com organizations to allow git sources for
gitlab = []
# bitbucket.org organizations to allow git sources for
bitbucket = []

20
driver/kasa/Cargo.toml Normal file

@@ -0,0 +1,20 @@
[package]
name = "driver-kasa"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
[dependencies]
backon = { workspace = true }
deranged = { workspace = true }
derive_more = { workspace = true, features = ["from"] }
mac_address = { version = "1.1.8", features = ["serde"] }
palette = { workspace = true }
protocol = { path = "../../protocol" }
serde = { workspace = true, features = ["derive"] }
serde_json = "1.0.140"
serde_repr = "0.1.20"
serde_with = "3.12.0"
snafu = { workspace = true }
tokio = { workspace = true, features = ["io-util", "net", "sync", "time"] }
tracing = { workspace = true }


@@ -0,0 +1,278 @@
use crate::messages::{
GetSysInfo, GetSysInfoResponse, LB130USSys, LightState, Off, On, SetLightLastOn, SetLightOff,
SetLightState, SetLightStateArgs, SetLightStateResponse, SetLightTo, SysInfo,
};
use backon::{FibonacciBuilder, Retryable};
use serde::{Deserialize, Serialize};
use snafu::{ResultExt, Snafu};
use std::{io, net::SocketAddr, num::NonZero, time::Duration};
use tokio::{
io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, BufReader, BufWriter},
net::TcpStream,
sync::{mpsc, oneshot},
time::timeout,
};
struct XorEncryption<const INITIAL_KEY: u8>;
impl<const INITIAL_KEY: u8> XorEncryption<INITIAL_KEY> {
fn encrypt_in_place(bytes: &mut [u8]) {
let mut key = INITIAL_KEY;
for unencrypted_byte in bytes {
let encrypted_byte = key ^ *unencrypted_byte;
key = encrypted_byte;
*unencrypted_byte = encrypted_byte;
}
}
fn decrypt_in_place(bytes: &mut [u8]) {
let mut key = INITIAL_KEY;
for encrypted_byte in bytes {
let unencrypted_byte = key ^ *encrypted_byte;
key = *encrypted_byte;
*encrypted_byte = unencrypted_byte;
}
}
}
fn into_encrypted(mut msg: Vec<u8>) -> Vec<u8> {
let length = msg.len() as u32;
let big_endian = length.to_be_bytes();
XorEncryption::<171>::encrypt_in_place(&mut msg);
let all_together = big_endian.into_iter().chain(msg);
all_together.collect()
}
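// A minimal round-trip sketch (illustrative only, not part of the handle's API)
// of the framing implemented above: a 4-byte big-endian length prefix followed
// by the XOR-autokey "encrypted" payload, using the same initial key of 171.
#[cfg(test)]
mod framing_sketch {
    use super::{into_encrypted, XorEncryption};

    #[test]
    fn length_prefix_and_round_trip() {
        let original = br#"{"system":{"get_sysinfo":null}}"#.to_vec();

        let framed = into_encrypted(original.clone());

        // The first four bytes carry the payload length, big endian.
        assert_eq!(&framed[..4], &(original.len() as u32).to_be_bytes()[..]);

        // Stripping the prefix and decrypting recovers the plaintext.
        let mut payload = framed[4..].to_vec();
        XorEncryption::<171>::decrypt_in_place(&mut payload);
        assert_eq!(payload, original);
    }
}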
#[derive(Debug, Snafu)]
pub enum CommunicationError {
SerializeError { source: serde_json::Error },
WriteError { source: std::io::Error },
ReadError { source: std::io::Error },
DeserializeError { source: serde_json::Error },
WrongDevice,
}
fn should_try_reconnecting(communication_error: &CommunicationError) -> bool {
matches!(
communication_error,
CommunicationError::WriteError { .. } | CommunicationError::ReadError { .. }
)
}
#[derive(Debug)]
enum LB130USMessage {
GetSysInfo(oneshot::Sender<Result<LB130USSys, CommunicationError>>),
SetLightState(
SetLightStateArgs,
oneshot::Sender<Result<SetLightStateResponse, CommunicationError>>,
),
}
#[tracing::instrument(skip(messages))]
async fn lb130us_actor(
addr: SocketAddr,
disconnect_after_idle: Duration,
mut messages: mpsc::Receiver<LB130USMessage>,
) {
let mut connection_cell = None;
loop {
let (connection, message) = match &mut connection_cell {
Some(connection) => match timeout(disconnect_after_idle, messages.recv()).await {
Ok(Some(message)) => (connection, message),
Ok(None) => return,
Err(timed_out) => {
tracing::warn!(
?addr,
?timed_out,
"disconnecting from the LB130(US) because the idle timeout has been reached",
);
connection_cell.take();
continue;
}
},
None => {
let Some(message) = messages.recv().await else {
return;
};
tracing::info!(
"connecting for a first time / reconnecting after having gone idle..."
);
match (|| async {
let stream = TcpStream::connect(addr).await?;
let (reader, writer) = stream.into_split();
let buf_reader = BufReader::new(reader);
let buf_writer = BufWriter::new(writer);
Ok((buf_reader, buf_writer))
})
.retry(FibonacciBuilder::default())
.notify(|err: &io::Error, duration| {
tracing::error!(?err, ?duration);
})
.await
{
Ok(connection) => (connection_cell.insert(connection), message),
Err(err) => {
tracing::error!(?addr, ?err, "error connecting to an LB130(US)");
continue;
}
}
}
};
let (reader, writer) = connection;
tracing::info!("yay connected and got a message");
match message {
LB130USMessage::GetSysInfo(callback) => {
let res = handle_get_sysinfo(writer, reader).await;
if let Err(communication_error) = &res {
if should_try_reconnecting(communication_error) {
connection_cell.take();
}
}
let _ = callback.send(res);
}
LB130USMessage::SetLightState(args, callback) => {
let res = handle_set_light_state(writer, reader, args).await;
if let Err(communication_error) = &res {
if should_try_reconnecting(communication_error) {
connection_cell.take();
}
}
let _ = callback.send(res);
}
}
}
}
#[tracing::instrument(skip(writer, reader, request))]
async fn send_request<
AW: AsyncWrite + Unpin,
AR: AsyncRead + Unpin,
Request: Serialize,
Response: for<'de> Deserialize<'de>,
>(
writer: &mut AW,
reader: &mut AR,
request: &Request,
) -> Result<Response, CommunicationError> {
let outgoing = serde_json::to_vec(request).context(SerializeSnafu)?;
tracing::info!(?outgoing);
let encrypted_outgoing = into_encrypted(outgoing);
tracing::info!(?encrypted_outgoing);
writer
.write_all(&encrypted_outgoing)
.await
.context(WriteSnafu)?;
writer.flush().await.context(WriteSnafu)?;
tracing::info!("sent it, now about to try to get a response");
let incoming_length = reader.read_u32().await.context(ReadSnafu)?;
tracing::info!(?incoming_length);
let mut incoming_message = Vec::new();
incoming_message.resize(incoming_length as usize, 0);
reader
.read_exact(&mut incoming_message)
.await
.context(ReadSnafu)?;
XorEncryption::<171>::decrypt_in_place(&mut incoming_message);
tracing::info!(?incoming_message);
let response_as_json: serde_json::Value =
serde_json::from_slice(&incoming_message).context(DeserializeSnafu)?;
tracing::info!(?response_as_json);
let response = Response::deserialize(response_as_json).context(DeserializeSnafu)?;
Ok(response)
}
#[tracing::instrument(skip(writer, reader))]
async fn handle_get_sysinfo<AW: AsyncWrite + Unpin, AR: AsyncRead + Unpin>(
writer: &mut AW,
reader: &mut AR,
) -> Result<LB130USSys, CommunicationError> {
let request = GetSysInfo;
let response: GetSysInfoResponse = send_request(writer, reader, &request).await?;
let SysInfo::LB130US(lb130us) = response.system.get_sysinfo else {
return Err(CommunicationError::WrongDevice);
};
tracing::info!(?lb130us);
Ok(lb130us)
}
#[tracing::instrument(skip(writer, reader))]
async fn handle_set_light_state<AW: AsyncWrite + Unpin, AR: AsyncRead + Unpin>(
writer: &mut AW,
reader: &mut AR,
args: SetLightStateArgs,
) -> Result<SetLightStateResponse, CommunicationError> {
let request = SetLightState(args);
send_request(writer, reader, &request).await
}
#[derive(Debug, Clone)]
pub struct LB130USHandle {
sender: mpsc::Sender<LB130USMessage>,
}
#[derive(Debug, Snafu)]
pub enum HandleError {
CommunicationError { source: CommunicationError },
Dead,
}
impl LB130USHandle {
pub fn new(addr: SocketAddr, disconnect_after_idle: Duration, buffer: NonZero<usize>) -> Self {
let (sender, receiver) = mpsc::channel(buffer.get());
tokio::spawn(lb130us_actor(addr, disconnect_after_idle, receiver));
Self { sender }
}
pub async fn get_sysinfo(&self) -> Result<LB130USSys, HandleError> {
let (sender, receiver) = oneshot::channel();
self.sender
.send(LB130USMessage::GetSysInfo(sender))
.await
.map_err(|_| HandleError::Dead)?;
receiver
.await
.map_err(|_| HandleError::Dead)?
.context(CommunicationSnafu)
}
pub async fn set_light_state(
&self,
args: SetLightStateArgs,
) -> Result<SetLightStateResponse, HandleError> {
let (sender, receiver) = oneshot::channel();
self.sender
.send(LB130USMessage::SetLightState(args, sender))
.await
.map_err(|_| HandleError::Dead)?;
receiver
.await
.map_err(|_| HandleError::Dead)?
.context(CommunicationSnafu)
}
}


@@ -0,0 +1,97 @@
use std::convert::Infallible;
use palette::{encoding::Srgb, Hsv, IntoColor};
use protocol::light::{GetState, Kelvin, SetState, TurnToColor, TurnToTemperature};
use snafu::{ResultExt, Snafu};
use crate::{
connection::{HandleError, LB130USHandle},
messages::{
Angle, Hsb, LightState, Off, On, Percentage, SetLightHsv, SetLightLastOn, SetLightOff,
SetLightStateArgs, SetLightTo,
},
};
#[derive(Debug, Snafu)]
#[snafu(module)]
pub enum GetStateError {
HandleError { source: HandleError },
}
impl GetState for LB130USHandle {
type Error = GetStateError;
async fn get_state(&self) -> Result<protocol::light::State, Self::Error> {
let sys = self
.get_sysinfo()
.await
.context(get_state_error::HandleSnafu)?;
let light_state = sys.sys_info.light_state;
let state = match light_state {
LightState::On { .. } => protocol::light::State::On,
LightState::Off { .. } => protocol::light::State::Off,
};
Ok(state)
}
}
#[derive(Debug, Snafu)]
#[snafu(module)]
pub enum SetStateError {
HandleError { source: HandleError },
}
impl SetState for LB130USHandle {
type Error = SetStateError;
async fn set_state(&mut self, state: protocol::light::State) -> Result<(), Self::Error> {
let to = match state {
protocol::light::State::Off => SetLightTo::Off(SetLightOff { on_off: Off }),
protocol::light::State::On => SetLightTo::LastOn(SetLightLastOn { on_off: On }),
};
let args = SetLightStateArgs {
to,
transition: None,
};
self.set_light_state(args)
.await
.context(set_state_error::HandleSnafu)?;
Ok(())
}
}
impl TurnToTemperature for LB130USHandle {
type Error = Infallible; // TODO
async fn turn_to_temperature(&mut self, temperature: Kelvin) -> Result<(), Self::Error> {
todo!()
}
}
#[derive(Debug, Snafu)]
#[snafu(module)]
pub enum TurnToColorError {
HandleError { source: HandleError },
}
impl TurnToColor for LB130USHandle {
type Error = TurnToColorError;
async fn turn_to_color(&mut self, color: protocol::light::Oklch) -> Result<(), Self::Error> {
let hsv: Hsv<Srgb, f64> = color.into_color();
let hsb = hsv.into_color();
self.set_light_state(SetLightStateArgs {
to: SetLightTo::Hsv(SetLightHsv { on_off: On, hsb }),
transition: None,
})
.await
.context(turn_to_color_error::HandleSnafu)?;
Ok(())
}
}

3
driver/kasa/src/lib.rs Normal file

@@ -0,0 +1,3 @@
pub mod connection;
mod impl_protocol;
pub mod messages;

421
driver/kasa/src/messages.rs Normal file

@@ -0,0 +1,421 @@
use std::{collections::BTreeMap, fmt::Display, str::FromStr, time::Duration};
use deranged::{RangedU16, RangedU8};
use mac_address::{MacAddress, MacParseError};
use palette::{FromColor, Hsv};
use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize};
use serde_repr::Deserialize_repr;
use serde_with::{DeserializeFromStr, SerializeDisplay};
#[derive(Debug)]
pub struct GetSysInfo;
impl Serialize for GetSysInfo {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let target = "system";
let cmd = "get_sysinfo";
let arg: Option<()> = None;
let mut top_level_map = serializer.serialize_map(Some(1))?;
top_level_map.serialize_entry(target, &BTreeMap::from([(cmd, arg)]))?;
top_level_map.end()
}
}
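// For reference (a hedged sketch, not asserted anywhere in this crate): the
// custom Serialize impl above nests the command under its target namespace, so
// `GetSysInfo` serializes to the JSON envelope the bulb expects:
//
//     let json = serde_json::to_string(&GetSysInfo)?;
//     assert_eq!(json, r#"{"system":{"get_sysinfo":null}}"#);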
#[derive(Debug, Deserialize)]
pub struct GetSysInfoResponse {
pub system: GetSysInfoResponseSystem,
}
#[derive(Debug, Deserialize)]
pub struct GetSysInfoResponseSystem {
pub get_sysinfo: SysInfo,
}
#[derive(Debug, Deserialize)]
pub struct CommonSysInfo {
pub active_mode: ActiveMode,
pub alias: String,
pub ctrl_protocols: CtrlProtocols,
pub description: String,
pub dev_state: DevState,
#[serde(rename = "deviceId")]
pub device_id: DeviceId,
pub disco_ver: String,
pub err_code: i32, // No idea
pub heapsize: u64, // No idea
#[serde(rename = "hwId")]
pub hw_id: HardwareId,
pub hw_ver: String,
pub is_color: IsColor,
pub is_dimmable: IsDimmable,
pub is_factory: bool,
pub is_variable_color_temp: IsVariableColorTemp,
pub light_state: LightState,
pub mic_mac: MacAddressWithoutSeparators,
pub mic_type: MicType,
// model: Model,
#[serde(rename = "oemId")]
pub oem_id: OemId,
pub preferred_state: Vec<PreferredStateChoice>,
pub rssi: i32,
pub sw_ver: String,
}
#[derive(Debug, Deserialize)]
pub struct LB130USSys {
#[serde(flatten)]
pub sys_info: CommonSysInfo,
}
#[derive(Debug, Deserialize)]
#[serde(tag = "model")]
pub enum SysInfo {
#[serde(rename = "LB130(US)")]
LB130US(LB130USSys),
}
#[derive(Debug, Deserialize)]
pub struct PreferredStateChoice {
#[serde(flatten)]
pub color: Color,
}
#[derive(Debug, SerializeDisplay, DeserializeFromStr)]
struct MacAddressWithoutSeparators(MacAddress);
impl FromStr for MacAddressWithoutSeparators {
type Err = MacParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let [a, b, c, d, e, f, g, h, i, j, k, l] = s
.as_bytes()
.try_into()
.map_err(|_| MacParseError::InvalidLength)?;
let bytes = [(a, b), (c, d), (e, f), (g, h), (i, j), (k, l)];
let mut digits = [0; 6];
for (i, (one, two)) in bytes.into_iter().enumerate() {
let slice = [one, two];
let as_string = std::str::from_utf8(&slice).map_err(|_| MacParseError::InvalidDigit)?;
let number =
u8::from_str_radix(as_string, 16).map_err(|_| MacParseError::InvalidDigit)?;
digits[i] = number;
}
Ok(MacAddressWithoutSeparators(MacAddress::new(digits)))
}
}
impl Display for MacAddressWithoutSeparators {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
Display::fmt(&self.0, f)
}
}
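// A hedged sketch of the parsing above: the device reports its MAC address as
// 12 hex digits with no separators, and the FromStr impl folds each pair of
// digits into one byte. Illustrative only.
#[cfg(test)]
mod mac_without_separators_sketch {
    use super::MacAddressWithoutSeparators;

    #[test]
    fn parses_undelimited_hex() {
        let mac: MacAddressWithoutSeparators =
            "AABBCCDDEEFF".parse().expect("12 hex digits should parse");
        assert_eq!(mac.0.bytes(), [0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF]);
    }
}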
#[derive(Debug, Deserialize)]
enum ActiveMode {
#[serde(rename = "none")]
None,
}
#[derive(Debug, Deserialize)]
struct CtrlProtocols {
name: String,
version: String,
}
#[derive(Debug, Deserialize)]
struct DeviceId(pub String);
#[derive(Debug, Deserialize)]
enum DevState {
#[serde(rename = "normal")]
Normal,
}
#[derive(Debug, Deserialize)]
struct HardwareId(pub String);
#[derive(Debug, Deserialize_repr)]
#[repr(u8)]
enum IsColor {
NoColor = 0,
Color = 1,
}
#[derive(Debug, Deserialize_repr)]
#[repr(u8)]
enum IsDimmable {
NotDimmable = 0,
Dimmable = 1,
}
#[derive(Debug, Deserialize_repr)]
#[repr(u8)]
enum IsVariableColorTemp {
NoVariableColorTemp = 0,
VariableColorTemp = 1,
}
pub type Percentage = RangedU8<0, 100>;
pub type Angle = RangedU16<0, 360>;
pub type Kelvin = RangedU16<2500, 9000>;
#[derive(Debug, Clone)]
struct MaybeKelvin(Option<Kelvin>);
impl<'de> Deserialize<'de> for MaybeKelvin {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
match u16::deserialize(deserializer)? {
0 => Ok(MaybeKelvin(None)),
value => {
let kelvin = Kelvin::try_from(value).map_err(|e| {
serde::de::Error::custom(format!(
"{value} is not in the range {}..{}",
Kelvin::MIN,
Kelvin::MAX
))
})?;
Ok(MaybeKelvin(Some(kelvin)))
}
}
}
}
#[derive(Debug, Clone, Deserialize)]
struct RawColor {
brightness: Percentage,
color_temp: MaybeKelvin,
hue: Angle,
saturation: Percentage,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Hsb {
hue: Angle,
saturation: Percentage,
brightness: Percentage,
}
impl<S> FromColor<Hsv<S, f64>> for Hsb {
fn from_color(hsv: Hsv<S, f64>) -> Self {
let (hue, saturation, value) = hsv.into_components();
let hue = hue.into_positive_degrees();
let hue = Angle::new_saturating(hue as u16);
let saturation = saturation * (Percentage::MAX.get() as f64);
let saturation = Percentage::new_saturating(saturation as u8);
let brightness = value * (Percentage::MAX.get() as f64);
let brightness = Percentage::new_saturating(brightness as u8);
Hsb {
hue,
saturation,
brightness,
}
}
}
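// A hedged sketch of the conversion above: full-saturation green at half value
// in palette's floating-point Hsv maps onto the bulb's integer HSB ranges
// (hue in degrees, saturation and brightness as percentages). Illustrative only.
#[cfg(test)]
mod hsb_conversion_sketch {
    use super::Hsb;
    use palette::{encoding::Srgb, FromColor, Hsv};

    #[test]
    fn green_at_half_value() {
        let hsv: Hsv<Srgb, f64> = Hsv::new(120.0, 1.0, 0.5);
        let hsb = Hsb::from_color(hsv);
        assert_eq!(hsb.hue.get(), 120);
        assert_eq!(hsb.saturation.get(), 100);
        assert_eq!(hsb.brightness.get(), 50);
    }
}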
#[derive(Debug, Clone)]
struct KelvinWithBrightness {
kelvin: Kelvin,
brightness: Percentage,
}
#[derive(Debug, Clone)]
enum Color {
HSB(Hsb),
KelvinWithBrightness(KelvinWithBrightness),
}
impl<'de> Deserialize<'de> for Color {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let raw_color = RawColor::deserialize(deserializer)?;
let RawColor {
brightness,
color_temp,
hue,
saturation,
} = raw_color;
match color_temp.0 {
Some(kelvin) => Ok(Color::KelvinWithBrightness(KelvinWithBrightness {
kelvin,
brightness,
})),
None => Ok(Color::HSB(Hsb {
hue,
saturation,
brightness,
})),
}
}
}
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord)]
pub struct Off;
impl<'de> Deserialize<'de> for Off {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = u8::deserialize(deserializer)?;
if value == 0 {
Ok(Off)
} else {
Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Unsigned(value.into()),
&"0",
))
}
}
}
impl Serialize for Off {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_u8(0)
}
}
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord)]
pub struct On;
impl<'de> Deserialize<'de> for On {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = u8::deserialize(deserializer)?;
if value == 1 {
Ok(On)
} else {
Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Unsigned(value.into()),
&"1",
))
}
}
}
impl Serialize for On {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_u8(1)
}
}
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
pub enum LightState {
On {
on_off: On,
#[serde(flatten)]
color: Color,
mode: LightStateMode,
},
Off {
on_off: Off,
dft_on_state: DftOnState,
},
}
#[derive(Debug, Clone, Deserialize)]
struct DftOnState {
#[serde(flatten)]
color: Color,
mode: LightStateMode,
}
#[derive(Debug, Clone, Deserialize)]
enum LightStateMode {
#[serde(rename = "normal")]
Normal,
}
#[derive(Debug, Clone, Deserialize)]
enum MicType {
#[serde(rename = "IOT.SMARTBULB")]
IotSmartbulb,
}
#[derive(Debug, Clone, Deserialize)]
struct OemId(pub String);
#[derive(Debug, Clone, Serialize)]
pub struct SetLightStateArgs {
#[serde(flatten)]
pub to: SetLightTo,
pub transition: Option<Duration>,
}
#[derive(Debug, Clone, Serialize)]
pub struct SetLightOff {
pub on_off: Off,
}
#[derive(Debug, Clone, Serialize)]
pub struct SetLightLastOn {
pub on_off: On,
}
#[derive(Debug, Clone, Serialize)]
pub struct SetLightHsv {
pub on_off: On,
#[serde(flatten)]
pub hsb: Hsb,
}
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
pub enum SetLightTo {
Off(SetLightOff),
LastOn(SetLightLastOn),
Hsv(SetLightHsv),
// TODO: kelvin
}
#[derive(Debug, Clone, derive_more::From)]
pub struct SetLightState(pub SetLightStateArgs);
impl Serialize for SetLightState {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let target = "smartlife.iot.smartbulb.lightingservice";
let cmd = "transition_light_state";
let arg = &self.0;
let mut top_level_map = serializer.serialize_map(Some(1))?;
top_level_map.serialize_entry(target, &BTreeMap::from([(cmd, arg)]))?;
top_level_map.end()
}
}
#[derive(Debug, Clone, Deserialize)]
pub struct SetLightStateResponse {
// TODO
}


@@ -0,0 +1,10 @@
[package]
name = "emitter-and-signal"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
[dependencies]
deranged = { workspace = true }
ext-trait = { workspace = true }
tokio = { workspace = true, features = ["sync"] }


@@ -0,0 +1,115 @@
use std::{future::Future, num::NonZero};
use deranged::RangedUsize;
use tokio::{
sync::{broadcast, mpsc},
task::JoinHandle,
};
use super::ProducerExited;
#[derive(Debug)]
pub struct Publisher<T> {
sender: broadcast::Sender<T>,
}
impl<T> Publisher<T> {
pub async fn all_unsubscribed(&self) {
self.sender.closed().await
}
pub fn publish(&self, event: T) {
let _ = self.sender.send(event);
}
}
#[derive(Debug)]
pub struct PublisherStream<T> {
receiver: mpsc::Receiver<Publisher<T>>,
}
impl<T> PublisherStream<T> {
/// Returns `None` when no more subscriptions can ever be made
pub async fn wait(&mut self) -> Option<Publisher<T>> {
self.receiver.recv().await
}
}
#[derive(Debug, Clone)]
pub struct Emitter<T> {
sender: broadcast::Sender<T>,
publisher_sender: mpsc::Sender<Publisher<T>>,
}
pub type Capacity = RangedUsize<1, { usize::MAX / 2 }>;
impl<T> Emitter<T> {
pub fn new<R, Fut>(
producer: impl FnOnce(PublisherStream<T>) -> Fut,
capacity: Capacity,
) -> (Self, JoinHandle<R>)
where
Fut: Future<Output = R> + Send + 'static,
T: Clone,
R: Send + 'static,
{
let (sender, _) = broadcast::channel(capacity.get());
let (publisher_sender, publisher_receiver) = mpsc::channel(1);
let publisher_stream = PublisherStream {
receiver: publisher_receiver,
};
let producer_join_handle = tokio::spawn(producer(publisher_stream));
(
Self {
publisher_sender,
sender,
},
producer_join_handle,
)
}
pub fn listen(&self) -> Result<Subscription<T>, ProducerExited> {
let receiver = self.sender.subscribe();
if self.sender.receiver_count() == 1 {
if let Err(mpsc::error::TrySendError::Closed(_)) =
self.publisher_sender.try_send(Publisher {
sender: self.sender.clone(),
})
{
return Err(ProducerExited);
}
}
Ok(Subscription { receiver })
}
}
pub struct Subscription<T> {
receiver: broadcast::Receiver<T>,
}
pub enum NextError {
ProducerExited(ProducerExited),
Lagged { skipped_events: NonZero<u64> },
}
impl<T> Subscription<T> {
pub async fn next(&mut self) -> Result<T, NextError>
where
T: Clone,
{
self.receiver.recv().await.map_err(|err| match err {
broadcast::error::RecvError::Closed => NextError::ProducerExited(ProducerExited),
broadcast::error::RecvError::Lagged(skipped_events) => NextError::Lagged {
skipped_events: skipped_events
.try_into()
.expect("lagging 0 events should be impossible"),
},
})
}
}
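// A minimal, hedged usage sketch (not exercised anywhere in this crate): the
// producer runs lazily, publishing only while at least one subscriber exists.
// It assumes a Tokio runtime is already running, because `Emitter::new` spawns
// the producer task.
#[allow(dead_code)]
async fn emitter_usage_sketch() {
    let capacity = Capacity::new(16).expect("16 is within the allowed capacity range");

    let (emitter, _producer_handle) = Emitter::<u32>::new(
        |mut publishers| async move {
            // Wait until somebody listens before doing any work.
            while let Some(publisher) = publishers.wait().await {
                publisher.publish(1);
                // Go back to sleep once every subscriber has dropped.
                publisher.all_unsubscribed().await;
            }
        },
        capacity,
    );

    if let Ok(mut subscription) = emitter.listen() {
        if let Ok(event) = subscription.next().await {
            assert_eq!(event, 1);
        }
    }
}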


@@ -0,0 +1,147 @@
use ext_trait::extension;
use tokio::{select, task::JoinHandle};
use super::emitter::{Capacity, Emitter, NextError};
#[extension(pub trait EmitterExt)]
impl<T> Emitter<T> {
fn map<M, F>(self, mut func: F, capacity: Capacity) -> (Emitter<M>, JoinHandle<()>)
where
T: Send + 'static + Clone,
M: Send + 'static + Clone,
F: Send + 'static + FnMut(T) -> M,
{
Emitter::new(
|mut publisher_stream| async move {
while let Some(publisher) = publisher_stream.wait().await {
let Ok(mut subscription) = self.listen() else {
return;
};
loop {
select! {
biased;
_ = publisher.all_unsubscribed() => {
break;
}
event_res = subscription.next() => {
match event_res {
Ok(event) => publisher.publish(func(event)),
Err(NextError::Lagged { .. }) => {},
Err(NextError::ProducerExited(_)) => return,
}
}
}
}
}
},
capacity,
)
}
fn filter<F>(self, mut func: F, capacity: Capacity) -> (Emitter<T>, JoinHandle<()>)
where
T: Send + 'static + Clone,
F: Send + 'static + FnMut(&T) -> bool,
{
Emitter::new(
|mut publisher_stream| async move {
while let Some(publisher) = publisher_stream.wait().await {
let Ok(mut subscription) = self.listen() else {
return;
};
loop {
select! {
biased;
_ = publisher.all_unsubscribed() => {
break;
}
event_res = subscription.next() => {
match event_res {
Ok(event) => if func(&event) {
publisher.publish(event)
},
Err(NextError::Lagged { .. }) => {},
Err(NextError::ProducerExited(_)) => return,
}
}
}
}
}
},
capacity,
)
}
fn filter_mut<F>(self, mut func: F, capacity: Capacity) -> (Emitter<T>, JoinHandle<()>)
where
T: Send + 'static + Clone,
F: Send + 'static + FnMut(&mut T) -> bool,
{
Emitter::new(
|mut publisher_stream| async move {
while let Some(publisher) = publisher_stream.wait().await {
let Ok(mut subscription) = self.listen() else {
return;
};
loop {
select! {
biased;
_ = publisher.all_unsubscribed() => {
break;
}
event_res = subscription.next() => {
match event_res {
Ok(mut event) => if func(&mut event) {
publisher.publish(event)
},
Err(NextError::Lagged { .. }) => {},
Err(NextError::ProducerExited(_)) => return,
}
}
}
}
}
},
capacity,
)
}
fn filter_map<M, F>(self, mut func: F, capacity: Capacity) -> (Emitter<M>, JoinHandle<()>)
where
T: Send + 'static + Clone,
M: Send + 'static + Clone,
F: Send + 'static + FnMut(T) -> Option<M>,
{
Emitter::new(
|mut publisher_stream| async move {
while let Some(publisher) = publisher_stream.wait().await {
let Ok(mut subscription) = self.listen() else {
return;
};
loop {
select! {
biased;
_ = publisher.all_unsubscribed() => {
break;
}
event_res = subscription.next() => {
match event_res {
Ok(event) => if let Some(mapped) = func(event) {
publisher.publish(mapped)
},
Err(NextError::Lagged { .. }) => {},
Err(NextError::ProducerExited(_)) => return,
}
}
}
}
}
},
capacity,
)
}
}


@@ -0,0 +1,10 @@
pub mod emitter;
mod emitter_ext;
pub mod signal;
mod signal_ext;
pub use emitter_ext::EmitterExt;
pub use signal_ext::SignalExt;
#[derive(Debug, Clone, Copy)]
pub struct ProducerExited;


@@ -0,0 +1,116 @@
use std::future::Future;
use tokio::sync::{mpsc, watch};
pub use tokio::task::JoinError;
#[derive(Debug)]
pub struct Publisher<T> {
sender: watch::Sender<T>,
}
impl<T> Publisher<T> {
pub async fn all_unsubscribed(&self) {
self.sender.closed().await
}
pub fn publish(&self, value: T) {
self.sender.send_replace(value);
}
pub fn publish_with<F: FnOnce(&mut T) -> bool>(&self, maybe_modify: F) {
self.sender.send_if_modified(maybe_modify);
}
}
#[derive(Debug)]
pub struct PublisherStream<T> {
receiver: mpsc::Receiver<Publisher<T>>,
}
impl<T> PublisherStream<T> {
/// Returns `None` when no more subscriptions can ever be made
pub async fn wait(&mut self) -> Option<Publisher<T>> {
self.receiver.recv().await
}
}
#[derive(Debug)]
pub struct Signal<T> {
sender: watch::Sender<T>,
publisher_sender: mpsc::Sender<Publisher<T>>,
}
impl<T> Signal<T> {
pub fn new<R, Fut: Future<Output = R> + Send + 'static>(
initial: T,
producer: impl FnOnce(PublisherStream<T>) -> Fut,
) -> (Self, impl Future<Output = Result<R, JoinError>>)
where
R: Send + 'static,
{
let (sender, _) = watch::channel(initial);
let (publisher_sender, publisher_receiver) = mpsc::channel(1);
let publisher_stream = PublisherStream {
receiver: publisher_receiver,
};
let producer_join_handle = tokio::spawn(producer(publisher_stream));
(
Self {
publisher_sender,
sender,
},
producer_join_handle,
)
}
pub fn subscribe(&self) -> Result<Subscription<T>, ProducerExited> {
let receiver = self.sender.subscribe();
if self.sender.receiver_count() == 1 {
if let Err(mpsc::error::TrySendError::Closed(_)) =
self.publisher_sender.try_send(Publisher {
sender: self.sender.clone(),
})
{
return Err(ProducerExited);
}
}
Ok(Subscription { receiver })
}
}
pub struct Subscription<T> {
receiver: watch::Receiver<T>,
}
#[derive(Debug, Clone, Copy)]
pub struct ProducerExited;
impl<T> Subscription<T> {
pub async fn changed(&mut self) -> Result<(), ProducerExited> {
self.receiver.changed().await.map_err(|_| ProducerExited)
}
pub fn get(&mut self) -> T::Owned
where
T: ToOwned,
{
self.receiver.borrow_and_update().to_owned()
}
pub async fn for_each<Fut: Future<Output = ()>>(mut self, mut func: impl FnMut(T::Owned) -> Fut)
where
T: ToOwned,
{
loop {
func(self.get()).await;
if self.changed().await.is_err() {
return;
}
}
}
}
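// A minimal, hedged usage sketch (not exercised anywhere in this crate): the
// producer runs lazily, publishing only while at least one subscriber exists,
// and subscribers can read the latest value at any time. It assumes a Tokio
// runtime is already running, because `Signal::new` spawns the producer task.
#[allow(dead_code)]
async fn signal_usage_sketch() {
    let (signal, _producer_handle) = Signal::new(0_u32, |mut publishers| async move {
        while let Some(publisher) = publishers.wait().await {
            publisher.publish(1);
            publisher.publish_with(|value| {
                *value += 1;
                true // report the value as modified so waiting subscribers wake
            });
            // Go back to sleep once every subscriber has dropped.
            publisher.all_unsubscribed().await;
        }
    });

    if let Ok(mut subscription) = signal.subscribe() {
        // The latest value is always available without waiting...
        let seen_now = subscription.get();
        // ...and `changed` resolves once a publish lands after the last `get`.
        if subscription.changed().await.is_ok() {
            let seen_later = subscription.get();
            assert!(seen_later >= seen_now);
        }
    }
}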


@@ -0,0 +1,6 @@
use ext_trait::extension;
use super::signal::Signal;
#[extension(pub trait SignalExt)]
impl<T> Signal<T> {}

42
entrypoint/Cargo.toml Normal file

@@ -0,0 +1,42 @@
[package]
name = "smart-home-in-rust-with-home-assistant"
version = "0.2.0"
edition = "2021"
license = { workspace = true }
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "smart_home_in_rust_with_home_assistant"
crate-type = ["cdylib"]
[dependencies]
arbitrary-value = { path = "../arbitrary-value", features = ["pyo3"] }
arc-swap = "1.7.1"
async-gate = "0.4.0"
axum = { version = "0.8.1", default-features = false, features = [
"http1",
"tokio",
] }
clap = { version = "4", features = ["derive", "env"] }
deranged = { workspace = true, features = ["serde"] }
driver-kasa = { path = "../driver/kasa" }
emitter-and-signal = { path = "../emitter-and-signal" }
home-assistant = { path = "../home-assistant" }
im = { version = "15.1.0", features = ["rayon"] }
protocol = { path = "../protocol" }
pyo3 = { workspace = true, features = [
"auto-initialize",
"chrono",
"extension-module",
] }
pyo3-async-runtimes = { workspace = true, features = ["tokio-runtime"] }
shadow-rs = { version = "1.0.1", default-features = false }
snafu = { workspace = true }
tokio = { workspace = true, features = ["time"] }
tracing = { workspace = true }
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.17"
uom = "0.36.0"
[build-dependencies]
shadow-rs = "1.0.1"

150
entrypoint/src/lib.rs Normal file

@@ -0,0 +1,150 @@
use std::{path::PathBuf, str::FromStr, time::Duration};
use clap::Parser;
use driver_kasa::connection::LB130USHandle;
use home_assistant::{
home_assistant::HomeAssistant, light::HomeAssistantLight, object_id::ObjectId,
};
use protocol::light::{IsOff, IsOn};
use pyo3::prelude::*;
use shadow_rs::shadow;
use tokio::time::interval;
use tracing::{level_filters::LevelFilter, Level};
use tracing_appender::rolling::{self, RollingFileAppender};
use tracing_subscriber::{
fmt::{self, fmt, format::FmtSpan},
layer::SubscriberExt,
registry,
util::SubscriberInitExt,
Layer,
};
use tracing_to_home_assistant::TracingToHomeAssistant;
mod tracing_to_home_assistant;
shadow!(build_info);
#[derive(Debug, Parser)]
struct Args {
#[arg(env)]
persistence_directory: Option<PathBuf>,
#[arg(env)]
tracing_directory: Option<PathBuf>,
#[arg(env, default_value = "")]
tracing_file_name_prefix: String,
#[arg(env, default_value = "log")]
tracing_file_name_suffix: String,
#[arg(env, default_value_t = 64)]
tracing_max_log_files: usize,
}
async fn real_main(
Args {
persistence_directory,
tracing_directory,
tracing_file_name_prefix,
tracing_file_name_suffix,
tracing_max_log_files,
}: Args,
home_assistant: HomeAssistant,
) -> ! {
let tracing_to_directory_res = tracing_directory
.map(|tracing_directory| {
tracing_appender::rolling::Builder::new()
.filename_prefix(tracing_file_name_prefix)
.filename_suffix(tracing_file_name_suffix)
.max_log_files(tracing_max_log_files)
.build(tracing_directory)
.map(tracing_appender::non_blocking)
})
.transpose();
let (tracing_to_directory, _guard, tracing_to_directory_initialization_error) =
match tracing_to_directory_res {
Ok(tracing_to_directory) => match tracing_to_directory {
Some((tracing_to_directory, guard)) => {
(Some(tracing_to_directory), Some(guard), None)
}
None => (None, None, None),
},
Err(error) => (None, None, Some(error)),
};
registry()
.with(
fmt::layer()
.pretty()
.with_span_events(FmtSpan::ACTIVE)
.with_filter(LevelFilter::from_level(Level::TRACE)),
)
.with(TracingToHomeAssistant)
.with(tracing_to_directory.map(|writer| {
fmt::layer()
.pretty()
.with_span_events(FmtSpan::ACTIVE)
.with_writer(writer)
.with_filter(LevelFilter::from_level(Level::TRACE))
}))
.init();
if let Some(error) = tracing_to_directory_initialization_error {
tracing::error!(?error, "cannot trace to directory");
}
let built_at = build_info::BUILD_TIME;
tracing::info!(built_at);
// let lamp = HomeAssistantLight {
// home_assistant,
// object_id: ObjectId::from_str("jacob_s_lamp_side").unwrap(),
// };
let ip = [10, 0, 3, 71];
let port = 9999;
let some_light = LB130USHandle::new(
(ip, port).into(),
Duration::from_secs(10),
(64).try_into().unwrap(),
);
let mut interval = interval(Duration::from_secs(20));
interval.tick().await;
loop {
interval.tick().await;
tracing::info!("about to call get_sysinfo");
let sysinfo_res = some_light.get_sysinfo().await;
tracing::info!(?sysinfo_res, "got sys info");
let is_on = some_light.is_on().await;
tracing::info!(?is_on);
let is_off = some_light.is_off().await;
tracing::info!(?is_off);
// let is_on = lamp.is_on().await;
// tracing::info!(?is_on);
// let is_off = lamp.is_off().await;
// tracing::info!(?is_off);
// let something = lamp.turn_on().await;
// tracing::info!(?something);
}
}
#[pyfunction]
fn main<'py>(py: Python<'py>, home_assistant: HomeAssistant) -> PyResult<Bound<'py, PyAny>> {
let args = Args::parse();
pyo3_async_runtimes::tokio::future_into_py::<_, ()>(py, async {
real_main(args, home_assistant).await;
})
}
/// A Python module implemented in Rust.
#[pymodule]
fn smart_home_in_rust_with_home_assistant(module: &Bound<'_, PyModule>) -> PyResult<()> {
module.add_function(wrap_pyfunction!(main, module)?)?;
Ok(())
}


@@ -7,7 +7,7 @@ use tracing::{
};
use tracing_subscriber::{layer::Context, Layer};
use crate::home_assistant::logger::{HassLogger, LogData};
use home_assistant::logger::{HassLogger, LogData};
pub struct TracingToHomeAssistant;

32
home-assistant/Cargo.toml Normal file
View File

@@ -0,0 +1,32 @@
[package]
name = "home-assistant"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
[features]
tracing = ["dep:tracing"]
[dependencies]
arbitrary-value = { path = "../arbitrary-value", features = ["pyo3"] }
chrono = { workspace = true }
derive_more = { workspace = true, features = [
"display",
"from",
"from_str",
"into",
"try_from",
"try_into",
] }
emitter-and-signal = { path = "../emitter-and-signal" }
once_cell = "1.21.3"
protocol = { path = "../protocol" }
pyo3 = { workspace = true }
pyo3-async-runtimes = { workspace = true, features = ["tokio-runtime"] }
python-utils = { path = "../python-utils" }
smol_str = "0.3.2"
snafu = { workspace = true }
strum = { workspace = true, features = ["derive"] }
tokio = { workspace = true }
tracing = { optional = true, workspace = true }
ulid = "1.2.0"

View File

@@ -1,6 +1,6 @@
use std::{fmt::Display, str::FromStr};
use std::{convert::Infallible, fmt::Display, str::FromStr};
use pyo3::{exceptions::PyValueError, prelude::*};
use pyo3::{exceptions::PyValueError, prelude::*, types::PyString};
use snafu::{ResultExt, Snafu};
use super::{
@@ -58,3 +58,14 @@ impl<'py> FromPyObject<'py> for EntityId {
Ok(entity_id)
}
}
impl<'py> IntoPyObject<'py> for EntityId {
type Target = PyString;
type Output = Bound<'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
let s = self.to_string();
s.into_pyobject(py)
}
}

View File

@@ -0,0 +1,39 @@
use super::id::Id;
use once_cell::sync::OnceCell;
use pyo3::{prelude::*, types::PyType};
/// The context that triggered something.
#[derive(Debug, FromPyObject)]
pub struct Context<Event> {
pub id: Id,
pub user_id: Option<String>,
pub parent_id: Option<String>,
/// In order to prevent cycles, the user must choose to pass [`Py<PyAny>`] either for the `Event` type here
/// or for the `Context` type in [`Event`]
pub origin_event: Event,
}
impl<'py, Event: IntoPyObject<'py>> IntoPyObject<'py> for Context<Event> {
type Target = PyAny;
type Output = Bound<'py, Self::Target>;
type Error = PyErr;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
static HOMEASSISTANT_CORE: OnceCell<Py<PyModule>> = OnceCell::new();
let homeassistant_core = HOMEASSISTANT_CORE
.get_or_try_init(|| Result::<_, PyErr>::Ok(py.import("homeassistant.core")?.unbind()))?
.bind(py);
let context_class = homeassistant_core.getattr("Context")?;
let context_class = context_class.downcast_into::<PyType>()?;
let context_instance = context_class.call1((self.user_id, self.parent_id, self.id))?;
context_instance.setattr("origin_event", self.origin_event)?;
Ok(context_instance)
}
}

View File

@@ -0,0 +1,38 @@
use std::convert::Infallible;
use pyo3::{prelude::*, types::PyString};
use smol_str::SmolStr;
use ulid::Ulid;
#[derive(Debug, Clone)]
pub enum Id {
Ulid(Ulid),
Other(SmolStr),
}
impl<'py> FromPyObject<'py> for Id {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<String>()?;
if let Ok(ulid) = s.parse() {
Ok(Id::Ulid(ulid))
} else {
Ok(Id::Other(s.into()))
}
}
}
impl<'py> IntoPyObject<'py> for Id {
type Target = PyString;
type Output = Bound<'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
match self {
Id::Ulid(ulid) => ulid.to_string().into_pyobject(py),
Id::Other(id) => id.as_str().into_pyobject(py),
}
}
}
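
A minimal sketch of how the new Id extraction behaves, assuming the Id type above is in scope; the string literals are illustrative only:
use pyo3::{prelude::*, types::PyString};

fn id_extraction_sketch() -> PyResult<()> {
    Python::with_gil(|py| {
        // A canonical 26-character ULID string lands in the Ulid variant...
        let ulid = PyString::new(py, "01ARZ3NDEKTSV4RRFFQ69G5FAV").extract::<Id>()?;
        assert!(matches!(ulid, Id::Ulid(_)));
        // ...while any other string falls back to Other instead of failing.
        let other = PyString::new(py, "some-opaque-context-id").extract::<Id>()?;
        assert!(matches!(other, Id::Other(_)));
        Ok(())
    })
}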

View File

@@ -0,0 +1,58 @@
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
use crate::{entity_id::EntityId, state_object::StateObject};
#[derive(Debug, Clone)]
pub struct Type;
impl<'py> FromPyObject<'py> for Type {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<&str>()?;
if s == "state_changed" {
Ok(Type)
} else {
Err(PyValueError::new_err(format!(
"expected a string of value 'state_changed', but got {s}"
)))
}
}
}
#[derive(Debug, FromPyObject)]
#[pyo3(from_item_all)]
pub struct Data<
OldState,
OldAttributes,
OldStateContextEvent,
NewState,
NewAttributes,
NewStateContextEvent,
> {
pub entity_id: EntityId,
pub old_state: Option<StateObject<OldState, OldAttributes, OldStateContextEvent>>,
pub new_state: Option<StateObject<NewState, NewAttributes, NewStateContextEvent>>,
}
/// A state changed event is fired when a state write actually changes the state.
pub type Event<
OldState,
OldAttributes,
OldStateContextEvent,
NewState,
NewAttributes,
NewStateContextEvent,
Context,
> = super::super::event::Event<
Type,
Data<
OldState,
OldAttributes,
OldStateContextEvent,
NewState,
NewAttributes,
NewStateContextEvent,
>,
Context,
>;

View File

@@ -0,0 +1,60 @@
use std::convert::Infallible;
use pyo3::prelude::*;
use python_utils::{detach, validate_type_by_name};
use super::{service_registry::ServiceRegistry, state_machine::StateMachine};
#[derive(Debug)]
pub struct HomeAssistant(Py<PyAny>);
impl<'source> FromPyObject<'source> for HomeAssistant {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
// region: Validation
validate_type_by_name(ob, "HomeAssistant")?;
// endregion: Validation
Ok(Self(detach(ob)))
}
}
impl<'py> IntoPyObject<'py> for &HomeAssistant {
type Target = PyAny;
type Output = Bound<'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
Ok(self.0.bind(py).to_owned())
}
}
impl HomeAssistant {
/// Return the representation
pub fn repr(&self, py: Python<'_>) -> Result<String, PyErr> {
let bound = self.0.bind(py);
let repr = bound.repr()?;
repr.extract()
}
/// Return if Home Assistant is running.
pub fn is_running(&self, py: Python<'_>) -> Result<bool, PyErr> {
let is_running = self.0.getattr(py, "is_running")?;
is_running.extract(py)
}
/// Return if Home Assistant is stopping.
pub fn is_stopping(&self, py: Python<'_>) -> Result<bool, PyErr> {
let is_stopping = self.0.getattr(py, "is_stopping")?;
is_stopping.extract(py)
}
pub fn states(&self, py: Python<'_>) -> Result<StateMachine, PyErr> {
let states = self.0.getattr(py, "states")?;
states.extract(py)
}
pub fn services(&self, py: Python<'_>) -> Result<ServiceRegistry, PyErr> {
let services = self.0.getattr(py, "services")?;
services.extract(py)
}
}

View File

@@ -2,7 +2,12 @@ pub mod domain;
pub mod entity_id;
pub mod event;
pub mod home_assistant;
pub mod light;
pub mod logger;
pub mod object_id;
pub mod service;
pub mod service_registry;
pub mod slug;
pub mod state;
pub mod state_machine;
pub mod state_object;

View File

@@ -0,0 +1,8 @@
use pyo3::prelude::*;
#[derive(Debug, FromPyObject)]
#[pyo3(from_item_all)]
pub struct LightAttributes {
min_color_temp_kelvin: Option<u16>, // TODO: only here to allow compilation!
max_color_temp_kelvin: Option<u16>, // TODO: only here to allow compilation!
}

View File

@@ -0,0 +1,54 @@
use attributes::LightAttributes;
use pyo3::prelude::*;
use snafu::{ResultExt, Snafu};
use state::LightState;
use crate::state::HomeAssistantState;
use super::{
domain::Domain, entity_id::EntityId, home_assistant::HomeAssistant, object_id::ObjectId,
state_object::StateObject,
};
mod attributes;
mod protocol;
mod service;
mod state;
#[derive(Debug)]
pub struct HomeAssistantLight {
pub home_assistant: HomeAssistant,
pub object_id: ObjectId,
}
impl HomeAssistantLight {
fn entity_id(&self) -> EntityId {
EntityId(Domain::Light, self.object_id.clone())
}
}
#[derive(Debug, Snafu)]
pub enum GetStateObjectError {
PythonError { source: PyErr },
EntityMissing,
}
impl HomeAssistantLight {
fn get_state_object(
&self,
) -> Result<
StateObject<HomeAssistantState<LightState>, LightAttributes, Py<PyAny>>,
GetStateObjectError,
> {
Python::with_gil(|py| {
let states = self.home_assistant.states(py).context(PythonSnafu)?;
let entity_id = self.entity_id();
let state_object = states
.get(py, entity_id)
.context(PythonSnafu)?
.ok_or(GetStateObjectError::EntityMissing)?;
Ok(state_object)
})
}
}

View File

@@ -0,0 +1,76 @@
use super::service::{turn_off::TurnOff, turn_on::TurnOn};
use super::{state::LightState, GetStateObjectError, HomeAssistantLight};
use crate::{
event::context::context::Context,
state::{ErrorState, HomeAssistantState, UnexpectedState},
};
use protocol::light::{GetState, SetState};
use pyo3::prelude::*;
use python_utils::IsNone;
use snafu::{ResultExt, Snafu};
#[derive(Debug, Snafu)]
pub enum GetStateError {
GetStateObjectError { source: GetStateObjectError },
Error { state: ErrorState },
UnexpectedError { state: UnexpectedState },
}
impl GetState for HomeAssistantLight {
type Error = GetStateError;
async fn get_state(&self) -> Result<protocol::light::State, Self::Error> {
let state_object = self.get_state_object().context(GetStateObjectSnafu)?;
let state = state_object.state;
match state {
HomeAssistantState::Ok(light_state) => Ok(light_state.into()),
HomeAssistantState::Err(error_state) => {
Err(GetStateError::Error { state: error_state })
}
HomeAssistantState::UnexpectedErr(state) => {
Err(GetStateError::UnexpectedError { state })
}
}
}
}
impl SetState for HomeAssistantLight {
type Error = PyErr;
async fn set_state(&mut self, state: protocol::light::State) -> Result<(), Self::Error> {
let context: Option<Context<()>> = None;
let target: Option<()> = None;
let services = Python::with_gil(|py| self.home_assistant.services(py))?;
let _: IsNone = match state {
protocol::light::State::Off => {
services
.call_service(
TurnOff {
entity_id: self.entity_id(),
},
context,
target,
false,
)
.await
}
protocol::light::State::On => {
services
.call_service(
TurnOn {
entity_id: self.entity_id(),
},
context,
target,
false,
)
.await
}
}?;
Ok(())
}
}

View File

@@ -0,0 +1,2 @@
pub mod turn_off;
pub mod turn_on;

View File

@@ -0,0 +1,33 @@
use std::str::FromStr;
use pyo3::IntoPyObject;
use crate::{
entity_id::EntityId,
service::{service_domain::ServiceDomain, service_id::ServiceId, IntoServiceCall},
};
#[derive(Debug, Clone)]
pub struct TurnOff {
pub entity_id: EntityId,
}
#[derive(Debug, Clone, IntoPyObject)]
pub struct TurnOffServiceData {
entity_id: EntityId,
}
impl IntoServiceCall for TurnOff {
type ServiceData = TurnOffServiceData;
fn into_service_call(self) -> (ServiceDomain, ServiceId, Self::ServiceData) {
let service_domain = ServiceDomain::from_str("light").expect("statically written and known to be a valid slug; hoping to get compiler checks instead in the future");
let service_id = ServiceId::from_str("turn_off").expect("statically written and known to be a valid slug; hoping to get compiler checks instead in the future");
let Self { entity_id } = self;
let service_data = TurnOffServiceData { entity_id };
(service_domain, service_id, service_data)
}
}

View File

@@ -0,0 +1,32 @@
use std::str::FromStr;
use pyo3::IntoPyObject;
use crate::{
entity_id::EntityId,
service::{service_domain::ServiceDomain, service_id::ServiceId, IntoServiceCall},
};
#[derive(Debug, Clone)]
pub struct TurnOn {
pub entity_id: EntityId,
}
#[derive(Debug, Clone, IntoPyObject)]
pub struct TurnOnServiceData {
entity_id: EntityId,
}
impl IntoServiceCall for TurnOn {
type ServiceData = TurnOnServiceData;
fn into_service_call(self) -> (ServiceDomain, ServiceId, Self::ServiceData) {
let service_domain = ServiceDomain::from_str("light").expect("statically written and known to be a valid slug; hoping to get compiler checks instead in the future");
let service_id = ServiceId::from_str("turn_on").expect("statically written and known to be a valid slug; hoping to get compiler checks instead in the future");
let Self { entity_id } = self;
let service_data = TurnOnServiceData { entity_id };
(service_domain, service_id, service_data)
}
}

View File

@@ -0,0 +1,40 @@
use std::str::FromStr;
use pyo3::{exceptions::PyValueError, prelude::*};
use strum::EnumString;
#[derive(Debug, Clone, EnumString, strum::Display)]
#[strum(serialize_all = "snake_case")]
pub enum LightState {
On,
Off,
}
impl<'py> FromPyObject<'py> for LightState {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<String>()?;
let state =
LightState::from_str(&s).map_err(|err| PyValueError::new_err(err.to_string()))?;
Ok(state)
}
}
impl From<LightState> for protocol::light::State {
fn from(light_state: LightState) -> Self {
match light_state {
LightState::On => protocol::light::State::On,
LightState::Off => protocol::light::State::Off,
}
}
}
impl From<protocol::light::State> for LightState {
fn from(state: protocol::light::State) -> Self {
match state {
protocol::light::State::On => LightState::On,
protocol::light::State::Off => LightState::Off,
}
}
}

View File

@@ -1,9 +1,7 @@
use arbitrary_value::{arbitrary::Arbitrary, map::Map};
use once_cell::sync::OnceCell;
use pyo3::{prelude::*, types::PyTuple};
use crate::{
arbitrary::{arbitrary::Arbitrary, map::Map},
python_utils::{detach, validate_type_by_name},
};
use python_utils::{detach, validate_type_by_name};
#[derive(Debug)]
pub struct HassLogger(Py<PyAny>);
@@ -55,8 +53,12 @@ pub struct LogData<ExcInfo> {
impl HassLogger {
pub fn new(py: Python<'_>, name: &str) -> PyResult<Self> {
let logging = py.import("logging")?;
let logger = logging.call_method1("getLogger", (name,))?;
static LOGGING_MODULE: OnceCell<Py<PyModule>> = OnceCell::new();
let logging_module = LOGGING_MODULE
.get_or_try_init(|| Result::<_, PyErr>::Ok(py.import("logging")?.unbind()))?
.bind(py);
let logger = logging_module.call_method1("getLogger", (name,))?;
Ok(logger.extract()?)
}

View File

@@ -0,0 +1,21 @@
use std::convert::Infallible;
use pyo3::{prelude::*, types::PyString};
use super::slug::Slug;
pub use super::slug::SlugParsingError as ObjectIdParsingError;
#[derive(Debug, Clone, derive_more::Display, derive_more::FromStr)]
pub struct ObjectId(pub Slug);
impl<'py> IntoPyObject<'py> for ObjectId {
type Target = PyString;
type Output = Bound<'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
let s = self.to_string();
s.into_pyobject(py)
}
}

View File

@@ -0,0 +1,11 @@
use service_domain::ServiceDomain;
use service_id::ServiceId;
pub mod service_domain;
pub mod service_id;
pub trait IntoServiceCall {
type ServiceData;
fn into_service_call(self) -> (ServiceDomain, ServiceId, Self::ServiceData);
}

View File

@@ -0,0 +1,21 @@
use std::convert::Infallible;
use pyo3::{prelude::*, types::PyString};
use super::super::slug::Slug;
pub use super::super::slug::SlugParsingError as ServiceDomainParsingError;
#[derive(Debug, Clone, derive_more::Display, derive_more::FromStr)]
pub struct ServiceDomain(pub Slug);
impl<'py> IntoPyObject<'py> for ServiceDomain {
type Target = PyString;
type Output = Bound<'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
let s = self.to_string();
s.into_pyobject(py)
}
}

View File

@@ -0,0 +1,21 @@
use std::convert::Infallible;
use pyo3::{prelude::*, types::PyString};
use super::super::slug::Slug;
pub use super::super::slug::SlugParsingError as ServiceIdParsingError;
#[derive(Debug, Clone, derive_more::Display, derive_more::FromStr)]
pub struct ServiceId(pub Slug);
impl<'py> IntoPyObject<'py> for ServiceId {
type Target = PyString;
type Output = Bound<'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
let s = self.to_string();
s.into_pyobject(py)
}
}

View File

@@ -0,0 +1,54 @@
use super::{event::context::context::Context, service::IntoServiceCall};
use pyo3::prelude::*;
use python_utils::{detach, validate_type_by_name};
#[derive(Debug)]
pub struct ServiceRegistry(Py<PyAny>);
impl<'py> FromPyObject<'py> for ServiceRegistry {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
// region: Validation
validate_type_by_name(ob, "ServiceRegistry")?;
// endregion: Validation
Ok(Self(detach(ob)))
}
}
impl ServiceRegistry {
pub async fn call_service<
ServiceData: for<'py> IntoPyObject<'py>,
Target: for<'py> IntoPyObject<'py>,
Event: for<'py> IntoPyObject<'py>,
ServiceResponse: for<'py> FromPyObject<'py>,
>(
&self,
service_call: impl IntoServiceCall<ServiceData = ServiceData>,
context: Option<Context<Event>>,
target: Option<Target>,
return_response: bool,
) -> PyResult<ServiceResponse> {
let (domain, service, service_data) = service_call.into_service_call();
let blocking = true;
let args = (
domain,
service,
service_data,
blocking,
context,
target,
return_response,
);
let future = Python::with_gil::<_, PyResult<_>>(|py| {
let service_registry = self.0.bind(py);
let awaitable = service_registry.call_method("async_call", args, None)?;
pyo3_async_runtimes::tokio::into_future(awaitable)
})?;
let service_response = future.await?;
Python::with_gil(|py| service_response.extract(py))
}
}
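
A minimal sketch of a concrete call through the generic signature above, mirroring the SetState implementation further down in this diff; it assumes a spot inside the home-assistant crate where HomeAssistant, EntityId, Context, TurnOn, and IsNone are all in scope:
async fn turn_on_sketch(home_assistant: &HomeAssistant, entity_id: EntityId) -> PyResult<()> {
    let services = Python::with_gil(|py| home_assistant.services(py))?;
    // No triggering context, no explicit target, and no service response requested,
    // so Home Assistant hands back Python's None, which IsNone validates.
    let _: IsNone = services
        .call_service(TurnOn { entity_id }, None::<Context<()>>, None::<()>, false)
        .await?;
    Ok(())
}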

View File

@@ -1,19 +1,26 @@
use std::{str::FromStr, sync::Arc};
use std::str::FromStr;
use pyo3::{exceptions::PyValueError, PyErr};
use smol_str::SmolStr;
use snafu::Snafu;
#[derive(Debug, Clone, derive_more::Display)]
pub struct ObjectId(Arc<str>);
pub struct Slug(SmolStr);
#[derive(Debug, Clone, Snafu)]
#[snafu(display("expected a lowercase ASCII alphabetical character (i.e. a through z) or a digit (i.e. 0 through 9) or an underscore (i.e. _) but encountered {encountered}"))]
pub struct ObjectIdParsingError {
pub struct SlugParsingError {
encountered: char,
}
impl FromStr for ObjectId {
type Err = ObjectIdParsingError;
impl From<SlugParsingError> for PyErr {
fn from(error: SlugParsingError) -> Self {
PyValueError::new_err(error.to_string())
}
}
impl FromStr for Slug {
type Err = SlugParsingError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
for c in s.chars() {
@@ -21,16 +28,10 @@ impl FromStr for ObjectId {
'a'..='z' => {}
'0'..='9' => {}
'_' => {}
_ => return Err(ObjectIdParsingError { encountered: c }),
_ => return Err(SlugParsingError { encountered: c }),
}
}
Ok(Self(s.into()))
}
}
impl From<ObjectIdParsingError> for PyErr {
fn from(error: ObjectIdParsingError) -> Self {
PyValueError::new_err(error.to_string())
}
}
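
A minimal sketch of the renamed Slug type's validation rules; the example strings are illustrative:
use std::str::FromStr;

fn slug_sketch() {
    // Lowercase ASCII letters, digits, and underscores are accepted...
    assert!(Slug::from_str("jacob_s_lamp_side").is_ok());
    // ...while the first offending character ('J' here) is reported in SlugParsingError.
    assert!(Slug::from_str("Jacob's lamp").is_err());
}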

View File

@@ -0,0 +1,71 @@
use std::{convert::Infallible, str::FromStr};
use pyo3::{exceptions::PyValueError, prelude::*};
use smol_str::SmolStr;
use strum::EnumString;
/// A state in Home Assistant that is known to represent an error of some kind:
/// * `unavailable` (the device is likely offline or unreachable from the Home Assistant instance)
/// * `unknown` (Home Assistant does not yet know the entity's state, e.g. before the first update after a restart)
#[derive(Debug, Clone, EnumString, strum::Display)]
#[strum(serialize_all = "snake_case")]
pub enum ErrorState {
Unavailable,
Unknown,
}
impl<'py> FromPyObject<'py> for ErrorState {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<String>()?;
let state =
ErrorState::from_str(&s).map_err(|err| PyValueError::new_err(err.to_string()))?;
Ok(state)
}
}
#[derive(Debug, Clone, derive_more::Display, derive_more::FromStr)]
pub struct UnexpectedState(pub SmolStr);
impl<'py> FromPyObject<'py> for UnexpectedState {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<String>()?;
let s = SmolStr::new(s);
Ok(UnexpectedState(s))
}
}
#[derive(Debug, Clone, derive_more::Display)]
pub enum HomeAssistantState<State> {
Ok(State),
Err(ErrorState),
UnexpectedErr(UnexpectedState),
}
impl<State: FromStr> FromStr for HomeAssistantState<State> {
type Err = Infallible;
fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {
if let Ok(ok) = State::from_str(s) {
return Ok(HomeAssistantState::Ok(ok));
}
if let Ok(error) = ErrorState::from_str(s) {
return Ok(HomeAssistantState::Err(error));
}
Ok(HomeAssistantState::UnexpectedErr(UnexpectedState(s.into())))
}
}
impl<'py, State: FromStr + FromPyObject<'py>> FromPyObject<'py> for HomeAssistantState<State> {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<String>()?;
let Ok(state) = s.parse();
Ok(state)
}
}
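
A minimal sketch of how HomeAssistantState sorts raw state strings, using the LightState enum added elsewhere in this diff as the Ok type and assuming a spot where both types are visible (e.g. inside the crate's light module):
fn home_assistant_state_sketch() {
    // The FromStr error is Infallible, so these unwraps can never panic.
    let on: HomeAssistantState<LightState> = "on".parse().unwrap();
    assert!(matches!(on, HomeAssistantState::Ok(LightState::On)));

    let offline: HomeAssistantState<LightState> = "unavailable".parse().unwrap();
    assert!(matches!(offline, HomeAssistantState::Err(ErrorState::Unavailable)));

    // Anything unrecognized is preserved verbatim instead of failing.
    let odd: HomeAssistantState<LightState> = "75".parse().unwrap();
    assert!(matches!(odd, HomeAssistantState::UnexpectedErr(_)));
}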

View File

@@ -1,11 +1,7 @@
use super::entity_id::EntityId;
use super::state_object::StateObject;
use pyo3::prelude::*;
use crate::{
home_assistant::entity_id::EntityId,
python_utils::{detach, validate_type_by_name},
};
use super::state::State;
use python_utils::{detach, validate_type_by_name};
#[derive(Debug)]
pub struct StateMachine(Py<PyAny>);
@@ -21,13 +17,18 @@ impl<'py> FromPyObject<'py> for StateMachine {
}
impl StateMachine {
pub fn get<Attributes: for<'py> FromPyObject<'py>, ContextEvent: for<'py> FromPyObject<'py>>(
pub fn get<
'py,
State: FromPyObject<'py>,
Attributes: FromPyObject<'py>,
ContextEvent: FromPyObject<'py>,
>(
&self,
py: &Python,
py: Python<'py>,
entity_id: EntityId,
) -> Result<Option<State<Attributes, ContextEvent>>, PyErr> {
) -> PyResult<Option<StateObject<State, Attributes, ContextEvent>>> {
let args = (entity_id.to_string(),);
let state = self.0.call_method1(*py, "get", args)?;
state.extract(*py)
let state = self.0.call_method1(py, "get", args)?;
state.extract(py)
}
}

View File

@@ -0,0 +1,151 @@
use super::{
event::{context::context::Context, specific::state_changed},
home_assistant::HomeAssistant,
};
use crate::entity_id::EntityId;
use chrono::{DateTime, Utc};
use emitter_and_signal::signal::Signal;
use once_cell::sync::OnceCell;
use pyo3::{
prelude::*,
types::{PyCFunction, PyDict, PyTuple},
};
use std::{future::Future, sync::Arc};
use tokio::{select, sync::mpsc};
#[derive(Debug, FromPyObject)]
pub struct StateObject<State, Attributes, ContextEvent> {
pub entity_id: EntityId,
pub state: State,
pub attributes: Attributes,
pub last_changed: Option<DateTime<Utc>>,
pub last_reported: Option<DateTime<Utc>>,
pub last_updated: Option<DateTime<Utc>>,
pub context: Context<ContextEvent>,
}
impl<
State: Send + Sync + 'static + for<'py> FromPyObject<'py>,
Attributes: Send + Sync + 'static + for<'py> FromPyObject<'py>,
ContextEvent: Send + Sync + 'static + for<'py> FromPyObject<'py>,
> StateObject<State, Attributes, ContextEvent>
{
pub fn store(
py: Python<'_>,
home_assistant: &HomeAssistant,
entity_id: EntityId,
) -> PyResult<(
Signal<Option<Arc<Self>>>,
impl Future<Output = Result<(), emitter_and_signal::signal::JoinError>>,
)> {
let state_machine = home_assistant.states(py)?;
let current = state_machine.get(py, entity_id.clone())?;
let py_home_assistant = home_assistant.into_pyobject(py)?.unbind();
let (store, task) = Signal::new(current.map(Arc::new), |mut publisher_stream| async move {
while let Some(publisher) = publisher_stream.wait().await {
let (new_state_sender, mut new_state_receiver) = mpsc::channel(8);
let untrack = Python::with_gil::<_, PyResult<_>>(|py| {
static EVENT_MODULE: OnceCell<Py<PyModule>> = OnceCell::new();
let event_module = EVENT_MODULE
.get_or_try_init(|| {
Result::<_, PyErr>::Ok(
py.import("homeassistant.helpers.event")?.unbind(),
)
})?
.bind(py);
let untrack = {
let callback =
move |args: &Bound<'_, PyTuple>,
_kwargs: Option<&Bound<'_, PyDict>>| {
#[cfg(feature = "tracing")]
tracing::debug!("calling the closure");
if let Ok((event,)) = args.extract::<(
state_changed::Event<
State,
Attributes,
ContextEvent,
State,
Attributes,
ContextEvent,
Py<PyAny>,
>,
)>() {
let new_state = event.data.new_state;
#[cfg(feature = "tracing")]
tracing::debug!("sending a new state"); // TODO: remove
new_state_sender.try_send(new_state).unwrap();
}
};
let callback = PyCFunction::new_closure(py, None, None, callback)?;
let args = (
py_home_assistant.clone_ref(py),
vec![entity_id.clone()],
callback,
);
event_module.call_method1("async_track_state_change_event", args)?
};
#[cfg(feature = "tracing")]
tracing::debug!(?untrack, "as any");
let is_callable = untrack.is_callable();
#[cfg(feature = "tracing")]
tracing::debug!(?is_callable);
// let untrack = untrack.downcast_into::<PyFunction>()?;
// tracing::debug!(?untrack, "as downcast");
let untrack = untrack.unbind();
#[cfg(feature = "tracing")]
tracing::debug!(?untrack, "as unbound");
Ok(untrack)
});
if let Ok(untrack) = untrack {
#[cfg(feature = "tracing")]
tracing::debug!("untrack is ok, going to wait for the next relevant event...");
loop {
select! {
biased;
_ = publisher.all_unsubscribed() => {
#[cfg(feature = "tracing")]
tracing::debug!("calling untrack");
let res = Python::with_gil(|py| untrack.call0(py));
#[cfg(feature = "tracing")]
tracing::debug!(?res);
break;
}
new_state = new_state_receiver.recv() => {
match new_state {
Some(new_state) => {
#[cfg(feature = "tracing")]
tracing::debug!("publishing new state");
publisher.publish(new_state.map(Arc::new))
},
None => {
#[cfg(feature = "tracing")]
tracing::debug!("channel dropped");
break
},
}
}
}
}
} else {
#[cfg(feature = "tracing")]
tracing::debug!("untrack is err");
}
}
});
Ok((store, task))
}
}

19
protocol/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "protocol"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
[features]
default = []
serde = ["dep:serde"]
[dependencies]
deranged = { workspace = true }
derive_more = { workspace = true }
ext-trait = { workspace = true }
palette = { workspace = true }
snafu = { workspace = true }
strum = { workspace = true, features = ["derive"] }
serde = { optional = true, workspace = true, features = ["derive"] }

1
protocol/src/lib.rs Normal file
View File

@@ -0,0 +1 @@
pub mod light;

124
protocol/src/light.rs Normal file
View File

@@ -0,0 +1,124 @@
use std::{error::Error, future::Future};
use deranged::RangedU16;
use snafu::{ResultExt, Snafu};
#[derive(
Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, strum::Display, strum::EnumIs,
)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub enum State {
Off,
On,
}
impl State {
pub const fn invert(self) -> Self {
match self {
State::Off => State::On,
State::On => State::Off,
}
}
}
impl From<bool> for State {
fn from(bool: bool) -> Self {
if bool {
State::On
} else {
State::Off
}
}
}
impl From<State> for bool {
fn from(state: State) -> Self {
state.is_on()
}
}
pub trait GetState {
type Error: Error;
fn get_state(&self) -> impl Future<Output = Result<State, Self::Error>> + Send;
}
#[ext_trait::extension(pub trait IsOff)]
impl<T: GetState> T {
async fn is_off(&self) -> Result<bool, T::Error> {
Ok(self.get_state().await?.is_off())
}
}
#[ext_trait::extension(pub trait IsOn)]
impl<T: GetState> T {
async fn is_on(&self) -> Result<bool, T::Error> {
Ok(self.get_state().await?.is_on())
}
}
pub trait SetState {
type Error: Error;
fn set_state(&mut self, state: State) -> impl Future<Output = Result<(), Self::Error>> + Send;
}
#[ext_trait::extension(pub trait TurnOff)]
impl<T: SetState> T {
async fn turn_off(&mut self) -> Result<(), T::Error> {
self.set_state(State::Off).await
}
}
#[ext_trait::extension(pub trait TurnOn)]
impl<T: SetState> T {
async fn turn_on(&mut self) -> Result<(), T::Error> {
self.set_state(State::On).await
}
}
pub trait Toggle {
type Error: Error;
fn toggle(&mut self) -> impl Future<Output = Result<(), Self::Error>> + Send;
}
#[derive(Debug, Clone, Snafu)]
pub enum InvertToToggleError<GetStateError: Error + 'static, SetStateError: Error + 'static> {
GetStateError { source: GetStateError },
SetStateError { source: SetStateError },
}
impl<T: GetState + SetState + Send> Toggle for T
where
<T as GetState>::Error: 'static,
<T as SetState>::Error: 'static,
{
type Error = InvertToToggleError<<T as GetState>::Error, <T as SetState>::Error>;
/// Toggle the light by setting it to the inverse of its current state
async fn toggle(&mut self) -> Result<(), Self::Error> {
let state = self.get_state().await.context(GetStateSnafu)?;
self.set_state(state.invert())
.await
.context(SetStateSnafu)?;
Ok(())
}
}
pub type Kelvin = RangedU16<2000, 10000>;
pub trait TurnToTemperature {
type Error: Error;
fn turn_to_temperature(
&mut self,
temperature: Kelvin,
) -> impl Future<Output = Result<(), Self::Error>> + Send;
}
pub type Oklch = palette::Oklch<f64>;
pub trait TurnToColor {
type Error: Error;
fn turn_to_color(
&mut self,
color: Oklch,
) -> impl Future<Output = Result<(), Self::Error>> + Send;
}
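
A minimal sketch of what a driver writes by hand and what it then gets for free from the traits above; MemoryLight is invented for illustration and is not part of the diff:
use std::convert::Infallible;

use protocol::light::{GetState, IsOn, SetState, State, Toggle};

// An in-memory stand-in for a real driver; only GetState and SetState are hand-written.
struct MemoryLight {
    state: State,
}

impl GetState for MemoryLight {
    type Error = Infallible;

    // The trait asks for a Send future; an async fn whose captures are all Send satisfies that.
    async fn get_state(&self) -> Result<State, Self::Error> {
        Ok(self.state)
    }
}

impl SetState for MemoryLight {
    type Error = Infallible;

    async fn set_state(&mut self, state: State) -> Result<(), Self::Error> {
        self.state = state;
        Ok(())
    }
}

async fn toggle_sketch() {
    let mut light = MemoryLight { state: State::Off };
    // Toggle comes from the blanket impl over GetState + SetState + Send.
    light.toggle().await.unwrap();
    // IsOn is one of the generated extension traits.
    assert!(light.is_on().await.unwrap());
}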

View File

@@ -1,23 +0,0 @@
[tool.poetry]
name = "smart-home-in-rust-with-home-assistant"
version = "0.1.0"
description = ""
authors = ["J / Jacob Babich <jacobbabichpublic+git@gmail.com>"]
readme = "README.md"
[build-system]
requires = ["maturin>=1.2,<2.0"]
build-backend = "maturin"
[project]
name = "smart-home-in-rust-with-home-assistant"
requires-python = ">=3.7"
classifiers = [
"Programming Language :: Rust",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
[tool.maturin]
features = ["pyo3/extension-module"]

8
python-utils/Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
[package]
name = "python-utils"
version = "0.1.0"
edition = "2021"
license = { workspace = true }
[dependencies]
pyo3 = { workspace = true }

View File

@@ -1,6 +1,30 @@
use pyo3::{exceptions::PyTypeError, prelude::*};
use std::convert::Infallible;
/// Create a GIL-independent reference (similar to [`Arc`](std::sync::Arc))
use pyo3::{exceptions::PyTypeError, prelude::*, types::PyNone};
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct IsNone;
impl<'py> FromPyObject<'py> for IsNone {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
ob.downcast::<PyNone>()?;
Ok(IsNone)
}
}
impl<'py> IntoPyObject<'py> for IsNone {
type Target = PyNone;
type Output = Borrowed<'py, 'py, Self::Target>;
type Error = Infallible;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
Ok(PyNone::get(py))
}
}
/// Create a GIL-independent reference
pub fn detach<T>(bound: &Bound<T>) -> Py<T> {
let py = bound.py();
bound.as_unbound().clone_ref(py)
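
A minimal sketch of the new IsNone helper, which accepts Python's None and rejects everything else; the helper function is illustrative only:
use pyo3::{prelude::*, types::PyString};

fn is_none_sketch() -> PyResult<()> {
    Python::with_gil(|py| {
        // Extraction succeeds for Python's None...
        assert!(py.None().bind(py).extract::<IsNone>().is_ok());
        // ...and fails for anything else, a string for example.
        assert!(PyString::new(py, "not none").extract::<IsNone>().is_err());
        Ok(())
    })
}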

View File

@@ -1,16 +0,0 @@
use std::collections::BTreeMap;
use pyo3::prelude::*;
use super::{arbitrary::Arbitrary, map_key::MapKey};
#[derive(Debug, Clone, Default, derive_more::From, derive_more::Into, IntoPyObject)]
pub struct Map(pub BTreeMap<MapKey, Arbitrary>);
impl<'py> FromPyObject<'py> for Map {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let inner = ob.extract()?;
Ok(Self(inner))
}
}

View File

@@ -1,14 +0,0 @@
use pyo3::prelude::*;
use super::id::Id;
/// The context that triggered something.
#[derive(Debug, FromPyObject)]
pub struct Context<Event> {
pub id: Id,
pub user_id: Option<String>,
pub parent_id: Option<String>,
/// In order to prevent cycles, the user must decide to pass [`Py<PyAny>`] for the `Event` type here
/// or for the `Context` type in [`Event`]
pub origin_event: Event,
}

View File

@@ -1,20 +0,0 @@
use pyo3::prelude::*;
use ulid::Ulid;
#[derive(Debug, Clone)]
pub enum Id {
Ulid(Ulid),
Other(String),
}
impl<'py> FromPyObject<'py> for Id {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<String>()?;
if let Ok(ulid) = s.parse() {
Ok(Id::Ulid(ulid))
} else {
Ok(Id::Other(s))
}
}
}

View File

@@ -1,37 +0,0 @@
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
use crate::home_assistant::{entity_id::EntityId, state::State};
#[derive(Debug, Clone)]
pub struct Type;
impl<'py> FromPyObject<'py> for Type {
fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
let s = ob.extract::<&str>()?;
if s == "state_changed" {
Ok(Type)
} else {
Err(PyValueError::new_err(format!(
"expected a string of value 'state_changed', but got {s}"
)))
}
}
}
#[derive(Debug, FromPyObject)]
#[pyo3(from_item_all)]
pub struct Data<OldAttributes, OldStateContextEvent, NewAttributes, NewStateContextEvent> {
pub entity_id: EntityId,
pub old_state: Option<State<OldAttributes, OldStateContextEvent>>,
pub new_state: Option<State<NewAttributes, NewStateContextEvent>>,
}
/// A state changed event is fired when on state write the state is changed.
pub type Event<OldAttributes, OldStateContextEvent, NewAttributes, NewStateContextEvent, Context> =
super::super::event::Event<
Type,
Data<OldAttributes, OldStateContextEvent, NewAttributes, NewStateContextEvent>,
Context,
>;

View File

@@ -1,43 +0,0 @@
use pyo3::prelude::*;
use crate::python_utils::{detach, validate_type_by_name};
use super::state_machine::StateMachine;
#[derive(Debug)]
pub struct HomeAssistant(Py<PyAny>);
impl<'source> FromPyObject<'source> for HomeAssistant {
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
// region: Validation
validate_type_by_name(ob, "HomeAssistant")?;
// endregion: Validation
Ok(Self(detach(ob)))
}
}
impl HomeAssistant {
/// Return the representation
pub fn repr(&self, py: &Python) -> Result<String, PyErr> {
let bound = self.0.bind(*py);
let repr = bound.repr()?;
repr.extract()
}
/// Return if Home Assistant is running.
pub fn is_running(&self, py: &Python) -> Result<bool, PyErr> {
let is_running = self.0.getattr(*py, "is_running")?;
is_running.extract(*py)
}
/// Return if Home Assistant is stopping.
pub fn is_stopping(&self, py: &Python) -> Result<bool, PyErr> {
let is_stopping = self.0.getattr(*py, "is_stopping")?;
is_stopping.extract(*py)
}
pub fn states(&self, py: &Python) -> Result<StateMachine, PyErr> {
let states = self.0.getattr(*py, "states")?;
states.extract(*py)
}
}

View File

@@ -1,17 +0,0 @@
use chrono::{DateTime, Utc};
use pyo3::prelude::*;
use crate::home_assistant::entity_id::EntityId;
use super::event::context::context::Context;
#[derive(Debug, FromPyObject)]
pub struct State<Attributes, ContextEvent> {
pub entity_id: EntityId,
pub state: String,
pub attributes: Attributes,
pub last_changed: Option<DateTime<Utc>>,
pub last_reported: Option<DateTime<Utc>>,
pub last_updated: Option<DateTime<Utc>>,
pub context: Context<ContextEvent>,
}

View File

@@ -1,60 +0,0 @@
use std::time::Duration;
use home_assistant::home_assistant::HomeAssistant;
use pyo3::prelude::*;
use shadow_rs::shadow;
use tokio::time::interval;
use tracing::{level_filters::LevelFilter, Level};
use tracing_subscriber::{
fmt::{self, format::FmtSpan},
layer::SubscriberExt,
registry,
util::SubscriberInitExt,
Layer,
};
use tracing_to_home_assistant::TracingToHomeAssistant;
mod arbitrary;
mod home_assistant;
mod python_utils;
mod tracing_to_home_assistant;
shadow!(build_info);
async fn real_main(home_assistant: HomeAssistant) -> ! {
registry()
.with(
fmt::layer()
.pretty()
.with_span_events(FmtSpan::ACTIVE)
.with_filter(LevelFilter::from_level(Level::TRACE)),
)
.with(TracingToHomeAssistant)
.init();
let built_at = build_info::BUILD_TIME;
tracing::info!(built_at);
let duration = Duration::from_millis(5900);
let mut interval = interval(duration);
loop {
let instant = interval.tick().await;
tracing::debug!(?instant, "it is now");
}
}
#[pyfunction]
fn main<'p>(py: Python<'p>, home_assistant: HomeAssistant) -> PyResult<Bound<'p, PyAny>> {
pyo3_async_runtimes::tokio::future_into_py::<_, ()>(py, async {
real_main(home_assistant).await;
})
}
/// A Python module implemented in Rust.
#[pymodule]
fn smart_home_in_rust_with_home_assistant(m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_function(wrap_pyfunction!(main, m)?)?;
Ok(())
}

View File

@@ -1,8 +0,0 @@
from asyncio import run
async def smart_home_in_rust_with_home_assistant_main():
from smart_home_in_rust_with_home_assistant import main
await main()
run(smart_home_in_rust_with_home_assistant_main())