Start optimizing large stack sizes
ivmarkov committed Jun 2, 2024
1 parent 610c421 commit 70158c7
Showing 4 changed files with 26 additions and 19 deletions.
4 changes: 4 additions & 0 deletions clippy.toml
@@ -0,0 +1,4 @@
+future-size-threshold = 2048
+stack-size-threshold = 3048
+pass-by-value-size-limit = 16
+large-error-threshold = 64
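
These thresholds configure the corresponding Clippy lints: `future-size-threshold` for `clippy::large_futures`, `stack-size-threshold` for `clippy::large_stack_frames`, `pass-by-value-size-limit` for `clippy::large_types_passed_by_value`, and `large-error-threshold` for `clippy::result_large_err`. As a rough, hypothetical illustration (not code from this repository), the first two would flag code like the following once the lints are enabled:

// Hypothetical sketch of what the thresholds above catch; not part of this commit.

// `clippy::large_stack_frames`: ~4 KiB of locals exceeds `stack-size-threshold = 3048`,
// so this function definition is flagged.
fn large_frame() -> u8 {
    let scratch = [0u8; 4096];
    scratch[4095]
}

// `clippy::large_futures`: `scratch` stays live across the await point, so the returned
// future is well over `future-size-threshold = 2048` bytes.
async fn large_future() -> u8 {
    let scratch = [0u8; 4096];
    core::future::ready(()).await;
    scratch[4095]
}

async fn caller() -> u8 {
    large_future().await // the lint fires on this `.await`
}
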
6 changes: 6 additions & 0 deletions src/lib.rs
@@ -5,6 +5,8 @@
 #![allow(unexpected_cfgs)]
 #![allow(clippy::declare_interior_mutable_const)]
 #![warn(clippy::large_futures)]
+#![warn(clippy::large_stack_frames)]
+#![warn(clippy::large_types_passed_by_value)]
 
 use core::net::{Ipv6Addr, SocketAddr, SocketAddrV6};
 use core::pin::pin;
@@ -115,6 +117,8 @@ where
 {
     /// Create a new `MatterStack` instance.
     #[cfg(feature = "std")]
+    #[allow(clippy::large_stack_frames)]
+    #[inline(always)]
     pub const fn new_default(
         dev_det: &'a BasicInfoConfig,
         dev_att: &'a dyn DevAttDataFetcher,
@@ -129,6 +133,8 @@ where
     }
 
     /// Create a new `MatterStack` instance.
+    #[allow(clippy::large_stack_frames)]
+    #[inline(always)]
     pub const fn new(
         dev_det: &'a BasicInfoConfig,
         dev_att: &'a dyn DevAttDataFetcher,
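
The `#[allow(clippy::large_stack_frames)]` plus `#[inline(always)]` pairing on these `const fn` constructors (the same pattern is applied to `KvBlobBuf::new` in src/persist.rs below) appears to follow a common approach for large by-value constructors: the lint is silenced where the big value is legitimately built, and the call is force-inlined so the value can be constructed in its final location rather than copied through an intermediate stack frame. A minimal sketch with hypothetical names:

// Hypothetical sketch; `BigBuffers` stands in for types such as `MatterStack` or `KvBlobBuf`.
struct BigBuffers {
    buf: [u8; 8192],
}

impl BigBuffers {
    // The constructor's frame holds the full 8 KiB value, so the lint is silenced
    // locally; `#[inline(always)]` is meant to avoid an extra stack-to-stack copy
    // at runtime call sites.
    #[allow(clippy::large_stack_frames)]
    #[inline(always)]
    const fn new() -> Self {
        Self { buf: [0; 8192] }
    }
}

// Evaluated at compile time: the value lands in static memory and no runtime
// stack frame is involved at all.
static BUFFERS: BigBuffers = BigBuffers::new();
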
2 changes: 2 additions & 0 deletions src/persist.rs
@@ -376,6 +376,8 @@ impl<E> KvBlobBuf<E>
 where
     E: Embedding,
 {
+    #[allow(clippy::large_stack_frames)]
+    #[inline(always)]
     const fn new() -> Self {
         Self {
             buf: PooledBuffers::new(0),
33 changes: 14 additions & 19 deletions src/wifi/comm.rs
@@ -3,8 +3,8 @@ use embassy_sync::blocking_mutex::raw::RawMutex;
 use log::{error, info, warn};
 
 use rs_matter::data_model::objects::{
-    AsyncHandler, AttrDataEncoder, AttrDataWriter, AttrDetails, AttrType, CmdDataEncoder,
-    CmdDetails, Dataver,
+    AttrDataEncoder, AttrDataWriter, AttrDetails, AttrType, CmdDataEncoder, CmdDetails, Dataver,
+    Handler,
 };
 use rs_matter::data_model::sdm::nw_commissioning::{
     AddWifiNetworkRequest, Attributes, Commands, ConnectNetworkRequest, ConnectNetworkResponse,
@@ -118,7 +118,7 @@ where
     }
 
     /// Invoke a command.
-    pub async fn invoke(
+    pub fn invoke(
         &self,
         exchange: &Exchange<'_>,
         cmd: &CmdDetails<'_>,
@@ -140,8 +140,7 @@
             }
             Commands::ConnectNetwork => {
                 info!("ConnectNetwork");
-                self.connect_network(exchange, &ConnectNetworkRequest::from_tlv(data)?, encoder)
-                    .await?;
+                self.connect_network(exchange, &ConnectNetworkRequest::from_tlv(data)?, encoder)?;
             }
             Commands::ReorderNetwork => {
                 info!("ReorderNetwork");
@@ -307,7 +306,7 @@
         })
     }
 
-    async fn connect_network(
+    fn connect_network(
         &self,
         _exchange: &Exchange<'_>,
         req: &ConnectNetworkRequest<'_>,
@@ -425,26 +424,22 @@
     }
 }
 
-impl<'a, const N: usize, M> AsyncHandler for WifiNwCommCluster<'a, N, M>
+impl<'a, const N: usize, M> Handler for WifiNwCommCluster<'a, N, M>
 where
     M: RawMutex,
 {
-    async fn read<'m>(
-        &'m self,
-        attr: &'m AttrDetails<'_>,
-        encoder: AttrDataEncoder<'m, '_, '_>,
-    ) -> Result<(), Error> {
+    fn read(&self, attr: &AttrDetails, encoder: AttrDataEncoder) -> Result<(), Error> {
         WifiNwCommCluster::read(self, attr, encoder)
     }
 
-    async fn invoke<'m>(
-        &'m self,
-        exchange: &'m Exchange<'_>,
-        cmd: &'m CmdDetails<'_>,
-        data: &'m TLVElement<'_>,
-        encoder: CmdDataEncoder<'m, '_, '_>,
+    fn invoke(
+        &self,
+        exchange: &Exchange<'_>,
+        cmd: &CmdDetails,
+        data: &TLVElement,
+        encoder: CmdDataEncoder,
     ) -> Result<(), Error> {
-        WifiNwCommCluster::invoke(self, exchange, cmd, data, encoder).await
+        WifiNwCommCluster::invoke(self, exchange, cmd, data, encoder)
     }
 }
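
Dropping `AsyncHandler` in favour of the plain `Handler` trait makes these handlers synchronous. This is relevant to the commit's goal because an `async fn` produces a future that embeds every local kept alive across an await point, so routing work that never really suspends through async layers inflates the caller's composed future. A hypothetical, self-contained illustration (not code from this repository):

// Hypothetical illustration of why avoiding unnecessary `async fn`s shrinks futures.
async fn handler_async() -> u32 {
    // `scratch` is live across the await, so it is stored inside the returned future.
    let scratch = [0u8; 1024];
    core::future::ready(()).await;
    scratch[0] as u32
}

fn handler_sync() -> u32 {
    // Here `scratch` only occupies the stack while the function runs.
    let scratch = [0u8; 1024];
    scratch[0] as u32
}

fn main() {
    let fut = handler_async();
    // The future carries the whole 1 KiB buffer plus bookkeeping.
    assert!(core::mem::size_of_val(&fut) >= 1024);
    assert_eq!(handler_sync(), 0);
}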
