
Commit

Debugging
EricLBuehler committed Oct 2, 2024
1 parent c8e5465 commit e5b6de0
Showing 1 changed file with 6 additions and 1 deletion.
7 changes: 6 additions & 1 deletion mistralrs-core/src/vision_models/mllama/text.rs
@@ -258,6 +258,7 @@ impl MLlamaSelfAttentionDecoderLayer {
         if hidden_states.dtype() == DType::F16 {
             hidden_states = clamp_inf(&hidden_states)?;
         }
+        dbg!("self attn",&hidden_states.mean_all());
 
         let residual = &hidden_states;
         let mut hidden_states = self.post_attention_layernorm.forward(&hidden_states)?;
@@ -266,6 +267,7 @@ impl MLlamaSelfAttentionDecoderLayer {
         if hidden_states.dtype() == DType::F16 {
             hidden_states = clamp_inf(&hidden_states)?;
         }
+        dbg!("self mlp",&hidden_states.mean_all());
 
         residual + hidden_states
     }
@@ -495,6 +497,7 @@ impl MLlamaCrossAttentionDecoderLayer {
         if hidden_states.dtype() == DType::F16 {
             hidden_states = clamp_inf(&hidden_states)?;
         }
+        dbg!("cross attn",&hidden_states.mean_all());
 
         let residual = &hidden_states;
         let mut hidden_states = self.post_attention_layernorm.forward(&hidden_states)?;
@@ -503,12 +506,14 @@ impl MLlamaCrossAttentionDecoderLayer {
         if hidden_states.dtype() == DType::F16 {
             hidden_states = clamp_inf(&hidden_states)?;
         }
+        dbg!("cross mlp",&hidden_states.mean_all());
         if let Some(full_text_row_masked_out_mask) = full_text_row_masked_out_mask {
             hidden_states = full_text_row_masked_out_mask
                 .to_dtype(hidden_states.dtype())?
                 .i((.., 0))?
                 .broadcast_mul(&hidden_states)?;
         }
+        dbg!("ftrm",&hidden_states.mean_all());
 
         residual + hidden_states.broadcast_mul(&self.mlp_gate)?
     }
@@ -691,7 +696,7 @@ impl MLlamaTextModel {
                     )?;
                 }
             }
-            dbg!(&hidden_states);
+            dbg!("eol",&hidden_states.mean_all());
         }
 
         hidden_states = hidden_states.to_device(&self.device)?;
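For context, the pattern this commit adds is to log a scalar summary (the mean) of the hidden states after each decoder-layer stage, so that an f16 overflow shows up at the first stage where a non-finite value appears. Below is a minimal standalone sketch of that pattern, assuming only the candle_core crate that mistral.rs builds on; the tensor, its shape, and the label are illustrative stand-ins, not the model's real activations.

// Minimal sketch of the instrumentation pattern (assumption: candle_core crate;
// the shape and label are illustrative, not taken from the model).
use candle_core::{DType, Device, Result, Tensor};

fn main() -> Result<()> {
    let device = Device::Cpu;

    // Stand-in for the hidden states after a decoder-layer stage, cast to f16
    // as in the model's half-precision path.
    let hidden_states = Tensor::randn(0f32, 1f32, (1, 4, 8), &device)?.to_dtype(DType::F16)?;

    // Same call shape as the commit: dbg! echoes each expression with its source
    // location, here the label and the Result<Tensor> holding the mean.
    dbg!("decoder stage", &hidden_states.mean_all());

    Ok(())
}

Because dbg! prints the file and line alongside the value, scanning the log for the first non-finite mean localizes where the overflow is introduced.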
