Fix: minicpmv2 (#1705)
* update minicpm

* flash_attn implementation

* remove flash attn
Samoed authored Jan 6, 2025
1 parent 4a1c8e6 commit 222bb35
Showing 1 changed file with 7 additions and 0 deletions.
mteb/models/misc_models.py (7 additions, 0 deletions)
@@ -2,6 +2,8 @@
 
 from functools import partial
 
+import torch
+
 from mteb.model_meta import ModelMeta, sentence_transformers_loader
 
 Haon_Chen__speed_embedding_7b_instruct = ModelMeta(
@@ -1556,6 +1558,11 @@
         sentence_transformers_loader,
         model_name="openbmb/MiniCPM-Embedding",
         revision="c0cb2de33fb366e17c30f9d53142ff11bc18e049",
+        # https://huggingface.co/openbmb/MiniCPM-Embedding/blob/c0cb2de33fb366e17c30f9d53142ff11bc18e049/README.md?code=true#L405
+        model_kwargs={
+            # "attn_implementation": "flash_attention_2",
+            "torch_dtype": torch.float16,
+        },
         trust_remote_code=True,
     ),
     name="openbmb/MiniCPM-Embedding",
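For context, a minimal sketch of what the added model_kwargs amounts to at load time, assuming sentence_transformers_loader ultimately forwards model_kwargs to SentenceTransformer; the direct SentenceTransformer call and the encode example below are illustrative, not the exact mteb internals:

import torch
from sentence_transformers import SentenceTransformer

# Load the weights in fp16, following the README linked in the committed comment;
# the flash_attention_2 option stays commented out, mirroring the change above.
model = SentenceTransformer(
    "openbmb/MiniCPM-Embedding",
    revision="c0cb2de33fb366e17c30f9d53142ff11bc18e049",
    trust_remote_code=True,
    model_kwargs={
        # "attn_implementation": "flash_attention_2",
        "torch_dtype": torch.float16,
    },
)

embeddings = model.encode(["example sentence"])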
