```python
from pymilvus import model
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the embedding model through the sentence-transformers interface
# embedding_path = r"/opt/models/multilingual-e5-large-instruct/"  # production path
embedding_path = r"G:/work/code/models/multilingual-e5-large-instruct/"  # local path
sentence_transformer_ef = model.dense.SentenceTransformerEmbeddingFunction(
    model_name=embedding_path, device=device
)

# Rerank model
# bce_rerank_model_path = r"/opt/models/bce-reranker-base_v1"  # production path
bce_rerank_model_path = r"G:/work/code/models/bce-reranker-base_v1"  # local path
bce_rerank_tokenizer = AutoTokenizer.from_pretrained(bce_rerank_model_path)
bce_rerank_base_model = AutoModelForSequenceClassification.from_pretrained(bce_rerank_model_path).to(device)
```
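As a quick sanity check of the objects loaded above, the sketch below embeds a query and a couple of passages with the pymilvus embedding function, then scores the query/passage pairs with the BCE reranker. This is a minimal, illustrative example under the assumption that the snippet above has already run; the sample texts are made up, and the scoring loop follows the standard cross-encoder pattern (tokenize pairs, read the logits, squash them with a sigmoid).

```python
import torch

# Illustrative sample texts (not from the original article).
query = "What is Milvus?"
passages = [
    "Milvus is an open-source vector database built for similarity search.",
    "BCE reranker models score the relevance of query/passage pairs.",
]

# 1) Dense embeddings via the pymilvus embedding function loaded above.
query_vec = sentence_transformer_ef.encode_queries([query])[0]
passage_vecs = sentence_transformer_ef.encode_documents(passages)
print(len(query_vec))  # multilingual-e5-large-instruct produces 1024-dim vectors

# 2) Rerank the candidate passages with the BCE cross-encoder loaded above.
pairs = [[query, p] for p in passages]
with torch.no_grad():
    inputs = bce_rerank_tokenizer(
        pairs, padding=True, truncation=True, max_length=512, return_tensors="pt"
    ).to(device)
    scores = bce_rerank_base_model(**inputs).logits.view(-1).float()
    scores = torch.sigmoid(scores)  # map raw logits to (0, 1) relevance scores

# Print passages from most to least relevant.
for passage, score in sorted(zip(passages, scores.tolist()), key=lambda x: -x[1]):
    print(f"{score:.4f}  {passage}")
```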