| 123456789101112131415161718192021222324252627282930313233343536 |
from pymilvus import model
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Module-level model bootstrap: everything below runs once at import time and
# exposes `sentence_transformer_ef`, `bce_rerank_tokenizer` and
# `bce_rerank_base_model` to the rest of the package.
# NOTE(review): device index 1 is hard-coded — confirm the deployment host
# actually has at least two GPUs, otherwise torch will raise at load time.
device = "cuda:1" if torch.cuda.is_available() else "cpu"

# Dense embedding model, loaded through the sentence-transformers backend.
# Alternative paths kept for reference across deployment environments:
# embedding_path = r"/work/models/multilingual-e5-large-instruct"  # production path
# embedding_path = r"G:/work/code/models/multilingual-e5-large-instruct/"  # local path
embedding_path = "/opt/models/multilingual-e5-large-instruct/"  # jk production path
sentence_transformer_ef = model.dense.SentenceTransformerEmbeddingFunction(
    model_name=embedding_path, device=device
)

# Rerank model (BCE cross-encoder): tokenizer + sequence-classification head
# moved onto the selected device.
# bce_rerank_model_path = r"/work/models/bce-reranker-base_v1"  # production path
# bce_rerank_model_path = r"G:/work/code/models/bce-reranker-base_v1"  # local path
bce_rerank_model_path = r"/opt/models/bce-reranker-base_v1"  # jk production path
bce_rerank_tokenizer = AutoTokenizer.from_pretrained(bce_rerank_model_path)
bce_rerank_base_model = AutoModelForSequenceClassification.from_pretrained(bce_rerank_model_path).to(device)
|