import os
import torch
# device config: prefer CUDA, then Apple MPS, fall back to CPU
EMBEDDING_DEVICE = ("cuda" if torch.cuda.is_available()
                    else "mps" if torch.backends.mps.is_available() else "cpu")
LLM_DEVICE = ("cuda" if torch.cuda.is_available()
              else "mps" if torch.backends.mps.is_available() else "cpu")
num_gpus = torch.cuda.device_count()
# model cache config
MODEL_CACHE_PATH = os.path.join(os.path.dirname(__file__), 'model_cache')
# vector storage config
VECTOR_STORE_PATH = './vector_store'
COLLECTION_NAME = 'my_collection'
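
# Illustrative sketch only (not part of the original config): one way the two values
# above could be used to open a persistent Chroma collection. The helper name and the
# choice of chromadb are assumptions for illustration.
def _open_vector_store():
    import chromadb  # imported lazily so this config module adds no hard dependency
    client = chromadb.PersistentClient(path=VECTOR_STORE_PATH)
    return client.get_or_create_collection(name=COLLECTION_NAME)
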
# default models to initialize at startup
init_llm = "ChatGLM2-6B"
init_embedding_model = "text2vec-base"
# embedding model name -> Hugging Face repo id
embedding_model_dict = {
"ernie-tiny": "nghuyong/ernie-3.0-nano-zh",
"ernie-base": "nghuyong/ernie-3.0-base-zh",
"ernie-medium": "nghuyong/ernie-3.0-medium-zh",
"ernie-xbase": "nghuyong/ernie-3.0-xbase-zh",
"text2vec-base": "GanymedeNil/text2vec-base-chinese",
    "simbert-base-chinese": "WangZeJun/simbert-base-chinese",
    "paraphrase-multilingual-MiniLM-L12-v2": "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
}
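
# Illustrative sketch only (an assumption, not the project's actual loader): resolving an
# entry of embedding_model_dict and loading it on EMBEDDING_DEVICE via sentence-transformers.
# The helper name is hypothetical.
def _load_embedding_model(name: str = init_embedding_model):
    from sentence_transformers import SentenceTransformer  # lazy import, optional dependency
    return SentenceTransformer(
        embedding_model_dict[name],
        device=EMBEDDING_DEVICE,
        cache_folder=MODEL_CACHE_PATH,
    )
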
# LLM family -> {display name: Hugging Face repo id or local checkpoint path}
llm_model_dict = {
"chatglm2": {
"ChatGLM2-6B": "THUDM/chatglm2-6b",
"ChatGLM2-6B-int4": "THUDM/chatglm2-6b-int4",
},
"chatglm": {
"ChatGLM-6B": "THUDM/chatglm-6b",
"ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
"ChatGLM-6B-int8": "THUDM/chatglm-6b-int8",
"ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe"
},
"belle": {
"BELLE-LLaMA-Local": "/pretrainmodel/belle",
},
"vicuna": {
"Vicuna-Local": "/pretrainmodel/vicuna",
},
"internlm": {
"internlm-chat-7b-8k": "internlm/internlm-chat-7b-8k",
"internlm-chat-7b": "internlm/internlm-chat-7b",
"internlm-chat-7b-v1_1": "internlm/internlm-chat-7b-v1_1",
}
}
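
# Illustrative sketch only (an assumption, not the project's actual loading code): look up
# init_llm in llm_model_dict and load it with Hugging Face transformers on LLM_DEVICE.
# Helper names are hypothetical; quantized and local checkpoints may need extra handling.
def _resolve_llm_path(name: str = init_llm) -> str:
    for models in llm_model_dict.values():
        if name in models:
            return models[name]
    raise KeyError(f"unknown LLM name: {name!r}")


def _load_llm(name: str = init_llm):
    from transformers import AutoModel, AutoTokenizer  # lazy import
    path = _resolve_llm_path(name)
    tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True, cache_dir=MODEL_CACHE_PATH)
    model = AutoModel.from_pretrained(path, trust_remote_code=True, cache_dir=MODEL_CACHE_PATH)
    return tokenizer, model.to(LLM_DEVICE).eval()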