knowledgebase/llm.py
@@ -7,9 +7,9 @@ from langchain_openai.chat_models import ChatOpenAI
 llm = ChatOpenAI(temperature=0,
-                 model="qwen2.5-72b-instruct",
-                 base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
-                 api_key="sk-15ecf7e273ad4b729c7f7f42b542749e")
+                 model="Qwen2.5-72B-Instruct-AWQ",
+                 base_url="http://10.74.15.171:8000/v1",
+                 api_key="EMPTY")
 vision_llm = ChatOpenAI(temperature=0,
                         model="qwen2.5-vl-32b-instruct",
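
For reference, a minimal sketch of what knowledgebase/llm.py could look like after this hunk, with the endpoint and key read from environment variables rather than hard-coded. The variable names QWEN_BASE_URL and QWEN_API_KEY are illustrative assumptions, not part of the diff (which hard-codes the values), and vision_llm is omitted because its remaining constructor arguments fall outside this hunk.

# Sketch only, assuming env-var overrides; defaults mirror the values in the diff.
import os

from langchain_openai.chat_models import ChatOpenAI

llm = ChatOpenAI(
    temperature=0,
    model="Qwen2.5-72B-Instruct-AWQ",
    # Local OpenAI-compatible endpoint (e.g. a vLLM server); "EMPTY" is the usual
    # placeholder key for servers that do not enforce authentication.
    base_url=os.getenv("QWEN_BASE_URL", "http://10.74.15.171:8000/v1"),
    api_key=os.getenv("QWEN_API_KEY", "EMPTY"),
)

if __name__ == "__main__":
    # Quick smoke test that the endpoint is reachable and serving the model.
    print(llm.invoke("Reply with the single word: ok").content)

Reading the key from the environment also keeps credentials like the removed DashScope token out of source control while leaving the rest of the module unchanged.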