openaiのバージョンチェック (check the installed openai version)

main
NON906 2024-03-29 10:54:51 +09:00
parent 1a7222ca77
commit ce8cce4823
1 changed files with 14 additions and 6 deletions

View File

@@ -4,9 +4,6 @@
# Dependency bootstrap for a stable-diffusion-webui-style extension
# (install.py convention): `launch` is presumably the webui launcher module
# providing is_installed()/run_pip() — TODO confirm against the host app.
import launch
import os
# NOTE(review): this span is a diff hunk whose +/- markers were stripped by
# scraping; the hunk header (-4,9 +4,6) implies deletions, so one or more of
# the lines below may be pre-change code rather than the current file.
if not launch.is_installed('openai'):
# Pinned to 0.28.1 — presumably because the openai 1.x API is
# incompatible with this extension; verify against its openai usage.
launch.run_pip('install openai==0.28.1', 'openai')
if not launch.is_installed('langchain'):
launch.run_pip('install langchain', 'langchain')
@@ -24,8 +21,12 @@ if launch.args.api:
# Inspect `pip list` output to find the installed torch build, so a
# CUDA-matched llama-cpp-python wheel can be selected further down.
pip_list_str = launch.run('pip list')
pip_list_lines = pip_list_str.splitlines()
# NOTE(review): stripped diff hunk — the next two lines are the pre-change
# (deleted) torch lookup, which raises IndexError when torch is absent;
# the None-guarded replacement follows immediately after.
torch_version = [item for item in pip_list_lines if item.startswith('torch')][0].split()[-1]
if '+cu' in torch_version:
torch_lines = [item for item in pip_list_lines if item.startswith('torch')]
torch_version = None
if torch_lines and len(torch_lines) > 0:
# `pip list` rows are "name  version"; the last whitespace-separated
# field is the version string.
torch_version = torch_lines[0].split()[-1]
if torch_version is not None and '+cu' in torch_version:
# A '+cuXYZ' local-version suffix encodes the CUDA toolkit build,
# e.g. '2.1.0+cu121' -> '121'.
cuda_version = torch_version.split('+cu')[-1]
llama_cpp_versions = [item for item in pip_list_lines if item.startswith('llama_cpp_python')]
# NOTE(review): the body of this `if` is cut off by the next hunk header;
# the remainder of this branch is not visible in this capture.
if len(llama_cpp_versions) > 0:
@@ -41,4 +42,11 @@ else:
# Install a CPU build of llama-cpp-python, preferring the AVX2 wheel and
# falling back to the basic (non-AVX2) wheel when that install fails.
# NOTE(review): the scraped diff showed the fallback line duplicated
# verbatim (old/new diff rows with markers stripped); it is emitted once here.
try:
    launch.run_pip('install llama-cpp-python==0.2.36 --prefer-binary --extra-index-url=https://jllllll.github.io/llama-cpp-python-cuBLAS-wheels/AVX2/cpu', 'llama-cpp-python')
except Exception:
    # Bare `except:` narrowed to Exception so Ctrl-C/SystemExit still abort.
    launch.run_pip('install llama-cpp-python==0.2.36 --prefer-binary --extra-index-url=https://jllllll.github.io/llama-cpp-python-cuBLAS-wheels/basic/cpu', 'llama-cpp-python')
# Enforce the openai==0.28.1 pin: read the installed version from the
# `pip list` output captured above and reinstall on any mismatch (including
# "not installed", i.e. openai_version stays None).
# NOTE(review): startswith('openai') also matches packages such as
# 'openai-whisper'; this relies on plain 'openai' sorting first in
# `pip list` output — TODO confirm.
openai_lines = [item for item in pip_list_lines if item.startswith('openai')]
openai_version = None
if openai_lines:
    # `pip list` rows are "name  version"; last field is the version.
    openai_version = openai_lines[0].split()[-1]
# None != '0.28.1' is True, so a separate None check is unnecessary.
if openai_version != '0.28.1':
    launch.run_pip('install -U openai==0.28.1', 'openai')