# Log the full argument list this script was invoked with.
# Quoted "$@" preserves each argument as its own word (SC2068);
# unquoted $@ would re-split arguments containing spaces.
echo "$@"
# When cnMirror=1 (set by the caller — not defined in this file), switch
# pip and apt to mainland-China mirrors for faster downloads.
if [[ ${cnMirror} == 1 ]]; then
  # Tsinghua PyPI mirror; exported so child pip processes pick it up.
  export PIP_INDEX_URL="https://pypi.tuna.tsinghua.edu.cn/simple"
  if grep -q "mirrors.aliyun.com" /etc/apt/sources.list; then
    echo "apt cnMirror already set"
  else
    # Rewrite archive.ubuntu.com -> mirrors.aliyun.com. '|' is used as the
    # sed delimiter so the URL slashes need no escaping (same substitution
    # as the escaped-slash form).
    sudo sed -i 's|http://archive.ubuntu.com/ubuntu/|http://mirrors.aliyun.com/ubuntu/|g' /etc/apt/sources.list
    sudo apt update
  fi
fi
# Ensure gcc is present (dpkg -s exits non-zero when the package is not
# installed); install it via apt otherwise.
if dpkg -s "gcc" >/dev/null 2>&1; then
  echo "gcc installed"
else
  sudo apt -y install gcc
fi
# Ensure python3-pip is present; install it via apt otherwise.
if dpkg -s "python3-pip" >/dev/null 2>&1; then
  echo "pip installed"
else
  sudo apt -y install python3-pip
fi
# Ensure ninja-build is present; install it via apt otherwise.
if dpkg -s "ninja-build" >/dev/null 2>&1; then
  echo "ninja installed"
else
  sudo apt -y install ninja-build
fi
# Ensure a CUDA 12.x toolkit is installed. The version probe greps the
# dpkg "Version:" field for "12"; if absent (or cuda not installed at all),
# install CUDA 12.2 from NVIDIA's WSL-Ubuntu local repository.
if dpkg -s "cuda" >/dev/null 2>&1 && dpkg -s "cuda" | grep Version | awk '{print $2}' | grep -q "12"; then
  echo "cuda 12 installed"
else
  # Pin file gives the NVIDIA repo priority over Ubuntu's packages.
  wget -N https://developer.download.nvidia.com/compute/cuda/repos/wsl-ubuntu/x86_64/cuda-wsl-ubuntu.pin
  sudo mv cuda-wsl-ubuntu.pin /etc/apt/preferences.d/cuda-repository-pin-600
  # -N skips the download if a same-or-newer local copy already exists.
  wget -N https://developer.download.nvidia.com/compute/cuda/12.2.0/local_installers/cuda-repo-wsl-ubuntu-12-2-local_12.2.0-1_amd64.deb
  sudo dpkg -i cuda-repo-wsl-ubuntu-12-2-local_12.2.0-1_amd64.deb
  # Register the repo's signing key so apt trusts the local repository.
  sudo cp /var/cuda-repo-wsl-ubuntu-12-2-local/cuda-*-keyring.gpg /usr/share/keyrings/
  sudo apt-get update
  sudo apt-get -y install cuda
fi
|
|
|
if python3 -c "import pkg_resources; pkg_resources.require(open('./finetune/requirements.txt',mode='r'))" &>/dev/null; then
|
|
|
|
echo "requirements satisfied"
|
|
|
|
else
|
|
|
|
python3 -m pip install -r ./finetune/requirements.txt
|
|
|
|
fi
|
|
|
|
|
|
|
|
# loadModel is supplied by the caller (not defined in this file).
echo "loading $loadModel"

# get_layer_and_embd.py inspects the checkpoint and prints the training
# script name plus --n_layer/--n_embd flags for it. The trailing 5.2 is an
# opaque tuning argument to that script — presumably a memory budget;
# TODO(review): confirm against get_layer_and_embd.py.
modelInfo=$(python3 ./finetune/get_layer_and_embd.py "$loadModel" 5.2)
echo "$modelInfo"

# A valid result must contain a --n_layer flag; anything else means the
# probe failed and we must not launch training.
if [[ $modelInfo =~ "--n_layer" ]]; then
  # $modelInfo is intentionally unquoted: it expands to the script filename
  # followed by its flags and must word-split into separate arguments.
  python3 ./finetune/lora/$modelInfo "$@" --proj_dir lora-models --data_type binidx --lora \
    --lora_parts=att,ffn,time,ln --strategy deepspeed_stage_2 --accelerator gpu
else
  echo "modelInfo is invalid"
  exit 1
fi