Specify absolute path in configs

This commit is contained in:
Sypherd 2023-09-19 08:00:33 -06:00
parent ef1ac08ce3
commit 894505a1af
10 changed files with 10 additions and 10 deletions

View File

@@ -28,7 +28,7 @@ pip install git+https://github.com/lm-sys/FastChat.git@v0.1.10
 Then, run the following command to create the final working weight
 ```
-python -m fastchat.model.apply_delta --base /path/to/llama-13bOR7b-hf/ --target /path/to/save/working/vicuna/weight/ --delta /path/to/vicuna-13bOR7b-delta-v0/
+python -m fastchat.model.apply_delta --base /absolute/path/to/llama-13bOR7b-hf/ --target /absolute/path/to/save/working/vicuna/weight/ --delta /absolute/path/to/vicuna-13bOR7b-delta-v0/
 ```
 Now you are good to go!

View File

@@ -21,7 +21,7 @@ laion_synthetic_filtered_large.json
 ### setup the dataset folder and move the annotation file to the data storage folder
 ```
-export MINIGPT4_DATASET=/YOUR/PATH/FOR/LARGE/DATASET/
+export MINIGPT4_DATASET=/YOUR/ABSOLUTE/PATH/FOR/LARGE/DATASET/
 mkdir ${MINIGPT4_DATASET}/cc_sbu
 mkdir ${MINIGPT4_DATASET}/laion
 mv ccs_synthetic_filtered_large.json ${MINIGPT4_DATASET}/cc_sbu

View File

@@ -5,7 +5,7 @@ model:
 end_sym: "</s>"
 low_resource: True
 prompt_template: '[INST] {} [/INST] '
-ckpt: '/path/to/checkpoint/'
+ckpt: '/absolute/path/to/checkpoint/'
 datasets:

View File

@@ -2,4 +2,4 @@ datasets:
 cc_sbu_align:
 data_type: images
 build_info:
-storage: /path/to/cc_sbu_align/
+storage: /absolute/path/to/cc_sbu_align/

View File

@@ -2,4 +2,4 @@ datasets:
 cc_sbu:
 data_type: images
 build_info:
-storage: /path/to/cc_sbu_dataset/{00000..01255}.tar
+storage: /absolute/path/to/cc_sbu_dataset/{00000..01255}.tar

View File

@@ -2,4 +2,4 @@ datasets:
 laion:
 data_type: images
 build_info:
-storage: /path/to/laion_dataset/{00000..10488}.tar
+storage: /absolute/path/to/laion_dataset/{00000..10488}.tar

View File

@@ -12,7 +12,7 @@ model:
 # generation configs
 prompt: ""
-llama_model: "/path/to/llama2/weight"
+llama_model: "/absolute/path/to/llama2/weight"
 preprocess:
 vis_processor:

View File

@@ -15,7 +15,7 @@ model:
 # generation configs
 prompt: ""
-llama_model: "/path/to/vicuna/weight"
+llama_model: "/absolute/path/to/vicuna/weight"
 preprocess:
 vis_processor:

View File

@@ -6,7 +6,7 @@ model:
 end_sym: "</s>"
 prompt_path: "prompts/alignment.txt"
 prompt_template: '[INST] {} [/INST] '
-ckpt: '/path/to/stage1/checkpoint/'
+ckpt: '/absolute/path/to/stage1/checkpoint/'
 datasets:

View File

@@ -6,7 +6,7 @@ model:
 end_sym: "###"
 prompt_path: "prompts/alignment.txt"
 prompt_template: '###Human: {} ###Assistant: '
-ckpt: '/path/to/stage1/checkpoint/'
+ckpt: '/absolute/path/to/stage1/checkpoint/'
 datasets: