forked from abhijitmishra/Thought2Text
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path run_inference_chance.sh
More file actions
29 lines (25 loc) · 999 Bytes
/
run_inference_chance.sh
File metadata and controls
29 lines (25 loc) · 999 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
#!/bin/bash
# Run chance-level inference for an LLM backbone and write per-model CSV
# results under results/.
#
# Usage: ./run_inference_chance.sh <hf-model-id>
#   e.g. ./run_inference_chance.sh mistralai/Mistral-7B-Instruct-v0.3
set -euo pipefail

# Require the model id; previously an empty/missing $1 silently produced
# bogus paths like "all_models/_all".
if [[ $# -lt 1 || -z "$1" ]]; then
  printf 'Usage: %s <llm-model-id>\n' "${0##*/}" >&2
  exit 2
fi

# Candidate models kept for reference:
#llms=("mistralai/Mistral-7B-Instruct-v0.3" "meta-llama/Meta-Llama-3-8B-Instruct" "Qwen/Qwen2.5-7B-Instruct")
llms=("$1")

mkdir -p results

# Loop over each LLM model
for llm in "${llms[@]}"; do
  # Short model name = part after the last '/'. Parameter expansion avoids
  # spawning echo|awk, and unlike awk's '{print $2}' it also handles model
  # ids that contain no '/' (returns the whole id instead of empty).
  llm_name=${llm##*/}
  model_path="all_models/${llm_name}_all"
  results_csv="results/results_${llm_name}_chance1.csv"

  # Run inference; report which model failed (set -e alone would exit
  # without naming it). Diagnostics go to stderr.
  if ! python inference_chance.py --model_path "$model_path" \
      --eeg_dataset data/block/eeg_55_95_std.pth \
      --image_dir data/images/ \
      --dest "$results_csv" \
      --splits_path data/block/block_splits_by_image_all.pth; then
    printf 'Error with LLM %s\n' "$llm" >&2
    exit 1
  fi
done