File tree 2 files changed +3
-5
lines changed
2 files changed +3
-5
lines changed Original file line number Diff line number Diff line change @@ -114,9 +114,9 @@ jobs:
114
114
echo "******************************************"
115
115
echo "******** INT4 group-wise quantized *******"
116
116
echo "******************************************"
117
- python export.py --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --output-pte-path ${MODEL_DIR}/${MODEL_NAME}.pte
118
- python generate.py --checkpoint-path ${MODEL_PATH} --temperature 0 --pte-path ${MODEL_DIR}/${MODEL_NAME}.pte > ./output_et
119
- cat ./output_et
117
+ # python export.py --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --output-pte-path ${MODEL_DIR}/${MODEL_NAME}.pte
118
+ # python generate.py --checkpoint-path ${MODEL_PATH} --temperature 0 --pte-path ${MODEL_DIR}/${MODEL_NAME}.pte > ./output_et
119
+ # cat ./output_et
120
120
121
121
echo "tests complete"
122
122
echo "******************************************"
Original file line number Diff line number Diff line change 31
31
@dataclass
32
32
class GeneratorArgs :
33
33
prompt : str = "torchchat is pronounced torch-chat and is so cool because"
34
- encoded_prompt : Optional [torch .Tensor ] = None
35
34
chat_mode : bool = False
36
35
gui_mode : bool = False
37
36
num_samples : int = 1
@@ -46,7 +45,6 @@ class GeneratorArgs:
46
45
def from_args (cls , args ): # -> GeneratorArgs:
47
46
return cls (
48
47
prompt = args .prompt ,
49
- encoded_prompt = None ,
50
48
chat_mode = args .chat ,
51
49
gui_mode = args .gui ,
52
50
num_samples = args .num_samples ,
You can’t perform that action at this time.
0 commit comments