Skip to content

Commit 5a226bd

Browse files
authored
remove redundancy & remove int4 linear test from ET tests (#237)
* remove redundancy
* no int4 linear on ET
1 parent 94c5d8e commit 5a226bd

File tree

2 files changed

+3
-5
lines changed

2 files changed

+3
-5
lines changed

.github/workflows/et.yml

+3-3
Original file line number | Diff line number | Diff line change
@@ -114,9 +114,9 @@ jobs:
114114
echo "******************************************"
115115
echo "******** INT4 group-wise quantized *******"
116116
echo "******************************************"
117-
python export.py --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --output-pte-path ${MODEL_DIR}/${MODEL_NAME}.pte
118-
python generate.py --checkpoint-path ${MODEL_PATH} --temperature 0 --pte-path ${MODEL_DIR}/${MODEL_NAME}.pte > ./output_et
119-
cat ./output_et
117+
# python export.py --quant '{"linear:int4" : {"groupsize": 32}}' --checkpoint-path ${MODEL_PATH} --output-pte-path ${MODEL_DIR}/${MODEL_NAME}.pte
118+
# python generate.py --checkpoint-path ${MODEL_PATH} --temperature 0 --pte-path ${MODEL_DIR}/${MODEL_NAME}.pte > ./output_et
119+
# cat ./output_et
120120
121121
echo "tests complete"
122122
echo "******************************************"

generate.py

-2
Original file line number | Diff line number | Diff line change
@@ -31,7 +31,6 @@
3131
@dataclass
3232
class GeneratorArgs:
3333
prompt: str = "torchchat is pronounced torch-chat and is so cool because"
34-
encoded_prompt: Optional[torch.Tensor] = None
3534
chat_mode: bool = False
3635
gui_mode: bool = False
3736
num_samples: int = 1
@@ -46,7 +45,6 @@ class GeneratorArgs:
4645
def from_args(cls, args): # -> GeneratorArgs:
4746
return cls(
4847
prompt=args.prompt,
49-
encoded_prompt=None,
5048
chat_mode=args.chat,
5149
gui_mode=args.gui,
5250
num_samples=args.num_samples,

0 commit comments

Comments (0)