@@ -13,114 +13,97 @@ Before starting fine-tuning, please ensure your machine meets the minimum hardwa
       <th style={{ textAlign: "center" }}>Model</th>
       <th style={{ textAlign: "center" }}>Training Type</th>
       <th style={{ textAlign: "center" }}>Distribution Strategy</th>
-      <th style={{ textAlign: "center" }}>Mixed Precision</th>
       <th style={{ textAlign: "center" }}>Training Resolution (FxHxW)</th>
-      <th style={{ textAlign: "center" }}>Hardware Requirements</th>
+      <th style={{ textAlign: "center" }}>Requirement</th>
     </tr>
   </thead>
   <tbody>
     <tr>
       <td rowspan="6">cogvideox-t2v-2b</td>
-      <td>lora (rank128)</td>
+      <td>lora</td>
       <td>DDP</td>
-      <td>fp16</td>
       <td>49x480x720</td>
       <td>16GB VRAM</td>
     </tr>
     <tr>
       <td rowspan="5">sft</td>
       <td>DDP</td>
-      <td>fp16</td>
       <td>49x480x720</td>
       <td>36GB VRAM</td>
     </tr>
     <tr>
       <td>1-GPU zero-2 + opt offload</td>
-      <td>fp16</td>
       <td>49x480x720</td>
       <td>17GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-2</td>
-      <td>fp16</td>
       <td>49x480x720</td>
       <td>17GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3</td>
-      <td>fp16</td>
       <td>49x480x720</td>
       <td>19GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3 + opt and param offload</td>
-      <td>bf16</td>
       <td>49x480x720</td>
       <td>14GB VRAM</td>
     </tr>
     <tr>
       <td rowspan="5">cogvideox-\{t2v,i2v\}-5b</td>
-      <td>lora (rank128)</td>
+      <td>lora</td>
       <td>DDP</td>
-      <td>bf16</td>
       <td>49x480x720</td>
       <td>24GB VRAM</td>
     </tr>
     <tr>
       <td rowspan="4">sft</td>
       <td>1-GPU zero-2 + opt offload</td>
-      <td>bf16</td>
       <td>49x480x720</td>
       <td>42GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-2</td>
-      <td>bf16</td>
       <td>49x480x720</td>
       <td>42GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3</td>
-      <td>bf16</td>
       <td>49x480x720</td>
       <td>43GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3 + opt and param offload</td>
-      <td>bf16</td>
       <td>49x480x720</td>
       <td>28GB VRAM</td>
     </tr>
     <tr>
       <td rowspan="5">cogvideox1.5-\{t2v,i2v\}-5b</td>
-      <td>lora (rank128)</td>
+      <td>lora</td>
       <td>DDP</td>
-      <td>bf16</td>
       <td>81x768x1360</td>
       <td>35GB VRAM</td>
     </tr>
     <tr>
       <td rowspan="4">sft</td>
       <td>1-GPU zero-2 + opt offload</td>
-      <td>bf16</td>
       <td>81x768x1360</td>
       <td>56GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-2</td>
-      <td>bf16</td>
       <td>81x768x1360</td>
       <td>55GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3</td>
-      <td>bf16</td>
       <td>81x768x1360</td>
       <td>55GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3 + opt and param offload</td>
-      <td>bf16</td>
       <td>81x768x1360</td>
       <td>40GB VRAM</td>
     </tr>
@@ -135,42 +118,42 @@ Before starting fine-tuning, please ensure your machine meets the minimum hardwa
       <th style={{ textAlign: "center" }}>Model</th>
       <th style={{ textAlign: "center" }}>Training Type</th>
       <th style={{ textAlign: "center" }}>Distribution Strategy</th>
-      <th style={{ textAlign: "center" }}>Mixed Precision</th>
       <th style={{ textAlign: "center" }}>Training Resolution (HxW)</th>
-      <th style={{ textAlign: "center" }}>Hardware Requirements</th>
+      <th style={{ textAlign: "center" }}>Requirement</th>
     </tr>
   </thead>
   <tbody>
     <tr>
-      <td rowspan="5">CogView4-6B</td>
-      <td>lora (rank128)</td>
+      <td rowspan="6">CogView4-6B</td>
+      <td>qlora + param offload<br />(`--low_vram`)</td>
+      <td>DDP</td>
+      <td>1024x1024</td>
+      <td>9GB VRAM</td>
+    </tr>
+    <tr>
+      <td>lora</td>
       <td>DDP</td>
-      <td>bf16</td>
       <td>1024x1024</td>
       <td>30GB VRAM</td>
     </tr>
     <tr>
       <td rowspan="4">sft</td>
       <td>1-GPU zero-2 + opt offload</td>
-      <td>bf16</td>
       <td>1024x1024</td>
       <td>42GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-2</td>
-      <td>bf16</td>
       <td>1024x1024</td>
       <td>50GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3</td>
-      <td>bf16</td>
       <td>1024x1024</td>
       <td>47GB VRAM</td>
     </tr>
     <tr>
       <td>8-GPU zero-3 + opt and param offload</td>
-      <td>bf16</td>
       <td>1024x1024</td>
       <td>28GB VRAM</td>
     </tr>