tensorflow/python/training: 18 files changed, +33 -32 lines changed

@@ -612,10 +612,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(
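
The edit is identical in every file shown: the removed helper, sharded_saver = tf_config != None, enabled checkpoint sharding only when a TF_CONFIG cluster spec was present, while the new code passes sharded=True unconditionally. Below is a minimal before/after sketch of the saver construction (TF 1.x API; keep_checkpoint_max stands in for the args.keep_checkpoint_max flag, and the TF_CONFIG parsing is an assumption for illustration, not code taken from this diff):

import json
import os

import tensorflow as tf

keep_checkpoint_max = 5           # stand-in for args.keep_checkpoint_max
_ = tf.Variable(0, name="dummy")  # a variable so the Saver has something to save

# Before: shard only when a cluster spec was supplied via TF_CONFIG.
tf_config = json.loads(os.environ["TF_CONFIG"]) if "TF_CONFIG" in os.environ else None
old_saver = tf.train.Saver(max_to_keep=keep_checkpoint_max,
                           sharded=tf_config is not None)

# After: always request a sharded saver. With a single device this still
# writes a single shard, so local (non-distributed) runs keep working.
new_saver = tf.train.Saver(max_to_keep=keep_checkpoint_max, sharded=True)

Note that the removed guard compared with != None; is not None would be the idiomatic form, but the diff deletes the line outright rather than fixing the comparison.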

@@ -527,10 +527,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -594,10 +594,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -610,10 +610,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -472,10 +472,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -776,10 +776,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -594,10 +594,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -507,10 +507,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -478,10 +478,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=steps)
     log_hook = tf.train.LoggingTensorHook(

@@ -534,10 +534,9 @@ def train(sess_config,
     hooks = []
     hooks.extend(input_hooks)
 
-    sharded_saver = tf_config != None
     scaffold = tf.train.Scaffold(
         local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
-        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=sharded_saver))
+        saver=tf.train.Saver(max_to_keep=args.keep_checkpoint_max, sharded=True))
 
     stop_hook = tf.train.StopAtStepHook(last_step=train_steps)
     log_hook = tf.train.LoggingTensorHook(
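
For completeness, the pieces that appear in every hunk (the scaffold, the stop hook, and the logging hook) are normally wired into a MonitoredTrainingSession. This is a hedged, self-contained sketch rather than code from the changed files: the training op, step limit, logging interval, and checkpoint directory are placeholders, and the data_init_op from the diff is omitted since no input pipeline is built here.

import tensorflow as tf

global_step = tf.train.get_or_create_global_step()
train_op = tf.assign_add(global_step, 1)  # placeholder for the real training op

scaffold = tf.train.Scaffold(
    local_init_op=tf.local_variables_initializer(),
    saver=tf.train.Saver(max_to_keep=5, sharded=True))

stop_hook = tf.train.StopAtStepHook(last_step=100)  # mirrors last_step=steps / train_steps
log_hook = tf.train.LoggingTensorHook({"step": global_step}, every_n_iter=10)

with tf.train.MonitoredTrainingSession(checkpoint_dir="/tmp/ckpt",  # placeholder directory
                                       scaffold=scaffold,
                                       hooks=[stop_hook, log_hook]) as sess:
    while not sess.should_stop():
        sess.run(train_op)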