@@ -321,7 +321,7 @@ def build_branch_net(self):
         if callable(self.layer_size_func[1]):
             # User-defined network
             return self.layer_size_func[1](self.X_func)
-
+
         if self.stacked:
             # Stacked fully connected network
             return self._build_stacked_branch_net()
@@ -422,15 +422,14 @@ def _dense(
         regularizer=None,
         trainable=True,
     ):
-        return tf.layers.dense(
-            inputs,
+        return tf.keras.layers.Dense(
             units,
             activation=activation,
             use_bias=use_bias,
             kernel_initializer=self.kernel_initializer,
             kernel_regularizer=regularizer,
             trainable=trainable,
-        )
+        )(inputs)

     def _stacked_dense(
         self, inputs, units, stack_size, activation=None, use_bias=True, trainable=True
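The hunk above swaps the functional `tf.layers.dense` call (input tensor passed as the first argument) for the object-oriented `tf.keras.layers.Dense`, which is constructed from the layer hyperparameters and then called on the input tensor; the keyword arguments (`activation`, `use_bias`, `kernel_initializer`, `kernel_regularizer`, `trainable`) carry over unchanged, so `_dense` keeps its signature. A minimal standalone sketch of that pattern, assuming TensorFlow 2.x; the layer width, input shape, and initializer below are illustrative only and not taken from this PR:

    # Migration pattern: construct the layer, then apply it to the inputs.
    import tensorflow as tf

    x = tf.random.normal((8, 40))  # a batch of 8 inputs with 40 features

    # Old style (tf.compat.v1): the input tensor is the first argument.
    #   y = tf.compat.v1.layers.dense(x, 128, activation=tf.nn.relu)
    # New style: build the Dense layer object, then call it on x.
    dense = tf.keras.layers.Dense(
        128,
        activation=tf.nn.relu,
        kernel_initializer="glorot_normal",
        trainable=True,
    )
    y = dense(x)  # equivalent forward pass; y has shape (8, 128)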
@@ -637,25 +636,23 @@ def build_branch_net(self):
         else:
             # Fully connected network
             for i in range(1, len(self.layer_size_func) - 1):
-                y_func = tf.layers.dense(
-                    y_func,
+                y_func = tf.keras.layers.Dense(
                     self.layer_size_func[i],
                     activation=self.activation_branch,
                     kernel_initializer=self.kernel_initializer,
                     kernel_regularizer=self.regularizer,
-                )
+                )(y_func)
                 if self.dropout_rate_branch[i - 1] > 0:
                     y_func = tf.layers.dropout(
                         y_func,
                         rate=self.dropout_rate_branch[i - 1],
                         training=self.training,
                     )
-            y_func = tf.layers.dense(
-                y_func,
+            y_func = tf.keras.layers.Dense(
                 self.layer_size_func[-1],
                 kernel_initializer=self.kernel_initializer,
                 kernel_regularizer=self.regularizer,
-            )
+            )(y_func)
         return y_func

     def build_trunk_net(self):
@@ -664,13 +661,12 @@ def build_trunk_net(self):
         if self._input_transform is not None:
             y_loc = self._input_transform(y_loc)
         for i in range(1, len(self.layer_size_loc)):
-            y_loc = tf.layers.dense(
-                y_loc,
+            y_loc = tf.keras.layers.Dense(
                 self.layer_size_loc[i],
                 activation=self.activation_trunk,
                 kernel_initializer=self.kernel_initializer,
                 kernel_regularizer=self.regularizer,
-            )
+            )(y_loc)
             if self.dropout_rate_trunk[i - 1] > 0:
                 y_loc = tf.layers.dropout(
                     y_loc, rate=self.dropout_rate_trunk[i - 1], training=self.training
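The branch and trunk hunks repeat the same call-then-apply pattern inside their loops. A standalone sketch of the looped version, with a hypothetical layer-size list and activation standing in for `self.layer_size_loc` and `self.activation_trunk` (TensorFlow 2.x assumed):

    import tensorflow as tf

    # Hypothetical stand-ins for self.layer_size_loc / self.activation_trunk.
    layer_size_loc = [2, 64, 64, 128]
    activation_trunk = tf.nn.tanh

    y_loc = tf.random.normal((16, layer_size_loc[0]))  # illustrative input batch
    for i in range(1, len(layer_size_loc)):
        # Build a Dense layer per iteration and immediately apply it to y_loc,
        # mirroring the trunk-net loop in the diff above.
        y_loc = tf.keras.layers.Dense(
            layer_size_loc[i],
            activation=activation_trunk,
            kernel_initializer="glorot_normal",
        )(y_loc)
    print(y_loc.shape)  # (16, 128)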