@@ -21,6 +21,13 @@
 
 from tensorboard.plugins.graph import keras_util
 
+# Stay on Keras 2 for now: https://github.com/keras-team/keras/issues/18467.
+version_fn = getattr(tf.keras, "version", None)
+if version_fn and version_fn().startswith("3."):
+    import tf_keras as keras  # Keras 2
+else:
+    keras = tf.keras  # Keras 2
+
 
 class KerasUtilTest(tf.test.TestCase):
     def assertGraphDefToModel(self, expected_proto, model):
@@ -112,12 +119,12 @@ def DISABLED_test_keras_model_to_graph_def_sequential_model(self):
           }
         }
         """
-        model = tf.keras.models.Sequential(
+        model = keras.models.Sequential(
             [
-                tf.keras.layers.Dense(32, input_shape=(784,)),
-                tf.keras.layers.Activation("relu", name="my_relu"),
-                tf.keras.layers.Dense(10),
-                tf.keras.layers.Activation("softmax"),
+                keras.layers.Dense(32, input_shape=(784,)),
+                keras.layers.Activation("relu", name="my_relu"),
+                keras.layers.Dense(10),
+                keras.layers.Activation("softmax"),
             ]
         )
         self.assertGraphDefToModel(expected_proto, model)
@@ -188,12 +195,12 @@ def test_keras_model_to_graph_def_functional_model(self):
           }
         }
         """
-        inputs = tf.keras.layers.Input(shape=(784,), name="functional_input")
-        d0 = tf.keras.layers.Dense(64, activation="relu")
-        d1 = tf.keras.layers.Dense(64, activation="relu")
-        d2 = tf.keras.layers.Dense(64, activation="relu")
+        inputs = keras.layers.Input(shape=(784,), name="functional_input")
+        d0 = keras.layers.Dense(64, activation="relu")
+        d1 = keras.layers.Dense(64, activation="relu")
+        d2 = keras.layers.Dense(64, activation="relu")
 
-        model = tf.keras.models.Model(
+        model = keras.models.Model(
             inputs=inputs, outputs=d2(d1(d0(inputs))), name="model"
         )
         self.assertGraphDefToModel(expected_proto, model)
@@ -265,12 +272,12 @@ def test_keras_model_to_graph_def_functional_model_with_cycle(self):
           }
         }
         """
-        inputs = tf.keras.layers.Input(shape=(784,), name="cycle_input")
-        d0 = tf.keras.layers.Dense(64, activation="relu")
-        d1 = tf.keras.layers.Dense(64, activation="relu")
-        d2 = tf.keras.layers.Dense(64, activation="relu")
+        inputs = keras.layers.Input(shape=(784,), name="cycle_input")
+        d0 = keras.layers.Dense(64, activation="relu")
+        d1 = keras.layers.Dense(64, activation="relu")
+        d2 = keras.layers.Dense(64, activation="relu")
 
-        model = tf.keras.models.Model(
+        model = keras.models.Model(
             inputs=inputs, outputs=d1(d2(d1(d0(inputs)))), name="model"
         )
         self.assertGraphDefToModel(expected_proto, model)
@@ -309,10 +316,10 @@ def test_keras_model_to_graph_def_lstm_model(self):
           }
         }
         """
-        inputs = tf.keras.layers.Input(shape=(None, 5), name="lstm_input")
-        encoder = tf.keras.layers.SimpleRNN(256)
+        inputs = keras.layers.Input(shape=(None, 5), name="lstm_input")
+        encoder = keras.layers.SimpleRNN(256)
 
-        model = tf.keras.models.Model(
+        model = keras.models.Model(
             inputs=inputs, outputs=encoder(inputs), name="model"
         )
         self.assertGraphDefToModel(expected_proto, model)
@@ -447,25 +454,25 @@ def DISABLED_test_keras_model_to_graph_def_nested_sequential_model(self):
           }
         }
         """
-        sub_sub_model = tf.keras.models.Sequential(
+        sub_sub_model = keras.models.Sequential(
             [
-                tf.keras.layers.Dense(32, input_shape=(784,)),
-                tf.keras.layers.Activation("relu"),
+                keras.layers.Dense(32, input_shape=(784,)),
+                keras.layers.Activation("relu"),
             ]
         )
 
-        sub_model = tf.keras.models.Sequential(
+        sub_model = keras.models.Sequential(
             [
                 sub_sub_model,
-                tf.keras.layers.Activation("relu", name="my_relu"),
+                keras.layers.Activation("relu", name="my_relu"),
             ]
         )
 
-        model = tf.keras.models.Sequential(
+        model = keras.models.Sequential(
             [
                 sub_model,
-                tf.keras.layers.Dense(10),
-                tf.keras.layers.Activation("softmax"),
+                keras.layers.Dense(10),
+                keras.layers.Activation("softmax"),
             ]
         )
 
@@ -601,27 +608,27 @@ def test_keras_model_to_graph_def_functional_multi_inputs(self):
           }
         }
         """
-        main_input = tf.keras.layers.Input(
+        main_input = keras.layers.Input(
             shape=(100,), dtype="int32", name="main_input"
         )
-        x = tf.keras.layers.Embedding(
+        x = keras.layers.Embedding(
             output_dim=512, input_dim=10000, input_length=100
         )(main_input)
-        rnn_out = tf.keras.layers.SimpleRNN(32)(x)
+        rnn_out = keras.layers.SimpleRNN(32)(x)
 
-        auxiliary_output = tf.keras.layers.Dense(
+        auxiliary_output = keras.layers.Dense(
             1, activation="sigmoid", name="aux_output"
         )(rnn_out)
-        auxiliary_input = tf.keras.layers.Input(shape=(5,), name="aux_input")
+        auxiliary_input = keras.layers.Input(shape=(5,), name="aux_input")
 
-        x = tf.keras.layers.concatenate([rnn_out, auxiliary_input])
-        x = tf.keras.layers.Dense(64, activation="relu")(x)
+        x = keras.layers.concatenate([rnn_out, auxiliary_input])
+        x = keras.layers.Dense(64, activation="relu")(x)
 
-        main_output = tf.keras.layers.Dense(
+        main_output = keras.layers.Dense(
             1, activation="sigmoid", name="main_output"
         )(x)
 
-        model = tf.keras.models.Model(
+        model = keras.models.Model(
             inputs=[main_input, auxiliary_input],
             outputs=[main_output, auxiliary_output],
             name="model",
@@ -757,22 +764,22 @@ def test_keras_model_to_graph_def_functional_model_as_layer(self):
           }
         }
         """
-        inputs1 = tf.keras.layers.Input(shape=(784,), name="sub_func_input_1")
-        inputs2 = tf.keras.layers.Input(shape=(784,), name="sub_func_input_2")
-        d0 = tf.keras.layers.Dense(64, activation="relu")
-        d1 = tf.keras.layers.Dense(64, activation="relu")
-        d2 = tf.keras.layers.Dense(64, activation="relu")
+        inputs1 = keras.layers.Input(shape=(784,), name="sub_func_input_1")
+        inputs2 = keras.layers.Input(shape=(784,), name="sub_func_input_2")
+        d0 = keras.layers.Dense(64, activation="relu")
+        d1 = keras.layers.Dense(64, activation="relu")
+        d2 = keras.layers.Dense(64, activation="relu")
 
-        sub_model = tf.keras.models.Model(
+        sub_model = keras.models.Model(
             inputs=[inputs2, inputs1],
             outputs=[d0(inputs1), d1(inputs2)],
             name="model",
         )
 
         main_outputs = d2(
-            tf.keras.layers.concatenate(sub_model([inputs2, inputs1]))
+            keras.layers.concatenate(sub_model([inputs2, inputs1]))
         )
-        model = tf.keras.models.Model(
+        model = keras.models.Model(
             inputs=[inputs2, inputs1],
             outputs=main_outputs,
             name="model_1",
@@ -864,16 +871,16 @@ def DISABLED_test_keras_model_to_graph_def_functional_sequential_model(
           }
         }
         """
-        inputs = tf.keras.layers.Input(shape=(784,), name="func_seq_input")
-        sub_model = tf.keras.models.Sequential(
+        inputs = keras.layers.Input(shape=(784,), name="func_seq_input")
+        sub_model = keras.models.Sequential(
             [
-                tf.keras.layers.Dense(32, input_shape=(784,)),
-                tf.keras.layers.Activation("relu", name="my_relu"),
+                keras.layers.Dense(32, input_shape=(784,)),
+                keras.layers.Activation("relu", name="my_relu"),
             ]
         )
-        dense = tf.keras.layers.Dense(64, activation="relu")
+        dense = keras.layers.Dense(64, activation="relu")
 
-        model = tf.keras.models.Model(
+        model = keras.models.Model(
             inputs=inputs, outputs=dense(sub_model(inputs))
         )
 
@@ -962,15 +969,15 @@ def DISABLED_test_keras_model_to_graph_def_sequential_functional_model(
          }
        }
        """
-        inputs = tf.keras.layers.Input(shape=(784,), name="func_seq_input")
-        dense = tf.keras.layers.Dense(64, activation="relu")
+        inputs = keras.layers.Input(shape=(784,), name="func_seq_input")
+        dense = keras.layers.Dense(64, activation="relu")
 
-        sub_model = tf.keras.models.Model(inputs=inputs, outputs=dense(inputs))
-        model = tf.keras.models.Sequential(
+        sub_model = keras.models.Model(inputs=inputs, outputs=dense(inputs))
+        model = keras.models.Sequential(
             [
                 sub_model,
-                tf.keras.layers.Dense(32, input_shape=(784,)),
-                tf.keras.layers.Activation("relu", name="my_relu"),
+                keras.layers.Dense(32, input_shape=(784,)),
+                keras.layers.Activation("relu", name="my_relu"),
             ]
         )
 
@@ -1029,16 +1036,16 @@ def test_keras_model_to_graph_def_functional_multiple_inbound_nodes_from_same_no
           }
         }
         """
-        inputs = tf.keras.Input(shape=(2,))
+        inputs = keras.Input(shape=(2,))
         doubling_layer = _DoublingLayer()
-        reducing_layer = tf.keras.layers.Add()
+        reducing_layer = keras.layers.Add()
         outputs = reducing_layer(doubling_layer(inputs))
-        model = tf.keras.Model(inputs=[inputs], outputs=outputs)
+        model = keras.Model(inputs=[inputs], outputs=outputs)
 
         self.assertGraphDefToModel(expected_proto, model)
 
 
-class _DoublingLayer(tf.keras.layers.Layer):
+class _DoublingLayer(keras.layers.Layer):
     def call(self, inputs):
         return inputs, inputs
 
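For reference, here is a minimal, self-contained sketch (not part of the diff) of how the Keras 2 shim added at the top of keras_util_test.py is expected to resolve; the tiny Sequential model at the end is only an illustration, not one of the test fixtures.

# Illustrative sketch only; assumes TensorFlow is installed and that the
# separate tf_keras package is available when the bundled Keras is 3.x.
import tensorflow as tf

# Older TF releases have no tf.keras.version(), so guard the lookup.
version_fn = getattr(tf.keras, "version", None)
if version_fn and version_fn().startswith("3."):
    # TF ships Keras 3: fall back to the tf_keras package to keep Keras 2 behavior.
    import tf_keras as keras
else:
    # TF still bundles Keras 2: use it directly.
    keras = tf.keras

# Models built through this alias follow the Keras 2 code paths that the
# graph plugin's keras_util.keras_model_to_graph_def conversion exercises.
model = keras.models.Sequential([keras.layers.Dense(4, input_shape=(8,))])
print(keras.__name__, type(model).__name__)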