@@ -43,12 +43,12 @@ def conv_block(
43
43
)(x )
44
44
if pool :
45
45
x = tf .keras .layers .MaxPool2D (pool_size = 3 , strides = 2 )(x )
46
- x = tf .keras .layers .BatchNormalization (scale = False , momentum = 0.9 )(x )
47
- return x
46
+ return tf .keras .layers .BatchNormalization (scale = False , momentum = 0.9 )(x )
48
47
49
48
def dense_block (self , x : tf .Tensor , units : int ) -> tf .Tensor :
50
- x = lq .layers .QuantDense (
49
+ x = lq .layers .QuantConv2D (
51
50
units ,
51
+ kernel_size = 1 ,
52
52
input_quantizer = self .input_quantizer ,
53
53
kernel_quantizer = self .kernel_quantizer ,
54
54
kernel_constraint = self .kernel_constraint ,
@@ -75,10 +75,11 @@ def build(self) -> tf.keras.models.Model:
75
75
76
76
# Classifier
77
77
if self .include_top :
78
- out = tf .keras .layers .Flatten ( )(out )
78
+ out = tf .keras .layers .Reshape (( 1 , 1 , - 1 ) )(out )
79
79
out = self .dense_block (out , units = 4096 )
80
80
out = self .dense_block (out , units = 4096 )
81
81
out = self .dense_block (out , self .num_classes )
82
+ out = tf .keras .layers .Flatten ()(out )
82
83
out = tf .keras .layers .Activation ("softmax" , dtype = "float32" )(out )
83
84
84
85
model = tf .keras .models .Model (
@@ -91,9 +92,9 @@ def build(self) -> tf.keras.models.Model:
91
92
if self .include_top :
92
93
weights_path = utils .download_pretrained_model (
93
94
model = "binary_alexnet" ,
94
- version = "v0.2 .0" ,
95
+ version = "v0.3 .0" ,
95
96
file = "binary_alexnet_weights.h5" ,
96
- file_hash = "0f8d3f6c1073ef993e2e99a38f8e661e5efe385085b2a84b43a7f2af8500a3d3 " ,
97
+ file_hash = "7fc065c47c5c1d92389e0bb988ce6df6a4fa09d803b866e2ba648069d6652d63 " ,
97
98
)
98
99
else :
99
100
weights_path = utils .download_pretrained_model (
@@ -122,7 +123,7 @@ def BinaryAlexNet(
122
123
Optionally loads weights pre-trained on ImageNet.
123
124
124
125
```netron
125
- binary_alexnet-v0.2.0/binary_alexnet.json
126
+ binary_alexnet-v0.3.0/binary_alexnet.json
126
127
```
127
128
```summary
128
129
literature.BinaryAlexNet
0 commit comments