【714】LinkNet实现
代码:
import os
import numpy as np
from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, Activation, UpSampling2D, BatchNormalization, add
from keras.layers.core import Flatten, Reshape
from keras.models import Model
from keras.regularizers import l2
import keras.backend as K


def _shortcut(input_tensor, residual):
    """Add a shortcut between *input_tensor* and a residual block, merged with "sum".

    If the spatial size or the channel count of the two tensors differ, the
    shortcut branch is projected through a strided 1x1 convolution so the
    element-wise addition is shape-compatible; otherwise it is the identity.

    Args:
        input_tensor: 4-D tensor (batch, height, width, channels) entering the block.
        residual: 4-D tensor produced by the residual branch.

    Returns:
        Tensor: element-wise sum of the (possibly projected) shortcut and the residual.
    """
    input_shape = K.int_shape(input_tensor)
    residual_shape = K.int_shape(residual)
    # Stride ratios should be exact integers if the architecture is configured
    # correctly; round() guards against float division noise.
    stride_width = int(round(input_shape[1] / residual_shape[1]))
    stride_height = int(round(input_shape[2] / residual_shape[2]))
    equal_channels = input_shape[3] == residual_shape[3]

    shortcut = input_tensor
    # 1x1 conv if shape is different, else identity.
    if stride_width > 1 or stride_height > 1 or not equal_channels:
        shortcut = Conv2D(filters=residual_shape[3],
                          kernel_size=(1, 1),
                          strides=(stride_width, stride_height),
                          padding="valid",
                          kernel_initializer="he_normal",
                          kernel_regularizer=l2(0.0001))(input_tensor)
    return add([shortcut, residual])


def encoder_block(input_tensor, m, n):
    """LinkNet encoder block: two residual units, the first downsampling by 2.

    Uses pre-activation ordering (BN -> ReLU -> Conv) throughout.

    Args:
        input_tensor: 4-D input tensor.
        m: input channel count (kept for symmetry with the LinkNet paper;
           not referenced in the body).
        n: output channel count.

    Returns:
        Tensor with spatial size halved and *n* channels.
    """
    # First residual unit: strided 3x3 conv halves the spatial resolution.
    x = BatchNormalization()(input_tensor)
    x = Activation('relu')(x)
    x = Conv2D(filters=n, kernel_size=(3, 3), strides=(2, 2), padding="same")(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=n, kernel_size=(3, 3), padding="same")(x)
    added_1 = _shortcut(input_tensor, x)

    # Second residual unit: same resolution, identity shortcut.
    x = BatchNormalization()(added_1)
    x = Activation('relu')(x)
    x = Conv2D(filters=n, kernel_size=(3, 3), padding="same")(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=n, kernel_size=(3, 3), padding="same")(x)
    added_2 = _shortcut(added_1, x)
    return added_2


def decoder_block(input_tensor, m, n):
    """LinkNet decoder block: 1x1 reduce -> 2x upsample -> 3x3 conv -> 1x1 expand.

    Args:
        input_tensor: 4-D input tensor with *m* channels.
        m: input channel count; the bottleneck uses m // 4 filters.
        n: output channel count.

    Returns:
        Tensor with spatial size doubled and *n* channels.
    """
    x = BatchNormalization()(input_tensor)
    x = Activation('relu')(x)
    x = Conv2D(filters=int(m / 4), kernel_size=(1, 1))(x)  # channel reduction
    x = UpSampling2D((2, 2))(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=int(m / 4), kernel_size=(3, 3), padding='same')(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=n, kernel_size=(1, 1))(x)  # channel expansion
    return x


def LinkNet(input_shape=(256, 256, 3), classes=1):
    """Build a LinkNet segmentation model.

    Encoder-decoder with additive skip links: each decoder output is summed
    with the matching encoder output before the next decoder stage.

    Args:
        input_shape: (height, width, channels) of the input images.
        classes: number of output channels; the head uses a sigmoid activation,
            so classes=1 yields a binary segmentation mask.

    Returns:
        keras.models.Model mapping images to per-pixel class maps.
    """
    inputs = Input(shape=input_shape)

    # Initial stem: 7x7 stride-2 conv (valid padding) + 3x3 stride-2 max-pool.
    x = BatchNormalization()(inputs)
    x = Activation('relu')(x)
    x = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2))(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding="same")(x)

    # Encoder: each block halves resolution; channels 64 -> 128 -> 256 -> 512.
    encoder_1 = encoder_block(input_tensor=x, m=64, n=64)
    encoder_2 = encoder_block(input_tensor=encoder_1, m=64, n=128)
    encoder_3 = encoder_block(input_tensor=encoder_2, m=128, n=256)
    encoder_4 = encoder_block(input_tensor=encoder_3, m=256, n=512)

    # Decoder: each block doubles resolution; skip-sum with encoder features.
    decoder_4 = decoder_block(input_tensor=encoder_4, m=512, n=256)
    decoder_3_in = add([decoder_4, encoder_3])
    decoder_3_in = Activation('relu')(decoder_3_in)

    decoder_3 = decoder_block(input_tensor=decoder_3_in, m=256, n=128)
    decoder_2_in = add([decoder_3, encoder_2])
    decoder_2_in = Activation('relu')(decoder_2_in)

    decoder_2 = decoder_block(input_tensor=decoder_2_in, m=128, n=64)
    decoder_1_in = add([decoder_2, encoder_1])
    decoder_1_in = Activation('relu')(decoder_1_in)

    decoder_1 = decoder_block(input_tensor=decoder_1_in, m=64, n=64)

    # Final head: two 2x upsamples restore the input resolution, then a
    # sigmoid-activated conv produces the per-pixel class map.
    x = UpSampling2D((2, 2))(decoder_1)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=32, kernel_size=(3, 3), padding="same")(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=32, kernel_size=(3, 3), padding="same")(x)
    x = UpSampling2D((2, 2))(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(filters=classes, kernel_size=(2, 2), activation='sigmoid', padding="same")(x)

    model = Model(inputs=inputs, outputs=x)
    return model


model = LinkNet()
model.summary()
输出:
Model: "model_2" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_3 (InputLayer) [(None, 256, 256, 3) 0 __________________________________________________________________________________________________ batch_normalization_64 (BatchNo (None, 256, 256, 3) 12 input_3[0][0] __________________________________________________________________________________________________ activation_70 (Activation) (None, 256, 256, 3) 0 batch_normalization_64[0][0] __________________________________________________________________________________________________ conv2d_72 (Conv2D) (None, 125, 125, 64) 9472 activation_70[0][0] __________________________________________________________________________________________________ max_pooling2d_2 (MaxPooling2D) (None, 63, 63, 64) 0 conv2d_72[0][0] __________________________________________________________________________________________________ batch_normalization_65 (BatchNo (None, 63, 63, 64) 256 max_pooling2d_2[0][0] __________________________________________________________________________________________________ activation_71 (Activation) (None, 63, 63, 64) 0 batch_normalization_65[0][0] __________________________________________________________________________________________________ conv2d_73 (Conv2D) (None, 32, 32, 64) 36928 activation_71[0][0] __________________________________________________________________________________________________ batch_normalization_66 (BatchNo (None, 32, 32, 64) 256 conv2d_73[0][0] __________________________________________________________________________________________________ activation_72 (Activation) (None, 32, 32, 64) 0 batch_normalization_66[0][0] __________________________________________________________________________________________________ conv2d_75 (Conv2D) (None, 32, 32, 64) 4160 
max_pooling2d_2[0][0] __________________________________________________________________________________________________ conv2d_74 (Conv2D) (None, 32, 32, 64) 36928 activation_72[0][0] __________________________________________________________________________________________________ add_22 (Add) (None, 32, 32, 64) 0 conv2d_75[0][0] conv2d_74[0][0] __________________________________________________________________________________________________ batch_normalization_67 (BatchNo (None, 32, 32, 64) 256 add_22[0][0] __________________________________________________________________________________________________ activation_73 (Activation) (None, 32, 32, 64) 0 batch_normalization_67[0][0] __________________________________________________________________________________________________ conv2d_76 (Conv2D) (None, 32, 32, 64) 36928 activation_73[0][0] __________________________________________________________________________________________________ batch_normalization_68 (BatchNo (None, 32, 32, 64) 256 conv2d_76[0][0] __________________________________________________________________________________________________ activation_74 (Activation) (None, 32, 32, 64) 0 batch_normalization_68[0][0] __________________________________________________________________________________________________ conv2d_77 (Conv2D) (None, 32, 32, 64) 36928 activation_74[0][0] __________________________________________________________________________________________________ add_23 (Add) (None, 32, 32, 64) 0 add_22[0][0] conv2d_77[0][0] __________________________________________________________________________________________________ batch_normalization_69 (BatchNo (None, 32, 32, 64) 256 add_23[0][0] __________________________________________________________________________________________________ activation_75 (Activation) (None, 32, 32, 64) 0 batch_normalization_69[0][0] __________________________________________________________________________________________________ conv2d_78 (Conv2D) (None, 16, 
16, 128) 73856 activation_75[0][0] __________________________________________________________________________________________________ batch_normalization_70 (BatchNo (None, 16, 16, 128) 512 conv2d_78[0][0] __________________________________________________________________________________________________ activation_76 (Activation) (None, 16, 16, 128) 0 batch_normalization_70[0][0] __________________________________________________________________________________________________ conv2d_80 (Conv2D) (None, 16, 16, 128) 8320 add_23[0][0] __________________________________________________________________________________________________ conv2d_79 (Conv2D) (None, 16, 16, 128) 147584 activation_76[0][0] __________________________________________________________________________________________________ add_24 (Add) (None, 16, 16, 128) 0 conv2d_80[0][0] conv2d_79[0][0] __________________________________________________________________________________________________ batch_normalization_71 (BatchNo (None, 16, 16, 128) 512 add_24[0][0] __________________________________________________________________________________________________ activation_77 (Activation) (None, 16, 16, 128) 0 batch_normalization_71[0][0] __________________________________________________________________________________________________ conv2d_81 (Conv2D) (None, 16, 16, 128) 147584 activation_77[0][0] __________________________________________________________________________________________________ batch_normalization_72 (BatchNo (None, 16, 16, 128) 512 conv2d_81[0][0] __________________________________________________________________________________________________ activation_78 (Activation) (None, 16, 16, 128) 0 batch_normalization_72[0][0] __________________________________________________________________________________________________ conv2d_82 (Conv2D) (None, 16, 16, 128) 147584 activation_78[0][0] __________________________________________________________________________________________________ add_25 
(Add) (None, 16, 16, 128) 0 add_24[0][0] conv2d_82[0][0] __________________________________________________________________________________________________ batch_normalization_73 (BatchNo (None, 16, 16, 128) 512 add_25[0][0] __________________________________________________________________________________________________ activation_79 (Activation) (None, 16, 16, 128) 0 batch_normalization_73[0][0] __________________________________________________________________________________________________ conv2d_83 (Conv2D) (None, 8, 8, 256) 295168 activation_79[0][0] __________________________________________________________________________________________________ batch_normalization_74 (BatchNo (None, 8, 8, 256) 1024 conv2d_83[0][0] __________________________________________________________________________________________________ activation_80 (Activation) (None, 8, 8, 256) 0 batch_normalization_74[0][0] __________________________________________________________________________________________________ conv2d_85 (Conv2D) (None, 8, 8, 256) 33024 add_25[0][0] __________________________________________________________________________________________________ conv2d_84 (Conv2D) (None, 8, 8, 256) 590080 activation_80[0][0] __________________________________________________________________________________________________ add_26 (Add) (None, 8, 8, 256) 0 conv2d_85[0][0] conv2d_84[0][0] __________________________________________________________________________________________________ batch_normalization_75 (BatchNo (None, 8, 8, 256) 1024 add_26[0][0] __________________________________________________________________________________________________ activation_81 (Activation) (None, 8, 8, 256) 0 batch_normalization_75[0][0] __________________________________________________________________________________________________ conv2d_86 (Conv2D) (None, 8, 8, 256) 590080 activation_81[0][0] __________________________________________________________________________________________________ 
batch_normalization_76 (BatchNo (None, 8, 8, 256) 1024 conv2d_86[0][0] __________________________________________________________________________________________________ activation_82 (Activation) (None, 8, 8, 256) 0 batch_normalization_76[0][0] __________________________________________________________________________________________________ conv2d_87 (Conv2D) (None, 8, 8, 256) 590080 activation_82[0][0] __________________________________________________________________________________________________ add_27 (Add) (None, 8, 8, 256) 0 add_26[0][0] conv2d_87[0][0] __________________________________________________________________________________________________ batch_normalization_77 (BatchNo (None, 8, 8, 256) 1024 add_27[0][0] __________________________________________________________________________________________________ activation_83 (Activation) (None, 8, 8, 256) 0 batch_normalization_77[0][0] __________________________________________________________________________________________________ conv2d_88 (Conv2D) (None, 4, 4, 512) 1180160 activation_83[0][0] __________________________________________________________________________________________________ batch_normalization_78 (BatchNo (None, 4, 4, 512) 2048 conv2d_88[0][0] __________________________________________________________________________________________________ activation_84 (Activation) (None, 4, 4, 512) 0 batch_normalization_78[0][0] __________________________________________________________________________________________________ conv2d_90 (Conv2D) (None, 4, 4, 512) 131584 add_27[0][0] __________________________________________________________________________________________________ conv2d_89 (Conv2D) (None, 4, 4, 512) 2359808 activation_84[0][0] __________________________________________________________________________________________________ add_28 (Add) (None, 4, 4, 512) 0 conv2d_90[0][0] conv2d_89[0][0] 
__________________________________________________________________________________________________ batch_normalization_79 (BatchNo (None, 4, 4, 512) 2048 add_28[0][0] __________________________________________________________________________________________________ activation_85 (Activation) (None, 4, 4, 512) 0 batch_normalization_79[0][0] __________________________________________________________________________________________________ conv2d_91 (Conv2D) (None, 4, 4, 512) 2359808 activation_85[0][0] __________________________________________________________________________________________________ batch_normalization_80 (BatchNo (None, 4, 4, 512) 2048 conv2d_91[0][0] __________________________________________________________________________________________________ activation_86 (Activation) (None, 4, 4, 512) 0 batch_normalization_80[0][0] __________________________________________________________________________________________________ conv2d_92 (Conv2D) (None, 4, 4, 512) 2359808 activation_86[0][0] __________________________________________________________________________________________________ add_29 (Add) (None, 4, 4, 512) 0 add_28[0][0] conv2d_92[0][0] __________________________________________________________________________________________________ batch_normalization_81 (BatchNo (None, 4, 4, 512) 2048 add_29[0][0] __________________________________________________________________________________________________ activation_87 (Activation) (None, 4, 4, 512) 0 batch_normalization_81[0][0] __________________________________________________________________________________________________ conv2d_93 (Conv2D) (None, 4, 4, 128) 65664 activation_87[0][0] __________________________________________________________________________________________________ up_sampling2d_12 (UpSampling2D) (None, 8, 8, 128) 0 conv2d_93[0][0] __________________________________________________________________________________________________ batch_normalization_82 (BatchNo (None, 8, 8, 128) 512 
up_sampling2d_12[0][0] __________________________________________________________________________________________________ activation_88 (Activation) (None, 8, 8, 128) 0 batch_normalization_82[0][0] __________________________________________________________________________________________________ conv2d_94 (Conv2D) (None, 8, 8, 128) 147584 activation_88[0][0] __________________________________________________________________________________________________ batch_normalization_83 (BatchNo (None, 8, 8, 128) 512 conv2d_94[0][0] __________________________________________________________________________________________________ activation_89 (Activation) (None, 8, 8, 128) 0 batch_normalization_83[0][0] __________________________________________________________________________________________________ conv2d_95 (Conv2D) (None, 8, 8, 256) 33024 activation_89[0][0] __________________________________________________________________________________________________ add_30 (Add) (None, 8, 8, 256) 0 conv2d_95[0][0] add_27[0][0] __________________________________________________________________________________________________ activation_90 (Activation) (None, 8, 8, 256) 0 add_30[0][0] __________________________________________________________________________________________________ batch_normalization_84 (BatchNo (None, 8, 8, 256) 1024 activation_90[0][0] __________________________________________________________________________________________________ activation_91 (Activation) (None, 8, 8, 256) 0 batch_normalization_84[0][0] __________________________________________________________________________________________________ conv2d_96 (Conv2D) (None, 8, 8, 64) 16448 activation_91[0][0] __________________________________________________________________________________________________ up_sampling2d_13 (UpSampling2D) (None, 16, 16, 64) 0 conv2d_96[0][0] __________________________________________________________________________________________________ batch_normalization_85 (BatchNo 
(None, 16, 16, 64) 256 up_sampling2d_13[0][0] __________________________________________________________________________________________________ activation_92 (Activation) (None, 16, 16, 64) 0 batch_normalization_85[0][0] __________________________________________________________________________________________________ conv2d_97 (Conv2D) (None, 16, 16, 64) 36928 activation_92[0][0] __________________________________________________________________________________________________ batch_normalization_86 (BatchNo (None, 16, 16, 64) 256 conv2d_97[0][0] __________________________________________________________________________________________________ activation_93 (Activation) (None, 16, 16, 64) 0 batch_normalization_86[0][0] __________________________________________________________________________________________________ conv2d_98 (Conv2D) (None, 16, 16, 128) 8320 activation_93[0][0] __________________________________________________________________________________________________ add_31 (Add) (None, 16, 16, 128) 0 conv2d_98[0][0] add_25[0][0] __________________________________________________________________________________________________ activation_94 (Activation) (None, 16, 16, 128) 0 add_31[0][0] __________________________________________________________________________________________________ batch_normalization_87 (BatchNo (None, 16, 16, 128) 512 activation_94[0][0] __________________________________________________________________________________________________ activation_95 (Activation) (None, 16, 16, 128) 0 batch_normalization_87[0][0] __________________________________________________________________________________________________ conv2d_99 (Conv2D) (None, 16, 16, 32) 4128 activation_95[0][0] __________________________________________________________________________________________________ up_sampling2d_14 (UpSampling2D) (None, 32, 32, 32) 0 conv2d_99[0][0] __________________________________________________________________________________________________ 
batch_normalization_88 (BatchNo (None, 32, 32, 32) 128 up_sampling2d_14[0][0] __________________________________________________________________________________________________ activation_96 (Activation) (None, 32, 32, 32) 0 batch_normalization_88[0][0] __________________________________________________________________________________________________ conv2d_100 (Conv2D) (None, 32, 32, 32) 9248 activation_96[0][0] __________________________________________________________________________________________________ batch_normalization_89 (BatchNo (None, 32, 32, 32) 128 conv2d_100[0][0] __________________________________________________________________________________________________ activation_97 (Activation) (None, 32, 32, 32) 0 batch_normalization_89[0][0] __________________________________________________________________________________________________ conv2d_101 (Conv2D) (None, 32, 32, 64) 2112 activation_97[0][0] __________________________________________________________________________________________________ add_32 (Add) (None, 32, 32, 64) 0 conv2d_101[0][0] add_23[0][0] __________________________________________________________________________________________________ activation_98 (Activation) (None, 32, 32, 64) 0 add_32[0][0] __________________________________________________________________________________________________ batch_normalization_90 (BatchNo (None, 32, 32, 64) 256 activation_98[0][0] __________________________________________________________________________________________________ activation_99 (Activation) (None, 32, 32, 64) 0 batch_normalization_90[0][0] __________________________________________________________________________________________________ conv2d_102 (Conv2D) (None, 32, 32, 16) 1040 activation_99[0][0] __________________________________________________________________________________________________ up_sampling2d_15 (UpSampling2D) (None, 64, 64, 16) 0 conv2d_102[0][0] 
__________________________________________________________________________________________________ batch_normalization_91 (BatchNo (None, 64, 64, 16) 64 up_sampling2d_15[0][0] __________________________________________________________________________________________________ activation_100 (Activation) (None, 64, 64, 16) 0 batch_normalization_91[0][0] __________________________________________________________________________________________________ conv2d_103 (Conv2D) (None, 64, 64, 16) 2320 activation_100[0][0] __________________________________________________________________________________________________ batch_normalization_92 (BatchNo (None, 64, 64, 16) 64 conv2d_103[0][0] __________________________________________________________________________________________________ activation_101 (Activation) (None, 64, 64, 16) 0 batch_normalization_92[0][0] __________________________________________________________________________________________________ conv2d_104 (Conv2D) (None, 64, 64, 64) 1088 activation_101[0][0] __________________________________________________________________________________________________ up_sampling2d_16 (UpSampling2D) (None, 128, 128, 64) 0 conv2d_104[0][0] __________________________________________________________________________________________________ batch_normalization_93 (BatchNo (None, 128, 128, 64) 256 up_sampling2d_16[0][0] __________________________________________________________________________________________________ activation_102 (Activation) (None, 128, 128, 64) 0 batch_normalization_93[0][0] __________________________________________________________________________________________________ conv2d_105 (Conv2D) (None, 128, 128, 32) 18464 activation_102[0][0] __________________________________________________________________________________________________ batch_normalization_94 (BatchNo (None, 128, 128, 32) 128 conv2d_105[0][0] __________________________________________________________________________________________________ 
activation_103 (Activation) (None, 128, 128, 32) 0 batch_normalization_94[0][0] __________________________________________________________________________________________________ conv2d_106 (Conv2D) (None, 128, 128, 32) 9248 activation_103[0][0] __________________________________________________________________________________________________ up_sampling2d_17 (UpSampling2D) (None, 256, 256, 32) 0 conv2d_106[0][0] __________________________________________________________________________________________________ batch_normalization_95 (BatchNo (None, 256, 256, 32) 128 up_sampling2d_17[0][0] __________________________________________________________________________________________________ activation_104 (Activation) (None, 256, 256, 32) 0 batch_normalization_95[0][0] __________________________________________________________________________________________________ conv2d_107 (Conv2D) (None, 256, 256, 1) 129 activation_104[0][0] ================================================================================================== Total params: 11,551,469 Trainable params: 11,541,543 Non-trainable params: 9,926 __________________________________________________________________________________________________
结构图:
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· DeepSeek 开源周回顾「GitHub 热点速览」
· 记一次.NET内存居高不下排查解决与启示
· 物流快递公司核心技术能力-地址解析分单基础技术分享
· .NET 10首个预览版发布:重大改进与新特性概览!
· .NET10 - 预览版1新功能体验(一)
2021-05-31 【569】LaTex, arrows over letters
2021-05-31 【568】review
2021-05-31 【567】Convert LaTex to Word Using Pandoc
2012-05-31 【043】光盘库编码