有没有Unet++和门结构的实例
时间: 2023-05-24 13:07:12 浏览: 59
以下是 U-Net(注意:是标准 U-Net,并非 U-Net++)和注意力门(Attention Gate)结构的 Python 代码实例:
Unet模型:
```
import tensorflow as tf
from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Conv2D, MaxPool2D, UpSampling2D, Concatenate
def down_block(x, filters, kernel_size=(3, 3), padding="same", strides=1):
    """Encoder stage: two ReLU convolutions followed by 2x2 max pooling.

    Returns (features, pooled): the pre-pool feature map (kept for the
    skip connection) and the downsampled tensor for the next stage.
    """
    features = x
    for _ in range(2):
        features = Conv2D(filters, kernel_size, padding=padding,
                          strides=strides, activation="relu")(features)
    pooled = MaxPool2D((2, 2), (2, 2))(features)
    return features, pooled
def up_block(x, skip, filters, kernel_size=(3, 3), padding="same", strides=1):
    """Decoder stage: 2x upsample, concatenate the skip connection, two convs."""
    upsampled = UpSampling2D((2, 2))(x)
    merged = Concatenate()([upsampled, skip])
    out = Conv2D(filters, kernel_size, padding=padding, strides=strides,
                 activation="relu")(merged)
    out = Conv2D(filters, kernel_size, padding=padding, strides=strides,
                 activation="relu")(out)
    return out
def UNet(image_size, channels):
    """Build a standard 4-level U-Net for binary segmentation.

    Args:
        image_size: spatial size of the (square) input image.
        channels: number of input channels.

    Returns:
        A Keras Model mapping (image_size, image_size, channels) tensors to a
        single-channel sigmoid mask of the same spatial size.
    """
    # Renamed from `input` to avoid shadowing the builtin.
    inputs = Input(shape=(image_size, image_size, channels))

    # Encoder: keep each pre-pool feature map for the skip connections.
    c1, p1 = down_block(inputs, 64)
    c2, p2 = down_block(p1, 128)
    c3, p3 = down_block(p2, 256)
    c4, p4 = down_block(p3, 512)

    # Bottleneck at 1/16 resolution.
    x = Conv2D(1024, (3, 3), padding="same", activation="relu")(p4)

    # Decoder: mirror the encoder, fusing the matching skip at each level.
    x = up_block(x, c4, 512)
    x = up_block(x, c3, 256)
    x = up_block(x, c2, 128)
    x = up_block(x, c1, 64)

    # Per-pixel sigmoid for a binary mask.
    outputs = Conv2D(1, (1, 1), padding="same", activation="sigmoid")(x)
    return Model(inputs, outputs)
```
注意力门(Attention Gate)结构模型:
```
import tensorflow as tf
from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Conv2D, Activation, Concatenate, multiply, add, BatchNormalization
def conv_block(x, filters, kernel_size=(3, 3), padding="same", strides=1):
    """Conv2D -> BatchNormalization -> ReLU."""
    convolved = Conv2D(filters, kernel_size, padding=padding, strides=strides)(x)
    normalized = BatchNormalization()(convolved)
    return Activation("relu")(normalized)
def attention_block(x, g, inter_channel):
    """Additive attention gate: reweight `x` by a sigmoid mask derived from (x, g).

    `x` and `g` must share spatial dimensions, since their 1x1 projections
    are summed elementwise before the mask is computed.
    """
    x_proj = Conv2D(inter_channel, (1, 1), padding="same", use_bias=False)(x)
    g_proj = Conv2D(inter_channel, (1, 1), padding="same", use_bias=False)(g)
    combined = Activation("relu")(add([x_proj, g_proj]))
    mask = Activation("sigmoid")(
        Conv2D(1, (1, 1), padding="same", use_bias=False)(combined))
    return multiply([x, mask])
def gated_attention_model(input_shape, classes):
    """Build an attention-gated U-Net-style segmentation model.

    Args:
        input_shape: (height, width, channels) of the input image.
        classes: number of output classes (per-pixel softmax).

    Returns:
        A Keras Model producing a (height, width, classes) softmax map.

    Fixes vs. the original listing:
    - `attention_block` sums its two inputs elementwise, so the gate and the
      skip must have the SAME spatial size. The original gated conv4 (1/8
      resolution) with conv3 (1/4 resolution), which crashes in `add`. Here
      the gate is the decoder feature upsampled to the skip's resolution,
      and attention is applied to the skip connection (standard Attention
      U-Net wiring).
    - The unused `pool4` was removed (conv4 is the bottleneck).
    - `UpSampling2D` is referenced via tf.keras.layers since this snippet's
      import block does not import it directly.
    """
    inputs = Input(shape=input_shape)

    # Encoder.
    conv1 = conv_block(inputs, 64)
    pool1 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv1)
    conv2 = conv_block(pool1, 128)
    pool2 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv2)
    conv3 = conv_block(pool2, 256)
    pool3 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv3)
    conv4 = conv_block(pool3, 512)  # bottleneck at 1/8 resolution

    # Decoder with attention gates: upsample first so gate and skip match.
    up5 = Conv2D(256, 2, padding='same')(
        tf.keras.layers.UpSampling2D(size=(2, 2))(conv4))
    atten5 = attention_block(conv3, up5, 64)
    conv5 = conv_block(Concatenate()([atten5, up5]), 256)

    up6 = Conv2D(128, 2, padding='same')(
        tf.keras.layers.UpSampling2D(size=(2, 2))(conv5))
    atten6 = attention_block(conv2, up6, 32)
    conv6 = conv_block(Concatenate()([atten6, up6]), 128)

    up7 = Conv2D(64, 2, padding='same')(
        tf.keras.layers.UpSampling2D(size=(2, 2))(conv6))
    atten7 = attention_block(conv1, up7, 16)
    conv7 = conv_block(Concatenate()([atten7, up7]), 64)

    outputs = Conv2D(classes, (1, 1), padding='same', activation='softmax')(conv7)
    return Model(inputs, outputs)
```