Added multi-GPU capabilities

This commit is contained in:
mrt
2023-05-31 11:40:35 +02:00
parent 9230013fb2
commit feca6ce642
3 changed files with 77 additions and 62 deletions
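
All three training scripts get the same treatment: create a tf.distribute.MirroredStrategy, scale the per-replica batch size of 128 by strategy.num_replicas_in_sync, and move model construction and compilation into strategy.scope(). A minimal, self-contained sketch of that pattern (the toy model below is a placeholder for illustration, not one of the repo's networks):

import tensorflow as tf

# MirroredStrategy replicates the model on every visible GPU and
# synchronizes gradients across replicas after each step.
strategy = tf.distribute.MirroredStrategy()
print("Replicas in sync:", strategy.num_replicas_in_sync)

# Global batch size = per-replica batch size * number of replicas.
batch_size = 128 * strategy.num_replicas_in_sync

with strategy.scope():
    # Variables (weights, optimizer state) created inside the scope are
    # mirrored across the replicas, so build and compile both go here.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(10, activation="softmax", input_shape=(32,)),
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4),
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
        metrics=["accuracy"],
    )

# model.fit(...) can stay outside the scope; Keras splits each global
# batch across the replicas automatically.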

View File

@@ -11,11 +11,14 @@ from data_generator import create_data_generators
HYPERPARAMETERS
"""
+# Distribute training
+strategy = tf.distribute.MirroredStrategy()
# Input
image_size = 224
# Hyper-parameters
-batch_size = 128
+batch_size = 128 * strategy.num_replicas_in_sync
num_epochs = 25
learning_rate = 0.0001
num_classes = 8631
@@ -32,15 +35,16 @@ train_gen, val_gen, test_gen = create_data_generators(target_size=image_size, ba
MODEL
"""
-resnet_model = tf.keras.applications.ResNet50(
-    include_top=False,
-    weights="imagenet",
-    input_shape=(image_size, image_size, 3),
-    pooling=None,
-)
-Y = GlobalAvgPool2D()(resnet_model.output)
-Y = Dense(units=num_classes, activation='softmax', kernel_initializer=GlorotUniform())(Y)
-resnet_model = Model(inputs=resnet_model.input, outputs=Y, name='ResNet50')
+with strategy.scope():
+    resnet_model = tf.keras.applications.ResNet50(
+        include_top=False,
+        weights="imagenet",
+        input_shape=(image_size, image_size, 3),
+        pooling=None,
+    )
+    Y = GlobalAvgPool2D()(resnet_model.output)
+    Y = Dense(units=num_classes, activation='softmax', kernel_initializer=GlorotUniform())(Y)
+    resnet_model = Model(inputs=resnet_model.input, outputs=Y, name='ResNet50')
resnet_model.summary()
@@ -48,17 +52,18 @@ resnet_model.summary()
MODEL COMPILE
"""
-optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
-resnet_model.compile(
-    optimizer=optimizer,
-    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
-    metrics=[
-        tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy'),
-        tf.keras.metrics.SparseTopKCategoricalAccuracy(k=5, name='top-5-accuracy'),
-        tf.keras.metrics.SparseTopKCategoricalAccuracy(k=10, name='top-10-accuracy'),
-        tf.keras.metrics.SparseTopKCategoricalAccuracy(k=100, name='top-100-accuracy'),
-    ]
-)
+with strategy.scope():
+    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
+    resnet_model.compile(
+        optimizer=optimizer,
+        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
+        metrics=[
+            tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy'),
+            tf.keras.metrics.SparseTopKCategoricalAccuracy(k=5, name='top-5-accuracy'),
+            tf.keras.metrics.SparseTopKCategoricalAccuracy(k=10, name='top-10-accuracy'),
+            tf.keras.metrics.SparseTopKCategoricalAccuracy(k=100, name='top-100-accuracy'),
+        ]
+    )
"""

View File

@@ -11,11 +11,14 @@ from data_generator import create_data_generators
HYPERPARAMETERS
"""
+# Distribute training
+strategy = tf.distribute.MirroredStrategy()
# Input
image_size = 224
# Hyper-parameters
-batch_size = 128
+batch_size = 128 * strategy.num_replicas_in_sync
num_epochs = 25
learning_rate = 0.0001
num_classes = 8631
@@ -32,15 +35,16 @@ train_gen, val_gen, test_gen = create_data_generators(target_size=image_size, ba
MODEL
"""
-vgg_model = tf.keras.applications.VGG16(
-    include_top=True,
-    weights="imagenet",
-    input_shape=(image_size, image_size, 3),
-    pooling=None,
-)
-Y = vgg_model.layers[-2].output
-Y = Dense(units=num_classes, activation='softmax', kernel_initializer=GlorotUniform())(Y)
-vgg_model = Model(inputs=vgg_model.input, outputs=Y, name='VGG16')
+with strategy.scope():
+    vgg_model = tf.keras.applications.VGG16(
+        include_top=True,
+        weights="imagenet",
+        input_shape=(image_size, image_size, 3),
+        pooling=None,
+    )
+    Y = vgg_model.layers[-2].output
+    Y = Dense(units=num_classes, activation='softmax', kernel_initializer=GlorotUniform())(Y)
+    vgg_model = Model(inputs=vgg_model.input, outputs=Y, name='VGG16')
vgg_model.summary()
@@ -48,17 +52,18 @@ vgg_model.summary()
MODEL COMPILE
"""
-optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
-vgg_model.compile(
-    optimizer=optimizer,
-    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
-    metrics=[
-        tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy'),
-        tf.keras.metrics.SparseTopKCategoricalAccuracy(k=5, name='top-5-accuracy'),
-        tf.keras.metrics.SparseTopKCategoricalAccuracy(k=10, name='top-10-accuracy'),
-        tf.keras.metrics.SparseTopKCategoricalAccuracy(k=100, name='top-100-accuracy'),
-    ]
-)
+with strategy.scope():
+    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
+    vgg_model.compile(
+        optimizer=optimizer,
+        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
+        metrics=[
+            tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy'),
+            tf.keras.metrics.SparseTopKCategoricalAccuracy(k=5, name='top-5-accuracy'),
+            tf.keras.metrics.SparseTopKCategoricalAccuracy(k=10, name='top-10-accuracy'),
+            tf.keras.metrics.SparseTopKCategoricalAccuracy(k=100, name='top-100-accuracy'),
+        ]
+    )
"""

View File

@@ -10,11 +10,14 @@ from data_generator import create_data_generators
HYPERPARAMETERS
"""
+# Distribute training
+strategy = tf.distribute.MirroredStrategy()
# Input
image_size = 224
# Hyper-parameters
-batch_size = 128
+batch_size = 128 * strategy.num_replicas_in_sync
num_epochs = 25
learning_rate = 0.0001
num_classes = 8631
@@ -31,14 +34,15 @@ train_gen, val_gen, test_gen = create_data_generators(target_size=image_size, ba
MODEL
"""
-base_model = vit.vit_b32(
-    image_size=image_size,
-    pretrained=True,
-    include_top=False,
-    pretrained_top=False,
-)
-y = tf.keras.layers.Dense(num_classes, activation='softmax')(base_model.output)
-vit_model = tf.keras.models.Model(inputs=base_model.input, outputs=y)
+with strategy.scope():
+    base_model = vit.vit_b32(
+        image_size=image_size,
+        pretrained=True,
+        include_top=False,
+        pretrained_top=False,
+    )
+    y = tf.keras.layers.Dense(num_classes, activation='softmax')(base_model.output)
+    vit_model = tf.keras.models.Model(inputs=base_model.input, outputs=y)
vit_model.summary()
@@ -46,17 +50,18 @@ vit_model.summary()
MODEL COMPILE
"""
-optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
-vit_model.compile(
-    optimizer=optimizer,
-    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=False),
-    metrics=[
-        keras.metrics.SparseCategoricalAccuracy(name="accuracy"),
-        keras.metrics.SparseTopKCategoricalAccuracy(k=5, name="top-5-accuracy"),
-        keras.metrics.SparseTopKCategoricalAccuracy(k=10, name="top-10-accuracy"),
-        keras.metrics.SparseTopKCategoricalAccuracy(k=100, name="top-100-accuracy"),
-    ]
-)
+with strategy.scope():
+    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
+    vit_model.compile(
+        optimizer=optimizer,
+        loss=keras.losses.SparseCategoricalCrossentropy(from_logits=False),
+        metrics=[
+            keras.metrics.SparseCategoricalAccuracy(name="accuracy"),
+            keras.metrics.SparseTopKCategoricalAccuracy(k=5, name="top-5-accuracy"),
+            keras.metrics.SparseTopKCategoricalAccuracy(k=10, name="top-10-accuracy"),
+            keras.metrics.SparseTopKCategoricalAccuracy(k=100, name="top-100-accuracy"),
+        ]
+    )
"""