10 changes: 7 additions & 3 deletions api_gen.py
@@ -84,9 +84,13 @@ def create_legacy_directory(package_dir):
for fname in fnames:
if fname.endswith(".py"):
legacy_fpath = os.path.join(root, fname)
tf_keras_root = root.replace("/_legacy", "/_tf_keras/keras")
tf_keras_root = root.replace(
os.path.join(os.path.sep, "_legacy"),
os.path.join(os.path.sep, "_tf_keras", "keras"),
)
core_api_fpath = os.path.join(
root.replace("/_legacy", ""), fname
root.replace(os.path.join(os.path.sep, "_legacy"), ""),
fname,
)
if not os.path.exists(tf_keras_root):
os.makedirs(tf_keras_root)
@@ -125,7 +129,7 @@ def create_legacy_directory(package_dir):
r"\n",
core_api_contents,
)
legacy_contents = core_api_contents + "\n" + legacy_contents
legacy_contents = f"{core_api_contents}\n{legacy_contents}"
with open(tf_keras_fpath, "w") as f:
f.write(legacy_contents)
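The key change here is that the literal "/_legacy" substrings are rebuilt with os.path.sep, so the .replace() calls also match on Windows, where os.path.join produces backslash-separated paths. A minimal sketch of the pattern, using a hypothetical POSIX-style path:

```python
import os

# Hypothetical package path; os.path.sep is "/" on POSIX and "\\" on Windows.
root = os.path.join("keras", "api", "_legacy", "layers")

# Build the search/replace fragments with os.path.sep instead of a
# hard-coded "/", so the substitution works on either platform.
tf_keras_root = root.replace(
    os.path.join(os.path.sep, "_legacy"),
    os.path.join(os.path.sep, "_tf_keras", "keras"),
)
print(tf_keras_root)  # keras/api/_tf_keras/keras/layers on POSIX
```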

5 changes: 3 additions & 2 deletions guides/distributed_training_with_tensorflow.py
@@ -194,7 +194,8 @@ def make_or_restore_model():
# Either restore the latest model, or create a fresh one
# if there is no checkpoint available.
checkpoints = [
checkpoint_dir + "/" + name for name in os.listdir(checkpoint_dir)
os.path.join(checkpoint_dir, name)
for name in os.listdir(checkpoint_dir)
]
if checkpoints:
latest_checkpoint = max(checkpoints, key=os.path.getctime)
@@ -216,7 +217,7 @@ def run_training(epochs=1):
# This callback saves a SavedModel every epoch
# We include the current epoch in the folder name.
keras.callbacks.ModelCheckpoint(
filepath=checkpoint_dir + "/ckpt-{epoch}.keras",
filepath=os.path.join(checkpoint_dir, "ckpt-{epoch}.keras"),
save_freq="epoch",
)
]
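Both edits in this guide follow the same pattern: checkpoint paths are assembled with os.path.join rather than "+" and "/", and the newest checkpoint is picked by creation time. A standalone sketch of the restore logic, assuming checkpoint_dir may be empty:

```python
import os

checkpoint_dir = "./ckpt"  # hypothetical directory for this sketch
os.makedirs(checkpoint_dir, exist_ok=True)

# List full checkpoint paths and restore the most recently created one.
checkpoints = [
    os.path.join(checkpoint_dir, name) for name in os.listdir(checkpoint_dir)
]
latest = max(checkpoints, key=os.path.getctime) if checkpoints else None
print("Restoring from:", latest)
```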
6 changes: 4 additions & 2 deletions guides/training_with_built_in_methods.py
@@ -1133,7 +1133,8 @@ def make_or_restore_model():
# Either restore the latest model, or create a fresh one
# if there is no checkpoint available.
checkpoints = [
checkpoint_dir + "/" + name for name in os.listdir(checkpoint_dir)
os.path.join(checkpoint_dir, name)
for name in os.listdir(checkpoint_dir)
]
if checkpoints:
latest_checkpoint = max(checkpoints, key=os.path.getctime)
@@ -1148,7 +1149,8 @@ def make_or_restore_model():
# This callback saves the model every 100 batches.
# We include the training loss in the saved model name.
keras.callbacks.ModelCheckpoint(
filepath=checkpoint_dir + "/model-loss={loss:.2f}.keras", save_freq=100
filepath=os.path.join(checkpoint_dir, "model-loss={loss:.2f}.keras"),
save_freq=100,
)
]
model.fit(x_train, y_train, epochs=1, callbacks=callbacks)
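Worth noting: os.path.join only joins the directory onto the template string, so the `{loss:.2f}` placeholder that Keras fills in at save time survives the change intact. A minimal sketch of the callback wiring, with a hypothetical directory:

```python
import os
import keras

checkpoint_dir = "./ckpt"  # hypothetical
callbacks = [
    # Keras substitutes {loss:.2f} when the file is written; the join
    # leaves the placeholder untouched.
    keras.callbacks.ModelCheckpoint(
        filepath=os.path.join(checkpoint_dir, "model-loss={loss:.2f}.keras"),
        save_freq=100,
    )
]
```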
35 changes: 24 additions & 11 deletions integration_tests/import_test.py
@@ -42,9 +42,19 @@ def create_virtualenv():
# Create virtual environment
"python3 -m venv test_env",
]
os.environ["PATH"] = (
"/test_env/bin/" + os.pathsep + os.environ.get("PATH", "")
os.environ["PATH"] = os.pathsep.join(
(
os.path.join(os.getcwd(), "test_env", "bin"),
os.environ.get("PATH", ""),
)
)
if os.name == "nt":
os.environ["PATH"] = os.pathsep.join(
(
os.path.join(os.getcwd(), "test_env", "Scripts"),
os.environ["PATH"],
)
)
run_commands_local(env_setup)


@@ -53,18 +63,17 @@ def manage_venv_installs(whl_path):
backend_pkg, backend_extra_url = BACKEND_REQ[backend.backend()]
install_setup = [
# Installs the backend's package and common requirements
"pip install " + backend_extra_url + backend_pkg,
f"pip install {backend_extra_url}{backend_pkg}",
"pip install -r requirements-common.txt",
"pip install pytest",
# Ensure other backends are uninstalled
"pip uninstall -y "
+ BACKEND_REQ[other_backends[0]][0]
+ " "
+ BACKEND_REQ[other_backends[1]][0]
+ " "
+ BACKEND_REQ[other_backends[2]][0],
"pip uninstall -y {0} {1} {2}".format(
BACKEND_REQ[other_backends[0]][0],
BACKEND_REQ[other_backends[1]][0],
BACKEND_REQ[other_backends[2]][0],
),
# Install `.whl` package
"pip install " + whl_path,
f"pip install {whl_path}",
]
# Install flax for JAX when NNX is enabled
if backend.backend() == "jax" and config.is_nnx_enabled():
@@ -102,7 +111,11 @@ def run_commands_venv(commands):
for command in commands:
print(f"Running command: {command}")
cmd_with_args = command.split(" ")
cmd_with_args[0] = "test_env/bin/" + cmd_with_args[0]
cmd_with_args[0] = os.path.join(
"test_env",
"Scripts" if os.name == "nt" else "bin",
cmd_with_args[0],
)
p = subprocess.Popen(cmd_with_args)
assert p.wait() == 0
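The changes in this file all deal with the same platform difference: a POSIX virtualenv puts executables under bin/ while Windows uses Scripts/, and PATH entries are separated by os.pathsep (":" vs ";"). A condensed sketch of the idea:

```python
import os

# Pick the venv executable directory for the current platform.
venv_bin = os.path.join(
    os.getcwd(), "test_env", "Scripts" if os.name == "nt" else "bin"
)

# Prepend it to PATH using the platform's separator (":" on POSIX,
# ";" on Windows) instead of hard-coding either character.
os.environ["PATH"] = os.pathsep.join((venv_bin, os.environ.get("PATH", "")))
```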

6 changes: 3 additions & 3 deletions integration_tests/model_visualization_test.py
@@ -44,7 +44,7 @@ def get_node_dict(graph, path=""):

for subgraph in graph.get_subgraphs():
sub_nodes = get_node_dict(
subgraph, path=path + subgraph.get_label() + " > "
subgraph, path=f"{path}{subgraph.get_label()} > "
)
nodes.update(sub_nodes)

@@ -85,7 +85,7 @@ def get_edges(graph):
class ModelVisualizationTest(testing.TestCase):
def multi_plot_model(self, model, name, expand_nested=False):
if expand_nested:
name = name + "-expand_nested"
name = f"{name}-expand_nested"

TEST_CASES = [
{},
@@ -130,7 +130,7 @@ def multi_plot_model(self, model, name, expand_nested=False):

for test_case in TEST_CASES:
tags = [v if k == "rankdir" else k for k, v in test_case.items()]
file_name = "-".join([name] + tags) + ".png"
file_name = f"{'-'.join([name] + tags)}.png"
plot_model(
model, file_name, expand_nested=expand_nested, **test_case
)
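The file-name construction above reduces to a single f-string; a small self-contained check with hypothetical values:

```python
name = "model"  # hypothetical base name
tags = ["expand_nested", "LR"]
file_name = f"{'-'.join([name] + tags)}.png"
assert file_name == "model-expand_nested-LR.png"
```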
40 changes: 20 additions & 20 deletions keras/src/applications/convnext.py
@@ -244,7 +244,7 @@ def ConvNeXtBlock(
A function representing a ConvNeXtBlock block.
"""
if name is None:
name = "prestem" + str(backend.get_uid("prestem"))
name = f"prestem{str(backend.get_uid('prestem'))}"

def apply(inputs):
x = inputs
@@ -254,25 +254,25 @@ def apply(inputs):
kernel_size=7,
padding="same",
groups=projection_dim,
name=name + "_depthwise_conv",
name=f"{name}_depthwise_conv",
)(x)
x = layers.LayerNormalization(epsilon=1e-6, name=name + "_layernorm")(x)
x = layers.Dense(4 * projection_dim, name=name + "_pointwise_conv_1")(x)
x = layers.Activation("gelu", name=name + "_gelu")(x)
x = layers.Dense(projection_dim, name=name + "_pointwise_conv_2")(x)
x = layers.LayerNormalization(epsilon=1e-6, name=f"{name}_layernorm")(x)
x = layers.Dense(4 * projection_dim, name=f"{name}_pointwise_conv_1")(x)
x = layers.Activation("gelu", name=f"{name}_gelu")(x)
x = layers.Dense(projection_dim, name=f"{name}_pointwise_conv_2")(x)

if layer_scale_init_value is not None:
x = LayerScale(
layer_scale_init_value,
projection_dim,
name=name + "_layer_scale",
name=f"{name}_layer_scale",
)(x)
if drop_path_rate:
layer = StochasticDepth(
drop_path_rate, name=name + "_stochastic_depth"
drop_path_rate, name=f"{name}_stochastic_depth"
)
else:
layer = layers.Activation("linear", name=name + "_identity")
layer = layers.Activation("linear", name=f"{name}_identity")

return inputs + layer(x)

@@ -282,7 +282,7 @@ def apply(inputs):
def PreStem(name=None):
"""Normalizes inputs with ImageNet-1k mean and std."""
if name is None:
name = "prestem" + str(backend.get_uid("prestem"))
name = "prestem{0}".format(str(backend.get_uid("prestem")))

def apply(x):
x = layers.Normalization(
@@ -292,7 +292,7 @@ def apply(x):
(0.224 * 255) ** 2,
(0.225 * 255) ** 2,
],
name=name + "_prestem_normalization",
name=f"{name}_prestem_normalization",
)(x)
return x

@@ -314,14 +314,14 @@ def Head(num_classes=1000, classifier_activation=None, name=None):
name = str(backend.get_uid("head"))

def apply(x):
x = layers.GlobalAveragePooling2D(name=name + "_head_gap")(x)
x = layers.GlobalAveragePooling2D(name=f"{name}_head_gap")(x)
x = layers.LayerNormalization(
epsilon=1e-6, name=name + "_head_layernorm"
epsilon=1e-6, name=f"{name}_head_layernorm"
)(x)
x = layers.Dense(
num_classes,
activation=classifier_activation,
name=name + "_head_dense",
name=f"{name}_head_dense",
)(x)
return x

@@ -452,13 +452,13 @@ def ConvNeXt(
projection_dims[0],
kernel_size=4,
strides=4,
name=name + "_stem_conv",
name=f"{name}_stem_conv",
),
layers.LayerNormalization(
epsilon=1e-6, name=name + "_stem_layernorm"
epsilon=1e-6, name=f"{name}_stem_layernorm"
),
],
name=name + "_stem",
name=f"{name}_stem",
)

# Downsampling blocks.
@@ -471,16 +471,16 @@
[
layers.LayerNormalization(
epsilon=1e-6,
name=name + "_downsampling_layernorm_" + str(i),
name=f"{name}_downsampling_layernorm_{i}",
),
layers.Conv2D(
projection_dims[i + 1],
kernel_size=2,
strides=2,
name=name + "_downsampling_conv_" + str(i),
name=f"{name}_downsampling_conv_{i}",
),
],
name=name + "_downsampling_block_" + str(i),
name=f"{name}_downsampling_block_{i}",
)
downsample_layers.append(downsample_layer)
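Every change in this file is the same mechanical rewrite: chained "+" layer names become one f-string each, which also folds the str(i) conversions into the format specifier. A minimal sketch with hypothetical values:

```python
from keras import layers

name, i = "convnext_tiny", 0  # hypothetical model name and stage index

# One f-string per layer name; the integer i is formatted in place,
# with no explicit str() call needed.
norm = layers.LayerNormalization(
    epsilon=1e-6, name=f"{name}_downsampling_layernorm_{i}"
)
```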

42 changes: 21 additions & 21 deletions keras/src/applications/densenet.py
@@ -10,25 +10,25 @@
"https://storage.googleapis.com/tensorflow/keras-applications/densenet/"
)
DENSENET121_WEIGHT_PATH = (
BASE_WEIGHTS_PATH + "densenet121_weights_tf_dim_ordering_tf_kernels.h5"
f"{BASE_WEIGHTS_PATH}densenet121_weights_tf_dim_ordering_tf_kernels.h5"
)
DENSENET121_WEIGHT_PATH_NO_TOP = (
BASE_WEIGHTS_PATH
+ "densenet121_weights_tf_dim_ordering_tf_kernels_notop.h5"
f"{BASE_WEIGHTS_PATH}"
"densenet121_weights_tf_dim_ordering_tf_kernels_notop.h5"
)
DENSENET169_WEIGHT_PATH = (
BASE_WEIGHTS_PATH + "densenet169_weights_tf_dim_ordering_tf_kernels.h5"
f"{BASE_WEIGHTS_PATH}densenet169_weights_tf_dim_ordering_tf_kernels.h5"
)
DENSENET169_WEIGHT_PATH_NO_TOP = (
BASE_WEIGHTS_PATH
+ "densenet169_weights_tf_dim_ordering_tf_kernels_notop.h5"
f"{BASE_WEIGHTS_PATH}"
"densenet169_weights_tf_dim_ordering_tf_kernels_notop.h5"
)
DENSENET201_WEIGHT_PATH = (
BASE_WEIGHTS_PATH + "densenet201_weights_tf_dim_ordering_tf_kernels.h5"
f"{BASE_WEIGHTS_PATH}densenet201_weights_tf_dim_ordering_tf_kernels.h5"
)
DENSENET201_WEIGHT_PATH_NO_TOP = (
BASE_WEIGHTS_PATH
+ "densenet201_weights_tf_dim_ordering_tf_kernels_notop.h5"
f"{BASE_WEIGHTS_PATH}"
"densenet201_weights_tf_dim_ordering_tf_kernels_notop.h5"
)


@@ -44,7 +44,7 @@ def dense_block(x, blocks, name):
Output tensor for the block.
"""
for i in range(blocks):
x = conv_block(x, 32, name=name + "_block" + str(i + 1))
x = conv_block(x, 32, name=f"{name}_block{i + 1}")
return x


@@ -61,16 +61,16 @@ def transition_block(x, reduction, name):
"""
bn_axis = 3 if backend.image_data_format() == "channels_last" else 1
x = layers.BatchNormalization(
axis=bn_axis, epsilon=1.001e-5, name=name + "_bn"
axis=bn_axis, epsilon=1.001e-5, name=f"{name}_bn"
)(x)
x = layers.Activation("relu", name=name + "_relu")(x)
x = layers.Activation("relu", name=f"{name}_relu")(x)
x = layers.Conv2D(
int(x.shape[bn_axis] * reduction),
1,
use_bias=False,
name=name + "_conv",
name=f"{name}_conv",
)(x)
x = layers.AveragePooling2D(2, strides=2, name=name + "_pool")(x)
x = layers.AveragePooling2D(2, strides=2, name=f"{name}_pool")(x)
return x


@@ -87,20 +87,20 @@ def conv_block(x, growth_rate, name):
"""
bn_axis = 3 if backend.image_data_format() == "channels_last" else 1
x1 = layers.BatchNormalization(
axis=bn_axis, epsilon=1.001e-5, name=name + "_0_bn"
axis=bn_axis, epsilon=1.001e-5, name=f"{name}_0_bn"
)(x)
x1 = layers.Activation("relu", name=name + "_0_relu")(x1)
x1 = layers.Activation("relu", name=f"{name}_0_relu")(x1)
x1 = layers.Conv2D(
4 * growth_rate, 1, use_bias=False, name=name + "_1_conv"
4 * growth_rate, 1, use_bias=False, name=f"{name}_1_conv"
)(x1)
x1 = layers.BatchNormalization(
axis=bn_axis, epsilon=1.001e-5, name=name + "_1_bn"
axis=bn_axis, epsilon=1.001e-5, name=f"{name}_1_bn"
)(x1)
x1 = layers.Activation("relu", name=name + "_1_relu")(x1)
x1 = layers.Activation("relu", name=f"{name}_1_relu")(x1)
x1 = layers.Conv2D(
growth_rate, 3, padding="same", use_bias=False, name=name + "_2_conv"
growth_rate, 3, padding="same", use_bias=False, name=f"{name}_2_conv"
)(x1)
x = layers.Concatenate(axis=bn_axis, name=name + "_concat")([x, x1])
x = layers.Concatenate(axis=bn_axis, name=f"{name}_concat")([x, x1])
return x
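The weight-URL constants use two forms of the same rewrite: a plain f-string where the result fits on one line, and an f-string followed by an adjacent plain literal (implicit concatenation) where it does not. A standalone sketch:

```python
BASE_WEIGHTS_PATH = (
    "https://storage.googleapis.com/tensorflow/keras-applications/densenet/"
)

# Adjacent string literals are concatenated at compile time, so the
# f-string prefix and the plain suffix form a single URL.
DENSENET121_WEIGHT_PATH_NO_TOP = (
    f"{BASE_WEIGHTS_PATH}"
    "densenet121_weights_tf_dim_ordering_tf_kernels_notop.h5"
)
```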

