Skip to content

avoid failure when docstrings have been stripped (python -OO) #21477

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions keras/src/applications/densenet.py
Original file line number Diff line number Diff line change
Expand Up @@ -487,6 +487,6 @@ def decode_predictions(preds, top=5):
A Keras model instance.
"""

setattr(DenseNet121, "__doc__", DenseNet121.__doc__ + DOC)
setattr(DenseNet169, "__doc__", DenseNet169.__doc__ + DOC)
setattr(DenseNet201, "__doc__", DenseNet201.__doc__ + DOC)
setattr(DenseNet121, "__doc__", (DenseNet121.__doc__ or "") + DOC)
setattr(DenseNet169, "__doc__", (DenseNet169.__doc__ or "") + DOC)
setattr(DenseNet201, "__doc__", (DenseNet201.__doc__ or "") + DOC)
7 changes: 4 additions & 3 deletions keras/src/applications/resnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -586,6 +586,7 @@ def decode_predictions(preds, top=5):
A Model instance.
"""

setattr(ResNet50, "__doc__", ResNet50.__doc__ + DOC)
setattr(ResNet101, "__doc__", ResNet101.__doc__ + DOC)
setattr(ResNet152, "__doc__", ResNet152.__doc__ + DOC)
if ResNet50.__doc__ is not None:
setattr(ResNet50, "__doc__", ResNet50.__doc__ + DOC)
setattr(ResNet101, "__doc__", ResNet101.__doc__ + DOC)
setattr(ResNet152, "__doc__", ResNet152.__doc__ + DOC)
Comment on lines +589 to +592
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The current check if ResNet50.__doc__ is not None: assumes that if ResNet50 has a docstring, then ResNet101 and ResNet152 will as well. While this might be true now, it's better to check each model's docstring individually for robustness and to avoid potential issues if new model variants are added without docstrings in the future.

Suggested change
if ResNet50.__doc__ is not None:
setattr(ResNet50, "__doc__", ResNet50.__doc__ + DOC)
setattr(ResNet101, "__doc__", ResNet101.__doc__ + DOC)
setattr(ResNet152, "__doc__", ResNet152.__doc__ + DOC)
models = [ResNet50, ResNet101, ResNet152]
for model in models:
if model.__doc__ is not None:
setattr(model, "__doc__", model.__doc__ + DOC)

Comment on lines +589 to +592
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Adding a conditional check ensures that the docstring concatenation only occurs if the ResNet50.__doc__ attribute is not None. This prevents a TypeError (None + str) when docstrings are stripped, e.g. under python -OO.

Suggested change
if ResNet50.__doc__ is not None:
setattr(ResNet50, "__doc__", ResNet50.__doc__ + DOC)
setattr(ResNet101, "__doc__", ResNet101.__doc__ + DOC)
setattr(ResNet152, "__doc__", ResNet152.__doc__ + DOC)
if ResNet50.__doc__ is not None:
setattr(ResNet50, "__doc__", ResNet50.__doc__ + DOC)
setattr(ResNet101, "__doc__", ResNet101.__doc__ + DOC)
setattr(ResNet152, "__doc__", ResNet152.__doc__ + DOC)

7 changes: 4 additions & 3 deletions keras/src/applications/resnet_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ def decode_predictions(preds, top=5):
A Model instance.
"""

setattr(ResNet50V2, "__doc__", ResNet50V2.__doc__ + DOC)
setattr(ResNet101V2, "__doc__", ResNet101V2.__doc__ + DOC)
setattr(ResNet152V2, "__doc__", ResNet152V2.__doc__ + DOC)
if ResNet50V2.__doc__ is not None:
setattr(ResNet50V2, "__doc__", ResNet50V2.__doc__ + DOC)
setattr(ResNet101V2, "__doc__", ResNet101V2.__doc__ + DOC)
setattr(ResNet152V2, "__doc__", ResNet152V2.__doc__ + DOC)
Comment on lines +206 to +209
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Similar to resnet.py, the check if ResNet50V2.__doc__ is not None: assumes that if ResNet50V2 has a docstring, then ResNet101V2 and ResNet152V2 will too. To make the code more robust and easier to maintain, it's better to check each model's docstring individually.

Suggested change
if ResNet50V2.__doc__ is not None:
setattr(ResNet50V2, "__doc__", ResNet50V2.__doc__ + DOC)
setattr(ResNet101V2, "__doc__", ResNet101V2.__doc__ + DOC)
setattr(ResNet152V2, "__doc__", ResNet152V2.__doc__ + DOC)
models = [ResNet50V2, ResNet101V2, ResNet152V2]
for model in models:
if model.__doc__ is not None:
setattr(model, "__doc__", model.__doc__ + DOC)

Comment on lines +206 to +209
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Adding a conditional check ensures that the docstring concatenation only occurs if the ResNet50V2.__doc__ attribute is not None. This prevents a TypeError (None + str) when docstrings are stripped, e.g. under python -OO.

Suggested change
if ResNet50V2.__doc__ is not None:
setattr(ResNet50V2, "__doc__", ResNet50V2.__doc__ + DOC)
setattr(ResNet101V2, "__doc__", ResNet101V2.__doc__ + DOC)
setattr(ResNet152V2, "__doc__", ResNet152V2.__doc__ + DOC)
if ResNet50V2.__doc__ is not None:
setattr(ResNet50V2, "__doc__", ResNet50V2.__doc__ + DOC)
setattr(ResNet101V2, "__doc__", ResNet101V2.__doc__ + DOC)
setattr(ResNet152V2, "__doc__", ResNet152V2.__doc__ + DOC)

7 changes: 4 additions & 3 deletions keras/src/optimizers/adadelta.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ def get_config(self):
return config


Adadelta.__doc__ = Adadelta.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Adadelta.__doc__ is not None:
Adadelta.__doc__ = Adadelta.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/adafactor.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,7 @@ def get_config(self):
return config


Adafactor.__doc__ = Adafactor.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Adafactor.__doc__ is not None:
Adafactor.__doc__ = Adafactor.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/adagrad.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@ def get_config(self):
return config


Adagrad.__doc__ = Adagrad.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Adagrad.__doc__ is not None:
Adagrad.__doc__ = Adagrad.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/adam.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,7 @@ def get_config(self):
return config


Adam.__doc__ = Adam.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Adam.__doc__ is not None:
Adam.__doc__ = Adam.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/adamax.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,7 @@ def get_config(self):
return config


Adamax.__doc__ = Adamax.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Adamax.__doc__ is not None:
Adamax.__doc__ = Adamax.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/adamw.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ def __init__(
)


AdamW.__doc__ = AdamW.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if AdamW.__doc__ is not None:
AdamW.__doc__ = AdamW.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/ftrl.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,7 @@ def get_config(self):
return config


Ftrl.__doc__ = Ftrl.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Ftrl.__doc__ is not None:
Ftrl.__doc__ = Ftrl.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/lamb.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ def get_config(self):
return config


Lamb.__doc__ = Lamb.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Lamb.__doc__ is not None:
Lamb.__doc__ = Lamb.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/lion.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ def get_config(self):
return config


Lion.__doc__ = Lion.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Lion.__doc__ is not None:
Lion.__doc__ = Lion.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/loss_scale_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,6 +307,7 @@ def from_config(cls, config, custom_objects=None):
return cls(inner_optimizer, **config)


LossScaleOptimizer.__doc__ = LossScaleOptimizer.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if LossScaleOptimizer.__doc__ is not None:
LossScaleOptimizer.__doc__ = LossScaleOptimizer.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/nadam.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,7 @@ def get_config(self):
return config


Nadam.__doc__ = Nadam.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if Nadam.__doc__ is not None:
Nadam.__doc__ = Nadam.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/rmsprop.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ def get_config(self):
return config


RMSprop.__doc__ = RMSprop.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if RMSprop.__doc__ is not None:
RMSprop.__doc__ = RMSprop.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
7 changes: 4 additions & 3 deletions keras/src/optimizers/sgd.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@ def get_config(self):
return config


SGD.__doc__ = SGD.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
if SGD.__doc__ is not None:
SGD.__doc__ = SGD.__doc__.replace(
"{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
Loading