Skip to content

Commit ae21699

Browse files
fix: replace 35 bare except clauses with except Exception (#7873)
## What

Replace 35 bare `except:` clauses with `except Exception:`.

## Why

Bare `except:` catches `BaseException`, including `KeyboardInterrupt` and `SystemExit`, which can prevent clean process shutdown and mask critical errors. Using `except Exception:` catches all application-level errors while allowing system-level exceptions to propagate correctly.

Co-authored-by: haosenwang1018 <haosenwang1018@users.noreply.github.com>
1 parent efc0b49 commit ae21699

File tree

27 files changed

+35
-35
lines changed

27 files changed

+35
-35
lines changed

accelerator/cpu_accelerator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,7 @@ def is_fp16_supported(self):
232232
try:
233233
if torch.ops.mkldnn._is_mkldnn_fp16_supported():
234234
return True
235-
except:
235+
except Exception:
236236
return False
237237

238238
def supported_dtypes(self):

csrc/aio/py_test/parse_aio_stats.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def extract_value(key, file):
5050
return int(v[0]) * 1024 * 1024
5151
else:
5252
return int(key[2:])
53-
except:
53+
except Exception:
5454
print(f"{file}: extract_value fails on {key}")
5555
return None
5656

deepspeed/autotuning/autotuner.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ def __init__(self, args, active_resources):
6969
try:
7070
os.makedirs(self.exps_dir, exist_ok=True)
7171
logger.info(f"Created autotuning experiments directory: {self.exps_dir}")
72-
except:
72+
except Exception:
7373
logger.error(
7474
f"Failed to create {self.exps_dir}, please check exps_dir in the autotuning config file is accessible by all the nodes in the job."
7575
)
@@ -82,7 +82,7 @@ def __init__(self, args, active_resources):
8282
try:
8383
os.makedirs(self.results_dir, exist_ok=True)
8484
logger.info(f"Created autotuning results directory: {self.results_dir}")
85-
except:
85+
except Exception:
8686
logger.error(
8787
f"Failed to create {self.results_dir}, please check results_dir in the autotuning config file is accessible by all the nodes in the job."
8888
)

deepspeed/autotuning/tuner/base_tuner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,6 @@ def tune(self, sample_size=1, n_trials=1000, early_stopping=None):
6767
)
6868
break
6969
return i
70-
except:
70+
except Exception:
7171
logger.info("Tuner Error:", sys.exc_info()[0])
7272
return i

deepspeed/compile/input_storage.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ def _extract_tensor_metadata(self, tensor: torch.Tensor) -> TensorMetadata:
4444
# Get memory format safely
4545
try:
4646
memory_format = tensor.memory_format() if hasattr(tensor, 'memory_format') else torch.contiguous_format
47-
except:
47+
except Exception:
4848
memory_format = torch.contiguous_format
4949

5050
# Store real data for tensors if configured to do so

deepspeed/compile/profilers/graph_profile.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ def _get_mem_usage_out_of_torch():
6767

6868
torch_alloc = get_accelerator().memory_allocated()
6969
adjust = info.used - torch_alloc
70-
except:
70+
except Exception:
7171
# pynvml not available
7272
pass
7373

deepspeed/inference/v2/inference_parameter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ def to(self, *args, **kwargs):
4747
new_attr = attr.to(*args, **kwargs)
4848
setattr(new_tensor, name, new_attr)
4949
new_tensor.aux_attrs[name] = new_attr
50-
except:
50+
except Exception:
5151
pass
5252

5353
return new_tensor

deepspeed/module_inject/auto_tp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def strided_copy(self,
6868
if src_shape[outer_dim] == dst_shape[self.out_dim]:
6969
try:
7070
dst = dst.reshape(-1).data.copy_(src.data.reshape(-1)).reshape(src.shape)
71-
except:
71+
except Exception:
7272
print(dst.shape, src.shape)
7373
exit()
7474
dst = torch.nn.parameter.Parameter(dst, requires_grad=False)

deepspeed/module_inject/containers/bert.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ def __init__(self, client_module, inference=False):
4141
transformers.models.bert.modeling_bert.BertLayer,
4242
transformers.models.roberta.modeling_roberta.RobertaLayer
4343
]
44-
except:
44+
except Exception:
4545
HFBertLayerPolicy._orig_layer_class = None
4646

4747
def get_hidden_heads(self):

deepspeed/module_inject/containers/clip.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def __init__(self, client_module, inference=False):
3535
try:
3636
import transformers
3737
HFCLIPLayerPolicy._orig_layer_class = transformers.models.clip.modeling_clip.CLIPEncoderLayer
38-
except:
38+
except Exception:
3939
HFCLIPLayerPolicy._orig_layer_class = None
4040

4141
def get_hidden_heads(self):

0 commit comments

Comments (0)