Unverified commit 16099abf — authored by zhiboniu, committed by GitHub

fix new ci check errors (#33561)

Parent commit: 0b4a7f1a
......@@ -92,6 +92,10 @@ from .dygraph.checkpoint import save_dygraph, load_dygraph
from .dygraph.varbase_patch_methods import monkey_patch_varbase
from . import generator
from .core import _cuda_synchronize
from .generator import Generator
from .trainer_desc import TrainerDesc, DistMultiTrainer, PipelineTrainer, MultiTrainer, HeterXpuTrainer, HeterBoxTrainer
from .transpiler import HashName, RoundRobin
from .backward import append_backward
Tensor = LoDTensor
enable_imperative = enable_dygraph
......@@ -116,7 +120,6 @@ __all__ = framework.__all__ + executor.__all__ + \
'transpiler',
'nets',
'optimizer',
'learning_rate_decay',
'backward',
'regularizer',
'LoDTensor',
......@@ -137,7 +140,6 @@ __all__ = framework.__all__ + executor.__all__ + \
'install_check',
'save',
'load',
'VarBase',
'_cuda_synchronize'
]
......
......@@ -33,6 +33,7 @@ from .mixed_precision import *
from . import layers
from .layers import *
from . import optimizer
from .optimizer import *
from . import sparsity
from .sparsity import *
......
......@@ -20,7 +20,9 @@ from . import fp16_lists
from .fp16_lists import *
from . import fp16_utils
from .fp16_utils import *
from . import bf16
__all__ = decorator.__all__
__all__ = []
__all__ += decorator.__all__
__all__ += fp16_lists.__all__
__all__ += fp16_utils.__all__
......@@ -21,6 +21,7 @@ from .amp_utils import *
from . import decorator
from .decorator import *
__all__ = decorator.__all__
__all__ = []
__all__ += decorator.__all__
__all__ += amp_lists.__all__
__all__ += amp_utils.__all__
Markdown is supported
0% uploaded.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To leave a comment, please register.