"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "3e31e7f9563debe36620f4a07e0b0cc93c3b736e"
Unverified Commit ce2e7ef3 authored by Patrick von Platen's avatar Patrick von Platen Committed by GitHub
Browse files

[Core] Add lazy import structure to imports (#26090)

* improve import time

* Update src/transformers/integrations/__init__.py

* sort import
parent 9cebae64
...@@ -11,61 +11,132 @@ ...@@ -11,61 +11,132 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

from ..utils import _LazyModule


# Map of submodule name -> public names it provides.  This drives the lazy
# module below: nothing here is imported until an attribute is first accessed,
# which keeps `import transformers.integrations` cheap (heavy optional
# dependencies such as deepspeed, bitsandbytes, wandb, etc. are deferred).
# NOTE(review): the keys/values must stay in sync with the TYPE_CHECKING
# imports at the bottom of this file.
_import_structure = {
    "bitsandbytes": [
        "get_keys_to_not_convert",
        "replace_8bit_linear",
        "replace_with_bnb_linear",
        "set_module_8bit_tensor_to_device",
        "set_module_quantized_tensor_to_device",
    ],
    "deepspeed": [
        "HfDeepSpeedConfig",
        "HfTrainerDeepSpeedConfig",
        "deepspeed_config",
        "deepspeed_init",
        "deepspeed_load_checkpoint",
        "deepspeed_optim_sched",
        "is_deepspeed_available",
        "is_deepspeed_zero3_enabled",
        "set_hf_deepspeed_config",
        "unset_hf_deepspeed_config",
    ],
    "integration_utils": [
        "INTEGRATION_TO_CALLBACK",
        "AzureMLCallback",
        "ClearMLCallback",
        "CodeCarbonCallback",
        "CometCallback",
        "DagsHubCallback",
        "FlyteCallback",
        "MLflowCallback",
        "NeptuneCallback",
        "NeptuneMissingConfiguration",
        "TensorBoardCallback",
        "WandbCallback",
        "get_available_reporting_integrations",
        "get_reporting_integration_callbacks",
        "hp_params",
        "is_azureml_available",
        "is_clearml_available",
        "is_codecarbon_available",
        "is_comet_available",
        "is_dagshub_available",
        "is_fairscale_available",
        "is_flyte_deck_standard_available",
        "is_flytekit_available",
        "is_mlflow_available",
        "is_neptune_available",
        "is_optuna_available",
        "is_ray_available",
        "is_ray_tune_available",
        "is_sigopt_available",
        "is_tensorboard_available",
        "is_wandb_available",
        "rewrite_logs",
        "run_hp_search_optuna",
        "run_hp_search_ray",
        "run_hp_search_sigopt",
        "run_hp_search_wandb",
    ],
    "peft": ["PeftAdapterMixin"],
}
# Static analysis vs. runtime split for the lazy package:
# - Under a type checker (TYPE_CHECKING is True) the real imports below are
#   visible, so IDEs and mypy resolve every re-exported name.
# - At runtime the `else` branch replaces this module in sys.modules with a
#   _LazyModule proxy, so submodules in `_import_structure` are only imported
#   on first attribute access.
# NOTE(review): each name listed here must match `_import_structure` exactly,
# or the statically-visible API and the runtime API will diverge.
if TYPE_CHECKING:
    from .bitsandbytes import (
        get_keys_to_not_convert,
        replace_8bit_linear,
        replace_with_bnb_linear,
        set_module_8bit_tensor_to_device,
        set_module_quantized_tensor_to_device,
    )
    from .deepspeed import (
        HfDeepSpeedConfig,
        HfTrainerDeepSpeedConfig,
        deepspeed_config,
        deepspeed_init,
        deepspeed_load_checkpoint,
        deepspeed_optim_sched,
        is_deepspeed_available,
        is_deepspeed_zero3_enabled,
        set_hf_deepspeed_config,
        unset_hf_deepspeed_config,
    )
    from .integration_utils import (
        INTEGRATION_TO_CALLBACK,
        AzureMLCallback,
        ClearMLCallback,
        CodeCarbonCallback,
        CometCallback,
        DagsHubCallback,
        FlyteCallback,
        MLflowCallback,
        NeptuneCallback,
        NeptuneMissingConfiguration,
        TensorBoardCallback,
        WandbCallback,
        get_available_reporting_integrations,
        get_reporting_integration_callbacks,
        hp_params,
        is_azureml_available,
        is_clearml_available,
        is_codecarbon_available,
        is_comet_available,
        is_dagshub_available,
        is_fairscale_available,
        is_flyte_deck_standard_available,
        is_flytekit_available,
        is_mlflow_available,
        is_neptune_available,
        is_optuna_available,
        is_ray_available,
        is_ray_tune_available,
        is_sigopt_available,
        is_tensorboard_available,
        is_wandb_available,
        rewrite_logs,
        run_hp_search_optuna,
        run_hp_search_ray,
        run_hp_search_sigopt,
        run_hp_search_wandb,
    )
    from .peft import PeftAdapterMixin
else:
    import sys

    # Swap this module object for a lazy proxy; `module_spec=__spec__` keeps
    # importlib metadata (name, loader, package path) intact on the proxy.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment