From e5c22715928b44819d5f9762cdf7b3e0d99fbdb4 Mon Sep 17 00:00:00 2001
From: jianyunchao
Date: Tue, 9 Jul 2024 14:39:34 +0800
Subject: [PATCH] update jit config.

---
 official/cv/MobileNet/mobilenetv2/eval.py  |  2 +-
 official/cv/MobileNet/mobilenetv2/train.py | 26 +++++++++++-----------
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/official/cv/MobileNet/mobilenetv2/eval.py b/official/cv/MobileNet/mobilenetv2/eval.py
index bd39c52b0..d387d7555 100644
--- a/official/cv/MobileNet/mobilenetv2/eval.py
+++ b/official/cv/MobileNet/mobilenetv2/eval.py
@@ -29,7 +29,7 @@ config.is_training = config.is_training_eval
 
 @moxing_wrapper(pre_process=modelarts_process)
 def eval_mobilenetv2():
-    mindspore.set_context(mode=0, device_target=config.platform, save_graphs=False)
+    mindspore.set_context(mode=0, device_target=config.platform, save_graphs=False, jit_config={"jit_level": "O2"})
     config.dataset_path = os.path.join(config.dataset_path, 'validation_preprocess')
     print('\nconfig: \n', config)
     if not config.device_id:
diff --git a/official/cv/MobileNet/mobilenetv2/train.py b/official/cv/MobileNet/mobilenetv2/train.py
index 283c5fcff..3ae33dec7 100644
--- a/official/cv/MobileNet/mobilenetv2/train.py
+++ b/official/cv/MobileNet/mobilenetv2/train.py
@@ -41,13 +41,13 @@ def train_mobilenetv2():
     """ train_mobilenetv2 """
     if config.platform == "CPU":
         config.run_distribute = False
-    mindspore.set_context(mode=0, device_target=config.platform, save_graphs=False)
+    mindspore.set_context(mode=0, device_target=config.platform, save_graphs=False, jit_config={"jit_level": "O2"})
     if config.run_distribute:
         comm.init()
         config.rank_id = comm.get_rank()
         config.rank_size = comm.get_group_size()
         mindspore.set_auto_parallel_context(parallel_mode=mindspore.ParallelMode.DATA_PARALLEL,
-                                          gradients_mean=True)
+                                            gradients_mean=True)
     config.train_dataset_path = os.path.join(config.dataset_path, 'train')
     config.eval_dataset_path = os.path.join(config.dataset_path, 'validation_preprocess')
     if not config.device_id:
@@ -85,12 +85,12 @@
 
     # get learning rate
     lr = mindspore.Tensor(get_lr(global_step=0,
-                       lr_init=config.lr_init,
-                       lr_end=config.lr_end,
-                       lr_max=config.lr_max,
-                       warmup_epochs=config.warmup_epochs,
-                       total_epochs=epoch_size,
-                       steps_per_epoch=step_size))
+                                 lr_init=config.lr_init,
+                                 lr_end=config.lr_end,
+                                 lr_max=config.lr_max,
+                                 warmup_epochs=config.warmup_epochs,
+                                 total_epochs=epoch_size,
+                                 steps_per_epoch=step_size))
     metrics = {"acc"}
     dist_eval_network = None
     eval_dataset = None
@@ -104,15 +104,15 @@
             group_params = build_params_groups(net)
             opt = nn.Momentum(group_params, lr, config.momentum, loss_scale=config.loss_scale)
             model = mindspore.Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale,
-                          metrics=metrics, eval_network=dist_eval_network,
-                          amp_level="O2", keep_batchnorm_fp32=False,
-                          boost_level=config.boost_mode,
-                          boost_config_dict={"boost": {"mode": "manual", "grad_freeze": False}})
+                                    metrics=metrics, eval_network=dist_eval_network,
+                                    amp_level="O2", keep_batchnorm_fp32=False,
+                                    boost_level=config.boost_mode,
+                                    boost_config_dict={"boost": {"mode": "manual", "grad_freeze": False}})
         else:
             opt = nn.Momentum(net.trainable_params(), lr, config.momentum, config.weight_decay)
 
             model = mindspore.Model(net, loss_fn=loss, optimizer=opt, metrics=metrics, eval_network=dist_eval_network,
-                          boost_level=config.boost_mode)
+                                    boost_level=config.boost_mode)
         cb = config_ckpoint(config, lr, step_size, model, eval_dataset)
         print("============== Starting Training ==============")
         model.train(epoch_size, dataset, callbacks=cb, dataset_sink_mode=True)
--
Gitee
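
Note: a minimal sketch of the context setup this patch converges on, assuming a MindSpore release whose mindspore.set_context accepts the jit_config argument; "Ascend" below is only an illustrative stand-in for the config.platform value the scripts actually pass.

    # Enable graph mode (mode=0) and request the O2 JIT level, mirroring the
    # set_context call added in eval.py and train.py by this patch.
    import mindspore

    mindspore.set_context(mode=0,                  # 0 == GRAPH_MODE
                          device_target="Ascend",  # example only; the scripts use config.platform
                          save_graphs=False,
                          jit_config={"jit_level": "O2"})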