"torchvision/vscode:/vscode.git/clone" did not exist on "c9d9e67e4259ff43f71f1bd8327f780ac8134228"
Commit 4b3214c0 authored by A. Unique TensorFlower

Internal change

PiperOrigin-RevId: 341913626
parent cb0ed243
@@ -4,7 +4,6 @@ runtime:
   mixed_precision_dtype: 'bfloat16'
 task:
   annotation_file: '/readahead/200M/placer/prod/home/snaggletooth/test/data/coco/instances_val2017.json'
-  gradient_clip_norm: 10.0
   losses:
     l2_weight_decay: 4.0e-05
   model:
...
@@ -3,7 +3,6 @@ runtime:
   mixed_precision_dtype: 'bfloat16'
 task:
   annotation_file: '/readahead/200M/placer/prod/home/snaggletooth/test/data/coco/instances_val2017.json'
-  gradient_clip_norm: 0.0
   losses:
     l2_weight_decay: 4.0e-05
   model:
...
@@ -4,7 +4,6 @@ runtime:
   mixed_precision_dtype: 'bfloat16'
 task:
   annotation_file: '/readahead/200M/placer/prod/home/snaggletooth/test/data/coco/instances_val2017.json'
-  gradient_clip_norm: 0.0
   losses:
     l2_weight_decay: 4.0e-05
   model:
...
@@ -4,7 +4,6 @@ runtime:
   mixed_precision_dtype: 'bfloat16'
 task:
   annotation_file: '/readahead/200M/placer/prod/home/snaggletooth/test/data/coco/instances_val2017.json'
-  gradient_clip_norm: 0.0
   losses:
     l2_weight_decay: 4.0e-05
   model:
...
@@ -70,7 +70,6 @@ class ImageClassificationTask(cfg.TaskConfig):
   validation_data: DataConfig = DataConfig(is_training=False)
   losses: Losses = Losses()
   evaluation: Evaluation = Evaluation()
-  gradient_clip_norm: float = 0.0
   init_checkpoint: Optional[str] = None
   init_checkpoint_modules: str = 'all'  # all or backbone
...
@@ -207,7 +207,6 @@ class MaskRCNNTask(cfg.TaskConfig):
   init_checkpoint: Optional[str] = None
   init_checkpoint_modules: str = 'all'  # all or backbone
   annotation_file: Optional[str] = None
-  gradient_clip_norm: float = 0.0
   per_category_metrics: bool = False
   # If set, we only use masks for the specified class IDs.
   allowed_mask_class_ids: Optional[List[int]] = None
...
@@ -128,7 +128,6 @@ class RetinaNetTask(cfg.TaskConfig):
   init_checkpoint: Optional[str] = None
   init_checkpoint_modules: str = 'all'  # all or backbone
   annotation_file: Optional[str] = None
-  gradient_clip_norm: float = 0.0
   per_category_metrics: bool = False
...
@@ -87,7 +87,6 @@ class SemanticSegmentationTask(cfg.TaskConfig):
   train_data: DataConfig = DataConfig(is_training=True)
   validation_data: DataConfig = DataConfig(is_training=False)
   losses: Losses = Losses()
-  gradient_clip_norm: float = 0.0
   init_checkpoint: Optional[str] = None
   init_checkpoint_modules: Union[
       str, List[str]] = 'all'  # all, backbone, and/or decoder
...
@@ -97,7 +97,6 @@ class VideoClassificationTask(cfg.TaskConfig):
   train_data: DataConfig = DataConfig(is_training=True)
   validation_data: DataConfig = DataConfig(is_training=False)
   losses: Losses = Losses()
-  gradient_clip_norm: float = -1.0
 
 
 def add_trainer(experiment: cfg.ExperimentConfig,
...
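
The hunks above all delete the same task-level field from each task config dataclass. For orientation only, here is a minimal sketch of the common shape of such a config before this change; the class name and exact set of fields are illustrative, not the actual Model Garden definitions.

import dataclasses
from typing import Optional

@dataclasses.dataclass
class ExampleTaskConfig:
  """Illustrative stand-in for the task configs edited above."""
  init_checkpoint: Optional[str] = None
  init_checkpoint_modules: str = 'all'  # all or backbone
  annotation_file: Optional[str] = None
  gradient_clip_norm: float = 0.0  # the field removed by this commit
  per_category_metrics: bool = False
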
@@ -180,11 +180,6 @@ class ImageClassificationTask(base_task.Task):
     if isinstance(
         optimizer, tf.keras.mixed_precision.LossScaleOptimizer):
       grads = optimizer.get_unscaled_gradients(grads)
-
-    # Apply gradient clipping.
-    if self.task_config.gradient_clip_norm > 0:
-      grads, _ = tf.clip_by_global_norm(
-          grads, self.task_config.gradient_clip_norm)
     optimizer.apply_gradients(list(zip(grads, tvars)))
 
     logs = {self.loss: loss}
...
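
This and the remaining train_step hunks below all drop the same optional global-norm clipping step between gradient unscaling and apply_gradients. A minimal, self-contained sketch of what that removed block did, written as standalone TensorFlow code rather than the actual task methods (the model, data, and clip-norm value are illustrative):

import tensorflow as tf

# Toy model and data; the 10.0 mirrors the first config above.
model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)
gradient_clip_norm = 10.0

x = tf.random.normal([8, 4])
y = tf.random.normal([8, 1])

with tf.GradientTape() as tape:
  loss = tf.reduce_mean(tf.square(model(x, training=True) - y))

tvars = model.trainable_variables
grads = tape.gradient(loss, tvars)
if gradient_clip_norm > 0:
  # tf.clip_by_global_norm returns (clipped_grads, global_norm).
  grads, _ = tf.clip_by_global_norm(grads, gradient_clip_norm)
optimizer.apply_gradients(zip(grads, tvars))
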
@@ -280,11 +280,6 @@ class MaskRCNNTask(base_task.Task):
     # Scales back gradient when LossScaleOptimizer is used.
     if isinstance(optimizer, tf.keras.mixed_precision.LossScaleOptimizer):
       grads = optimizer.get_unscaled_gradients(grads)
-
-    # Apply gradient clipping.
-    if self.task_config.gradient_clip_norm > 0:
-      grads, _ = tf.clip_by_global_norm(
-          grads, self.task_config.gradient_clip_norm)
     optimizer.apply_gradients(list(zip(grads, tvars)))
 
     logs = {self.loss: losses['total_loss']}
...
@@ -218,11 +218,6 @@ class RetinaNetTask(base_task.Task):
     # Scales back gradient when LossScaleOptimizer is used.
     if isinstance(optimizer, tf.keras.mixed_precision.LossScaleOptimizer):
       grads = optimizer.get_unscaled_gradients(grads)
-
-    # Apply gradient clipping.
-    if self.task_config.gradient_clip_norm > 0:
-      grads, _ = tf.clip_by_global_norm(
-          grads, self.task_config.gradient_clip_norm)
     optimizer.apply_gradients(list(zip(grads, tvars)))
 
     logs = {self.loss: loss}
...
@@ -188,11 +188,6 @@ class SemanticSegmentationTask(base_task.Task):
     # used.
     if isinstance(optimizer, tf.keras.mixed_precision.LossScaleOptimizer):
       grads = optimizer.get_unscaled_gradients(grads)
-
-    # Apply gradient clipping.
-    if self.task_config.gradient_clip_norm > 0:
-      grads, _ = tf.clip_by_global_norm(
-          grads, self.task_config.gradient_clip_norm)
     optimizer.apply_gradients(list(zip(grads, tvars)))
 
     logs = {self.loss: loss}
...
@@ -160,11 +160,6 @@ class VideoClassificationTask(base_task.Task):
     # used.
     if isinstance(optimizer, tf.keras.mixed_precision.LossScaleOptimizer):
       grads = optimizer.get_unscaled_gradients(grads)
-
-    # Apply gradient clipping.
-    if self.task_config.gradient_clip_norm > 0:
-      grads, _ = tf.clip_by_global_norm(
-          grads, self.task_config.gradient_clip_norm)
     optimizer.apply_gradients(list(zip(grads, tvars)))
 
     logs = {self.loss: loss}
...
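
The commit message does not say how clipping should be configured after this removal. One hedged note: recent tf.keras optimizers (TF 2.4+) accept a global_clipnorm argument, so if global-norm clipping is still wanted it can be applied at the optimizer level instead of inside each train_step. A sketch under that assumption, not something this commit states:

import tensorflow as tf

# Assumption: clipping is pushed into the optimizer itself. `global_clipnorm`
# clips all gradients by their combined global norm inside apply_gradients,
# so the task code needs no explicit clipping step.
optimizer = tf.keras.optimizers.SGD(learning_rate=0.1, global_clipnorm=10.0)
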