"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "df2f28120d2c96f40a2303d9755c26036aecf45f"
Unverified Commit 93b54368 authored by Younes Belkada's avatar Younes Belkada Committed by GitHub
Browse files

[`BiT`] Small patch fix (#20657)

* patch fix for `fp16`

* use `np` instead
parent 0526a075
@@ -18,6 +18,7 @@ import collections
 import math
 from typing import Optional, Tuple
+import numpy as np
 import torch
 import torch.utils.checkpoint
 from torch import Tensor, nn
@@ -592,7 +593,8 @@ class BitEncoder(nn.Module):
         dilation = 1
         layer_dropouts = [
-            x.tolist() for x in torch.linspace(0, config.drop_path_rate, sum(config.depths)).split(config.depths)
+            x.tolist()
+            for x in torch.Tensor(np.linspace(0, config.drop_path_rate, sum(config.depths))).split(config.depths)
         ]
         for stage_idx, (current_depth, current_hidden_size, layer_dropout) in enumerate(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment