OpenDAS / bitsandbytes · Commits
"src/vscode:/vscode.git/clone" did not exist on "2f7a417d1fb11bd242ad7f9098bb9fdf77c54422"

Commit e9b87112, authored Sep 17, 2022 by justheuristic

un-fuse bias

Parent: 56a074f6

Showing 1 changed file with 6 additions and 4 deletions

bitsandbytes/autograd/_functions.py (+6, -4)

@@ -316,15 +316,14 @@ class MatMul8bitLt(torch.autograd.Function):
         if bias is None or bias.dtype == torch.float16:
             output = F.mm_dequant(out32, Sout32, SCA, state.SCB, bias=bias)
-            output = output.to(A_dtype)
+            delayed_bias = None
         else:  # apply bias separately
             output = F.mm_dequant(out32, Sout32, SCA, state.SCB, bias=None)
-            output = output.to(A_dtype).add_(bias)
+            delayed_bias = bias
 
         # 4. Mixed-precision decomposition matmul
         if coo_tensorA is not None and subA is not None:
-            output += torch.matmul(subA, state.subB)
+            output.addmm_(subA, state.subB)
 
         # 5. Save state
         ctx.state = state
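
Note on the last change in this hunk: `output.addmm_(subA, state.subB)` accumulates the outlier matmul in place into the existing output buffer, whereas the old `output += torch.matmul(subA, state.subB)` first materializes the matmul result as a temporary tensor. A minimal equivalence sketch in plain PyTorch; the shapes below are made up for illustration and are not the tensors of this forward pass:

```python
import torch

# Hypothetical shapes, for illustration only.
output = torch.randn(4, 8)   # stand-in for the dequantized int8 matmul result
subA = torch.randn(4, 3)     # stand-in for the outlier columns of A
subB = torch.randn(3, 8)     # stand-in for the matching rows of B

expected = output + torch.matmul(subA, subB)  # old form: allocates a temporary
output.addmm_(subA, subB)                     # new form: in-place accumulation

assert torch.allclose(output, expected)
```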

@@ -341,6 +340,9 @@ class MatMul8bitLt(torch.autograd.Function):
             ctx.tensor_states = (None, None)
             ctx.save_for_backward(None, None)
 
+        output = output.to(A_dtype)
+        if delayed_bias is not None:
+            output.add_(delayed_bias)
         clone_func = torch.clone if len(output_shape) == 3 else lambda x: x
         return clone_func(output.view(output_shape))
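
Taken together, the two hunks un-fuse the bias: when the bias dtype is not fp16 it is no longer added inside the dequantization branch, but remembered as `delayed_bias` and applied once at the end of `forward`, after the cast back to `A_dtype` and after the mixed-precision decomposition has been accumulated. Below is a minimal sketch of that ordering in plain PyTorch; `matmul_with_delayed_bias` is a hypothetical helper, and an ordinary float matmul stands in for the int8 kernels (`F.igemmlt` / `F.mm_dequant`), so this illustrates only the control flow, not the real bitsandbytes API:

```python
import torch

def matmul_with_delayed_bias(A, B, bias=None, output_shape=None):
    """Sketch of the post-commit ordering: matmul first; a non-fp16 bias is
    applied only after the output is cast back to A's dtype."""
    A_dtype = A.dtype
    output = A.float() @ B.float()          # stand-in for igemmlt + mm_dequant

    if bias is None or bias.dtype == torch.float16:
        if bias is not None:
            output += bias.float()          # fp16 bias can stay on the fused path
        delayed_bias = None
    else:                                   # un-fused: apply the bias later
        delayed_bias = bias

    # (the mixed-precision decomposition would do output.addmm_(subA, subB) here)

    output = output.to(A_dtype)
    if delayed_bias is not None:
        output.add_(delayed_bias)

    output_shape = output_shape if output_shape is not None else output.shape
    clone_func = torch.clone if len(output_shape) == 3 else (lambda x: x)
    return clone_func(output.view(output_shape))

# fp32 inputs with an fp32 bias take the delayed path:
A, B, bias = torch.randn(4, 16), torch.randn(16, 8), torch.randn(8)
assert torch.allclose(matmul_with_delayed_bias(A, B, bias), A @ B + bias)
```

The real forward additionally saves quantization state on `ctx` for the backward pass, which the sketch omits.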