Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
norm
vllm
Commits
318e2b5a
Commit
318e2b5a
authored
Feb 01, 2024
by
zhuwenwen
Browse files
skip fp8
parent
51679bbd
Changes
4
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
22 additions
and
22 deletions
+22
-22
csrc/cache_kernels.cu
csrc/cache_kernels.cu
+14
-14
csrc/dispatch_utils.h
csrc/dispatch_utils.h
+5
-5
csrc/quantization/fp8_e5m2_kvcache/quant_utils.cuh
csrc/quantization/fp8_e5m2_kvcache/quant_utils.cuh
+1
-1
vllm/utils.py
vllm/utils.py
+2
-2
No files found.
csrc/cache_kernels.cu
View file @
318e2b5a
...
...
@@ -244,17 +244,17 @@ void reshape_and_cache(
       CALL_RESHAPE_AND_CACHE(float, float, false);
     } else if (key.dtype() == at::ScalarType::Half) {
       CALL_RESHAPE_AND_CACHE(uint16_t, uint16_t, false);
-    } else if (key.dtype() == at::ScalarType::BFloat16) {
-      CALL_RESHAPE_AND_CACHE(__nv_bfloat16, __nv_bfloat16, false);
+    // } else if (key.dtype() == at::ScalarType::BFloat16) {
+    //   CALL_RESHAPE_AND_CACHE(__nv_bfloat16, __nv_bfloat16, false);
     }
-  } else if (kv_cache_dtype == "fp8_e5m2") {
-    if (key.dtype() == at::ScalarType::Float) {
-      CALL_RESHAPE_AND_CACHE(float, uint8_t, true);
-    } else if (key.dtype() == at::ScalarType::Half) {
-      CALL_RESHAPE_AND_CACHE(uint16_t, uint8_t, true);
-    } else if (key.dtype() == at::ScalarType::BFloat16) {
-      CALL_RESHAPE_AND_CACHE(__nv_bfloat16, uint8_t, true);
-    }
+  // } else if (kv_cache_dtype == "fp8_e5m2") {
+  //   if (key.dtype() == at::ScalarType::Float) {
+  //     CALL_RESHAPE_AND_CACHE(float, uint8_t, true);
+  //   } else if (key.dtype() == at::ScalarType::Half) {
+  //     CALL_RESHAPE_AND_CACHE(uint16_t, uint8_t, true);
+  //   } else if (key.dtype() == at::ScalarType::BFloat16) {
+  //     CALL_RESHAPE_AND_CACHE(__nv_bfloat16, uint8_t, true);
+  //   }
   } else {
     TORCH_CHECK(false, "Unsupported data type of kv cache: ", kv_cache_dtype);
   }
...
...
@@ -462,13 +462,13 @@ void convert_fp8_e5m2(
       CALL_CONVERT_FP8_E5M2(uint8_t, float);
     } else if (src_cache.dtype() == at::ScalarType::Half) {
       CALL_CONVERT_FP8_E5M2(uint8_t, uint16_t);
-    } else if (src_cache.dtype() == at::ScalarType::BFloat16) {
-      CALL_CONVERT_FP8_E5M2(uint8_t, __nv_bfloat16);
+    // } else if (src_cache.dtype() == at::ScalarType::BFloat16) {
+    //   CALL_CONVERT_FP8_E5M2(uint8_t, __nv_bfloat16);
     } else if (dst_cache.dtype() == at::ScalarType::Float) {
       CALL_CONVERT_FP8_E5M2(float, uint8_t);
     } else if (dst_cache.dtype() == at::ScalarType::Half) {
       CALL_CONVERT_FP8_E5M2(uint16_t, uint8_t);
-    } else if (dst_cache.dtype() == at::ScalarType::BFloat16) {
-      CALL_CONVERT_FP8_E5M2(__nv_bfloat16, uint8_t);
+    // } else if (dst_cache.dtype() == at::ScalarType::BFloat16) {
+    //   CALL_CONVERT_FP8_E5M2(__nv_bfloat16, uint8_t);
     }
   }
csrc/dispatch_utils.h
View file @
318e2b5a
...
...
@@ -8,8 +8,8 @@
 #define VLLM_DISPATCH_CASE_FLOATING_TYPES(...)         \
   AT_DISPATCH_CASE(at::ScalarType::Float, __VA_ARGS__) \
-  AT_DISPATCH_CASE(at::ScalarType::Half, __VA_ARGS__)  \
-  AT_DISPATCH_CASE(at::ScalarType::BFloat16, __VA_ARGS__)
+  AT_DISPATCH_CASE(at::ScalarType::Half, __VA_ARGS__)
+  // AT_DISPATCH_CASE(at::ScalarType::BFloat16, __VA_ARGS__)
#define VLLM_DISPATCH_FLOATING_TYPES(TYPE, NAME, ...) \
AT_DISPATCH_SWITCH( \
...
...
@@ -17,9 +17,9 @@
 #define VLLM_DISPATCH_CASE_FLOATING_AND_BYTE_TYPES(...) \
   AT_DISPATCH_CASE(at::ScalarType::Float, __VA_ARGS__)  \
-  AT_DISPATCH_CASE(at::ScalarType::Half, __VA_ARGS__)   \
-  AT_DISPATCH_CASE(at::ScalarType::BFloat16, __VA_ARGS__) \
-  AT_DISPATCH_CASE(at::ScalarType::Byte, __VA_ARGS__)
+  AT_DISPATCH_CASE(at::ScalarType::Half, __VA_ARGS__)
+  // AT_DISPATCH_CASE(at::ScalarType::BFloat16, __VA_ARGS__) \
+  // AT_DISPATCH_CASE(at::ScalarType::Byte, __VA_ARGS__)
#define VLLM_DISPATCH_FLOATING_AND_BYTE_TYPES(TYPE, NAME, ...) \
AT_DISPATCH_SWITCH( \
...
...
csrc/quantization/fp8_e5m2_kvcache/quant_utils.cuh
View file @
318e2b5a
...
...
@@ -7,7 +7,7 @@
 #include "../../attention/attention_dtypes.h"
 #include "../../attention/dtype_float32.cuh"
 #include "../../attention/dtype_float16.cuh"
-#include "../../attention/dtype_bfloat16.cuh"
+// #include "../../attention/dtype_bfloat16.cuh"
#pragma once
...
...
vllm/utils.py
View file @
318e2b5a
...
...
@@ -26,9 +26,9 @@ logger = init_logger(__name__)
 STR_DTYPE_TO_TORCH_DTYPE = {
     "half": torch.half,
-    "bfloat16": torch.bfloat16,
+    # "bfloat16": torch.bfloat16,
     "float": torch.float,
-    "fp8_e5m2": torch.uint8,
+    # "fp8_e5m2": torch.uint8,
 }
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment