OpenDAS / text-generation-inference · Commits

Commit 45344244 (unverified)
Starting some routing tests. (#233)
Authored Apr 25, 2023 by Nicolas Patry; committed via GitHub on Apr 25, 2023
Parent: 323546df

Changes: 2 changed files, with 51 additions and 1 deletion
.gitignore                 +2 -1
router/src/validation.rs   +49 -0
.gitignore (view file @ 45344244)

    .idea
    target
    router/tokenizer.json
router/src/validation.rs (view file @ 45344244)

@@ -378,3 +378,52 @@ pub enum ValidationError {
    #[error("tokenizer error {0}")]
    Tokenizer(String),
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;

    #[tokio::test]
    async fn test_validation_max_new_tokens() {
        let tokenizer = None;
        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_input_length = 4;
        let max_total_tokens = 5;
        let workers = 1;
        let validation = Validation::new(
            workers,
            tokenizer,
            max_best_of,
            max_stop_sequence,
            max_input_length,
            max_total_tokens,
        );

        let max_new_tokens = 10;
        match validation
            .validate_input("Hello".to_string(), None, max_new_tokens)
            .await
        {
            Err(ValidationError::MaxNewTokens(1, 10)) => (),
            _ => panic!("Unexpected not max new tokens"),
        }
    }

    async fn get_tokenizer() -> Tokenizer {
        if !std::path::Path::new("tokenizer.json").exists() {
            let content = reqwest::get("https://huggingface.co/gpt2/raw/main/tokenizer.json")
                .await
                .unwrap()
                .bytes()
                .await
                .unwrap();
            let mut file = std::fs::File::create("tokenizer.json").unwrap();
            file.write_all(&content).unwrap();
        }
        Tokenizer::from_file("tokenizer.json").unwrap()
    }

    #[tokio::test]
    async fn test_validation_input_length() {
        let tokenizer = Some(get_tokenizer().await);
        let max_best_of = 2;
        let max_stop_sequence = 3;
        let max_input_length = 4;
        let max_total_tokens = 5;
        let workers = 1;
        let validation = Validation::new(
            workers,
            tokenizer,
            max_best_of,
            max_stop_sequence,
            max_input_length,
            max_total_tokens,
        );

        let max_new_tokens = 10;
        match validation
            .validate_input("Hello".to_string(), None, max_new_tokens)
            .await
        {
            Err(ValidationError::MaxTotalTokens(5, 1, 10)) => (),
            _ => panic!("Unexpected not max new tokens"),
        }
    }
}
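A note on the expected error values (an inference from the numbers above, not something the commit states): with max_input_length = 4 and max_total_tokens = 5, the no-tokenizer test appears to budget at most 5 - 4 = 1 new token, hence MaxNewTokens(1, 10); with the GPT-2 tokenizer loaded, "Hello" encodes to a single token, so 1 + 10 exceeds the total budget of 5, hence MaxTotalTokens(5, 1, 10). A minimal standalone sketch of that arithmetic, assuming those field meanings (allowed/requested, and total/input/requested respectively):

// Hedged sketch, not part of the commit: reproduces the arithmetic that the two
// expected errors appear to encode. The meaning of each tuple field is an assumption.
fn main() {
    let max_input_length: usize = 4;
    let max_total_tokens: usize = 5;
    let max_new_tokens: usize = 10;

    // Without a tokenizer, assume the worst-case input length (max_input_length),
    // leaving 5 - 4 = 1 new token of budget -> MaxNewTokens(1, 10).
    let budget_without_tokenizer = max_total_tokens - max_input_length;
    assert_eq!(budget_without_tokenizer, 1);
    assert!(max_new_tokens > budget_without_tokenizer);

    // With the GPT-2 tokenizer, "Hello" is one token, so 1 + 10 > 5
    // -> MaxTotalTokens(5, 1, 10).
    let input_length: usize = 1;
    assert!(input_length + max_new_tokens > max_total_tokens);
}

As a usage note: these tests presumably run with a plain cargo test from the router/ directory; get_tokenizer downloads tokenizer.json into the working directory on first run, which is why the .gitignore change above adds router/tokenizer.json.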