[workspace]
members = [
  "benchmark",
  "backends/v2",
  "backends/v3",
  "backends/grpc-metadata",
  "backends/trtllm",
  "launcher",
  "router"
]
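# Crates built by a bare `cargo build` / `cargo test`; the TensorRT-LLM backend is
# excluded below (commented out), presumably because it needs an extra native
# toolchain, and has to be selected explicitly with `-p`.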
default-members = [
  "benchmark",
  "backends/v2",
  "backends/v3",
  "backends/grpc-metadata",
  # "backends/trtllm",
  "launcher",
  "router"
]
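# A virtual workspace does not infer the resolver from an edition, so the
# edition-2021 feature resolver is selected explicitly.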
resolver = "2"

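# Shared package metadata; member crates opt in with `version.workspace = true`, etc.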
[workspace.package]
version = "2.4.2-dev0"
edition = "2021"
authors = ["Olivier Dehaene"]
homepage = "https://github.com/huggingface/text-generation-inference"

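# Centralised dependency versions; members reference them with `{ workspace = true }`.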
[workspace.dependencies]
base64 = "0.22.0"
tokenizers = { version = "0.20.0", features = ["http"] }
hf-hub = { version = "0.3.1", features = ["tokio"] }
metrics = { version = "0.23.0" }
metrics-exporter-prometheus = { version = "0.15.1", features = [] }
minijinja = { version = "2.2.0", features = ["json"] }
minijinja-contrib = { version = "2.0.2", features = ["pycompat"] }
pyo3 = { version = "0.22.2", features = ["auto-initialize"] }

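# Default release profile keeps incremental compilation on for faster local rebuilds.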
[profile.release]
incremental = true

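# Presumably intended for shipped binaries: limited debug info (debug = 1) and
# abort-on-panic, which drops the unwinding machinery.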
[profile.release-binary]
inherits = "release"
debug = 1
incremental = true
panic = "abort"

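# Fully optimised profile: fat LTO, a single codegen unit and no debug info trade
# longer compile times for faster, smaller binaries.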
[profile.release-opt]
inherits = "release"
debug = 0
incremental = false
lto = "fat"
opt-level = 3
codegen-units = 1