Makefile 1.34 KB
Newer Older
OlivierDehaene's avatar
OlivierDehaene committed
1
# Pinned commit of the custom transformers fork (with CUDA kernels) that
# `install-transformers` downloads and installs. `:=` expands once at parse time.
transformers_commit := 2f87dca1ca3e5663d0637da9bb037a6956e57a5e
2

Olivier Dehaene's avatar
Init  
Olivier Dehaene committed
3
# Generate the Python gRPC stubs for the server from ../proto/generate.proto.
.PHONY: gen-server
gen-server:
	# Compile protos
	pip install grpcio-tools==1.51.1 --no-cache-dir
	# `-p` creates the directory only if missing and never errors on an
	# existing one — unlike `mkdir … || true`, it still fails loudly on
	# real problems (e.g. permission denied).
	mkdir -p text_generation_server/pb
	python -m grpc_tools.protoc -I../proto --python_out=text_generation_server/pb --grpc_python_out=text_generation_server/pb ../proto/generate.proto
	# Rewrite the generated absolute `import *_pb2` lines into relative
	# imports so the stubs are importable as a package.
	find text_generation_server/pb/ -type f -name "*.py" -print0 -exec sed -i -e 's/^\(import.*pb2\)/from . \1/g' {} \;
	# Make the generated directory a Python package.
	touch text_generation_server/pb/__init__.py
Olivier Dehaene's avatar
Init  
Olivier Dehaene committed
10

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
11
# Download and install the pinned transformers fork (see $(transformers_commit))
# that ships custom CUDA kernels.
.PHONY: install-transformers
install-transformers:
	# Install specific version of transformers with custom cuda kernels
	# `|| true` kept here: `pip uninstall` exits non-zero when the package
	# is not installed, and a clean environment must not abort the build.
	pip uninstall transformers -y || true
	# `rm -rf` already succeeds on missing paths — no `|| true` needed,
	# so genuine failures (e.g. permission denied) are not silenced.
	rm -rf transformers
	rm -rf transformers-$(transformers_commit)
	curl -L -O https://github.com/OlivierDehaene/transformers/archive/$(transformers_commit).zip
	unzip $(transformers_commit).zip
	rm $(transformers_commit).zip
	mv transformers-$(transformers_commit) transformers
	# Each recipe line runs in its own shell, so `cd` must be chained
	# with `&&` onto the command that needs it.
	cd transformers && python setup.py install
Olivier Dehaene's avatar
Init  
Olivier Dehaene committed
21

Olivier Dehaene's avatar
v0.1.0  
Olivier Dehaene committed
22
23
# Install torch built against CUDA 11.8 from the PyTorch wheel index.
.PHONY: install-torch
install-torch:
	# Install specific version of torch
	pip install torch --extra-index-url https://download.pytorch.org/whl/cu118 --no-cache-dir
Olivier Dehaene's avatar
Init  
Olivier Dehaene committed
25

OlivierDehaene's avatar
OlivierDehaene committed
26
# Full install: generate gRPC stubs, install torch and the custom
# transformers fork, then install this package itself (editable).
.PHONY: install
install: gen-server install-torch install-transformers
	pip install pip --upgrade
	pip install -e . --no-cache-dir
Olivier Dehaene's avatar
Init  
Olivier Dehaene committed
29

Nicolas Patry's avatar
Nicolas Patry committed
30
# Launch a 2-shard development server on bloom-560m via torch.distributed.
.PHONY: run-dev
run-dev:
	SAFETENSORS_FAST_GPU=1 python -m torch.distributed.run --nproc_per_node=2 text_generation_server/cli.py serve bigscience/bloom-560m --sharded