"libai/tokenizer/tokenization_t5.py" did not exist on "fd158e88e82c3fa848017c62a7eccb49a5c64f78"
sampler_cpu.h 186 Bytes
Newer Older
quyuanhao123's avatar
quyuanhao123 committed
1
2
#pragma once

limm's avatar
limm committed
3
#include "../extensions.h"
quyuanhao123's avatar
quyuanhao123 committed
4
5
6

/// @brief CPU neighbor sampler — declaration only; implementation lives in the
///        corresponding .cpp translation unit.
/// @param start  Tensor of starting node indices to sample neighbors for.
///               (assumed — name-based; confirm against the implementation)
/// @param rowptr Tensor that, by name, looks like the row-pointer array of a
///               CSR-format adjacency structure — TODO confirm.
/// @param count  Presumably the number of neighbors to sample per node;
///               verify sign/overload semantics (e.g. negative = all) in the
///               implementation.
/// @param factor Sampling factor; exact meaning not visible from this
///               prototype — NOTE(review): document once the .cpp is checked.
/// @return Tensor of sampled neighbor indices (assumed — confirm shape/dtype
///         with the implementation and its callers).
torch::Tensor neighbor_sampler_cpu(torch::Tensor start, torch::Tensor rowptr,
                                   int64_t count, double factor);