Unverified commit 605566ec authored by Miltos, committed by GitHub

Add optional parameter to `scatter_[log_]softmax` to indicate the number of segments (#243)

* Add parameter to `scatter_[log_]softmax`

* Update softmax.py

* Format code.
parent 26844e11
softmax.py

+from typing import Optional
+
 import torch
 
 from torch_scatter import scatter_sum, scatter_max
 from torch_scatter.utils import broadcast
@@ -5,39 +7,45 @@ from torch_scatter.utils import broadcast
 
 
 def scatter_softmax(src: torch.Tensor, index: torch.Tensor,
-                    dim: int = -1) -> torch.Tensor:
+                    dim: int = -1,
+                    dim_size: Optional[int] = None) -> torch.Tensor:
     if not torch.is_floating_point(src):
         raise ValueError('`scatter_softmax` can only be computed over tensors '
                          'with floating point data types.')
 
     index = broadcast(index, src, dim)
 
-    max_value_per_index = scatter_max(src, index, dim=dim)[0]
+    max_value_per_index = scatter_max(
+        src, index, dim=dim, dim_size=dim_size)[0]
     max_per_src_element = max_value_per_index.gather(dim, index)
 
     recentered_scores = src - max_per_src_element
     recentered_scores_exp = recentered_scores.exp_()
 
-    sum_per_index = scatter_sum(recentered_scores_exp, index, dim)
+    sum_per_index = scatter_sum(
+        recentered_scores_exp, index, dim, dim_size=dim_size)
     normalizing_constants = sum_per_index.gather(dim, index)
 
     return recentered_scores_exp.div(normalizing_constants)
 
 
 def scatter_log_softmax(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
-                        eps: float = 1e-12) -> torch.Tensor:
+                        eps: float = 1e-12,
+                        dim_size: Optional[int] = None) -> torch.Tensor:
     if not torch.is_floating_point(src):
         raise ValueError('`scatter_log_softmax` can only be computed over '
                          'tensors with floating point data types.')
 
     index = broadcast(index, src, dim)
 
-    max_value_per_index = scatter_max(src, index, dim=dim)[0]
+    max_value_per_index = scatter_max(
+        src, index, dim=dim, dim_size=dim_size)[0]
     max_per_src_element = max_value_per_index.gather(dim, index)
 
     recentered_scores = src - max_per_src_element
 
-    sum_per_index = scatter_sum(recentered_scores.exp(), index, dim)
+    sum_per_index = scatter_sum(
+        recentered_scores.exp(), index, dim, dim_size=dim_size)
     normalizing_constants = sum_per_index.add_(eps).log_().gather(dim, index)
 
     return recentered_scores.sub_(normalizing_constants)
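Both functions use the standard log-sum-exp stabilization: the per-segment maximum is scattered out, subtracted from each score, and only then exponentiated, so large scores cannot overflow `exp`. The new `dim_size` argument is simply forwarded to the intermediate `scatter_max` and `scatter_sum` calls, fixing the number of segments up front instead of letting it be inferred as `index.max() + 1`. A minimal usage sketch follows; the tensor values are illustrative and not part of the commit:

```python
import torch
from torch_scatter import scatter_softmax, scatter_log_softmax

src = torch.tensor([0.5, 1.5, 0.0, 2.0])  # scores
index = torch.tensor([0, 0, 1, 1])        # segment id of each score

# Segment count inferred from the data: index.max() + 1 == 2.
out = scatter_softmax(src, index, dim=-1)

# Segment count fixed explicitly; intermediate buffers now cover
# `dim_size` segments even though segments 2 and 3 are empty here.
out_fixed = scatter_softmax(src, index, dim=-1, dim_size=4)

# Each score is normalized only within its own segment, so empty
# trailing segments do not change the result.
assert torch.allclose(out, out_fixed)

# scatter_log_softmax follows the same pattern (up to its `eps` term).
log_out = scatter_log_softmax(src, index, dim=-1, dim_size=4)
assert torch.allclose(log_out.exp(), out_fixed)
```

Passing `dim_size` is mainly useful when these ops are composed with other scatter calls that already assume a known, fixed number of segments, where an inferred count would disagree whenever the current batch leaves the last segments empty.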