Unverified Commit 4176bc16 authored by giacomo snidero's avatar giacomo snidero Committed by GitHub
Browse files

Add documentation for multi-label classification (#14168)

* update example docstring multilabel example

* update example docstring multilabel example
parent faacd747
...@@ -865,7 +865,7 @@ PT_QUESTION_ANSWERING_SAMPLE = r""" ...@@ -865,7 +865,7 @@ PT_QUESTION_ANSWERING_SAMPLE = r"""
""" """
PT_SEQUENCE_CLASSIFICATION_SAMPLE = r""" PT_SEQUENCE_CLASSIFICATION_SAMPLE = r"""
Example:: Example of single-label classification::
>>> from transformers import {processor_class}, {model_class} >>> from transformers import {processor_class}, {model_class}
>>> import torch >>> import torch
...@@ -878,8 +878,23 @@ PT_SEQUENCE_CLASSIFICATION_SAMPLE = r""" ...@@ -878,8 +878,23 @@ PT_SEQUENCE_CLASSIFICATION_SAMPLE = r"""
>>> outputs = model(**inputs, labels=labels) >>> outputs = model(**inputs, labels=labels)
>>> loss = outputs.loss >>> loss = outputs.loss
>>> logits = outputs.logits >>> logits = outputs.logits
Example of multi-label classification::
>>> from transformers import {processor_class}, {model_class}
>>> import torch
>>> tokenizer = {processor_class}.from_pretrained('{checkpoint}')
>>> model = {model_class}.from_pretrained('{checkpoint}', problem_type="multi_label_classification")
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> labels = torch.tensor([[1, 1]], dtype=torch.float) # need dtype=float for BCEWithLogitsLoss
>>> outputs = model(**inputs, labels=labels)
>>> loss = outputs.loss
>>> logits = outputs.logits
""" """
PT_MASKED_LM_SAMPLE = r""" PT_MASKED_LM_SAMPLE = r"""
Example:: Example::
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment