mirror of
https://github.com/huggingface/transformers.git
synced 2025-10-20 17:13:56 +08:00
Move the warning to the documentation for DataCollatorWithFlattening (#36707)
Remove init warning
This commit is contained in:
committed by
GitHub
parent
9be4728af8
commit
b815fae359
@ -1793,16 +1793,19 @@ class DataCollatorWithFlattening(DefaultDataCollator):
|
||||
- concatenate the entire mini batch into a single long sequence [1, total_tokens]
|
||||
- uses `separator_id` to separate sequences within the concatenated `labels`, default value is -100
|
||||
- no padding will be added, returns `input_ids`, `labels` and `position_ids`
|
||||
|
||||
<Tip warning={true}>
|
||||
|
||||
Using `DataCollatorWithFlattening` will flatten the entire mini batch into a single long sequence.
|
||||
Make sure your attention computation is able to handle it!
|
||||
|
||||
</Tip>
|
||||
"""
|
||||
|
||||
def __init__(self, *args, return_position_ids=True, separator_id=-100, **kwargs):
    """Initialize the flattening collator.

    Args:
        return_position_ids: Whether `position_ids` for the flattened batch
            should be produced; stored on the instance for later use.
        separator_id: Value used to separate sequences within the concatenated
            `labels`. Defaults to -100.

    Extra positional/keyword arguments are forwarded to the parent collator.
    """
    super().__init__(*args, **kwargs)
    # Remember the configuration for subsequent calls.
    self.separator_id = separator_id
    self.return_position_ids = return_position_ids
    # Warn eagerly: flattening concatenates the whole mini batch into one
    # long sequence, which not every attention implementation can handle.
    message = (
        "Using `DataCollatorWithFlattening` will flatten the entire mini batch into single long sequence."
        "Make sure your attention computation is able to handle it!"
    )
    warnings.warn(message)
|
||||
|
||||
def __call__(self, features, return_tensors=None, separator_id=None):
|
||||
if return_tensors is None:
|
||||
|
Reference in New Issue
Block a user