@@ -2791,9 +2790,18 @@
-        // At this point, tokens is batched: [batch_size, tokens]
-        // However, array may be jagged. So, we pad to max_length
-        if (truncation && max_length === null) {
+        // At this point, `encodedTokens` is batched, of shape [batch_size, tokens].
+        // However, the array may be jagged, so we may need to pad to max_length.
+        if (max_length === null) {
             max_length = this.model_max_length;
         } else if (max_length && truncation === null) {
-            console.warn(`Truncation was not explicitly activated but \`max_length\` is provided a specific value, please use \`truncation=true\` to explicitly truncate examples to max length.`)
+            if (padding === true) {
+                console.warn(
+                    "`max_length` is ignored when `padding: true` and there is no truncation strategy. " +
+                    "To pad to max length, use `padding: 'max_length'`."
+                )
+                max_length = this.model_max_length;
+            } else if (padding === false) {
+                console.warn("Truncation was not explicitly activated but `max_length` is provided a specific value, please use `truncation: true` to explicitly truncate examples to max length.");
+                truncation = true;
+            }
         }

         // padding: 'max_length' doesn't require any additional calculation
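To make the new branching concrete, the sketch below calls a transformers.js-style tokenizer with the three option combinations the hunk distinguishes. This is a minimal sketch, assuming the hunk belongs to a tokenizer that is callable with `{ padding, truncation, max_length }` options; the package name, model id, sample sentences, and the `.dims` accessor on the returned tensors are illustrative assumptions, not part of the diff.

```js
// Minimal sketch of the three cases handled above (assumed transformers.js-style API).
import { AutoTokenizer } from '@xenova/transformers';

const tokenizer = await AutoTokenizer.from_pretrained('Xenova/bert-base-uncased');

// A jagged batch: the two inputs tokenize to different lengths.
const sentences = ['hello world', 'a much longer sentence that needs many more tokens'];

// 1) `padding: true` with a `max_length` but no truncation strategy:
//    the new branch warns that `max_length` is ignored, falls back to
//    `model_max_length`, and the batch is padded to its longest sequence.
const a = tokenizer(sentences, { padding: true, max_length: 8 });
console.log(a.input_ids.dims); // [2, <length of the longest sequence>]

// 2) `padding: 'max_length'`: every row is padded out to `max_length`,
//    which is why the trailing comment says no extra calculation is needed.
const b = tokenizer(sentences, { padding: 'max_length', truncation: true, max_length: 8 });
console.log(b.input_ids.dims); // [2, 8]

// 3) `padding: false` with a `max_length` but no truncation strategy:
//    the pre-existing behaviour is kept, i.e. a warning is printed and
//    truncation is switched on for this call.
const c = tokenizer('a much longer sentence that needs many more tokens', {
    padding: false,
    max_length: 8,
});
console.log(c.input_ids.dims); // [1, 8]
```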