@@ -12,14 +12,14 @@ import 'package:ml_dataframe/ml_dataframe.dart';
1212import 'package:ml_linalg/dtype.dart' ;
1313
1414/// A class that performs decision tree-based classification
15- ///
16- /// Decision tree is an algorithm that recursively splits the input data into
17- /// subsets until the subsets conforming certain stop criteria are met.
18- ///
19- /// Process of forming such a recursive subsets structure is called
20- /// decision tree learning. Once a decision tree learned, it may be used to
21- /// classify new samples with the same features that were used to learn the
22- /// tree.
15+ ///
16+ /// The decision tree is an algorithm that recursively splits the input data
17+ /// into subsets until certain stop criteria are met - e.g. a tree node contains
18+ /// a certain number of observations, or the tree depth is equal to a given value.
19+ ///
20+ /// The process of forming such a data structure is called decision tree
21+ /// learning. Once a decision tree is built, it may be used to classify new
22+ /// samples with the same features that were used to learn the tree.
2323abstract class DecisionTreeClassifier
2424 implements
2525 Assessable ,
@@ -28,27 +28,25 @@ abstract class DecisionTreeClassifier
2828 Classifier {
2929 /// Parameters:
3030 ///
31- /// [trainData] A [DataFrame] with observations that will be used by the
32- /// classifier to learn a decision tree. Must contain [targetName] column.
31+ /// [trainData] A [DataFrame] with observations that will be used to build a
32+ /// decision tree. Must contain [targetName] column.
3333 ///
34- /// [targetName] A name of a column in [trainData] that contains class
35- /// labels
34+ /// [targetName] A name of a column in [trainData] that contains class labels
3635 ///
37- /// [minError] A value within the range 0..1 (both inclusive). The value
38- /// denotes a minimal error on a single decision tree node and is used as a
39- /// stop criteria to avoid farther decision's tree node splitting: if the
40- /// node is good enough, there is no need to split it and thus it will become
41- /// a leaf.
36+ /// [minError] A value within the range 0..1 (both inclusive). The value is a
37+ /// minimal error on a single decision tree node and is used as a stop
38+ /// criterion to avoid further decision tree node splitting: if the node is
39+ /// good enough, there is no need to split it and thus it will become a leaf.
4240 ///
4341 /// [minSamplesCount] A minimal number of samples (observations) on the
4442 /// decision tree node. The value is used as a stop criterion to avoid
45- /// farther decision's tree node splitting: if the node contains less than or
43+ /// further decision tree node splitting: if the node contains less than or
4644 /// equal to [minSamplesCount] observations, the node turns into the leaf.
4745 ///
4846 /// [maxDepth] A maximum number of decision tree levels.
4947 ///
50- /// [assessorType] Defines an assessment type that will be applied to a subset
51- /// of data in order to decide how to split the subset while building the tree.
48+ /// [assessorType] Defines an assessment type that will be applied to the
49+ /// data in order to decide how to split the subset while building the tree.
5250 /// Default value is [TreeAssessorType.gini]
5351 ///
5452 /// Possible values of [assessorType] :
@@ -109,16 +107,15 @@ abstract class DecisionTreeClassifier
109107 /// final json = await file.readAsString();
110108 /// final restoredClassifier = DecisionTreeClassifier.fromJson(json);
111109 ///
112- /// // here you can use previously fitted restored classifier to make
113- /// // some prediction, e.g. via `DecisionTreeClassifier.predict(...)`;
110+ /// // here you can do whatever you want with the restored classifier
114111 /// ````
115112 factory DecisionTreeClassifier .fromJson (String json) =>
116113 initDecisionTreeModule ()
117114 .get <DecisionTreeClassifierFactory >()
118115 .fromJson (json);
119116
120117 /// A minimal error on a single decision tree node. It is used as a
121- /// stop criteria to avoid farther decision's tree node splitting: if the
118+ /// stop criteria to avoid further decision tree node splitting: if the
122119 /// node is good enough, there is no need to split it and thus it can be
123120 /// considered a leaf.
124121 ///
@@ -129,7 +126,7 @@ abstract class DecisionTreeClassifier
129126
130127 /// A minimal number of samples (observations) on the
131128 /// decision tree node. The value is used as a stop criterion to avoid
132- /// farther decision's tree node splitting: if the node contains less than or
129+ /// further decision tree node splitting: if the node contains less than or
133130 /// equal to [minSamplesCount] observations, the node is considered a leaf.
134131 ///
135132 /// The value is read-only, it's a hyperparameter of the model
@@ -140,11 +137,11 @@ abstract class DecisionTreeClassifier
140137 /// The value is read-only, it's a hyperparameter of the model
141138 int get maxDepth;
142139
143- /// An assessment type that was applied to a subset of data in order to
144- /// decide how to split the subset while building the tree
140+ /// An assessment type that was applied to the initial data in order to
141+ /// decide how to split it while building the tree
145142 TreeAssessorType get assessorType;
146143
147- /// Saves tree as SVG-image. Example:
144+ /// Saves the tree as an SVG-image. Example:
148145 ///
149146 /// ```dart
150147 /// final samples = (await fromCsv('path/to/dataset.csv'));
0 commit comments