Skip to content
This repository was archived by the owner on Aug 31, 2021. It is now read-only.

Commit 35e12d7

Browse files
committed
- Removed unnecessary `random` imports and `random.seed(42)` calls; fixes issue #92
1 parent 357eeee commit 35e12d7

File tree

11 files changed

+0
-34
lines changed

11 files changed

+0
-34
lines changed

examples/boston.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,15 +12,11 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
16-
1715
from sklearn import datasets, cross_validation, metrics
1816
from sklearn import preprocessing
1917

2018
import skflow
2119

22-
random.seed(42)
23-
2420
# Load dataset
2521
boston = datasets.load_boston()
2622
X, y = boston.data, boston.target

examples/digits.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
1615
from sklearn import datasets, cross_validation, metrics
1716
import tensorflow as tf
1817

examples/iris.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,14 +12,10 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
16-
1715
from sklearn import datasets, metrics, cross_validation
1816

1917
import skflow
2018

21-
random.seed(42)
22-
2319
# Load dataset.
2420
iris = datasets.load_iris()
2521
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,

examples/iris_custom_decay_dnn.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,17 +12,12 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
16-
1715
from sklearn import datasets, metrics
1816
from sklearn.cross_validation import train_test_split
1917

2018
import skflow
2119
import tensorflow as tf
2220

23-
24-
random.seed(42)
25-
2621
iris = datasets.load_iris()
2722
X_train, X_test, y_train, y_test = train_test_split(iris.data,
2823
iris.target,

examples/iris_custom_model.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,17 +12,13 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
16-
1715
import skflow
1816
from sklearn import datasets, metrics, cross_validation
1917

2018
iris = datasets.load_iris()
2119
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,
2220
test_size=0.2, random_state=42)
2321

24-
random.seed(42)
25-
2622
def my_model(X, y):
2723
"""This is DNN with 10, 20, 10 hidden layers, and dropout of 0.9 probability."""
2824
layers = skflow.ops.dnn(X, [10, 20, 10], keep_prob=0.9)

examples/iris_early_stopping.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,16 +15,12 @@
1515
import tensorflow as tf
1616
from tensorflow.python.platform import googletest
1717

18-
import random
19-
2018
from sklearn import datasets, metrics
2119
from sklearn.cross_validation import train_test_split
2220

2321
import skflow
2422

2523

26-
random.seed(42)
27-
2824
iris = datasets.load_iris()
2925
X_train, X_test, y_train, y_test = train_test_split(iris.data,
3026
iris.target,

examples/iris_save_restore.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
1615
import shutil
1716

1817
import skflow
@@ -22,8 +21,6 @@
2221
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,
2322
test_size=0.2, random_state=42)
2423

25-
random.seed(42)
26-
2724
classifier = skflow.TensorFlowLinearClassifier(n_classes=3)
2825
classifier.fit(X_train, y_train)
2926
score = metrics.accuracy_score(y_test, classifier.predict(X_test))

examples/mnist.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
https://www.tensorflow.org/versions/master/tutorials/mnist/pros/index.html#deep-mnist-for-experts
1919
"""
2020

21-
import random
2221
from sklearn import metrics
2322

2423
import tensorflow as tf

examples/multiple_gpu.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,6 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
16-
1715
import skflow
1816
import tensorflow as tf
1917
from sklearn import datasets, metrics, cross_validation
@@ -22,8 +20,6 @@
2220
X_train, X_test, y_train, y_test = cross_validation.train_test_split(iris.data, iris.target,
2321
test_size=0.2, random_state=42)
2422

25-
random.seed(42)
26-
2723
def my_model(X, y):
2824
"""
2925
This is DNN with 10, 20, 10 hidden layers, and dropout of 0.5 probability.

examples/out_of_core_data_classification.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,6 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import random
16-
1715
from sklearn import datasets, metrics, cross_validation
1816

1917
import skflow
@@ -23,7 +21,6 @@
2321
# Sometimes when your dataset is too large to hold in the memory
2422
# you may want to load it into a out-of-core dataframe as provided by dask library
2523
# to firstly draw sample batches and then load into memory for training.
26-
random.seed(42)
2724

2825
# Load dataset.
2926
iris = datasets.load_iris()

0 commit comments

Comments (0)