9 changes: 7 additions & 2 deletions .travis.yml
@@ -8,10 +8,13 @@ os:
 env:
   - JOB=DOCS
   - JOB=BUILD_AND_TEST
+  - JOB=PRE_COMMIT
 matrix:
   exclude:
     - os: osx
-      env: JOB=DOCS # Only generate documentation in linux
+      env: JOB=DOCS # Only generate documentation in linux.
+    - os: osx
+      env: JOB=PRE_COMMIT # Only check pre-commit hook in linux

 addons:
   apt:
@@ -39,6 +42,7 @@ addons:
       - lcov
       - graphviz
       - swig
+      - clang-format-3.8
 before_install:
   - |
     if [ ${JOB} == "BUILD_AND_TEST" ]; then
@@ -53,7 +57,8 @@ before_install:
     fi
   - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo paddle/scripts/travis/before_install.linux.sh; fi
   - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then paddle/scripts/travis/before_install.osx.sh; fi
-  - pip install wheel protobuf sphinx recommonmark virtualenv numpy sphinx_rtd_theme
+  - if [[ "$JOB" == "PRE_COMMIT" ]]; then sudo ln -s /usr/bin/clang-format-3.8 /usr/bin/clang-format; fi
+  - pip install wheel protobuf sphinx recommonmark virtualenv numpy sphinx_rtd_theme pre-commit
 script:
   - paddle/scripts/travis/main.sh
 notifications:
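
A note for reviewers unfamiliar with pre-commit: the new PRE_COMMIT job only installs the tooling (pre-commit via pip, plus the clang-format-3.8 symlink above). The checks themselves are declared in a .pre-commit-config.yaml at the repository root, which this diff does not include. A minimal sketch of such a config, assuming the stock pre-commit-hooks repository (the revision and hook ids below are illustrative assumptions, not taken from this PR):

# .pre-commit-config.yaml -- hypothetical sketch, not part of this diff
- repo: https://github.com/pre-commit/pre-commit-hooks
  sha: v0.7.1  # assumed pin; pre-commit releases before 1.0 key this as `sha`, newer ones as `rev`
  hooks:
  - id: trailing-whitespace  # strips trailing spaces
  - id: end-of-file-fixer    # normalizes end-of-file newlines, as in the .sh and README fixes below

With a config like this in place, `pre-commit run --all-files` reproduces the CI check locally, and `pre-commit install` runs the same hooks on every `git commit`.
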
20 changes: 9 additions & 11 deletions WORKSPACE
@@ -1,17 +1,15 @@
 # External dependency to Google protobuf.
 http_archive(
-    name = "protobuf",
-    url = "http://github.com/google/protobuf/archive/v3.1.0.tar.gz",
-    sha256 = "0a0ae63cbffc274efb573bdde9a253e3f32e458c41261df51c5dbc5ad541e8f7",
-    strip_prefix = "protobuf-3.1.0",
-)
+    name="protobuf",
+    url="http://github.com/google/protobuf/archive/v3.1.0.tar.gz",
+    sha256="0a0ae63cbffc274efb573bdde9a253e3f32e458c41261df51c5dbc5ad541e8f7",
+    strip_prefix="protobuf-3.1.0", )

 # External dependency to gtest 1.7.0. This method comes from
 # https://www.bazel.io/versions/master/docs/tutorial/cpp.html.
 new_http_archive(
-    name = "gtest",
-    url = "https://github.com/google/googletest/archive/release-1.7.0.zip",
-    sha256 = "b58cb7547a28b2c718d1e38aee18a3659c9e3ff52440297e965f5edffe34b6d0",
-    build_file = "third_party/gtest.BUILD",
-    strip_prefix = "googletest-release-1.7.0",
-)
+    name="gtest",
+    url="https://github.com/google/googletest/archive/release-1.7.0.zip",
+    sha256="b58cb7547a28b2c718d1e38aee18a3659c9e3ff52440297e965f5edffe34b6d0",
+    build_file="third_party/gtest.BUILD",
+    strip_prefix="googletest-release-1.7.0", )
1 change: 0 additions & 1 deletion benchmark/tensorflow/rnn/run_multi.sh
@@ -25,4 +25,3 @@ test 4 2 256 512
 test 4 2 512 128
 test 4 2 512 256
 test 4 2 512 512
-
2 changes: 1 addition & 1 deletion demo/gan/README.md
@@ -10,4 +10,4 @@ Then you can run the command below. The flag -d specifies the training data (cif
 $python gan_trainer.py -d cifar --use_gpu 1

 The generated images will be stored in ./cifar_samples/
-The corresponding models will be stored in ./cifar_params/
\ No newline at end of file
+The corresponding models will be stored in ./cifar_params/
1 change: 0 additions & 1 deletion demo/gan/data/download_cifar.sh
@@ -15,4 +15,3 @@ set -e
 wget https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
 tar zxf cifar-10-python.tar.gz
 rm cifar-10-python.tar.gz
-
2 changes: 0 additions & 2 deletions demo/gan/data/get_mnist_data.sh
@@ -15,5 +15,3 @@ do
     gunzip ${fname}.gz
   fi
 done
-
-
147 changes: 82 additions & 65 deletions demo/gan/gan_conf.py
@@ -14,10 +14,9 @@
 from paddle.trainer_config_helpers import *

 mode = get_config_arg("mode", str, "generator")
-assert mode in set(["generator",
-                    "discriminator",
-                    "generator_training",
-                    "discriminator_training"])
+assert mode in set([
+    "generator", "discriminator", "generator_training", "discriminator_training"
+])

 is_generator_training = mode == "generator_training"
 is_discriminator_training = mode == "discriminator_training"
@@ -38,8 +37,8 @@
 settings(
     batch_size=128,
     learning_rate=1e-4,
-    learning_method=AdamOptimizer(beta1=0.5)
-)
+    learning_method=AdamOptimizer(beta1=0.5))
+

 def discriminator(sample):
     """
@@ -50,70 +49,87 @@ def discriminator(sample):
     of the sample is from real data.
     """
     param_attr = ParamAttr(is_static=is_generator_training)
-    bias_attr = ParamAttr(is_static=is_generator_training,
-                          initial_mean=1.0,
-                          initial_std=0)
-
-    hidden = fc_layer(input=sample, name="dis_hidden", size=hidden_dim,
-                      bias_attr=bias_attr,
-                      param_attr=param_attr,
-                      act=ReluActivation())
-
-    hidden2 = fc_layer(input=hidden, name="dis_hidden2", size=hidden_dim,
-                       bias_attr=bias_attr,
-                       param_attr=param_attr,
-                       act=LinearActivation())
-
-    hidden_bn = batch_norm_layer(hidden2,
-                                 act=ReluActivation(),
-                                 name="dis_hidden_bn",
-                                 bias_attr=bias_attr,
-                                 param_attr=ParamAttr(is_static=is_generator_training,
-                                                      initial_mean=1.0,
-                                                      initial_std=0.02),
-                                 use_global_stats=False)
-
-    return fc_layer(input=hidden_bn, name="dis_prob", size=2,
-                    bias_attr=bias_attr,
-                    param_attr=param_attr,
-                    act=SoftmaxActivation())
+    bias_attr = ParamAttr(
+        is_static=is_generator_training, initial_mean=1.0, initial_std=0)
+
+    hidden = fc_layer(
+        input=sample,
+        name="dis_hidden",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=ReluActivation())
+
+    hidden2 = fc_layer(
+        input=hidden,
+        name="dis_hidden2",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+
+    hidden_bn = batch_norm_layer(
+        hidden2,
+        act=ReluActivation(),
+        name="dis_hidden_bn",
+        bias_attr=bias_attr,
+        param_attr=ParamAttr(
+            is_static=is_generator_training, initial_mean=1.0,
+            initial_std=0.02),
+        use_global_stats=False)
+
+    return fc_layer(
+        input=hidden_bn,
+        name="dis_prob",
+        size=2,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=SoftmaxActivation())
+

 def generator(noise):
     """
     generator generates a sample given noise
     """
     param_attr = ParamAttr(is_static=is_discriminator_training)
-    bias_attr = ParamAttr(is_static=is_discriminator_training,
-                          initial_mean=1.0,
-                          initial_std=0)
-
-    hidden = fc_layer(input=noise,
-                      name="gen_layer_hidden",
-                      size=hidden_dim,
-                      bias_attr=bias_attr,
-                      param_attr=param_attr,
-                      act=ReluActivation())
-
-    hidden2 = fc_layer(input=hidden, name="gen_hidden2", size=hidden_dim,
-                       bias_attr=bias_attr,
-                       param_attr=param_attr,
-                       act=LinearActivation())
-
-    hidden_bn = batch_norm_layer(hidden2,
-                                 act=ReluActivation(),
-                                 name="gen_layer_hidden_bn",
-                                 bias_attr=bias_attr,
-                                 param_attr=ParamAttr(is_static=is_discriminator_training,
-                                                      initial_mean=1.0,
-                                                      initial_std=0.02),
-                                 use_global_stats=False)
-
-    return fc_layer(input=hidden_bn,
-                    name="gen_layer1",
-                    size=sample_dim,
-                    bias_attr=bias_attr,
-                    param_attr=param_attr,
-                    act=LinearActivation())
+    bias_attr = ParamAttr(
+        is_static=is_discriminator_training, initial_mean=1.0, initial_std=0)
+
+    hidden = fc_layer(
+        input=noise,
+        name="gen_layer_hidden",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=ReluActivation())
+
+    hidden2 = fc_layer(
+        input=hidden,
+        name="gen_hidden2",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+
+    hidden_bn = batch_norm_layer(
+        hidden2,
+        act=ReluActivation(),
+        name="gen_layer_hidden_bn",
+        bias_attr=bias_attr,
+        param_attr=ParamAttr(
+            is_static=is_discriminator_training,
+            initial_mean=1.0,
+            initial_std=0.02),
+        use_global_stats=False)
+
+    return fc_layer(
+        input=hidden_bn,
+        name="gen_layer1",
+        size=sample_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+

 if is_generator_training:
     noise = data_layer(name="noise", size=noise_dim)
Expand All @@ -126,7 +142,8 @@ def generator(noise):
label = data_layer(name="label", size=1)
prob = discriminator(sample)
cost = cross_entropy(input=prob, label=label)
classification_error_evaluator(input=prob, label=label, name=mode+'_error')
classification_error_evaluator(
input=prob, label=label, name=mode + '_error')
outputs(cost)

if is_generator:
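
Beyond formatting, it helps to recall how gan_conf.py is driven: one file defines four variants selected by the `mode` argument, and ParamAttr(is_static=...) freezes whichever half of the GAN is not currently being trained, which is what lets a single definition support alternating generator/discriminator updates. A minimal sketch of how a trainer selects a mode (this mirrors the parse_config usage in demo/gan/gan_trainer.py in the same demo; treat the exact call as an assumption, since that file is not shown in this diff):

# Hypothetical driver snippet; assumes the legacy Paddle config-parsing API.
from paddle.trainer.config_parser import parse_config

# Each parse passes its own `mode`, so is_static freezes the opposite network:
# generator weights stay fixed while the discriminator trains, and vice versa.
gen_conf = parse_config("demo/gan/gan_conf.py", "mode=generator_training")
dis_conf = parse_config("demo/gan/gan_conf.py", "mode=discriminator_training")
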