fix develop build issue (PaddlePaddle#10978)
* fix develop build issue

* fix google style

* cpplint check only fluid
typhoonzero authored May 29, 2018
1 parent 8075a11 commit 8f7b020
Showing 278 changed files with 1,138 additions and 1,132 deletions.
2 changes: 1 addition & 1 deletion .clang-format
@@ -19,7 +19,7 @@ BasedOnStyle: Google
IndentWidth: 2
TabWidth: 2
ContinuationIndentWidth: 4
-AccessModifierOffset: -2 # The private/protected/public has no indent in class
+AccessModifierOffset: -1 # The private/protected/public has no indent in class
Standard: Cpp11
AllowAllParametersOfDeclarationOnNextLine: true
BinPackParameters: false
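For context on the .clang-format change above: with IndentWidth: 2, class members are indented two columns, and AccessModifierOffset is added on top of that for access specifiers. The old -2 put public:/private: flush left; the new -1 indents them one space, which is the stock Google-style default. A minimal sketch of the layout clang-format now produces (class Foo is hypothetical):

class Foo {
 public:   // one space: 2 (IndentWidth) + (-1) (AccessModifierOffset)
  void Bar();

 private:  // the same one-space reindent accounts for most of this commit
  int baz_;
};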
4 changes: 2 additions & 2 deletions paddle/api/GradientMachine.cpp
@@ -94,7 +94,7 @@ void UpdateCallback::apply(Parameter* p) {
}

class UpdateCallbackWrapper {
-public:
+ public:
explicit UpdateCallbackWrapper(const UpdateCallback& callback)
: callback(const_cast<UpdateCallback&>(callback)) {}

@@ -105,7 +105,7 @@ class UpdateCallbackWrapper {
delete p;
}

-private:
+ private:
UpdateCallback& callback;
};

99 changes: 50 additions & 49 deletions paddle/api/PaddleAPI.h
@@ -59,9 +59,10 @@ class RangeError {};

/// Not support Error, such as access GPU memory directly, etc.
class UnsupportError : public std::runtime_error {
-public:
-  UnsupportError() : std::runtime_error(" "){};
-  UnsupportError(const std::string& message) : std::runtime_error(message){};
+ public:
+  UnsupportError() : std::runtime_error(" ") {}
+  explicit UnsupportError(const std::string& message)
+      : std::runtime_error(message) {}
};

/// This type will map to python's list of float.
@@ -105,7 +106,7 @@ class Matrix {
DISABLE_COPY(Matrix);
static Matrix* createByPaddleMatrixPtr(void* sharedPtr);

-public:
+ public:
virtual ~Matrix();

/**
@@ -231,7 +232,7 @@ class Matrix {

bool isGpu() const;

-private:
+ private:
void* getSharedPtr() const;

MatrixPrivate* m;
@@ -248,7 +249,7 @@ class Vector {

void* getSharedPtr();

-public:
+ public:
~Vector();

/// Create Vector filled with zero.
@@ -310,10 +311,10 @@ class Vector {
/// __len__ in python
size_t getSize() const;

-private:
+ private:
VectorPrivate* m;

-private:
+ private:
friend class Parameter;
friend class ParameterOptimizer;
friend struct ParameterTraverseCallbackPrivate;
@@ -325,7 +326,7 @@ class IVector {
DISABLE_COPY(IVector);
static IVector* createByPaddleVectorPtr(void* ptr);

-public:
+ public:
/// Create IVector filled with zero
static IVector* createZero(size_t sz, bool useGpu = isUsingGpu());

@@ -389,7 +390,7 @@ class IVector {
/// This method will map to python __len__();
size_t getSize() const;

-private:
+ private:
void* getSharedPtr() const;

friend class Arguments;
@@ -400,11 +401,11 @@ struct ArgumentsPrivate;

/// The Arguments is actual a std::vector<paddle::Argument> in paddle.
class Arguments {
-private:
+ private:
Arguments(); // Internal Create.
DISABLE_COPY(Arguments);

-public:
+ public:
/**
* Create a arguments with size.
* Note that it can be zero.
@@ -475,12 +476,12 @@ class Arguments {

float sum() const;

-private:
+ private:
static Arguments* createByPaddleArgumentVector(void* ptr);
static Arguments* createByPaddleArgument(const void* ptr);
void* getInternalArgumentsPtr() const;

-private:
+ private:
ArgumentsPrivate* m;
friend class Trainer;
friend class GradientMachine;
@@ -507,18 +508,18 @@ class ParameterConfig {
static ParameterConfig* createParameterConfigFromParameterPtr(void* ptr);
void* getRawPtr();

-public:
+ public:
~ParameterConfig();

/**
* return proto buf string.
*/
std::string toProtoString() const;

-private:
+ private:
ParameterConfigPrivate* m;

-private:
+ private:
friend class Parameter;
friend class ParameterOptimizer;
friend struct ParameterTraverseCallbackPrivate;
@@ -529,7 +530,7 @@ class OptimizationConfig {
DISABLE_COPY(OptimizationConfig);
OptimizationConfig();

-public:
+ public:
static OptimizationConfig* createFromProtoString(const std::string& str);
~OptimizationConfig();

@@ -538,7 +539,7 @@ class OptimizationConfig {
*/
std::string toProtoString();

-private:
+ private:
OptimizationConfigPrivate* m;

friend class TrainerConfig;
@@ -549,11 +550,11 @@ class OptimizationConfig {

struct ParameterPrivate;
class Parameter {
-private:
+ private:
Parameter();
DISABLE_COPY(Parameter);

-public:
+ public:
virtual ~Parameter();

/**
@@ -580,11 +581,11 @@ class Parameter {

size_t getSize() const;

-private:
+ private:
static Parameter* createFromRawPtr(void* ptr);
static Parameter* createFromSharedPtr(void* ptr);

-private:
+ private:
ParameterPrivate* m;
friend class UpdateCallbackWrapper;
friend class GradientMachine;
@@ -598,14 +599,14 @@ struct ModelConfigPrivate;
* It is used by GradientMachine.
*/
class ModelConfig {
-private:
+ private:
ModelConfig();
DISABLE_COPY(ModelConfig);

-public:
+ public:
virtual ~ModelConfig();

-private:
+ private:
ModelConfigPrivate* m;
friend class TrainerConfig;
friend struct TrainerConfigPrivate;
@@ -619,11 +620,11 @@ struct TrainerConfigPrivate;
* It is used by GradientMachine.
*/
class TrainerConfig {
-private:
+ private:
TrainerConfig();
DISABLE_COPY(TrainerConfig);

-public:
+ public:
virtual ~TrainerConfig();

static TrainerConfig* createFromTrainerConfigFile(
@@ -634,7 +635,7 @@ class TrainerConfig {

OptimizationConfig* getOptimizationConfig() const;

-private:
+ private:
TrainerConfigPrivate* m;
friend class Trainer;
};
@@ -654,7 +655,7 @@ class TrainerConfig {
* @endcode
*/
class UpdateCallback {
-public:
+ public:
virtual ~UpdateCallback();
virtual void apply(Parameter* p);
};
@@ -664,14 +665,14 @@ class ParameterTraverseCallback {
DISABLE_COPY(ParameterTraverseCallback);
ParameterTraverseCallback();

-public:
+ public:
~ParameterTraverseCallback();

void apply(const std::vector<Vector*>& vecs,
const ParameterConfig& config,
size_t sparseId);

-private:
+ private:
ParameterTraverseCallbackPrivate* m;
friend class ParameterOptimizer;
};
@@ -686,7 +687,7 @@ class ParameterOptimizer {
DISABLE_COPY(ParameterOptimizer);
ParameterOptimizer();

-public:
+ public:
static ParameterOptimizer* create(OptimizationConfig* config);

~ParameterOptimizer();
@@ -710,19 +711,19 @@ class ParameterOptimizer {
ParameterTraverseCallback* needSpecialTraversal(
const ParameterConfig& config) const;

-private:
+ private:
ParameterOptimizerPrivate* m;
};

class SequenceGenerator;
class Evaluator;
struct GradientMachinePrivate;
class GradientMachine {
-private:
+ private:
GradientMachine();
DISABLE_COPY(GradientMachine);

-public:
+ public:
virtual ~GradientMachine();

/**
@@ -817,7 +818,7 @@ class GradientMachine {

void eval(Evaluator* evaluator);

-private:
+ private:
GradientMachinePrivate* m;

static GradientMachine* createFromPaddleModelPtr(
@@ -833,10 +834,10 @@ class GradientMachine {

struct ParameterUpdaterPrivate;
class ParameterUpdater {
-private:
+ private:
ParameterUpdater();

-public:
+ public:
static ParameterUpdater* createLocalUpdater(OptimizationConfig* config);
static ParameterUpdater* createRemoteUpdater(OptimizationConfig* config,
int passCount,
@@ -911,17 +912,17 @@ class ParameterUpdater {
*/
void catchUpWith();

-private:
+ private:
ParameterUpdaterPrivate* m;
};

struct EvaluatorPrivate;
class Evaluator {
-private:
+ private:
Evaluator();
DISABLE_COPY(Evaluator);

-public:
+ public:
~Evaluator();

/**
Expand All @@ -945,21 +946,21 @@ class Evaluator {

double getValue(const std::string name) const;

-private:
+ private:
EvaluatorPrivate* m;

friend class GradientMachine;
};

struct TrainerPrivate;
class Trainer {
-private:
+ private:
TrainerPrivate* m;
Trainer();
Trainer(TrainerConfig* optConfig, GradientMachine* gm);
DISABLE_COPY(Trainer);

-public:
+ public:
virtual ~Trainer();

/// Create A Trainer By TrainerConfig. using paddle command line.
@@ -1002,7 +1003,7 @@ class Trainer {

/// the N-Best results generated from one input sequence.
class ISequenceResults {
-public:
+ public:
virtual ~ISequenceResults();

/// Number of result.
@@ -1026,7 +1027,7 @@ class SequenceGenerator {
DISABLE_COPY(SequenceGenerator);
SequenceGenerator();

-public:
+ public:
virtual ~SequenceGenerator();

/**
@@ -1044,10 +1045,10 @@ class SequenceGenerator {
void setMaxLength(size_t maxlength);
void setBeamSize(size_t beamSize);

-private:
+ private:
static SequenceGenerator* createByGradientMachineSharedPtr(void* ptr);
friend class GradientMachine;

-private:
+ private:
SequenceGeneratorPrivate* m;
};
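Nearly every hunk in PaddleAPI.h above is the same one-space reindent of public:/private:, but the UnsupportError hunk near the top also fixes two things cpplint flags: a stray semicolon after a {} function body, and a single-argument constructor that should be explicit so it cannot act as an implicit conversion. A minimal self-contained sketch of the effect; the Report() helper is hypothetical and exists only for illustration:

#include <iostream>
#include <stdexcept>
#include <string>

// UnsupportError as it reads after this commit.
class UnsupportError : public std::runtime_error {
 public:
  UnsupportError() : std::runtime_error(" ") {}  // no ';' after the body
  explicit UnsupportError(const std::string& message)
      : std::runtime_error(message) {}
};

// Hypothetical helper, used only to show what `explicit` forbids.
void Report(const UnsupportError& err) { std::cout << err.what() << "\n"; }

int main() {
  Report(UnsupportError("accessing GPU memory directly is unsupported"));
  // Report(std::string("oops"));  // now a compile error: `explicit` rejects
  //                               // the silent std::string -> UnsupportError
  //                               // conversion that used to be accepted.
  return 0;
}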
