Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -91,3 +91,21 @@ LOCK
LOG*
CURRENT
MANIFEST-*
windows/bin/
*.opensdf
*.sdf
*.suo
*.dll
*.lib
windows/extern/*/include/
windows/extern/*/lib/
windows/extern/*/bin/
windows/tmp/
*.pdb
*.pyd
windows/python/caffe/__init__.py
windows/python/caffe/classifier.py
windows/python/caffe/detector.py
windows/python/caffe/draw.py
windows/python/caffe/io.py
windows/python/caffe/pycaffe.py
45 changes: 7 additions & 38 deletions include/caffe/layer_factory.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -56,59 +56,28 @@ class LayerRegistry {
typedef shared_ptr<Layer<Dtype> > (*Creator)(const LayerParameter&);
typedef std::map<string, Creator> CreatorRegistry;

static CreatorRegistry& Registry() {
static CreatorRegistry* g_registry_ = new CreatorRegistry();
return *g_registry_;
}
static CreatorRegistry& Registry();

// Adds a creator.
static void AddCreator(const string& type, Creator creator) {
CreatorRegistry& registry = Registry();
CHECK_EQ(registry.count(type), 0)
<< "Layer type " << type << " already registered.";
registry[type] = creator;
}
static void AddCreator(const string& type, Creator creator);

// Get a layer using a LayerParameter.
static shared_ptr<Layer<Dtype> > CreateLayer(const LayerParameter& param) {
if (Caffe::root_solver()) {
LOG(INFO) << "Creating layer " << param.name();
}
const string& type = param.type();
CreatorRegistry& registry = Registry();
CHECK_EQ(registry.count(type), 1) << "Unknown layer type: " << type
<< " (known types: " << LayerTypeList() << ")";
return registry[type](param);
}
static shared_ptr<Layer<Dtype> > CreateLayer(const LayerParameter& param);

private:
// Layer registry should never be instantiated - everything is done with its
// static variables.
LayerRegistry() {}

static string LayerTypeList() {
CreatorRegistry& registry = Registry();
string layer_types;
for (typename CreatorRegistry::iterator iter = registry.begin();
iter != registry.end(); ++iter) {
if (iter != registry.begin()) {
layer_types += ", ";
}
layer_types += iter->first;
}
return layer_types;
}
LayerRegistry();

static string LayerTypeList();
};


template <typename Dtype>
class LayerRegisterer {
public:
LayerRegisterer(const string& type,
shared_ptr<Layer<Dtype> > (*creator)(const LayerParameter&)) {
// LOG(INFO) << "Registering layer type: " << type;
LayerRegistry<Dtype>::AddCreator(type, creator);
}
shared_ptr<Layer<Dtype> > (*creator)(const LayerParameter&));
};


Expand Down
52 changes: 52 additions & 0 deletions include/caffe/layer_forcereg.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
//////////////////////////////////////////////////////////////////////////
// This file must be included in every windows program / dll that is going
// to link libcaffe.lib. Without including this file the unused symbols
// will not be used and the layers are not registered.
//////////////////////////////////////////////////////////////////////////

#ifdef _WIN32

#pragma once

#include "caffe/vision_layers.hpp"

namespace caffe
{
// Each REGISTER_LAYER_CLASS line presumably expands to static
// LayerRegisterer objects (see layer_factory.hpp) whose constructors call
// LayerRegistry::AddCreator at static-initialization time -- confirm
// against the macro definition.  Because this header is included by every
// consumer, the same type can be registered more than once; this relies on
// the Windows branch of LayerRegistry::AddCreator silently ignoring
// duplicate registrations.  On non-Windows builds a duplicate would be a
// fatal CHECK, which is why the whole file is _WIN32-only.
REGISTER_LAYER_CLASS(AbsVal);
REGISTER_LAYER_CLASS(Accuracy);
REGISTER_LAYER_CLASS(ArgMax);
REGISTER_LAYER_CLASS(BNLL);
REGISTER_LAYER_CLASS(Concat);
REGISTER_LAYER_CLASS(ContrastiveLoss);
REGISTER_LAYER_CLASS(Data);
REGISTER_LAYER_CLASS(Deconvolution);
REGISTER_LAYER_CLASS(Dropout);
REGISTER_LAYER_CLASS(DummyData);
REGISTER_LAYER_CLASS(Eltwise);
REGISTER_LAYER_CLASS(EuclideanLoss);
REGISTER_LAYER_CLASS(Exp);
REGISTER_LAYER_CLASS(Flatten);
REGISTER_LAYER_CLASS(HDF5Data);
REGISTER_LAYER_CLASS(HDF5Output);
REGISTER_LAYER_CLASS(HingeLoss);
REGISTER_LAYER_CLASS(Im2col);
REGISTER_LAYER_CLASS(ImageData);
REGISTER_LAYER_CLASS(InfogainLoss);
REGISTER_LAYER_CLASS(InnerProduct);
REGISTER_LAYER_CLASS(MemoryData);
REGISTER_LAYER_CLASS(MultinomialLogisticLoss);
REGISTER_LAYER_CLASS(MVN);
REGISTER_LAYER_CLASS(Power);
REGISTER_LAYER_CLASS(PReLU);
REGISTER_LAYER_CLASS(Reshape);
REGISTER_LAYER_CLASS(SigmoidCrossEntropyLoss);
REGISTER_LAYER_CLASS(Silence);
REGISTER_LAYER_CLASS(Slice);
REGISTER_LAYER_CLASS(SoftmaxWithLoss);
REGISTER_LAYER_CLASS(Split);
REGISTER_LAYER_CLASS(SPP);
REGISTER_LAYER_CLASS(Threshold);
REGISTER_LAYER_CLASS(WindowData);
}

#endif
22 changes: 22 additions & 0 deletions include/caffe/mkstemp.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#ifdef _WIN32

/* mkstemp extracted from libc/sysdeps/posix/tempname.c. Copyright
(C) 1991-1999, 2000, 2001, 2006 Free Software Foundation, Inc.

The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version. */

#pragma once

/* Presumably the alphabet used for the random "XXXXXX" suffix; the
   mkstemp definition that consumes it is not in this header -- confirm.
   NOTE(review): being `static` at namespace scope in a header, every
   translation unit that includes this file gets its own copy of the
   array (and likely an unused-variable warning); consider moving it
   next to the mkstemp() definition instead. */
static const char letters[] =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";

/* Generate a temporary file name based on TMPL. TMPL must match the
rules for mk[s]temp (i.e. end in "XXXXXX"). The name constructed
does not exist at the time of the call to mkstemp. TMPL is
overwritten with the result. */
int mkstemp(char *tmpl);

#endif
10 changes: 9 additions & 1 deletion include/caffe/util/io.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@
#include "caffe/common.hpp"
#include "caffe/proto/caffe.pb.h"

#ifdef _WIN32
#include "caffe/mkstemp.h"
#endif

#define HDF5_NUM_DIMS 4

namespace caffe {
Expand All @@ -37,7 +41,11 @@ inline void MakeTempDir(string* temp_dirname) {
// Copy the template into a mutable C string that mkdtemp/_mktemp_s can
// rewrite in place.
char* temp_dirname_cstr = new char[temp_dirname->size() + 1];
// NOLINT_NEXT_LINE(runtime/printf)
strcpy(temp_dirname_cstr, temp_dirname->c_str());
#ifndef _WIN32
char* mkdtemp_result = mkdtemp(temp_dirname_cstr);
#else
// _mktemp_s returns an errno_t (0 on success), not a pointer, and
// sizeof(temp_dirname_cstr) would only be the size of the pointer
// itself -- pass the actual buffer length instead.  Map success onto
// the pointer convention so the shared CHECK below stays correct for
// both platforms.
// NOTE(review): unlike mkdtemp, _mktemp_s only generates a unique
// name; it does not create the directory -- confirm a directory is
// created before use (e.g. via _mkdir).
char* mkdtemp_result =
    (_mktemp_s(temp_dirname_cstr, temp_dirname->size() + 1) == 0) ?
    temp_dirname_cstr : NULL;
#endif
CHECK(mkdtemp_result != NULL)
    << "Failed to create a temporary directory at: " << *temp_dirname;
*temp_dirname = temp_dirname_cstr;
Expand Down
1 change: 1 addition & 0 deletions python/caffe/_caffe.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

#include "caffe/caffe.hpp"
#include "caffe/python_layer.hpp"
#include "caffe/layer_forcereg.hpp"

// Temporary solution for numpy < 1.7 versions: old macro, no promises.
// You're strongly advised to upgrade to >= 1.7.
Expand Down
6 changes: 6 additions & 0 deletions src/caffe/common.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@
#include <cstdio>
#include <ctime>

#ifdef _WIN32
#include <process.h>
#endif

#include "caffe/common.hpp"
#include "caffe/util/rng.hpp"

Expand Down Expand Up @@ -45,7 +49,9 @@ void GlobalInit(int* pargc, char*** pargv) {
// Google logging.
::google::InitGoogleLogging(*(pargv)[0]);
// Provide a backtrace on segfault.
#ifndef _WIN32
::google::InstallFailureSignalHandler();
#endif
}

#ifdef CPU_ONLY // CPU-only Caffe.
Expand Down
87 changes: 87 additions & 0 deletions src/caffe/layer_factory.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,90 @@

namespace caffe {


//////////////////////////////////////////////////////////////////////////
// Implementation of layer registration for windows must be in
// a specific object-unit. Otherwise the layer registration and
// creation fail.
//////////////////////////////////////////////////////////////////////////

template class LayerRegistry <float>;
template class LayerRegistry <double>;

template <typename Dtype>
LayerRegistry<Dtype>::LayerRegistry()
{

}


template <typename Dtype>
typename LayerRegistry<Dtype>::CreatorRegistry& LayerRegistry<Dtype>::Registry()
{
static CreatorRegistry* g_registry_ = new CreatorRegistry();
return *g_registry_;
}


template <typename Dtype>
void LayerRegistry<Dtype>::AddCreator(const string& type, Creator creator)
{
CreatorRegistry& registry = Registry();
#ifndef _WIN32
CHECK_EQ(registry.count(type), 0)
<< "Layer type " << type << " already registered.";
#else
// On windows this will happen, as the layers need to be explicitly
// registered. Once that explicit registration is done, the implicit
// layer registration is performed as well which in turn results
// to two registration calls for the same type.
if (registry.count(type) > 0)
return;
#endif
registry[type] = creator;
}


template <typename Dtype>
shared_ptr<Layer<Dtype> > LayerRegistry<Dtype>::CreateLayer(const LayerParameter& param)
{
if (Caffe::root_solver()) {
LOG(INFO) << "Creating layer " << param.name();
}
const string& type = param.type();
CreatorRegistry& registry = Registry();
CHECK_EQ(registry.count(type), 1) << "Unknown layer type: " << type
<< " (known types: " << LayerTypeList() << ")";
return registry[type](param);
}


template <typename Dtype>
string LayerRegistry<Dtype>::LayerTypeList()
{
CreatorRegistry& registry = Registry();
string layer_types;
for (typename CreatorRegistry::iterator iter = registry.begin();
iter != registry.end(); ++iter) {
if (iter != registry.begin()) {
layer_types += ", ";
}
layer_types += iter->first;
}
return layer_types;
}


template <typename Dtype>
LayerRegisterer<Dtype>::LayerRegisterer(const string& type, shared_ptr<Layer<Dtype> >(*creator)(const LayerParameter&))
{
// LOG(INFO) << "Registering layer type: " << type;
LayerRegistry<Dtype>::AddCreator(type, creator);
}

//////////////////////////////////////////////////////////////////////////


// Get convolution layer according to engine.
template <typename Dtype>
shared_ptr<Layer<Dtype> > GetConvolutionLayer(
Expand Down Expand Up @@ -209,4 +293,7 @@ REGISTER_LAYER_CREATOR(Python, GetPythonLayer);

// Layers that use their constructor as their default creator should be
// registered in their corresponding cpp files. Do not register them here.



} // namespace caffe
4 changes: 4 additions & 0 deletions src/caffe/layers/bnll_layer.cu
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,11 @@

namespace caffe {

#ifndef _WIN32
// Clamp threshold for the BNLL forward/backward kernels.
const float kBNLL_THRESHOLD = 50.;
#else
// Presumably the file-scope const float was not usable from device code
// under the MSVC/nvcc toolchain, hence the macro fallback -- confirm.
// Use a float literal: a bare "50." is a double and would silently
// promote every comparison in the CUDA kernels to double precision.
#define kBNLL_THRESHOLD 50.0f
#endif

template <typename Dtype>
__global__ void BNLLForward(const int n, const Dtype* in, Dtype* out) {
Expand Down
2 changes: 1 addition & 1 deletion src/caffe/layers/contrastive_loss_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ void ContrastiveLossLayer<Dtype>::Forward_cpu(
if (legacy_version) {
loss += std::max(margin - dist_sq_.cpu_data()[i], Dtype(0.0));
} else {
Dtype dist = std::max(margin - sqrt(dist_sq_.cpu_data()[i]), 0.0);
Dtype dist = std::max(margin - sqrt(dist_sq_.cpu_data()[i]), Dtype(0.0));
loss += dist*dist;
}
}
Expand Down
Loading