diff --git a/include/caffe/python_layer.hpp b/include/caffe/python_layer.hpp
index 9c30250c1b5..2957e7426be 100644
--- a/include/caffe/python_layer.hpp
+++ b/include/caffe/python_layer.hpp
@@ -18,6 +18,8 @@ class PythonLayer : public Layer<Dtype> {
   virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
       const vector<Blob<Dtype>*>& top) {
+    self_.attr("param_str") = bp::str(
+        this->layer_param_.python_param().param_str());
     self_.attr("setup")(bottom, top);
   }
   virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
diff --git a/python/caffe/test/test_python_layer_with_param_str.py b/python/caffe/test/test_python_layer_with_param_str.py
new file mode 100644
index 00000000000..3d0f107b3bb
--- /dev/null
+++ b/python/caffe/test/test_python_layer_with_param_str.py
@@ -0,0 +1,59 @@
+import unittest
+import tempfile
+import os
+import six
+
+import caffe
+
+
+class SimpleParamLayer(caffe.Layer):
+    """A layer that just multiplies by the numeric value of its param string"""
+
+    def setup(self, bottom, top):
+        try:
+            self.value = float(self.param_str)
+        except ValueError:
+            raise ValueError("Parameter string must be a legible float")
+
+    def reshape(self, bottom, top):
+        top[0].reshape(*bottom[0].data.shape)
+
+    def forward(self, bottom, top):
+        top[0].data[...] = self.value * bottom[0].data
+
+    def backward(self, top, propagate_down, bottom):
+        bottom[0].diff[...] = self.value * top[0].diff
+
+
+def python_param_net_file():
+    with tempfile.NamedTemporaryFile(mode='w+', delete=False) as f:
+        f.write("""name: 'pythonnet' force_backward: true
+        input: 'data' input_shape { dim: 10 dim: 9 dim: 8 }
+        layer { type: 'Python' name: 'mul10' bottom: 'data' top: 'mul10'
+          python_param { module: 'test_python_layer_with_param_str'
+                         layer: 'SimpleParamLayer' param_str: '10' } }
+        layer { type: 'Python' name: 'mul2' bottom: 'mul10' top: 'mul2'
+          python_param { module: 'test_python_layer_with_param_str'
+                         layer: 'SimpleParamLayer' param_str: '2' } }""")
+        return f.name
+
+
+class TestLayerWithParam(unittest.TestCase):
+    def setUp(self):
+        net_file = python_param_net_file()
+        self.net = caffe.Net(net_file, caffe.TRAIN)
+        os.remove(net_file)
+
+    def test_forward(self):
+        x = 8
+        self.net.blobs['data'].data[...] = x
+        self.net.forward()
+        for y in self.net.blobs['mul2'].data.flat:
+            self.assertEqual(y, 2 * 10 * x)
+
+    def test_backward(self):
+        x = 7
+        self.net.blobs['mul2'].diff[...] = x
+        self.net.backward()
+        for y in self.net.blobs['data'].diff.flat:
+            self.assertEqual(y, 2 * 10 * x)
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index 8c3f0723600..adcf4e2fd5a 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -703,6 +703,11 @@ message PowerParameter {
 message PythonParameter {
   optional string module = 1;
   optional string layer = 2;
+  // This value is set to the attribute `param_str` of the `PythonLayer` object
+  // in Python before the `setup()` method is called. It could be a number, a
+  // string, a dictionary in Python dict format, JSON, etc. You may parse this
+  // string in the `setup` method and use it in `forward` and `backward`.
+  optional string param_str = 3 [default = ''];
 }
 
 // Message that stores parameters used by ReductionLayer
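
For reference, here is a minimal sketch of how a user-defined layer could consume `param_str` once this patch is applied. The `ScaleByParamLayer` name and the JSON format are assumptions for illustration only, not part of the change; the `setup`/`reshape`/`forward`/`backward` interface mirrors the test layer above.

# Illustrative only -- not part of this patch. A hypothetical layer that reads
# its configuration from param_str as JSON once the change above is in place.
import json

import caffe


class ScaleByParamLayer(caffe.Layer):
    """Hypothetical layer: scales its input by a factor taken from param_str."""

    def setup(self, bottom, top):
        # With the patch, self.param_str is populated from the prototxt before
        # setup() runs, e.g. python_param { ... param_str: '{"scale": 0.5}' }
        params = json.loads(self.param_str)
        self.scale = float(params.get('scale', 1.0))

    def reshape(self, bottom, top):
        top[0].reshape(*bottom[0].data.shape)

    def forward(self, bottom, top):
        top[0].data[...] = self.scale * bottom[0].data

    def backward(self, top, propagate_down, bottom):
        bottom[0].diff[...] = self.scale * top[0].diff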