Coverage for starry/_core/ops/integration.py: 91%

Hot-keys on this page
r, m, x, p: toggle line displays
j, k: next/previous highlighted chunk
0 (zero): top of page
1 (one): first highlighted chunk
1# -*- coding: utf-8 -*-
2from __future__ import division, print_function
3import numpy as np
4from theano import gof
5import theano.tensor as tt
# Public API of this module: the two custom Theano Ops exported to callers.
__all__ = ["sTOp", "rTReflectedOp"]
class sTOp(tt.Op):
    """Theano Op wrapping ``func``, which evaluates the occultation
    solution vector of length ``N`` for each element of its inputs.

    Gradients are provided by a companion :class:`sTGradientOp`.
    """

    def __init__(self, func, N):
        # `func` performs the actual numerical evaluation in `perform`;
        # `N` is the length of the solution vector (last output axis).
        self.func = func
        self.N = N
        self._grad_op = sTGradientOp(self)

    def make_node(self, *inputs):
        inputs = [tt.as_tensor_variable(arg) for arg in inputs]
        # Output is a matrix: one length-N row per input element.
        out_type = tt.TensorType(inputs[-1].dtype, (False, False))
        return gof.Apply(self, inputs, [out_type()])

    def infer_shape(self, node, shapes):
        # (input shape) x N
        return [shapes[0] + (tt.as_tensor(self.N),)]

    def R_op(self, inputs, eval_points):
        if eval_points[0] is None:
            return eval_points
        return self.grad(inputs, eval_points)

    def perform(self, node, inputs, outputs):
        outputs[0][0] = self.func(*inputs)

    def grad(self, inputs, gradients):
        # Delegate to the companion gradient Op, passing the upstream
        # gradient as an extra trailing argument.
        return self._grad_op(*(inputs + gradients))
class sTGradientOp(tt.Op):
    """Gradient Op for :class:`sTOp`.

    Receives the base Op's inputs plus the upstream gradient as a final
    argument, and returns one gradient per base-Op input.
    """

    def __init__(self, base_op):
        # Keep a handle on the forward Op so we can reuse its `func`.
        self.base_op = base_op

    def make_node(self, *inputs):
        inputs = [tt.as_tensor_variable(arg) for arg in inputs]
        # One output per base-Op input; the trailing input here is the
        # upstream gradient itself, so it is excluded.
        outputs = [arg.type() for arg in inputs[:-1]]
        return gof.Apply(self, inputs, outputs)

    def infer_shape(self, node, shapes):
        # Each gradient has the shape of the corresponding input.
        return shapes[:-1]

    def perform(self, node, inputs, outputs):
        # NOTE(review): assumes `base_op.func`, when handed the extra
        # gradient argument, returns the pair of input gradients —
        # confirm against the C extension it wraps.
        grad_b, grad_r = self.base_op.func(*inputs)
        outputs[0][0] = np.reshape(grad_b, np.shape(inputs[0]))
        outputs[1][0] = np.reshape(grad_r, np.shape(inputs[1]))
class rTReflectedOp(tt.Op):
    """Theano Op wrapping ``func``, which evaluates the reflected-light
    rT vector of length ``N`` for each element of its (single) input.

    Gradients are provided by a companion :class:`rTReflectedGradientOp`.
    """

    def __init__(self, func, N):
        # `func` performs the actual numerical evaluation in `perform`;
        # `N` is the length of the rT vector (last output axis).
        self.func = func
        self.N = N
        self._grad_op = rTReflectedGradientOp(self)

    def make_node(self, *inputs):
        inputs = [tt.as_tensor_variable(arg) for arg in inputs]
        # Output is a matrix: one length-N row per input element.
        out_type = tt.TensorType(inputs[0].dtype, (False, False))
        return gof.Apply(self, inputs, [out_type()])

    def infer_shape(self, node, shapes):
        # (input shape) x N
        return [shapes[0] + (tt.as_tensor(self.N),)]

    def R_op(self, inputs, eval_points):
        if eval_points[0] is None:
            return eval_points
        return self.grad(inputs, eval_points)

    def perform(self, node, inputs, outputs):
        outputs[0][0] = self.func(inputs[0])

    def grad(self, inputs, gradients):
        # NOTE: There may be a bug in Theano for custom Ops that are
        # functions of a single variable: a call to their gradient
        # method does not return a list (which it *should*), so we
        # wrap the result in a list explicitly.
        return [self._grad_op(*(inputs + gradients))]
class rTReflectedGradientOp(tt.Op):
    """Gradient Op for :class:`rTReflectedOp`.

    Receives the base Op's input plus the upstream gradient as a final
    argument, and returns the gradient with respect to that input.
    """

    def __init__(self, base_op):
        # Keep a handle on the forward Op so we can reuse its `func`.
        self.base_op = base_op

    def make_node(self, *inputs):
        inputs = [tt.as_tensor_variable(arg) for arg in inputs]
        # One output per base-Op input; the trailing input here is the
        # upstream gradient itself, so it is excluded.
        outputs = [arg.type() for arg in inputs[:-1]]
        return gof.Apply(self, inputs, outputs)

    def infer_shape(self, node, shapes):
        # The gradient has the shape of the corresponding input.
        return shapes[:-1]

    def perform(self, node, inputs, outputs):
        # NOTE(review): assumes `base_op.func`, when handed the extra
        # gradient argument, returns the single input gradient —
        # confirm against the C extension it wraps.
        grad_b = self.base_op.func(*inputs)
        outputs[0][0] = np.reshape(grad_b, np.shape(inputs[0]))