#ifndef EIGEN_CXX11_TENSOR_TENSOR_PADDING_H
#define EIGEN_CXX11_TENSOR_TENSOR_PADDING_H

namespace Eigen {

/** \class TensorPadding
  * \ingroup CXX11_Tensor_Module
  *
  * \brief Tensor padding class.
  * At the moment only padding with a constant value is supported.
  *
  */
namespace internal {

template<typename PaddingDimensions, typename XprType>
struct traits<TensorPaddingOp<PaddingDimensions, XprType> > : public traits<XprType>
{
  typedef typename XprType::Scalar Scalar;
  typedef traits<XprType> XprTraits;
  typedef typename packet_traits<Scalar>::type Packet;
  typedef typename XprTraits::StorageKind StorageKind;
  typedef typename XprTraits::Index Index;
  typedef typename XprType::Nested Nested;
  typedef typename remove_reference<Nested>::type _Nested;
  static const int NumDimensions = XprTraits::NumDimensions;
  static const int Layout = XprTraits::Layout;
};

template<typename PaddingDimensions, typename XprType>
struct eval<TensorPaddingOp<PaddingDimensions, XprType>, Eigen::Dense>
{
  typedef const TensorPaddingOp<PaddingDimensions, XprType>& type;
};

template<typename PaddingDimensions, typename XprType>
struct nested<TensorPaddingOp<PaddingDimensions, XprType>, 1, typename eval<TensorPaddingOp<PaddingDimensions, XprType> >::type>
{
  typedef TensorPaddingOp<PaddingDimensions, XprType> type;
};

}  // end namespace internal

template<typename PaddingDimensions, typename XprType>
class TensorPaddingOp : public TensorBase<TensorPaddingOp<PaddingDimensions, XprType>, ReadOnlyAccessors>
{
  public:
    typedef typename Eigen::internal::traits<TensorPaddingOp>::Scalar Scalar;
    typedef typename Eigen::internal::traits<TensorPaddingOp>::Packet Packet;
    typedef typename Eigen::NumTraits<Scalar>::Real RealScalar;
    typedef typename XprType::CoeffReturnType CoeffReturnType;
    typedef typename XprType::PacketReturnType PacketReturnType;
    typedef typename Eigen::internal::nested<TensorPaddingOp>::type Nested;
    typedef typename Eigen::internal::traits<TensorPaddingOp>::StorageKind StorageKind;
    typedef typename Eigen::internal::traits<TensorPaddingOp>::Index Index;

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorPaddingOp(const XprType& expr, const PaddingDimensions& padding_dims)
        : m_xpr(expr), m_padding_dims(padding_dims) {}

    EIGEN_DEVICE_FUNC
    const PaddingDimensions& padding() const { return m_padding_dims; }

    EIGEN_DEVICE_FUNC
    const typename internal::remove_all<typename XprType::Nested>::type&
    expression() const { return m_xpr; }

  protected:
    typename XprType::Nested m_xpr;
    const PaddingDimensions m_padding_dims;
};
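
// Usage sketch (not part of this header; the sizes and padding amounts below
// are made-up illustration values): a TensorPaddingOp is normally obtained
// through TensorBase::pad() rather than constructed directly.
//
//   Eigen::Tensor<float, 2> input(2, 3);
//   input.setRandom();
//   Eigen::array<std::pair<ptrdiff_t, ptrdiff_t>, 2> paddings;
//   paddings[0] = std::make_pair(1, 1);  // one padded row before and after
//   paddings[1] = std::make_pair(0, 2);  // two padded columns after
//   Eigen::Tensor<float, 2> padded = input.pad(paddings);  // 4 x 5, zeros in the pad bands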

// Eval as rvalue
template<typename PaddingDimensions, typename ArgType, typename Device>
struct TensorEvaluator<const TensorPaddingOp<PaddingDimensions, ArgType>, Device>
{
  typedef TensorPaddingOp<PaddingDimensions, ArgType> XprType;
  typedef typename XprType::Index Index;
  static const int NumDims = internal::array_size<PaddingDimensions>::value;
  typedef DSizes<Index, NumDims> Dimensions;

  enum {
    IsAligned = false,
    PacketAccess = TensorEvaluator<ArgType, Device>::PacketAccess,
    Layout = TensorEvaluator<ArgType, Device>::Layout,
    CoordAccess = true,
  };

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op, const Device& device)
      : m_impl(op.expression(), device), m_padding(op.padding())
  {
    // Padding doesn't change the rank of the tensor, so padding a 0-d tensor
    // (a scalar) is not supported: reshape it to a 1-element vector first.
    EIGEN_STATIC_ASSERT(NumDims > 0, YOU_MADE_A_PROGRAMMING_MISTAKE);

    // Compute the output dimensions: each dimension grows by the amount of
    // padding requested before and after it.
    m_dimensions = m_impl.dimensions();
    for (int i = 0; i < NumDims; ++i) {
      m_dimensions[i] += m_padding[i].first + m_padding[i].second;
    }

    const typename TensorEvaluator<ArgType, Device>::Dimensions& input_dims = m_impl.dimensions();
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      m_inputStrides[0] = 1;
      m_outputStrides[0] = 1;
      for (int i = 1; i < NumDims; ++i) {
        m_inputStrides[i] = m_inputStrides[i-1] * input_dims[i-1];
        m_outputStrides[i] = m_outputStrides[i-1] * m_dimensions[i-1];
      }
      m_outputStrides[NumDims] = m_outputStrides[NumDims-1] * m_dimensions[NumDims-1];
    } else {
      m_inputStrides[NumDims - 1] = 1;
      m_outputStrides[NumDims] = 1;
      for (int i = NumDims - 2; i >= 0; --i) {
        m_inputStrides[i] = m_inputStrides[i+1] * input_dims[i+1];
        m_outputStrides[i+1] = m_outputStrides[i+2] * m_dimensions[i+1];
      }
      m_outputStrides[0] = m_outputStrides[1] * m_dimensions[0];
    }
  }
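
  // Worked example of the stride setup above, with hypothetical sizes for
  // illustration only: a column-major 2x3 input padded by (1,1) along
  // dimension 0 and (0,2) along dimension 1 yields m_dimensions = {4, 5},
  // m_inputStrides = {1, 2} and m_outputStrides = {1, 4, 20}. Note that
  // m_outputStrides has NumDims+1 entries: in column-major mode the extra
  // trailing entry is the total output size, while in row-major mode the
  // strides are shifted up by one slot and m_outputStrides[0] holds the total.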

  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::CoeffReturnType CoeffReturnType;
  typedef typename XprType::PacketReturnType PacketReturnType;

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Dimensions& dimensions() const { return m_dimensions; }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE bool evalSubExprsIfNeeded(Scalar*) {
    m_impl.evalSubExprsIfNeeded(NULL);
    return true;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void cleanup() {
    m_impl.cleanup();
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index index) const
  {
    eigen_assert(index < dimensions().TotalSize());
    Index inputIndex = 0;
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      for (int i = NumDims - 1; i > 0; --i) {
        const Index idx = index / m_outputStrides[i];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i];
      }
      if (index < m_padding[0].first || index >= m_dimensions[0] - m_padding[0].second) {
        return Scalar(0);
      }
      inputIndex += (index - m_padding[0].first);
    } else {
      for (int i = 0; i < NumDims - 1; ++i) {
        const Index idx = index / m_outputStrides[i+1];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i+1];
      }
      if (index < m_padding[NumDims-1].first ||
          index >= m_dimensions[NumDims-1] - m_padding[NumDims-1].second) {
        return Scalar(0);
      }
      inputIndex += (index - m_padding[NumDims-1].first);
    }
    return m_impl.coeff(inputIndex);
  }
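
  // Example of the mapping above, using the same hypothetical 2x3 -> 4x5
  // column-major padding as in the constructor example: output linear index 9
  // decodes to coordinates (1, 2); column 2 lies in the data region and row 1
  // maps to input row 0 after removing the leading pad, so the call forwards
  // to m_impl.coeff(4). Any coordinate falling inside a pad band returns
  // Scalar(0) without touching the input expression.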

  template<int LoadMode>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packet(Index index) const
  {
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      return packetColMajor(index);
    }
    return packetRowMajor(index);
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(const array<Index, NumDims>& coords) const
  {
    Index inputIndex;
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      {
        const Index idx = coords[0];
        if (idx < m_padding[0].first || idx >= m_dimensions[0] - m_padding[0].second) {
          return Scalar(0);
        }
        inputIndex = idx - m_padding[0].first;
      }
      for (int i = 1; i < NumDims; ++i) {
        const Index idx = coords[i];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
      }
    } else {
      {
        const Index idx = coords[NumDims-1];
        if (idx < m_padding[NumDims-1].first || idx >= m_dimensions[NumDims-1] - m_padding[NumDims-1].second) {
          return Scalar(0);
        }
        inputIndex = idx - m_padding[NumDims-1].first;
      }
      for (int i = NumDims - 2; i >= 0; --i) {
        const Index idx = coords[i];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
      }
    }
    return m_impl.coeff(inputIndex);
  }

  EIGEN_DEVICE_FUNC Scalar* data() const { return NULL; }

 protected:

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packetColMajor(Index index) const
  {
    const int packetSize = internal::unpacket_traits<PacketReturnType>::size;
    EIGEN_STATIC_ASSERT(packetSize > 1, YOU_MADE_A_PROGRAMMING_MISTAKE)
    eigen_assert(index+packetSize-1 < dimensions().TotalSize());

    const Index initialIndex = index;
    Index inputIndex = 0;
    for (int i = NumDims - 1; i > 0; --i) {
      const Index first = index;
      const Index last = index + packetSize - 1;
      const Index lastPaddedLeft = m_padding[i].first * m_outputStrides[i];
      const Index firstPaddedRight = (m_dimensions[i] - m_padding[i].second) * m_outputStrides[i];
      const Index lastPaddedRight = m_outputStrides[i+1];

      if (last < lastPaddedLeft) {
        // all the coefficients are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= firstPaddedRight && last < lastPaddedRight) {
        // all the coefficients are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= lastPaddedLeft && last < firstPaddedRight) {
        // all the coefficients are between the 2 padding zones.
        const Index idx = index / m_outputStrides[i];
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i];
      }
      else {
        // Every other case
        return packetWithPossibleZero(initialIndex);
      }
    }

    const Index last = index + packetSize - 1;
    const Index first = index;
    const Index lastPaddedLeft = m_padding[0].first;
    const Index firstPaddedRight = (m_dimensions[0] - m_padding[0].second);
    const Index lastPaddedRight = m_outputStrides[1];

    if (last < lastPaddedLeft) {
      // all the coefficients are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= firstPaddedRight && last < lastPaddedRight) {
      // all the coefficients are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= lastPaddedLeft && last < firstPaddedRight) {
      // all the coefficients are between the 2 padding zones.
      inputIndex += (index - m_padding[0].first);
      return m_impl.template packet<Unaligned>(inputIndex);
    }
    // Every other case
    return packetWithPossibleZero(initialIndex);
  }
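
  // Note on the case analysis above: per dimension, a packet is classified as
  // lying entirely inside the left pad band, entirely inside the right pad
  // band, or entirely inside the data region; only in those cases can a
  // single pset1 or an unaligned input load be used. A packet that straddles
  // a pad boundary falls through to packetWithPossibleZero(), which assembles
  // the result one coefficient at a time.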

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packetRowMajor(Index index) const
  {
    const int packetSize = internal::unpacket_traits<PacketReturnType>::size;
    EIGEN_STATIC_ASSERT(packetSize > 1, YOU_MADE_A_PROGRAMMING_MISTAKE)
    eigen_assert(index+packetSize-1 < dimensions().TotalSize());

    const Index initialIndex = index;
    Index inputIndex = 0;

    for (int i = 0; i < NumDims - 1; ++i) {
      const Index first = index;
      const Index last = index + packetSize - 1;
      const Index lastPaddedLeft = m_padding[i].first * m_outputStrides[i+1];
      const Index firstPaddedRight = (m_dimensions[i] - m_padding[i].second) * m_outputStrides[i+1];
      const Index lastPaddedRight = m_outputStrides[i];

      if (last < lastPaddedLeft) {
        // all the coefficients are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= firstPaddedRight && last < lastPaddedRight) {
        // all the coefficients are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= lastPaddedLeft && last < firstPaddedRight) {
        // all the coefficients are between the 2 padding zones.
        const Index idx = index / m_outputStrides[i+1];
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i+1];
      }
      else {
        // Every other case
        return packetWithPossibleZero(initialIndex);
      }
    }

    const Index last = index + packetSize - 1;
    const Index first = index;
    const Index lastPaddedLeft = m_padding[NumDims-1].first;
    const Index firstPaddedRight = (m_dimensions[NumDims-1] - m_padding[NumDims-1].second);
    const Index lastPaddedRight = m_outputStrides[NumDims-1];

    if (last < lastPaddedLeft) {
      // all the coefficients are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= firstPaddedRight && last < lastPaddedRight) {
      // all the coefficients are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= lastPaddedLeft && last < firstPaddedRight) {
      // all the coefficients are between the 2 padding zones.
      inputIndex += (index - m_padding[NumDims-1].first);
      return m_impl.template packet<Unaligned>(inputIndex);
    }
    // Every other case
    return packetWithPossibleZero(initialIndex);
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packetWithPossibleZero(Index index) const
  {
    const int packetSize = internal::unpacket_traits<PacketReturnType>::size;
    EIGEN_ALIGN_MAX typename internal::remove_const<CoeffReturnType>::type values[packetSize];
    for (int i = 0; i < packetSize; ++i) {
      values[i] = coeff(index+i);
    }
    PacketReturnType rslt = internal::pload<PacketReturnType>(values);
    return rslt;
  }
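
  // packetWithPossibleZero() is the scalar fallback used when a packet mixes
  // padded and non-padded coefficients: every entry goes through coeff(), so
  // padding zeros and input values are gathered into an aligned buffer and
  // then reloaded as a single packet.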

  Dimensions m_dimensions;
  array<Index, NumDims+1> m_outputStrides;
  array<Index, NumDims> m_inputStrides;
  TensorEvaluator<ArgType, Device> m_impl;
  PaddingDimensions m_padding;
};

} // end namespace Eigen

#endif // EIGEN_CXX11_TENSOR_TENSOR_PADDING_H