/*!
 * Copyright (c) 2016 by Contributors
 * \file lsoftmax.cc
 * \brief LSoftmax from <Large-Margin Softmax Loss for Convolutional Neural Networks>
 * \author luoyetx
 */
#include "./lsoftmax-inl.h"

namespace mshadow {

// NOTE(review): the original text had all template angle-bracket contents stripped
// (e.g. "template inline void", bare "Tensor"). The signatures below are
// reconstructed to match the declarations in lsoftmax-inl.h — confirm the tensor
// ranks against that header.

// CPU forward pass is intentionally unimplemented; the operator is GPU-only
// (the real kernels live in lsoftmax.cu). Dispatching to CPU aborts loudly
// instead of silently producing garbage.
template<typename DType>
inline void LSoftmaxForward(const Tensor<cpu, 2, DType> &x,
                            const Tensor<cpu, 2, DType> &w,
                            const Tensor<cpu, 1, DType> &label,
                            const Tensor<cpu, 2, DType> &out,
                            const Tensor<cpu, 1, DType> &x_norm,
                            const Tensor<cpu, 1, DType> &w_norm,
                            const Tensor<cpu, 1, DType> &k_table,
                            const Tensor<cpu, 1, DType> &c_table,
                            const int margin,
                            const DType beta) {
  LOG(FATAL) << "Not Implemented.";
}

// CPU backward pass — same story: declared so the template dispatch compiles,
// but any CPU invocation is a fatal error.
template<typename DType>
inline void LSoftmaxBackward(const Tensor<cpu, 2, DType> &x,
                             const Tensor<cpu, 2, DType> &w,
                             const Tensor<cpu, 1, DType> &label,
                             const Tensor<cpu, 1, DType> &x_norm,
                             const Tensor<cpu, 1, DType> &w_norm,
                             const Tensor<cpu, 2, DType> &o_grad,
                             const Tensor<cpu, 2, DType> &x_grad,
                             const Tensor<cpu, 2, DType> &w_grad,
                             const Tensor<cpu, 2, DType> &workspace,
                             const Tensor<cpu, 1, DType> &k_table,
                             const Tensor<cpu, 1, DType> &c_table,
                             const int margin,
                             const DType beta) {
  LOG(FATAL) << "Not Implemented.";
}

}  // namespace mshadow

namespace mxnet {
namespace op {

// Explicit CPU specialization of the operator factory. Selects the concrete
// LSoftmaxOp instantiation for the requested real dtype.
template<>
Operator *CreateOp<cpu>(LSoftmaxParam param, int dtype) {
  Operator *op = NULL;
  MSHADOW_REAL_TYPE_SWITCH(dtype, DType, {
    op = new LSoftmaxOp<cpu, DType>(param);
  })
  return op;
}

// Standard MXNet property hook: validate types/shapes, then dispatch to the
// device-specific CreateOp via DO_BIND_DISPATCH (which performs the return).
Operator *LSoftmaxProp::CreateOperatorEx(Context ctx, std::vector<TShape> *in_shape,
                                         std::vector<int> *in_type) const {
  std::vector<TShape> out_shape, aux_shape;
  std::vector<int> out_type, aux_type;
  CHECK(InferType(in_type, &out_type, &aux_type));
  CHECK(InferShape(in_shape, &out_shape, &aux_shape));
  DO_BIND_DISPATCH(CreateOp, param_, in_type->at(0));
}

DMLC_REGISTER_PARAMETER(LSoftmaxParam);

MXNET_REGISTER_OP_PROPERTY(LSoftmax, LSoftmaxProp)
.describe("LSoftmax from <Large-Margin Softmax Loss for Convolutional Neural Networks>")
.add_argument("data", "Symbol", "data")
.add_argument("weight", "Symbol", "weight")
.add_argument("label", "Symbol", "label")
.add_arguments(LSoftmaxParam::__FIELDS__());

}  // namespace op
}  // namespace mxnet