Created by: willthefrog
PR types
Performance optimization
PR changes
Others
Describe
Make reshape, squeeze, and unsqueeze inplace in dygraph mode.
This is a short-term solution, so that we can use canonical broadcasting semantics, i.e., remove the axis
argument.
Example of the generated op implementation:
// Generated pybind wrapper for the "reshape2" op in dygraph (imperative) mode.
// Returns the pair (Out, XShape). Out shares its underlying buffer with X
// (see ShareBufferWith below), which is what makes the reshape inplace.
// NOTE(review): "const py::args* & args" is preserved from the original; it
// looks like a rendering artifact of "const py::args& args" -- confirm
// against the actual generator output before relying on this signature.
std::tuple<std::shared_ptr<imperative::VarBase>,
           std::shared_ptr<imperative::VarBase>>
imperative_reshape2(const std::shared_ptr<imperative::VarBase>& X,
                    const py::args* & args) {
  // Collect op attributes (e.g. the target shape) from the Python varargs.
  framework::AttributeMap attrs;
  ConstructAttrMapFromPyArgs(&attrs, args);
  {
    // Release the GIL for the tracing work; no Python objects are touched
    // inside this scope.
    py::gil_scoped_release release;
    auto tracer = imperative::GetCurrentTracer();
    // Fresh output variables with tracer-unique names.
    imperative::NameVarBaseMap outs = {
        {"Out",
         {std::shared_ptr<imperative::VarBase>(
             new imperative::VarBase(tracer->GenerateUniqueName()))}},
        {"XShape",
         {std::shared_ptr<imperative::VarBase>(
             new imperative::VarBase(tracer->GenerateUniqueName()))}}};
    imperative::NameVarBaseMap ins = {{"X", {X}}};
    if (ins["X"][0]->Var().IsType<paddle::framework::LoDTensor>()) {
      // Share the input's buffer with the output: no data copy, so the
      // reshape is effectively performed inplace.
      outs["Out"][0]
          ->MutableVar()
          ->GetMutable<paddle::framework::LoDTensor>()
          ->ShareBufferWith(
              ins["X"][0]->Var().Get<paddle::framework::LoDTensor>());
    } else {
      PADDLE_THROW("Variables must be of type LoDTensor");
    }
    tracer->TraceOp("reshape2", ins, outs, attrs);
    return std::make_tuple(outs["Out"][0], outs["XShape"][0]);
  }
}