Commit 5b697c71, authored by Megvii Engine Team

chore(imperative): remove unused reference to megbrain_graph

GitOrigin-RevId: 2c31f13bb8e1df28c7cf6e8c3d34b5d68d430b0f
Parent commit: f0170b9c
......@@ -15,7 +15,6 @@ from .._imperative_rt.core2 import Tensor, apply, dtype_promotion, get_device
from ..ops import builtin
from ..ops.special import Const
from .dtype import is_dtype_equal, is_quantize
from .megbrain_graph import VarNode
_enable_convert_inputs = True
......@@ -60,7 +59,7 @@ def astype(x, dtype):
def convert_single_value(v, *, dtype=None, device=None):
if isinstance(v, (Tensor, VarNode)):
if isinstance(v, Tensor):
if not is_quantize(v.dtype):
v = astype(v, dtype)
else:
......
......@@ -14,7 +14,7 @@ import numpy as np
from ..core._imperative_rt.core2 import apply
from ..core.ops import builtin
from ..core.ops.builtin import Elemwise
from ..core.tensor import megbrain_graph, utils
from ..core.tensor import utils
from ..core.tensor.array_method import _elwise_apply
from ..core.tensor.utils import isscalar, setscalar
from ..device import get_default_device
......@@ -77,9 +77,7 @@ __all__ = [
def _elwise(*args, mode):
tensor_args = list(
filter(lambda x: isinstance(x, (Tensor, megbrain_graph.VarNode)), args)
)
tensor_args = list(filter(lambda x: isinstance(x, Tensor), args))
if len(tensor_args) == 0:
dtype = utils.dtype_promotion(args)
first_arg = Tensor(args[0], dtype=dtype, device=get_default_device())
......
......@@ -15,7 +15,7 @@ from ..core._trace_option import use_symbolic_shape
from ..core.ops import builtin
from ..core.ops.builtin import BatchNorm
from ..core.ops.special import Const
from ..core.tensor import megbrain_graph, utils
from ..core.tensor import utils
from ..core.tensor.utils import astensor1d, setscalar
from ..distributed import WORLD, is_distributed
from ..jit.tracing import is_tracing
......@@ -416,7 +416,6 @@ def adaptive_max_pool2d(
:param oshp: `(OH, OW)` size of the output shape.
:return: output tensor.
"""
assert isinstance(inp, (Tensor, megbrain_graph.VarNode)), "inp must be Tensor type"
if isinstance(oshp, int):
oshp = (oshp, oshp)
......@@ -438,7 +437,6 @@ def adaptive_avg_pool2d(
:param oshp: `(OH, OW)` size of the output shape.
:return: output tensor.
"""
assert isinstance(inp, (Tensor, megbrain_graph.VarNode)), "inp must be Tensor type"
if isinstance(oshp, int):
oshp = (oshp, oshp)
......@@ -1181,7 +1179,6 @@ def remap(
op = builtin.Remap(
imode=interp_mode, border_type=border_mode, format="NCHW", scalar=scalar
)
assert isinstance(inp, (Tensor, megbrain_graph.VarNode)), "inp must be Tensor type"
(result,) = apply(op, inp, map_xy)
return result
......@@ -1759,7 +1756,6 @@ def nvof(src: Tensor, precision: int = 1) -> Tensor:
print(result.numpy())
"""
assert isinstance(src, (Tensor, megbrain_graph.VarNode)), "src must be Tensor type"
assert src.ndim == 5 and src.shape[4] == 4
src = src.detach()
......
......@@ -15,7 +15,6 @@ from .. import functional as F
from ..autodiff import Function
from ..core._imperative_rt.core2 import apply
from ..core.ops import builtin
from ..core.tensor import megbrain_graph
from ..core.tensor.dtype import _metadata_dict
from ..tensor import Tensor
......@@ -94,14 +93,6 @@ def fake_quant_tensor(inp: Tensor, qmin: int, qmax: int, q_dict: Dict) -> Tensor
if q_dict["mode"] == QuantMode.ASYMMERTIC:
zero_point = q_dict["zero_point"]
assert isinstance(inp, (Tensor, megbrain_graph.VarNode)), "inp must be Tensor type"
assert isinstance(
scale, (Tensor, megbrain_graph.VarNode)
), "scale must be Tensor type"
assert isinstance(
zero_point, (Tensor, megbrain_graph.VarNode)
), "zero point must be Tensor type"
op = builtin.FakeQuant(qmin=qmin, qmax=qmax)
return apply(op, inp, scale, zero_point)[0]
......
......@@ -12,6 +12,7 @@
#include "megbrain/dtype.h"
#include "megbrain/common.h"
#include "megbrain/imperative/ops/utility.h"
#include "megbrain/imperative/ops/backward_graph.h"
#include "./tensor.h"
#include "./grad.h"
......@@ -156,7 +157,7 @@ PyObject* py_apply(PyObject* self, PyObject*const* args, size_t nargs/* , PyObje
ctx.args = &tensors[0];
ctx.nargs = nargs;
ctx.pytype = pytype;
if (strstr(op->ob_type->tp_name, "BackwardGraph")) {
if (ctx.op->same_type<BackwardGraph>()) {
ctx.backward = true;
}
......@@ -165,7 +166,9 @@ PyObject* py_apply(PyObject* self, PyObject*const* args, size_t nargs/* , PyObje
auto* t = tensors[i] = tw->m_tensor.get();
ctx.flags |= t->m_flags;
} else {
PyErr_SetString(PyExc_TypeError, "expect Tensor");
PyErr_SetString(PyExc_TypeError,
ssprintf("op %s expect type Tensor as inputs, got %s actually",
ctx.op->make_name().c_str(), Py_TYPE(args[i])->tp_name).c_str());
return nullptr;
}
}
......
Markdown is supported.
Upload progress: 0%.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register.