def make_feeds(
    proto: Union[onnx.ModelProto, List[str]],
    inputs: Any,
    use_numpy: bool = False,
    copy: bool = False,
    check_flatten: bool = True,
) -> Dict[str, Union[torch.Tensor, np.ndarray]]:
    """
    Serializes the inputs to produce feeds expected by
    :class:`onnxruntime.InferenceSession`.

    :param proto: onnx model or list of names
    :param inputs: any kind of inputs
    :param use_numpy: if True, converts torch tensors into numpy arrays
    :param copy: a copy is made, this should be the case if the inputs is ingested by
        ``OrtValue``
    :param check_flatten: if True, checks the ``torch.utils._pytree.tree_flatten``
        returns the same number of outputs
    :return: feeds dictionary
    """
    flat = flatten_object(inputs, drop_keys=True)
    # Sanity check (skippable via check_flatten, and stripped under `python -O`):
    # our flattening must agree with pytree's count when every leaf is a tensor
    # and the dynamic cache serialization is registered.
    assert (
        not check_flatten
        or not all(isinstance(obj, torch.Tensor) for obj in flat)
        or not is_cache_dynamic_registered(fast=True)
        or len(flat) == len(torch.utils._pytree.tree_flatten(inputs)[0])
    ), (
        f"Unexpected number of flattened objects, "
        f"{string_type(flat, with_shape=True)} != "
        f"{string_type(torch.utils._pytree.tree_flatten(inputs)[0], with_shape=True)}"
    )
    if use_numpy:
        # Detach + move to CPU before the numpy conversion; non-tensors pass through.
        flat = [t.detach().cpu().numpy() if isinstance(t, torch.Tensor) else t for t in flat]
    # Input names come from the model graph when a ModelProto is given,
    # otherwise the caller supplied the name list directly.
    names = [i.name for i in proto.graph.input] if isinstance(proto, onnx.ModelProto) else proto
    if copy:
        # numpy arrays expose ``copy``; torch tensors do not and are cloned instead.
        flat = [t.copy() if hasattr(t, "copy") else t.clone() for t in flat]
    return dict(zip(names, flat))