AssertionError: Could not compute output Tensor

I am trying to build a model with the functional API that takes multiple inputs and produces multiple outputs. I followed this guide when writing the code.

def create_model_multiple():
    input1 = tf.keras.Input(shape=(13,), name='I1')
    input2 = tf.keras.Input(shape=(6,), name='I2')
    hidden1 = tf.keras.layers.Dense(units=4, activation='relu')(input1)
    hidden2 = tf.keras.layers.Dense(units=4, activation='relu')(input2)
    merge = tf.keras.layers.concatenate([hidden1, hidden2])
    hidden3 = tf.keras.layers.Dense(units=3, activation='relu')(merge)
    output1 = tf.keras.layers.Dense(units=2, activation='softmax', name='O1')(hidden3)
    output2 = tf.keras.layers.Dense(units=2, activation='softmax', name='O2')(hidden3)
    model = tf.keras.models.Model(inputs=[input1, input2], outputs=[output1, output2])
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model

My model.fit call looks like this:

history = model.fit({'I1': train_data, 'I2': new_train_data},
                    {'O1': train_labels, 'O2': new_target_label},
                    validation_data=(val_data, val_labels),
                    epochs=100,
                    verbose=1)

The shapes of the input data are:

train_data is (192, 13)
new_train_data is (192, 6)
train_labels and new_target_label are (192,)

The code runs for a few steps and then fails with this error:

Epoch 1/100
1/6 [====>.........................] - ETA: 0s - loss: 360.3317 - O1_loss: 127.8019 - O2_loss: 232.5298 - O1_accuracy: 0.3438 - O2_accuracy: 0.4062
---------------------------------------------------------------------------
AssertionError                            Traceback (most recent call last)
<ipython-input-29-db61ad0a9d8b> in <module>
      3                     validation_data=(val_data,val_labels),
      4                     epochs=100,
----> 5                     verbose = 1)

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
     64   def _method_wrapper(self, *args, **kwargs):
     65     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
---> 66       return method(self, *args, **kwargs)
     67
     68     # Running inside `run_distribute_coordinator` already.

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
    870               workers=workers,
    871               use_multiprocessing=use_multiprocessing,
--> 872               return_dict=True)
    873           val_logs = {'val_' + name: val for name, val in val_logs.items()}
    874           epoch_logs.update(val_logs)

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
     64   def _method_wrapper(self, *args, **kwargs):
     65     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
---> 66       return method(self, *args, **kwargs)
     67
     68     # Running inside `run_distribute_coordinator` already.

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py in evaluate(self, x, y, batch_size, verbose, sample_weight, steps, callbacks, max_queue_size, workers, use_multiprocessing, return_dict)
   1079                 step_num=step):
   1080               callbacks.on_test_batch_begin(step)
-> 1081               tmp_logs = test_function(iterator)
   1082               # Catch OutOfRangeError for Datasets of unknown size.
   1083               # This blocks until the batch has finished executing.

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
    578         xla_context.Exit()
    579     else:
--> 580       result = self._call(*args, **kwds)
    581
    582     if tracing_count == self._get_tracing_count():

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
    616       # In this case we have not created variables on the first call. So we can
    617       # run the first trace but we should fail if variables are created.
--> 618       results = self._stateful_fn(*args, **kwds)
    619       if self._created_variables:
    620         raise ValueError("Creating variables on a non-first call to a function"

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in __call__(self, *args, **kwargs)
   2417     """Calls a graph function specialized to the inputs."""
   2418     with self._lock:
-> 2419       graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
   2420     return graph_function._filtered_call(args, kwargs)  # pylint: disable=protected-access
   2421

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
   2772           and self.input_signature is None
   2773           and call_context_key in self._function_cache.missed):
-> 2774         return self._define_function_with_shape_relaxation(args, kwargs)
   2775
   2776       self._function_cache.missed.add(call_context_key)

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in _define_function_with_shape_relaxation(self, args, kwargs)
   2704         relaxed_arg_shapes)
   2705     graph_function = self._create_graph_function(
-> 2706         args, kwargs, override_flat_arg_shapes=relaxed_arg_shapes)
   2707     self._function_cache.arg_relaxed[rank_only_cache_key] = graph_function
   2708

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   2665             arg_names=arg_names,
   2666             override_flat_arg_shapes=override_flat_arg_shapes,
-> 2667             capture_by_value=self._capture_by_value),
   2668         self._function_attributes,
   2669         # Tell the ConcreteFunction to clean up its graph once it goes out of

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    979         _, original_func = tf_decorator.unwrap(python_func)
    980
--> 981       func_outputs = python_func(*func_args, **func_kwargs)
    982
    983       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
    439         # __wrapped__ allows AutoGraph to swap in a converted function. We give
    440         # the function a weak reference to itself to avoid a reference cycle.
--> 441         return weak_wrapped_fn().__wrapped__(*args, **kwds)
    442     weak_wrapped_fn = weakref.ref(wrapped_fn)
    443

c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
    966           except Exception as e:  # pylint:disable=broad-except
    967             if hasattr(e, "ag_error_metadata"):
--> 968               raise e.ag_error_metadata.to_exception(e)
    969             else:
    970               raise

AssertionError: in user code:

    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py:941 test_function  *
        outputs = self.distribute_strategy.run(
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:951 run  **
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2290 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2649 _call_for_each_replica
        return fn(*args, **kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\training.py:909 test_step  **
        y_pred = self(x, training=False)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\base_layer.py:927 __call__
        outputs = call_fn(cast_inputs, *args, **kwargs)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\network.py:719 call
        convert_kwargs_to_constants=base_layer_utils.call_context().saving)
    c:\users\aniket\documents\aniket\learning-ml\ml_env\lib\site-packages\tensorflow\python\keras\engine\network.py:899 _run_internal_graph
        assert str(id(x)) in tensor_dict, 'Could not compute output ' + str(x)

    AssertionError: Could not compute output Tensor("O1_6/Identity:0", shape=(None, 2), dtype=float32)

A Jupyter notebook with the full code is here: link


Answer:

You have to provide the validation data in the same format as your training data. The model expects two input arrays and two target arrays, but you are passing only one of each.

Here is an example:

import numpy as np
import tensorflow as tf

def create_model_multiple():
    input1 = tf.keras.Input(shape=(13,), name='I1')
    input2 = tf.keras.Input(shape=(6,), name='I2')
    hidden1 = tf.keras.layers.Dense(units=4, activation='relu')(input1)
    hidden2 = tf.keras.layers.Dense(units=4, activation='relu')(input2)
    merge = tf.keras.layers.concatenate([hidden1, hidden2])
    hidden3 = tf.keras.layers.Dense(units=3, activation='relu')(merge)
    output1 = tf.keras.layers.Dense(units=2, activation='softmax', name='O1')(hidden3)
    output2 = tf.keras.layers.Dense(units=2, activation='softmax', name='O2')(hidden3)
    model = tf.keras.models.Model(inputs=[input1, input2], outputs=[output1, output2])
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model

# dummy data with the same layout as the question: two inputs, two integer targets
x1 = np.random.uniform(0, 1, (190, 13))
x2 = np.random.uniform(0, 1, (190, 6))
val_x1 = np.random.uniform(0, 1, (50, 13))
val_x2 = np.random.uniform(0, 1, (50, 6))

y1 = np.random.randint(0, 2, 190)
y2 = np.random.randint(0, 2, 190)
val_y1 = np.random.randint(0, 2, 50)
val_y2 = np.random.randint(0, 2, 50)

model = create_model_multiple()

history = model.fit({'I1': x1, 'I2': x2},
                    {'O1': y1, 'O2': y2},
                    validation_data=([val_x1, val_x2], [val_y1, val_y2]),  # <=========
                    epochs=100,
                    verbose=1)
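If you prefer the dict-keyed style, validation_data also accepts dicts keyed by the input and output layer names, just like the training data. A minimal sketch reusing the variables from the example above (the variable names are only illustrative):

history = model.fit({'I1': x1, 'I2': x2},
                    {'O1': y1, 'O2': y2},
                    # dict-keyed validation data, equivalent to the list form above
                    validation_data=({'I1': val_x1, 'I2': val_x2},
                                     {'O1': val_y1, 'O2': val_y2}),
                    epochs=100,
                    verbose=1)

Either way, the point is the same: the validation tuple must carry two input arrays and two target arrays, matching the structure of the training data.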
