# -- Q&A page content (code reconstructed from the collapsed paste). --
# Original question, translated: "I am trying to reproduce a simple piece of
# code in TensorFlow. I have my own function used as a metric for the model
# (a slightly modified triplet loss), but the problem is the same even with a
# plain function. When I run this code I get the following (frustrating)
# error. NOTE: tf.executing_eagerly() is True in my TensorFlow 2.0.0."

import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.keras.activations import sigmoid
from tensorflow.keras import backend
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Input, Embedding, Flatten, Dense, Dropout, Lambda, dot, concatenate


def bpr_triplet_loss(inputs):
    """BPR (Bayesian Personalized Ranking) triplet loss.

    Parameters
    ----------
    inputs : list of three tensors
        ``[anchor, positive, negative]``, each of shape (batch, emb_dim).

    Returns
    -------
    A (batch, 1) tensor: ``1 - sigmoid(score(a, pos) - score(a, neg))``.

    FIX: the original decorated this with ``@tf.function`` and then unpacked a
    single concatenated symbolic tensor (``a, b, c = tensor``). Iterating or
    unpacking a symbolic tensor while a graph is being traced is exactly what
    raises ``OperatorNotAllowedInGraphError``. Dropping the decorator (Keras
    builds the graph itself) and feeding a Python *list* of tensors makes the
    unpacking ordinary list unpacking, which is always allowed.
    """
    anchor_latent, positive_item_latent, negative_item_latent = inputs
    # BPR loss: push the anchor·positive score above the anchor·negative score.
    loss = 1.0 - backend.sigmoid(
        backend.sum(anchor_latent * positive_item_latent, axis=-1, keepdims=True)
        - backend.sum(anchor_latent * negative_item_latent, axis=-1, keepdims=True))
    return loss


def getModel(n_users, n_items, emb_dim=20):
    """Build and compile the BPR triplet model.

    Parameters
    ----------
    n_users, n_items : int
        Vocabulary sizes for the user / item embedding tables (+1 row each,
        presumably to leave room for 0-based or padding ids — confirm).
    emb_dim : int, default 20
        Embedding dimensionality.

    Returns
    -------
    A compiled ``tf.keras.Model`` mapping (user, pos_item, neg_item) id
    triples to the per-example BPR loss value.
    """
    # Input layers: one integer id per example for each leg of the triplet.
    user_input = Input(shape=[1], name='user_input')
    pos_item_input = Input(shape=[1], name='pos_item_input')
    neg_item_input = Input(shape=[1], name='neg_item_input')

    # Embedding layers; positive and negative items share one embedding table.
    user_embedding = Embedding(output_dim=emb_dim, input_dim=n_users + 1,
                               input_length=1, name='user_emb')(user_input)
    item_embedding = Embedding(output_dim=emb_dim, input_dim=n_items + 1,
                               input_length=1, name='item_emb')
    pos_item_embedding = item_embedding(pos_item_input)
    neg_item_embedding = item_embedding(neg_item_input)

    user_vecs = Flatten()(user_embedding)
    pos_item_vecs = Flatten()(pos_item_embedding)
    neg_item_vecs = Flatten()(neg_item_embedding)

    # FIX: pass the three tensors as a list instead of concatenate()-ing them;
    # bpr_triplet_loss unpacks a Python list, never a symbolic tensor. This is
    # the change that removes the OperatorNotAllowedInGraphError.
    loss = Lambda(bpr_triplet_loss, output_shape=(1,))(
        [user_vecs, pos_item_vecs, neg_item_vecs])

    # FIX: the original referenced undefined names (anchor, positive, negative)
    # here — use the Input layers actually defined above.
    model = Model(inputs=[user_input, pos_item_input, neg_item_input],
                  outputs=loss)
    model.compile(optimizer='Adam', loss='mse', metrics=["mae"])
    # FIX: the original never returned the model, so getModel() yielded None.
    return model
即使已啟用急切執行(eager execution)並添加了 @tf.function 裝飾器,OperatorNotAllowedInGraphError 錯誤仍然存在
慕桂英3389331
2022-10-25 14:42:06