You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
ValueError: Passed in object <KerasVariable shape=(1, 64), dtype=float64, path=dense_15/kernel> of type 'Variable', not tf.Tensor or tf.Variable or ExtensionType.
#19874
Open
hgaur0007 opened this issue
Jun 18, 2024
· 0 comments
Cell In[42], line 83, in model.network_learn(self, r)
80 self.bounds = {"lb" : tf.math.reduce_min(r),
81 "ub" : tf.math.reduce_max(r)}
82 for i in range(self.num_epoch):
---> 83 L, g = self.get_grad(r)
84 self.train_op.apply_gradients(zip(g, self.trainable_variables))
85 self.adam_loss_hist.append(L)
Cell In[42], line 73, in model.get_grad(self, r)
71 def get_grad(self, r):
72 with tf.GradientTape() as tape:
---> 73 tape.watch(self.trainable_variables)
74 L = self.get_loss(r)
75 g = tape.gradient(L, self.trainable_variables)
File ~\AppData\Roaming\Python\Python311\site-packages\tensorflow\python\eager\backprop.py:873, in GradientTape.watch(self, tensor)
864 def watch(self, tensor):
865 """Ensures that tensor is being traced by this tape.
866
867 Args:
(...)
871 ValueError: if it encounters something that is not a tensor.
872 """
--> 873 for t in _extract_tensors_and_variables(tensor):
874 if not backprop_util.IsTrainable(t):
875 logging.log_first_n(
876 logging.WARN, "The dtype of the watched tensor must be "
877 "floating (e.g. tf.float32), got %r", 5, t.dtype)
File ~\AppData\Roaming\Python\Python311\site-packages\tensorflow\python\eager\backprop.py:700, in _extract_tensors_and_variables(tensor)
698 yield from _extract_tensors_and_variables(components)
699 else:
--> 700 raise ValueError(f"Passed in object {obj} of type {type(obj).__name__!r}"
701 f", not tf.Tensor or tf.Variable or ExtensionType.")
ValueError: Passed in object <KerasVariable shape=(1, 64), dtype=float64, path=dense_15/kernel> of type 'Variable', not tf.Tensor or tf.Variable or ExtensionType.
The text was updated successfully, but these errors were encountered:
Hello Everyone,
Could you please help me out with the error displayed above while running the following code:
My Keras version is '3.3.3.dev2024060803'
import tensorflow as tf
import numpy as np
import time
from tensorflow.keras.layers import Dense
import tensorflow_probability as tfp
tf.random.set_seed(42)  # fix TF's global RNG so weight initializations are reproducible across runs
class model(tf.keras.Model):
    """Keras Model subclass holding a layer list plus training hyper-parameters.

    The training methods (`network_learn`, `get_grad`, `get_loss`) appear in the
    traceback but are defined in another notebook cell not shown here.
    """

    # NOTE(review): the pasted snippet read `def init` / `super(model, self).init()`
    # — markdown stripped the double underscores. Restored the `__init__` dunder;
    # without it the class has no constructor and `model([l1, l2, l3], ...)` fails.
    def __init__(self, layers, train_op, num_epoch, print_epoch):
        super().__init__()
        self.model_layers = layers      # layers applied in sequence by the forward pass
        self.train_op = train_op        # optimizer instance, e.g. tf.keras.optimizers.Adam()
        self.num_epoch = num_epoch      # number of Adam iterations in network_learn()
        self.print_epoch = print_epoch  # logging cadence — presumably used in network_learn; not visible here
        self.adam_loss_hist = []        # per-epoch loss values appended during training
# Reproduction script: build training points on [rmin, rmax], assemble a small
# tanh MLP, and train it with Adam via model.network_learn (defined elsewhere).
rmin, rmax = -1, -0.7
numPts = 25
data_type = "float64"
a = tf.constant(-1, dtype=tf.float64)

# numPts evenly spaced samples, reshaped to a (numPts, 1) column vector.
rint = np.linspace(rmin, rmax, numPts).astype(data_type).reshape(-1, 1)

# Define the model: two tanh hidden layers and a linear output layer.
tf.keras.backend.set_floatx(data_type)
l1 = tf.keras.layers.Dense(25, "tanh")
l2 = tf.keras.layers.Dense(25, "tanh")
l3 = tf.keras.layers.Dense(1, None)
train_op = tf.keras.optimizers.Adam()
num_epoch = 200
print_epoch = 10
pred_model = model([l1, l2, l3], train_op, num_epoch, print_epoch)

# Convert the training data to a tensor, then train and time the run.
rint_tf = tf.convert_to_tensor(rint)
print("Training (ADAM)...")
t0 = time.time()
pred_model.network_learn(rint_tf)
t1 = time.time()
print("Time taken (ADAM)", t1 - t0, "seconds")
Following Error Appears:
ValueError Traceback (most recent call last)
Cell In[46], line 4
2 print("Training (ADAM)...")
3 t0 = time.time()
----> 4 pred_model.network_learn(rint_tf)
5 t1 = time.time()
6 print("Time taken (ADAM)", t1-t0, "seconds")
Cell In[42], line 83, in model.network_learn(self, r)
80 self.bounds = {"lb" : tf.math.reduce_min(r),
81 "ub" : tf.math.reduce_max(r)}
82 for i in range(self.num_epoch):
---> 83 L, g = self.get_grad(r)
84 self.train_op.apply_gradients(zip(g, self.trainable_variables))
85 self.adam_loss_hist.append(L)
Cell In[42], line 73, in model.get_grad(self, r)
71 def get_grad(self, r):
72 with tf.GradientTape() as tape:
---> 73 tape.watch(self.trainable_variables)
74 L = self.get_loss(r)
75 g = tape.gradient(L, self.trainable_variables)
File ~\AppData\Roaming\Python\Python311\site-packages\tensorflow\python\eager\backprop.py:873, in GradientTape.watch(self, tensor)
864 def watch(self, tensor):
865 """Ensures that `tensor` is being traced by this tape.
866
867 Args:
(...)
871 ValueError: if it encounters something that is not a tensor.
872 """
--> 873 for t in _extract_tensors_and_variables(tensor):
874 if not backprop_util.IsTrainable(t):
875 logging.log_first_n(
876 logging.WARN, "The dtype of the watched tensor must be "
877 "floating (e.g. tf.float32), got %r", 5, t.dtype)
File ~\AppData\Roaming\Python\Python311\site-packages\tensorflow\python\eager\backprop.py:700, in _extract_tensors_and_variables(tensor)
698 yield from _extract_tensors_and_variables(components)
699 else:
--> 700 raise ValueError(f"Passed in object {obj} of type {type(obj).__name__!r}"
701 f", not tf.Tensor or tf.Variable or ExtensionType.")
ValueError: Passed in object <KerasVariable shape=(1, 64), dtype=float64, path=dense_15/kernel> of type 'Variable', not tf.Tensor or tf.Variable or ExtensionType.
The text was updated successfully, but these errors were encountered: