Tensorboard Error 'Can not convert a AdamOptimizer into a Tensor or Operation.'

I made a DNN regression model to predict values that are not in the data table, but I cannot get TensorBoard to work.

This code is from https://deeplearning4j.org/linear-regression.html and lecture notes written by Sunghun Kim of Hong Kong University.

    import tensorflow as tf
    import numpy as np
    tf.set_random_seed(777) #for reproducibility

    # data Import
    xy = np.loadtxt('Training_Data.csv', delimiter=',', dtype=np.float32)
    x_data = xy[:,0:-1]
    y_data = xy[:,[-1]]

    # Make sure the shape and data are OK
    print(x_data.shape, x_data)
    print(y_data.shape, y_data)

    # input place holders
    X = tf.placeholder(tf.float32, shape=[None, 2])
    Y = tf.placeholder(tf.float32, shape=[None, 1])

    # weight & bias for nn Layers

    W1 = tf.get_variable("W1", shape=[2, 512],initializer=tf.contrib.layers.xavier_initializer())
    b1 = tf.Variable(tf.random_normal([512]))
    L1 = tf.nn.relu(tf.matmul(X, W1) + b1)

    W2 = tf.get_variable("W2", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer())
    b2 = tf.Variable(tf.random_normal([512]))
    L2= tf.nn.relu(tf.matmul(L1, W2) + b2)

    W3 = tf.get_variable("W3", shape=[512, 1], initializer=tf.contrib.layers.xavier_initializer())
    b3 = tf.Variable(tf.random_normal([1]))
    hypothesis = tf.matmul(L2, W3) + b3

    # cost/loss function
    cost = tf.reduce_mean(tf.square(hypothesis - Y))

    # Minimize/Optimizer
    optimizer = tf.train.AdamOptimizer(learning_rate=1e-5)
    train = optimizer.minimize(cost)

    # Launch the graph in a session.
    sess = tf.Session()

    # Initializes global variables in the graph.
    sess.run(tf.global_variables_initializer())

    # Fit the Line with new training data
    for step in range(2001):
        cost_val, hy_val, _ = sess.run([cost, hypothesis, train], feed_dict={X: x_data, Y: y_data})
        if step % 100 == 0:
            print(step, "Cost: ", cost_val, "/n Prediction: /n", hy_val)

    # Ask the model for the value we want to predict
    print("wing loadings will be ", sess.run(hypothesis, 
                    feed_dict={X: [[0.0531, 0.05]]}))

    w2_hist=tf.summary.histogram("weight2",W2)
    cost_summ=tf.summary.scalar("cost",cost)

    summary=tf.summary.merge_all()

    #Create Summary writer
    writer=tf.summary.FileWriter('C:\\Users\\jh902\\Documents\\.logs')
    writer.add_graph(sess.graph)

    s,_= sess.run([summary, optimizer], feed_dict={X: x_data, Y: y_data})
    writer.add_summary(s, global_step=2001)
When I run the last two lines, I get this error:

    TypeError                                 Traceback (most recent call last)
    C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in __init__(self, fetches, contraction_fn)
        266         self._unique_fetches.append(ops.get_default_graph().as_graph_element(
    --> 267             fetch, allow_tensor=True, allow_operation=True))
        268       except TypeError as e:

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\framework\ops.py in as_graph_element(self, obj, allow_tensor, allow_operation)
       2469     with self._lock:
    -> 2470       return self._as_graph_element_locked(obj, allow_tensor, allow_operation)
       2471 

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\framework\ops.py in _as_graph_element_locked(self, obj, allow_tensor, allow_operation)
       2558       raise TypeError("Can not convert a %s into a %s."
    -> 2559                       % (type(obj).__name__, types_str))
       2560 

   TypeError: Can not convert a AdamOptimizer into a Tensor or Operation.

   During handling of the above exception, another exception occurred:

   TypeError                                 Traceback (most recent call last)
    <ipython-input-20-b8394996caf6> in <module>()
    ----> 1 s,_= sess.run([summary, optimizer], feed_dict={X: x_data, Y: y_data})
          2 writer.add_summary(s, global_step=2001)

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in run(self, fetches, feed_dict, options, run_metadata)
        765     try:
        766       result = self._run(None, fetches, feed_dict, options_ptr,
    --> 767                          run_metadata_ptr)
        768       if run_metadata:
        769         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
        950 
        951     # Create a fetch handler to take care of the structure of fetches.
    --> 952     fetch_handler = _FetchHandler(self._graph, fetches, feed_dict_string)
        953 
        954     # Run request and get response.

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in __init__(self, graph, fetches, feeds)
        406     """
        407     with graph.as_default():
    --> 408       self._fetch_mapper = _FetchMapper.for_fetch(fetches)
        409     self._fetches = []
        410     self._targets = []

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in for_fetch(fetch)
        228     elif isinstance(fetch, (list, tuple)):
        229       # NOTE(touts): This is also the code path for namedtuples.
    --> 230       return _ListFetchMapper(fetch)
        231     elif isinstance(fetch, dict):
        232       return _DictFetchMapper(fetch)

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in __init__(self, fetches)
        335     """
        336     self._fetch_type = type(fetches)
    --> 337     self._mappers = [_FetchMapper.for_fetch(fetch) for fetch in fetches]
        338     self._unique_fetches, self._value_indices = _uniquify_fetches(self._mappers)
        339 

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in <listcomp>(.0)
        335     """
        336     self._fetch_type = type(fetches)
    --> 337     self._mappers = [_FetchMapper.for_fetch(fetch) for fetch in fetches]
        338     self._unique_fetches, self._value_indices = _uniquify_fetches(self._mappers)
        339 

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in for_fetch(fetch)
        236         if isinstance(fetch, tensor_type):
        237           fetches, contraction_fn = fetch_fn(fetch)
    --> 238           return _ElementFetchMapper(fetches, contraction_fn)
        239     # Did not find anything.
        240     raise TypeError('Fetch argument %r has invalid type %r' %

   C:\Users\jh902\Anaconda3\lib\site-packages\tensorflow\python\client\session.py in __init__(self, fetches, contraction_fn)
        269         raise TypeError('Fetch argument %r has invalid type %r, '
        270                         'must be a string or Tensor. (%s)'
    --> 271                         % (fetch, type(fetch), str(e)))
        272       except ValueError as e:
        273         raise ValueError('Fetch argument %r cannot be interpreted as a '

   TypeError: Fetch argument <tensorflow.python.training.adam.AdamOptimizer object at 0x000001E08E7E1CF8> has invalid type <class 'tensorflow.python.training.adam.AdamOptimizer'>, must be a string or Tensor. (Can not convert a AdamOptimizer into a Tensor or Operation.)

Also, when I type

    tensorboard --logdir=.logs

into a notebook cell, I get:

      File "<ipython-input-83-e4b16f0da480>", line 1
        tensorboard --logdir=.logs
                             ^
    SyntaxError: invalid syntax
asked May 17 '17 by Jang Pierr


2 Answers

I have spotted an error here:

    optimizer = tf.train.AdamOptimizer(learning_rate=1e-5)

Instead it should have been

    optimizer = tf.train.AdamOptimizer(learning_rate=1e-5).minimize(cost)

Otherwise, you end up evaluating the optimizer object itself.

Or else, replace the optimizer in

    s, _ = sess.run([summary, optimizer], feed_dict={X: x_data, Y: y_data})

with the train op:

    s, _ = sess.run([summary, train], feed_dict={X: x_data, Y: y_data})
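
As a rough sketch, assuming the graph from the question (X, Y, cost, train, W2) and the TF 1.x summary API, the TensorBoard part could look like this, fetching the train operation rather than the optimizer object:

    # Define summaries on existing graph nodes
    w2_hist = tf.summary.histogram("weight2", W2)
    cost_summ = tf.summary.scalar("cost", cost)
    summary = tf.summary.merge_all()

    # Writer for the TensorBoard log directory
    writer = tf.summary.FileWriter('C:\\Users\\jh902\\Documents\\.logs', sess.graph)

    # Fetch the merged summary together with the train *operation*,
    # never the AdamOptimizer object itself
    s, _ = sess.run([summary, train], feed_dict={X: x_data, Y: y_data})
    writer.add_summary(s, global_step=2001)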
answered Oct 10 '22 by Cbrom


    s, _ = sess.run([summary, optimizer], feed_dict={X: x_data, Y: y_data})

This is the problem: you are trying to evaluate the optimizer. You can evaluate the train operation and fetch the cost tensor, but the optimizer itself cannot be evaluated. If you don't fetch the optimizer, the problem should be solved.
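
For illustration (a minimal sketch reusing the names from the question's code), the AdamOptimizer instance is a plain Python object, while the result of minimize() is a graph operation that sess.run can fetch:

    # The optimizer itself is just a Python object; it is not a node in the graph
    optimizer = tf.train.AdamOptimizer(learning_rate=1e-5)

    # minimize() returns a tf.Operation, which sess.run() can fetch
    train = optimizer.minimize(cost)

    # Fetch the train op (and the cost tensor), not the optimizer object
    cost_val, _ = sess.run([cost, train], feed_dict={X: x_data, Y: y_data})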

answered Oct 10 '22 by Laurens Op 't Zandt