cjweeks/tensorflow-cmake

Not able to get cost from "SoftmaxCrossEntropyWithLogits"

sansinghsanjay opened this issue · 0 comments

Hi,

I am trying to write a simple neural network with the C++ TensorFlow API. I am unable to get the cost from the "SoftmaxCrossEntropyWithLogits" function, and I don't know the correct syntax for calling it.

I also raised this issue on StackOverflow but didn't get a solution there. Here is the StackOverflow link

Here is my C++ TensorFlow code:

// libraries
#include <iostream>
#include <cstdlib>
#include "tensorflow/cc/client/client_session.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/tensor.h"

using namespace std;
using namespace tensorflow;
using namespace tensorflow::ops;

// main function
int main(int argc, char *argv[]) {
	// clear terminal
	system("clear");
	// creating tensorgraph
	Scope root = Scope::NewRootScope();
	// creating constants (note: these shapes don't match the placeholders below)
	auto x1 = Const(root, {{3.f}, {2.f}, {8.f}});
	auto y1 = Const(root, {{0.f}, {1.f}, {0.f}});
	// creating placeholder
	auto x = Placeholder(root, DT_FLOAT, Placeholder::Shape({-1, 784}));
	auto y = Placeholder(root, DT_FLOAT, Placeholder::Shape({-1, 10}));
	// first layer
	TensorShape weight_shape_1({784, 256});
	TensorShape bias_shape_1({256});
	auto weight_1 = Variable(root, weight_shape_1, DT_FLOAT);
	auto bias_1 = Variable(root, bias_shape_1, DT_FLOAT);
	auto layer_1 = Relu(root.WithOpName("layer_1"), Add(root, MatMul(root, x, weight_1), bias_1));
	// second layer
	TensorShape weight_shape_2({256, 256});
	TensorShape bias_shape_2({256});
	auto weight_2 = Variable(root, weight_shape_2, DT_FLOAT);
	auto bias_2 = Variable(root, bias_shape_2, DT_FLOAT);
	auto layer_2 = Relu(root.WithOpName("layer_2"), Add(root, MatMul(root, layer_1, weight_2), bias_2));
	// output layer (the logits must match the shape of the labels y, i.e. {-1, 10})
	TensorShape weight_shape_output({256, 10});
	TensorShape bias_shape_output({10});
	auto weight_output = Variable(root, weight_shape_output, DT_FLOAT);
	auto bias_output = Variable(root, bias_shape_output, DT_FLOAT);
	auto output_layer = Add(root.WithOpName("output_layer"), MatMul(root, layer_2, weight_output), bias_output);
	// defining the loss function (this is where I am stuck)
	auto cost = SoftmaxCrossEntropyWithLogits(root.WithOpName("cost"), output_layer, y);
	// taking the mean of the cost (my attempt, does not compile)
	//auto mean_cost = Mean(root.WithOpName("mean_cost"), cost[0], Input({0}));
	// defining the optimizer (my attempt, does not compile)
	//auto optimizer = ApplyAdam(root.WithOpName("optimizer"), cost, Input({0.05f}));
	// for holding output
	vector<Tensor> output;
	// creating session
	ClientSession session(root);
	// training the network (my attempt, does not compile: cost is not a plain Output)
	//session.Run({{x, x1}, {y, y1}}, {cost}, &output);
	cout << "DONE" << endl;
	return 0;
}
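
From reading the generated op header (tensorflow/cc/ops/nn_ops.h), it looks like SoftmaxCrossEntropyWithLogits returns a struct with two outputs, loss and backprop, rather than a single Output, so I suspect the loss has to be fetched as cost.loss. Below is a minimal, untested sketch of how I think the end of main should look (replacing everything after the cost line); the RandomNormal initialization and the x_data/y_data tensors are my own guesses for illustration:

	// sketch (untested): initialize the variables, then fetch cost.loss
	auto init_w1 = Assign(root, weight_1, RandomNormal(root, {784, 256}, DT_FLOAT));
	auto init_b1 = Assign(root, bias_1, RandomNormal(root, {256}, DT_FLOAT));
	auto init_w2 = Assign(root, weight_2, RandomNormal(root, {256, 256}, DT_FLOAT));
	auto init_b2 = Assign(root, bias_2, RandomNormal(root, {256}, DT_FLOAT));
	auto init_wo = Assign(root, weight_output, RandomNormal(root, {256, 10}, DT_FLOAT));
	auto init_bo = Assign(root, bias_output, RandomNormal(root, {10}, DT_FLOAT));
	// dummy feed data shaped to match the placeholders (hypothetical batch of 1)
	Tensor x_data(DT_FLOAT, TensorShape({1, 784}));
	Tensor y_data(DT_FLOAT, TensorShape({1, 10}));
	x_data.flat<float>().setZero();
	y_data.flat<float>().setZero();
	vector<Tensor> output;
	ClientSession session(root);
	// run the Assign ops once so the variables are initialized
	TF_CHECK_OK(session.Run({init_w1, init_b1, init_w2, init_b2, init_wo, init_bo}, &output));
	// fetch the loss output of the op: one value per row of the batch
	TF_CHECK_OK(session.Run({{x, x_data}, {y, y_data}}, {cost.loss}, &output));
	cout << output[0].DebugString() << endl;

If that's right, I guess the scalar cost would come from reducing that vector, e.g. Mean(root.WithOpName("mean_cost"), cost.loss, 0), instead of cost[0]. Can anyone confirm the correct syntax?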

Please help me.

Thanks & Regards :-)