Skip to content

Commit 25fb8cb

Browse files
committed
fix _get_graph_from_inputs recursive reference.
1 parent 5a73e69 commit 25fb8cb

File tree

4 files changed

+252
-3
lines changed

4 files changed

+252
-3
lines changed

src/TensorFlowNET.Core/ops.py.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ public static void reset_default_graph()
103103
}
104104

105105
/// <summary>
/// Convenience overload: resolve the graph for a set of op inputs with no explicit graph.
/// </summary>
/// <param name="op_input_list">Tensors whose owning graph should be located.</param>
/// <returns>The graph the inputs belong to (or the default graph).</returns>
public static Graph _get_graph_from_inputs(params Tensor[] op_input_list)
    // Pass graph: null explicitly so overload resolution binds to the
    // (Tensor[], Graph) overload below. Calling with only the named
    // op_input_list argument re-bound to this same params overload,
    // causing infinite recursion (StackOverflowException).
    => _get_graph_from_inputs(op_input_list: op_input_list, graph: null);
107107

108108
public static Graph _get_graph_from_inputs(Tensor[] op_input_list, Graph graph = null)
109109
{
Lines changed: 250 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,250 @@
1+
using System;
2+
using System.Collections;
3+
using System.Collections.Generic;
4+
using System.Diagnostics;
5+
using System.IO;
6+
using System.Linq;
7+
using System.Text;
8+
using NumSharp;
9+
using Tensorflow;
10+
using Tensorflow.Keras.Engine;
11+
using Tensorflow.Sessions;
12+
using TensorFlowNET.Examples.Text.cnn_models;
13+
using TensorFlowNET.Examples.TextClassification;
14+
using TensorFlowNET.Examples.Utility;
15+
using static Tensorflow.Python;
16+
17+
namespace TensorFlowNET.Examples
{
    /// <summary>
    /// CNN-based text classification example on the DBpedia ontology dataset.
    /// Ported from https://github.com/dongjun-Lee/text-classification-models-tf
    /// </summary>
    public class CnnTextClassification : IExample
    {
        public int Priority => 17;
        public bool Enabled { get; set; } = true;
        public string Name => "CNN Text Classification";
        // Optional cap on the number of rows read from the dataset; null = no limit.
        public int? DataLimit = null;
        // true: restore a pre-built graph from word_cnn.meta; false: build the graph in code.
        public bool ImportGraph { get; set; } = true;
        public bool UseSubset = false; // <----- set this true to use a limited subset of dbpedia

        private string dataDir = "text_classification";
        private string dataFileName = "dbpedia_csv.tar.gz";

        private const string TRAIN_PATH = "text_classification/dbpedia_csv/train.csv";
        private const string TEST_PATH = "text_classification/dbpedia_csv/test.csv";

        private const int NUM_CLASS = 14;
        private const int BATCH_SIZE = 64;
        private const int NUM_EPOCHS = 10;
        private const int WORD_MAX_LEN = 100;   // max tokens per document (word model)
        private const int CHAR_MAX_LEN = 1014;  // max characters per document (char model)

        // Last training loss observed; exposed to subclasses for inspection.
        protected float loss_value = 0;

        /// <summary>
        /// Entry point: downloads the data, then trains either from an imported
        /// meta-graph or from a graph built in code, depending on ImportGraph.
        /// </summary>
        public bool Run()
        {
            PrepareData();

            var graph = tf.Graph().as_default();
            return with(tf.Session(graph), sess =>
            {
                if (ImportGraph)
                    return RunWithImportedGraph(sess, graph);
                else
                    return RunWithBuiltGraph(sess, graph);
            });
        }

        /// <summary>
        /// Trains the word-level CNN using a graph restored from word_cnn.meta.
        /// Validates every 100 steps and checkpoints whenever validation accuracy improves.
        /// </summary>
        protected virtual bool RunWithImportedGraph(Session sess, Graph graph)
        {
            var stopwatch = Stopwatch.StartNew();
            Console.WriteLine("Building dataset...");

            var word_dict = DataHelpers.build_word_dict(TRAIN_PATH);
            var (x, y) = DataHelpers.build_word_dataset(TRAIN_PATH, word_dict, WORD_MAX_LEN);

            Console.WriteLine("\tDONE ");

            var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);
            Console.WriteLine("Training set size: " + train_x.len);
            Console.WriteLine("Test set size: " + valid_x.len);

            Console.WriteLine("Import graph...");
            var meta_file = "word_cnn.meta";
            tf.train.import_meta_graph(Path.Join("graph", meta_file));
            Console.WriteLine("\tDONE " + stopwatch.Elapsed);

            sess.run(tf.global_variables_initializer());
            var saver = tf.train.Saver(tf.global_variables());

            var train_batches = batch_iter(train_x, train_y, BATCH_SIZE, NUM_EPOCHS);
            double max_accuracy = 0;

            // Look up the tensors/ops of the imported graph by the names they
            // were exported under.
            Tensor is_training = graph.OperationByName("is_training");
            Tensor model_x = graph.OperationByName("x");
            Tensor model_y = graph.OperationByName("y");
            Tensor loss = graph.OperationByName("loss/Mean");
            Operation optimizer = graph.OperationByName("loss/Adam");
            Tensor global_step = graph.OperationByName("Variable");
            Tensor accuracy = graph.OperationByName("accuracy/accuracy");

            stopwatch = Stopwatch.StartNew();
            int i = 0;
            foreach (var (x_batch, y_batch, total) in train_batches)
            {
                i++;
                var train_feed_dict = new FeedDict
                {
                    [model_x] = x_batch,
                    [model_y] = y_batch,
                    [is_training] = true,
                };

                var result = sess.run(new ITensorOrOperation[] { optimizer, global_step, loss }, train_feed_dict);
                loss_value = result[2];
                var step = (int)result[1];
                if (step % 10 == 0)
                {
                    // Extrapolate total training time from the average time per batch so far.
                    var estimate = TimeSpan.FromSeconds((stopwatch.Elapsed.TotalSeconds / i) * total);
                    Console.WriteLine($"Training on batch {i}/{total} loss: {loss_value}. Estimated training time: {estimate}");
                }

                if (step % 100 == 0)
                {
                    // Test accuracy with validation data for each epoch.
                    var valid_batches = batch_iter(valid_x, valid_y, BATCH_SIZE, 1);
                    var (sum_accuracy, cnt) = (0.0f, 0);
                    foreach (var (valid_x_batch, valid_y_batch, total_validation_batches) in valid_batches)
                    {
                        var valid_feed_dict = new FeedDict
                        {
                            [model_x] = valid_x_batch,
                            [model_y] = valid_y_batch,
                            [is_training] = false
                        };
                        var result1 = sess.run(accuracy, valid_feed_dict);
                        float accuracy_value = result1;
                        sum_accuracy += accuracy_value;
                        cnt += 1;
                    }

                    var valid_accuracy = sum_accuracy / cnt;

                    print($"\nValidation Accuracy = {valid_accuracy}\n");

                    // Save model whenever validation accuracy reaches a new best.
                    if (valid_accuracy > max_accuracy)
                    {
                        max_accuracy = valid_accuracy;
                        saver.save(sess, $"{dataDir}/word_cnn.ckpt", global_step: step.ToString());
                        print("Model is saved.\n");
                    }
                }
            }

            return false;
        }

        /// <summary>
        /// Builds the char-level CNN graph in code. Training is not implemented yet.
        /// </summary>
        protected virtual bool RunWithBuiltGraph(Session session, Graph graph)
        {
            Console.WriteLine("Building dataset...");
            var (x, y, alphabet_size) = DataHelpers.build_char_dataset("train", "word_cnn", CHAR_MAX_LEN, DataLimit);

            var (train_x, valid_x, train_y, valid_y) = train_test_split(x, y, test_size: 0.15f);

            ITextClassificationModel model = null;
            // todo train the model
            return false;
        }

        // TODO: this originally is an SKLearn utility function. it randomizes train and test which we don't do here
        /// <summary>
        /// Splits x/y into a leading training slice and a trailing validation slice
        /// (no shuffling). test_size is the fraction held out for validation.
        /// </summary>
        private (NDArray, NDArray, NDArray, NDArray) train_test_split(NDArray x, NDArray y, float test_size = 0.3f)
        {
            Console.WriteLine("Splitting in Training and Testing data...");
            int len = x.shape[0];
            //int classes = y.Data<int>().Distinct().Count();
            //int samples = len / classes;
            int train_size = (int)Math.Round(len * (1 - test_size));
            var train_x = x[new Slice(stop: train_size), new Slice()];
            var valid_x = x[new Slice(start: train_size), new Slice()];
            var train_y = y[new Slice(stop: train_size)];
            var valid_y = y[new Slice(start: train_size)];
            Console.WriteLine("\tDONE");
            return (train_x, valid_x, train_y, valid_y);
        }

        /// <summary>
        /// Fills shuffled_x/shuffled_y by repeatedly picking a random label and
        /// consuming one of its remaining sample indices, until the output is full.
        /// `labels` maps label -> set of sample indices still available; it is
        /// mutated (drained) by this method.
        /// </summary>
        private static void FillWithShuffledLabels(int[][] x, int[] y, int[][] shuffled_x, int[] shuffled_y, Random random, Dictionary<int, HashSet<int>> labels)
        {
            int i = 0;
            var label_keys = labels.Keys.ToArray();
            while (i < shuffled_x.Length)
            {
                var key = label_keys[random.Next(label_keys.Length)];
                var set = labels[key];
                var index = set.First();
                // BUG FIX: remove the drawn index so it cannot be picked again.
                // Previously the index stayed in the set, so the same sample was
                // emitted over and over and the empty-set branch below could
                // never be reached.
                set.Remove(index);
                if (set.Count == 0)
                {
                    labels.Remove(key); // remove the set as it is empty
                    label_keys = labels.Keys.ToArray();
                }
                shuffled_x[i] = x[index];
                shuffled_y[i] = y[index];
                i++;
            }
        }

        /// <summary>
        /// Yields (x_batch, y_batch, total_batches) tuples over `inputs`/`outputs`
        /// for `num_epochs` passes, `batch_size` rows at a time (last batch may be short).
        /// </summary>
        private IEnumerable<(NDArray, NDArray, int)> batch_iter(NDArray inputs, NDArray outputs, int batch_size, int num_epochs)
        {
            var num_batches_per_epoch = (len(inputs) - 1) / batch_size + 1;
            var total_batches = num_batches_per_epoch * num_epochs;
            foreach (var epoch in range(num_epochs))
            {
                foreach (var batch_num in range(num_batches_per_epoch))
                {
                    var start_index = batch_num * batch_size;
                    var end_index = Math.Min((batch_num + 1) * batch_size, len(inputs));
                    if (end_index <= start_index)
                        break;
                    yield return (inputs[new Slice(start_index, end_index)], outputs[new Slice(start_index, end_index)], total_batches);
                }
            }
        }

        /// <summary>
        /// Downloads and extracts the DBpedia CSV data (full set or subset), and,
        /// when ImportGraph is set, downloads the exported word_cnn.meta graph,
        /// discarding stale cached copies.
        /// </summary>
        public void PrepareData()
        {
            if (UseSubset)
            {
                var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/data/dbpedia_subset.zip";
                Web.Download(url, dataDir, "dbpedia_subset.zip");
                Compress.UnZip(Path.Combine(dataDir, "dbpedia_subset.zip"), Path.Combine(dataDir, "dbpedia_csv"));
            }
            else
            {
                string url = "https://github.com/le-scientifique/torchDatasets/raw/master/dbpedia_csv.tar.gz";
                Web.Download(url, dataDir, dataFileName);
                Compress.ExtractTGZ(Path.Join(dataDir, dataFileName), dataDir);
            }

            if (ImportGraph)
            {
                // download graph meta data
                var meta_file = "word_cnn.meta";
                var meta_path = Path.Combine("graph", meta_file);
                if (File.GetLastWriteTime(meta_path) < new DateTime(2019, 05, 11))
                {
                    // delete old cached file which contains errors
                    Console.WriteLine("Discarding cached file: " + meta_path);
                    File.Delete(meta_path);
                }
                var url = "https://raw.githubusercontent.com/SciSharp/TensorFlow.NET/master/graph/" + meta_file;
                Web.Download(url, "graph", meta_file);
            }
        }
    }
}

test/TensorFlowNET.Examples/TextProcess/TextClassificationTrain.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
using TensorFlowNET.Examples.Utility;
1515
using static Tensorflow.Python;
1616

17-
namespace TensorFlowNET.Examples.CnnTextClassification
17+
namespace TensorFlowNET.Examples
1818
{
1919
/// <summary>
2020
/// https://github.com/dongjun-Lee/text-classification-models-tf

test/TensorFlowNET.UnitTest/ExamplesTests/ExamplesTest.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
using Microsoft.VisualStudio.TestTools.UnitTesting;
55
using Tensorflow;
66
using TensorFlowNET.Examples;
7-
using TensorFlowNET.Examples.CnnTextClassification;
87

98
namespace TensorFlowNET.ExamplesTests
109
{

0 commit comments

Comments
 (0)