forum.alglib.net http://forum.alglib.net/
Neural Classifier not performing as expected http://forum.alglib.net/viewtopic.php?f=2&t=3794
Author: sagh0901 [ Sun Mar 05, 2017 6:19 pm ]
Post subject: Neural Classifier not performing as expected
Hello Admins, I am trying to understand the framework for neural networks. I wrote a sample program to predict whether a number is prime or not. I am having a hard time figuring out how many hidden layers, and how many neurons in each, to choose, so I am increasing them incrementally with each trial. Although this network is a simple classifier, it has a hard time classifying the output. I generalized the sample given in the documentation so that it maps onto other programming libraries; code that is specific to C# would restrict me from moving it to another platform if needed, so I did not use MLPTrainer. I did manage to create a network with my inputs in the code below, but I am failing to get the desired output. Here is my C# code:

Code:
using System;
using System.Diagnostics;
using static alglib;   // brings mlpcreatec*, mlptrainlm, multilayerperceptron, etc. into scope
// (the methods below live in a single Program class; the class/namespace wrapper is omitted here)

static void Main(string[] args)
{
    int trainSize = 800;
    int testStart = trainSize + 1;
    int testLimit = 1000;
    multilayerperceptron network;
    double[,] xy = GetDataSet(trainSize);

    Console.WriteLine("P0 : 0, Not Prime");
    Console.WriteLine("P1 : 1, Prime");

    CreateNeuralNet(1, 5, 0, 2, out network);   // 1 input, 5 hidden neurons, 2 output classes
    TrainData(xy, network, 0.001, 5);

    Console.WriteLine("Number\tClassification(P0,P1)");
    for (int i = testStart; i < testLimit; i++)
    {
        double[] x = new double[] { i };
        double[] y = new double[2];
        alglib.mlpprocess(network, x, ref y);
        Console.WriteLine("{0}\t{1}", i, alglib.ap.format(y, 1));
    }
    Console.ReadKey();
}

private static bool CreateNeuralNet(int inputLayerNeuronsCount, int firstHiddenLayerNeuronsCount,
    int secondHiddenLayerNeuronsCount, int outputLayerNeuronsCount, out multilayerperceptron neuralNet)
{
    if (firstHiddenLayerNeuronsCount < 1 && secondHiddenLayerNeuronsCount < 1)
        Create0(inputLayerNeuronsCount, outputLayerNeuronsCount, out neuralNet);
    else if (secondHiddenLayerNeuronsCount < 1)
        Create1(inputLayerNeuronsCount, firstHiddenLayerNeuronsCount, outputLayerNeuronsCount, out neuralNet);
    else
        Create2(inputLayerNeuronsCount, firstHiddenLayerNeuronsCount, secondHiddenLayerNeuronsCount,
                outputLayerNeuronsCount, out neuralNet);

    string lastError;
    bool created = NetworkCreated(out lastError, neuralNet);
    if (!created)
        Console.WriteLine("Error creating NEURAL NETWORK ==> createNeuralNet: {0}", lastError);
    return created;
}

private static void Create0(int inputLayerNeuronsCount, int outputLayerNeuronsCount,
    out multilayerperceptron neuralNet)
{
    mlpcreatec0(inputLayerNeuronsCount, outputLayerNeuronsCount, out neuralNet);
}

private static void Create1(int inputLayerNeuronsCount, int firstHiddenLayerNeuronsCount,
    int outputLayerNeuronsCount, out multilayerperceptron neuralNet)
{
    mlpcreatec1(inputLayerNeuronsCount, firstHiddenLayerNeuronsCount, outputLayerNeuronsCount, out neuralNet);
}

private static void Create2(int inputLayerNeuronsCount, int firstHiddenLayerNeuronsCount,
    int secondHiddenLayerNeuronsCount, int outputLayerNeuronsCount, out multilayerperceptron neuralNet)
{
    mlpcreatec2(inputLayerNeuronsCount, firstHiddenLayerNeuronsCount, secondHiddenLayerNeuronsCount,
                outputLayerNeuronsCount, out neuralNet);
}

private static void TrainData(double[,] inputData, multilayerperceptron network, double decay, int restarts)
{
    double rmsError = 0;
    int codResp, historial = inputData.GetLength(0);   // number of training points
    mlpreport infoEntren;
    DateTime startTime = DateTime.Now;
    var watch = Stopwatch.StartNew();

    Console.WriteLine("Starting Optimization of NEURAL NETWORK ...");
    Console.WriteLine("Wait a few minutes ... depending on the amount of history involved.");
    Console.WriteLine("...///...");

    int numEntras, numSals, numweights;
    mlpproperties(network, out numEntras, out numSals, out numweights);

    // Levenberg-Marquardt for small networks, L-BFGS for large ones
    if (numweights < 500)
        mlptrainlm(network, inputData, historial, decay, restarts, out codResp, out infoEntren);
    else
        mlptrainlbfgs(network, inputData, historial, decay, restarts, 0.01, 0, out codResp, out infoEntren);

    if (codResp == 2 || codResp == 6)   // treat these info codes as successful training
        rmsError = mlprmserror(network, inputData, historial);

    DateTime endTime = DateTime.Now;
    watch.Stop();
    Console.WriteLine("{0}\t{1}\t{2}", "NGrad", "NHess", "NCholesky");
    Console.WriteLine("{0}\t{1}\t{2}", infoEntren.ngrad, infoEntren.nhess, infoEntren.ncholesky);
    Console.WriteLine("{0}\t{1}\t{2}", "codResp", "RMS Error", "trainingCycles");
    Console.WriteLine("{0}\t{1}\t{2}", codResp, rmsError, restarts);
    Console.WriteLine("{0}\t{1}\t{2}", "Time Taken", "Start Time", "End Time");
    Console.WriteLine("{0}\t{1}\t{2}", watch.ElapsedMilliseconds, startTime, endTime);
}

private static double[,] GetDataSet(int trainSize)
{
    // Classifier training set format: first column is the input, last column is the class index (0 or 1)
    double[,] xy = new double[trainSize, 2];
    for (int i = 0; i < trainSize; i++)
    {
        xy[i, 0] = i + 1;
        xy[i, 1] = IsPrime(i + 1);
    }
    return xy;
}

private static bool NetworkCreated(out string lastError, multilayerperceptron neuralNet)
{
    bool resp;
    try
    {
        int ninputs, noutputs, nweights;
        mlpproperties(neuralNet, out ninputs, out noutputs, out nweights);
        resp = ninputs > 0 && noutputs > 0;
        lastError = null;
    }
    catch (Exception ex)
    {
        lastError = ex.Message;
        throw;
    }
    return resp;
}

private static double IsPrime(int num)
{
    bool primo = num == 2 || num == 3;   // 1 is not prime
    if (!primo && num > 3)
        for (int k = 2; k <= Math.Sqrt(num) + 1; k++)
        {
            primo = num % k != 0;
            if (!primo)
                break;
        }
    return Convert.ToInt32(primo);
}

The output I am getting is the same classification for every input I test. Can someone point out the mistake in this network and advise me on when I should use hidden-layer neurons?
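For reference, this is how I am interpreting the classifier output (my understanding from the documentation is that a network created with mlpcreatec1 has a SOFTMAX output layer, so the two values returned by mlpprocess are class posterior probabilities that sum to 1 - please correct me if that is wrong):

Code:
// 'network' is the trained classifier from the code above; 97 is just an example test number
double[] x = new double[] { 97 };
double[] y = new double[2];
alglib.mlpprocess(network, x, ref y);
// y[0] ~ P(not prime), y[1] ~ P(prime)
int predictedClass = y[1] > y[0] ? 1 : 0;
Console.WriteLine("P0={0:F3}  P1={1:F3}  predicted class={2}", y[0], y[1], predictedClass);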
Author: Sergey.Bochkanov [ Thu Mar 09, 2017 4:06 pm ]
Post subject: Re: Neural Classifier not performing as expected
Detecting primes with a neural network is a useless idea - there is no chance for the network to develop internal mechanisms that would allow it to distinguish prime numbers from non-prime ones. The best you can hope for is that it memorizes all the primes in the dataset. Additionally, the classifier has problems with your encoding of the input (unpreprocessed linear values). It is hard to explain in a few words; I can only say that it is a bad idea to model such a complex dependency (prime or not) with just one scalar value. So, from my point of view, the classifier performs as expected when you load it with an incorrect task: it returns just the mean expected value, which tells you that ~90% of your numbers are composite and ~10% are prime.
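To illustrate what "more than one scalar value" means: instead of feeding the raw number, one could feed several derived features per number, for example its binary digits. This is only a sketch with made-up helper names (EncodeNumber, GetEncodedDataSet and NBITS are not part of ALGLIB or of the code above), and it does not change my conclusion - at best the network will memorize the primes it has already seen:

Code:
// Hypothetical preprocessing, for illustration only
private const int NBITS = 10;                     // enough bits for numbers below 1024

private static double[] EncodeNumber(int num)
{
    double[] features = new double[NBITS];
    for (int b = 0; b < NBITS; b++)
        features[b] = (num >> b) & 1;             // b-th binary digit, 0 or 1
    return features;
}

// Training set: NBITS input columns + 1 class column (0 = composite, 1 = prime)
private static double[,] GetEncodedDataSet(int trainSize)
{
    double[,] xy = new double[trainSize, NBITS + 1];
    for (int i = 0; i < trainSize; i++)
    {
        double[] f = EncodeNumber(i + 1);
        for (int b = 0; b < NBITS; b++)
            xy[i, b] = f[b];
        xy[i, NBITS] = IsPrime(i + 1);
    }
    return xy;
}

// The classifier would then be created with NBITS inputs instead of one:
// mlpcreatec1(NBITS, 5, 2, out network);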
Page 1 of 1 | All times are UTC |
Powered by phpBB © 2000, 2002, 2005, 2007 phpBB Group http://www.phpbb.com/