A binary neural network based on random neurons and logic functions.

Today, among the many machine-learning algorithms, neural networks (NNs) are in wide use. Their main advantage over other machine-learning methods is that they can uncover fairly deep, often non-obvious patterns in the data. The classical paradigm among NNs is the fully connected network trained by error backpropagation.


Fully connected NNs with error backpropagation have many advantages, chief among them the rather high classification accuracy that rests on the "strong" mathematical apparatus underlying their operation. On the other hand, there are drawbacks, the most important being the tendency to overfit: the network adapts to local peculiarities of the training sample and loses its ability to generalize. This reduces its efficiency and usefulness as a classifier or predictor on arbitrary data outside the training set.


This article considers a variant of a fully connected binary NN (a binary variable acts as the target value of the network) with a logic function at the output, in which there is no error-backpropagation mechanism at all. At the training stage, when the weight coefficients of the neurons are formed, a single random draw of the coefficients replaces the many iterative computations performed for every training example, which shortens the training time significantly. Another fundamental advantage of this approach is the absence of the overfitting problem.


The figure shows the block diagram of the binary random logic neural network.



Structurally, the binary random NN consists of three consecutive layers, numbered in the order information passes from input to output:


  1. the input layer, which recalculates every component of the input vector through its empirical distribution function, mapping it into the interval (0, 1);
  2. the hidden layer of random neurons, each of which is a randomly generated separating hyperplane producing a binary output;
  3. the output layer: a logic function that turns the binary outputs of the neurons into the response of the network.

Let us consider the layers in more detail.


The first layer maps every input value into the interval from 0 to 1. The transformation is based on a well-known fact from probability theory.


Namely: if a random variable x has probability density f(x), then the variable s=F(x) is distributed uniformly on the interval (0,1), where F(x) is the cumulative distribution function of x, i.e. f(x)=dF(x)/dx.



Thus the input vector X is transformed into a vector S whose components are uniformly distributed on (0,1). Geometrically, every N-dimensional input X becomes a point of the N-dimensional unit cube, and the training points fill this cube uniformly along every axis regardless of the original scale of the data. For example, if x is exponentially distributed with F(x) = 1 - exp(-x), then s = 1 - exp(-x) is uniform on (0,1).



In practice the distribution functions F(x) of the inputs Xi (i=1,2,…,N) are of course unknown, so each of them is replaced by its empirical estimate Fi(x), built from the i-th component of the training sample as a cumulative histogram of the observed values:



In the implementation below the bin index of a value x is k = (x - x0) / a, where a is the bin width and x0 is the smallest value of the component observed in the sample; Fi(x) is then the accumulated fraction of sample values in the bins up to k, with linear interpolation between occupied bins.
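To make the estimate concrete, here is a toy sketch (not part of the network's code: the simplified unbinned form of the empirical CDF, with made-up sample values) showing how a raw value is mapped into (0,1):

using System;
using System.Linq;

public static class EmpiricalCdfDemo
{
  // Simplified (unbinned) empirical CDF: the fraction of sample values
  // that do not exceed x. DistributionFunc in the listing below implements
  // a binned, piecewise-linear variant of the same estimate.
  public static double EmpiricalCdf(double[] pSample, double x)
  {
    return (double)pSample.Count(v => v <= x) / pSample.Length;
  }

  public static void Main()
  {
    double[] pSample = { 1.0, 2.0, 2.0, 5.0 }; // hypothetical observed values of one input
    Console.WriteLine(EmpiricalCdf(pSample, 2.0)); // 0.75
    Console.WriteLine(EmpiricalCdf(pSample, 5.0)); // 1
  }
}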


The vector S then enters the second layer. Every neuron of this layer is an N-dimensional separating hyperplane defined by a weight vector W={w0, w1, w2, w3, …, wN}; for an input S={s1, s2, s3, …, sN} the neuron computes the sign of the weighted sum:

y = Sign(w0 + w1·s1 + w2·s2 + … + wN·sN)



In other words, each neuron answers a single binary question: on which side of its separating hyperplane does the input point lie. Taken together, the neurons cut the unit cube into cells, and every cell gets its own binary code.
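In code a neuron of the second layer is nothing more than a dot product followed by taking the sign; the snippet below mirrors GetTermPoint from the listing (the weights and the input point are made up for the example):

using System;

public static class RandomNeuronDemo
{
  // Output of one random neuron: on which side of the hyperplane
  // w0 + w1*s1 + ... + wN*sN = 0 does the input point S lie?
  public static int NeuronOutput(double[] pWeights, double dW0, double[] pPoint)
  {
    double dSum = dW0;
    for (int i = 0; i < pPoint.Length; i++) dSum += pWeights[i] * pPoint[i];
    return Math.Sign(dSum); // +1 or -1 (0 only exactly on the hyperplane)
  }

  public static void Main()
  {
    double[] pWeights = { 0.3, -0.2 }; // hypothetical normal vector
    double dW0 = -0.05;                // hypothetical free term
    double[] pPoint = { 0.7, 0.4 };    // a point of the unit square
    Console.WriteLine(NeuronOutput(pWeights, dW0, pPoint)); // prints 1: 0.21 - 0.08 - 0.05 > 0
  }
}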


The hyperplane itself is the locus of points of the cube where the weighted sum of the neuron vanishes, w0 + w1·s1 + … + wN·sN = 0, which is the N-dimensional analogue of the equation of a straight line in the plane.


The output of a neuron is therefore +1 on one side of its hyperplane and -1 on the other (the boundary value 0 practically never occurs on real-valued data). In the listing below these outputs are subsequently mapped to the binary values 1 and 0.



The weight vector W of each neuron is generated randomly, and only once. An N-dimensional point of the unit cube is drawn at random, a1, a2, a3, …, aN with 0<ai<1, together with a random normal vector, and the hyperplane is taken to pass through this point with this normal, which fixes the free term:

w0 = -(n1·a1 + n2·a2 + n3·a3 + … + nN·aN)

where n1, n2, n3, …, nN are the components of the random normal vector, which serve as the weights for the components of S (in the code they are drawn uniformly from -0.5 to 0.5); w0 is the free term completing the weight vector to length N+1. A hyperplane built this way always crosses the unit cube where all the transformed data points live.


The number of such random hyperplanes, M, is a parameter of the network. The more hyperplanes there are, the finer they slice the unit cube and the more detail the network can distinguish, at the price of a larger logic function at the output. Since the weights are never adjusted after generation, training the first two layers amounts to this single random draw.
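The generation itself takes only a few lines; this sketch repeats the logic of CreateDiscrim from the listing below (the names are illustrative):

using System;

public static class HyperplaneDemo
{
  // Generates a random separating hyperplane in the N-dimensional unit cube:
  // a random normal vector (components in -0.5...0.5) and a random point
  // (components in 0...1) the plane passes through; the free term is
  // w0 = -(n1*a1 + ... + nN*aN) and is stored at index N, as in the listing.
  public static double[] RandomHyperplane(int N, Random pRnd)
  {
    double[] pW = new double[N + 1];
    for (int i = 0; i < N; i++)
    {
      double dNormal = pRnd.NextDouble() - 0.5;
      double dPoint = pRnd.NextDouble();
      pW[i] = dNormal;
      pW[N] -= dNormal * dPoint;
    }
    return pW;
  }
}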


The third, output layer of the network receives the M binary outputs of the neurons. Each training example is thus encoded by an M-bit term: the Sign outputs (+1/-1) are mapped to the binary values 1 and 0. The terms produced by examples with target value 1 and the terms produced by examples with target value 0 form two sets, from which a logic function is then constructed.


The function must return 1 on every term of the first set and 0 on every term of the second; on terms that never occurred during training its value is unconstrained ("don't care"). It is precisely on these unseen terms that the constructed function generalizes, which is why minimization matters: the shorter the minimized terms, the more unseen inputs each of them covers.


The logic function is built and minimized by the Quine-McCluskey method, described in detail in a previous article (https://habr.com/ru/post/424517). Compared to that publication the implementation has been reworked: it accepts a separate set of negative terms, and input positions absorbed during gluing are marked with the "don't care" symbol «*». The full source code is given below.
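As a small usage illustration of the minimizer from the listing, the two term sets can be fed to it directly (the bit patterns here are made up for the example):

using System;
using System.Collections.Generic;

public static class QmcDemo
{
  public static void Main()
  {
    // Hypothetical 3-input example: the function must return 1 on {011, 111}
    // and 0 on {000, 100}; all other inputs are "don't care".
    var qmc = new LogicFunc.Quine_McCluskey();
    qmc.Start(
      new List<byte[]> { new byte[] { 0, 1, 1 }, new byte[] { 1, 1, 1 } },  // terms with target 1
      new List<byte[]> { new byte[] { 0, 0, 0 }, new byte[] { 1, 0, 0 } }); // terms with target 0
    // Result holds the minimized DNF, ResultNeg the minimized DNF of the negation
    Console.WriteLine(qmc.Result.Calculate(new byte[] { 0, 1, 0 })); // response on an unseen input
  }
}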


C# source code (implementation of the network)
using System;
using System.Collections.Generic;
using System.Linq;

namespace RndNeuroNet
{
  #region Binary random neural network
  /// <summary>
  /// Binary neural network built from random neurons and a logic function at the output
  /// </summary>
  public class RandomNeuralNetwork
  {
    private DistributionFunc[] pDistributionDataFuncs;
    private RandomDiscrimBase pNeuroLogicDiscrim;

    /// <summary>
    /// Default constructor
    /// </summary>
    public RandomNeuralNetwork()
    {
    }

    /// <summary>
    /// Builds and trains the network on the given sample
    /// </summary>
    /// <param name="pSourceData">input vectors of the training sample</param>
    /// <param name="pTargetFeature">binary target values</param>
    public void CreateNetwork(double[][] pSourceData, bool[] pTargetFeature,
      int iFromPos, int iTargetPos, int iWindowSize,
      int iAmountLogicDiscrims, int iLogicDiscrimsMaxTrainRestarts = 1,
      int iMaxAmountDistributionPoints = 1000, bool bIsNorming = true)
    {
      // Copy the training slice of the source data
      double[][] pSrcArray = new double[iTargetPos - iFromPos][];
      bool[] pDstArray = new bool[iTargetPos - iFromPos];
      for (int i = 0, t = iFromPos; t < iTargetPos; t++, i++)
      {
        // Normalize the input window
        pSrcArray[i] = (bIsNorming ? NormingE(pSourceData[t], iWindowSize, iWindowSize) : pSourceData[t]);
        pDstArray[i] = pTargetFeature[t];
      }
      // Build the empirical distribution function of every input component
      pDistributionDataFuncs = new DistributionFunc[iWindowSize];
      for (int i = 0; i < iWindowSize; i++)
      {
        // Collect the i-th component across the training sample
        double[] pTrend2Recalc = pSrcArray.Select(p => p != null ? p[i] : 0).ToArray();
        // Build its empirical distribution function
        pDistributionDataFuncs[i] = new DistributionFunc(pTrend2Recalc, iMaxAmountDistributionPoints);
        // Recalculate the component through the distribution function
        pTrend2Recalc = pDistributionDataFuncs[i].RecalcTrend(pTrend2Recalc);
        // Write the transformed values back into the sample
        for (int t = 0; t < pSrcArray.Length; t++) pSrcArray[t][i] = pTrend2Recalc[t];
      }
      // Create and train the layer of random neurons with the logic function
      pNeuroLogicDiscrim = (RandomDiscrimBase)new RandomLogicDiscrim();
      pNeuroLogicDiscrim.CreateDiscrim(pSrcArray, pDstArray, 0, pSrcArray.Length,
        iWindowSize, iAmountLogicDiscrims, iLogicDiscrimsMaxTrainRestarts);
    }

    public double CalcResponce(double[] pSourceData, int iWindowSize = 0, bool bIsNorming = true)
    {
      // Default: the window covers the whole input vector
      if (iWindowSize <= 0) iWindowSize = pSourceData.Length;
      // Normalize the input window
      pSourceData = (bIsNorming ? NormingE(pSourceData, iWindowSize, iWindowSize) : pSourceData);
      // Recalculate the inputs through the stored distribution functions
      for (int i = 0; i < iWindowSize; i++)
      {
        pSourceData[i] = pDistributionDataFuncs[i].RecalcTrend(pSourceData[i]);
      }
      // Response of the random-neuron layer with the logic function
      return pNeuroLogicDiscrim.GetPrognos(pSourceData, iWindowSize);
    }

    /// <summary>
    /// Normalizes a window of the series by dividing it by its mean value
    /// </summary>
    /// <param name="pTrend">source series</param>
    /// <param name="iTargetPosition">position the window ends at</param>
    /// <param name="iVectorSize">window size</param>
    /// <returns>normalized window</returns>
    public static double[] NormingE(double[] pTrend, int iTargetPosition,
      int iVectorSize, double[] pResultVector = null, bool bIsMinusOffset = false)
    {
      if (pResultVector == null) pResultVector = new double[iVectorSize];
      double dNorming = 0;
      for (int i = 0, t = iTargetPosition - iVectorSize; i < iVectorSize; i++, t++)
      {
        dNorming += pTrend[t];
      }
      dNorming /= iVectorSize;
      double dOffset = (bIsMinusOffset ? 1 : 0);
      for (int i = 0, t = iTargetPosition - iVectorSize; i < iVectorSize; i++, t++)
      {
        pResultVector[i] = (pTrend[t] / dNorming) - dOffset;
      }
      return pResultVector;
    }
  }

  /// <summary>
  /// Empirical distribution function of one input component
  /// </summary>
  public class DistributionFunc
  {
    private class DataCont
    {
      public int Counter;
      public double SumSrc;
      public double ValueP;
    }

    private readonly SortedDictionary<int, DataCont> pDistribution =
      new SortedDictionary<int, DataCont>();
    private int m_iMaxAmountDistributionPoints;
    private double dAreaMin, dAreaMax;

    /// <summary>
    /// Default constructor
    /// </summary>
    public DistributionFunc()
    {
    }

    /// <summary>
    /// Constructor that immediately builds the distribution from the given series
    /// </summary>
    public DistributionFunc(double[] pTrend, int iMaxAmountDistributionPoints = 1000)
    {
      CreateDistribution(pTrend, iMaxAmountDistributionPoints);
    }

    // Builds the distribution from the given series
    public void CreateDistribution(double[] pTrend, int iMaxAmountDistributionPoints = 1000)
    {
      m_iMaxAmountDistributionPoints = iMaxAmountDistributionPoints;
      dAreaMin = double.MaxValue; dAreaMax = 0;
      // Determine the range of the values (zeros are skipped as missing data)
      for (int t = 0; t < pTrend.Length; t++)
      {
        double dTrendVal = pTrend[t];
        if (dTrendVal == 0) continue;
        dAreaMin = Math.Min(dAreaMin, dTrendVal);
        dAreaMax = Math.Max(dAreaMax, dTrendVal);
      }
      // Fill the histogram bins
      for (int t = 0; t < pTrend.Length; t++)
      {
        double dTrendVal = pTrend[t];
        if (dTrendVal == 0) continue;
        int iIndex = (int)(((dTrendVal - dAreaMin) / (dAreaMax - dAreaMin)) * m_iMaxAmountDistributionPoints);
        DataCont pKeyVal = null;
        pDistribution.TryGetValue(iIndex, out pKeyVal);
        if (pKeyVal == null) pDistribution.Add(iIndex, pKeyVal = new DataCont());
        pKeyVal.Counter++;
        pKeyVal.SumSrc += dTrendVal;
      }
      // Convert the bin counters into cumulative probabilities
      double dSumP = 0;
      foreach (KeyValuePair<int, DataCont> dataValue in pDistribution)
      {
        dataValue.Value.SumSrc /= dataValue.Value.Counter;
        dSumP += (double)dataValue.Value.Counter / (double)pTrend.Length;
        dataValue.Value.ValueP = dSumP;
      }
    }

    // Recalculates a whole series through the distribution function
    public double[] RecalcTrend(double[] pTrend, double[] pNewTrend = null)
    {
      if (pNewTrend == null) pNewTrend = new double[pTrend.Length];
      for (int t = 0; t < pTrend.Length; t++)
      {
        pNewTrend[t] = RecalcTrend(pTrend[t]);
      }
      return pNewTrend;
    }

    // Recalculates a single value through the distribution function
    public double RecalcTrend(double dTrendVal)
    {
      int iIndex = (int)(((dTrendVal - dAreaMin) / (dAreaMax - dAreaMin)) * m_iMaxAmountDistributionPoints);
      if (iIndex < 0) iIndex = pDistribution.Keys.Min();
      if (iIndex > pDistribution.Keys.Max()) iIndex = pDistribution.Keys.Max();
      if (pDistribution.Keys.Contains(iIndex))
      {
        dTrendVal = pDistribution[iIndex].ValueP;
      }
      else
      {
        int iDnIndex = pDistribution.Keys.Max<int>(p => p < iIndex ? p : 0);
        double dDnVal = pDistribution[iDnIndex].ValueP;
        int iUpIndex = pDistribution.Keys.Min<int>(p => p > iIndex ? p : int.MaxValue);
        double dUpVal = pDistribution[iUpIndex].ValueP;
        dTrendVal = (dUpVal - dDnVal) * ((double)(iIndex - iDnIndex) / (double)(iUpIndex - iDnIndex)) + dDnVal;
      }
      return dTrendVal;
    }
  }

  /// <summary>
  /// Base class for the layer of random discriminators (neurons)
  /// </summary>
  public class RandomDiscrimBase
  {
    public static int GetTermPoint(double[] pNeuronWeights,
      double[] pData, int iDataCortegeLength = 0)
    {
      double dVal = pNeuronWeights[iDataCortegeLength]; // the free term w0
      for (int i = 0; i < iDataCortegeLength; i++) dVal += (pNeuronWeights[i] * pData[i]);
      return Math.Sign(dVal);
    }

    public static int[] GetTermPoint(IList<double[]> pNeuroSurfaces,
      double[] pData, int iDataCortegeLength = 0)
    {
      if (iDataCortegeLength <= 0) iDataCortegeLength = pData.Length;
      int[] pLogicData = new int[pNeuroSurfaces.Count];
      for (int n = 0; n < pNeuroSurfaces.Count; n++)
      {
        pLogicData[n] = GetTermPoint(pNeuroSurfaces[n], pData, iDataCortegeLength);
      }
      return pLogicData;
    }

    /// <summary>
    /// Creates (trains) the discriminators
    /// </summary>
    public virtual void CreateDiscrim(double[][] pSrcData, bool[] pDstData,
      int iFromPos, int iToPos, int iDataCortegeLength, int iAmountLogicDiscrims,
      int iLogicDiscrimsMaxTrainRestarts)
    {
      throw new MissingMethodException("Must be implemented in a derived class");
    }

    /// <summary>
    /// Prediction
    /// Returns the network response in the range -1 ... +1
    /// </summary>
    public virtual double GetPrognos(double[] pTermPointData, int iDataCortegeLength = -1)
    {
      throw new MissingMethodException("Must be implemented in a derived class");
    }
  }

  /// <summary>
  /// Layer of random neurons (hyperplanes) with a logic function at the output
  /// </summary>
  public class RandomLogicDiscrim : RandomDiscrimBase
  {
    private IList<double[]> pLogicDiscrims = null;
    public LogicFunc.Quine_McCluskey neuronLogic = null;

    /// <summary>
    /// Default constructor
    /// </summary>
    public RandomLogicDiscrim()
    {
    }

    /// <summary>
    /// Creates and trains the discriminators
    /// </summary>
    public override void CreateDiscrim(double[][] pSrcArray, bool[] pTargetData,
      int iFromPos, int iToPos, int iDataCortegeLength, int iAmountLogicDiscrims,
      int iLogicDiscrimsMaxTrainRestarts)
    {
      Random RndFabric = new Random(Environment.TickCount);
      // Restart counter
      int iMaxCounter = 0;
      // Reset the results of a previous training run
      neuronLogic = null;
      pLogicDiscrims = new List<double[]>();
      do
      {
        // Generate random separating hyperplanes
        // until there are iAmountLogicDiscrims of them
        while (pLogicDiscrims.Count < iAmountLogicDiscrims)
        {
          double[] pNewSurface = new double[iDataCortegeLength + 1];
          for (int i = 0; i < iDataCortegeLength; i++)
          {
            // Random component of the normal vector
            double dNormal = RndFabric.NextDouble() - 0.5;
            // Random coordinate of a point inside the unit cube (0...1)
            double dPoint = RndFabric.NextDouble();
            // Store the normal component
            pNewSurface[i] = dNormal;
            // Accumulate the free term
            pNewSurface[iDataCortegeLength] -= dNormal * dPoint;
          }
          pLogicDiscrims.Add(pNewSurface);
        }
        // Encode every training example by the outputs of the neurons
        ICollection<byte[]> TermInputUp = new LinkedList<byte[]>();
        ICollection<byte[]> TermInputDn = new LinkedList<byte[]>();
        for (int t = iFromPos; t < iToPos; t++)
        {
          byte[] pSrcData = GetTermPoint(pLogicDiscrims, pSrcArray[t], iDataCortegeLength)
            .Select(p => (byte)(p > 0 ? 1 : 0)).ToArray();
          if (pTargetData[t])
            TermInputUp.Add(pSrcData);
          else
            TermInputDn.Add(pSrcData);
        }
        // Build and minimize the logic function
        neuronLogic = new LogicFunc.Quine_McCluskey();
        neuronLogic.Start(TermInputUp, TermInputDn);
        // Count how many minimized terms actually use each neuron;
        // neurons that occur only as "don't care" are useless and get replaced
        if ((iMaxCounter + 1) < iLogicDiscrimsMaxTrainRestarts)
        {
          Dictionary<int, int> TermStars = new Dictionary<int, int>(iAmountLogicDiscrims);
          for (int i = 0; i < iAmountLogicDiscrims; i++) TermStars.Add(i, 0);
          foreach (byte[] pTerm in neuronLogic.Result.Terms)
          {
            for (int i = 0; i < iAmountLogicDiscrims; i++)
            {
              if (pTerm[i] != LogicFunc.LogicFunction.cStarSymb) TermStars[i]++;
            }
          }
          foreach (byte[] pTerm in neuronLogic.ResultNeg.Terms)
          {
            for (int i = 0; i < iAmountLogicDiscrims; i++)
            {
              if (pTerm[i] != LogicFunc.LogicFunction.cStarSymb) TermStars[i]++;
            }
          }
          foreach (KeyValuePair<int, int> p in TermStars)
          {
            // The neuron is used by neither function - drop it
            if (p.Value <= 0) pLogicDiscrims[p.Key] = null;
          }
          pLogicDiscrims = pLogicDiscrims.Where(p => p != null).ToList();
        }
      } while ((pLogicDiscrims.Count < iAmountLogicDiscrims) && (iMaxCounter++ < iLogicDiscrimsMaxTrainRestarts));
    }

    /// <summary>
    /// Prediction: returns +1, -1 or 0 (refusal) for the given input vector
    /// </summary>
    /// <param name="pTermPointData">input vector</param>
    /// <returns>network response</returns>
    public override double GetPrognos(double[] pTermPointData, int iDataCortegeLength = -1)
    {
      if (iDataCortegeLength <= 0) iDataCortegeLength = pTermPointData.Length;
      byte[] pSrcData = GetTermPoint(pLogicDiscrims, pTermPointData, iDataCortegeLength).Select(p => (byte)(p > 0 ? 1 : 0)).ToArray();
      int iPrognos = 0;
      iPrognos += (neuronLogic.Result.Calculate(pSrcData)    ? +1 : -1);
      iPrognos += (neuronLogic.ResultNeg.Calculate(pSrcData) ? -1 : +1);
      return (iPrognos / 2);
    }
  }
  #endregion
}

namespace LogicFunc
{
  #region Minimization of logic functions
  /// <summary>
  /// Base class of a logic function
  /// </summary>
  public abstract class LogicFunction
  {
    // "" 
    public const byte cStarSymb = 2;

    // Terms of the function
    public readonly ICollection<byte[]> Terms = new LinkedList<byte[]>();
    // Evaluates the function on a bool[] input
    public abstract bool Calculate(bool[] X);
    // Evaluates the function on a char[] ('0'/'1') input
    public abstract bool Calculate(char[] X);
    // Evaluates the function on a byte[] (0/1) input
    public abstract bool Calculate(byte[] X);
  }

  /// <summary>
  /// Disjunctive normal form (DNF)
  /// </summary>
  public class Dnf : LogicFunction
  {
    public static bool Calculate(byte[] X, byte[] term)
    {
      bool bResult = true;
      for (int i = 0; i < term.Length; i++)
      {
        if ((term[i] == cStarSymb) || (term[i] == X[i])) continue;
        bResult = false;
        break;
      }
      return bResult;
    }

    public override bool Calculate(byte[] X)
    {
      bool bResult = false;
      foreach (byte[] term in Terms)
      {
        bool bTermVal = true;
        for (int i = 0; i < term.Length; i++)
        {
          if ((term[i] >= cStarSymb) || (term[i] == X[i])) continue;
          bTermVal = false;
          break;
        }
        //bResult |= bTermVal;
        if (bTermVal)
        {
          bResult = true;
          break;
        }
      }
      return bResult;
    }

    public override bool Calculate(char[] X)
    {
      bool bResult = false;
      foreach (byte[] term in Terms)
      {
        bool bTermVal = true;
        for (int i = 0; i < term.Length; i++)
        {
          if ((term[i] >= cStarSymb) || (term[i] == (byte)(X[i] == '0' ? 0 : 1))) continue;
          bTermVal = false;
          break;
        }
        //bResult |= bTermVal;
        if (bTermVal)
        {
          bResult = true;
          break;
        }
      }
      return bResult;
    }

    public override bool Calculate(bool[] X)
    {
      bool bResult = false;
      foreach (byte[] term in Terms)
      {
        bool bTermVal = true;
        for (int i = 0; i < term.Length; i++)
        {
          if ((term[i] >= cStarSymb) || ((term[i] != 0) == X[i])) continue;
          bTermVal = false;
          break;
        }
        //bResult |= bTermVal;
        if (bTermVal)
        {
          bResult = true;
          break;
        }
      }
      return bResult;
    }
  }

  /// <summary>
  /// Prefix tree (trie) of terms
  /// </summary>
  public class TreeFuncTerm
  {
    // Terminal node marking the end of a term
    public class TreeNodeEnd { }
    // Shared terminal node used when per-term nodes are not required
    private readonly TreeNodeEnd pCommonTreeNodeEnd = new TreeNodeEnd();

    // Root node of the tree
    private readonly object[] rootNode = new object[3];
    // If true, every added term gets its own terminal node
    private readonly bool IsNewTreeNodeEndMode = false;
    // Rank (term length) of the tree
    private int _rang = 0;
    public int Rang
    {
      get { return _rang; }
    }
    // Current position of the term enumeration
    private int enumerationPos = 0;
    private object[][] enumerationBuf;
    // Term assembled at the current enumeration position
    private byte[] enumerationTerm;
    public byte[] EnumerationTerm
    {
      get { return enumerationTerm; }
    }
    // Terminal node at the current enumeration position
    private TreeNodeEnd enumerationNode;
    public TreeNodeEnd EnumerationNode
    {
      get { return enumerationNode; }
    }
    // Number of terms stored in the tree
    private UInt32 _count = 0;
    public UInt32 Count
    {
      get { return _count; }
    }

    // Constructor
    public TreeFuncTerm(bool bNewTreeNodeEndMode = false)
    {
      IsNewTreeNodeEndMode = bNewTreeNodeEndMode;
      Clear();
    }

    // Resets the tree
    public void Clear()
    {
      _count = 0;
      _rang = 0;
      enumerationPos = 0;
      enumerationBuf = null;
      enumerationTerm = null;
      enumerationNode = null;
      rootNode[0] = rootNode[1] = rootNode[2] = null;
    }

    // Initializes the enumeration of the stored terms
    public TreeNodeEnd EnumerationInit()
    {
      enumerationPos = 0;
      enumerationTerm = new byte[_rang];
      enumerationTerm[0] = 0;
      enumerationNode = null;
      enumerationBuf = new object[_rang][];
      enumerationBuf[0] = rootNode;
      // Move to the first stored term
      return EnumerationNextNode();
    }

    // Advances the enumeration to the next stored term
    public TreeNodeEnd EnumerationNextNode()
    {
      int iIsNext = (enumerationNode != null ? 1 : 0);
      enumerationNode = null;
      while ((enumerationNode == null) && (enumerationPos >= 0))
      {
        object pNextNode = null;
        int iSymb = enumerationTerm[enumerationPos] + iIsNext;
        for (object[] pNodes = enumerationBuf[enumerationPos]; iSymb < 3; iSymb++)
        {
          if ((pNextNode = pNodes[iSymb]) != null) break;
        }
        if (pNextNode == null)
        {
          // Nothing more at this level - go one level up
          enumerationPos--;
          iIsNext = 1;
        }
        else
        {
          enumerationTerm[enumerationPos] = (byte)iSymb;
          if (pNextNode is TreeNodeEnd)
          {
            // A terminal node is reached
            enumerationNode = (TreeNodeEnd)pNextNode;
          }
          else
          {
            // Descend one level deeper
            enumerationPos++;
            enumerationBuf[enumerationPos] = (object[])pNextNode;
            enumerationTerm[enumerationPos] = 0;
            iIsNext = 0;
          }
        }
      }
      return enumerationNode;
    }

    // Adds a term to the tree; returns its terminal node
    public TreeNodeEnd Add(byte[] term)
    {
      _rang = Math.Max(_rang, term.Length);
      object[] pCurrNode = rootNode;
      int iTermLength1 = term.Length - 1;
      for (int i = 0; i < iTermLength1; i++)
      {
        byte cSymb = term[i];
        object pNextNode = pCurrNode[cSymb];
        if (pNextNode == null)
        {
          pNextNode = new object[3];
          pCurrNode[cSymb] = pNextNode;
        }
        pCurrNode = (object[])pNextNode;
      }
      object pNewNode = pCurrNode[term[iTermLength1]];
      if (pNewNode == null)
      {
        pNewNode = (IsNewTreeNodeEndMode ? new TreeNodeEnd() : pCommonTreeNodeEnd);
        pCurrNode[term[iTermLength1]] = pNewNode;
        _count++;
      }
      return (TreeNodeEnd)pNewNode;
    }

    // Removes a term from the tree; returns its former terminal node
    public TreeNodeEnd Remove(byte[] term)
    {
      object[] pCurrNode = rootNode;
      int iTermLength1 = term.Length - 1;
      for (int i = 0; i < iTermLength1; i++)
      {
        pCurrNode = (object[])pCurrNode[term[i]];
        if (pCurrNode == null) break;
      }
      TreeNodeEnd pRemovedNode = null;
      if (pCurrNode != null)
      {
        // Locate the terminal node of the term
        pRemovedNode = (TreeNodeEnd)pCurrNode[term[iTermLength1]];
        if (pRemovedNode != null)
        {
          // Detach the terminal node
          pCurrNode[term[iTermLength1]] = null;
          // Update the term counter
          _count--;
        }
      }
      return pRemovedNode;
    }

    // Removes a collection of terms
    public void Remove(IEnumerable<byte[]> RemovedTerms)
    {
      if ((RemovedTerms == null) || (RemovedTerms.Count() == 0)) return;
      foreach (byte[] x1 in RemovedTerms)
      {
        // Each term is removed individually via Remove;
        // no preliminary Contains check is performed
        Remove(x1);
      }
    }

    // Checks whether the term is present in the tree
    public bool Contains(byte[] term)
    {
      object pCurrNode = rootNode;
      for (int i = 0; i < term.Length; i++)
      {
        pCurrNode = ((object[])pCurrNode)[term[i]];
        if (pCurrNode == null) break;
      }
      return ((pCurrNode != null) && (pCurrNode is TreeNodeEnd));
    }

    // Checks whether the stored terms (which may contain "don't care"
    // positions) cover the given term, i.e. evaluate to true on it
    public bool IsCalculateTrue(byte[] term)
    {
      return IsCalculateTrue(rootNode, term, 0);
    }

    // Recursive worker
    private static bool IsCalculateTrue(object[] pCurrNode,
      byte[] term, int iStartPos)
    {
      int iTermLength1 = term.Length - 1;
      while ((pCurrNode != null) && (iStartPos < iTermLength1))
      {
        byte cSymb = term[iStartPos++];
        if (cSymb != LogicFunction.cStarSymb)
        {
          pCurrNode = (object[])pCurrNode[cSymb];
        }
        else
        {
          if ((pCurrNode[0] != null) && (pCurrNode[1] != null))
          {
            if (IsCalculateTrue((object[])pCurrNode[1], term, iStartPos)) return true;
            pCurrNode = (object[])pCurrNode[0];
          }
          else
          {
            pCurrNode = (object[])(pCurrNode[0] != null ? pCurrNode[0] : pCurrNode[1]);
          }
        }
      }
      TreeNodeEnd pEndNode = null;
      if (pCurrNode != null)
      {
        byte cSymb = term[iTermLength1];
        if (cSymb != LogicFunction.cStarSymb)
        {
          pEndNode = (TreeNodeEnd)pCurrNode[cSymb];
        }
        else
        {
          pEndNode = (TreeNodeEnd)(pCurrNode[0] != null ? pCurrNode[0] : pCurrNode[1]);
        }
      }
      return (pEndNode != null);
    }

    // Collects the terminal nodes of all stored terms that
    // cover (evaluate to true on) the given term
    public void GetAllCalculateTrueTerms(byte[] term,
      ICollection<TreeNodeEnd> pAllCalculateTrueTermsList)
    {
      pAllCalculateTrueTermsList.Clear();
      GetAllCalculateTrueTerms(rootNode, term, 0, pAllCalculateTrueTermsList);
    }

    // Recursive worker
    private static void GetAllCalculateTrueTerms(object[] pCurrNode,
      byte[] term, int iStartPos, ICollection<TreeNodeEnd> pAllCalculateTrueTermsList)
    {
      int iTermLength1 = term.Length - 1;
      while ((pCurrNode != null) && (iStartPos < iTermLength1))
      {
        byte cSymb = term[iStartPos++];
        if (cSymb != LogicFunction.cStarSymb)
        {
          pCurrNode = (object[])pCurrNode[cSymb];
        }
        else
        {
          if ((pCurrNode[0] != null) && (pCurrNode[1] != null))
          {
            GetAllCalculateTrueTerms((object[])pCurrNode[1], term, iStartPos,
              pAllCalculateTrueTermsList);
            pCurrNode = (object[])pCurrNode[0];
          }
          else
          {
            pCurrNode = (object[])(pCurrNode[0] != null ? pCurrNode[0] : pCurrNode[1]);
          }
        }
      }
      if (pCurrNode != null)
      {
        byte cSymb = term[iTermLength1];
        if (cSymb != LogicFunction.cStarSymb)
        {
          TreeNodeEnd pEndNode = (TreeNodeEnd)pCurrNode[cSymb];
          if (pEndNode != null) pAllCalculateTrueTermsList.Add(pEndNode);
        }
        else
        {
          if (pCurrNode[0] != null) pAllCalculateTrueTermsList.Add((TreeNodeEnd)pCurrNode[0]);
          if (pCurrNode[1] != null) pAllCalculateTrueTermsList.Add((TreeNodeEnd)pCurrNode[1]);
        }
      }
    }
  }

  /// <summary>
  /// Minimization of logic functions by the Quine-McCluskey method
  /// </summary>
  public class Quine_McCluskey
  {
    // Minimized logic function of the positive terms
    private readonly Dnf _result = new Dnf();
    public Dnf Result
    {
      get { return _result; }
    }
    // Minimized logic function of the negative terms (the negation)
    private readonly Dnf _resultNeg = new Dnf();
    public Dnf ResultNeg
    {
      get { return _resultNeg; }
    }

    // One gluing (pairing) pass over the terms of the current level
    private static void Skleivanie(TreeFuncTerm X1Tree,
      TreeFuncTerm X2Tree, TreeFuncTerm NegativTree,
      TreeFuncTerm InpNegTerms, TreeFuncTerm AllOutTerms)
    {
      bool IsVirtSkleivOn = ((NegativTree != null) &&
        (InpNegTerms != null) && (InpNegTerms.Count != 0));
      for (TreeFuncTerm.TreeNodeEnd x1 = X1Tree.EnumerationInit();
        x1 != null; x1 = X1Tree.EnumerationNextNode())
      {
        bool bIsSkleiv = false;
        byte[] pCurrTerm = X1Tree.EnumerationTerm;
        for (int iPos = 0; iPos < pCurrTerm.Length; iPos++)
        {
          byte cSymbSav = pCurrTerm[iPos];
          if (cSymbSav == LogicFunction.cStarSymb) continue;
          // Invert the current position and look for the paired term
          pCurrTerm[iPos] = (byte)(1 - cSymbSav);
          if (X1Tree.Contains(pCurrTerm))
          {
            bIsSkleiv = true;
            if (cSymbSav == 0)
            {
              pCurrTerm[iPos] = LogicFunction.cStarSymb; // glue: mark the position as "don't care"
              X2Tree.Add(pCurrTerm);
            }
          }
          // "Virtual" gluing: the pair is absent but also does not occur in NegativTree
          else if (IsVirtSkleivOn && !NegativTree.Contains(pCurrTerm))
          {
            pCurrTerm[iPos] = LogicFunction.cStarSymb; // glue
            if (!InpNegTerms.IsCalculateTrue(pCurrTerm))
            {
              bIsSkleiv = true;
              X2Tree.Add(pCurrTerm);
            }
          }
          pCurrTerm[iPos] = cSymbSav;
        }
        // The term glued with nothing - it goes to the output as it is
        if (!bIsSkleiv && (AllOutTerms != null)) AllOutTerms.Add(pCurrTerm);
      }
    }

    // Removes duplicated terms by loading them
    // into a prefix tree
    private static void DeleteDublicatingTerms(
      IEnumerable<byte[]> InX1, TreeFuncTerm OutX2Tree)
    {
      OutX2Tree.Clear();
      foreach (byte[] x1 in InX1) OutX2Tree.Add(x1);
    }

    // Removes redundant terms from the result by building the coverage
    // table of input terms by output terms (the prime implicant chart),
    // as described, for example, in
    // (http://www.studfiles.ru/preview/5175815/page:4/)
    private static void ReduceRedundancyTerms(TreeFuncTerm AllOutputTerms,
      TreeFuncTerm AllInputTerms, ICollection<byte[]> ResultTerms)
    {
      // Clear the result
      ResultTerms.Clear();
      // For every output term: the set of input terms it covers
      Dictionary<byte[], HashSet<TreeFuncTerm.TreeNodeEnd>> Outputs2Inputs =
        new Dictionary<byte[], HashSet<TreeFuncTerm.TreeNodeEnd>>();
      // For every input term: the set of output terms that cover it
      Dictionary<TreeFuncTerm.TreeNodeEnd, HashSet<byte[]>> Inputs2Outputs =
        new Dictionary<TreeFuncTerm.TreeNodeEnd, HashSet<byte[]>>();
      // Fill both coverage maps
      for (TreeFuncTerm.TreeNodeEnd pNode = AllOutputTerms.EnumerationInit();
        pNode != null; pNode = AllOutputTerms.EnumerationNextNode())
      {
        byte[] outTerm = (byte[])AllOutputTerms.EnumerationTerm.Clone();
        // Collect all input terms covered by this output term
        HashSet<TreeFuncTerm.TreeNodeEnd> InpTermsLst = new HashSet<TreeFuncTerm.TreeNodeEnd>();
        AllInputTerms.GetAllCalculateTrueTerms(outTerm, InpTermsLst);
        Outputs2Inputs.Add(outTerm, InpTermsLst);
        foreach (TreeFuncTerm.TreeNodeEnd inputTerm in InpTermsLst)
        {
          if (!Inputs2Outputs.ContainsKey(inputTerm)) Inputs2Outputs.Add(inputTerm, new HashSet<byte[]>());
          Inputs2Outputs[inputTerm].Add(outTerm);
        }
      }
      // Sort the input terms by the number of output terms covering them - rarest first
      Inputs2Outputs = Inputs2Outputs.OrderBy(p => p.Value.Count).ToDictionary(p => p.Key, v => v.Value);
      // Greedy cover: for the least-covered input pick the output term that covers the most inputs
      while (Inputs2Outputs.Count > 0)
      {
        byte[] outTerm = Inputs2Outputs.First().Value.OrderByDescending(q => Outputs2Inputs[q].Count()).First();
        ResultTerms.Add(outTerm);
        foreach (TreeFuncTerm.TreeNodeEnd inTerm in Outputs2Inputs[outTerm].ToArray())
        {
          foreach (byte[] outTerm2Del in Inputs2Outputs[inTerm]) Outputs2Inputs[outTerm2Del].Remove(inTerm);
          Inputs2Outputs.Remove(inTerm);
        }
      }
    }

    // Entry point of the minimization
    public static void LogicFuncMinimize(
      IEnumerable<byte[]> PositivTerms, ICollection<byte[]> OutPos,
      IEnumerable<byte[]> NegativTerms, ICollection<byte[]> OutNeg)
    {
      TreeFuncTerm InpPosTerms = new TreeFuncTerm(true);
      DeleteDublicatingTerms(PositivTerms, InpPosTerms);

      int iTotalLevels = InpPosTerms.Rang;
      if (iTotalLevels <= 0) return;

      TreeFuncTerm OutPosTerms = new TreeFuncTerm();
      TreeFuncTerm OutNegTerms = null;

      TreeFuncTerm InpNegTerms = null;
      if ((NegativTerms != null) && (NegativTerms.Count() != 0))
      {
        InpNegTerms = new TreeFuncTerm(true);
        DeleteDublicatingTerms(NegativTerms, InpNegTerms);
        OutNegTerms = new TreeFuncTerm();

        // Remove the terms that occur in both the positive and the negative input sets
        for (TreeFuncTerm.TreeNodeEnd pNode = InpPosTerms.EnumerationInit();
          pNode != null; pNode = InpPosTerms.EnumerationNextNode())
        {
          if (!InpNegTerms.Contains(InpPosTerms.EnumerationTerm)) continue;
          // Compare how many times the term occurs among the positive and the negative examples
          int iPos_Count = PositivTerms.Count(p => Enumerable.SequenceEqual(p, InpPosTerms.EnumerationTerm));
          int iNeg_Count = NegativTerms.Count(p => Enumerable.SequenceEqual(p, InpPosTerms.EnumerationTerm));
          if (iPos_Count > iNeg_Count)
          {
            InpNegTerms.Remove(InpPosTerms.EnumerationTerm);
          }
          else if (iPos_Count < iNeg_Count)
          {
            InpPosTerms.Remove(InpPosTerms.EnumerationTerm);
          }
          else //if (iX1_Count == iNeg_Count)
          {
            InpPosTerms.Remove(InpPosTerms.EnumerationTerm);
            InpNegTerms.Remove(InpPosTerms.EnumerationTerm);
          }
        }
      }

      // Current working trees of the gluing levels
      TreeFuncTerm X1PositivTree = InpPosTerms;
      TreeFuncTerm X1NegativTree = InpNegTerms;

      int iLevelCounter = 0;
      // Glue the terms level by level while anything still glues
      while ((X1PositivTree.Count != 0) && (iLevelCounter < iTotalLevels))
      {
        TreeFuncTerm X2PositivTree = new TreeFuncTerm();
        Skleivanie(X1PositivTree, X2PositivTree, X1NegativTree, InpNegTerms, OutPosTerms);

        if ((X1NegativTree != null) && (X1NegativTree.Count != 0))
        {
          TreeFuncTerm X2NegativTree = new TreeFuncTerm();
          Skleivanie(X1NegativTree, X2NegativTree, X1PositivTree, InpPosTerms, OutNegTerms);

          // Release the previous level's tree
          if (iLevelCounter > 0) X1NegativTree.Clear();

          X1NegativTree = X2NegativTree;
        }

        // Release the previous level's tree
        if (iLevelCounter > 0) X1PositivTree.Clear();

        X1PositivTree = X2PositivTree;

        iLevelCounter++;

        GC.Collect();
      }

      if (OutPosTerms.Count > 0)
      {
        // Drop the degenerate term consisting of cStarSymb only
        OutPosTerms.Remove(Enumerable.Repeat(LogicFunction.cStarSymb, iTotalLevels).ToArray());
      }
      // Remove redundant terms from the positive result
      ReduceRedundancyTerms(OutPosTerms, InpPosTerms, OutPos);

      if ((OutNeg != null) && (OutNegTerms != null))
      {
        if (OutNegTerms.Count > 0)
        {
          // Drop the degenerate term consisting of cStarSymb only
          OutNegTerms.Remove(Enumerable.Repeat(LogicFunction.cStarSymb, iTotalLevels).ToArray());
        }
        // Remove redundant terms from the negative result
        ReduceRedundancyTerms(OutNegTerms, InpNegTerms, OutNeg);
      }
    }

    // Minimization: positive terms only
    public void Start(IEnumerable<byte[]> TermsInput)
    {
      LogicFuncMinimize(TermsInput, _result.Terms, null, null);
    }

    // Minimization: positive and negative terms
    public void Start(IEnumerable<byte[]> TermsInput, IEnumerable<byte[]> NegativTerms)
    {
      LogicFuncMinimize(TermsInput, _result.Terms, NegativTerms, _resultNeg.Terms);
    }

    // Overload for char[] ('0'/'1') terms
    public void Start(IEnumerable<char[]> TermsInput)
    {
      Start(TermsInput.Select(t => t.Select(p => (byte)(p == '0' ? 0 : 1)).ToArray()));
    }

    // Overload for char[] terms with negative terms
    public void Start(IEnumerable<char[]> TermsInput, IEnumerable<char[]> NegativTerms)
    {
      Start(TermsInput.Select(t => t.Select(p => (byte)(p == '0' ? 0 : 1)).ToArray()),
          NegativTerms.Select(t => t.Select(p => (byte)(p == '0' ? 0 : 1)).ToArray()));
    }

    // Overload for bool[] terms
    public void Start(IEnumerable<bool[]> TermsInput)
    {
      Start(TermsInput.Select(t => t.Select(p => (byte)(p ? 1 : 0)).ToArray()));
    }

    // Overload for bool[] terms with negative terms
    public void Start(IEnumerable<bool[]> TermsInput, IEnumerable<bool[]> NegativTerms)
    {
      Start(TermsInput.Select(t => t.Select(p => (byte)(p ? 1 : 0)).ToArray()),
          NegativTerms.Select(t => t.Select(p => (byte)(p ? 1 : 0)).ToArray()));
    }
  }
  #endregion
}

The network was tested on the problem of predicting a quasi-random process: a sum of several sine harmonics with randomly drawn frequencies, phases and amplitudes. At each moment t the input of the network is the window of the N previous values X(t-j) (j=1,2,…,N), and the target is the direction of the next step, Sign(X(t)-X(t-1)). Each window is normalized by its own mean value, so the network sees only the shape of the recent history, not its absolute level. The first part of the series is used for training and the last part for testing, so the quality of prediction is measured on data the network has never seen. Since the target Sign(X(t)-X(t-1)) is binary, the result is easy to score: the response of the network is either a correct prediction of the direction, a wrong one, or a refusal (the zero response produced when the positive and the negative logic functions contradict each other). The test code follows.


C# source code (test example)
    public static void TestRandomNeuralNetwork()
    {
      // Parameter: total length of the series
      const int iTrendSize = 2000;
      // Parameter: length of the test segment
      const int iTestTrendSize = 1000;
      // Parameter: size of the input window
      const int iWindowSize = 10;
      // Base period of the harmonics
      const int iOmega = 100;
      // Number of harmonics
      const int iHarmonics = 5;
      // Minimum amplitude of a harmonic
      const int iMinAmpl = 50;
      // Maximum amplitude of a harmonic
      const int iMaxAmpl = 100;
      // Noise-to-signal ratio
      const double dNoise2SignalRatio = 0;
      // Parameter: number of random neurons (separating hyperplanes)
      const int iAmountLogicDiscrims = 20;
      // Parameter: maximum number of points of a distribution function
      const int iMaxAmountDistributionPoints = 1000;
      // Parameter: maximum number of training restarts
      const int iLogicDiscrimsMaxTrainRestarts = 1;

      // Generate the test series
      int iPrepTrendLength = iTrendSize - iTestTrendSize;
      double[] pTrend = new double[iTrendSize];
      // Plain sine wave (alternative test signal)
      //for (int t = 0; t < iTrendSize; t++) pTrend[t] = Math.Sin((t * Math.PI) / iOmega) + 200;
      // Parameters of the random harmonics
      int[] pFreq = new int[iHarmonics];
      int[] pPhase = new int[iHarmonics];
      double[] pAmpl = new double[iHarmonics];
      // Random number generator
      Random RndFabric = new Random(Environment.TickCount);
      for (int h = 0; h < iHarmonics; h++)
      {
        pFreq[h] = RndFabric.Next(iOmega/*iPrepTrendLength*/) + 1;
        pPhase[h] = RndFabric.Next(iPrepTrendLength);
        pAmpl[h] = RndFabric.NextDouble();
      }
      double iMinValue = double.MaxValue, iMaxValue = 0;
      for (int t = 0; t < iTrendSize; t++)
      {
        double dValue = 0; //iMinAmpl + ((iMaxAmpl - iMinAmpl) * RndFabric.NextDouble());
        for (int h = 0; h < iHarmonics; h++)
        {
          dValue += ((pAmpl[h] * (iMaxAmpl - iMinAmpl)) + iMinAmpl) * Math.Sin(((t + pPhase[h]) * Math.PI) / pFreq[h]);
        }
        pTrend[t] = dValue;
        iMinValue = Math.Min(iMinValue, dValue);
        iMaxValue = Math.Max(iMaxValue, dValue);
      }
      // Add noise
      if (dNoise2SignalRatio > 0)
      {
        double dNoiseAmp = (iMaxValue - iMinValue) * dNoise2SignalRatio;
        for (int t = 0; t < iTrendSize; t++)
        {
          pTrend[t] += dNoiseAmp * 2.0 * (RndFabric.NextDouble() - 0.5);
          iMinValue = Math.Min(iMinValue, pTrend[t]);
        }
      }
      // Shift the series so that it has no negative values
      if (iMinValue < 0)
      {
        for (int t = 0; t < iTrendSize; t++) pTrend[t] -= iMinValue;
      }
      // Build the input vectors and the target feature
      double[][] pSourceTrendData = new double[iTrendSize][];
      bool[] pTargetFeature = new bool[iTrendSize];
      // Sliding window over the series
      for (int t = iWindowSize; t < iTrendSize; t++)
      {
        // Input window
        double[] pNormData = new double[iWindowSize];
        for (int i = 0; i < iWindowSize; i++) pNormData[i] = pTrend[t - iWindowSize + i];
        pSourceTrendData[t] = pNormData;
        // Target: direction of the current step
        pTargetFeature[t] = (pTrend[t] >= pTrend[t - 1]);
      }
      DateTime dtStartDt = DateTime.Now;
      RndNeuroNet.RandomNeuralNetwork randomNeuralNetwork = new RndNeuroNet.RandomNeuralNetwork();
      randomNeuralNetwork.CreateNetwork(pSourceTrendData, pTargetFeature, iWindowSize, iPrepTrendLength, iWindowSize, iAmountLogicDiscrims,
        //iClustersAmount, iClustersRestart,
        iLogicDiscrimsMaxTrainRestarts, iMaxAmountDistributionPoints);
      // Measure the training time
      TimeSpan tsTime = (DateTime.Now - dtStartDt);
      // Score the network on the training segment
      int iUpGapCounterTrain = 0, iDnGapCounterTrain = 0, iSuccessCounterTrain = 0, iErrorCounterTrain = 0, iIgnoreCounterTrain = 0;
      for (int t = iWindowSize; t < iPrepTrendLength; t++)
      {
        double dPrognos = randomNeuralNetwork.CalcResponce(pSourceTrendData[t]);
        if (dPrognos == 0)
        {
          iIgnoreCounterTrain++;
        }
        else
        {
          if (((pTargetFeature[t] ? +1 : -1) * dPrognos) > 0)
          {
            iSuccessCounterTrain++;
          }
          else
          {
            iErrorCounterTrain++;
          }
        }
        // Count the up/down moves of the series
        if (pTargetFeature[t])
        {
          iUpGapCounterTrain++;
        }
        else
        {
          iDnGapCounterTrain++;
        }
      }
      // Score the network on the test segment
      int iUpGapCounterTest = 0, iDnGapCounterTest = 0, iSuccessCounterTest = 0, iErrorCounterTest = 0, iIgnoreCounterTest = 0;
      for (int t = iPrepTrendLength; t < iTrendSize; t++)
      {
        double dPrognos = randomNeuralNetwork.CalcResponce(pSourceTrendData[t]);
        if (dPrognos == 0)
        {
          iIgnoreCounterTest++;
        }
        else
        {
          if (((pTargetFeature[t] ? +1 : -1) * dPrognos) > 0)
          {
            iSuccessCounterTest++;
          }
          else
          {
            iErrorCounterTest++;
          }
        }
        // Count the up/down moves of the series
        if (pTargetFeature[t])
        {
          iUpGapCounterTest++;
        }
        else
        {
          iDnGapCounterTest++;
        }
      }
      // Print the statistics
      Console.WriteLine("Up moves on the training segment = " + iUpGapCounterTrain + Environment.NewLine +
                      "Down moves on the training segment = " + iDnGapCounterTrain + Environment.NewLine +
                      "Correct predictions on the training segment = " + iSuccessCounterTrain + Environment.NewLine +
                      "Wrong predictions on the training segment = " + iErrorCounterTrain + Environment.NewLine +
                      "Refusals (zero response) on the training segment = " + iIgnoreCounterTrain + Environment.NewLine +
                      "Up moves on the test segment = " + iUpGapCounterTest + Environment.NewLine +
                      "Down moves on the test segment = " + iDnGapCounterTest + Environment.NewLine +
                      "Correct predictions on the test segment = " + iSuccessCounterTest + Environment.NewLine +
                      "Wrong predictions on the test segment = " + iErrorCounterTest + Environment.NewLine +
                      "Refusals (zero response) on the test segment = " + iIgnoreCounterTest + Environment.NewLine +
                      "Training time = " + (int)tsTime.TotalMinutes + " min " + tsTime.Seconds + " sec"
      );
      Console.WriteLine("Press Enter to exit ...");
      Console.ReadLine();
    }

The test shows that the network indeed learns the regularities of the generated series and keeps its predictive power on the unseen test segment. Summing up, the main advantages of the proposed network are:


  • very fast training: the weights of the neurons are selected randomly in a single pass;
  • absence of overfitting;
  • interpretability of the result, i.e. the trained network is an explicit logic function, a readable set of rules of the form «if — then».

As a drawback of this NN one must note its heavy demands on the amount of available RAM and the significant time spent on training. The main consumer of both is the algorithm that minimizes the logic functions by the Quine-McCluskey method, i.e. the third, output layer. There is, however, a way to soften this drawback somewhat by cascading several logic functions. Apart from that, this NN has proven itself a reliable tool for predicting and classifying binary data.
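A minimal sketch of that cascading idea, assuming the M-bit terms are split into groups, each group is minimized by its own Quine_McCluskey instance, and a final, much smaller function combines the group verdicts (this is only one possible reading of the suggestion, built on the classes from the listing above, not code from the article):

using System;
using System.Collections.Generic;
using System.Linq;

public static class CascadeSketch
{
  // Sketch: instead of one Quine-McCluskey run over long M-bit terms,
  // minimize each group of bits separately, then combine the binary
  // verdicts of the groups with a final, much smaller logic function.
  public static LogicFunc.Quine_McCluskey TrainCascade(
    IList<byte[]> posTerms, IList<byte[]> negTerms, int groupSize)
  {
    int m = posTerms[0].Length;
    var stages = new List<LogicFunc.Quine_McCluskey>();
    for (int g = 0; g < m; g += groupSize)
    {
      int len = Math.Min(groupSize, m - g);
      var stage = new LogicFunc.Quine_McCluskey();
      stage.Start(posTerms.Select(t => t.Skip(g).Take(len).ToArray()),
                  negTerms.Select(t => t.Skip(g).Take(len).ToArray()));
      stages.Add(stage);
    }
    // Re-encode every example by the verdicts of the first-stage functions
    Func<byte[], byte[]> encode = t => stages.Select((s, i) =>
      (byte)(s.Result.Calculate(t.Skip(i * groupSize).Take(groupSize).ToArray()) ? 1 : 0)).ToArray();
    var final = new LogicFunc.Quine_McCluskey();
    final.Start(posTerms.Select(encode), negTerms.Select(encode));
    return final;
  }
}

Each first-stage run then works on terms of length groupSize instead of M, which is what would keep the memory footprint of the minimization down.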

