path: root/src/CuTNetLib/cuCache.h
author    Joe Zhao <ztuowen@gmail.com>  2014-04-14 08:14:45 +0800
committer Joe Zhao <ztuowen@gmail.com>  2014-04-14 08:14:45 +0800
commit    cccccbf6cca94a3eaf813b4468453160e91c332b (patch)
tree      23418cb73a10ae3b0688681a7f0ba9b06424583e /src/CuTNetLib/cuCache.h
First commit
Diffstat (limited to 'src/CuTNetLib/cuCache.h')
-rw-r--r--  src/CuTNetLib/cuCache.h  94
1 file changed, 94 insertions(+), 0 deletions(-)
diff --git a/src/CuTNetLib/cuCache.h b/src/CuTNetLib/cuCache.h
new file mode 100644
index 0000000..42d9b4d
--- /dev/null
+++ b/src/CuTNetLib/cuCache.h
@@ -0,0 +1,94 @@
+#ifndef _CUCACHE_H_
+#define _CUCACHE_H_
+
+#include "cumatrix.h"
+
+namespace TNet {
+
+
+ /**
+ * \brief The feature-target pair cache
+ *
+ * \ingroup CuNNComp
+ * Preloads mCachesize feature vectors and their labels into GPU memory.
+ *
+ * During every iteration, mBunchsize vectors are read out and fed to the network.
+ *
+ * When the cache is given more data than it can hold, the extras are
+ * stored as leftovers, and the leftovers are moved into the cache the
+ * next time it is filled.
+ *
+ * Note:
+ * - The cache size must be divisible by the bunch size to ensure proper functionality.
+ * - Once data has been extracted, the cache must be fully depleted before refilling begins, since every extraction cycle starts at position zero.
+ * - The cache must be filled before extraction begins; otherwise filling cannot restart and it is harder to avoid discarding data.
+ * - @todo Why not implement CuCache as a Stack instead of a Queue?
+ * .
+ */
+ class CuCache {
+ typedef enum { EMPTY, INTAKE, FULL, EXHAUST } State;
+ public:
+ CuCache();
+ ~CuCache();
+
+ /// Initialize the cache
+ void Init(size_t cachesize, size_t bunchsize);
+
+ /// Add data to the cache
+ /// \param[in] rFeatures CuNN input feature vectors
+ /// \param[in] rDesired Target labels corresponding to rFeatures
+ void AddData(const CuMatrix<BaseFloat>& rFeatures, const CuMatrix<BaseFloat>& rDesired);
+ /// Randomizes the cache
+ void Randomize();
+ /// Get one bunch of training data
+ /// \param[out] rFeatures A bunch of mBunchsize CuNN input feature vectors
+ /// \param[out] rDesired The corresponding mBunchsize target labels
+ void GetBunch(CuMatrix<BaseFloat>& rFeatures, CuMatrix<BaseFloat>& rDesired);
+
+
+ /// Returns true if the cache was completely filled
+ bool Full()
+ { return (mState == FULL); }
+
+ /// Returns true if the cache is empty
+ bool Empty()
+ { return (mState == EMPTY || mIntakePos < mBunchsize); }
+
+ /// Number of discarded frames
+ int Discarded()
+ { return mDiscarded; }
+
+ /// Set the trace message level
+ void Trace(int trace)
+ { mTrace = trace; }
+
+ private:
+
+ static long int GenerateRandom(int max)
+ { return lrand48() % max; }
+
+ State mState; ///< Current state of the cache
+
+ size_t mIntakePos; ///< Number of intaken vectors by AddData
+ size_t mExhaustPos; ///< Number of exhausted vectors by GetBunch
+
+ size_t mCachesize; ///< Size of cache
+ size_t mBunchsize; ///< Size of bunch
+ int mDiscarded; ///< Number of discarded frames
+
+ CuMatrix<BaseFloat> mFeatures; ///< Feature cache
+ CuMatrix<BaseFloat> mFeaturesRandom; ///< Feature cache in randomized order
+ CuMatrix<BaseFloat> mFeaturesLeftover; ///< Features that did not fit into the cache during the last fill
+
+ CuMatrix<BaseFloat> mDesired; ///< Desired-vector (label) cache
+ CuMatrix<BaseFloat> mDesiredRandom; ///< Desired-vector cache in randomized order
+ CuMatrix<BaseFloat> mDesiredLeftover; ///< Desired vectors that did not fit into the cache during the last fill
+
+ bool mRandomized; ///< True once the cache content has been randomized
+
+ int mTrace; ///< Trace message level
+ };
+
+}
+
+#endif
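
For reference, the following is a minimal usage sketch of the fill/randomize/extract cycle described in the class comment, under the assumptions stated there (cache size divisible by bunch size, fill until full, deplete before refilling). The reader function GetTrainingPair and the concrete sizes are hypothetical placeholders; only the CuCache interface itself comes from this header.

#include "cuCache.h"

using namespace TNet;

// Hypothetical data source: fills one block of features/labels,
// returns false when the training data is exhausted.
bool GetTrainingPair(CuMatrix<BaseFloat>& feats, CuMatrix<BaseFloat>& targs);

void TrainOneEpoch() {
  CuCache cache;
  // The cache size must be divisible by the bunch size (see the note above).
  cache.Init(/*cachesize*/ 16384, /*bunchsize*/ 256);
  cache.Trace(1);

  CuMatrix<BaseFloat> feats_in, targs_in;   // blocks coming from the reader
  CuMatrix<BaseFloat> feats, targs;         // one bunch handed to the network

  bool more_data = true;
  while (more_data) {
    // Fill phase: anything that does not fit is kept as leftover and
    // is moved into the cache at the start of the next fill.
    while (!cache.Full() && (more_data = GetTrainingPair(feats_in, targs_in))) {
      cache.AddData(feats_in, targs_in);
    }

    // Shuffle the cached vectors, then deplete the cache bunch by bunch
    // before filling starts again.
    cache.Randomize();
    while (!cache.Empty()) {
      cache.GetBunch(feats, targs);
      // ... run forward/backward pass on (feats, targs) ...
    }
  }
}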