diff --git a/FlucomaClients.cmake b/FlucomaClients.cmake
index 2208b82b1..2958a825f 100644
--- a/FlucomaClients.cmake
+++ b/FlucomaClients.cmake
@@ -140,6 +140,7 @@ add_kr_in_client(Stats clients/rt/RunningStatsClient.hpp CLASS RunningStatsClien
 add_client(TransientSlice clients/rt/TransientSliceClient.hpp CLASS RTTransientSliceClient )
 add_client(Transients clients/rt/TransientClient.hpp CLASS RTTransientClient )
 
+#lib manipulation client group
 add_client(DataSet clients/nrt/DataSetClient.hpp CLASS NRTThreadedDataSetClient GROUP MANIPULATION)
 add_client(DataSetQuery clients/nrt/DataSetQueryClient.hpp CLASS NRTThreadedDataSetQueryClient GROUP MANIPULATION)
 
@@ -158,3 +159,4 @@ add_client(UMAP clients/nrt/UMAPClient.hpp CLASS NRTThreadedUMAPClient GROUP MAN
 add_client(MLPRegressor clients/nrt/MLPRegressorClient.hpp CLASS NRTThreadedMLPRegressorClient GROUP MANIPULATION)
 add_client(MLPClassifier clients/nrt/MLPClassifierClient.hpp CLASS NRTThreadedMLPClassifierClient GROUP MANIPULATION)
 add_client(Grid clients/nrt/GridClient.hpp CLASS NRTThreadedGridClient GROUP MANIPULATION)
+add_client(PolynomialRegressor clients/nrt/PolynomialRegressorClient.hpp CLASS NRTThreadedPolynomialRegressorClient GROUP MANIPULATION)
diff --git a/include/algorithms/public/PolynomialRegressor.hpp b/include/algorithms/public/PolynomialRegressor.hpp
new file mode 100644
index 000000000..f70701fad
--- /dev/null
+++ b/include/algorithms/public/PolynomialRegressor.hpp
@@ -0,0 +1,195 @@
+/*
+Part of the Fluid Corpus Manipulation Project (http://www.flucoma.org/)
+Copyright University of Huddersfield.
+Licensed under the BSD-3 License.
+See license.md file in the project root for full license information.
+This project has received funding from the European Research Council (ERC)
+under the European Union’s Horizon 2020 research and innovation programme
+(grant agreement No 725899).
+*/
+
+#pragma once
+
+#include "../util/AlgorithmUtils.hpp"
+#include "../util/FluidEigenMappings.hpp"
+#include "../../data/FluidIndex.hpp"
+#include "../../data/FluidMemory.hpp"
+#include "../../data/TensorTypes.hpp"
+#include <Eigen/Core>
+#include <Eigen/Dense>
+#include <cassert>
+#include <cmath>
+
+namespace fluid {
+namespace algorithm {
+
+// Closed-form least-squares polynomial fit, one independent polynomial per
+// input dimension, optionally stabilised by Tikhonov (ridge) regularisation.
+class PolynomialRegressor
+{
+public:
+  explicit PolynomialRegressor() = default;
+  ~PolynomialRegressor() = default;
+
+  // degree: polynomial order; dims: number of parallel regressors;
+  // tikhonov: ridge factor (0 = ordinary least squares)
+  void init(index degree, index dims, double tikhonov = 0.0)
+  {
+    mInitialized = true;
+    setDegree(degree);
+    setDims(dims);
+    setTikhonov(tikhonov);
+  };
+
+  index degree() const { return mInitialized ? asSigned(mDegree) : 0; };
+  double tihkonov() const { return mInitialized ? mTikhonovFactor : 0.0; };
+  index dims() const { return mInitialized ? asSigned(mDims) : 0; };
+  index size() const { return mInitialized ? asSigned(mDegree) : 0; };
+
+  // forget any previous fit; settings are kept
+  void clear() { mRegressed = false; }
+
+  bool regressed() const { return mRegressed; };
+  bool initialized() const { return mInitialized; };
+
+  // all setters invalidate a previously computed fit
+  void setDegree(index degree)
+  {
+    if (mDegree == degree) return;
+
+    mDegree = degree;
+    mRegressed = false;
+  }
+
+  void setDims(index dims)
+  {
+    if (mDims == dims) return;
+
+    mDims = dims;
+    mRegressed = false;
+  }
+
+  void setTikhonov(double tikhonov)
+  {
+    if (mTikhonovFactor == tikhonov) return;
+
+    mTikhonovFactor = tikhonov;
+    mRegressed = false;
+  }
+
+  void regress(InputRealMatrixView in, InputRealMatrixView out,
+               Allocator& alloc = FluidDefaultAllocator())
+  {
+    using namespace _impl;
+    using namespace Eigen;
+
+    ScopedEigenMap<MatrixXd> input(in.rows(), in.cols(), alloc),
+        output(out.rows(), out.cols(), alloc),
+        transposeProduct(mDegree + 1, mDegree + 1, alloc);
+
+    input = asEigen<Matrix>(in);
+    output = asEigen<Matrix>(out);
+
+    mCoefficients.resize(mDegree + 1, mDims);
+    mTikhonovMatrix.resize(mDegree + 1, mDegree + 1);
+
+    asEigen<Matrix>(mTikhonovMatrix) =
+        mTikhonovFactor * MatrixXd::Identity(mDegree + 1, mDegree + 1);
+
+    for (index i = 0; i < mDims; ++i)
+    {
+      generateDesignMatrix(input.col(i));
+
+      // tikhonov/ridge regularisation, given Ax = y where x could be noisy
+      // optimise the value _x = (A^T . A + R^T . R)^-1 . A^T . y
+      // where R is a tikhonov filter matrix, in case of ridge regression of
+      // the form a.I
+      transposeProduct = asEigen<Matrix>(mDesignMatrix).transpose() *
+                             asEigen<Matrix>(mDesignMatrix) +
+                         asEigen<Matrix>(mTikhonovMatrix).transpose() *
+                             asEigen<Matrix>(mTikhonovMatrix);
+      asEigen<Matrix>(mCoefficients.col(i)) =
+          transposeProduct.inverse() *
+          asEigen<Matrix>(mDesignMatrix).transpose() * output.col(i);
+    }
+
+    mRegressed = true;
+  };
+
+  void getCoefficients(RealMatrixView coefficients) const
+  {
+    if (mInitialized) coefficients <<= mCoefficients;
+  };
+
+  // adopt externally supplied coefficients (e.g. a deserialised model);
+  // degree and dims are inferred from the matrix shape
+  void setCoefficients(InputRealMatrixView coefficients)
+  {
+    if (!mInitialized) mInitialized = true;
+
+    setDegree(coefficients.rows() - 1);
+    setDims(coefficients.cols());
+
+    mCoefficients <<= coefficients;
+    mRegressed = true;
+  }
+
+  // evaluate the fitted polynomials: out.col(i) = design(in.col(i)) * c_i
+  void process(InputRealMatrixView in, RealMatrixView out,
+               Allocator& alloc = FluidDefaultAllocator()) const
+  {
+    using namespace _impl;
+    using namespace Eigen;
+
+    ScopedEigenMap<VectorXd> coefficientsColumn(mCoefficients.rows(), alloc),
+        inputColumn(in.rows(), alloc);
+
+    for (index i = 0; i < mDims; ++i)
+    {
+      inputColumn = asEigen<Matrix>(in.col(i));
+      coefficientsColumn = asEigen<Matrix>(mCoefficients.col(i));
+
+      generateDesignMatrix(inputColumn);
+
+      asEigen<Matrix>(out.col(i)) =
+          asEigen<Matrix>(mDesignMatrix) * coefficientsColumn;
+    }
+  }
+
+private:
+  // build the Vandermonde design matrix: column k holds in^k, element-wise
+  void generateDesignMatrix(Eigen::Ref<Eigen::VectorXd> in,
+                            Allocator& alloc = FluidDefaultAllocator()) const
+  {
+    using namespace _impl;
+    using namespace Eigen;
+
+    ScopedEigenMap<ArrayXd> designColumn(in.size(), alloc),
+        inArray(in.size(), alloc);
+
+    designColumn = ArrayXd::Ones(in.size());
+    inArray = in.array();
+
+    mDesignMatrix.resize(in.size(), mDegree + 1);
+
+    for (index i = 0; i < mDegree + 1;
+         ++i, designColumn = designColumn * inArray)
+      asEigen<Array>(mDesignMatrix.col(i)) = designColumn;
+  }
+
+  index mDegree{2};
+  index mDims{1};
+  bool mRegressed{false};
+  bool mInitialized{false};
+
+  double mTikhonovFactor{0};
+
+  RealMatrix mCoefficients;
+
+  // scratch buffers mutated inside const evaluation paths
+  mutable RealMatrix mDesignMatrix;
+  mutable RealMatrix mTikhonovMatrix;
+};
+
+} // namespace algorithm
+} // namespace fluid
\ No newline at end of file
diff --git a/include/clients/nrt/PolynomialRegressorClient.hpp b/include/clients/nrt/PolynomialRegressorClient.hpp
new file mode 100644
index 000000000..23b7c6dda
--- /dev/null
+++ b/include/clients/nrt/PolynomialRegressorClient.hpp
@@ -0,0 +1,281 @@
+/*
+Part of the Fluid Corpus Manipulation Project (http://www.flucoma.org/)
+Copyright University of Huddersfield.
+Licensed under the BSD-3 License.
+See license.md file in the project root for full license information.
+This project has received funding from the European Research Council (ERC)
+under the European Union’s Horizon 2020 research and innovation programme
+(grant agreement No 725899).
+*/
+
+#pragma once
+
+#include "DataSetClient.hpp"
+#include "NRTClient.hpp"
+#include "../../algorithms/public/PolynomialRegressor.hpp"
+
+namespace fluid {
+namespace client {
+namespace polynomialregressor {
+
+constexpr auto PolynomialRegressorParams = defineParameters(
+    StringParam<Fixed<true>>("name", "Name"),
+    LongParam("degree", "Degree of polynomial", 2, Min(0)),
+    FloatParam("tikhonov", "Tihkonov factor for regression", 0.0, Min(0.0)));
+
+// NRT model client exposing algorithm::PolynomialRegressor as messages
+class PolynomialRegressorClient
+    : public FluidBaseClient,
+      OfflineIn,
+      OfflineOut,
+      ModelObject,
+      public DataClient<algorithm::PolynomialRegressor>
+{
+  enum { kName, kDegree, kTikhonov };
+
+public:
+  using string = std::string;
+  using BufferPtr = std::shared_ptr<BufferAdaptor>;
+  using InputBufferPtr = std::shared_ptr<const BufferAdaptor>;
+  using DataSet = FluidDataSet<string, double, 1>;
+  using StringVector = FluidTensor<string, 1>;
+
+  using ParamDescType = decltype(PolynomialRegressorParams);
+  using ParamSetViewType = ParameterSetView<ParamDescType>;
+  using ParamValues = typename ParamSetViewType::ValueTuple;
+
+  std::reference_wrapper<ParamSetViewType> mParams;
+
+  template <size_t N>
+  auto& get() const
+  {
+    return mParams.get().template get<N>();
+  }
+
+  // keep the algorithm in sync with parameter changes
+  void setParams(ParamSetViewType& p)
+  {
+    mParams = p;
+    mAlgorithm.setDegree(get<kDegree>());
+    mAlgorithm.setTikhonov(get<kTikhonov>());
+  }
+
+  static constexpr auto& getParameterDescriptors()
+  {
+    return PolynomialRegressorParams;
+  }
+
+  PolynomialRegressorClient(ParamSetViewType& p, FluidContext&) : mParams(p)
+  {
+    controlChannelsIn(1);
+    controlChannelsOut({1, 1});
+  }
+
+  template <typename T>
+  Result process(FluidContext&)
+  {
+    return {};
+  }
+
+  // fit one polynomial per dimension mapping source points to target points
+  MessageResult<void> fit(InputDataSetClientRef source,
+                          InputDataSetClientRef target)
+  {
+    auto targetClientPtr = target.get().lock();
+    if (!targetClientPtr) return Error(NoDataSet);
+    auto targetDataSet = targetClientPtr->getDataSet();
+    if (targetDataSet.size() == 0) return Error(EmptyDataSet);
+
+    auto sourceClientPtr = source.get().lock();
+    if (!sourceClientPtr) return Error(NoDataSet);
+    auto sourceDataSet = sourceClientPtr->getDataSet();
+    if (sourceDataSet.size() == 0) return Error(EmptyDataSet);
+
+    if (sourceDataSet.size() != targetDataSet.size())
+      return Error(SizesDontMatch);
+
+    if (sourceDataSet.dims() != targetDataSet.dims())
+      return Error(WrongPointSize);
+
+    mAlgorithm.init(get<kDegree>(), sourceDataSet.dims(), get<kTikhonov>());
+
+    RealMatrixView data = sourceDataSet.getData();
+    RealMatrixView tgt = targetDataSet.getData();
+
+    mAlgorithm.regress(data, tgt);
+
+    return OK();
+  }
+
+  // evaluate the fitted model over every point of a dataset
+  MessageResult<void> predict(InputDataSetClientRef src, DataSetClientRef dest)
+  {
+    index inputSize = mAlgorithm.dims();
+    index outputSize = mAlgorithm.dims();
+    auto srcPtr = src.get().lock();
+    auto destPtr = dest.get().lock();
+
+    if (!srcPtr || !destPtr) return Error(NoDataSet);
+
+    auto srcDataSet = srcPtr->getDataSet();
+    if (srcDataSet.size() == 0) return Error(EmptyDataSet);
+
+    if (!mAlgorithm.regressed()) return Error(NoDataFitted);
+    if (srcDataSet.dims() != inputSize) return Error(WrongPointSize);
+
+    StringVector ids{srcDataSet.getIds()};
+    RealMatrix output(srcDataSet.size(), outputSize);
+
+    mAlgorithm.process(srcDataSet.getData(), output);
+
+    DataSet result(ids, output);
+    destPtr->setDataSet(result);
+
+    return OK();
+  }
+
+  // evaluate the fitted model for a single point held in a buffer
+  MessageResult<void> predictPoint(InputBufferPtr in, BufferPtr out) const
+  {
+    index inputSize = mAlgorithm.dims();
+    index outputSize = mAlgorithm.dims();
+
+    if (!in || !out) return Error(NoBuffer);
+
+    BufferAdaptor::ReadAccess inBuf(in.get());
+    BufferAdaptor::Access outBuf(out.get());
+
+    if (!inBuf.exists()) return Error(InvalidBuffer);
+    if (!outBuf.exists()) return Error(InvalidBuffer);
+    if (inBuf.numFrames() != inputSize) return Error(WrongPointSize);
+
+    if (!mAlgorithm.regressed()) return Error(NoDataFitted);
+
+    Result resizeResult = outBuf.resize(outputSize, 1, inBuf.sampleRate());
+    if (!resizeResult.ok()) return Error(BufferAlloc);
+
+    RealMatrix src(inputSize, 1);
+    RealMatrix dest(outputSize, 1);
+
+    src.col(0) <<= inBuf.samps(0, inputSize, 0);
+    mAlgorithm.process(src, dest);
+    outBuf.samps(0, outputSize, 0) <<= dest.col(0);
+
+    return OK();
+  }
+
+  MessageResult<string> print()
+  {
+    return "PolynomialRegressor " + std::string(get<kName>()) +
+           "\npolynomial degree: " + std::to_string(mAlgorithm.degree()) +
+           "\nparallel regressors: " + std::to_string(mAlgorithm.dims()) +
+           "\nTikhonov regularisation factor: " +
+           std::to_string(mAlgorithm.tihkonov()) +
+           "\nregressed: " + (mAlgorithm.regressed() ? "true" : "false");
+  }
+
+  MessageResult<void> write(string fileName)
+  {
+    if (!mAlgorithm.regressed()) return Error(NoDataFitted);
+    return DataClient::write(fileName);
+  }
+
+  MessageResult<ParamValues> read(string fileName)
+  {
+    auto result = DataClient::read(fileName);
+    if (result.ok()) return updateParameters();
+    return {result.status(), result.message()};
+  }
+
+  MessageResult<ParamValues> load(string fileName)
+  {
+    auto result = DataClient::load(fileName);
+    if (result.ok()) return updateParameters();
+    return {result.status(), result.message()};
+  }
+
+  static auto getMessageDescriptors()
+  {
+    return defineMessages(
+        makeMessage("fit", &PolynomialRegressorClient::fit),
+        makeMessage("dims", &PolynomialRegressorClient::dims),
+        makeMessage("clear", &PolynomialRegressorClient::clear),
+        makeMessage("size", &PolynomialRegressorClient::size),
+        makeMessage("print", &PolynomialRegressorClient::print),
+        makeMessage("predict", &PolynomialRegressorClient::predict),
+        makeMessage("predictPoint", &PolynomialRegressorClient::predictPoint),
+        makeMessage("load", &PolynomialRegressorClient::load),
+        makeMessage("dump", &PolynomialRegressorClient::dump),
+        makeMessage("write", &PolynomialRegressorClient::write),
+        makeMessage("read", &PolynomialRegressorClient::read));
+  }
+
+private:
+  // push algorithm state back into the parameter set after read/load
+  MessageResult<ParamValues> updateParameters()
+  {
+    get<kDegree>() = mAlgorithm.degree();
+    get<kTikhonov>() = mAlgorithm.tihkonov();
+
+    return mParams.get().toTuple();
+  }
+};
+
+using PolynomialRegressorRef = SharedClientRef<const PolynomialRegressorClient>;
+
+constexpr auto PolynomialRegressorQueryParams = defineParameters(
+    PolynomialRegressorRef::makeParam("model", "Source Model"),
+    LongParam("degree", "Prediction Polynomial Degree", 2, Min(0)),
+    InputDataSetClientRef::makeParam("dataSet", "DataSet Name"),
+    InputBufferParam("inputPointBuffer", "Input Point Buffer"),
+    BufferParam("predictionBuffer", "Prediction Buffer"));
+
+// RT query shell; processing body is not yet implemented
+class PolynomialRegressorQuery : public FluidBaseClient, ControlIn, ControlOut
+{
+  // kDataSet included so indices track the five params declared above
+  enum { kModel, kDegree, kDataSet, kInputBuffer, kOutputBuffer };
+
+public:
+  using ParamDescType = decltype(PolynomialRegressorQueryParams);
+  using ParamSetViewType = ParameterSetView<ParamDescType>;
+
+  std::reference_wrapper<ParamSetViewType> mParams;
+
+  void setParams(ParamSetViewType& p) { mParams = p; }
+
+  template <size_t N>
+  auto& get() const
+  {
+    return mParams.get().template get<N>();
+  }
+
+  static constexpr auto& getParameterDescriptors()
+  {
+    return PolynomialRegressorQueryParams;
+  }
+
+  PolynomialRegressorQuery(ParamSetViewType& p, FluidContext&) : mParams(p)
+  {
+    controlChannelsIn(1);
+    controlChannelsOut({1, 1});
+  }
+
+  template <typename T>
+  void process(std::vector<FluidTensorView<T, 1>>& input,
+               std::vector<FluidTensorView<T, 1>>& output, FluidContext&)
+  {}
+
+  index latency() { return 0; }
+};
+
+} // namespace polynomialregressor
+
+using NRTThreadedPolynomialRegressorClient = NRTThreadingAdaptor<
+    typename polynomialregressor::PolynomialRegressorRef::SharedType>;
+
+using RTPolynomialRegressorQueryClient =
+    ClientWrapper<polynomialregressor::PolynomialRegressorQuery>;
+
+} // namespace client
+} // namespace fluid
\ No newline at end of file
diff --git a/include/data/FluidJSON.hpp b/include/data/FluidJSON.hpp
index 0cdd5ede4..9d459148c 100644
--- a/include/data/FluidJSON.hpp
+++ b/include/data/FluidJSON.hpp
@@ -10,6 +10,7 @@
 #include <algorithms/public/MLP.hpp>
 #include <algorithms/public/Normalization.hpp>
 #include <algorithms/public/PCA.hpp>
+#include <algorithms/public/PolynomialRegressor.hpp>
 #include <algorithms/public/RobustScaling.hpp>
 #include <algorithms/public/SKMeans.hpp>
 #include <algorithms/public/Standardization.hpp>
@@ -473,6 +474,34 @@ void from_json(const nlohmann::json &j, UMAP &umap) {
   umap.init(embedding, tree, k, a, b);
 }
 
+// PolynomialRegressor
+void to_json(nlohmann::json &j, const PolynomialRegressor &reg) {
+  RealMatrix coefficients(reg.degree() + 1, reg.dims());
+
+  reg.getCoefficients(coefficients);
+
+  j["tikhonov"] = reg.tihkonov();
+  j["coefficients"] = RealMatrixView(coefficients);
+}
+
+bool check_json(const nlohmann::json &j, const PolynomialRegressor &) {
+  return fluid::check_json(j,
+                           {"tikhonov", "coefficients"},
+                           {JSONTypes::NUMBER, JSONTypes::ARRAY}
+  );
+}
+
+void from_json(const nlohmann::json &j, PolynomialRegressor &reg) {
+  RealMatrix coefficients(reg.degree() + 1, reg.dims());
+  double tikhonov;
+
+  j.at("tikhonov").get_to(tikhonov);
+  j.at("coefficients").get_to(coefficients);
+
+  reg.setTikhonov(tikhonov);
+  reg.setCoefficients(coefficients);
+}
+
 } // namespace algorithm
 
 class JSONFile {