Gist

Here’s a fun thing to try out with TensorFlow.js: take whatever function you like and let a network learn it. A famous result, the universal approximation theorem, shows that any continuous function on a bounded interval can be approximated by a network with a single hidden layer, given enough units. No surprise there: any such function can be reproduced by a Fourier expansion too. With enough parametric space on a limited interval it’s conceptually obvious, yet somewhat trickier to prove rigorously.
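To make the analogy concrete, here is the shape of a one-hidden-layer approximation next to a truncated Fourier series (a sketch of the statement, not the theorem in full rigor):

$$ f(x) \approx \sum_{i=1}^{k} a_i \tanh(w_i x + b_i) \qquad \text{vs.} \qquad f(x) \approx \sum_{n=0}^{k} \big( c_n \cos(nx) + s_n \sin(nx) \big) $$

In both cases you fit coefficients against a family of simple basis functions; the network just learns its own “frequencies” $w_i$ and offsets $b_i$ instead of having them fixed in advance.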

The code fits the cosine function and uses the Plotly library for the charts; the essence really is the layering of dense connections and the initialization.

    const N = 100;  // number of sample points on the interval
    const M = 10;   // number of training rounds
    var loss = [];

    // Append the latest loss value and redraw the loss curve.
    async function plot_loss(h) {
      loss.push(h.history.loss[0]);
      Plotly.newPlot('loss', [{
        x: _.range(loss.length),
        y: loss,
        name: "loss",
        type: 'scatter'
      }], {
        title: "Current loss: " + Math.round(h.history.loss[0] * 10000) / 10000,
        xaxis: {
          title: "run"
        },
        yaxis: {
          title: "loss"
        }
      });
    }

    // Small pause so the browser can repaint between training rounds.
    function sleep(ms) {
      return new Promise(resolve => setTimeout(resolve, ms));
    }

The TensorFlow team did a great job in ensuring API similarity. So, if you’re familiar with the Python API, this kind of code can be assembled in a meeting.

    async function run() {

      const b = _.range(N);  // sample points 0 .. N-1

      // A small stack of dense layers: 1 -> 15 -> 15 -> 15 -> 1,
      // tanh activations in the middle, random-normal initialization.
      const model = tf.sequential();
      model.add(tf.layers.dense({
        name: "input",
        units: 1,
        inputShape: [1]
      }));
      model.add(tf.layers.dense({
        name: "learning_stack_1",
        activation: "tanh",
        kernelInitializer: "randomNormal",
        units: 15
      }));
      model.add(tf.layers.dense({
        name: "learning_stack_2",
        activation: "tanh",
        kernelInitializer: "randomNormal",
        units: 15
      }));
      model.add(tf.layers.dense({
        name: "learning_stack_3",
        activation: "tanh",
        kernelInitializer: "randomNormal",
        units: 15
      }));
      model.add(tf.layers.dense({
        name: "outputter",
        activation: "linear",
        kernelInitializer: "randomNormal",
        units: 1
      }));

      model.compile({
        loss: 'meanSquaredError',
        optimizer: 'adam'
      });

      // One full period of the cosine over the sampled interval.
      const xs = tf.tensor2d(b, [N, 1]);
      const ys = tf.tensor2d(_.map(b, x => Math.cos(2 * Math.PI * x / N)), [N, 1]);

      // Train in M - 1 rounds of 10 epochs each, updating the loss plot
      // and the progress bar after every round.
      for (let i = 1; i < M; ++i) {
        const h = await model.fit(xs, ys, {
          batchSize: 10,
          epochs: 10
        });
        plot_loss(h);
        $("#done").css("width", `${100 * i / (M - 1)}%`).text(`${Math.round(100 * i / (M - 1))}%`);
        await sleep(100);
      }

      // Plot the learned curve against the actual one.
      model.predict(xs).data().then(function(p) {
        var actual = {
          x: b,
          y: _.map(b, x => Math.cos(2 * Math.PI * x / N)),
          name: "actual data",
          type: 'scatter',
          mode: 'lines',
        };
        var predicted = {
          x: b,
          y: p,
          name: "predicted data",
          type: 'scatter'
        };

        Plotly.newPlot('result', [actual, predicted]);
      });
    }
     
    run();
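For completeness, here is a minimal page scaffold the snippet assumes: `loss` and `result` divs for Plotly, a `done` element styled as a progress bar for jQuery to update, and CDN builds of the four libraries. The exact URLs and versions below are illustrative, not from the original setup:

    <!DOCTYPE html>
    <html>
    <head>
      <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"></script>
      <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
      <script src="https://cdn.jsdelivr.net/npm/lodash@4.17.21/lodash.min.js"></script>
      <script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
    </head>
    <body>
      <!-- progress bar filled by the training loop -->
      <div id="done" style="background: #4caf50; width: 0%; text-align: center;">0%</div>
      <div id="loss"></div>
      <div id="result"></div>
      <script>
        /* the code above goes here */
      </script>
    </body>
    </html>

One caveat worth knowing: TensorFlow.js tensors hold on to (possibly GPU) memory, so in anything longer-lived than a demo you’d dispose of `xs`, `ys`, and the prediction tensor with `dispose()` once you’re done with them.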