# hyperparameters - javascript hyperparameters search

:warning: Early version, subject to changes.

## Installation

```
$ npm install hyperparameters
```

## Parameter expressions

```
import * as hpjs from 'hyperparameters';
```
- `hpjs.choice(options)` - randomly returns one of the options
- `hpjs.randint(upper)` - returns a random integer in the range [0, upper)
- `hpjs.uniform(low, high)` - returns a single value uniformly between low and high, i.e. any value between low and high has an equal probability of being selected
- `hpjs.quniform(low, high, q)` - returns a quantized value of hpjs.uniform, calculated as round(uniform(low, high) / q) * q
- `hpjs.loguniform(low, high)` - returns a value exp(uniform(low, high)), so the logarithm of the return value is uniformly distributed
- `hpjs.qloguniform(low, high, q)` - returns a value round(exp(uniform(low, high)) / q) * q
- `hpjs.normal(mu, sigma)` - returns a real number that is normally distributed with mean mu and standard deviation sigma
- `hpjs.qnormal(mu, sigma, q)` - returns a value round(normal(mu, sigma) / q) * q
- `hpjs.lognormal(mu, sigma)` - returns a value exp(normal(mu, sigma))
- `hpjs.qlognormal(mu, sigma, q)` - returns a value round(exp(normal(mu, sigma)) / q) * q
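Each expression describes a distribution to sample from rather than a fixed value. As a minimal sketch (using `hpjs.sample.randomSample`, documented below, applied here to a single expression in the same way `fmin` accepts a single-expression space):

```
import * as hpjs from 'hyperparameters';

// hpjs.quniform(50, 250, 50) samples uniformly from [50, 250] and rounds to a multiple of 50,
// so a draw should be one of 50, 100, 150, 200 or 250.
const epochs = hpjs.quniform(50, 250, 50);
console.log(hpjs.sample.randomSample(epochs));
```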
## RandomState

```
import { RandomState } from 'hyperparameters';
```

example:

```
const rng = new RandomState(12345);
console.log(rng.randrange(0, 5, 0.5));
```
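A fixed seed is what makes a search reproducible (see the `rng` option to `fmin` below). As a quick sketch, assuming the usual behaviour of a seeded generator, two instances created with the same seed should produce the same draws:

```
import { RandomState } from 'hyperparameters';

const a = new RandomState(12345);
const b = new RandomState(12345);

// Identically seeded generators are expected to yield identical sequences.
console.log(a.randrange(0, 5, 0.5) === b.randrange(0, 5, 0.5)); // expected: true
```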
## sample.randomSample

Draws one random sample from a search space.

```
import { sample } from 'hyperparameters';
```

example:

```
import * as hpjs from 'hyperparameters';

const space = {
  x: hpjs.normal(0, 2),
  y: hpjs.uniform(0, 1),
  choice: hpjs.choice([
    undefined, hpjs.uniform(0, 1),
  ]),
  array: [
    hpjs.normal(0, 2), hpjs.uniform(0, 3), hpjs.choice([false, true]),
  ],
  obj: {
    u: hpjs.uniform(0, 3),
    v: hpjs.uniform(0, 3),
    w: hpjs.uniform(-3, 0)
  }
};
console.log(hpjs.sample.randomSample(space));
```
## fmin - find the best value of a function over its arguments

```
import * as hpjs from 'hyperparameters';

const trials = hpjs.fmin(optimizationFunction, space, estimator, max_estimates, options);
```

`fmin` returns a promise that resolves to the trials object; the best parameters found are available as `trials.argmin`.

example:

```
import * as hpjs from 'hyperparameters';

const fn = x => ((x ** 2) - (x + 1));
const space = hpjs.uniform(-5, 5);

hpjs.fmin(fn, space, hpjs.search.randomSearch, 1000, { rng: new hpjs.RandomState(123456) })
  .then(trials => console.log(trials.argmin));
```
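For more involved objectives the optimization function is usually async and returns an object with the loss and a status flag, as in the tensorflow.js walkthrough below. A minimal sketch of that pattern (run inside an async function), using the same toy objective over an object-shaped space:

```
import * as hpjs from 'hyperparameters';

// The objective receives a sample from the space and reports its loss.
const opt = async ({ x }) => ({ loss: (x ** 2) - (x + 1), status: hpjs.STATUS_OK });

const space = { x: hpjs.uniform(-5, 5) };

const trials = await hpjs.fmin(
  opt, space, hpjs.search.randomSearch, 1000,
  { rng: new hpjs.RandomState(123456) },
);
console.log(trials.argmin.x); // best x found by the random search
```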
## Getting started with tensorflow.js

### Via a script tag

* include the (latest) version from a cdn
* create the search space

```
const space = {
  optimizer: hpjs.choice(['sgd', 'adam', 'adagrad', 'rmsprop']),
  epochs: hpjs.quniform(50, 250, 50),
};
```

* create the tensorflow.js train function. Its parameters are the optimizer and the number of epochs; the input and output data are passed as the second argument

```
const trainModel = async ({ optimizer, epochs }, { xs, ys }) => {
  // Create a simple model.
  const model = tf.sequential();
  model.add(tf.layers.dense({ units: 1, inputShape: [1] }));
  // Prepare the model for training: specify the loss and the optimizer.
  model.compile({
    loss: 'meanSquaredError',
    optimizer,
  });
  // Train the model using the data.
  const h = await model.fit(xs, ys, { epochs });
  return { model, loss: h.history.loss[h.history.loss.length - 1] };
};
```

* create the optimization function

```
const modelOpt = async ({ optimizer, epochs }, { xs, ys }) => {
  const { loss } = await trainModel({ optimizer, epochs }, { xs, ys });
  return { loss, status: hpjs.STATUS_OK };
};
```

* find the optimal hyperparameters

```
const trials = await hpjs.fmin(
  modelOpt, space, hpjs.search.randomSearch, 10,
  { rng: new hpjs.RandomState(654321), xs, ys },
);
const opt = trials.argmin;
console.log('best optimizer', opt.optimizer);
console.log('best no of epochs', opt.epochs);
```
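The snippets above (and the npm variant below) assume `xs` and `ys` already hold the training data as tensors. A minimal, illustrative way to create such data (these values are not part of the library, just the classic y = 2x - 1 toy dataset) would be:

```
// Illustrative training data for the single-unit model above: y = 2x - 1.
const xs = tf.tensor2d([-1, 0, 1, 2, 3, 4], [6, 1]);
const ys = tf.tensor2d([-3, -1, 1, 3, 5, 7], [6, 1]);
```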
### Via npm

* install hyperparameters

```
$ npm install hyperparameters
```

* import hyperparameters

```
import * as tf from '@tensorflow/tfjs';
import * as hpjs from 'hyperparameters';
```

* create the search space

```
const space = {
  optimizer: hpjs.choice(['sgd', 'adam', 'adagrad', 'rmsprop']),
  epochs: hpjs.quniform(50, 250, 50),
};
```

* create the tensorflow.js train function. Its parameters are the optimizer and the number of epochs; the input and output data are passed as the second argument

```
const trainModel = async ({ optimizer, epochs }, { xs, ys }) => {
  // Create a simple model.
  const model = tf.sequential();
  model.add(tf.layers.dense({ units: 1, inputShape: [1] }));
  // Prepare the model for training: specify the loss and the optimizer.
  model.compile({
    loss: 'meanSquaredError',
    optimizer,
  });
  // Train the model using the data.
  const h = await model.fit(xs, ys, { epochs });
  return { model, loss: h.history.loss[h.history.loss.length - 1] };
};
```

* create the optimization function

```
const modelOpt = async ({ optimizer, epochs }, { xs, ys }) => {
  const { loss } = await trainModel({ optimizer, epochs }, { xs, ys });
  return { loss, status: hpjs.STATUS_OK };
};
```

* find the optimal hyperparameters

```
const trials = await hpjs.fmin(
  modelOpt, space, hpjs.search.randomSearch, 10,
  { rng: new hpjs.RandomState(654321), xs, ys },
);
const opt = trials.argmin;
console.log('best optimizer', opt.optimizer);
console.log('best no of epochs', opt.epochs);
```
## License

MIT © Atanas Stoyanov & Martin Stoyanov