"use strict";

import path from 'path';
import fs from 'fs';

import tf from '@tensorflow/tfjs-node-gpu';
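
/**
 * Trains a neural network for each gateway to predict its coverage: each
 * model takes a (latitude, longitude) pair and predicts the RSSI at that
 * point. Trained models are serialised to disk alongside an index.json
 * that describes them.
 */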
class AITrainer {
	constructor({ settings, log, root_dir, GatewayRepo, DatasetFetcher }) {
		this.settings = settings;
		this.root_dir = root_dir;
		this.l = log;
		this.dataset_fetcher = DatasetFetcher;
		this.repo_gateway = GatewayRepo;
	}
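
	/**
	 * Creates and compiles a new (untrained) model: 2 inputs (latitude and
	 * longitude), a 256-node sigmoid hidden layer, and a single sigmoid
	 * output (the normalised RSSI). Uses the Adam optimiser with an absolute
	 * difference (i.e. mean absolute error) loss.
	 * @return {tf.Sequential} The freshly compiled model.
	 */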
	generate_model() {
		let model = tf.sequential();
		model.add(tf.layers.dense({
			units: 256, // 256 nodes in the hidden layer
			activation: "sigmoid", // Sigmoid activation function
			inputShape: [2] // 2 inputs - latitude and longitude
		}));
		model.add(tf.layers.dense({
			units: 1, // 1 output value - the RSSI
			activation: "sigmoid" // Sigmoid, not the softmax the example code uses: softmax is best suited to classification, and we're predicting a single continuous value
		}));
		
		model.compile({
			optimizer: tf.train.adam(),
			loss: tf.losses.absoluteDifference,
			metrics: [ tf.metrics.meanSquaredError ]
		});
		
		this.l.log(`Model:`);
		model.summary();
		
		return model;
	}
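
	/**
	 * Trains a model for every gateway in the gateway repository, then writes
	 * an index.json listing the trained gateways and the RSSI normalisation
	 * bounds used.
	 * @return {Promise} A promise that resolves when training and serialisation are complete for all gateways.
	 */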
	async train_all() {
		let index = [];
		for(let gateway of this.repo_gateway.iterate()) {
			let filename = path.join(this.root_dir, "..", this.settings.ai.output_directory, `${gateway.id}`);
			this.l.log(`Training AI for gateway ${gateway.id}, saving to ${filename}`);
			
			if(!fs.existsSync(path.dirname(filename)))
				await fs.promises.mkdir(path.dirname(filename), { recursive: true });
			
			if(!await this.train_gateway(gateway.id, filename)) {
				this.l.warn(`Warning: Failed to train AI for ${gateway.id}.`);
				continue;
			}
			
			index.push({
				id: gateway.id,
				latitude: gateway.latitude,
				longitude: gateway.longitude
			});
		}
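		
		// Write an index of the trained models. The rssi_min / rssi_max bounds
		// are included because the models output sigmoid-normalised values in
		// [0, 1], so consumers of the index need these bounds to map
		// predictions back to real RSSI values.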
		await fs.promises.writeFile(
			path.join(
				path.dirname(this.root_dir),
				this.settings.ai.output_directory,
				"index.json"
			),
			JSON.stringify({
				properties: {
					rssi_min: this.settings.ai.rssi_min,
					rssi_max: this.settings.ai.rssi_max
				},
				index
			})
		);
	}
	
	/**
	 * Trains an AI to predict the coverage of a specific gateway.
	 * @param {string} gateway_id The id of the gateway to train an AI for.
	 * @param {string} destination_filename The absolute path to the file to serialise the trained model to. Required because we can't serialise and return a TensorFlow model directly - it has to be written somewhere, because the API is backwards and upside-down :-/
	 * @return {Promise<boolean>} A promise that resolves to whether training and serialisation completed successfully.
	 */
	async train_gateway(gateway_id, destination_filename) {
		let model = this.generate_model();
		
		// TODO: Add samples here for locations that the gateway does NOT cover too
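		// tf.data.generator() expects a function that returns a (possibly
		// async) iterator, so the fetcher methods are bound to pre-fill the
		// gateway_id argument. fetch_input is assumed to yield
		// [ latitude, longitude ] pairs, and fetch_output the matching
		// normalised RSSI values.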
		let dataset_input = tf.data.generator(
			this.dataset_fetcher.fetch_input.bind(this.dataset_fetcher, gateway_id)
		);
		let dataset_output = tf.data.generator(
			this.dataset_fetcher.fetch_output.bind(this.dataset_fetcher, gateway_id)
		);
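		
		// Zip the input and output streams together into the { xs, ys } pairs
		// that fitDataset() expects, shuffle with a buffer 4 batches deep, and
		// group into batches ready for training.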
		let dataset = tf.data.zip({
			xs: dataset_input,
			ys: dataset_output
		}).shuffle(this.settings.ai.batch_size * 4)
			.batch(this.settings.ai.batch_size);
		
		let result = await model.fitDataset(dataset, {
			epochs: this.settings.ai.epochs
			// Note: no batchSize option here - fitDataset() consumes the pre-batched dataset built above
		});
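		
		// Serialise the trained model to disk. With the file:// scheme,
		// tfjs-node writes a model.json (the topology) and a weights.bin to
		// the target directory.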
		await model.save(`file://${destination_filename}`);
		this.l.log(`Training complete. Final loss: ${result.history.loss[result.history.loss.length - 1]}`);
		
		return true;
	}
}

export default AITrainer;
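
// Example usage - a minimal sketch. The exact shapes of settings, GatewayRepo,
// and DatasetFetcher are defined elsewhere in the project, so the wiring below
// (including the filename) is hypothetical:
//
//     import AITrainer from './AITrainer.mjs';
//
//     const trainer = new AITrainer({
//         settings,        // application settings; must provide settings.ai.*
//         log: console,    // anything exposing .log() and .warn()
//         root_dir,        // absolute path to the application's root directory
//         GatewayRepo,     // exposes iterate() over { id, latitude, longitude }
//         DatasetFetcher   // exposes the fetch_input() / fetch_output() generators
//     });
//     await trainer.train_all();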