processor.php 19.8 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle.  If not, see <http://www.gnu.org/licenses/>.

/**
 * Php predictions processor
 *
 * @package   mlbackend_php
 * @copyright 2016 David Monllao {@link http://www.davidmonllao.com}
 * @license   http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */

namespace mlbackend_php;

27
defined('MOODLE_INTERNAL') || die();
28

29
use Phpml\Preprocessing\Normalizer;
30
31
use Phpml\CrossValidation\RandomSplit;
use Phpml\Dataset\ArrayDataset;
32
use Phpml\ModelManager;
33
use Phpml\Classification\Linear\LogisticRegression;
34
35
36
37
38
39
40
41

/**
 * PHP predictions processor.
 *
 * @package   mlbackend_php
 * @copyright 2016 David Monllao {@link http://www.davidmonllao.com}
 * @license   http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
42
class processor implements \core_analytics\classifier, \core_analytics\regressor, \core_analytics\packable {

    /**
     * Number of samples processed per training / prediction batch, to cap memory usage.
     */
    const BATCH_SIZE = 5000;

    /**
     * Number of iterations used to train the logistic regression classifier.
     */
    const TRAIN_ITERATIONS = 500;

    /**
     * File name of the serialised model stored inside the model output directory.
     */
    const MODEL_FILENAME = 'model.ser';

    /**
     * @var bool Whether the evaluation dataset was truncated to fit within the memory limit.
     */
    protected $limitedsize = false;

64
65
66
67
68
    /**
     * Checks if the processor is ready to use.
     *
     * @return true|string True when ready, an error string when the PHP version is too old.
     */
    public function is_ready() {

        // PHP 7 is the minimum supported runtime for this backend.
        if (version_compare(phpversion(), '7.0.0', '<')) {
            return get_string('errorphp7required', 'mlbackend_php');
        }

        return true;
    }

76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
    /**
     * Delete the stored models.
     *
     * Recursively removes the model version output directory (which contains the
     * serialised model file).
     *
     * @param string $uniqueid The model unique id (unused here, directory is already version-specific).
     * @param string $modelversionoutputdir The directory to remove.
     * @return null
     */
    public function clear_model($uniqueid, $modelversionoutputdir) {
        remove_dir($modelversionoutputdir);
    }

    /**
     * Delete the output directory.
     *
     * Recursively removes the whole model output directory, including all of its
     * model version subdirectories.
     *
     * @param string $modeloutputdir The directory to remove.
     * @param string $uniqueid The model unique id (unused here, directory is already model-specific).
     * @return null
     */
    public function delete_output_dir($modeloutputdir, $uniqueid) {
        remove_dir($modeloutputdir);
    }

98
    /**
     * Train this processor classification model using the provided supervised learning dataset.
     *
     * The dataset file is streamed line by line and the classifier is trained incrementally
     * in batches of self::BATCH_SIZE samples so that large datasets do not exhaust memory.
     * If a serialised model already exists in $outputdir it is restored first and training
     * continues on top of it.
     *
     * @param string $uniqueid
     * @param \stored_file $dataset CSV file: two metadata lines, one header line, then one sample per line.
     * @param string $outputdir Directory where the serialised model is (or will be) stored.
     * @return \stdClass Result object with ->status and ->info.
     */
    public function train_classification($uniqueid, \stored_file $dataset, $outputdir) {

        $modelfilepath = $this->get_model_filepath($outputdir);

        $modelmanager = new ModelManager();

        // Restore the previously trained model if there is one, otherwise start fresh.
        if (file_exists($modelfilepath)) {
            $classifier = $modelmanager->restoreFromFile($modelfilepath);
        } else {
            $classifier = $this->instantiate_algorithm();
        }

        $fh = $dataset->get_content_file_handle();

        // The first lines are var names and the second one values.
        $metadata = $this->extract_metadata($fh);

        // Skip headers.
        fgets($fh);

        $samples = array();
        $targets = array();
        while (($data = fgetcsv($fh)) !== false) {
            $sampledata = array_map('floatval', $data);
            // The first nfeatures columns are the sample features, the next column is the target class.
            $samples[] = array_slice($sampledata, 0, $metadata['nfeatures']);
            $targets[] = intval($data[$metadata['nfeatures']]);

            $nsamples = count($samples);
            if ($nsamples === self::BATCH_SIZE) {
                // Training in batches to avoid running out of memory.
                $classifier->partialTrain($samples, $targets, json_decode($metadata['targetclasses']));
                $samples = array();
                $targets = array();
            }
            // Remember whether we ever accumulated more than one sample; a dataset with
            // fewer than two samples can not be used for training.
            if (empty($morethan1sample) && $nsamples > 1) {
                $morethan1sample = true;
            }
        }
        fclose($fh);

        // Bail out early when the dataset did not contain enough samples to train anything.
        if (empty($morethan1sample)) {
            $resultobj = new \stdClass();
            $resultobj->status = \core_analytics\model::NO_DATASET;
            $resultobj->info = array();
            return $resultobj;
        }

        // Train the remaining samples.
        if ($samples) {
            $classifier->partialTrain($samples, $targets, json_decode($metadata['targetclasses']));
        }

        $resultobj = new \stdClass();
        $resultobj->status = \core_analytics\model::OK;
        $resultobj->info = array();

        // Store the trained model.
        $modelmanager->saveToFile($classifier, $modelfilepath);

        return $resultobj;
    }

168
    /**
     * Classifies the provided dataset samples using the previously trained model.
     *
     * Samples are streamed from the dataset file and predicted in batches of
     * self::BATCH_SIZE to avoid running out of memory; $sampleids keys are kept
     * in sync with $predictions keys so they can be zipped together at the end.
     *
     * @param string $uniqueid
     * @param \stored_file $dataset CSV file: two metadata lines, one header line, then one sample per line.
     * @param string $outputdir Directory that contains the serialised trained model.
     * @return \stdClass Result object with ->status, ->info and ->predictions.
     */
    public function classify($uniqueid, \stored_file $dataset, $outputdir) {

        $classifier = $this->load_classifier($outputdir);

        $fh = $dataset->get_content_file_handle();

        // The first lines are var names and the second one values.
        $metadata = $this->extract_metadata($fh);

        // Skip headers.
        fgets($fh);

        $sampleids = array();
        $samples = array();
        $predictions = array();
        while (($data = fgetcsv($fh)) !== false) {
            $sampledata = array_map('floatval', $data);
            // First column is the sample id, the following nfeatures columns are the features.
            $sampleids[] = $data[0];
            $samples[] = array_slice($sampledata, 1, $metadata['nfeatures']);

            if (count($samples) === self::BATCH_SIZE) {
                // Prediction in batches to avoid running out of memory.

                // Append predictions incrementally, we want $sampleids keys in sync with $predictions keys.
                $newpredictions = $classifier->predict($samples);
                foreach ($newpredictions as $prediction) {
                    array_push($predictions, $prediction);
                }
                $samples = array();
            }
        }
        fclose($fh);

        // Finish the remaining predictions. Note that array_merge (not the + union operator)
        // must be used here: predict() returns a zero-indexed array whose keys collide with the
        // keys already appended above, and + keeps the left-hand values, which would silently
        // drop every remaining prediction whenever at least one full batch was processed.
        if ($samples) {
            $predictions = array_merge($predictions, $classifier->predict($samples));
        }

        $resultobj = new \stdClass();
        $resultobj->status = \core_analytics\model::OK;
        $resultobj->info = array();

        foreach ($predictions as $index => $prediction) {
            $resultobj->predictions[$index] = array($sampleids[$index], $prediction);
        }

        return $resultobj;
    }

    /**
     * Evaluates this processor classification model using the provided supervised learning dataset.
     *
     * During evaluation we need to shuffle the evaluation dataset samples to detect deviated results,
     * if the dataset is massive we can not load everything into memory. We know that 2GB is the
     * minimum memory limit we should have (\core_analytics\model::heavy_duty_mode), if we substract the memory
     * that we already consumed and the memory that Phpml algorithms will need we should still have at
     * least 500MB of memory, which should be enough to evaluate a model. In any case this is a robust
     * solution that will work for all sites but it should minimize memory limit problems. Site admins
     * can still set $CFG->mlbackend_php_no_evaluation_limits to true to skip this 500MB limit.
     *
     * @param string $uniqueid
     * @param float $maxdeviation Maximum accepted standard deviation between iteration scores.
     * @param int $niterations Number of times the model is trained and evaluated.
     * @param \stored_file $dataset
     * @param string $outputdir
     * @param string $trainedmodeldir Optional directory with an already-trained model to evaluate.
     * @return \stdClass
     */
    public function evaluate_classification($uniqueid, $maxdeviation, $niterations, \stored_file $dataset,
            $outputdir, $trainedmodeldir) {
        // Fix: $CFG must be imported into the method scope. Without this declaration $CFG is an
        // undefined local variable, the empty() checks below are always true and the
        // $CFG->mlbackend_php_no_evaluation_limits setting documented above can never take effect.
        global $CFG;

        $fh = $dataset->get_content_file_handle();

        if ($trainedmodeldir) {
            // We overwrite the number of iterations as the results will always be the same.
            $niterations = 1;
            $classifier = $this->load_classifier($trainedmodeldir);
        }

        // The first lines are var names and the second one values.
        $metadata = $this->extract_metadata($fh);

        // Skip headers.
        fgets($fh);

        if (empty($CFG->mlbackend_php_no_evaluation_limits)) {
            $samplessize = 0;
            $limit = get_real_size('500MB');

            // Just an approximation, will depend on PHP version, compile options...
            // Double size + zval struct (6 bytes + 8 bytes + 16 bytes) + array bucket (96 bytes)
            // https://nikic.github.io/2011/12/12/How-big-are-PHP-arrays-really-Hint-BIG.html.
            $floatsize = (PHP_INT_SIZE * 2) + 6 + 8 + 16 + 96;
        }

        $samples = array();
        $targets = array();
        while (($data = fgetcsv($fh)) !== false) {
            $sampledata = array_map('floatval', $data);

            $samples[] = array_slice($sampledata, 0, $metadata['nfeatures']);
            $targets[] = intval($data[$metadata['nfeatures']]);

            if (empty($CFG->mlbackend_php_no_evaluation_limits)) {
                // We allow admins to disable evaluation memory usage limits by modifying config.php.

                // We will have plenty of missing values in the dataset so it should be a conservative approximation.
                $samplessize = $samplessize + (count($sampledata) * $floatsize);

                // Stop fetching more samples.
                if ($samplessize >= $limit) {
                    $this->limitedsize = true;
                    break;
                }
            }
        }
        fclose($fh);

        // We need at least 2 samples belonging to each target.
        $counts = array_count_values($targets);
        $ntargets = count(explode(',', $metadata['targetclasses']));
        foreach ($counts as $count) {
            if ($count < 2) {
                $notenoughdata = true;
            }
        }
        // Also fail when some of the declared target classes have no samples at all.
        if ($ntargets > count($counts)) {
            $notenoughdata = true;
        }
        if (!empty($notenoughdata)) {
            $resultobj = new \stdClass();
            $resultobj->status = \core_analytics\model::NOT_ENOUGH_DATA;
            $resultobj->score = 0;
            $resultobj->info = array(get_string('errornotenoughdata', 'mlbackend_php'));
            return $resultobj;
        }

        $phis = array();

        // Evaluate the model multiple times to confirm the results are not significantly random due to a short amount of data.
        for ($i = 0; $i < $niterations; $i++) {

            if (!$trainedmodeldir) {
                $classifier = $this->instantiate_algorithm();

                // Split up the dataset in classifier and testing.
                $data = new RandomSplit(new ArrayDataset($samples, $targets), 0.2);

                $classifier->train($data->getTrainSamples(), $data->getTrainLabels());
                $predictedlabels = $classifier->predict($data->getTestSamples());
                $phis[] = $this->get_phi($data->getTestLabels(), $predictedlabels);
            } else {
                // A pre-trained model is deterministic, so a single prediction pass is enough.
                $predictedlabels = $classifier->predict($samples);
                $phis[] = $this->get_phi($targets, $predictedlabels);
            }
        }

        // Let's fill the results changing the returned status code depending on the phi-related calculated metrics.
        return $this->get_evaluation_result_object($dataset, $phis, $maxdeviation);
    }

336
337
338
339
340
341
342
343
    /**
     * Builds the evaluation result object from the phi coefficients of all iterations.
     *
     * @param \stored_file $dataset The evaluated dataset (used to report its size when truncated).
     * @param array $phis Phi correlation coefficient of each evaluation iteration.
     * @param float $maxdeviation Maximum accepted standard deviation between iterations.
     * @return \stdClass Result object with ->status, ->score and ->info.
     */
    protected function get_evaluation_result_object(\stored_file $dataset, $phis, $maxdeviation) {

        $singleiteration = (count($phis) === 1);

        // Average phi of all evaluations as final score.
        $avgphi = $singleiteration ? reset($phis) : \Phpml\Math\Statistic\Mean::arithmetic($phis);

        // Standard deviation should ideally be calculated against the area under the curve.
        $modeldev = $singleiteration ? 0 : \Phpml\Math\Statistic\StandardDeviation::population($phis);

        // Let's fill the results object. Zero is ok, now we add other bits if something is not right.
        $resultobj = new \stdClass();
        $resultobj->status = \core_analytics\model::OK;
        $resultobj->info = array();

        // Convert phi to a standard score (from -1 to 1 to a value between 0 and 1).
        $resultobj->score = ($avgphi + 1) / 2;

        // If each iteration results varied too much we need more data to confirm that this is a valid model.
        if ($modeldev > $maxdeviation) {
            $resultobj->status = $resultobj->status + \core_analytics\model::NOT_ENOUGH_DATA;

            $a = new \stdClass();
            $a->deviation = $modeldev;
            $a->accepteddeviation = $maxdeviation;
            $resultobj->info[] = get_string('errornotenoughdatadev', 'mlbackend_php', $a);
        }

        // Flag scores below the minimum accepted quality threshold.
        if ($resultobj->score < \core_analytics\model::MIN_SCORE) {
            $resultobj->status = $resultobj->status + \core_analytics\model::LOW_SCORE;

            $a = new \stdClass();
            $a->score = $resultobj->score;
            $a->minscore = \core_analytics\model::MIN_SCORE;
            $resultobj->info[] = get_string('errorlowscore', 'mlbackend_php', $a);
        }

        // Warn when the evaluation dataset had to be truncated to fit in memory.
        if ($this->limitedsize === true) {
            $resultobj->info[] = get_string('datasetsizelimited', 'mlbackend_php', display_size($dataset->get_filesize()));
        }

        return $resultobj;
    }

394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
    /**
     * Loads the pre-trained classifier from the provided output directory.
     *
     * @throws \moodle_exception When no serialised model exists in the directory.
     * @param string $outputdir Directory that should contain the serialised model.
     * @return \Phpml\Classification\Linear\LogisticRegression
     */
    protected function load_classifier($outputdir) {

        $path = $this->get_model_filepath($outputdir);
        if (!file_exists($path)) {
            throw new \moodle_exception('errorcantloadmodel', 'mlbackend_php', '', $path);
        }

        $manager = new ModelManager();
        return $manager->restoreFromFile($path);
    }

412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
    /**
     * Train this processor regression model using the provided supervised learning dataset.
     *
     * Not implemented: this backend only supports classification for now, the
     * method exists to satisfy the \core_analytics\regressor interface.
     *
     * @throws \coding_exception Always.
     * @param string $uniqueid
     * @param \stored_file $dataset
     * @param string $outputdir
     * @return \stdClass
     */
    public function train_regression($uniqueid, \stored_file $dataset, $outputdir) {
        throw new \coding_exception('This predictor does not support regression yet.');
    }

    /**
     * Estimates linear values for the provided dataset samples.
     *
     * Not implemented: this backend only supports classification for now, the
     * method exists to satisfy the \core_analytics\regressor interface.
     *
     * @throws \coding_exception Always.
     * @param string $uniqueid
     * @param \stored_file $dataset
     * @param mixed $outputdir
     * @return void
     */
    public function estimate($uniqueid, \stored_file $dataset, $outputdir) {
        throw new \coding_exception('This predictor does not support regression yet.');
    }

    /**
     * Evaluates this processor regression model using the provided supervised learning dataset.
     *
     * Not implemented: this backend only supports classification for now, the
     * method exists to satisfy the \core_analytics\regressor interface.
     *
     * @throws \coding_exception Always.
     * @param string $uniqueid
     * @param float $maxdeviation
     * @param int $niterations
     * @param \stored_file $dataset
     * @param string $outputdir
     * @param string $trainedmodeldir
     * @return \stdClass
     */
    public function evaluate_regression($uniqueid, $maxdeviation, $niterations, \stored_file $dataset,
            $outputdir, $trainedmodeldir) {
        throw new \coding_exception('This predictor does not support regression yet.');
    }

455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
    /**
     * Exports the machine learning model.
     *
     * @throws \moodle_exception When the trained model file is missing.
     * @param  string $uniqueid  The model unique id
     * @param  string $modeldir  The directory that contains the trained model.
     * @return string            The path to the directory that contains the exported model.
     */
    public function export(string $uniqueid, string $modeldir) : string {

        // A model can only be exported once it has been trained and serialised to disk.
        if (!file_exists($this->get_model_filepath($modeldir))) {
            throw new \moodle_exception('errorexportmodelresult', 'analytics');
        }

        // We can use the actual $modeldir as the directory is not modified during export, just copied into a zip.
        return $modeldir;
    }

    /**
     * Imports the provided machine learning model.
     *
     * The serialised contents are validated before being restored: anything that
     * does not deserialise into a LogisticRegression instance is rejected.
     *
     * @param  string $uniqueid The model unique id
     * @param  string $modeldir  The directory that will contain the trained model.
     * @param  string $importdir The directory that contains the files to import.
     * @return bool Success
     */
    public function import(string $uniqueid, string $modeldir, string $importdir) : bool {

        $importmodelfilepath = $this->get_model_filepath($importdir);
        $modelfilepath = $this->get_model_filepath($modeldir);

        $modelmanager = new ModelManager();

        // Copied from ModelManager::restoreFromFile to validate the serialised contents
        // before restoring them.
        $importconfig = file_get_contents($importmodelfilepath);
        if ($importconfig === false) {
            // Missing or unreadable model file. Checking here also avoids passing false
            // to preg_replace below (deprecated since PHP 8.1).
            return false;
        }

        // Clean stuff like function calls.
        $importconfig = preg_replace('/[^a-zA-Z0-9\{\}%\.\*\;\,\:\"\-\0\\\]/', '', $importconfig);

        // Restrict deserialisation to the one class we expect; anything else becomes
        // a __PHP_Incomplete_Class instance or false.
        $object = unserialize($importconfig,
            ['allowed_classes' => ['Phpml\\Classification\\Linear\\LogisticRegression']]);
        if (!$object) {
            return false;
        }

        if (get_class($object) === '__PHP_Incomplete_Class') {
            return false;
        }

        $classifier = $modelmanager->restoreFromFile($importmodelfilepath);

        // This would override any previous classifier.
        $modelmanager->saveToFile($classifier, $modelfilepath);

        return true;
    }

    /**
     * Returns the path to the serialised model file in the provided directory.
     *
     * @param  string $modeldir The model directory (already unique to the model).
     * @return string           The model file path.
     */
    protected function get_model_filepath(string $modeldir) : string {
        // Output directory is already unique to the model.
        return implode(DIRECTORY_SEPARATOR, [$modeldir, self::MODEL_FILENAME]);
    }

526
527
528
529
530
531
532
    /**
     * Returns the Phi correlation coefficient of the predictions.
     *
     * Phi ranges from -1 (total disagreement) to 1 (perfect agreement); 0 is
     * returned when any row or column of the confusion matrix is empty.
     *
     * @param array $testlabels Expected labels (binary: 0 or 1).
     * @param array $predictedlabels Predicted labels (binary: 0 or 1).
     * @return float|int
     */
    protected function get_phi($testlabels, $predictedlabels) {

        // Binary here only as well.
        $matrix = \Phpml\Metric\ConfusionMatrix::compute($testlabels, $predictedlabels, array(0, 1));

        // Name the confusion matrix cells for readability.
        $tp = $matrix[0][0];
        $fn = $matrix[0][1];
        $fp = $matrix[1][0];
        $tn = $matrix[1][1];

        // Row/column sums that make up the denominator.
        $tpfp = $tp + $fp;
        $tpfn = $tp + $fn;
        $tnfp = $tn + $fp;
        $tnfn = $tn + $fn;

        // An empty row or column would make the denominator zero; define phi as 0 then.
        if ($tpfp === 0 || $tpfn === 0 || $tnfp === 0 || $tnfn === 0) {
            return 0;
        }

        return (($tp * $tn) - ($fp * $fn)) / sqrt($tpfp * $tpfn * $tnfp * $tnfn);
    }

553
554
555
556
557
558
559
560
    /**
     * Extracts metadata from the dataset file.
     *
     * The file pointer should be located at the top of the file; this consumes
     * the first two CSV lines (variable names, then their values) and leaves the
     * pointer after them.
     *
     * @param resource $fh Open dataset file handle.
     * @return array Metadata values indexed by variable name.
     */
    protected function extract_metadata($fh) {
        $varnames = fgetcsv($fh);
        $values = fgetcsv($fh);
        return array_combine($varnames, $values);
    }
565
566
567
568
569
570
571
572
573
574

    /**
     * Instantiates the ML algorithm.
     *
     * A logistic regression classifier trained over self::TRAIN_ITERATIONS
     * iterations with conjugate gradient training and the 'log' cost function.
     *
     * @return \Phpml\Classification\Linear\LogisticRegression
     */
    protected function instantiate_algorithm(): \Phpml\Classification\Linear\LogisticRegression {
        return new LogisticRegression(self::TRAIN_ITERATIONS, true,
            LogisticRegression::CONJUGATE_GRAD_TRAINING, 'log');
    }
575
}