<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle.  If not, see <http://www.gnu.org/licenses/>.

/**
 * PHP predictions processor.
 *
 * @package   mlbackend_php
 * @copyright 2016 David Monllao {@link http://www.davidmonllao.com}
 * @license   http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */

namespace mlbackend_php;

defined('MOODLE_INTERNAL') || die();

use Phpml\Preprocessing\Normalizer;
use Phpml\CrossValidation\RandomSplit;
use Phpml\Dataset\ArrayDataset;
use Phpml\ModelManager;
use Phpml\Classification\Linear\LogisticRegression;
use Phpml\Metric\ClassificationReport;

/**
 * PHP predictions processor.
 *
 * @package   mlbackend_php
 * @copyright 2016 David Monllao {@link http://www.davidmonllao.com}
 * @license   http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
 */
class processor implements \core_analytics\classifier, \core_analytics\regressor, \core_analytics\packable {

    /**
     * Size of training / prediction batches.
     */
    const BATCH_SIZE = 5000;

    /**
     * Number of train iterations.
     */
    const TRAIN_ITERATIONS = 500;

    /**
     * File name of the serialised model.
     */
    const MODEL_FILENAME = 'model.ser';

    /**
     * @var bool Whether the size of the evaluation dataset had to be limited to fit in memory.
     */
    protected $limitedsize = false;

    /**
     * Checks if the processor is ready to use.
     *
     * @return bool|string True if the processor is ready, an error string otherwise.
     */
    public function is_ready() {
        if (version_compare(phpversion(), '7.0.0') < 0) {
            return get_string('errorphp7required', 'mlbackend_php');
        }
        return true;
    }

    /**
     * Delete the stored models.
     *
     * @param string $uniqueid
     * @param string $modelversionoutputdir
     * @return null
     */
    public function clear_model($uniqueid, $modelversionoutputdir) {
        remove_dir($modelversionoutputdir);
    }

    /**
     * Delete the output directory.
     *
     * @param string $modeloutputdir
     * @param string $uniqueid
     * @return null
     */
    public function delete_output_dir($modeloutputdir, $uniqueid) {
        remove_dir($modeloutputdir);
    }

    /**
     * Train this processor classification model using the provided supervised learning dataset.
     *
     * @param string $uniqueid
     * @param \stored_file $dataset
     * @param string $outputdir
     * @return \stdClass
     */
    public function train_classification($uniqueid, \stored_file $dataset, $outputdir) {

        $modelfilepath = $this->get_model_filepath($outputdir);

        $modelmanager = new ModelManager();

        if (file_exists($modelfilepath)) {
            $classifier = $modelmanager->restoreFromFile($modelfilepath);
        } else {
            $classifier = $this->instantiate_algorithm();
        }

        $fh = $dataset->get_content_file_handle();

        // The first line of the file contains the variable names and the second one their values.
        $metadata = $this->extract_metadata($fh);

        // Skip headers.
        fgets($fh);

        $samples = array();
        $targets = array();
        while (($data = fgetcsv($fh)) !== false) {
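            // Columns 0 to nfeatures - 1 contain the feature values; the column at index nfeatures is the target class.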
            $sampledata = array_map('floatval', $data);
            $samples[] = array_slice($sampledata, 0, $metadata['nfeatures']);
            $targets[] = intval($data[$metadata['nfeatures']]);

            $nsamples = count($samples);
            if ($nsamples === self::BATCH_SIZE) {
                // Training in batches to avoid running out of memory.
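                // The third argument passes the full list of target classes so that every batch is
                // trained against the same set of classes.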
                $classifier->partialTrain($samples, $targets, json_decode($metadata['targetclasses']));
                $samples = array();
                $targets = array();
            }
            if (empty($morethan1sample) && $nsamples > 1) {
                $morethan1sample = true;
            }
        }
        fclose($fh);

        if (empty($morethan1sample)) {
            $resultobj = new \stdClass();
            $resultobj->status = \core_analytics\model::NO_DATASET;
            $resultobj->info = array();
            return $resultobj;
        }

        // Train the remaining samples.
        if ($samples) {
            $classifier->partialTrain($samples, $targets, json_decode($metadata['targetclasses']));
        }

        $resultobj = new \stdClass();
        $resultobj->status = \core_analytics\model::OK;
        $resultobj->info = array();

        // Store the trained model.
        $modelmanager->saveToFile($classifier, $modelfilepath);

        return $resultobj;
    }

    /**
     * Classifies the provided dataset samples.
     *
     * @param string $uniqueid
     * @param \stored_file $dataset
     * @param string $outputdir
     * @return \stdClass
     */
    public function classify($uniqueid, \stored_file $dataset, $outputdir) {

        $classifier = $this->load_classifier($outputdir);

        $fh = $dataset->get_content_file_handle();

        // The first line of the file contains the variable names and the second one their values.
        $metadata = $this->extract_metadata($fh);

        // Skip headers.
        fgets($fh);

        $sampleids = array();
        $samples = array();
        $predictions = array();
        while (($data = fgetcsv($fh)) !== false) {
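            // The first column is the sample id; the remaining columns are the feature values.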
            $sampledata = array_map('floatval', $data);
            $sampleids[] = $data[0];
            $samples[] = array_slice($sampledata, 1, $metadata['nfeatures']);

            if (count($samples) === self::BATCH_SIZE) {
                // Predicting in batches to avoid running out of memory.

                // Append predictions incrementally; we want $sampleids keys in sync with $predictions keys.
                $newpredictions = $classifier->predict($samples);
                foreach ($newpredictions as $prediction) {
                    array_push($predictions, $prediction);
                }
                $samples = array();
            }
        }
        fclose($fh);

        // Finish the remaining predictions.
        if ($samples) {
            $predictions = array_merge($predictions, $classifier->predict($samples));
        }

        $resultobj = new \stdClass();
        $resultobj->status = \core_analytics\model::OK;
        $resultobj->info = array();

        foreach ($predictions as $index => $prediction) {
            $resultobj->predictions[$index] = array($sampleids[$index], $prediction);
        }

        return $resultobj;
    }

    /**
     * Evaluates this processor classification model using the provided supervised learning dataset.
     *
     * During evaluation we need to shuffle the evaluation dataset samples to detect deviated results;
     * if the dataset is massive we cannot load everything into memory. We know that 2GB is the
     * minimum memory limit we should have (\core_analytics\model::heavy_duty_mode). If we subtract the memory
     * that we already consumed and the memory that the Phpml algorithms will need, we should still have at
     * least 500MB of memory, which should be enough to evaluate a model. This is not a complete
     * solution that will work for all sites, but it should minimize memory limit problems. Site admins
     * can still set $CFG->mlbackend_php_no_evaluation_limits to true to skip this 500MB limit.
     *
     * @param string $uniqueid
     * @param float $maxdeviation
     * @param int $niterations
     * @param \stored_file $dataset
     * @param string $outputdir
     * @param  string $trainedmodeldir
     * @return \stdClass
     */
    public function evaluate_classification($uniqueid, $maxdeviation, $niterations, \stored_file $dataset,
            $outputdir, $trainedmodeldir) {
        global $CFG;

        $fh = $dataset->get_content_file_handle();

        if ($trainedmodeldir) {
            // We overwrite the number of iterations as the results will always be the same.
            $niterations = 1;
            $classifier = $this->load_classifier($trainedmodeldir);
        }

        // The first line of the file contains the variable names and the second one their values.
        $metadata = $this->extract_metadata($fh);

        // Skip headers.
        fgets($fh);

        if (empty($CFG->mlbackend_php_no_evaluation_limits)) {
            $samplessize = 0;
            $limit = get_real_size('500MB');

            // Just an approximation, will depend on PHP version, compile options...
            // Double size + zval struct (6 bytes + 8 bytes + 16 bytes) + array bucket (96 bytes)
            // https://nikic.github.io/2011/12/12/How-big-are-PHP-arrays-really-Hint-BIG.html.
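            // On a 64-bit build this works out to 16 + 6 + 8 + 16 + 96 = 142 bytes per value, so the
            // 500MB cap allows roughly 3.7 million float values to be kept in memory.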
            $floatsize = (PHP_INT_SIZE * 2) + 6 + 8 + 16 + 96;
        }

        $samples = array();
        $targets = array();
        while (($data = fgetcsv($fh)) !== false) {
            $sampledata = array_map('floatval', $data);

            $samples[] = array_slice($sampledata, 0, $metadata['nfeatures']);
            $targets[] = intval($data[$metadata['nfeatures']]);

            if (empty($CFG->mlbackend_php_no_evaluation_limits)) {
                // We allow admins to disable evaluation memory usage limits by modifying config.php.

                // We will have plenty of missing values in the dataset so it should be a conservative approximation.
                $samplessize = $samplessize + (count($sampledata) * $floatsize);

                // Stop fetching more samples.
                if ($samplessize >= $limit) {
                    $this->limitedsize = true;
                    break;
                }
            }
        }
        fclose($fh);

        // We need at least 2 samples belonging to each target.
        $counts = array_count_values($targets);
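        // targetclasses is a JSON-encoded list of classes (e.g. "[0,1]"), so splitting it on commas
        // gives the number of expected target classes.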
        $ntargets = count(explode(',', $metadata['targetclasses']));
        foreach ($counts as $count) {
            if ($count < 2) {
                $notenoughdata = true;
            }
        }
        if ($ntargets > count($counts)) {
            $notenoughdata = true;
        }
        if (!empty($notenoughdata)) {
            $resultobj = new \stdClass();
            $resultobj->status = \core_analytics\model::NOT_ENOUGH_DATA;
            $resultobj->score = 0;
            $resultobj->info = array(get_string('errornotenoughdata', 'mlbackend_php'));
            return $resultobj;
        }

        $scores = array();

        // Evaluate the model multiple times to confirm the results are not significantly random due to a short amount of data.
        for ($i = 0; $i < $niterations; $i++) {

            if (!$trainedmodeldir) {
                $classifier = $this->instantiate_algorithm();

                // Split up the dataset into training and testing.
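                // The second argument is the test size: 20% of the samples are held out for testing
                // and the remaining 80% are used for training.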
                $data = new RandomSplit(new ArrayDataset($samples, $targets), 0.2);

                $classifier->train($data->getTrainSamples(), $data->getTrainLabels());
                $predictedlabels = $classifier->predict($data->getTestSamples());
                $report = new ClassificationReport($data->getTestLabels(), $predictedlabels,
                    ClassificationReport::WEIGHTED_AVERAGE);
            } else {
                $predictedlabels = $classifier->predict($samples);
                $report = new ClassificationReport($targets, $predictedlabels,
                    ClassificationReport::WEIGHTED_AVERAGE);
            }
            $averages = $report->getAverage();
            $scores[] = $averages['f1score'];
        }

        // Let's fill the results, changing the returned status code depending on the calculated f1 scores.
        return $this->get_evaluation_result_object($dataset, $scores, $maxdeviation);
    }

    /**
     * Returns the results object built from all the evaluations.
     *
     * @param \stored_file $dataset
     * @param array $scores
     * @param float $maxdeviation
     * @return \stdClass
     */
    protected function get_evaluation_result_object(\stored_file $dataset, $scores, $maxdeviation) {

        // Average f1 score of all evaluations as final score.
        if (count($scores) === 1) {
            $avgscore = reset($scores);
        } else {
            $avgscore = \Phpml\Math\Statistic\Mean::arithmetic($scores);
        }

        // Standard deviation should ideally be calculated against the area under the curve.
        if (count($scores) === 1) {
            $modeldev = 0;
        } else {
            $modeldev = \Phpml\Math\Statistic\StandardDeviation::population($scores);
        }

        // Let's fill the results object.
        $resultobj = new \stdClass();

        // Zero is ok, now we add other bits if something is not right.
        $resultobj->status = \core_analytics\model::OK;
        $resultobj->info = array();
        $resultobj->score = $avgscore;

        // If the results of each iteration varied too much we need more data to confirm that this is a valid model.
        if ($modeldev > $maxdeviation) {
            $resultobj->status = $resultobj->status + \core_analytics\model::NOT_ENOUGH_DATA;
            $a = new \stdClass();
            $a->deviation = $modeldev;
            $a->accepteddeviation = $maxdeviation;
            $resultobj->info[] = get_string('errornotenoughdatadev', 'mlbackend_php', $a);
        }

        if ($resultobj->score < \core_analytics\model::MIN_SCORE) {
            $resultobj->status = $resultobj->status + \core_analytics\model::LOW_SCORE;
            $a = new \stdClass();
            $a->score = $resultobj->score;
            $a->minscore = \core_analytics\model::MIN_SCORE;
            $resultobj->info[] = get_string('errorlowscore', 'mlbackend_php', $a);
        }

        if ($this->limitedsize === true) {
            $resultobj->info[] = get_string('datasetsizelimited', 'mlbackend_php', display_size($dataset->get_filesize()));
        }

        return $resultobj;
    }

    /**
     * Loads the pre-trained classifier.
     *
     * @throws \moodle_exception
     * @param string $outputdir
     * @return \Phpml\Classification\Linear\LogisticRegression
     */
    protected function load_classifier($outputdir) {
        $modelfilepath = $this->get_model_filepath($outputdir);

        if (!file_exists($modelfilepath)) {
            throw new \moodle_exception('errorcantloadmodel', 'mlbackend_php', '', $modelfilepath);
        }

        $modelmanager = new ModelManager();
        return $modelmanager->restoreFromFile($modelfilepath);
    }

    /**
     * Train this processor regression model using the provided supervised learning dataset.
     *
     * @throws \coding_exception
     * @param string $uniqueid
     * @param \stored_file $dataset
     * @param string $outputdir
     * @return \stdClass
     */
    public function train_regression($uniqueid, \stored_file $dataset, $outputdir) {
        throw new \coding_exception('This predictor does not support regression yet.');
    }

    /**
     * Estimates linear values for the provided dataset samples.
     *
     * @throws \coding_exception
     * @param string $uniqueid
     * @param \stored_file $dataset
     * @param string $outputdir
     * @return void
     */
    public function estimate($uniqueid, \stored_file $dataset, $outputdir) {
        throw new \coding_exception('This predictor does not support regression yet.');
    }

    /**
     * Evaluates this processor regression model using the provided supervised learning dataset.
     *
     * @throws \coding_exception
     * @param string $uniqueid
     * @param float $maxdeviation
     * @param int $niterations
     * @param \stored_file $dataset
     * @param string $outputdir
     * @param  string $trainedmodeldir
     * @return \stdClass
     */
    public function evaluate_regression($uniqueid, $maxdeviation, $niterations, \stored_file $dataset,
            $outputdir, $trainedmodeldir) {
        throw new \coding_exception('This predictor does not support regression yet.');
    }

    /**
     * Exports the machine learning model.
     *
     * @throws \moodle_exception
     * @param  string $uniqueid  The model unique id
     * @param  string $modeldir  The directory that contains the trained model.
     * @return string            The path to the directory that contains the exported model.
     */
    public function export(string $uniqueid, string $modeldir) : string {

        $modelfilepath = $this->get_model_filepath($modeldir);

        if (!file_exists($modelfilepath)) {
            throw new \moodle_exception('errorexportmodelresult', 'analytics');
        }

        // We can use the actual $modeldir as the directory is not modified during export, just copied into a zip.
        return $modeldir;
    }

    /**
     * Imports the provided machine learning model.
     *
     * @param  string $uniqueid The model unique id
     * @param  string $modeldir  The directory that will contain the trained model.
     * @param  string $importdir The directory that contains the files to import.
     * @return bool Success
     */
    public function import(string $uniqueid, string $modeldir, string $importdir) : bool {

        $importmodelfilepath = $this->get_model_filepath($importdir);
        $modelfilepath = $this->get_model_filepath($modeldir);

        $modelmanager = new ModelManager();

        // Copied from ModelManager::restoreFromFile to validate the serialised contents
        // before restoring them.
        $importconfig = file_get_contents($importmodelfilepath);

        // Clean stuff like function calls.
        $importconfig = preg_replace('/[^a-zA-Z0-9\{\}%\.\*\;\,\:\"\-\0\\\]/', '', $importconfig);

        $object = unserialize($importconfig,
            ['allowed_classes' => ['Phpml\\Classification\\Linear\\LogisticRegression']]);
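        // unserialize() returns false for malformed data and converts disallowed classes into
        // __PHP_Incomplete_Class objects, hence the two checks below.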
        if (!$object) {
            return false;
        }

        if (get_class($object) == '__PHP_Incomplete_Class') {
            return false;
        }

        $classifier = $modelmanager->restoreFromFile($importmodelfilepath);

        // This would override any previous classifier.
        $modelmanager->saveToFile($classifier, $modelfilepath);

        return true;
    }

    /**
     * Returns the path to the serialised model file in the provided directory.
     *
     * @param  string $modeldir The model directory
     * @return string           The model file
     */
    protected function get_model_filepath(string $modeldir) : string {
        // Output directory is already unique to the model.
        return $modeldir . DIRECTORY_SEPARATOR . self::MODEL_FILENAME;
    }

    /**
     * Extracts metadata from the dataset file.
     *
     * The file pointer should be located at the top of the file.
     *
     * @param resource $fh
     * @return array
     */
    protected function extract_metadata($fh) {
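        // The first CSV line contains the variable names and the second one their values; combine
        // them into an associative array of name => value pairs.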
        $metadata = fgetcsv($fh);
        return array_combine($metadata, fgetcsv($fh));
    }

    /**
     * Instantiates the ML algorithm.
     *
     * @return \Phpml\Classification\Linear\LogisticRegression
     */
    protected function instantiate_algorithm(): \Phpml\Classification\Linear\LogisticRegression {
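        // Maximum number of training iterations, input normalisation enabled, conjugate gradient
        // training and the logistic (log) cost function.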
        return new LogisticRegression(self::TRAIN_ITERATIONS, true,
            LogisticRegression::CONJUGATE_GRAD_TRAINING, 'log');
    }
}