@@ -308,6 +308,7 @@ angular
 * <pre>
 * {
 * id: 'learning_dataset',
+ * type: 'upload_dataset',
 * created_at: '2014-12-14T15:09:08.112Z',
 * updated_at: '2014-12-14T15:08:57.970Z',
 * name: 'Learning dataset',
@@ -341,6 +342,7 @@ angular
 * {
 * ...
 * source_ids: [],
+ * type: 'subset',
 * parent_dataset_id: 'learning_dataset_with_model',
 * sampling: 70,
 * nb_of_lines: null,
@@ -354,6 +356,7 @@ angular
 * {
 * ...
 * source_ids: [],
+ * type: 'subset',
 * parent_dataset_id: 'learning_dataset_with_model',
 * sampling: -70,
 * nb_of_lines: null,
@@ -393,6 +396,7 @@ angular
 * <pre>
 * {
 * ...
+ * type: 'scoreset',
 * classifier_id: '5436431070632d15f4260000',
 * dataset_id: 'scoring_dataset',
 * modalities_set_id: '53fdfa7070632d0fc5030000',
@@ -426,7 +430,8 @@ angular
 * source_ids: ['original_source_id'],
 * header: true,
 * separator: '\t',
- * data_file: { filename: 'source.csv' }
+ * data_file: { filename: 'source.csv' },
+ * type: 'upload_dataset'
 * }
 * </pre>
 *
@@ -442,7 +447,8 @@ angular
 * main_modality: $main_modality$,
 * separator: $separator$,
 * header: $header$,
- * data_file: { filename: $name$ }
+ * data_file: { filename: $name$ },
+ * type: 'scoreset'
 * }
 * </pre>
 *
@@ -481,6 +487,7 @@ angular
   return Sources.create(source)
     .then(function(source) {
       return self.create({
+        type: 'upload_dataset',
         name: fileName,
         source_ids: [source.id],
         data_file: { filename: fileName }
@@ -523,13 +530,15 @@ angular
   sampling = sampling || DEFAULT_SAMPLING;

   var learn = {
+    type: 'subset',
     parent_dataset_id: id,
     name: 'learned_' + name,
     data_file: { filename: 'learned_' + filename },
     sampling: sampling
   };

   var test = {
+    type: 'subset',
     parent_dataset_id: id,
     name: 'tested_' + name,
     data_file: { filename: 'tested_' + filename },
@@ -612,7 +621,7 @@ angular
   })
   .then(function(childrenCandidates) {
     return childrenCandidates.reduce(function(memo, child) {
-      if (child.parent_dataset_id === datasetId) {
+      if (child.dataset_id === datasetId) {
         if (self.isTrainPart(child, DEFAULT_SAMPLING)) {
           memo.train = child;
         } else if (self.isTestPart(child, -DEFAULT_SAMPLING)) {
@@ -716,7 +725,7 @@ angular
 * @return {Boolean} <kbd>true</kbd> / <kbd>false</kbd>
 */
 this.hasChildren = function(dataset) {
-  return Boolean(dataset.children_dataset_ids.length > 0);
+  return Boolean(dataset.children_dataset_ids && dataset.children_dataset_ids.length > 0);
 };

 /**
@@ -729,20 +738,19 @@ angular
 * @return {Boolean} <kbd>true</kbd> / <kbd>false</kbd>
 */
 this.isParent = function(dataset) {
-  return Boolean(dataset.parent_dataset_id === null);
+  return Boolean(dataset.type === 'upload_dataset');
 };

 /**
 * @ngdoc function
 * @methodOf predicsis.jsSDK.models.Datasets
 * @name isChild
 * @description Tells if a dataset is a child dataset
- * <b>Note:</b> A dataset is considered as a child if it has a parent. There is no orphan among datasets!
 * @param {Object} dataset Instance of {@link predicsis.jsSDK.models.Datasets dataset}
 * @return {Boolean} <kbd>true</kbd> / <kbd>false</kbd>
 */
 this.isChild = function(dataset) {
-  return Boolean(dataset.parent_dataset_id !== null);
+  return Boolean(dataset.type === 'subset');
 };

 /**
@@ -810,14 +818,12 @@ angular
 * <li><code>dataset.classifier !== null</code></li>
 * <li><code>dataset.dataset_id !== null</code></li>
 * </ul>
+ * Since the API now exposes a type attribute, this check is much simpler.
 * @param {Object} dataset Instance of {@link predicsis.jsSDK.models.Datasets dataset}
 * @return {Boolean} <kbd>true</kbd> / <kbd>false</kbd>
 */
 this.isScore = function(dataset) {
-  return Boolean(dataset.source_ids.length === 0)
-    && Boolean(dataset.main_modality !== null)
-    && Boolean(dataset.classifier !== null)
-    && Boolean(dataset.dataset_id !== null);
+  return Boolean(dataset.type === 'scoreset');
 };
 });
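
For quick reference, a minimal usage sketch (not part of this commit) of the new type-driven helpers. The module name 'predicsis.jsSDK' and the injectable name 'Datasets' are assumptions taken from the ngdoc @methodOf annotations above; the dataset literal simply mirrors the documented example fixtures.

// Illustrative only: module/injectable names are assumed, and the dataset
// literal mirrors the documented fixtures rather than a live API response.
angular.module('exampleApp', ['predicsis.jsSDK'])
  .run(function(Datasets) {
    var dataset = {
      id: 'learning_dataset',
      type: 'upload_dataset',
      children_dataset_ids: []
    };

    Datasets.isParent(dataset);    // true:  type === 'upload_dataset'
    Datasets.isChild(dataset);     // false: type !== 'subset'
    Datasets.isScore(dataset);     // false: type !== 'scoreset'
    Datasets.hasChildren(dataset); // false: children_dataset_ids is empty
  });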