voting program

作者: 狼无雨雪 | 来源:发表于2019-07-04 18:58 被阅读0次
    %run simple_voting.py -i S4_2RFH.csv,S4_AthMethPre.csv,S4_KNN.csv,S4_PCP.csv -l 0 -c 5 -n 1
    
    主进程执行中>>> pid=19053
    ('default y_train: ', array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0]))
    ('default y_train: ', array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0]))
    ('default y_train: ', array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0]))
    ('default y_train: ', array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
           0, 0, 0, 0, 0, 0]))
    子进程执行中>>> pid=26604,ppid=19053
    子进程执行中>>> pid=26609,ppid=19053
    子进程执行中>>> pid=26618,ppid=19053
    子进程执行中>>> pid=26625,ppid=19053
    ('c:', 0.25, 'gamma:', 0.3535533905932738)
    子进程终止>>> pid=26618
    ('c:', 2.8284271247461903, 'gamma:', 0.03125)
    子进程终止>>> pid=26604
    ('c:', 1.2599210498948732, 'gamma:', 0.03125)
    子进程终止>>> pid=26625
    ('c:', 0.5612310241546865, 'gamma:', 0.03125)
    子进程终止>>> pid=26609
    [0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 
0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]
    [0.28102849 0.36324147 0.19414418 0.33847167 0.29691215 0.16284887
     0.12135384 0.36245913 0.24920739 0.38217983 0.09023905 0.31774164
     0.39826934 0.31365392 0.30804615 0.32784993 0.24526692 0.39334074
     0.18082637 0.34745861 0.3051261  0.32093135 0.3100606  0.21709327
     0.4149411  0.39312531 0.22159987 0.2867103  0.3033204  0.38323178
     0.37323477 0.36810729 0.37363446 0.31587763 0.32304909 0.39011633
     0.13804402 0.36519576 0.13104828 0.37311254 0.39643956 0.37884835
     0.28566341 0.29278919 0.26737973 0.27430594 0.36112861 0.41268239
     0.32336573 0.1872988  0.34438859 0.3264805  0.25455632 0.38458996
     0.21411267 0.39855877 0.24948146 0.33773553 0.2442758  0.39187768
     0.12087138 0.34104067 0.39782557 0.14413795 0.3608932  0.30992603
     0.3460383  0.2672644  0.31984056 0.30200707 0.34292577 0.38114244
     0.39278838 0.05091849 0.21314344 0.39620026 0.40699928 0.09210357
     0.36360677 0.31427431 0.32323145 0.30313156 0.28969486 0.34243896
     0.24821982 0.39161906 0.21201459 0.36408125 0.36601892 0.33509748
     0.33428113 0.324472   0.33084282 0.26418718 0.26391722 0.38429968
     0.36155034 0.36146755 0.28327816 0.27416132 0.37847582 0.29265039
     0.37553674 0.21988732 0.32838595 0.34460762 0.29834877 0.39734487
     0.34174893 0.37425658 0.31472673 0.36159116 0.38671162 0.34226549
     0.33301865 0.39510938 0.3373161  0.35453165 0.31845133 0.31914655
     0.36571878 0.2287085  0.28070509 0.33708787 0.0887728  0.35842379
     0.37912545 0.22652886 0.35678542 0.36205049 0.33088006 0.23798131
     0.27815204 0.37456475 0.11102423 0.30615323 0.21770032 0.34213001
     0.29390897 0.20512895 0.38557024 0.15149887 0.38176449 0.21423858
     0.35734389 0.39078626 0.17007558 0.3257666  0.38041268 0.33697633
     0.34906076 0.38019815 0.09999805 0.29651838 0.31118159 0.34132546
     0.36367959 0.34318082 0.2034676  0.36694848 0.36987383 0.34183389
     0.22163581 0.34794023 0.32207126 0.30010359 0.3452388  0.19661925
     0.2385628  0.24267002 0.1915579  0.24644051 0.16711613 0.21939656
     0.37193315 0.38667032 0.06169269 0.11763922 0.39375174 0.34414261
     0.37558322 0.35653302 0.21185345 0.39294667 0.34523303 0.36584973
     0.19402833 0.3959973  0.10866267 0.25796454 0.25463944 0.23212345
     0.29692636 0.29265562 0.33125192 0.32922346 0.36177505 0.20926512
     0.24793928 0.26955343 0.27028466 0.36923419 0.30791174 0.32530101
     0.08218485 0.22052405 0.39845892 0.28442466 0.16607396 0.39457579
     0.25613378 0.37411949 0.33624311 0.31939042 0.3187115  0.26252799
     0.40589503 0.22629603 0.34562665 0.28514029 0.27371906 0.10625388
     0.34711701 0.33924288 0.36297859 0.34399326 0.27684331 0.22968531
     0.33793386 0.33576724 0.34574131 0.20807156 0.33804333 0.28164861
     0.23440284 0.34621636 0.20860579 0.35450138 0.35607676 0.29817509
     0.35284276 0.34531679 0.28056184 0.37188764 0.32627724 0.38735012
     0.30769116 0.22398946 0.26290508 0.35823701 0.28350464 0.30121782
     0.33032688 0.35882209 0.35872498 0.39615687 0.39466424 0.37656196
     0.29028501 0.31904175 0.30712068 0.28559874 0.34936766 0.17056011
     0.36389539 0.37565092 0.33177019 0.36095091 0.33832078 0.12396487
     0.31358696 0.31264721 0.31130347 0.36370665 0.09049424 0.20710989
     0.31077427 0.36526459 0.33091715 0.27159839 0.31513669 0.20390967
     0.33421439 0.34122589 0.35682917 0.12624053 0.37351926 0.39065631
     0.3568004  0.30226084 0.34652226 0.21762871 0.2429213  0.31749817
     0.10037201 0.31481057 0.36731391 0.36747563 0.38485442 0.33940802
     0.35433013 0.18371531 0.16254644 0.21541102 0.21189105 0.25968362
     0.28133939 0.18884785 0.16047142 0.21760957 0.38132961 0.36111955
     0.27740611 0.33025501 0.07767995 0.10552746 0.31777761 0.16795724
     0.15042878 0.24242314 0.22709515 0.39418119 0.16013662 0.40948319
     0.40847068 0.35819721 0.23853997 0.11340841 0.09548125 0.09933265
     0.15351112 0.23666505 0.29338762 0.26710037 0.30061771 0.26500499
     0.37849805 0.36543038 0.18859312 0.37803495 0.39408956 0.31319236
     0.35758145 0.33435428 0.27848442 0.07540013 0.3411512  0.38235614
     0.23342944 0.36405074 0.32750072 0.24085637 0.25423942 0.1547882
     0.39967846 0.37214755 0.21080601 0.28869604 0.39626304 0.1808616
     0.29102702 0.37811561 0.31607519 0.2437125  0.35855858 0.38813422
     0.21669322 0.38196694 0.22473326 0.16235818 0.38530837 0.20944065
     0.12588948 0.35614367 0.38118341 0.41833405 0.34625897 0.10522192
     0.36193258 0.10234094 0.37953147 0.34394936 0.14053869 0.32436815
     0.40222582 0.34390528 0.40144094 0.3433158  0.14485544 0.1158859
     0.0648968  0.23152704 0.34992728 0.2988392  0.08843218 0.03363085
     0.08341515 0.14699384 0.07327471 0.18029342 0.12738919 0.13339409
     0.33580985 0.08783227 0.12216343 0.11659777 0.05757227 0.11692746
     0.16622041 0.08618668 0.12251766 0.36373883 0.07890158 0.06147252
     0.15432844 0.08296161 0.07312049 0.18329626 0.27465994 0.33155324
     0.06965634 0.12483246 0.09112161 0.18461345 0.17942776 0.05188502
     0.22143209 0.26169783 0.11246765 0.06516575 0.06405598 0.09277518
     0.13321987 0.16613441 0.11887428 0.07501707 0.13323264 0.14942963
     0.09025258 0.09404215 0.11655511 0.18955056 0.17882106 0.16785625
     0.16760265 0.13694159 0.16211941 0.12601585 0.04914088 0.28467376
     0.07203862 0.29523084 0.09754599 0.20781753 0.05327172 0.32020512
     0.19112721 0.06148696 0.08659863 0.15835228 0.20819003 0.1121086
     0.25056885 0.08649244 0.06985476 0.11933937 0.22845448 0.14139809
     0.18660581 0.15496112 0.07416921 0.11023954 0.04215122 0.2617098
     0.09495061 0.07993844 0.28055153 0.05194397 0.10937802 0.10208535
     0.21251412 0.07029194 0.15747509 0.08596881 0.27454512 0.09402394
     0.25453715 0.28609832 0.11038348 0.27827383 0.17740054 0.12690631
     0.10593114 0.06192576 0.21415238 0.06041947 0.30286521 0.0925095
     0.29713166 0.22470567 0.23498106 0.12805883 0.26817252 0.17732926
     0.09771669 0.27897036 0.37622258 0.29421033 0.08617962 0.22414743
     0.07834793 0.18961399 0.26820834 0.2450617  0.27582429 0.17771324
     0.30265988 0.10150949 0.1253357  0.11589318 0.18643389 0.0633741
     0.23431697 0.10033929 0.15649691 0.12906359 0.20329402 0.1218424
     0.06716393 0.21938412 0.15483608 0.26189721 0.08012269 0.15004832
     0.23141523 0.06687409 0.05629475 0.06223932 0.14736734 0.06333097
     0.07335974 0.10313515 0.131921   0.34576706 0.1018656  0.0658709
     0.21843065 0.25089823 0.12037673 0.07948336 0.17466788 0.3194459
     0.14014119 0.05918218 0.12220282 0.05488714 0.15553671 0.16256737
     0.09009388 0.19874627 0.23136096 0.19181707 0.26866829 0.07788793
     0.08675527 0.21064202 0.04879933 0.06423825 0.07160206 0.12848193
     0.26899649 0.1050048  0.17941274 0.10054802 0.2669195  0.08906067
     0.08110009 0.10026094 0.22477916 0.09681126 0.20132396 0.04323121
     0.17806193 0.0910505  0.12488904 0.09370155 0.12157883 0.06837107
     0.3097659  0.09818879 0.14936765 0.26721664 0.30277663 0.08514697
     0.08748981 0.08071509 0.09132316 0.07718778 0.1595897  0.06890884
     0.09302636 0.09456449 0.09376898 0.17081251 0.11468134 0.09379461
     0.08231053 0.28062543 0.16441982 0.15695499 0.06451476 0.07296714
     0.19153868 0.11313136 0.28166532 0.17720195 0.11844089 0.08985853
     0.08125798 0.09616677 0.10238119 0.11445385 0.09551084 0.08389587
     0.08130528 0.19987794 0.09318118 0.12194891 0.28931732 0.12588777
     0.15457015 0.13615155 0.08938032 0.08262744 0.08586233 0.09363697
     0.05356363 0.11919573 0.33296796 0.1712589  0.3032152  0.11493867
     0.11003459 0.05126828 0.26034928 0.07550472 0.09454848 0.16779255
     0.22968838 0.16701101 0.07431204 0.08986741 0.06511435 0.13642007
     0.29047328 0.2183369  0.35836423 0.25512199 0.07321366 0.10796514
     0.12188686 0.30179465 0.18553443 0.23411171 0.17104052 0.13890148
     0.28664305 0.24840621 0.25047569 0.15911857 0.0613403  0.0586843
     0.07845226 0.08898104 0.10920827 0.11675391 0.05033463 0.16034182
     0.04388738 0.14541222 0.06574937 0.30837472 0.09758899 0.17949586
     0.18939182 0.08141545 0.0717239  0.12944365 0.04644033 0.09444255
     0.26873904 0.08269825 0.14237744 0.0902951  0.23304176 0.07971594
     0.07918619 0.03601523 0.07589934 0.10323306 0.12987726 0.05325562
     0.23935744 0.12543536 0.05364815 0.07453846 0.11371789 0.10046396
     0.08638694 0.20672345 0.05524809 0.22195692 0.28072548 0.20999879
     0.07132924 0.03997603 0.05119933 0.06293517 0.13956053 0.04749206
     0.20683897 0.09686401 0.32246889 0.08747406 0.27423994 0.317709
     0.32571868 0.08704951 0.1221084  0.24899559 0.38329328 0.07333233
     0.32305239 0.19839241 0.04152471 0.06458123 0.12117697 0.0563728
     0.27872865 0.07108151 0.34447993 0.0553961  0.11556732 0.07535296
     0.10386001 0.1917806  0.15390384 0.15464413 0.08728678 0.10365735
     0.2529532  0.14257663 0.16535902 0.19848332 0.08999541 0.08798311
     0.29605591 0.21700495 0.36027735 0.3862162  0.19660895 0.29152823
     0.22604386 0.24606896 0.05487285 0.05182756 0.11491963 0.26479287
     0.06947103 0.06922456 0.03371796 0.08763121 0.11055531 0.16828773
     0.08753378 0.13316708]
    ('ACC', 0.7938144329896907)
    ('final_out_to_excel', [[u'\u7279\u5f81\u96c6', u'\u6837\u672c\u4e2a\u6570', u'\u5206\u7c7b\u5668', u'Accuracy', u'Precision', u'Recall', u'SN', u'SP', u'Gm', u'F_measure', u'F_score', u'MCC', u'ROC\u66f2\u7ebf\u9762\u79ef', u'tp', u'fn', u'fp', u'tn'], ["['S4_2RFH.csv', 'S4_AthMethPre.csv', 'S4_KNN.csv', 'S4_PCP.csv']", '\xe6\xad\xa3\xef\xbc\x9a388\xe8\xb4\x9f\xef\xbc\x9a388', 'svm', 0.7938144329896907, 0.8114754098360656, 0.7654639175257731, 0.7654639175257731, 0.8221649484536082, 0.7933080122472446, 0.7877984084880636, 0.7877984084880636, 0.5885757652396411, 0.8794903815495801, 297.0, 91.0, 69.0, 319.0]])
    主进程终止
    
    # encoding:utf-8
    import getopt
    from sklearn.preprocessing import MinMaxScaler
    import os,time
    from multiprocessing import Process, Manager
    import pandas as pd
    import numpy as np
    import itertools
    from sklearn.model_selection import KFold  
    from sklearn import svm
    # from sklearn.cross_validation import train_test_split
    import math
    from sklearn.model_selection import *
    import sklearn.ensemble
    from sklearn import metrics
    from sklearn.metrics import roc_curve, auc
    import sys
    from sklearn.model_selection import GridSearchCV
    import warnings 
    # ---- Module-level configuration, filled in from the command line ----
    whole_result=[]        # NOTE(review): appears unused in this script
    input_files=""         # becomes a list of input CSV file names (set by -i)
    whole_dimension=[]     # NOTE(review): appears unused in this script
    default_l = 1          # 1: CSVs carry a label in the last column; -1: labels are generated (set by -l)
    cross_validation_value = 10   # number of cross-validation folds (set by -c)
    CPU_value = 1          # n_jobs for grid search / cross-validation (set by -n)
    # Short options: -h (declared but not handled below), -i input files,
    # -l label mode, -c CV folds, -n CPU count.
    opts, args = getopt.getopt(sys.argv[1:], "hi:l:c:n:", )
    final_out_to_excel=[]
    # Header row for the Excel summary written at the end of the run.
    # The Chinese column names are: feature set, sample count, classifier,
    # ..., ROC curve area (AUC), tp, fn, fp, tn.
    row0 = [u'特征集', u'样本个数', u'分类器', u'Accuracy', u'Precision', u'Recall', u'SN', u'SP',
                    u'Gm', u'F_measure', u'F_score', u'MCC', u'ROC曲线面积', u'tp', u'fn', u'fp', u'tn']
    final_out_to_excel.append(row0) # header row used to generate the xlsx Excel file
    # Parse the command-line options declared above.
    for op, value in opts:
        if op == "-i":
            input_files = str(value)
            # "-i a.csv,b.csv" -> ['a.csv', 'b.csv']; an empty entry means a
            # stray comma or blank in the argument and is treated as fatal.
            input_files = input_files.replace(" ", "").split(',')
            for input_file in input_files:
                if input_file == "":
                    print("Warning: please insure no blank in your input files !")
                    sys.exit()
        elif op == "-l":
            # Any value other than 1 selects the "generate labels" mode (-1).
            if int(value) == 1:
                default_l = 1
            else:
                default_l = -1
        elif op == "-c":
            cross_validation_value = int(value)

        elif op == "-n":
            CPU_value = int(value)
    
    def performance(labelArr, predictArr):
        """Confusion-matrix metrics for binary 0/1 classification.

        labelArr holds the actual labels, predictArr the predicted ones
        (equal length, values 0 or 1).

        Returns a tuple
            (precision, recall, SN, SP, GM, TP, TN, FP, FN)
        where SN (sensitivity) equals recall, SP is specificity, and GM is
        the geometric mean sqrt(recall * SP).  Each ratio is 0 when its
        denominator would be zero.
        """
        TP = TN = FP = FN = 0.
        for actual, predicted in zip(labelArr, predictArr):
            if actual == 1:
                if predicted == 1:
                    TP += 1.
                elif predicted == 0:
                    FN += 1.
            elif actual == 0:
                if predicted == 1:
                    FP += 1.
                elif predicted == 0:
                    TN += 1.
        # Guard every ratio against an empty denominator.
        SN = TP / (TP + FN) if (TP + FN) else 0          # sensitivity = TP / P
        SP = TN / (FP + TN) if (FP + TN) else 0          # specificity = TN / N
        precision = TP / (TP + FP) if (TP + FP) else 0
        recall = TP / (TP + FN) if (TP + FN) else 0      # identical to SN
        GM = math.sqrt(recall * SP)
        return precision, recall, SN, SP, GM, TP, TN, FP, FN
    
    def worker(X_train, y_train, cross_validation_value, CPU_value, input_file, share_y_predict_dict, share_y_predict_proba_dict):
        """Grid-search an RBF SVM for one feature file and publish its
        cross-validated predictions.

        Runs in a child process.  Results are stored both in the shared
        Manager dicts (keyed by the file name without ".csv") and on disk as
        "<name>_predict.csv" / "<name>_predict_proba.csv".

        Parameters:
            X_train, y_train: features and 0/1 labels for this file.
            cross_validation_value: number of CV folds.
            CPU_value: n_jobs for GridSearchCV and cross_val_predict.
            input_file: source CSV name, used as the result key.
            share_y_predict_dict: shared dict of out-of-fold label predictions.
            share_y_predict_proba_dict: shared dict of positive-class probabilities.
        """
        print("子进程执行中>>> pid={0},ppid={1}".format(os.getpid(), os.getppid()))
        svc = svm.SVC(probability=True)
        # BUG FIX: wrap map() in list().  Under Python 3, map() returns a
        # lazy single-use iterator and GridSearchCV would effectively see an
        # empty/exhausted parameter grid; under Python 2 list(map(...)) is
        # byte-for-byte equivalent to the original behavior.
        parameters = {'kernel': ['rbf'],
                      'C': list(map(lambda x: 2 ** x, np.linspace(-2, 5, 7))),
                      'gamma': list(map(lambda x: 2 ** x, np.linspace(-5, 2, 7)))}
        clf = GridSearchCV(svc, parameters, cv=cross_validation_value, n_jobs=CPU_value, scoring='accuracy')
        clf.fit(X_train, y_train)
        C = clf.best_params_['C']
        gamma = clf.best_params_['gamma']
        print('c:', C, 'gamma:', gamma)

        # Out-of-fold predictions with the tuned hyper-parameters: hard
        # labels first, then per-class probabilities (probability=True needed
        # for predict_proba).
        y_predict = cross_val_predict(svm.SVC(kernel='rbf', C=C, gamma=gamma,), X_train, y_train, cv=cross_validation_value, n_jobs=CPU_value)
        y_predict_prob = cross_val_predict(svm.SVC(kernel='rbf', C=C, gamma=gamma, probability=True), X_train, y_train, cv=cross_validation_value, n_jobs=CPU_value, method='predict_proba')
        input_file = input_file.replace(".csv", "")
        y_predict_path = input_file + "_predict.csv"
        y_predict_proba_path = input_file + "_predict_proba.csv"
        share_y_predict_dict[input_file] = y_predict
        share_y_predict_proba_dict[input_file] = y_predict_prob[:, 1]  # column 1 = P(class == 1)
        pd.DataFrame(y_predict).to_csv(y_predict_path, header=None, index=False)
        pd.DataFrame(y_predict_prob[:, 1]).to_csv(y_predict_proba_path, header=None, index=False)
        print("子进程终止>>> pid={0}".format(os.getpid()))
            
    def load_labeled_csv(input_file):
        """Read a feature CSV whose LAST column is the 0/1 class label.

        Returns (X_train, y_train) as numpy arrays with y_train flattened
        to 1-D.
        """
        data = pd.read_csv(input_file, header=None)
        (x_len, y_len) = data.shape
        X_train = data.iloc[:, 0:y_len - 1].values
        y_train = data.iloc[:, [y_len - 1]].values.reshape(-1)
        return X_train, y_train

    def load_unlabeled_csv(input_file):
        """Read a feature CSV with NO label column.

        Labels are generated by convention: the first floor(n/2) rows are
        positive (1) and the remaining rows negative (0).  Returns
        (X_train, y_train) as numpy arrays.
        """
        data = pd.read_csv(input_file, header=None)
        X_train = data.values
        half_sequence_number = data.shape[0] // 2   # // keeps Python-2 floor semantics
        y_train = np.array([1 if e < half_sequence_number else 0
                            for e in range(len(X_train))])
        print("default y_train: ", y_train)
        return X_train, y_train

    if __name__=="__main__":
        print("主进程执行中>>> pid={0}".format(os.getpid()))
        manager = Manager()
        # Cross-process result stores, one entry per input file.
        share_y_predict_dict = manager.dict()
        share_y_predict_proba_dict = manager.dict()
        ps = []
        file_len = len(input_files)
        # A sample is voted positive when STRICTLY more than half of the base
        # classifiers predict 1.  Vote sums are integers, so x / 2.0 yields
        # the same cut-off as the original under both Py2 and Py3 division.
        threshold = file_len / 2.0
        y_train = None
        # One SVM worker process per input file.  default_l == 1 means the
        # CSVs carry their own labels; otherwise labels are generated.
        for index, input_file in enumerate(input_files):
            if default_l == 1:
                X_train, y_train = load_labeled_csv(input_file)
            else:
                X_train, y_train = load_unlabeled_csv(input_file)
            p = Process(target=worker, name="worker" + str(index),
                        args=(X_train, y_train, cross_validation_value, CPU_value,
                              input_file, share_y_predict_dict, share_y_predict_proba_dict))
            ps.append(p)
        # Start every worker, then block until all of them have finished.
        for p in ps:
            p.start()
        for p in ps:
            p.join()
        # Majority vote over the per-file label predictions.
        # NOTE(review): the metrics below compare against the y_train of the
        # LAST input file -- all input CSVs are assumed to share one label set.
        ensembling_prediction = 0
        for key, value in share_y_predict_dict.items():
            ensembling_prediction = ensembling_prediction + value
        ensembling_prediction = [1 if e > threshold else 0 for e in ensembling_prediction]
        print(ensembling_prediction)
        # Average the positive-class probabilities over all models.
        # BUG FIX: the original divided by a hard-coded 3.0 (wrong for any
        # number of models other than 3) and then printed the result divided
        # by 3.0 a SECOND time; divide once by the real model count.
        ensembling_prediction_proba = 0
        for key, value in share_y_predict_proba_dict.items():
            ensembling_prediction_proba = ensembling_prediction_proba + value
        ensembling_prediction_proba = ensembling_prediction_proba / float(file_len)
        print(ensembling_prediction_proba)
        ACC = metrics.accuracy_score(y_train, ensembling_prediction)
        print("ACC", ACC)
        precision, recall, SN, SP, GM, TP, TN, FP, FN = performance(y_train, ensembling_prediction)
        F1_Score = metrics.f1_score(y_train, ensembling_prediction)
        F_measure = F1_Score
        MCC = metrics.matthews_corrcoef(y_train, ensembling_prediction)
        # Renamed from "auc" so it no longer shadows sklearn.metrics.auc.
        auc_score = metrics.roc_auc_score(y_train, ensembling_prediction_proba)
        # BUG FIX: the negative ("负") count originally re-counted the
        # positives (y_train == 1); count the zeros instead.
        savedata = [str(input_files),
                    "正:" + str(len(y_train[y_train == 1])) + '负:' + str(len(y_train[y_train == 0])),
                    'svm', ACC, precision, recall, SN, SP, GM, F_measure, F1_Score,
                    MCC, auc_score, TP, FN, FP, TN]
        final_out_to_excel.append(savedata)
        print("final_out_to_excel", final_out_to_excel)
        # Persist the ensemble outputs and the Excel summary sheet.
        pd.DataFrame(ensembling_prediction).to_csv("voting_prediction_label.csv", header=None, index=False)
        pd.DataFrame(ensembling_prediction_proba).to_csv("voting_prediction_proba_label.csv", header=None, index=False)
        pd.DataFrame(final_out_to_excel).to_excel('output' + '.xlsx', sheet_name="results", index=False, header=False)
        print("主进程终止")
    

    相关文章

      网友评论

        本文标题:voting program

        本文链接:https://www.haomeiwen.com/subject/jyxdhctx.html