Logistic Regression

Author: 就是果味熊 | Published 2020-02-25 12:37

I haven't found a dataset to run this on yet (a synthetic-data sketch follows the code below).

    import numpy as np

    def g_z(z):
        # Sigmoid function: g(z) = 1 / (1 + e^(-z)).
        g = 1 / (1 + np.exp(-z))
        return g
    
    def logis_function(k, x):
        # Hypothesis h(x) = g(k . x), with a column of ones appended so the
        # last element of k serves as the intercept k0.
        x0 = np.ones((x.shape[0]))
        x = np.column_stack((x, x0))
        x_T = np.transpose(x)
        k = np.array(k)
        z = np.dot(k, x_T)
        pred_y = g_z(z)
        return pred_y
    
    '''
    The last element of the vector k is k0, the intercept. A 1 is appended
    to the end of every sample x so that the dot product covers every
    element of k, including k0.
    '''
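    # For example (hypothetical numbers): with k = [k1, k2, k0] and a sample
    # x = [2., 3.], the augmented sample is [2., 3., 1.], so
    # z = k1*2 + k2*3 + k0*1 picks up the intercept automatically.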
    def loss_func(k, x, y):
        # Average cross-entropy loss over the m = len(y) samples.
        y = np.array(y)
        pred_y = logis_function(k, x)
        single_loss = y * np.log(pred_y) + (1 - y) * np.log(1 - pred_y)
        loss = -np.sum(single_loss) / len(y)
        return loss
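    # Written out: J(k) = -(1/m) * sum_i [ y_i*log(h_i) + (1-y_i)*log(1-h_i) ],
    # where h_i = g_z(k . x_i) is the predicted probability for sample i.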
    
    def gradient(k, x, y):
        # Gradient of the cross-entropy loss with respect to k.
        x0 = np.ones((x.shape[0]))
        x1 = np.column_stack((x, x0))
        y = np.array(y)
        pred_y = logis_function(k, x)
        x_T = np.transpose(x1)
        grad_k = np.dot((pred_y - y), x_T) / len(y)
        return grad_k
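    # This implements dJ/dk = (1/m) * (pred_y - y) . X, where X is the
    # ones-augmented design matrix, matching the loss in loss_func.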
    
    def logistic_regression(x, y, epochs, lr, k):
        '''
        epochs: number of gradient-descent iterations
        lr: learning rate
        k: initial parameter vector (see the shape note below and the
           usage sketch after the code)
        '''
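        # Expected shapes (an assumption read off the code above): x is (m, n),
        # y is (m,), and k has length n + 1, the extra element being k0.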
        best_k = None
        best_cost = np.inf
        k = np.array(k)
        for epoch in range(epochs):
            cost = loss_func(k, x, y)
            print("epoch: %d, k: %s, cost: %f" % (epoch, k, cost))
            # Keep the parameters with the lowest loss seen so far.
            if cost <= best_cost:
                best_cost = cost
                best_k = k
            # Gradient-descent update.
            grad_k = gradient(k, x, y)
            k = k - lr * grad_k
    
        return best_k, best_cost
    
    def pred__y(k, x):
        # Threshold the predicted probabilities at 0.5 to get 0/1 class labels.
        pred_pro_y = logis_function(k, x)
        pred_pro_y[pred_pro_y > 0.5] = 1
        pred_pro_y[pred_pro_y <= 0.5] = 0
        return pred_pro_y
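
Since there was no dataset at hand, here is a minimal smoke-test sketch on synthetic data. The two-blob data and all parameter values below are made up for illustration, not part of the original post.

    # Hypothetical usage: synthetic two-class data, all values made up.
    np.random.seed(0)
    x = np.vstack([np.random.randn(50, 2),          # class 0 around (0, 0)
                   np.random.randn(50, 2) + 2])     # class 1 around (2, 2)
    y = np.concatenate([np.zeros(50), np.ones(50)])
    k_init = np.zeros(x.shape[1] + 1)               # n weights plus intercept k0
    best_k, best_cost = logistic_regression(x, y, epochs=500, lr=0.1, k=k_init)
    labels = pred__y(best_k, x)
    print("train accuracy:", np.mean(labels == y))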
    
