# Reference: https://blog.csdn.net/u013733326/article/details/79702148

# coding=utf-8

import numpy as np
import matplotlib.pyplot as plt
from testCases import *
import sklearn
import sklearn.datasets
import sklearn.linear_model
from planar_utils import plot_decision_boundary, sigmoid, load_planar_dataset, load_extra_datasets

np.random.seed(1)  # fix the seed for reproducibility


def sigmoid(z):
    # Logistic function; note this local definition shadows the version
    # imported from planar_utils above.
    return 1 / (1 + np.exp(-z))


def layer_sizes(X, Y):
    n_x = X.shape[0]  # input layer size (number of features)
    n_y = Y.shape[0]  # output layer size
    n_h = 4           # hidden layer size, fixed at 4 here
    return n_x, n_h, n_y


def init(n_x, n_h, n_y):
    np.random.seed(2)
    w1 = np.random.randn(n_h, n_x) * 0.01
    b1 = np.zeros(shape=(n_h, 1))
    w2 = np.random.randn(n_y, n_h) * 0.01
    b2 = np.zeros(shape=(n_y, 1))
    assert w1.shape == (n_h, n_x)
    assert b1.shape == (n_h, 1)
    assert w2.shape == (n_y, n_h)
    assert b2.shape == (n_y, 1)

    parameters = {
        "w1": w1,
        "b1": b1,
        "w2": w2,
        "b2": b2
    }

    return parameters
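
# Why not zeros? If w1 and w2 started at zero, every hidden unit would compute
# the same activation and receive the same gradient, so the units would stay
# identical forever. The small random values above break that symmetry, and
# the 0.01 scale keeps tanh near its linear regime so gradients do not saturate.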



def forward(X, parameters):
    w1 = parameters["w1"]
    b1 = parameters["b1"]
    w2 = parameters["w2"]
    b2 = parameters["b2"]
    # Forward propagation: tanh hidden layer, sigmoid output layer
    z1 = np.dot(w1, X) + b1
    a1 = np.tanh(z1)
    z2 = np.dot(w2, a1) + b2
    a2 = sigmoid(z2)

    assert a2.shape == (1, X.shape[1])
    cache = {
        "z1": z1,
        "a1": a1,
        "z2": z2,
        "a2": a2
    }

    return cache
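
# Shape bookkeeping for the forward pass, with X of shape (n_x, m):
#   z1, a1: (n_h, m)    z2, a2: (n_y, m), where n_y = 1 for this binary task.
# NumPy broadcasting adds the (n_h, 1) and (n_y, 1) biases across all m columns.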


def cal_cost(Y, parameters, a2):
    # Cross-entropy cost: J = -(1/m) * sum(Y*log(a2) + (1-Y)*log(1-a2)).
    # parameters is unused here; it is kept for signature compatibility.
    m = Y.shape[1]

    logprobs = np.multiply(np.log(a2), Y) + np.multiply((1 - Y), np.log(1 - a2))
    cost = -np.sum(logprobs) / m
    cost = float(np.squeeze(cost))

    assert isinstance(cost, float)

    return cost
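
# Caveat (not in the original): np.log(a2) yields -inf if the sigmoid saturates
# to exactly 0 or 1 in float arithmetic; clipping first, e.g.
# a2 = np.clip(a2, 1e-10, 1 - 1e-10), is a common safeguard.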


def backward_propagation(parameters, cache, X, Y):
    m = Y.shape[1]
    w2 = parameters["w2"]

    a1 = cache["a1"]
    a2 = cache["a2"]

    dz2 = a2 - Y
    dw2 = 1 / m * np.dot(dz2, a1.T)
    db2 = 1 / m * np.sum(dz2, axis=1, keepdims=True)
    dz1 = np.multiply(np.dot(w2.T, dz2), 1 - np.power(a1, 2))
    dw1 = 1 / m * np.dot(dz1, X.T)
    db1 = 1 / m * np.sum(dz1, axis=1, keepdims=True)
    grads = {
        "dw1": dw1,
        "db1": db1,
        "dw2": dw2,
        "db2": db2
    }
    return grads
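
# Chain-rule notes for the gradients above:
#   dz2 = a2 - Y follows from pairing the sigmoid output with cross-entropy loss;
#   dz1 multiplies the back-propagated error w2.T @ dz2 by tanh'(z1) = 1 - a1**2.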


def update(parameters, grads, learning_rate):
    w1, w2 = parameters["w1"], parameters["w2"]
    b1, b2 = parameters["b1"], parameters["b2"]

    dw1, dw2 = grads["dw1"], grads["dw2"]
    db1, db2 = grads["db1"], grads["db2"]

    # Gradient descent step: theta := theta - learning_rate * d_theta
    w1 = w1 - learning_rate * dw1
    b1 = b1 - learning_rate * db1
    w2 = w2 - learning_rate * dw2
    b2 = b2 - learning_rate * db2

    parameters = {
        "w1": w1,
        "b1": b1,
        "w2": w2,
        "b2": b2
    }

    return parameters
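
# The learning rate of 0.5 used in the driver below is aggressive but works on
# this small dataset; too large a rate can make the cost oscillate or diverge,
# while too small a rate may leave the model underfit after 5000 iterations.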


def predict(parameters, X):
    cache = forward(X, parameters)
    a2 = cache["a2"]
    # np.round thresholds the sigmoid output at 0.5
    predictions = np.round(a2)
    return predictions


def solve(X, Y, n_h, num_iterations, learning_rate):
    parameters = init(X.shape[0], n_h, Y.shape[0])

    for i in range(num_iterations):
        cache = forward(X, parameters)
        cost = cal_cost(Y, parameters, cache["a2"])
        grads = backward_propagation(parameters, cache, X, Y)
        parameters = update(parameters, grads, learning_rate)
        if i % 1000 == 0:
            print("Cost after iteration %i: %f" % (i, cost))
    return parameters
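
# A minimal usage sketch (assumes planar_utils supplies load_planar_dataset,
# which returns features X of shape (2, m) and labels Y of shape (1, m)):
#
#   X, Y = load_planar_dataset()
#   parameters = solve(X, Y, n_h=4, num_iterations=10000, learning_rate=0.5)
#   print(predict(parameters, X))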


if __name__ == '__main__':
    X, Y = load_planar_dataset()
    # Sweep over hidden-layer sizes and compare decision boundaries
    hidden_sizes = [1, 2, 3, 4, 5, 10, 20, 30, 40, 50]
    for n_h in hidden_sizes:
        print("n_h =", n_h)
        parameters = solve(X, Y, n_h, num_iterations=5000, learning_rate=0.5)

        plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
        plt.title("Decision Boundary for hidden layer size " + str(n_h))

        predictions = predict(parameters, X)
        # Accuracy = true positives plus true negatives, as a percentage of all examples
        accuracy = float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100)
        print('Accuracy: %d%%' % accuracy)
        plt.show()
    # Optional: inspect the dataset
    # plt.scatter(X[0, :], X[1, :], c=Y, s=40, cmap=plt.cm.Spectral)
    # plt.show()
    # print("Shape of X: " + str(X.shape))
    # print("Shape of Y: " + str(Y.shape))
    # print("The dataset contains " + str(Y.shape[1]) + " examples")

    # Optional: logistic regression baseline for comparison
    # clf = sklearn.linear_model.LogisticRegressionCV()
    # clf.fit(X.T, Y.T.ravel())
    # plot_decision_boundary(lambda x: clf.predict(x), X, Y)
    # plt.title("Logistic Regression")
    # LR_predictions = clf.predict(X.T)
    # plt.show()
    # print("Logistic regression accuracy: %d"
    #       % float((np.dot(Y, LR_predictions) + np.dot(1 - Y, 1 - LR_predictions)) /
    #       float(Y.size) * 100)
    #       + "%" + " (percentage of correctly labelled data points)")
