Implementing the sigmoid function with NumPy



The sigmoid activation function maps any real-valued input into the open interval (0, 1), giving the model smooth, differentiable outputs that are convenient for gradient-based training. (Note that sigmoid does not eliminate gradient problems — for large-magnitude inputs its gradient approaches zero, which can cause vanishing gradients in deep networks.)

# Matplotlib, numpy and math imports
# (math is imported to match the tutorial text, but this snippet only
# uses NumPy's vectorized functions)
import matplotlib.pyplot as plt
import numpy as np
import math

# 100 evenly spaced sample points on the interval [-10, 10]
x = np.linspace(-10, 10, 100)

# Element-wise sigmoid: 1 / (1 + e^(-x)); maps every input into (0, 1)
z = 1 / (1 + np.exp(-x))

# Plot the curve and label both axes, then display the figure
plt.plot(x, z)
plt.xlabel("x")
plt.ylabel("Sigmoid (X)")

plt.show()

Output:

Example #1:

# Import matplotlib, numpy and mathematics
# (math is kept for parity with the tutorial text; only NumPy is used here)
import matplotlib.pyplot as plt
import numpy as np
import math

# Sample 200 points across the wider interval [-100, 100].
sample_points = np.linspace(-100, 100, 200)

# Apply the sigmoid 1 / (1 + e^(-x)) to every sample at once.
sigmoid_values = 1 / (1 + np.exp(-sample_points))

# Draw the curve, label the axes, and display the figure.
plt.plot(sample_points, sigmoid_values)
plt.xlabel("x")
plt.ylabel("Sigmoid (X)")
plt.show()

Output: