Basic TensorFlow

Using TensorFlow's Keras API, this post builds single-neuron models for linear and logistic regression and compares their predictions with manual NumPy implementations, showing how the library handles these fundamental building blocks of neural networks.
Author

Vraj Shah

Published

September 1, 2023

Importing Packages

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.keras import Sequential
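
Versions will vary by environment; as a quick sanity check before going further, the installed TensorFlow version can be printed:

print(tf.__version__)       # e.g. 2.x; the exact version depends on your environment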

Neuron with Linear Regression Model

Dataset

X_train = np.array([[1.0], [2.0], [3.0]], dtype=np.float32)
Y_train = np.array([[300.0], [500.0], [700.0]], dtype=np.float32)

fig, ax = plt.subplots(1, 1)
ax.scatter(X_train, Y_train, marker='x', c='r', label="Data Points")
ax.legend(fontsize='large')
ax.set_ylabel('Price', fontsize='large')
ax.set_xlabel('Size', fontsize='large')
plt.show()
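
Note that the three points lie exactly on the line price = 200·size + 100, which is why the weights chosen further below reproduce the data perfectly. A quick check of this (a small added sketch using only the arrays above):

print(np.allclose(200 * X_train + 100, Y_train))    # True: the data sits on y = 200x + 100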

Linear Regression Model

model = Sequential(
    [
        tf.keras.layers.Dense(1, input_dim=1,  activation='linear', name='L1')
    ]
)
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 L1 (Dense)                  (None, 1)                 2         
                                                                 
=================================================================
Total params: 2 (8.00 Byte)
Trainable params: 2 (8.00 Byte)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
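
The two parameters reported by summary() are the layer's single weight and its bias: a Dense layer has input_dim * units weights plus units biases. A small sketch of that arithmetic for this model:

input_dim, units = 1, 1
print(input_dim * units + units)    # 2, matching the parameter count in the summary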

Instantiation of weights

w, b = model.get_weights()      # weights are randomly initialized at model creation
print(f"w = {w}, b={b}")
w = [[1.3145238]], b=[0.]
a = model(X_train)              # predictions with the random initial weights
print(a)
tf.Tensor(
[[1.3145238]
 [2.6290476]
 [3.9435716]], shape=(3, 1), dtype=float32)
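
With a linear activation, the layer simply computes X·w + b, so the random output above can be reproduced directly from the freshly initialized weights. A quick verification (the exact numbers depend on the random initialization):

print(np.allclose(a.numpy(), np.dot(X_train, w) + b))   # True: Dense with 'linear' activation is just X·w + b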

Setting the weights

set_w = np.array([[200]])
set_b = np.array([100])

model.set_weights([set_w, set_b])
print(model.get_weights())
[array([[200.]], dtype=float32), array([100.], dtype=float32)]

Comparing the TensorFlow Linear Model with a Manual Implementation

\[ f_{\mathbf{w},b}(x^{(i)}) = \mathbf{w}\cdot x^{(i)} + b \]

prediction_tf = model(X_train)
prediction_np = np.dot(X_train, set_w) + set_b
plt.figure(figsize=(12, 6))

plt.subplot(1, 2, 1)
plt.plot(X_train, prediction_tf, label='Model Output',
         color='blue', linewidth=4)
plt.scatter(X_train, Y_train, marker='x', c='r', label="Data Points", s=200)
plt.xlabel('X_train')
plt.ylabel('Prediction')
plt.title('Tensorflow Prediction')
plt.legend()
plt.grid(True)

plt.subplot(1, 2, 2)
plt.plot(X_train, prediction_np, label='Model Output',
         color='blue', linewidth=4)
plt.scatter(X_train, Y_train, marker='x', c='r', label="Data Points", s=200)
plt.xlabel('X_train')
plt.ylabel('Prediction')
plt.title('Numpy Prediction')
plt.legend()
plt.grid(True)

plt.tight_layout()
plt.show()
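
Because the weights were set to w = 200 and b = 100, the two implementations not only agree with each other but also reproduce the training targets exactly. A short numerical check:

print(np.allclose(prediction_tf.numpy(), prediction_np))    # True: TensorFlow and NumPy agree
print(np.allclose(prediction_np, Y_train))                  # True: the data lies on the chosen line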

Neuron with Logistic Regression Model

Dataset

X_train = np.array([0., 1, 2, 3, 4, 5], dtype=np.float32).reshape(-1, 1)
Y_train = np.array([0,  0, 0, 1, 1, 1], dtype=np.float32).reshape(-1, 1)

pos = Y_train == 1
neg = Y_train == 0

fig, ax = plt.subplots(1, 1, figsize=(8, 4))
ax.scatter(X_train[pos], Y_train[pos], marker='x', s=80, c='red', label="y=1")
ax.scatter(X_train[neg], Y_train[neg], marker='o', s=100, label="y=0", lw=3)

ax.set_ylim(-0.08, 1.1)
ax.set_ylabel('y', fontsize=12)
ax.set_xlabel('x', fontsize=12)
ax.set_title('one variable plot')
ax.legend(fontsize=12)
plt.show()

Logistic Regression Model

model = Sequential(
    [
        tf.keras.layers.Dense(1, input_dim=1,  activation='sigmoid')
    ]
)
model.summary()
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense (Dense)               (None, 1)                 2         
                                                                 
=================================================================
Total params: 2 (8.00 Byte)
Trainable params: 2 (8.00 Byte)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________

Instantiation of weights

w, b = model.get_weights()      # weights are randomly initialized at model creation
print(f"w = {w}, b={b}")
w = [[0.3190199]], b=[0.]
a = model(X_train)              # predictions with the random initial weights
print(a)
tf.Tensor(
[[0.5       ]
 [0.57908535]
 [0.6543102 ]
 [0.72253275]
 [0.78178173]
 [0.8313324 ]], shape=(6, 1), dtype=float32)
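
The first entry of this output is exactly 0.5: the first input is x = 0 and the bias is initialized to 0, so the layer computes sigmoid(0) = 0.5 regardless of the random weight. The whole tensor can be reproduced by applying the sigmoid manually (values will vary with the random initialization):

print(1 / (1 + np.exp(-(np.dot(X_train, w) + b))))   # matches the tensor printed above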

Setting the weights

set_w = np.array([[2]])
set_b = np.array([-4.5])

model.set_weights([set_w, set_b])
print(model.get_weights())
[array([[2.]], dtype=float32), array([-4.5], dtype=float32)]

Comparing the TensorFlow Logistic Model with a Manual Implementation

\[ f_{\mathbf{w},b}(x^{(i)}) = g(\mathbf{w}\cdot x^{(i)} + b), \qquad g(z) = \frac{1}{1+e^{-z}} \]

prediction_tf = model(X_train)
prediction_np = 1/(1+np.exp(-(np.dot(X_train, set_w) + set_b)))
plt.figure(figsize=(12, 6))

plt.subplot(1, 2, 1)
plt.plot(X_train, prediction_tf, label='Model Output',
         color='blue', linewidth=4)
plt.scatter(X_train, Y_train, marker='x', c='r', label="Data Points", s=200)
plt.xlabel('X_train')
plt.ylabel('Prediction')
plt.title('Tensorflow Prediction')
plt.legend()
plt.grid(True)

plt.subplot(1, 2, 2)
plt.plot(X_train, prediction_np, label='Model Output',
         color='blue', linewidth=4)
plt.scatter(X_train, Y_train, marker='x', c='r', label="Data Points", s=200)
plt.xlabel('X_train')
plt.ylabel('Prediction')
plt.title('Numpy Prediction')
plt.legend()
plt.grid(True)

plt.tight_layout()
plt.show()
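
With w = 2 and b = -4.5, the model's output crosses 0.5 where wx + b = 0, i.e. at x = -b/w = 2.25, which sits between the negative examples (x ≤ 2) and the positive ones (x ≥ 3). A short check of both the agreement between implementations and this decision boundary:

print(np.allclose(prediction_tf.numpy(), prediction_np))    # True: TensorFlow and NumPy agree
print(-set_b / set_w)                                       # [[2.25]]: the output crosses 0.5 at x = 2.25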