Software Laboratory II Code
Assignment No. 1
Code:
import numpy as np
import matplotlib.pyplot as plt

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def relu(x):
    return np.maximum(0, x)

def tanh(x):
    return np.tanh(x)

def softmax(x):
    return np.exp(x) / np.sum(np.exp(x))

# Create x values
x = np.linspace(-10, 10, 100)
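The listing stops before any plotting step; below is a minimal sketch of how the four activations could be drawn with the matplotlib import already in place. The 2x2 subplot layout is an assumption, not part of the original.

# Plot each activation over the same x range (layout assumed, not from the source)
fig, axes = plt.subplots(2, 2, figsize=(10, 8))
functions = [('Sigmoid', sigmoid), ('ReLU', relu), ('Tanh', tanh), ('Softmax', softmax)]
for ax, (name, fn) in zip(axes.flat, functions):
    ax.plot(x, fn(x))
    ax.set_title(name)
    ax.grid(True)
plt.tight_layout()
plt.show()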
Output:
Assignment No. 2
Code:
# importing libraries
import numpy as np

# linear threshold gate: fires (returns 1) when the weighted sum reaches the threshold T
def linear_threshold_gate(dot_product, T):
    return int(dot_product >= T)

# matrix of inputs
input_table = np.array([
    [0, 0],  # both no
    [0, 1],  # one no, one yes
    [1, 0],  # one yes, one no
    [1, 1]   # both yes
])
print(f'input table:\n{input_table}')

weights = np.array([1, -1])
T = 1

# weighted sum of each input row with the weight vector
dot_products = input_table @ weights

for i in range(0, 4):
    activation = linear_threshold_gate(dot_products[i], T)
    print(f'Activation: {activation}')
Output:
input table:
[[0 0]
[0 1]
[1 0]
[1 1]]
Activation: 0
Activation: 0
Activation: 1
Activation: 0
Assignment No. 3
Code:
import numpy as np

# labels for the digits 0-9: 0 = even, 1 = odd
Y = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1])
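The listing jumps from the labels straight to the test loop; everything in between is missing. Below is a minimal sketch of the elided pieces: the input array X (only its closing bracket survived extraction), a classic perceptron with the fit/predict interface the loop assumes, and test_X copied from the printed output. All three are reconstructions, not the original code.

# assumed training inputs: one-hot encodings of the digits 0-9
X = np.eye(10, dtype=int)

# assumed perceptron with the predict() interface used by the test loop below
class Perceptron:
    def __init__(self, n_inputs, lr=0.1, epochs=100):
        self.w = np.zeros(n_inputs)
        self.b = 0.0
        self.lr = lr
        self.epochs = epochs

    def predict(self, x):
        # step activation: 1 (odd) when the weighted sum is positive
        return int(np.dot(self.w, x) + self.b > 0)

    def fit(self, X, Y):
        for _ in range(self.epochs):
            for x, t in zip(X, Y):
                error = t - self.predict(x)
                self.w += self.lr * error * x
                self.b += self.lr * error

perceptron = Perceptron(n_inputs=10)
perceptron.fit(X, Y)

# test vectors copied from the printed output: single digits 6-9 and combinations
test_X = np.array([
    [0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
    [0, 0, 0, 0, 0, 0, 1, 1, 0, 0],
    [0, 0, 0, 0, 0, 0, 1, 0, 1, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 0],
    [0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
    [0, 0, 0, 0, 0, 0, 1, 0, 1, 1],
    [0, 0, 0, 0, 0, 0, 0, 1, 1, 1],
])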
for i in range(test_X.shape[0]):
    x = test_X[i]
    y = perceptron.predict(x)
    print(f'{x} is {"even" if y == 0 else "odd"}')
Output:
[0 0 0 0 0 0 1 0 0 0] is even
[0 0 0 0 0 0 0 1 0 0] is odd
[0 0 0 0 0 0 0 0 1 0] is even
[0 0 0 0 0 0 0 0 0 1] is odd
[0 0 0 0 0 0 1 1 0 0] is even
[0 0 0 0 0 0 1 0 1 0] is even
[0 0 0 0 0 0 1 1 1 0] is even
[0 0 0 0 0 0 1 1 1 1] is even
[0 0 0 0 0 0 1 0 1 1] is even
[0 0 0 0 0 0 0 1 1 1] is odd
Assignment No. 4
Code:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
b += lr * error
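Only the bias update b += lr * error of the training loop survives above; below is a minimal perceptron training loop on the Iris data consistent with that line. The binary target (setosa vs. the rest), the epoch count, and the closing scatter plot are assumptions.

# assumed setup: single-layer perceptron separating Iris setosa from the other classes
iris = load_iris()
X = iris.data
y = (iris.target == 0).astype(int)  # 1 for setosa, 0 otherwise (assumed target)

lr = 0.1
w = np.zeros(X.shape[1])
b = 0.0

for epoch in range(20):
    for xi, target in zip(X, y):
        pred = int(np.dot(w, xi) + b > 0)
        error = target - pred
        w += lr * error * xi
        b += lr * error  # the surviving line from the original listing

# visualize two features of the labeled data
plt.scatter(X[:, 0], X[:, 1], c=y)
plt.xlabel(iris.feature_names[0])
plt.ylabel(iris.feature_names[1])
plt.show()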
Output:
Assignment No. 5
Code:
import numpy as np
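Everything between the import and the final print statements is missing; below is a minimal Bidirectional Associative Memory sketch whose stored pattern pairs are assumptions, chosen only to be consistent with the printed 4-element input and 2-element output.

# BAM sketch: the stored bipolar pairs below are assumptions, not the original data
x_pairs = np.array([[ 1, -1, -1, -1],
                    [-1, -1,  1,  1]])
y_pairs = np.array([[ 1, -1],
                    [-1,  1]])

# Hebbian weight matrix: sum of outer products of the associated pairs
W = sum(np.outer(xp, yp) for xp, yp in zip(x_pairs, y_pairs))

# forward recall: x is mapped to y through W with the sign activation
x_test = np.array([1, -1, -1, -1])
y_test = np.sign(x_test @ W)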
# print output
print("Input x: ", x_test)
print("Output y: ", y_test)
Output:
Input x: [ 1 -1 -1 -1]
Output y: [ 1 -1]
Assignment No. 6
Code:
import numpy as np

class NeuralNetwork:
        self.backward_propagation(X, y, y_hat)
        if i % 100 == 0:
            print("Error at epoch", i, ":", np.mean(np.abs(self.error)))

# Create a neural network with 2 input neurons, 4 neurons in the hidden layer, and 1 output neuron
nn = NeuralNetwork([2, 4, 1], activation='relu')

# Train the neural network on the input and output datasets for 10000 epochs with a learning rate of 0.1
nn.train(X, y, lr=0.1, epochs=10000)

# Use the trained neural network to make predictions on the same input dataset
predictions = nn.predict(X)
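Only the tail of the training loop survives inside the class above; below is a minimal sketch of a complete NeuralNetwork matching the calls used here: train(X, y, lr, epochs), predict(X), a backward_propagation(X, y, y_hat) step, and an error attribute. The initialization scheme and the XOR-style X and y (which the fragment never defines) are assumptions.

# X and y are not defined in the surviving fragment; XOR-style data is an assumption
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

class NeuralNetwork:
    def __init__(self, layers, activation='relu'):
        self.activation = activation
        # small random weights and zero biases, one pair per layer transition
        self.weights = [np.random.randn(a, b) * 0.1
                        for a, b in zip(layers[:-1], layers[1:])]
        self.biases = [np.zeros((1, b)) for b in layers[1:]]

    def _act(self, z):
        if self.activation == 'relu':
            return np.maximum(0, z)
        return 1 / (1 + np.exp(-z))

    def _act_deriv(self, a):
        # derivative expressed in terms of the layer's activation a
        if self.activation == 'relu':
            return (a > 0).astype(float)
        return a * (1 - a)

    def forward_propagation(self, X):
        # keep every layer's activation for use in the backward pass
        self.a = [X]
        for W, b in zip(self.weights, self.biases):
            self.a.append(self._act(self.a[-1] @ W + b))
        return self.a[-1]

    def backward_propagation(self, X, y, y_hat):
        self.error = y - y_hat
        delta = self.error * self._act_deriv(y_hat)
        for l in range(len(self.weights) - 1, -1, -1):
            grad_W = self.a[l].T @ delta
            grad_b = delta.sum(axis=0, keepdims=True)
            if l > 0:
                delta = (delta @ self.weights[l].T) * self._act_deriv(self.a[l])
            self.weights[l] += self.lr * grad_W
            self.biases[l] += self.lr * grad_b

    def train(self, X, y, lr=0.1, epochs=10000):
        self.lr = lr
        for i in range(epochs):
            y_hat = self.forward_propagation(X)
            self.backward_propagation(X, y, y_hat)
            if i % 100 == 0:
                print("Error at epoch", i, ":", np.mean(np.abs(self.error)))

    def predict(self, X):
        return self.forward_propagation(X)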
Output:
[[5.55111512e-16]
[6.66666667e-01]
[6.66666667e-01]
[6.66666667e-01]]
Assignment No. 7
Code:
import numpy as np

class XORNetwork:
    def __init__(self):
        # Initialize the weights and biases randomly
        self.W1 = np.random.randn(2, 2)
        self.b1 = np.random.randn(2)
        self.W2 = np.random.randn(2, 1)
        self.b2 = np.random.randn(1)

        # backward pass (from the network's training step): push the output
        # error back to the hidden layer and apply the weight and bias updates
        self.z1_error = self.output_delta.dot(self.W2.T)
        self.z1_delta = self.z1_error * self.sigmoid_derivative(self.a1)
        self.W1 += X.T.dot(self.z1_delta)
        self.b1 += np.sum(self.z1_delta, axis=0)
        self.W2 += self.a1.T.dot(self.output_delta)
        self.b2 += np.sum(self.output_delta, axis=0)
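The sigmoid helpers, the forward pass, and the training loop are missing from the listing; below is a complete sketch built around the update lines shown above, with no explicit learning rate, matching those lines. The epoch count and the XOR training data are assumptions.

class XORNetwork:
    def __init__(self):
        # Initialize the weights and biases randomly
        self.W1 = np.random.randn(2, 2)
        self.b1 = np.random.randn(2)
        self.W2 = np.random.randn(2, 1)
        self.b2 = np.random.randn(1)

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, a):
        # derivative expressed in terms of the sigmoid output a
        return a * (1 - a)

    def forward(self, X):
        self.z1 = X.dot(self.W1) + self.b1
        self.a1 = self.sigmoid(self.z1)
        self.z2 = self.a1.dot(self.W2) + self.b2
        self.output = self.sigmoid(self.z2)
        return self.output

    def train(self, X, y, epochs=10000):
        for _ in range(epochs):
            output = self.forward(X)
            # output-layer error and delta
            self.output_error = y - output
            self.output_delta = self.output_error * self.sigmoid_derivative(output)
            # hidden-layer error, delta, and updates (the lines in the listing)
            self.z1_error = self.output_delta.dot(self.W2.T)
            self.z1_delta = self.z1_error * self.sigmoid_derivative(self.a1)
            self.W1 += X.T.dot(self.z1_delta)
            self.b1 += np.sum(self.z1_delta, axis=0)
            self.W2 += self.a1.T.dot(self.output_delta)
            self.b2 += np.sum(self.output_delta, axis=0)

X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
net = XORNetwork()
net.train(X, y)
print(net.forward(X))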
Output:
[[0.01063456]
[0.98893162]
[0.98893279]
[0.01358006]]
Assignment No. 8
Code:
import numpy as np
# Define hyperparameters
learning_rate = 0.1
num_epochs = 100000
# Forward propagation
hidden_layer = sigmoid(np.dot(X, hidden_weights))
output_layer = sigmoid(np.dot(hidden_layer, output_weights))
# Backpropagation
output_error = y - output_layer
output_delta = output_error * sigmoid_derivative(output_layer)
hidden_error = output_delta.dot(output_weights.T)
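The listing omits the sigmoid helpers, the weight initialization, the epoch loop, and the weight updates around the surviving lines; below is a complete sketch that fills those gaps. The hidden-layer size, the absence of bias terms, and the initialization are assumptions.

import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(a):
    # derivative expressed in terms of the sigmoid output a
    return a * (1 - a)

# XOR inputs and targets
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

learning_rate = 0.1
num_epochs = 100000

# assumed: 2 hidden units, random initialization, no biases
hidden_weights = np.random.randn(2, 2)
output_weights = np.random.randn(2, 1)

for _ in range(num_epochs):
    # Forward propagation (as in the listing)
    hidden_layer = sigmoid(np.dot(X, hidden_weights))
    output_layer = sigmoid(np.dot(hidden_layer, output_weights))
    # Backpropagation (as in the listing)
    output_error = y - output_layer
    output_delta = output_error * sigmoid_derivative(output_layer)
    hidden_error = output_delta.dot(output_weights.T)
    hidden_delta = hidden_error * sigmoid_derivative(hidden_layer)
    # assumed weight updates
    output_weights += learning_rate * hidden_layer.T.dot(output_delta)
    hidden_weights += learning_rate * X.T.dot(hidden_delta)

print("Input:")
print(X)
print("Output:")
print(output_layer)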
Output:
Input:
[[0 0]
[0 1]
[1 0]
[1 1]]
Output:
[[0.61385986]
[0.63944088]
[0.8569871 ]
[0.11295854]]
Assignment No. 9
Code:
import numpy as np

class HopfieldNetwork:
    def __init__(self, n_neurons):
        self.n_neurons = n_neurons
        self.weights = np.zeros((n_neurons, n_neurons))

n_neurons = patterns.shape[1]
network = HopfieldNetwork(n_neurons)
network.train(patterns)
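The train method, a recall step, and the patterns array are all missing from the listing; below is a sketch assuming the standard Hebbian rule and sign-activation recall. The stored patterns are invented for illustration.

class HopfieldNetwork:
    def __init__(self, n_neurons):
        self.n_neurons = n_neurons
        self.weights = np.zeros((n_neurons, n_neurons))

    def train(self, patterns):
        # Hebbian rule: accumulate the outer product of each bipolar pattern
        for p in patterns:
            self.weights += np.outer(p, p)
        np.fill_diagonal(self.weights, 0)  # no self-connections

    def recall(self, pattern, steps=5):
        # repeated synchronous updates with the sign activation
        p = pattern.copy()
        for _ in range(steps):
            p = np.where(self.weights @ p >= 0, 1, -1)
        return p

# assumed bipolar patterns to store (invented for illustration)
patterns = np.array([[ 1, -1,  1, -1],
                     [-1,  1, -1,  1]])
n_neurons = patterns.shape[1]
network = HopfieldNetwork(n_neurons)
network.train(patterns)

# recall from a noisy version of the first pattern
print(network.recall(np.array([1, -1, 1, 1])))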
Output:
Assignment No. 10
Code:
import keras
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.optimizers import SGD
from keras.preprocessing.image import ImageDataGenerator
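Only the imports survived; below is a small CIFAR-10 CNN in the style those imports suggest. The architecture, optimizer settings, and augmentation parameters are assumptions (and the learning_rate argument name assumes a recent Keras).

# load and normalize CIFAR-10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.astype('float32') / 255.0
x_test = x_test.astype('float32') / 255.0
y_train = keras.utils.to_categorical(y_train, 10)
y_test = keras.utils.to_categorical(y_test, 10)

# assumed architecture: two conv blocks followed by a dense classifier
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 3)),
    MaxPooling2D((2, 2)),
    Conv2D(64, (3, 3), activation='relu', padding='same'),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(128, activation='relu'),
    Dropout(0.5),
    Dense(10, activation='softmax'),
])

model.compile(optimizer=SGD(learning_rate=0.01, momentum=0.9),
              loss='categorical_crossentropy', metrics=['accuracy'])

# optional augmentation, matching the ImageDataGenerator import
datagen = ImageDataGenerator(width_shift_range=0.1, height_shift_range=0.1,
                             horizontal_flip=True)
model.fit(datagen.flow(x_train, y_train, batch_size=64), epochs=10,
          validation_data=(x_test, y_test))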
Output:
Assignment No. 11
Code:
import tensorflow as tf
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import load_breast_cancer
df = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(df.data, df.target, test_size=0.20, random_state=42)
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(1, activation='sigmoid', input_shape=(X_train.shape[1],))
])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5)
y_pred = model.predict(X_test)
test_loss, test_accuracy = model.evaluate(X_test, y_test)
print("accuracy is", test_accuracy)
Output:
Epoch 1/5
15/15 [==============================] - 1s 2ms/step - loss: 0.5449 - accuracy: 0.7385
Epoch 2/5
15/15 [==============================] - 0s 2ms/step - loss: 0.4896 - accuracy: 0.7802
Epoch 3/5
15/15 [==============================] - 0s 2ms/step - loss: 0.4439 - accuracy: 0.8286
Epoch 4/5
15/15 [==============================] - 0s 2ms/step - loss: 0.4074 - accuracy: 0.8462
Epoch 5/5
15/15 [==============================] - 0s 3ms/step - loss: 0.3776 - accuracy: 0.8593
4/4 [==============================] - 0s 5ms/step
4/4 [==============================] - 0s 4ms/step - loss: 0.3090 - accuracy: 0.9298
accuracy is 0.9298245906829834
Assignment No. 12
Code:
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from tensorflow.keras.utils import to_categorical
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    MaxPooling2D((2, 2)),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPooling2D((2, 2)),
    Conv2D(64, (3, 3), activation='relu'),
    Flatten(),
    Dense(64, activation='relu'),
    Dense(10, activation='softmax')
])
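The listing stops at the model definition; below is a sketch of the data preparation, compilation, and training that the imports point to. The batch size and epoch count are assumptions.

# load MNIST and shape it for the conv layers
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(-1, 28, 28, 1).astype('float32') / 255.0
x_test = x_test.reshape(-1, 28, 28, 1).astype('float32') / 255.0
y_train = to_categorical(y_train, 10)
y_test = to_categorical(y_test, 10)

model.compile(optimizer='adam', loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x_train, y_train, epochs=5, batch_size=64,  # assumed settings
          validation_data=(x_test, y_test))
test_loss, test_acc = model.evaluate(x_test, y_test)
print('Test Accuracy:', test_acc)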
Output:
Assignment No. 13
Code:
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.optimizers import Adam
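Only the imports remain; below is a dense network consistent with the training log that follows (938 batches per epoch over 60,000 MNIST images implies a batch size of 64, and the run lasts 10 epochs). The layer sizes are assumptions.

# load and normalize MNIST
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.astype('float32') / 255.0
x_test = x_test.astype('float32') / 255.0

# assumed architecture: one hidden dense layer after flattening
model = Sequential([
    Flatten(input_shape=(28, 28)),
    Dense(128, activation='relu'),
    Dense(10, activation='softmax'),
])
model.compile(optimizer=Adam(),
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x_train, y_train, epochs=10, batch_size=64)
test_loss, test_acc = model.evaluate(x_test, y_test)
print('Test Loss:', test_loss)
print('Test Accuracy:', test_acc)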
Output:
Epoch 1/10
938/938 [==============================] - 5s 4ms/step - loss: 0.2984 - accuracy: 0.9153
Epoch 2/10
938/938 [==============================] - 7s 7ms/step - loss: 0.1353 - accuracy: 0.9612
Epoch 3/10
938/938 [==============================] - 4s 4ms/step - loss: 0.0944 - accuracy: 0.9723
Epoch 4/10
938/938 [==============================] - 4s 5ms/step - loss: 0.0708 - accuracy: 0.9783
Epoch 5/10
938/938 [==============================] - 4s 4ms/step - loss: 0.0558 - accuracy: 0.9833
Epoch 6/10
938/938 [==============================] - 4s 4ms/step - loss: 0.0447 - accuracy: 0.9864
Epoch 7/10
938/938 [==============================] - 4s 4ms/step - loss: 0.0363 - accuracy: 0.9892
Epoch 8/10
938/938 [==============================] - 4s 5ms/step - loss: 0.0293 - accuracy: 0.9913
Epoch 9/10
938/938 [==============================] - 4s 4ms/step - loss: 0.0255 - accuracy: 0.9927
Epoch 10/10
938/938 [==============================] - 4s 4ms/step - loss: 0.0202 - accuracy: 0.9944
313/313 [==============================] - 1s 2ms/step - loss: 0.0679 - accuracy: 0.9804
Test Loss: 0.06786014884710312
Test Accuracy: 0.980400025844574