From 6d89913bb0dafc7270f7ccd65e113d10067c5d66 Mon Sep 17 00:00:00 2001 From: biswajeetgithub <110840262+biswajeetgithub@users.noreply.github.com> Date: Sun, 13 Aug 2023 20:01:22 +0530 Subject: [PATCH 1/2] Biswajeet Mallick --- ...BiswajeetMallick.ipynb - Colaboratory.html | 37079 ++++++++++++++++ 1 file changed, 37079 insertions(+) create mode 100644 Day-1/Submission/BiswajeetMallick.ipynb - Colaboratory.html diff --git a/Day-1/Submission/BiswajeetMallick.ipynb - Colaboratory.html b/Day-1/Submission/BiswajeetMallick.ipynb - Colaboratory.html new file mode 100644 index 0000000..5674cb0 --- /dev/null +++ b/Day-1/Submission/BiswajeetMallick.ipynb - Colaboratory.html @@ -0,0 +1,37079 @@ + + +Day1LinearRegressionIncomplete.ipynb - Colaboratory + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Google Account
Biswajeet Mallick
biswajeetmallick03@gmail.com
+
+ + + + + + + + +
+ +
+ + + + Code +
Insert code cell below
Ctrl+M B
+ + Text +
Add text cell
+ + + + + + + + + Copy to Drive + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+ +
+
+ + +
Notebook +
+
+ + + +
+
+ +
+ +
+
+ +
+
+
+ + Code + + + Text + + +

+
+
+
+
+ +
+ +
+
+
!git clone https://github.com/IEEE-CISCodeCraft/Machine-Learning-Simplified.git
+
+
+
+
+
+ + +
+
+
+
Cloning into 'Machine-Learning-Simplified'...
+remote: Enumerating objects: 31, done.
+remote: Counting objects: 100% (16/16), done.
+remote: Compressing objects: 100% (12/12), done.
+remote: Total 31 (delta 1), reused 16 (delta 1), pack-reused 15
+Receiving objects: 100% (31/31), 377.63 KiB | 29.05 MiB/s, done.
+Resolving deltas: 100% (4/4), done.
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+
+

Replace the None instances present in the code.

+
+
+
+ +
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
!cp "/content/Machine-Learning-Simplified/Day-1/LRTestCases.py" "/content/"
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
from LRTestCases import *
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
def error(yhat, y):
    """Return the prediction error for one sample: yhat - y.

    Args:
        yhat: predicted value.
        y: true (observed) value.
    Returns:
        The signed difference yhat - y.
    """
    # HTML export had dropped the comma in the parameter list
    # (`def error(yhaty)`), leaving `yhat` and `y` undefined.
    error = yhat - y      # subtract the true value y from the prediction yhat
    return error
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#TEST YOUR IMPLEMENTATION
test_error_function(error)
+
+
+
+
+
+ + +
+
+
+
Test passed!
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
def error_square(error):
    """Return the square of the given error value."""
    squared = error ** 2  # squaring makes the penalty sign-independent
    return squared
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#TEST YOUR IMPLEMENTATION
test_error_square_function(error_square)
+
+
+
+
+
+ + +
+
+
+
Test passed!
+
+
+
+
+
+
+
+ +
+ +
+
+
def total_squared_error(error, num):
    """Accumulate the squared error over num samples.

    Args:
        error: per-sample squared error (a single value here).
        num: number of samples to accumulate over.
    Returns:
        The accumulated total (equivalent to error * num, kept as a
        loop to match the course exercise structure).
    """
    # HTML export had fused the parameters into `(errornum)`;
    # the body uses `error` and `num` separately.
    total_squared_error = 0
    for _ in range(num):
        total_squared_error = total_squared_error + error  # add the "error" to the total
    return total_squared_error
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#TEST YOUR IMPLEMENTATION
test_total_squared_error_function(total_squared_error)
+
+
+
+
+
+ + +
+
+
+
Test passed!
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+
+

$\text{Mean Squared Error} = \frac{1}{2m}\sum_{i=0}^{m-1}\left(y^{(i)} - \hat{y}^{(i)}\right)^2$

+
+
+
+ +
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
def mse(total_squared_error, num):
    """Return the mean squared error: total_squared_error / (2 * num).

    Args:
        total_squared_error: sum of squared errors over all samples.
        num: number of samples m.
    Returns:
        The MSE per the course formula (1/2m) * sum of squared errors.
    """
    denominator = 2 * num                      # the 2m term of the MSE formula
    mse = total_squared_error / denominator    # divide total squared error by 2m
    return mse                                 # bug fix: original returned `num`
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#TEST YOUR IMPLEMENTATION
test_mse_function(mse)
+
+
+
+
+
+ + +
+
+
+
Test passed!
+
+
+
+
+
+
+
+
+
+

Finding the predicted value

+
+
+
+ +
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
def predicted_value(w, x, b):
    """Return the linear-regression prediction yhat = w * x + b.

    Args:
        w: weight (slope).
        x: input value.
        b: bias (intercept).
    Returns:
        The predicted value w * x + b.
    """
    # HTML export had fused the parameters into `(wxb)`.
    yhat = w * x + b     # multiply w with x and add b
    return yhat
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#TEST YOUR IMPLEMENTATION
test_predicted_value(predicted_value)
+
+
+
+
+
+ + +
+
+
+
Test passed!
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+
+

Cost Function

+

The equation for cost with one variable is: +

$J(w,b) = \frac{1}{2m}\sum_{i=0}^{m-1}\left(\hat{y}^{(i)} - y^{(i)}\right)^2$

+
+
+
+ +
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+
+

Gradient Descent

+

1. Compute Gradient

+

The gradient is defined as: +

$\frac{\partial J(w,b)}{\partial w} = \frac{1}{m}\sum_{i=0}^{m-1}\left(\hat{y}^{(i)} - y^{(i)}\right)x^{(i)}$
$\frac{\partial J(w,b)}{\partial b} = \frac{1}{m}\sum_{i=0}^{m-1}\left(\hat{y}^{(i)} - y^{(i)}\right)$

+
+
+
+ +
+
+
+
+
+ +
+ +
+
+
def compute_gradient(x, y, w, b):
    """Compute the gradient of the squared-error cost J(w, b).

    Args:
        x: training inputs; array-like with .shape, length m.
        y: training targets, same length as x.
        w: current weight.
        b: current bias.
    Returns:
        (dj_dw, dj_db): partial derivatives of J with respect to w and b,
        averaged over the m samples.
    """
    # HTML export had fused the parameters into `(xywb)`.
    m = x.shape[0]          # number of training examples
    dj_dw = 0
    dj_db = 0

    for i in range(m):
        yhat = w * x[i] + b              # model prediction for sample i
        dj_dw_i = (yhat - y[i]) * x[i]   # per-sample contribution to dJ/dw
        dj_db_i = yhat - y[i]            # per-sample contribution to dJ/db
        dj_db += dj_db_i
        dj_dw += dj_dw_i
    dj_dw = dj_dw / m       # average over all samples
    dj_db = dj_db / m

    return dj_dw, dj_db
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
test_compute_gradient(compute_gradient)
+
+
+
+
+
+ + +
+
+
+
Test passed!
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+
+

2. Update the parameters num_iterations times

+

repeat until convergence: $w = w - \alpha \frac{\partial J(w,b)}{\partial w}$, $b = b - \alpha \frac{\partial J(w,b)}{\partial b}$
where parameters w and b are updated simultaneously.

+
+
+
+ +
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
def gradient_descent(x, y, learning_rate, num_iterations):
    """Run batch gradient descent for one-variable linear regression.

    Args:
        x: training inputs; array-like with .shape, length m.
        y: training targets, same length as x.
        learning_rate: step size alpha for each parameter update.
        num_iterations: number of full-batch update steps to run.
    Returns:
        (w, b): the fitted weight and bias after num_iterations steps.

    Relies on `compute_gradient` and `compute_cost` defined elsewhere
    in the notebook; prints the cost each iteration for monitoring.
    """
    # HTML export had fused the parameters into
    # `(xylearning_ratenum_iterations)`.
    # Initialize weights and bias
    w = 0
    b = 0
    for it in range(num_iterations):
        # Compute gradients using the compute_gradient function
        dj_dw, dj_db = compute_gradient(x, y, w, b)

        # Update weights and bias simultaneously (both use this
        # iteration's gradients, computed before either update)
        w = w - learning_rate * dj_dw
        b = b - learning_rate * dj_db
        # Compute the cost for monitoring
        cost = compute_cost(x, y, w, b)
        print(f'Iteration {it+1}/{num_iterations}, Cost: {cost:.6f}')
    return w, b
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
test_gradient_descent(gradient_descent, compute_cost, compute_gradient)
+
+
+
+
+
+ + +
+
+
+
Iteration 1/10, Cost: 1.782000
+Iteration 2/10, Cost: 1.589760
+Iteration 3/10, Cost: 1.420232
+Iteration 4/10, Cost: 1.270730
+Iteration 5/10, Cost: 1.138884
+Iteration 6/10, Cost: 1.022606
+Iteration 7/10, Cost: 0.920053
+Iteration 8/10, Cost: 0.829601
+Iteration 9/10, Cost: 0.749820
+Iteration 10/10, Cost: 0.679447
+Final parameters: w = 0.7955, b = 0.2545
+Final cost: 0.679447
+Test passed!
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+

+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+
+

Project: Melanoma Tumor Size Prediction

+
+
+
+ +
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, mean_absolute_error
from sklearn.model_selection import train_test_split
import seaborn as sns
import matplotlib.pyplot as plt
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Read the dataset
data=pd.read_csv('/content/Machine-Learning-Simplified/Day-1/melanoma_dataset.csv')
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Display the dataset
data
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Summary statistics of the dataset
#NOTE(review): the original comment said "Plot a graph to check linearity",
#but describe() only tabulates statistics — a plotting cell (e.g. a
#scatter/pairplot) may have been intended; confirm.
data.describe()
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Extract X and Y from data
#X uses double brackets so it stays a single-column (2-D) DataFrame,
#which is the shape sklearn's fit/predict expect; Y is a 1-D Series.
X= data[['mass_npea']]
Y= data['tumor_size']
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Train a Linear Regression Model
#Fits ordinary least squares on the full dataset — no train/test split
#is performed in this section.
model = LinearRegression()
model.fit(X,Y)
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Evaluate the Model
#NOTE(review): predictions are made on the same X the model was fit on,
#so this MSE is a training-set (optimistic) estimate. Also, `mse` here
#shadows the `mse` function defined earlier in the notebook.
y_pred = model.predict(X)
mse = mean_squared_error(Y,y_pred)
mse
+
+
+
+
+
+ + +
+
+
+
0.24388273258319765
+
+
+
+ + Code + + + Text + + +

+
+ + + + + + +
+
+
+ +
+ +
+
+
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+ + +
+
+
+
+
+ +
+
+ + + + + +
+
+
+
+
+
+
+ +
+ +
+
+ + + + + +
+
+
+
+
+ +
+
+ + + + + +
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file From ff561e176b5508b9f81a11455ab770a1a33dfea0 Mon Sep 17 00:00:00 2001 From: biswajeetgithub <110840262+biswajeetgithub@users.noreply.github.com> Date: Sun, 13 Aug 2023 20:41:32 +0530 Subject: [PATCH 2/2] Biswajeet Mallick --- ...iswajeet Mallick.ipynb - Colaboratory.html | 17682 ++++++++++++++++ 1 file changed, 17682 insertions(+) create mode 100644 Day-2/Biswajeet Mallick.ipynb - Colaboratory.html diff --git a/Day-2/Biswajeet Mallick.ipynb - Colaboratory.html b/Day-2/Biswajeet Mallick.ipynb - Colaboratory.html new file mode 100644 index 0000000..e2c55de --- /dev/null +++ b/Day-2/Biswajeet Mallick.ipynb - Colaboratory.html @@ -0,0 +1,17682 @@ + + +Day2DecisionTreeIncomplete.ipynb - Colaboratory + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Google Account
Biswajeet Mallick
biswajeetmallick03@gmail.com
+
+ + + + + + + + +
+ +
+ + + + Code +
Insert code cell below
Ctrl+M B
+ + Text +
Add text cell
+ + + + + + + + + Copy to Drive + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+ +
+
+ + +
Notebook +
+
+ + + +
+
+ +
+ +
+
+ +
+
+
+
+
+
+
+ +
+ +
+
+
!git clone "https://github.com/IEEE-CISCodeCraft/Machine-Learning-Simplified"
+
+
+
+
+
+ + +
+
+
+
Cloning into 'Machine-Learning-Simplified'...
+remote: Enumerating objects: 31, done.
+remote: Counting objects: 100% (6/6), done.
+remote: Compressing objects: 100% (2/2), done.
+remote: Total 31 (delta 4), reused 4 (delta 4), pack-reused 25
+Receiving objects: 100% (31/31), 375.67 KiB | 1.51 MiB/s, done.
+Resolving deltas: 100% (6/6), done.
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Import libraries
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error
import seaborn as sns
from sklearn.model_selection import train_test_split
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Read the file
train = pd.read_csv('/content/Machine-Learning-Simplified/Day-2/Train.csv')
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
train
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Visualize the dataset
sns.pairplot(data=train)
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
#Split the dataset using train_test_split
#NOTE(review): X and Y are not defined in this notebook's visible cells —
#the extraction cell above was left blank; confirm X/Y are created from
#`train` before this runs. No random_state is set, so the split differs
#between runs.
x_train, x_test, y_train, y_test=train_test_split(X,Y,test_size=0.2)
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
# Train a random forest regressor (an ensemble of decision trees) —
# the original comment said "decision tree", but the estimator used is
# RandomForestRegressor with default hyperparameters.
random_forest = RandomForestRegressor()
random_forest.fit(x_train,y_train)
+
+
+
+
+
+ + +
+
+
+
+
+
+
+ + Code + + + Text + + +

+
+
+
+ +
+ +
+
+
# Find the Mean Squared error on the held-out test split
pred= random_forest.predict(x_test)
mse= mean_squared_error(y_test,pred)
+
+
+
+
+
+ + Code + + + Text + + +

+
+ + + + + + +
+
+
+ +
+ +
+
+
+
+
+
+
+
+ + +
+
+
+
15.719423716124517
+
+
+
+ + Code + + + Text + + +

+
+
+
+
+ + +
+
+
+
+
+ +
+
+ + + + + +
+
+
+
+
+
+
+ +
+ +
+
+ + + + + +
+
+
+
+
+ +
+
+ + + + + +
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file