Initial Commit

Branch: master
Author: Amir Hossein Moghiseh, 2020-12-31 20:19:52 +03:30
Commit: 8c0c0b9d78
11 changed files with 989 additions and 0 deletions

@@ -0,0 +1,103 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
# sklearn.cross_validation was removed in scikit-learn 0.20; model_selection replaces it
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    # Keep the pandas DataFrame (no .to_numpy()) so .head() and .values below work;
    # reading .xlsx files needs an Excel engine such as openpyxl.
    balance_data = pd.read_excel('Train.xlsx', sheet_name="Sheet1")
    # Printing the dataset shape
    print("Dataset Length: ", len(balance_data))
    print("Dataset Shape: ", balance_data.shape)
    # Printing the dataset observations
    print("Dataset: ", balance_data.head())
    return balance_data
# Function to split the dataset
def splitdataset(balance_data):
    # Separating the target variable
    X = balance_data.values[:, 1:5]
    Y = balance_data.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()

@@ -0,0 +1,103 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    # Keep the pandas DataFrame (no .to_numpy()) so .head() and .values below work;
    # reading .xlsx files needs an Excel engine such as openpyxl.
    balance_data = pd.read_excel('Train.xlsx', sheet_name="Sheet1")
    # Printing the dataset shape
    print("Dataset Length: ", len(balance_data))
    print("Dataset Shape: ", balance_data.shape)
    # Printing the dataset observations
    print("Dataset: ", balance_data.head())
    return balance_data
# Function to split the dataset
def splitdataset(balance_data):
    # Separating the target variable
    X = balance_data.values[:, 1:5]
    Y = balance_data.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()

@@ -0,0 +1,96 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    # Keep the pandas DataFrame (no .to_numpy()) so .values works in splitdataset;
    # reading .xlsx files needs an Excel engine such as openpyxl.
    balance_data = pd.read_excel('Train.xlsx', sheet_name="Sheet1")
    return balance_data
# Function to split the dataset
def splitdataset(balance_data):
    # Separating the target variable
    X = balance_data.values[:, 1:5]
    Y = balance_data.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()

@@ -0,0 +1,96 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    # Keep the pandas DataFrame (no .to_numpy()) so .values works in splitdataset;
    # reading .xlsx files needs an Excel engine such as openpyxl.
    dataset = pd.read_excel('Train.xlsx', sheet_name="Sheet1")
    return dataset
# Function to split the dataset
def splitdataset(dataset):
    # Separating the target variable
    X = dataset.values[:, 1:5]
    Y = dataset.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()

@@ -0,0 +1,96 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    # Assign to `dataset` (the original assigned to `balance_data` but returned
    # `dataset`, which would raise a NameError).
    dataset = pd.read_csv('Train.csv', sep=',', header=None)
    return dataset
# Function to split the dataset
def splitdataset(dataset):
    # Separating the target variable
    X = dataset.values[:, 1:5]
    Y = dataset.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()

@@ -0,0 +1,96 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    dataset = pd.read_csv('Train.csv', sep=',', header=None)
    return dataset
# Function to split the dataset
def splitdataset(dataset):
    # Separating the target variable
    X = dataset.values[:, 1:5]
    Y = dataset.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()

.vscode/settings.json vendored 100644 (3 additions)
@@ -0,0 +1,3 @@
{
"python.pythonPath": "C:\\Users\\user\\AppData\\Local\\Programs\\Python\\Python38-32\\python.exe"
}

Train.csv 100644 (300 additions)
@@ -0,0 +1,300 @@
-1,1,-1,1,1,1,-1,-1,-1,1,0,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,0,1
0,1,1,0,1,1,-1,-1,-1,-1,1,-1,1,1,-1,-1,2
-1,1,1,-1,0,1,-1,-1,-1,-1,1,-1,1,-1,-1,1,2
1,1,1,-1,1,1,-1,-1,-1,-1,1,0,1,1,1,1,2
-1,1,1,-1,1,1,-1,-1,-1,-1,-1,-1,1,1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,-1,0,1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,-1,1,1,0,1,1
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,0,0,2
-1,1,-1,1,1,-1,-1,-1,-1,-1,0,0,1,1,-1,-1,1
-1,1,-1,1,1,1,-1,-1,-1,-1,1,0,1,1,0,0,1
-1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,1,-1,0,0,2
1,1,1,-1,-1,1,1,1,0,1,1,0,-1,-1,1,0,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,0,-1,0,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,0,-1,0,1
1,-1,1,-1,-1,1,-1,1,0,1,1,1,0,-1,-1,1,2
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,0,1,1,-1,-1,1
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
1,1,1,-1,-1,0,1,1,-1,-1,1,-1,-1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,0,0,1,1,2
1,0,1,-1,-1,-1,1,1,1,-1,-1,0,-1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
1,-1,-1,1,1,-1,1,1,1,-1,-1,1,1,1,-1,1,1
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,0,2
1,1,1,-1,-1,-1,1,1,1,1,-1,-1,1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
1,0,-1,1,1,1,-1,-1,-1,1,-1,1,0,1,-1,1,1
1,1,-1,1,1,1,-1,-1,-1,-1,-1,-1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
1,-1,1,-1,-1,-1,1,1,1,1,1,-1,1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,0,-1,-1,-1,-1,0,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,-1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,-1,0,2
1,1,1,-1,-1,-1,1,1,0,-1,1,-1,-1,-1,1,0,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,-1,1,2
1,-1,1,-1,-1,-1,1,1,0,-1,-1,-1,-1,-1,-1,0,2
1,1,1,-1,-1,-1,1,1,-1,-1,-1,-1,-1,1,-1,1,2
-1,0,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,0,-1,-1,-1,1,1,1,-1,1,1
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,0,0,2
1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
1,1,1,-1,-1,1,0,1,-1,-1,1,1,-1,1,-1,0,2
-1,1,-1,1,1,1,-1,-1,-1,1,1,1,1,1,-1,-1,1
-1,1,-1,1,1,1,-1,-1,-1,1,1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,0,1
1,1,1,-1,-1,0,1,1,1,1,-1,-1,-1,-1,1,0,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,-1,0,2
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,-1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,1,0,-1,-1,-1,1,2
1,1,-1,1,1,1,1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,1,-1,1,1,1,1,-1,-1,-1,1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,0,2
1,1,1,1,-1,-1,1,1,1,1,1,-1,-1,1,-1,1,1
1,1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,0,2
1,-1,1,1,1,-1,1,-1,1,1,-1,-1,1,1,-1,1,1
1,-1,1,-1,-1,1,1,1,1,1,1,-1,-1,1,1,1,2
-1,1,1,1,1,1,-1,-1,-1,1,1,-1,1,1,-1,-1,2
-1,1,1,-1,1,1,-1,-1,-1,1,1,1,1,1,-1,0,2
-1,1,1,1,1,1,-1,1,1,1,1,1,1,1,-1,1,2
1,1,1,-1,1,1,-1,-1,-1,1,1,-1,1,1,-1,1,2
-1,-1,-1,1,1,-1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
1,-1,1,-1,-1,1,1,1,1,1,-1,1,-1,1,-1,0,2
1,-1,1,-1,-1,-1,1,1,0,1,1,1,-1,1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
-1,0,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
-1,-1,1,-1,1,1,-1,-1,-1,1,1,1,1,1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
-1,1,1,-1,1,1,1,-1,1,1,1,-1,1,1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,0,1,1,-1,0,1
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,-1,1,1,-1,-1,0,1,1,2
1,-1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,1,2
1,-1,1,-1,1,1,-1,-1,-1,-1,-1,-1,-1,-1,-1,1,2
1,-1,1,-1,1,1,-1,0,0,-1,1,0,0,0,1,1,2
-1,-1,0,-1,1,1,-1,-1,-1,-1,1,1,1,1,-1,1,2
1,-1,-1,-1,1,1,1,-1,-1,1,1,-1,-1,1,-1,1,2
1,1,1,-1,-1,1,1,1,1,1,-1,-1,-1,-1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,0,1,1,1,-1,-1,1
1,-1,-1,-1,1,1,-1,-1,-1,-1,1,1,-1,1,-1,1,2
1,-1,1,-1,1,1,1,-1,-1,-1,1,-1,-1,1,-1,1,2
1,-1,1,-1,1,1,1,-1,0,-1,1,-1,1,1,1,0,2
1,-1,-1,-1,1,1,0,-1,0,-1,-1,-1,-1,1,0,-1,2
0,0,0,0,-1,1,1,1,1,1,0,-1,1,1,-1,0,2
1,1,1,-1,-1,-1,-1,1,1,-1,1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,2
-1,0,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,0,1,-1,-1,1,1,1,-1,1,-1,-1,-1,-1,1,0,2
-1,0,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
-1,0,1,-1,0,0,1,1,1,1,0,0,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
1,1,1,1,1,-1,1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,1,1,-1,-1,-1,-1,1,1,1,1,-1,-1,-1,1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
-1,0,0,1,1,1,-1,-1,-1,1,-1,1,1,1,0,1,1
-1,0,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,-1,1,-1,1,1
1,0,-1,1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1,1
-1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,0,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,0,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
-1,0,1,-1,-1,-1,1,1,1,1,1,-1,-1,1,1,1,2
-1,0,1,-1,-1,1,-1,1,-1,1,1,-1,-1,-1,1,1,2
0,0,1,-1,-1,-1,1,1,0,-1,0,0,0,0,0,0,2
1,0,1,-1,0,0,1,1,1,-1,-1,-1,-1,-1,1,0,2
-1,-1,1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,0,1
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,0,1
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
-1,1,-1,1,1,1,-1,-1,-1,1,1,1,1,-1,-1,1,1
-1,0,1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,1,2
-1,-1,1,-1,-1,1,1,1,1,1,-1,-1,-1,1,-1,1,2
1,-1,1,-1,-1,1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,-1,-1,1,-1,-1,1,1,1,1,-1,-1,1,1,-1,1,1
-1,-1,-1,1,1,1,1,1,1,1,-1,1,1,1,0,1,1
-1,-1,-1,1,1,1,1,1,1,1,-1,1,1,1,-1,1,1
0,1,-1,-1,-1,-1,1,1,1,1,1,-1,-1,1,1,1,2
-1,0,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,-1,0,2
-1,-1,1,-1,-1,1,1,1,1,1,-1,-1,-1,1,0,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,-1,-1,-1,-1,-1,1,1,1,1,-1,1,1,1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,1,1,1,1,1,-1,1,1
-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,1,-1,1,1,2
1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
1,1,0,1,1,1,-1,-1,1,-1,1,0,1,1,-1,-1,2
-1,1,1,-1,-1,1,-1,1,1,1,1,-1,1,-1,1,1,2
-1,-1,1,-1,-1,1,1,1,1,1,1,-1,1,1,-1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
1,1,-1,1,1,1,-1,0,-1,-1,1,1,1,1,-1,-1,1
1,1,-1,1,1,1,1,-1,-1,-1,-1,1,1,1,-1,-1,1
-1,1,1,-1,-1,1,-1,1,1,-1,1,-1,0,0,0,0,2
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
-1,1,1,-1,0,1,1,1,1,1,1,-1,-1,0,-1,0,2
-1,1,-1,-1,1,1,-1,-1,-1,-1,-1,1,1,1,1,1,2
-1,-1,-1,-1,1,1,1,-1,-1,-1,-1,1,1,1,-1,1,2
-1,1,1,-1,1,1,1,-1,-1,-1,1,1,1,1,-1,1,2
-1,1,-1,1,1,1,1,-1,-1,-1,-1,1,1,1,-1,1,1
1,1,-1,-1,1,1,-1,-1,-1,1,1,1,1,1,-1,0,2
-1,1,1,-1,-1,1,1,1,1,1,1,-1,1,-1,1,0,2
1,-1,1,1,1,1,1,1,-1,1,-1,1,-1,1,1,1,1
1,-1,1,1,1,1,1,1,-1,1,1,1,-1,1,1,1,1
-1,-1,1,1,1,1,-1,-1,1,-1,-1,-1,1,1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,1,1,-1,-1,1,-1,1,2
1,-1,1,-1,-1,-1,0,1,1,0,-1,-1,-1,-1,1,0,2
-1,0,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,1,1,-1,-1,-1,1,1,1,1,-1,-1,0,-1,1,1,2
-1,-1,-1,-1,1,1,-1,-1,-1,1,1,1,1,1,-1,1,2
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
-1,1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,2
-1,-1,1,1,-1,-1,1,1,1,1,-1,-1,-1,1,1,1,1
-1,-1,1,-1,-1,-1,1,1,1,1,1,0,-1,-1,1,1,2
0,-1,1,-1,-1,-1,1,1,1,1,1,0,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,2
0,0,1,-1,-1,-1,1,1,1,0,0,-1,-1,-1,0,0,2
-1,-1,1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,2
1,0,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2
-1,-1,1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,0,-1,-1,1,1,2
-1,1,1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,0,-1,1,1,1,1,1,-1,-1,-1,1,0,1,0,0,1
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,0,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,0,1
-1,1,-1,1,1,1,-1,0,-1,1,-1,1,1,1,-1,0,1
-1,-1,-1,-1,-1,1,1,1,1,-1,1,-1,-1,1,1,1,2
-1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,-1,1,-1,-1,1,1,0,1,1,1,-1,-1,-1,1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,0,1
-1,-1,1,-1,-1,1,1,1,1,-1,1,1,-1,1,1,0,2
-1,0,1,1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,1,1
-1,-1,1,-1,-1,-1,1,1,1,1,1,-1,0,-1,1,0,2
1,1,-1,-1,-1,-1,1,1,0,-1,1,-1,-1,-1,1,0,2
-1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,1,1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,1,1,1,1,1,1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,1,2
-1,-1,1,1,1,1,1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,-1,1,-1,-1,1,1,1,1,1,-1,1,-1,-1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,-1,1,-1,1,1
1,0,-1,1,1,1,1,-1,-1,1,-1,1,1,1,-1,1,1
-1,-1,1,-1,-1,-1,1,1,1,-1,-1,0,-1,-1,1,1,2
1,1,1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,-1,1,2
-1,-1,1,-1,-1,1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,-1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
-1,1,1,-1,-1,1,-1,1,1,-1,1,-1,1,-1,1,1,2
1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,1,1,1,1,1,-1,-1,-1,1,1,1,1,1,1,0,2
1,1,1,-1,1,1,-1,-1,0,1,-1,-1,-1,1,1,0,2
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
1,0,1,-1,-1,-1,1,1,1,-1,0,-1,-1,-1,1,0,2
-1,1,1,-1,-1,-1,-1,1,1,-1,1,-1,-1,1,1,1,2
-1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
-1,1,1,-1,1,1,-1,-1,-1,-1,1,-1,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,0,2
-1,-1,-1,1,1,-1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,1,-1,0,1,1,-1,-1,1
-1,0,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,-1,1,-1,-1,1,1,1,1,-1,1,-1,-1,1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,0,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,0,-1,1,1
-1,1,1,1,1,1,1,-1,1,1,-1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,1,-1,1,1,1,-1,-1,1,1,-1,1,1,1,-1,1,1
-1,1,1,-1,-1,-1,1,1,-1,-1,1,-1,-1,-1,1,0,2
-1,1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1
-1,-1,1,-1,-1,1,1,1,1,1,-1,1,-1,1,1,0,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,-1,1,-1,1,1
-1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,1,1,1,-1,-1,-1,1,1,-1,-1,1,2
1,1,1,-1,-1,-1,1,1,0,1,-1,-1,-1,-1,1,0,2
-1,-1,-1,1,1,1,1,-1,-1,1,-1,-1,-1,1,1,1,1
-1,-1,-1,1,-1,1,1,0,1,-1,-1,1,1,1,-1,1,1
1,-1,1,-1,-1,-1,1,1,1,1,1,-1,-1,1,1,1,2
-1,-1,-1,-1,1,1,1,-1,-1,-1,-1,0,-1,1,1,1,1
-1,1,1,-1,-1,-1,1,1,0,1,-1,-1,1,-1,1,1,2
1,-1,1,-1,-1,-1,-1,1,1,1,-1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,2
-1,-1,1,-1,1,-1,1,1,1,-1,-1,-1,-1,1,0,1,2
-1,1,-1,1,1,1,0,-1,-1,-1,-1,0,1,1,-1,-1,1
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
1,-1,1,-1,-1,-1,1,1,0,-1,1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,-1,1
1,1,1,-1,-1,1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,-1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,1,1,1,2
-1,-1,-1,1,1,-1,-1,-1,-1,-1,-1,1,-1,1,-1,-1,1
-1,-1,-1,1,1,-1,-1,-1,-1,-1,-1,1,-1,1,0,1,1
-1,-1,1,-1,-1,-1,1,1,1,-1,1,-1,-1,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,-1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,-1,1,2
1,-1,1,-1,-1,0,1,1,1,-1,0,0,-1,0,0,0,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,0,-1,1,1,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,-1,1,2
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,-1,1,-1,1,1
1,-1,-1,-1,-1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,0,2
1,-1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,-1,1,2
1,1,1,-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,2
-1,1,1,-1,-1,1,1,1,1,-1,0,-1,-1,-1,-1,1,2
1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,-1,1,0,2
-1,-1,-1,1,1,-1,1,1,-1,1,-1,1,1,1,0,1,1
1,-1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,1,1,1
-1,-1,1,-1,1,1,-1,-1,-1,-1,0,-1,1,1,-1,-1,2
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,1,-1,1
-1,-1,1,1,1,1,1,1,-1,1,-1,-1,-1,1,-1,1,1
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,-1,-1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,-1,1
-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1,1,-1,1,2
1,-1,1,1,1,1,1,1,-1,-1,-1,-1,-1,1,-1,0,1
1,-1,-1,1,1,1,-1,-1,-1,1,-1,0,1,1,-1,-1,1
-1,-1,-1,1,1,1,-1,-1,-1,-1,-1,1,1,1,-1,1,1
-1,-1,1,-1,-1,1,1,1,1,1,1,-1,-1,-1,0,1,2
-1,-1,1,-1,-1,1,1,1,1,1,1,-1,-1,-1,1,1,2
-1,-1,1,-1,-1,1,0,1,0,1,1,1,-1,1,1,0,2
1,1,1,0,-1,1,1,1,1,-1,1,-1,1,-1,0,1,2
1,1,1,-1,1,1,-1,1,-1,1,1,-1,1,1,1,1,2
1,1,1,-1,1,1,-1,1,-1,1,1,-1,1,1,-1,0,2
1,-1,1,-1,0,1,0,1,1,1,-1,-1,1,1,-1,1,2
1,-1,1,-1,-1,1,1,1,1,1,-1,0,-1,1,-1,1,2
1,-1,1,-1,-1,1,1,1,-1,1,1,-1,1,1,1,1,2
1,1,1,-1,-1,1,1,1,1,1,1,-1,1,1,1,1,2
-1,1,1,-1,-1,1,1,1,-1,1,1,-1,1,1,-1,0,2
-1,1,-1,1,1,1,0,0,-1,1,-1,1,0,0,0,0,1
-1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,1,1,1
1,1,1,-1,-1,1,1,1,1,1,-1,-1,0,-1,1,0,2
-1,1,-1,-1,-1,-1,1,1,1,1,1,-1,-1,-1,1,1,2
-1,1,1,-1,-1,1,1,1,1,1,-1,-1,1,1,1,1,2
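Note: each row of Train.csv above appears to hold 16 feature values plus a class label (1 or 2) in the last column, while the scripts in this commit slice X from columns 1:5 and Y from column 0. Below is a minimal sketch (not part of the commit) of loading the file and splitting it on that assumption; the column roles are inferred from the raw rows, not documented anywhere here.

import pandas as pd
from sklearn.model_selection import train_test_split

df = pd.read_csv("Train.csv", sep=",", header=None)
X = df.values[:, :-1]   # assumed: first 16 columns are features
y = df.values[:, -1]    # assumed: last column is the class label (1 or 2)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=100)
print(X_train.shape, X_test.shape)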

Train.xlsx 100644 (BIN, binary file not shown)
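The Excel-based variants above read Train.xlsx, which cannot be displayed here. Assuming it holds the same table as Train.csv on a sheet named "Sheet1" (an assumption, since the binary is not shown), an equivalent file could be regenerated with pandas; writing .xlsx requires the openpyxl package.

import pandas as pd

# Hypothetical helper, not part of the commit: rebuild Train.xlsx from Train.csv.
df = pd.read_csv("Train.csv", sep=",", header=None)
df.to_excel("Train.xlsx", sheet_name="Sheet1", index=False, header=False)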

app.py 100644 (96 additions)
@@ -0,0 +1,96 @@
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Function importing the dataset
def importdata():
    dataset = pd.read_csv('Train.csv', sep=',', header=None)
    return dataset
# Function to split the dataset
def splitdataset(dataset):
    # Separating the target variable
    X = dataset.values[:, 1:5]
    Y = dataset.values[:, 0]
    # Splitting the dataset into train and test sets
    X_train, X_test, y_train, y_test = train_test_split(
        X, Y, test_size=0.3, random_state=100)
    return X, Y, X_train, X_test, y_train, y_test
# Function to perform training with the Gini index
def train_using_gini(X_train, X_test, y_train):
    # Creating the classifier object
    clf_gini = DecisionTreeClassifier(criterion="gini", random_state=100,
                                      max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_gini.fit(X_train, y_train)
    return clf_gini
# Function to perform training with entropy
def train_using_entropy(X_train, X_test, y_train):
    # Decision tree with entropy
    clf_entropy = DecisionTreeClassifier(
        criterion="entropy", random_state=100,
        max_depth=3, min_samples_leaf=5)
    # Performing training
    clf_entropy.fit(X_train, y_train)
    return clf_entropy
# Function to make predictions
def prediction(X_test, clf_object):
    # Prediction on the test set
    y_pred = clf_object.predict(X_test)
    print("Predicted values:")
    print(y_pred)
    return y_pred
# Function to report accuracy
def cal_accuracy(y_test, y_pred):
    print("Confusion Matrix: ",
          confusion_matrix(y_test, y_pred))
    print("Accuracy : ",
          accuracy_score(y_test, y_pred) * 100)
    print("Report : ",
          classification_report(y_test, y_pred))
# Driver code
def main():
    # Building Phase
    data = importdata()
    X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
    clf_gini = train_using_gini(X_train, X_test, y_train)
    clf_entropy = train_using_entropy(X_train, X_test, y_train)
    # Operational Phase
    print("Results Using Gini Index:")
    # Prediction using gini
    y_pred_gini = prediction(X_test, clf_gini)
    cal_accuracy(y_test, y_pred_gini)
    print("Results Using Entropy:")
    # Prediction using entropy
    y_pred_entropy = prediction(X_test, clf_entropy)
    cal_accuracy(y_test, y_pred_entropy)
# Calling the main function
if __name__ == "__main__":
    main()
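Running app.py prints the predictions, confusion matrix, accuracy, and classification report for both trees. As a follow-up sketch (not part of the commit), the fitted tree can also be rendered as text with scikit-learn's export_text; the feature names used below are hypothetical placeholders f0, f1, ... because the dataset's real column meanings are not documented in this commit.

from sklearn.tree import export_text
from app import importdata, splitdataset, train_using_gini

data = importdata()
X, Y, X_train, X_test, y_train, y_test = splitdataset(data)
clf = train_using_gini(X_train, X_test, y_train)
print(export_text(clf, feature_names=[f"f{i}" for i in range(X_train.shape[1])]))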