Compare commits
No commits in common. "eb13de982576aa8401a995cdbc0368bad5ccf20e" and "c82ca1dcc99c2bdb95e61d8f9609036246116898" have entirely different histories.
eb13de9825...c82ca1dcc9
@@ -3,9 +3,9 @@ import numpy as np
 import matplotlib.pyplot as plt
 import seaborn as sns
 import csv
+from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler, MaxAbsScaler
 from enum import Enum
 import random
-from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler, MaxAbsScaler
 from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, matthews_corrcoef
 
 class Tree(Enum):
@@ -19,8 +19,7 @@ class Tree(Enum):
     PLATAAN = 7
 
 # Open file
-# file = open('dataset\\csv\\result-2023-10-14T16.13.30.csv', "r")
-file = open('./out/result-2023-10-10T15.08.36.csv', "r")
+file = open('dataset\\csv\\result-2023-10-14T16.13.30.csv', "r")
 data = list(csv.reader(file, delimiter=","))
 file.close()
 
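Aside on the hunk above (not code from either commit): both versions read the whole CSV into a list and close the handle manually. The same read with a context manager, reusing the path kept in this hunk, would look roughly like:

    import csv

    # 'with' closes the file even if CSV parsing raises.
    with open('dataset\\csv\\result-2023-10-14T16.13.30.csv', 'r') as file:
        data = list(csv.reader(file, delimiter=","))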
@@ -33,7 +32,7 @@ tags_int = []
 
 for row in data:
     tree = row.pop(0)
-    # photoId = row.pop(1)
+    row.pop(1) # TODO: don't do this
     id = Tree[tree.upper()]
 
     # print("Tree name =", tree, " id =", id.value)
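For context on the loop above: each row's first field is a tree name that is upper-cased and mapped to an integer class id through the Tree enum. A self-contained sketch of that lookup, with the member values listed elsewhere in this compare:

    from enum import Enum

    class Tree(Enum):
        ACCASIA = 0
        BERK = 1
        EIK = 2
        ELS = 3
        ESDOORN = 4
        ES = 5
        LINDE = 6
        PLATAAN = 7

    # A CSV tag such as "berk" is upper-cased and resolved to its integer id.
    print(Tree["berk".upper()].value)  # prints 1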
@@ -63,26 +62,26 @@ for idx, col in enumerate(data[0]):
     column = np.array(column).reshape(len(column))
 
     # DEBUG Print resulting column
-    # print("NORM", header[idx + 1], "\n", column)
+    print("NORM", header[idx + 1], "\n", column)
 
     # Replace original data array
     data[:, idx] = column
 
 # # Get a random number for testing
 # validateId = random.randint(0, tags_len - 1)
-# tag_true = []
-# tag_predict = []
+tag_true = []
+tag_predict = []
 
-# print(tags_len)
+print(tags_len)
 
-# for validateId in range(0, tags_len - 1):
-# # Remove object from train set
-# validateTag = tags_int[validateId]
-# validateObj =np.array([data[validateId]])
-# np.delete(tags_int, validateId)
-# np.delete(data, validateTag)
+for validateId in range(0, tags_len - 1):
+    # Remove object from train set
+    validateTag = tags_int[validateId]
+    validateObj =np.array([data[validateId]])
+    np.delete(tags_int, validateId)
+    np.delete(data, validateTag)
 
-# tag_true.append(validateTag)
+    tag_true.append(validateTag)
 
 # print(validateTag, validateObj)
 
@@ -91,29 +90,27 @@ for idx, col in enumerate(data[0]):
 print(data.dtype, type(data), tags_int.dtype, type(tags_int))
 knn.train(data, cv.ml.ROW_SAMPLE, tags_int)
 
-knn.save('./out/models/knn_nosift.pkl')
 
 # print (data)
 # print('--------------------')
 # print (validateObj)
 
-# ret, results, neighbours ,dist = knn.findNearest(validateObj, 3)
-# tag_predict.append(results[0][0])
+ret, results, neighbours ,dist = knn.findNearest(validateObj, 3)
+tag_predict.append(results[0][0])
 
 # print( "result: {}\n".format(results) )
 # print( "neighbours: {}\n".format(neighbours) )
 # print( "distance: {}\n".format(dist) )
 
-
 # # Create a heatmap
-# sns.heatmap(confusion_matrix(tag_true, tag_predict), annot=True)
-# plt.title( "Confusion Matrix KNN" )
-# plt.show()
+# Create a heatmap
+sns.heatmap(confusion_matrix(tag_true, tag_predict), annot=True)
+plt.title( "Confusion Matrix KNN" )
+plt.show()
 
 # Score
-# print("Accuracy score", accuracy_score(tag_true, tag_predict))
-# print("Precision score (macro)", precision_score(tag_true, tag_predict, average='macro'))
-# print("Precision score (micro)", precision_score(tag_true, tag_predict, average='micro'))
-# print("Recall score (macro)", recall_score(tag_true, tag_predict, average='macro'))
-# print("Recall score (micro)", recall_score(tag_true, tag_predict, average='micro'))
-# print("MCC", matthews_corrcoef(tag_true, tag_predict))
+print("Accuracy score", accuracy_score(tag_true, tag_predict))
+print("Precision score (macro)", precision_score(tag_true, tag_predict, average='macro'))
+print("Precision score (micro)", precision_score(tag_true, tag_predict, average='micro'))
+print("Recall score (macro)", recall_score(tag_true, tag_predict, average='macro'))
+print("Recall score (micro)", recall_score(tag_true, tag_predict, average='micro'))
+print("MCC", matthews_corrcoef(tag_true, tag_predict))
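A note on the block re-enabled above: np.delete returns a new array rather than modifying its argument, so the uncommented np.delete calls discard their result and leave data and tags_int unchanged. A hedged sketch of a leave-one-out loop that actually uses the return values (not code from either commit; train_data and train_tags are illustrative names, and a fresh model is trained per fold):

    import numpy as np
    import cv2 as cv

    # data: float32 feature matrix, tags_int: int32 labels, as built earlier in the script.
    tag_true = []
    tag_predict = []

    for validateId in range(len(tags_int)):
        # Hold out one row; np.delete returns new arrays instead of editing in place.
        validateObj = data[validateId].reshape(1, -1).astype(np.float32)
        train_data = np.delete(data, validateId, axis=0)
        train_tags = np.delete(tags_int, validateId, axis=0)

        # Train a fresh classifier per fold and classify the held-out row.
        knn = cv.ml.KNearest_create()
        knn.train(train_data, cv.ml.ROW_SAMPLE, train_tags)
        ret, results, neighbours, dist = knn.findNearest(validateObj, 3)

        tag_true.append(int(tags_int[validateId]))
        tag_predict.append(int(results[0][0]))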
@@ -323,7 +323,6 @@
 <child>
   <object class="tk.Text" id="testdata" named="True">
     <property name="height">15</property>
-    <property name="state">disabled</property>
     <property name="text" translatable="yes">No tests have been run yet</property>
     <property name="undo">false</property>
     <property name="width">25</property>
@@ -1,103 +0,0 @@
-import cv2 as cv
-import numpy as np
-import csv
-from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler, MaxAbsScaler
-import argparse
-from enum import Enum
-
-parser = argparse.ArgumentParser(prog='KNN Train CLI')
-parser.add_argument('-i', '--input', help='Input CSV file', required=True)
-parser.add_argument('-o', '--output', help='Output model file', required=True)
-
-class Tree(Enum):
-    ACCASIA = 0
-    BERK = 1
-    EIK = 2
-    ELS = 3
-    ESDOORN = 4
-    ES = 5
-    LINDE = 6
-    PLATAAN = 7
-
-class CVSuiteTestKNN:
-    def __init__(self, model = None):
-        if model is None:
-            self.knn = cv.ml.KNearest_create()
-            self.trained = False
-        else:
-            self.knn = cv.ml.KNearest_load(model)
-            self.trained = True
-
-    def trainCSV(self, path, output):
-        '''
-        Takes preprocessed data from CVSuite, normalises it and trains the model
-        Function expects first two columns of the dataset to be tag and photoId, the first row should be the CSV header
-        '''
-        file = open(path, mode='r')
-        data = list(csv.reader(file, delimiter=","))
-        file.close()
-
-        header = data.pop(0)
-        print("CSV tags: ", header)
-
-        # Get classifier tags
-        tags_int = []
-
-        for row in data:
-            tree = row.pop(0)
-            # photoId = row.pop(1)
-            id = Tree[tree.upper()]
-
-            # print("Tree name =", tree, " id =", id.value)
-            tags_int.append(id.value)
-
-        # Make into numpy array cus OpenCV is dumb af
-        tags_len = len(tags_int)
-        tags_int = np.array(tags_int, dtype=np.int32)
-
-        # Transform array for normalisation
-        data = np.array(data, dtype=np.float32)
-
-        for idx, col in enumerate(data[0]):
-            # Get column from data
-            column = data[:, idx]
-
-            # Shape it to 2 dimentional
-            column = np.array(column).reshape(-1, 1)
-
-            # Perform Min - Max scaling
-            # scaler = MinMaxScaler()
-            scaler = MaxAbsScaler()
-
-            column = scaler.fit_transform(column)
-
-            # Reshape it back cus scaler is dumb af
-            column = np.array(column).reshape(len(column))
-
-            # DEBUG Print resulting column
-            # print("NORM", header[idx + 1], "\n", column)
-
-            # Replace original data array
-            data[:, idx] = column
-
-        # Pass data to train function
-        self.train(data, tags_int, output)
-
-    def train(self, data, tags, output):
-        '''
-        Data should be normalised before being passed to this function
-        This function should not be run from within the suite
-        '''
-        if self.trained:
-            throw("Model already trained!")
-        else:
-            self.knn.train(data, cv.ml.ROW_SAMPLE, tags)
-            self.knn.save(output)
-
-    def predict(self, data):
-        return self.knn.predict(data)
-
-if __name__ == "__main__":
-    args = parser.parse_args()
-    test = CVSuiteTestKNN()
-    test.trainCSV(args.input, args.output)
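The deleted file above wraps the same preprocessing and training flow in a CVSuiteTestKNN class with a small argparse CLI (-i/--input, -o/--output). A hedged usage sketch, assuming the module path implied by the suite.py import below; the file paths and feature count are placeholders:

    # Command line, using the flags defined above (script location assumed):
    #   python src/helpers/test/knn.py -i out/result.csv -o out/models/knn.xml

    import numpy as np
    from helpers.test.knn import CVSuiteTestKNN

    # Train from a preprocessed CVSuite CSV and write the model to disk.
    trainer = CVSuiteTestKNN()
    trainer.trainCSV("out/result.csv", "out/models/knn.xml")

    # Reload the saved model and classify one normalised feature row.
    tester = CVSuiteTestKNN("out/models/knn.xml")
    sample = np.zeros((1, 8), dtype=np.float32)  # placeholder feature vector
    retval, results = tester.predict(sample)
    print(results)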
src/suite.py (19 changed lines)
@@ -19,9 +19,6 @@ from helpers.logger import CVSuiteLogger, C_DBUG
 from helpers.canvas import CVSuiteCanvas
 from helpers.sift import getSiftData
 
-# Tests
-from helpers.test.knn import CVSuiteTestKNN
-
 ## UI config load
 PROJECT_PATH = pathlib.Path(__file__).parent
 PROJECT_UI = "./src/helpers/gui/main.ui"
@@ -84,9 +81,6 @@ class CVSuite:
         )
         builder.connect_callbacks(self)
 
-        # Model tests
-        self.test_knn = CVSuiteTestKNN(config_json["models"]["knn"])
-
         # Load values from config after UI has been initialised
         self.img_path.set(config_json["path"])
         self.img_size.set(config_json["size"])
@@ -273,15 +267,6 @@ class CVSuite:
             self.log.add(f"Mean {label}", mean[idx])
             self.log.add(f"Std {label}", std[idx])
 
-    def runTest(self, event=None):
-        output = self.builder.get_object("testdata")
-        output.configure(state="normal")
-        output.delete(1.0, "end")
-
-        output.insert("end", "test\n")
-
-        output.configure(state="disabled")
-
     def updatePath(self):
         """
         Only update image name and path
@@ -421,9 +406,6 @@ class CVSuite:
         self.log.add("SIFT total response", siftData[5])
         self.log.add("SIFT average response", siftData[6])
 
-        # Run tests
-        self.runTest()
-
         # Write results to CSV file
        if not part_update:
             self.log.update()
@@ -434,6 +416,7 @@
         plt.show(block=False) ## Graphs
         self.canvas.draw(size) ## Images
 
+
 if __name__ == "__main__":
     app = CVSuite()
     app.run()
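The runTest method removed above follows the usual tkinter pattern for writing into a read-only Text widget (the testdata widget configured in the .ui hunk earlier): temporarily enable it, replace its contents, then disable it again. A minimal standalone sketch of that pattern, using plain tkinter rather than the pygubu builder:

    import tkinter as tk

    def show_results(output, text):
        # A disabled Text widget ignores edits, so enable it while writing.
        output.configure(state="normal")
        output.delete("1.0", "end")
        output.insert("end", text)
        output.configure(state="disabled")

    root = tk.Tk()
    output = tk.Text(root, height=15, width=25)
    output.insert("end", "No tests have been run yet")
    output.configure(state="disabled")
    output.pack()

    show_results(output, "test\n")
    root.mainloop()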