import numpy as np

from classes.TW_Utility import TW_Utility
from classes.TW_Matrice import TW_Matrice

#https://numpy.org/doc/stable/reference/generated/numpy.histogram2d.html
#https://medium.com/@shuv.sdr/simple-linear-regression-in-python-a0069b325bf8
#https://studybyyourself.com/seminar/linear-algebra/course/chapter-3-matrix-and-elimination/?lang=fr
class TW_Linear_regression:
    def __init__(self, x, y):
        self.x = x
        self.y = y
        self.dimensions = x.shape[-1]  # number of feature columns (last axis of x)
        self.shape = x.shape
        self.regressor = None  # fitted regressor, populated by process()

        self.coefficients = []  # filled only by the disabled gaussian_system_linear_equation path
        self.constant = None

    def process(self):
        matrice = TW_Matrice()

        # Ensure X and Y have the column layout expected by the sklearn regressor.
        columnAdded, X = TW_Utility.adjust_series_nbr_columns(self.x)
        columnAdded, Y = TW_Utility.adjust_series_nbr_columns(self.y)
        self.regressor = matrice.sklearn_linear_regression(X, Y)
        #self.coefficients, self.constant = matrice.gaussian_system_linear_equation(self.x, self.y)

    def predict(self, X):
        columnAdded, XAfter = TW_Utility.adjust_series_nbr_columns(X, self.shape)
        return self.regressor.predict(XAfter)

        # Disabled manual alternative: predict a single point from the
        # coefficients/constant computed by gaussian_system_linear_equation:
        #
        # predictedPoint = 0
        # for i in range(0, len(X)):
        #     predictedPoint = predictedPoint + (X[i] * self.coefficients[i])
        # predictedPoint = predictedPoint + self.constant
        # return predictedPoint
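
    # Hypothetical helper (not part of the original class): a vectorized sketch of the
    # disabled manual prediction above, assuming self.coefficients and self.constant
    # were filled by the gaussian_system_linear_equation path in process().
    def predict_manual(self, points):
        points = np.atleast_2d(points)                # shape (n_samples, n_features)
        coefficients = np.asarray(self.coefficients)  # shape (n_features,)
        return points @ coefficients + self.constant  # one prediction per row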
                
    def mean(self, values):
        # Arithmetic mean computed with an explicit loop (equivalent to sum(values) / len(values)).
        mean = 0
        for val in values:
            mean = mean + val

        mean = mean / len(values)
        return mean

    # Squared-error loss: ((a*x1 + b) - y1)^2 + ((a*x2 + b) - y2)^2 + ...
    # Note: regressor.score() actually returns the coefficient of determination R^2,
    # not the raw sum of squared residuals.
    def lossFunction(self, X, Y):
        columnAdded, X = TW_Utility.adjust_series_nbr_columns(X)
        columnAdded, Y = TW_Utility.adjust_series_nbr_columns(Y)
        return self.regressor.score(X, Y)

    '''
    Manual R^2 computation kept for reference; it relies on self.coefficients and
    self.constant from the disabled gaussian_system_linear_equation path.

    def lossFunction(self, X, Y):
        mean = self.mean(Y)
        nbrVariableColumns = TW_Utility.np_number_columns(X)

        sumOfSquaresMean = 0
        for i in range(0, len(Y)):
            sumOfSquaresMean = sumOfSquaresMean + pow(mean - Y[i], 2)

        sumOfSquaredResidual = 0
        for i in range(0, len(X)):
            localResidual = 0
            extractedPoint = X[i]

            if nbrVariableColumns > 1:
                for j in range(0, nbrVariableColumns):
                    localResidual = localResidual + (self.coefficients[j] * extractedPoint[j])
            else:
                localResidual = localResidual + (self.coefficients[0] * extractedPoint)

            residual = pow(((localResidual + self.constant) - Y[i]), 2)
            sumOfSquaredResidual = sumOfSquaredResidual + residual

        rSquared = (sumOfSquaresMean - sumOfSquaredResidual) / sumOfSquaresMean
        return rSquared
    '''
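

# Minimal usage sketch (an assumption, not part of the original project): synthetic data
# following y = 2*x + 1, fitted and evaluated through the class above. It assumes
# TW_Utility.adjust_series_nbr_columns and TW_Matrice.sklearn_linear_regression accept
# numpy arrays shaped (n_samples, n_features) / (n_samples,).
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    x = rng.uniform(0, 10, size=(50, 1))
    y = 2 * x[:, 0] + 1 + rng.normal(0, 0.1, size=50)

    regression = TW_Linear_regression(x, y)
    regression.process()                          # fit the sklearn regressor
    print(regression.predict(np.array([[3.0]])))  # expected to be close to 7
    print(regression.lossFunction(x, y))          # R^2, expected to be close to 1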