import numpy as np

#https://numpy.org/doc/stable/reference/generated/numpy.histogram2d.html
#https://medium.com/@shuv.sdr/simple-linear-regression-in-python-a0069b325bf8
#https://studybyyourself.com/seminar/linear-algebra/course/chapter-3-matrix-and-elimination/?lang=fr
class TW_2D_Linear_regression:
    """Fit the line y = a*x + b through two points and score it with R².

    The slope/intercept are computed exactly from the two points given to
    ``__init__``; ``lossFunction`` then evaluates the coefficient of
    determination (R²) of that line against an arbitrary set of (x, y)
    samples.
    """

    def __init__(self, x1, y1, x2, y2):
        # The two points (x1, y1) and (x2, y2) the line must pass through.
        self.x1 = x1
        self.x2 = x2
        self.y1 = y1
        self.y2 = y2

        # Slope and intercept; populated by process().
        self.a = None
        self.b = None

    def _equation(self):
        """Return a human-readable template of the fitted model."""
        # BUGFIX: was missing `self`, so instance calls raised TypeError.
        return 'a(x) +b'

    def predict(self, x):
        """Return the predicted y for *x* using the fitted line a*x + b.

        Raises:
            RuntimeError: if called before process() has computed a and b.
        """
        # BUGFIX: was a no-op stub without `self` that always returned None.
        if self.a is None or self.b is None:
            raise RuntimeError("Call process() before predict().")
        return (self.a * x) + self.b

    def process(self):
        """Compute and store slope ``a`` and intercept ``b``.

        Returns:
            tuple: (a, b).

        Raises:
            ValueError: if the two points share the same abscissa (vertical
                line, slope undefined).
        """
        if self.x2 == self.x1:
            raise ValueError("Les abscisses des deux points doivent être différentes.")

        # Slope from the two-point form, then intercept from point 1.
        self.a = (self.y2 - self.y1) / (self.x2 - self.x1)
        self.b = self.y1 - (self.a * self.x1)
        return self.a, self.b

    def mean(self, values):
        """Return the arithmetic mean of *values* (any non-empty iterable)."""
        # Idiom: builtin sum()/len() instead of a manual accumulation loop.
        values = list(values)
        return sum(values) / len(values)

    def lossFunction(self, values):
        """Return R² of the fitted line over *values*.

        Args:
            values: sequence of (x, y) pairs, convertible to an (n, 2) array.

        Returns:
            float-like: R² = (SS_tot - SS_res) / SS_tot, i.e. 1 - SS_res/SS_tot.

        Raises:
            RuntimeError: if process() has not been called yet.
            ValueError: if all y values are identical (SS_tot == 0, R² undefined).
        """
        if self.a is None or self.b is None:
            # BUGFIX: previously failed with an opaque TypeError (None * x).
            raise RuntimeError("Call process() before lossFunction().")

        values = np.asarray(values, dtype=float)
        x, y = values[:, 0], values[:, 1]

        # Total sum of squares around the mean of y.
        sum_of_squares_mean = np.sum((y - self.mean(y)) ** 2)
        if sum_of_squares_mean == 0:
            # BUGFIX: previously divided by zero, yielding nan with a warning.
            raise ValueError("R² is undefined when all y values are equal.")

        # Residual sum of squares against the fitted line.
        residuals = (self.a * x + self.b) - y
        sum_of_squared_residual = np.sum(residuals ** 2)

        return (sum_of_squares_mean - sum_of_squared_residual) / sum_of_squares_mean