Not a member of Pastebin yet? Sign up — it unlocks many cool features!
// Training set: [x, y] pairs the linear model is fit against.
// Note the last point (9, 100) is a strong outlier relative to the rest.
const training = [
  [0, 3],
  [1, 6],
  [2, 8],
  [3, 7],
  [9, 100],
];

// Step size (alpha) used by gradient descent.
const learningRate = 0.05;
- const hypothesis = (x,params) => params[1] * x + params[0]
/**
 * Half mean squared error of the hypothesis over the training set.
 * Logs the value as a side effect (used to watch convergence), then returns it.
 *
 * Fixes vs. original: `var` loop index replaced with `for...of`, and the
 * cost is computed once instead of twice (it was evaluated separately for
 * the console.log and the return).
 *
 * NOTE(review): an empty training set yields NaN (0 / 0) — callers here
 * always pass a non-empty set.
 *
 * @param {number[][]} training - array of [x, y] pairs
 * @param {number[]} params - [intercept, slope]
 * @returns {number} sum((h(x) - y)^2) / (2 * m)
 */
const cost = (training, params) => {
  let sum = 0;
  for (const [x, y] of training) {
    sum += (hypothesis(x, params) - y) ** 2;
  }
  const value = sum / (2 * training.length);
  console.log(value); // preserved logging side effect from the original
  return value;
};
/**
 * Averaged gradient of the cost w.r.t. the intercept (params[0]):
 * mean of (h(x) - y) over the training set.
 *
 * @param {number[][]} training - array of [x, y] pairs
 * @param {number[]} params - [intercept, slope]
 * @returns {number} the averaged residual
 */
const summedError1 = (training, params) =>
  training.reduce((acc, [x, y]) => acc + (hypothesis(x, params) - y), 0) /
  training.length;
/**
 * Averaged gradient of the cost w.r.t. the slope (params[1]):
 * mean of (h(x) - y) * x over the training set.
 *
 * @param {number[][]} training - array of [x, y] pairs
 * @param {number[]} params - [intercept, slope]
 * @returns {number} the averaged x-weighted residual
 */
const summedError2 = (training, params) =>
  training.reduce((acc, [x, y]) => acc + (hypothesis(x, params) - y) * x, 0) /
  training.length;
/**
 * Batch gradient descent for simple linear regression.
 * Mutates `params` ([intercept, slope]) in place, logging the cost after
 * each iteration and the final parameters at the end.
 *
 * Fixes vs. original: the fitted parameters are now returned (the original
 * returned undefined, so the result was only observable via mutation and
 * console output); `while` + manual counter replaced with a `for` loop;
 * `let` temporaries made `const`.
 *
 * @param {number[][]} training - array of [x, y] pairs
 * @param {number[]} params - initial [intercept, slope]; updated in place
 * @param {number} learningRate - step size (alpha)
 * @param {number} iterations - number of update steps to run
 * @returns {number[]} the (mutated) params array
 */
const gradientDescent = (training, params, learningRate, iterations) => {
  for (let step = 0; step < iterations; step++) {
    // Simultaneous update: both gradients are taken at the pre-update params.
    const nextIntercept = params[0] - learningRate * summedError1(training, params);
    const nextSlope = params[1] - learningRate * summedError2(training, params);
    params[0] = nextIntercept;
    params[1] = nextSlope;
    cost(training, params); // side effect: logs the current cost
  }
  console.log(params);
  return params;
};
- gradientDescent(training,[0,0],learningRate,1000)
Add Comment
Please sign in to add a comment.