What's wrong with this code? I tried to get the marks using an array, pass that array into the function's parameters, and calculate the average inside that function.
const marks = [100,100,80];
var summ = 0;
function calculateGrade(){
  for(let i = 0; i <= marks.length; i++){
    summ = summ + marks[i];
    var avg = (summ / marks.length);
  }
  if(avg <= 59){
    console.log('F');
  }
  else if(avg >= 60 && avg <= 69){
    console.log('D');
  }
  else if(avg >= 70 && avg <= 79){
    console.log('C');
  }
  else if(avg >= 80 && avg <= 89){
    console.log('B');
  }
  else if(avg >= 90 && avg <= 100){
    console.log('A');
  }
}
console.log(calculateGrade(marks));
CodePudding user response:
You are very close:
const marks = [100, 100, 80];

function calculateGrade(marks) {
  let summ = 0;
  for (let i = 0; i < marks.length; i++) {
    summ += marks[i];
  }
  const avg = summ / marks.length;
  if (avg <= 59) {
    console.log('F');
  } else if (avg <= 69) {
    console.log('D');
  } else if (avg <= 79) {
    console.log('C');
  } else if (avg <= 89) {
    console.log('B');
  } else {
    console.log('A');
  }
}

calculateGrade(marks);
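Note that this version only logs the grade, so your original console.log(calculateGrade(marks)) would still print undefined. If you would rather have the function return the letter, here is a minimal sketch of that variant (returning a string is just one possible convention, not something from the answer above):

const marks = [100, 100, 80];

function calculateGrade(marks) {
  let summ = 0;
  for (let i = 0; i < marks.length; i++) {
    summ += marks[i];
  }
  const avg = summ / marks.length;
  // Return the letter so the caller decides whether to log it or use it elsewhere
  if (avg <= 59) return 'F';
  if (avg <= 69) return 'D';
  if (avg <= 79) return 'C';
  if (avg <= 89) return 'B';
  return 'A';
}

console.log(calculateGrade(marks)); // prints "A" for these marks (average is about 93.3)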
CodePudding user response:
You just added an extra = in your for loop:

i <= marks.length

instead of

i < marks.length

So on the last iteration the loop reads marks[marks.length], which is undefined; adding undefined to the sum turns it into NaN, the average becomes NaN as well, and none of the grade conditions match.
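You can see what happens at that out-of-range index directly in the console; a quick sketch (not from the original answer):

const marks = [100, 100, 80];
console.log(marks[3]);        // undefined - there is no element at index 3
console.log(100 + undefined); // NaN - so the running sum turns into NaN
console.log(NaN / 3);         // NaN - and the average ends up as NaN too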
CodePudding user response:
const marks = [100, 100, 80];
var summ = 0;

// Issue one: the parameter (Tmarks) was missing from the function definition
function calculateGrade(Tmarks) {
  // Issue two: <= should be < in the loop condition
  for (let i = 0; i < Tmarks.length; i++) {
    summ += Tmarks[i];
  }
  var avg = summ / Tmarks.length;
  if (avg <= 59) {
    console.log("F");
  } else if (avg >= 60 && avg <= 69) {
    console.log("D");
  } else if (avg >= 70 && avg <= 79) {
    console.log("C");
  } else if (avg >= 80 && avg <= 89) {
    console.log("B");
  } else if (avg >= 90 && avg <= 100) {
    console.log("A");
  }
}
calculateGrade(marks);

These were the issues in your code:
- The function definition did not declare a parameter, so the array you passed in was never received.
- The loop condition used <= where it should use <, so it read one element past the end of the array.
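If you would rather skip the manual loop entirely, the sum can also be computed with Array.prototype.reduce; a minimal sketch of that approach (the helper name averageOf is just illustrative):

const marks = [100, 100, 80];

// Illustrative helper: averages a non-empty array of numbers
function averageOf(numbers) {
  const total = numbers.reduce((sum, n) => sum + n, 0);
  return total / numbers.length;
}

console.log(averageOf(marks)); // 93.33...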