**Tell us what’s happening:**

I feel like my code is right, it seems to work for other platforms. Can anyone tell me if I’ve made a mistake or if my method is so bad that it takes too long to run?

**Your code so far**

```
function smallestCommons(arr) {
// establish which is the larger num in the arr
let largest = Math.max(...arr);
let smallest = Math.min(...arr);
let isFound = false;
// ans will be the smallest common multiple found
let ans;
// start with a var equal to the larger num and test whether the num works
// increment num until a multiple is found
for (let num = largest; !isFound; num++) {
if (testMultiple(smallest, largest, num)){
// stop looping
isFound = true;
ans = num;
} else {
isFound = false;
}
}
function testMultiple(lowerRange, higherRange, testNum) {
// test the number for the given range
for(let num = higherRange; num>= lowerRange; num--) {
// if there is a remainder return false
if (testNum % num != 0) {
return false;
}
}
// if it passes all tests return true
return true;
}
return ans;
}
let test = smallestCommons([1,12])
console.log(test);
```

**Your browser information:**

User Agent is: `Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:62.0) Gecko/20100101 Firefox/62.0`

.

**Link to the challenge:**

https://learn.freecodecamp.org/javascript-algorithms-and-data-structures/intermediate-algorithm-scripting/smallest-common-multiple