Given 2 dimensional array a:
let a = [
[0, 0, 1, 0],
[0, 1, 1, 1],
[0, 0, 1, 0],
[0, 0, 1, 1]
]
How can I scale it by a given factor? For example, array b is array a scaled by 4:
let b =[
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]
]
This is the code I wrote to perform this operation, but it is slow in the client browser (Chrome) when dealing with large arrays (200 x 200) and scaling by, let's say, a factor of 16.
// scale an array by a factor of 'scale'
const scaledMatrixArray = (arr, scale) => {
  let newArr = [];
  arr.forEach((el) => {
    let newArrRow = [];
    el.forEach((el) => {
      for (let j = 0; j < scale; j++) {
        newArrRow.push(el);
      }
    });
    for (let i = 0; i < scale; i++) {
      newArr.push(newArrRow);
    }
  });
  return newArr;
};
I understand my implementation is some variant of O(n^2) and is highly inefficient. I am looking for a better way to do this, or a library that does it better and faster. My goal is for an N x N array, with N > 200, to scale up to 800 x 800 in the most efficient, fastest and least memory-intensive way.
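A minimal sketch of one memory-leaning option, assuming the values fit in a byte (the name scaleTyped is just illustrative): preallocate a Uint8Array per output row and repeat it with slice(), so rows stay compact and independent:

function scaleTyped(arr, scale) {
  const out = [];
  for (const row of arr) {
    // build one expanded row in a typed array
    const expanded = new Uint8Array(row.length * scale);
    for (let i = 0; i < row.length; i++) {
      expanded.fill(row[i], i * scale, (i + 1) * scale);
    }
    // push 'scale' independent copies of the expanded row
    for (let k = 0; k < scale; k++) {
      out.push(expanded.slice());
    }
  }
  return out;
}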
Here's a very reduced way, using Array().fill. It's running faster than the other answers, at least in my browser. I added two versions, one using the spread operator and the other using .apply. I'm getting faster results with apply.
function scaleSpread(array, factor) {
  const scaled = [];
  for (const row of array) {
    let x = [];
    for (const item of row)
      x.push(...Array(factor).fill(item));   // repeat each value 'factor' times
    scaled.push(...Array(factor).fill(x));   // repeat the row 'factor' times (same array reference)
  }
  return scaled;
}

function scaleApply(array, factor) {
  const scaled = [];
  for (const row of array) {
    let x = [];
    for (const item of row)
      x.push.apply(x, Array(factor).fill(item));
    scaled.push.apply(scaled, Array(factor).fill(x));
  }
  return scaled;
}

function scaleConcat(array, factor) {
  let scaled = [];
  for (const row of array) {
    let x = [];
    for (const item of row)
      x = x.concat(Array(factor).fill(item));
    scaled = scaled.concat(Array(factor).fill(x));
  }
  return scaled;
}
var a = [ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ]
console.time('spread');
scaleSpread(a, 10000);
console.timeEnd('spread');
console.time('apply');
scaleApply(a, 10000);
console.timeEnd('apply');
console.time('concat');
scaleConcat(a, 10000);
console.timeEnd('concat');
EDIT: Added a version using .concat, since apply and spread cause "Maximum call stack size exceeded" with very large arrays (both pass every new element as a separate function argument).
This approach uses a for loop to iterate over the array and repeat each element the decided number of times. It relies on the Array.prototype.splice method, grabbing the source value and inserting it into the array at a certain index.
PS: The source array (a) is mutated here, but you can always clone the original array first and create b for the result, as you wanted (a short sketch of that follows the timing code below).
var a = [
      [0, 0, 1, 0],
      [0, 1, 1, 1],
      [0, 0, 1, 0],
      [0, 0, 1, 1]
    ],
    scale = 4,
    scaleTheArray = function (arrayToScale, nTimes) {
      for (var idx = 0, i = 0, len = arrayToScale.length * nTimes; i < len; i++) {
        var elem = arrayToScale[idx];
        /* Insert the element into (idx + 1) */
        arrayToScale.splice(idx + 1, 0, elem);
        /* Add idx for the next elements */
        if ((i + 1) % nTimes === 0) {
          idx += nTimes + 1;
        }
      }
    };
console.time('testScale');
/* 1. Expand each row of a */
for (var i = 0, len = a.length; i < len; i++) {
  var arr = a[i];
  scaleTheArray(arr, scale - 1);
}
/* 2. Expand a itself */
scaleTheArray(a, scale - 1);
console.timeEnd('testScale');
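A short sketch of the cloning mentioned in the P.S., assuming you run it instead of the in-place block above, so a stays untouched and b holds the scaled result:

/* Deep-copy 'a' first, then let the splice-based scaling mutate only the copy */
var b = a.map(function (row) { return row.slice(); });
for (var i = 0, len = b.length; i < len; i++) {
  scaleTheArray(b[i], scale - 1);
}
scaleTheArray(b, scale - 1);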
In general, fewer function calls = less overhead:
function scale1D(arr, n)
{
  // grow the array n times in place, then fill backwards,
  // mapping each new index back to its source index
  for (var i = arr.length *= n; i; )
    arr[--i] = arr[i / n | 0]
}

function scale2D(arr, n)
{
  // scale every row, then repeat the rows themselves
  for (var i = arr.length; i; )
    scale1D(arr[--i], n)
  scale1D(arr, n)
}
var a = [ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ]
console.time( 1e5 )
scale2D(a, 1e5)
console.timeEnd( 1e5 )
var b = [ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ]
scale2D(b, 4)
console.log( JSON.stringify( b ).replace(/],/g, '],\n ') )
The main optimization is that after each of the rows is resized, the rows themselves are repeated (as references) instead of creating # rows * scale new row arrays. So, instead of processing n * scale arrays, only n arrays are processed. Another possible optimization is that on some browsers, arr.length *= n might allocate all of the needed contiguous memory at once.
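Since the repeated rows are shared references, here is a tiny sketch (unshareRows is just an illustrative name) of the extra pass you would need if each output row must be mutable on its own:

function unshareRows(arr)
{
  for (var i = 0; i < arr.length; i++)
    arr[i] = arr[i].slice()   // replace each shared row reference with its own copy
}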
For comparison, a functional version of scale1D/scale2D is about 2 times slower:
const scale1D = (arr, n) => [...Array(arr.length * n)].map((_, i) => arr[i / n | 0])
const scale2D = (arr, n) => scale1D( arr.map((row, i) => scale1D(row, n)), n )
console.time( 1e5 )
let a = scale2D([ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ], 1e5)
console.timeEnd( 1e5 )
let b = scale2D([ [0, 0, 1, 0], [0, 1, 1, 1], [0, 0, 1, 0], [0, 0, 1, 1] ], 4)
console.log( JSON.stringify( b ).replace(/],/g, '],\n ') )