Say we have this structure:
// 16 bins
let BIN_OF_BINS = [
[], // 128 bits each chunk
[], // 256
[], // 512
[], // 1024
[], // 2048
[], // 4096
[], // 8192
[], // 16384
[], // 32768
[], // 65536
[], // 131072
[], // 262144
[], // 524288
[], // 1048576
[], // 2097152
[{ start: 0, count: 100 }], // 4194304
]
Each bin in the BIN_OF_BINS hold a set of nodes representing slots in memory. The n+1 bin holds nodes of twice the size of the n bin. So the first bin holds 128 bit values, the next holds 256 bit values, the next 512, etc. The values contained in a bin can be contiguous, so we might have in the "256 bit value bin" a contiguous chunk of 1024 bits, so that would be represented with:
bin2 = [{ count: 4, start: 0 }]
If it had two non-contiguous chunks of 1024, it would be:
bin2 = [
{ count: 4, start: 0 },
{ count: 4, start: 4096 /* let's say */ }
]
In principle, you can add and remove from these bins as memory is used and freed. But for this question, we are only concerned with using freed memory (i.e. we are not concerned with freeing memory for this question).
So when the BIN_OF_BINS starts, only the top bin has let's say 100 values. So we start with this:
// 16 bins
let BIN_OF_BINS = [
[], // 128 bits each chunk
[], // 256
[], // 512
[], // 1024
[], // 2048
[], // 4096
[], // 8192
[], // 16384
[], // 32768
[], // 65536
[], // 131072
[], // 262144
[], // 524288
[], // 1048576
[], // 2097152
[{ start: 0, count: 100 }], // 4194304
]
Now, when we go to fetch a 256 bit value, we find there are none, so it traverses up the list to larger bins, and divides it in half (or does some other thing, which I'll get to). So if we request 1 256 value from a brand new BIN_OF_BINS, we go up and up and up, finding none until we get to the top. Then we iteratively divide. Starting with 4194304, here is how it goes (after we already iterated through the blank ones to get to the top):
// step 0
[{ start: 0, count: 100 }], // 4194304, bin 16
// step 1
[{ start: 4194304, count: 99 }], // 4194304, bin 16
[{ start: 0, count: 2 }], // 2097152, bin 15
// step 2
[{ start: 4194304, count: 99 }], // 4194304, bin 16
[{ start: 2097152, count: 1 }], // 2097152, bin 15
[{ start: 0, count: 2 }], // 1048576, bin 14
// step 3
[{ start: 4194304, count: 99 }], // 4194304, bin 16
[{ start: 2097152, count: 1 }], // 2097152, bin 15
[{ start: 1048576, count: 1 }], // 1048576, bin 14
[{ start: 0, count: 2 }] // 524288, bin 13
// etc.
We keep dividing like that until we end up with:
[{ start: 0, count: 2 }] // 256, bin 2
Now we can fetch from this "bin 2" a "256 bit memory slot" by simply doing:
node.start += 256
node.count--
And we end up with:
[{ start: 256, count: 1 }] // 256, bin 2
The question is, how can this be implemented efficiently? It is very confusing and difficult for me to wrap my head around.
- When fetching for a size (which will only be one of the first 4 bins), if none exist, try fetching from the parent and dividing in half.
- If parent doesn't have any, subdivide its parent.
- etc.
That's basically it. Here's what I have so far. I would like to do this without recursion (using just an iterative approach with loops), because it will be used in a place without recursion.
// 16 bins: BINS[i] holds free chunks of (128 << i) bits each, so bin 0 is
// 128-bit chunks and bin 15 is 4194304-bit chunks.  Each node
// { start, count } is a run of `count` contiguous free chunks whose first
// chunk begins at address `start` -- per the trace earlier in the post,
// addresses are in bits (TODO confirm; the per-line comments below count
// 32-bit words instead).  Only the top bin is populated at startup;
// smaller chunks are produced on demand by splitting larger ones in half.
let BINS = [
[], // 4 32-bit values, so 128 bits each chunk
[], // 8 32-bit values, so 256
[], // 16 32-bit values, so 512
[], // 32 32-bit values, so 1024
[], // 2048
[], // 4096
[], // 8192
[], // 16384
[], // 32768
[], // 65536
[], // 131072
[], // 262144
[], // 524288
[], // 1048576
[], // 2097152
[{ start: 0, count: 100 }], // 4194304
]
// Fetch one block from bin i, refilling the bin from larger bins first
// when it is empty.  Returns the block's bit address, or -1 if even the
// refill could not produce one (out of memory).
//
// fetchBlock() signals failure with -1, NOT a falsy value: address 0 is a
// perfectly valid block, so the original `if (!block)` was wrong in both
// directions (treated address 0 as a miss and -1 as a hit).  The original
// also unconditionally fetched a second time, leaking the first block.
function fetchBlockWithAllocation(i) {
  let block = fetchBlock(i)
  if (block !== -1) return block
  prepareBlocks(i)
  return fetchBlock(i)
}
// Pop one chunk from bin i and return its bit address, or -1 when the bin
// is empty.  Callers must compare against -1: address 0 is a valid block,
// so a falsy check would misfire.
function fetchBlock(i) {
  let bin = BINS[i]
  if (!bin.length) {
    return -1
  }
  let node = bin[0]
  let address = node.start
  node.count--
  // Chunks in bin i are (128 << i) bits wide, so advance the node past the
  // chunk we just handed out.  (The original `i * 32` was wrong: for bin
  // index 1 it advanced 32 bits instead of 256 -- the post's own trace
  // expects { start: 256, count: 1 } after one fetch from bin 2.)
  node.start += 128 << i
  if (!node.count) {
    bin.shift()
  }
  return address
}
function prepareBlocks(index, howMany = 1024) {
let startBinIndex = index + 1
let scaleFactor = 1
while (startBinIndex < 16) {
let bin = BINS[startBinIndex++]
if (bin.length) {
for (let k = 0, n = bin.length; k < n; k++) {
let node = bin[k]
while (node.count) {
howMany -= scaleFactor
node.count--
}
}
// starting to get lost
} else {
}
}
}The stack/iteration aspect gets me tripped up. It seems like there is something simple which I am missing to create an elegant solution, and I am going off the track. I have the prepareBlocks to preallocate a bunch of blocks, so it sort of does it in bulk whenever it finds none, as an optimization. Ideally it does this without having to create any other temporary arrays.
I keep thinking:
- Bring down the next level.
- How many do we have left?
- Bring down the next level.
- How many do we have left?
Attempting in a more recursive fashion, I still get stuck around the same point:
// Same 16-bin structure, but each bin caches `count` = total free chunks
// across its `array`, so "is this bin empty?" is O(1) instead of walking
// nodes.  The cache must agree with the array: the top bin starts with a
// node of 100 chunks, so its count is 100.  (It was 0 in the original,
// which would make every lookup believe the allocator is out of memory.)
let BINS = [
{ count: 0, array: [] }, // 4 32-bit values, so 128 bits each chunk
{ count: 0, array: [] }, // 8 32-bit values, so 256
{ count: 0, array: [] }, // 16 32-bit values, so 512
{ count: 0, array: [] }, // 32 32-bit values, so 1024
{ count: 0, array: [] }, // 2048
{ count: 0, array: [] }, // 4096
{ count: 0, array: [] }, // 8192
{ count: 0, array: [] }, // 16384
{ count: 0, array: [] }, // 32768
{ count: 0, array: [] }, // 65536
{ count: 0, array: [] }, // 131072
{ count: 0, array: [] }, // 262144
{ count: 0, array: [] }, // 524288
{ count: 0, array: [] }, // 1048576
{ count: 0, array: [] }, // 2097152
{ count: 100, array: [{ start: 0, count: 100 }] }, // 4194304
]
// Iterative (non-recursive) refill for the { count, array } bin layout:
// walk up to the nearest bin with free chunks, then split downward one
// level at a time until BINS[index] holds roughly minHowMany chunks.
// `bin.count` (the cached total across bin.array) is kept in sync on both
// the source and destination of every split.  Chunks in bin s are
// (128 << s) bits wide; node.start is a bit address.  Returns false when
// no larger bin has memory left.
function prepareBlocks(index, minHowMany = 1024) {
  let src = index + 1
  while (src < BINS.length && BINS[src].count === 0) src++
  if (src === BINS.length) return false
  for (let s = src; s > index; s--) {
    // Chunks required at level s to yield ~minHowMany at the target size.
    let need = Math.ceil(minHowMany / (1 << (s - index)))
    if (need < 1) need = 1
    let bin = BINS[s]
    let node = bin.array[0]
    let take = Math.min(node.count, need)
    let start = node.start
    node.count -= take
    node.start += take * (128 << s) // advance past the chunks we took
    bin.count -= take
    if (node.count === 0) bin.array.shift()
    // One chunk at level s splits into two at level s - 1 (same bits).
    let child = BINS[s - 1]
    child.array.push({ start, count: take * 2 })
    child.count += take * 2
  }
  return true
}
It's almost as if it must zig-zag through the first items in each list, then the second, etc., so it only divides what it needs.
Latest pseudocode is:
function allocateBunch(base, size, count) {
let desiredBits = size * count
let totalBits = 0
for bin, i in bins
let blockBits = 128 << i
while (bin.length)
block = bin[0]
let k = 0
let totalNewBits = block.count * blockBits
let totalWithNewBits = totalBits + totalNewBits
let diff = Math.min(block.count, Math.ceil((desiredBits - totalBits) / blockBits)) // blocks to take from this node
block.count -= diff
let newChildBlock = { count: diff * (2 ** i) }
base.push(newChildBlock)
totalBits = totalWithNewBits
if (totalBits >= desiredBits)
return
bin.shift()
}
Note: It doesn't really matter how many it preallocates when looking for one, I would say max 4096 or something just because seems reasonable enough. So in trying to fetch a block, just divide from wherever is closest, all the way down so you get that many more blocks at your desired size. If it's not enough, then just repeat the process. Just not sure how to yet.
Also note, if you consider how to "free memory" in this system, you could merge every node when paired with an odd value, and merge them up, so you get larger and larger blocks. Maybe that affects how you were to implement this, just wanted to point out.
Looking for maximum efficiency as well, by that I mean using caching or non-naive solutions if possible (i.e. avoiding repeated calculations or doing more work than necessary).
Bounty will go to most optimized version (in terms of execution steps, no recursion, etc.) that is also straightforward.
from How to efficiently segment a large block of predefined size into smaller blocks that are factors of its size in JavaScript?
No comments:
Post a Comment