Prerequisite: Partition allocation methods
Best fit allocates a process to the smallest free partition that is large enough to hold it, among all the available free partitions.
Example:
Input : blockSize[] = {100, 500, 200, 300, 600};
processSize[] = {212, 417, 112, 426};
Output:
Process No.    Process Size    Block no.
 1             212             4
 2             417             2
 3             112             3
 4             426             5

Implementation:
1- Input the memory blocks and the processes with their sizes.
2- Initialize all memory blocks as free.
3- Pick each process and find the smallest block that is large
   enough for it, i.e., find the minimum of all blockSize[j]
   such that blockSize[j] >= processSize[current]. If such a
   block is found, assign it to the current process.
4- If no such block exists, leave the current process unallocated
   and keep checking the remaining processes.
Below is the implementation of the above algorithm.
C++
#include <iostream>
using namespace std;

// Allocates processes to memory blocks using the best fit strategy
void bestFit(int blockSize[], int m, int processSize[], int n)
{
    // Stores the block id allocated to each process; -1 means not allocated
    int allocation[n];
    for (int i = 0; i < n; i++)
        allocation[i] = -1;

    // Pick each process and find the tightest block that fits it
    for (int i = 0; i < n; i++)
    {
        int bestIdx = -1;
        for (int j = 0; j < m; j++)
        {
            if (blockSize[j] >= processSize[i])
            {
                if (bestIdx == -1)
                    bestIdx = j;
                else if (blockSize[bestIdx] > blockSize[j])
                    bestIdx = j;
            }
        }

        // If a block was found, allocate it and reduce its remaining size
        if (bestIdx != -1)
        {
            allocation[i] = bestIdx;
            blockSize[bestIdx] -= processSize[i];
        }
    }

    cout << "\nProcess No.\tProcess Size\tBlock no.\n";
    for (int i = 0; i < n; i++)
    {
        cout << " " << i + 1 << "\t\t" << processSize[i] << "\t\t";
        if (allocation[i] != -1)
            cout << allocation[i] + 1;
        else
            cout << "Not Allocated";
        cout << endl;
    }
}

int main()
{
    int blockSize[] = {100, 500, 200, 300, 600};
    int processSize[] = {212, 417, 112, 426};
    int m = sizeof(blockSize) / sizeof(blockSize[0]);
    int n = sizeof(processSize) / sizeof(processSize[0]);
    bestFit(blockSize, m, processSize, n);
    return 0;
}
Java
public class GFG
{
    static void bestFit(int blockSize[], int m, int processSize[], int n)
    {
        int allocation[] = new int[n];
        for (int i = 0; i < allocation.length; i++)
            allocation[i] = -1;

        for (int i = 0; i < n; i++)
        {
            int bestIdx = -1;
            for (int j = 0; j < m; j++)
            {
                if (blockSize[j] >= processSize[i])
                {
                    if (bestIdx == -1)
                        bestIdx = j;
                    else if (blockSize[bestIdx] > blockSize[j])
                        bestIdx = j;
                }
            }

            if (bestIdx != -1)
            {
                allocation[i] = bestIdx;
                blockSize[bestIdx] -= processSize[i];
            }
        }

        System.out.println("\nProcess No.\tProcess Size\tBlock no.");
        for (int i = 0; i < n; i++)
        {
            System.out.print(" " + (i + 1) + "\t\t" + processSize[i] + "\t\t");
            if (allocation[i] != -1)
                System.out.print(allocation[i] + 1);
            else
                System.out.print("Not Allocated");
            System.out.println();
        }
    }

    public static void main(String[] args)
    {
        int blockSize[] = { 100, 500, 200, 300, 600 };
        int processSize[] = { 212, 417, 112, 426 };
        int m = blockSize.length;
        int n = processSize.length;
        bestFit(blockSize, m, processSize, n);
    }
}
Python3
def bestFit(blockSize, m, processSize, n):
    allocation = [-1] * n

    for i in range(n):
        bestIdx = -1
        for j in range(m):
            if blockSize[j] >= processSize[i]:
                if bestIdx == -1:
                    bestIdx = j
                elif blockSize[bestIdx] > blockSize[j]:
                    bestIdx = j

        if bestIdx != -1:
            allocation[i] = bestIdx
            blockSize[bestIdx] -= processSize[i]

    print("Process No.  Process Size  Block no.")
    for i in range(n):
        print(i + 1, "          ", processSize[i], end="          ")
        if allocation[i] != -1:
            print(allocation[i] + 1)
        else:
            print("Not Allocated")

if __name__ == '__main__':
    blockSize = [100, 500, 200, 300, 600]
    processSize = [212, 417, 112, 426]
    m = len(blockSize)
    n = len(processSize)
    bestFit(blockSize, m, processSize, n)
C#
using System;

public class GFG
{
    static void bestFit(int[] blockSize, int m, int[] processSize, int n)
    {
        int[] allocation = new int[n];
        for (int i = 0; i < allocation.Length; i++)
            allocation[i] = -1;

        for (int i = 0; i < n; i++)
        {
            int bestIdx = -1;
            for (int j = 0; j < m; j++)
            {
                if (blockSize[j] >= processSize[i])
                {
                    if (bestIdx == -1)
                        bestIdx = j;
                    else if (blockSize[bestIdx] > blockSize[j])
                        bestIdx = j;
                }
            }

            if (bestIdx != -1)
            {
                allocation[i] = bestIdx;
                blockSize[bestIdx] -= processSize[i];
            }
        }

        Console.WriteLine("\nProcess No.\tProcess Size\tBlock no.");
        for (int i = 0; i < n; i++)
        {
            Console.Write(" " + (i + 1) + "\t\t" + processSize[i] + "\t\t");
            if (allocation[i] != -1)
                Console.Write(allocation[i] + 1);
            else
                Console.Write("Not Allocated");
            Console.WriteLine();
        }
    }

    public static void Main()
    {
        int[] blockSize = {100, 500, 200, 300, 600};
        int[] processSize = {212, 417, 112, 426};
        int m = blockSize.Length;
        int n = processSize.Length;
        bestFit(blockSize, m, processSize, n);
    }
}
Javascript
function bestFit(blockSize, m, processSize, n) {
    let allocation = new Array(n).fill(-1);

    for (let i = 0; i < n; i++) {
        let bestIdx = -1;
        for (let j = 0; j < m; j++) {
            if (blockSize[j] >= processSize[i]) {
                if (bestIdx === -1) {
                    bestIdx = j;
                } else if (blockSize[bestIdx] > blockSize[j]) {
                    bestIdx = j;
                }
            }
        }

        if (bestIdx !== -1) {
            allocation[i] = bestIdx;
            blockSize[bestIdx] -= processSize[i];
        }
    }

    console.log("Process No.  Process Size  Block no.");
    for (let i = 0; i < n; i++) {
        console.log(`${i + 1}            ${processSize[i]}           ${allocation[i] !== -1 ? allocation[i] + 1 : "Not Allocated"}`);
    }
}

let blockSize = [100, 500, 200, 300, 600];
let processSize = [212, 417, 112, 426];
let m = blockSize.length;
let n = processSize.length;
bestFit(blockSize, m, processSize, n);
Output:
Process No.    Process Size    Block no.
 1             212             4
 2             417             2
 3             112             3
 4             426             5
The time complexity of the Best Fit algorithm is O(n × m), where n is the number of processes and m is the number of memory blocks: the outer loop iterates over the processes and the inner loop scans every block for each process. When m is of the same order as n, this is O(n²).
The space complexity of the Best Fit algorithm is O(n), as it requires an array of size n to store the block allocated to each process.
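One possible way to reduce the cost of the inner scan, if only the block sizes matter and the original block numbers need not be reported, is to keep the free blocks in an ordered container so that each search takes O(log m). The sketch below is an illustrative C++ variant using std::multiset; it is not part of the implementations above.
C++
#include <iostream>
#include <set>
using namespace std;

int main()
{
    // Free block sizes from the example above (block identity is not tracked here)
    multiset<int> freeBlocks = {100, 500, 200, 300, 600};
    int processSize[] = {212, 417, 112, 426};

    for (int p : processSize)
    {
        // Smallest free block with size >= p, found in O(log m) instead of a linear scan
        auto it = freeBlocks.lower_bound(p);
        if (it == freeBlocks.end())
        {
            cout << p << " -> Not Allocated\n";
            continue;
        }
        int leftover = *it - p;
        freeBlocks.erase(it);          // remove the chosen block...
        freeBlocks.insert(leftover);   // ...and put back its unused remainder
        cout << p << " -> allocated, " << leftover << " left in that block\n";
    }
    return 0;
}
On the example data this picks blocks of sizes 300, 500, 200 and 600, matching the allocation shown in the output above.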
Is Best-Fit really best?
Although best fit minimizes wasted space, it consumes a lot of processor time searching for the block closest to the required size. Best fit can also perform worse than other algorithms in some cases; for example, see the exercise below.
Example: Consider requests from processes in the given order 300K, 25K, 125K and 50K. Let there be two blocks of memory available: one of size 150K followed by one of size 350K.
Best Fit:
300K is allocated from the block of size 350K. 50K is left in the block.
25K is allocated from the remaining 50K block. 25K is left in the block.
125K is allocated from the 150K block. 25K is left in this block as well.
50K cannot be allocated even though 25K + 25K space is available.
First Fit:
300K request is allocated from 350K block, 50K is left out.
25K is allocated from the 150K block, 125K is left out.
Then 125K and 50K are allocated to the remaining leftover partitions.
So, first fit can satisfy all four requests in this case, while best fit cannot; a small simulation of this comparison is sketched below.
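The comparison above can be checked with a small simulation. The sketch below is an illustrative C++ helper (the function allocate and its structure are not taken from the article's code) that runs the same request sequence through both strategies and counts how many requests each satisfies.
C++
#include <iostream>
#include <vector>
using namespace std;

// Runs one allocation strategy over the requests and returns how many
// requests it could satisfy. blocks is taken by value so each run starts
// from a fresh copy of the free list.
int allocate(vector<int> blocks, const vector<int>& requests, bool bestFit)
{
    int served = 0;
    for (int req : requests)
    {
        int chosen = -1;
        for (int j = 0; j < (int)blocks.size(); j++)
        {
            if (blocks[j] < req)
                continue;
            if (!bestFit) { chosen = j; break; }              // first fit: first hole that fits
            if (chosen == -1 || blocks[j] < blocks[chosen])   // best fit: tightest hole that fits
                chosen = j;
        }
        if (chosen != -1)
        {
            blocks[chosen] -= req;
            served++;
        }
    }
    return served;
}

int main()
{
    vector<int> blocks   = {150, 350};           // block sizes in K, as in the exercise
    vector<int> requests = {300, 25, 125, 50};   // request order from the exercise

    cout << "Best fit satisfies  " << allocate(blocks, requests, true)  << " of 4 requests\n";
    cout << "First fit satisfies " << allocate(blocks, requests, false) << " of 4 requests\n";
    return 0;
}
With these inputs best fit serves 3 of the 4 requests while first fit serves all 4, matching the walk-through above.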