Short problem:
#include <iostream>
#include <cstdio>   // getchar()
using namespace std;

int main()
{
    double **T;
    long int L_size;
    long int R_size = 100000;
    long int i, j;

    cout << "enter L_size: ";
    cin >> L_size;
    cin.clear();
    cin.ignore(100, '\n');

    // total number of elements, and their size in bytes
    cout << L_size * R_size << endl;
    cout << sizeof(double) * L_size * R_size << endl;

    // allocate L_size rows of R_size doubles each
    T = new double *[L_size];
    for (i = 0; i < L_size; i++)
    {
        T[i] = new double[R_size];
    }

    cout << "press enter to fill array" << endl;
    getchar();

    // write to every element so each page is actually used
    for (i = 0; i < L_size; i++)
    {
        for (j = 0; j < R_size; j++)
        {
            T[i][j] = 10.0;
        }
    }
    cout << "allocated" << endl;

    // free each row, then the array of row pointers
    for (i = 0; i < L_size; i++)
    {
        delete[] T[i];
    }
    delete[] T;

    cout << "press enter to close" << endl;
    getchar();
    return 0;
}
With 2 GB of RAM (on a 32-bit OS) I can't make it work with L_size = 3000, which is not surprising, since the data alone would need about 3000 * 100000 * 8 bytes ≈ 2.4 GB.
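In case it helps, here is a minimal sketch of how I could pinpoint where the allocation gives up; it assumes the nothrow form of new, which returns a null pointer on failure instead of throwing bad_alloc (row sizes as above):

#include <iostream>
#include <new>      // std::nothrow
using namespace std;

int main()
{
    const long int R_size = 100000;
    const long int L_size = 3000;
    double **T = new double *[L_size];

    long int i;
    for (i = 0; i < L_size; i++)
    {
        // returns a null pointer on failure instead of throwing
        T[i] = new (nothrow) double[R_size];
        if (!T[i])
        {
            cout << "allocation failed at row " << i << endl;
            break;
        }
    }
    cout << "got " << i << " rows, about "
         << i * R_size * sizeof(double) / (1024 * 1024) << " MiB" << endl;

    // free whatever was successfully allocated
    for (long int k = 0; k < i; k++)
        delete[] T[k];
    delete[] T;
    return 0;
}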
But when I start two copies of the above program, each with L_size = 1500, it works: really slowly, but eventually both print "allocated" in the console.
So the question is: how is that possible? Is it related to virtual memory?
Is it possible to have one big array paged out to virtual memory while operating on another, within a single program?
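To make that last question concrete, this is roughly the single-process version of the two-copies experiment I have in mind (same sizes as the two separate copies; whether one process on a 32-bit OS can hold both halves at once is exactly what I'm unsure about):

#include <iostream>
using namespace std;

int main()
{
    const long int R_size = 100000;
    const long int half = 1500;   // each half is what one copy allocated

    // allocate both halves up front
    double **A = new double *[half];
    double **B = new double *[half];
    for (long int i = 0; i < half; i++)
    {
        A[i] = new double[R_size];
        B[i] = new double[R_size];
    }

    // work on A first; while B is being filled, A could sit in swap
    for (long int i = 0; i < half; i++)
        for (long int j = 0; j < R_size; j++)
            A[i][j] = 10.0;

    for (long int i = 0; i < half; i++)
        for (long int j = 0; j < R_size; j++)
            B[i][j] = 10.0;

    cout << "both halves filled" << endl;

    for (long int i = 0; i < half; i++)
    {
        delete[] A[i];
        delete[] B[i];
    }
    delete[] A;
    delete[] B;
    return 0;
}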
Thx.