Hello everyone,
I am using VSTS 2008 + C# + .NET 3.5 to develop a console application that sends requests to another server (IIS 7.0 on Windows Server 2008). My code is below. My question: since I read from the server chunk by chunk in a while loop, which of these does request.Timeout = Timeout * 1000 actually bound: (1) the time to open the connection to the server, (2) each individual read operation, or (3) the total time spent in the while loop? (My own guess, with sketches, follows the code.)
static void PerformanceWorker()
{
    Stream dataStream = null;
    HttpWebRequest request = null;
    HttpWebResponse response = null;
    StreamReader reader = null;
    try
    {
        request = (HttpWebRequest)WebRequest.Create(TargetURL);
        request.Timeout = Timeout * 1000;
        request.Proxy = null;
        response = (HttpWebResponse)request.GetResponse();
        dataStream = response.GetResponseStream();
        reader = new StreamReader(dataStream);
        // read up to 10,000 characters per chunk (1000 * 10 = 10 K, not 1 M)
        char[] c = new char[1000 * 10];
        while (reader.Read(c, 0, c.Length) > 0)
        {
            globalCounter++; // note: unlike globalFailCounter below, this is not synchronized
        }
    }
    catch (Exception ex)
    {
        lock (counterLock)
        {
            globalFailCounter++;
            Console.WriteLine("Fail Counter: " + globalFailCounter + "\n" + ex.Message + "\n" + ex.StackTrace);
        }
    }
    finally
    {
        if (null != reader)
        {
            reader.Close();
        }
        if (null != dataStream)
        {
            dataStream.Close();
        }
        if (null != response)
        {
            response.Close();
        }
    }
}
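
For reference, my current understanding (which I would like confirmed) is that HttpWebRequest.Timeout only applies while GetResponse() is running, i.e. connecting and waiting for the response headers, and that each Read() on the response stream is governed separately by HttpWebRequest.ReadWriteTimeout (300 seconds by default). If that is correct, I would set the two like this; the 30-second values below are just placeholders I made up:

static void PerformanceWorkerWithTimeouts()
{
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(TargetURL);
    request.Proxy = null;

    // My assumption: covers GetResponse() only, i.e. establishing the
    // connection and waiting for the response headers (milliseconds).
    request.Timeout = 30 * 1000;

    // My assumption: covers each individual Read()/Write() on the
    // response stream (milliseconds; the default is 300,000 = 5 minutes).
    request.ReadWriteTimeout = 30 * 1000;

    using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
    using (Stream dataStream = response.GetResponseStream())
    using (StreamReader reader = new StreamReader(dataStream))
    {
        char[] buffer = new char[1000 * 10];
        while (reader.Read(buffer, 0, buffer.Length) > 0)
        {
            globalCounter++;
        }
    } // the using blocks close everything my finally block closed before
}

As far as I can tell, neither property would cap the total time spent in the while loop: a slow server that keeps trickling bytes within each per-read window could keep the loop alive indefinitely.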
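
So if the total loop duration also has to be bounded (case 3), I assume I would have to enforce that myself inside the loop, e.g. with a Stopwatch; the 60-second budget here is made up:

// Requires using System.Diagnostics; for Stopwatch.
Stopwatch watch = Stopwatch.StartNew();
TimeSpan budget = TimeSpan.FromSeconds(60); // hypothetical overall budget

char[] buffer = new char[1000 * 10];
while (reader.Read(buffer, 0, buffer.Length) > 0)
{
    globalCounter++;
    // Abort once the whole download has taken longer than the budget,
    // even if every individual read finished within ReadWriteTimeout.
    if (watch.Elapsed > budget)
    {
        throw new TimeoutException("Overall read loop exceeded " + budget);
    }
}

Is this the right way to think about the three cases, or does request.Timeout already cover more than the connection phase?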
Thanks in advance,
George