I'm using the following C# code to FTP a ~40MB CSV file from a remote service provider. Around 50% of the time, the download hangs and eventually times out. In my app log, I get a line like:
> Unable to read data from the transport connection: A connection attempt failed because the connected party did not properly respond after a period of time, or established connection failed because connected host has failed to respond.
When I download the file interactively using a graphical client like LeechFTP, the downloads almost never hang, and complete in about 45 seconds. I'm having a hell of a time understanding what's going wrong.
Can anyone suggest how I can instrument this code to get more insight into what's going on, or a better way to download this file? Should I increase the buffer size, and if so by how much? Should I avoid the buffered writes to disk and just swallow the whole file in memory? Any advice appreciated!
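For reference, here's the kind of request tuning I'm thinking of trying first: forcing passive (or active) mode, turning off keep-alive, and putting explicit timeouts on the request so a stall fails fast instead of hanging. These are all standard FtpWebRequest properties, but the specific values below are just guesses on my part, not something I've verified against this server:

myReq = (FtpWebRequest)WebRequest.Create(new Uri(this.DownloadURI));
myReq.Method = WebRequestMethods.Ftp.DownloadFile;
myReq.UseBinary = true;
myReq.UsePassive = true;             // try toggling this; active vs. passive often matters through firewalls/NAT
myReq.KeepAlive = false;             // drop the control connection as soon as the transfer finishes
myReq.Timeout = 30 * 1000;           // ms to wait for GetResponse()
myReq.ReadWriteTimeout = 60 * 1000;  // ms to wait for each Read() on the response stream
myReq.Credentials = new NetworkCredential(_ID, _Password);

I've also read that System.Net tracing can be turned on in app.config to log the underlying FTP conversation, which might show where the transfer stalls; I haven't tried that yet either.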
...
private void CreateDownloadFile()
{
    // Create (or overwrite) the local file the download will be written to.
    _OutputFile = new FileStream(_SourceFile, FileMode.Create);
}

public string FTPDownloadFile()
{
    this.CreateDownloadFile();

    myReq = (FtpWebRequest)FtpWebRequest.Create(new Uri(this.DownloadURI));
    myReq.Method = WebRequestMethods.Ftp.DownloadFile;
    myReq.UseBinary = true;
    myReq.Credentials = new NetworkCredential(_ID, _Password);

    FtpWebResponse myResp = (FtpWebResponse)myReq.GetResponse();
    Stream ftpStream = myResp.GetResponseStream();

    const int bufferSize = 2048;
    byte[] buffer = new byte[bufferSize];
    int readCount;
    long bytesRead = 0;

    // Copy the response stream to disk in bufferSize chunks.
    while ((readCount = ftpStream.Read(buffer, 0, bufferSize)) > 0)
    {
        _OutputFile.Write(buffer, 0, readCount);
        bytesRead += readCount;
        Console.Write('.'); // show progress on the console
    }
    Console.WriteLine();

    logger.logActivity(" FTP received " + String.Format("{0:0,0}", bytesRead) + " bytes");

    ftpStream.Close();
    _OutputFile.Close();
    myResp.Close();

    return this.GetFTPStatus();
}

public string GetFTPStatus()
{
    return ((FtpWebResponse)myReq.GetResponse()).StatusDescription;
}
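And for the swallow-the-whole-file-in-memory idea, the simplest version I can see would be something like the hypothetical method below using WebClient (same _ID, _Password, DownloadURI, and logger members as above; untested against this provider):

public byte[] FTPDownloadFileInMemory()
{
    using (var client = new WebClient())
    {
        client.Credentials = new NetworkCredential(_ID, _Password);

        // DownloadData buffers the entire file in memory and returns it as a byte[].
        byte[] data = client.DownloadData(this.DownloadURI);

        logger.logActivity(" FTP received " + String.Format("{0:0,0}", data.Length) + " bytes");
        return data;
    }
}

Would that actually be any more robust than the chunked FtpWebRequest loop, or would it just hide the same underlying stall?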