' Transfer buffer size in bytes.
' PERF FIX: was 4096. A 4 KB buffer means one Write() call per 4 KB of
' data, and the per-call overhead throttles the upload well below what
' the link can carry. 32 KB (or larger) lets the network stack keep the
' pipe full and is the likely cause of the ~4x gap versus WS_FTP.
Public nBuffer As Integer = 32768

' --- FTP setup ---
loFtpWebRequest = System.Net.FtpWebRequest.Create(New Uri(lcURI))
loFtpWebRequest.Credentials = New System.Net.NetworkCredential(cUsername, cPassword)
loFtpWebRequest.KeepAlive = False
loFtpWebRequest.Method = System.Net.WebRequestMethods.Ftp.UploadFile
loFtpWebRequest.UseBinary = True
loFtpWebRequest.ContentLength = lnFileLength
' NOTE(review): if throughput is still low, try
'     loFtpWebRequest.Proxy = Nothing
' FtpWebRequest probes for a proxy by default, which can add latency —
' left commented out because it would change behavior for users who
' legitimately sit behind a proxy; confirm before enabling.

' If we use FTP/SSL (AUTH SSL)
If nNoFTPProtocol = 2 Then
    loFtpWebRequest.EnableSsl = True
End If

loStream = loFtpWebRequest.GetRequestStream()

' Prime the loop with the first chunk. Read() returns the number of
' bytes actually read, which is 0 at end of file.
lnContentLength = loFileStream.Read(loBuffer, 0, nBuffer)

' While there is something to upload
While lnContentLength <> 0

    ' Write exactly the number of bytes read — the final chunk is
    ' normally shorter than nBuffer.
    loStream.Write(loBuffer, 0, lnContentLength)

    ' BUG FIX: count the bytes actually written, not nBuffer.
    ' The original added nBuffer on every pass, which over-reports
    ' lnUploaded on the last (partial) chunk and skews lnRemaining.
    lnUploaded = lnUploaded + lnContentLength

    lnActual = lnUploaded / lnFileLength * 100
    lnActualABS = Math.Abs(lnActual)

    ' Only rebuild the detailed status text when the percentage moved;
    ' formatting strings on every chunk wastes time in the hot loop.
    If lnLast <> lnActual Then
        lnElapse = ((Date.Now.Ticks - lnStart) / 10000000)
        lcMinute = oApp.SecondToFormat(lnElapse)
        lnRemaining = lnFileLength - lnUploaded

        ' Update the progress bar with the full status line
        loProgressBar.ShowProgress(lnActual, _
            cUploading + " " + cLocalFile + "..." + oApp.cCR + _
            oApp.GetFormatValue(lnUploaded, ",") + " bytes " + cOf + " " + _
            oApp.GetFormatValue(lnFileLength, ",") + " - " + cRemaining + " " + _
            oApp.GetFormatValue(lnRemaining, ",").ToString + " bytes - " + lcMinute)
    Else
        ' Update the progress bar (percentage only)
        loProgressBar.ShowProgress(lnActual)
    End If

    lnLast = lnActualABS

    ' Read the next chunk; a return of 0 ends the loop.
    lnContentLength = loFileStream.Read(loBuffer, 0, nBuffer)
End While

' NOTE(review): make sure loStream and loFileStream are closed (ideally
' in Using blocks) after the loop — the upload is not committed to the
' server until the request stream is closed. Not visible in this chunk,
' so flagged rather than changed.

' --- Original question (preserved) ---
' "When I use a FTP client to upload to a fast server, on that one, I
' get 1.25 mbps. When I use my code, I get 300kps. So, I get about 4
' times less power when using my interface than a FTP client, such as
' WS FTP. I am wondering why there is such a difference. Could the
' buffer be a factor?"
'
' Answer: yes. The 4096-byte buffer was the main factor (raised to
' 32 KB above). Secondary factors: FtpWebRequest's default proxy
' detection (see the Proxy note above), and the per-chunk progress/
' string-formatting work, which the lnLast guard already mitigates.