[Mono-bugs] [Bug 44438][Wis] New - WebClient.DownloadData() returns only partial page
bugzilla-daemon@rocky.ximian.com
Tue, 10 Jun 2003 14:56:59 -0400 (EDT)
Please do not reply to this email; if you want to comment on the bug, go to the
URL shown below and enter your comments there.
Changed by monobugzilla@lojic.com.
http://bugzilla.ximian.com/show_bug.cgi?id=44438
--- shadow/44438 Tue Jun 10 14:56:59 2003
+++ shadow/44438.tmp.23904 Tue Jun 10 14:56:59 2003
@@ -0,0 +1,142 @@
+Bug#: 44438
+Product: Mono/Class Libraries
+Version: unspecified
+OS: other
+OS Details: Redhat 7.3
+Status: NEW
+Resolution:
+Severity:
+Priority: Wishlist
+Component: System
+AssignedTo: mono-bugs@ximian.com
+ReportedBy: monobugzilla@lojic.com
+QAContact: mono-bugs@ximian.com
+TargetMilestone: ---
+URL:
+Cc:
+Summary: WebClient.DownloadData() returns only partial page
+
+The following program was originally part of a test case to demonstrate a
+memory leak in the MS .NET v1.1 garbage collector. When attempting to
+reproduce the results on Linux using Mono, I discovered a problem in which
+WebClient.DownloadData() doesn't return the entire web page, but only a
+portion (4096 bytes, coincidentally :).
+
+The results depend heavily on the URL you supply to the program, so you
+may have to try several before the problem shows up. The page that
+demonstrates it is on our intranet, so I can't supply the URL.
+
+The program is multi-threaded, so that may have something to do with it.
+
+Brian Adkins
+
+--- snip ---
+using System;
+using System.Collections;
+using System.Net;
+using System.Text;
+using System.Threading;
+
+class MemLeak
+{
+  private const int NUM_THREADS = 3;
+  private const int SLEEP_MS = 250;
+
+  private Queue _pageQueue = new Queue();
+  private Thread[] _threads;
+  private string _url;
+
+  [STAThread]
+  static void Main (string[] args)
+  {
+    if (args.Length != 1)
+    {
+      Console.WriteLine("You must supply a URL as an argument.");
+      return;
+    }
+    MemLeak x = new MemLeak(args[0]);
+    x.Init();
+  }
+
+  public MemLeak (string url)
+  {
+    _url = url;
+  }
+
+  public void Init ()
+  {
+    CreateThreads(NUM_THREADS);
+
+    while (true)
+    {
+      try
+      {
+        string page = DequeuePageElement();
+        Console.WriteLine("Page length is {0}", page.Length);
+        Console.WriteLine(page);
+        Console.WriteLine("=====");
+      }
+      catch (Exception ex)
+      {
+        Console.WriteLine(ex.Message);
+        Console.WriteLine(ex.StackTrace);
+      }
+    }
+  }
+
+  private void CreateThreads (int numThreads)
+  {
+    _threads = new Thread[numThreads];
+
+    for (int i = 0; i < _threads.Length; ++i)
+    {
+      _threads[i] = new Thread(new ThreadStart(ProducerThread));
+      _threads[i].Start();
+    }
+  }
+
+  private string DequeuePageElement ()
+  {
+    while (true)
+    {
+      lock (this)
+      {
+        if (_pageQueue.Count > 0)
+        {
+          return (string) _pageQueue.Dequeue();
+        }
+      }
+
+      Thread.Sleep(SLEEP_MS);
+    }
+  }
+
+  private void ProducerThread ()
+  {
+    UTF8Encoding encoding = new UTF8Encoding();
+    WebClient webClient = new WebClient();
+    string page;
+
+    while (true)
+    {
+      try
+      {
+        byte[] bytes = webClient.DownloadData(_url);
+        page = encoding.GetString(bytes);
+      }
+      catch (Exception ex)
+      {
+        Console.WriteLine(ex.Message);
+        Console.WriteLine(ex.StackTrace);
+        page = "";
+        Thread.Sleep(SLEEP_MS);
+      }
+
+      lock (this)
+      {
+        _pageQueue.Enqueue(page);
+      }
+    }
+  }
+}
+--- snip ---
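
A minimal sketch for narrowing this down, assuming the truncation comes
from a single 4096-byte read inside WebClient: fetch the same URL with
HttpWebRequest and loop over the response stream until Read() returns
zero. If this loop receives the whole page while DownloadData() stops at
4096 bytes, the problem is likely in WebClient's buffering rather than in
the connection itself. The class name FullRead is illustrative.

--- snip ---
using System;
using System.IO;
using System.Net;

class FullRead
{
  static void Main (string[] args)
  {
    if (args.Length != 1)
    {
      Console.WriteLine("You must supply a URL as an argument.");
      return;
    }

    // Fetch the page without WebClient and read the stream by hand.
    WebResponse response = WebRequest.Create(args[0]).GetResponse();
    Stream stream = response.GetResponseStream();
    MemoryStream buffer = new MemoryStream();
    byte[] chunk = new byte[4096];
    int read;

    // A single Read() may legally return fewer bytes than requested,
    // so keep reading until it returns zero (end of data).
    while ((read = stream.Read(chunk, 0, chunk.Length)) > 0)
    {
      buffer.Write(chunk, 0, read);
    }

    stream.Close();
    Console.WriteLine("Read {0} bytes in total", buffer.Length);
  }
}
--- snip ---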
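
A second sketch, for the multi-threading question raised above: the same
DownloadData() call on the main thread only. If this also prints a length
of 4096, threading can be ruled out as the cause. The class name
SingleThreaded is likewise illustrative.

--- snip ---
using System;
using System.Net;
using System.Text;

class SingleThreaded
{
  static void Main (string[] args)
  {
    if (args.Length != 1)
    {
      Console.WriteLine("You must supply a URL as an argument.");
      return;
    }

    // Same call as the test case above, but on the main thread only;
    // if this also prints 4096, the truncation is unrelated to threads.
    WebClient webClient = new WebClient();
    byte[] bytes = webClient.DownloadData(args[0]);
    string page = new UTF8Encoding().GetString(bytes);
    Console.WriteLine("Page length is {0}", page.Length);
  }
}
--- snip ---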