tags:

views:

117

answers:

1

Not quite sure if it is ok to do this but, my question is: Is there something wrong with my code ? It doesn't go as fast as I would like, and since I am using lots of async workflows maybe I am doing something wrong. The goal here is to build something that can crawl 20 000 pages in less than an hour.

    open System
    open System.Text
    open System.Net
    open System.IO
    open System.Text.RegularExpressions
    open System.Collections.Generic
    open System.ComponentModel
    open Microsoft.FSharp
    open System.Threading
    //This is the Parallel.Fs file

    type ComparableUri ( uri: string ) = 

        inherit System.Uri( uri )

        let elts (uri:System.Uri) = 
            uri.Scheme, uri.Host, uri.Port, uri.Segments

        interface System.IComparable with 
            member this.CompareTo( uri2 ) = 
                compare (elts this) (elts(uri2 :?> ComparableUri))

        override this.Equals(uri2) = 
            compare this (uri2 :?> ComparableUri ) = 0

        override this.GetHashCode() = 0 


    ///////////////////////////////////////////////Funtions to retreive html string//////////////////////////////
    let mutable error = Set.empty<ComparableUri>
    let mutable visited = Set.empty<ComparableUri>

    let getHtmlPrimitiveAsyncDelay (delay:int) (uri : ComparableUri)  =
        async{
                try
                    let req =  (WebRequest.Create(uri)) :?> HttpWebRequest

                    // 'use' is equivalent to ‘using’ in C# for an IDisposable
                    req.UserAgent<-"Mozilla"

                    //Console.WriteLine("Waiting")
                    do! Async.Sleep(delay * 250)
                    let! resp =    (req.AsyncGetResponse())
                    Console.WriteLine(uri.AbsoluteUri+" got response after delay "+string delay)
                    use stream = resp.GetResponseStream()
                    use reader = new StreamReader(stream)
                    let html = reader.ReadToEnd()
                    return html
                with 
                | _ as ex -> Console.WriteLine( ex.ToString() ) 
                             lock error (fun () -> error<- error.Add uri )
                             lock visited (fun () -> visited<-visited.Add uri )
                             return "BadUri"
                     }



    ///////////////////////////////////////////////Active Pattern Matching to retreive href//////////////////////////////

    let (|Matches|_|) (pat:string) (inp:string) =
        let m = Regex.Matches(inp, pat)
        // Note the List.tl, since the first group is always the entirety of the matched string.
        if m.Count > 0
        then Some (List.tail [ for g in m -> g.Value ])
        else None

    let (|Match|_|) (pat:string) (inp:string) =
        let m = Regex.Match(inp, pat) 
        // Note the List.tl, since the first group is always the entirety of the matched string.
        if m.Success then 
            Some (List.tail [ for g in m.Groups -> g.Value ])
        else 
            None
    ///////////////////////////////////////////////Find Bad  href//////////////////////////////

    let isEmail (link:string) = 
        link.Contains("@")

    let isMailto (link:string) = 
        if Seq.length link >=6 then 
            link.[0..5] = "mailto"
        else
            false

    let isJavascript (link:string) = 
         if Seq.length link >=10 then 
            link.[0..9] = "javascript"
         else
            false

    let isBadUri (link:string) = 
          link="BadUri"

    let isEmptyHttp (link:string) = 
        link="http://"

    let isFile (link:string)=
         if Seq.length link >=6 then 
            link.[0..5] = "file:/"
         else
            false

    let containsPipe (link:string) = 
        link.Contains("|")


    let isAdLink (link:string) = 
          if Seq.length link >=6 then 
            link.[0..5] = "adlink"
          elif Seq.length link >=9 then 
            link.[0..8] = "http://adLink"
          else
            false

///////////////////////////////////////////////Find Bad  href//////////////////////////////

    let getHref (htmlString:string) = 

        let urlPat = "href=\"([^\"]+)"

        match htmlString with 
        | Matches urlPat urls -> urls |> List.map( fun href -> match href with 
                                                               | Match (urlPat) (link::[]) -> link
                                                               | _ -> failwith "The href was not in correct format, there was more than one match" )

        | _ -> Console.WriteLine( "No links for this page" );[] 
        |> List.filter( fun link -> not(isEmail link) )
        |> List.filter( fun link -> not(isMailto link) )
        |> List.filter( fun link -> not(isJavascript link) )
        |> List.filter( fun link -> not(isBadUri link) )
        |> List.filter( fun link -> not(isEmptyHttp link) )
        |> List.filter( fun link -> not(isFile link) )
        |> List.filter( fun link -> not(containsPipe link) )
        |> List.filter( fun link -> not(isAdLink link) )

    let treatAjax (href:System.Uri)  = 
        let link = href.ToString()
        let firstPart = (link.Split([|"#"|],System.StringSplitOptions.None)).[0]
        new Uri(firstPart)

    //only follow pages with certain extnsion or ones with no exensions
    let followHref (href:System.Uri) = 

        let valid2 = set[".py"]
        let valid3 = set[".php";".htm";".asp"]
        let valid4 = set[".php3";".php4";".php5";".html";".aspx"]



        let arrLength = href.Segments |> Array.length
        let lastExtension = (href.Segments).[arrLength-1] 
        let lengthLastExtension = Seq.length lastExtension

        if (lengthLastExtension <= 3)  then 
            not( lastExtension.Contains(".") )
        else
            //test for the 2 case
            let last4 = lastExtension.[(lengthLastExtension-1)-3..(lengthLastExtension-1)]

            let isValid2 = valid2|>Seq.exists(fun validEnd -> last4.EndsWith( validEnd) )

            if isValid2 then 
                true
            else
                if lengthLastExtension <= 4 then 
                    not( last4.Contains(".") )
                else
                    let last5 = lastExtension.[(lengthLastExtension-1)-4..(lengthLastExtension-1)]
                    let isValid3 = valid3|>Seq.exists(fun validEnd -> last5.EndsWith( validEnd) )

                    if isValid3 then 
                        true
                    else
                        if lengthLastExtension <= 5 then 
                            not( last5.Contains(".") )
                        else
                            let last6 = lastExtension.[(lengthLastExtension-1)-5..(lengthLastExtension-1)]
                            let isValid4 = valid4|>Seq.exists(fun validEnd -> last6.EndsWith( validEnd) )

                            if isValid4 then 
                                true
                            else
                                not( last6.Contains(".") ) && not(lastExtension.[0..5] = "mailto")




//Create the correct links / -> add the homepage , make then a comparabel Uri
let hrefLinksToUri ( uri:ComparableUri ) (hrefLinks:string list)  = 
    hrefLinks
    |> List.map( fun link -> try 
                                 if Seq.length link <4 then 
                                    Some(new Uri( uri, link ))
                                 else 
                                    if link.[0..3] = "http" then  
                                        Some(new Uri(link))
                                    else
                                        Some(new Uri( uri, link ))

                             with
                             | _ as ex -> Console.WriteLine(link);
                                          lock error (fun () ->error<-error.Add uri)
                                          None
                            )
    |> List.filter( fun link -> link.IsSome )
    |> List.map( fun o -> o.Value)
    |> List.map( fun uri -> new ComparableUri( string uri ) )

//Treat uri , removing ajax last part , and only following links specified b Benoit
let linksToFollow (hrefUris:ComparableUri list) = 
    hrefUris
    |>List.map( treatAjax )
    |>List.filter( fun link -> followHref link )
    |>List.map( fun uri -> new ComparableUri( string uri ) )
    |>Set.ofList



let needToVisit uri = 
      ( lock visited (fun () -> not( visited.Contains uri) ) ) && (lock error (fun () -> not( error.Contains uri) ))



let getLinksToFollowAsyncDelay (delay:int) ( uri: ComparableUri )  = 
    //write 
    async{    
              let! links = getHtmlPrimitiveAsyncDelay delay uri 

              lock visited (fun () ->visited<-visited.Add uri)

              let linksToFollow = getHref links
                                  |> hrefLinksToUri uri
                                  |> linksToFollow
                                  |> Set.filter( needToVisit )
              return linksToFollow
              }

let getDelay(uri:ComparableUri) (authorityDelay:Dictionary<string,System.Diagnostics.Stopwatch >) = 

    let uriAuthority = uri.Authority
    let hasAuthority,watch = authorityDelay.TryGetValue(uriAuthority)

    if hasAuthority then 
        let elapsed = watch.Elapsed
        let s = TimeSpan(0,0,0,0,500)-elapsed
        if s.TotalMilliseconds < 0.0 then 
            0
        else
            int(s.TotalMilliseconds)

    else 
        let temp = System.Diagnostics.Stopwatch()
        temp.Start()
        authorityDelay.Add(uriAuthority,temp)
        0




let rec getLinksToFollowFromSetAsync maxIteration  ( uris: seq<ComparableUri> )  = 

    let authorityDelay = Dictionary<string,System.Diagnostics.Stopwatch>()

    if maxIteration = 100 then 
        Console.WriteLine("Finished")
    else
        //Unite by authority add delay for those we same authority others ignore 
        let stopwatch= System.Diagnostics.Stopwatch()
        stopwatch.Start()
        let newLinks  = uris
                        |> Seq.map(  fun uri -> let delay = lock authorityDelay (fun () -> getDelay uri authorityDelay )
                                                getLinksToFollowAsyncDelay delay uri )
                        |> Async.Parallel
                        |> Async.RunSynchronously
                        |> Seq.concat
        stopwatch.Stop()
        Console.WriteLine("\n\n\n\n\n\n\nTimeElapse : "+string stopwatch.Elapsed+"\n\n\n\n\n\n\n\n\n")

        getLinksToFollowFromSetAsync (maxIteration+1) newLinks

seq[set[ComparableUri( "http://rue89.com/" )]]
|>PSeq.ofSeq
|>PSeq.iter(getLinksToFollowFromSetAsync 0 )

    getLinksToFollowFromSetAsync 0 (seq[ComparableUri( "http://twitter.com/" )])

    Console.WriteLine("Finished")

Some feedBack would be great ! Thank you (note this is just something I am doing for fun)

+2  A: 

I think the culprit is the line do! Async.Sleep(delay * 250) - you gradually wait longer and longer. What is the reason for it?

Mitya
You dont want to request to frequently to the same server, otherwise the webmaster will be upset your are using all the servers bandwithch(http://stackoverflow.com/questions/3021888/asynchronous-crawling-f). Thank you for looking at the code
jlezard
Agree. If you started everything 'at once', then staggered delays would make sense, but if you just finished this one page that sent you back to the same authority, maybe the time has already passed, but now you're waiting N+1 seconds...
Brian
I think you want to use 'one agent per authority' to ensure that you send no more than 1 request per second (or whatever) to that authority. But you need an agent or something else to keep track of the 'last time you sent a request here'. Right now you just say "wait 15s" to send the 15th request there, even if maybe the 14th request was already sent 40s ago. You need to 'start the clock' at the right time.
Brian
If you like, you could also keep a stopwatch of 'time since the program started', and a dictionary of e.g. the 'stopwatch time last sent to this authority', and then sleep for `let s = 1-(now-lastTime) in if s>0 then s else 0`.
Brian
Right, it looks like you could record a last time you access a particular authority in your authorityDelay map and then sort the lists of url to crawl in that order (so crawl earlier the urls accessed earlier). You can also be smarter here on how you parallelize work - parallelize between different authorities, but crawl urls from the same authority sequentially since you are bound to sleep anyway.
Mitya
Thank you Mitya and Brian for the feedback, I did the change to the delay function. For your last point Mitya isn't my code doing that ? ( create a bunch of computations some with delays some without, then run all of them in parallel ? ). Also I want my code to be asynchronous when requesting multiple urls from same authority, as what matters ( I think) is the time between requests not the time between responses. http://stackoverflow.com/questions/3021888/asynchronous-crawling-f
jlezard
Very good point about time between reqs vs time between resps (this means that agant-per-authority model that Brian suggests won't work either). Be aware that Async.Parallel uses a ThreadPool under the covers. There is a limit on how many thread can execute simultaniously on thread pool. Asyncs will yield the threads soon enough so it is not a big deal, but there is a disadvantage in scheduling too many threads that will immediately yield - you could spend that time more productively actually scheduling web requests. Experiment and see!
Mitya