使用goroutine更改示例?

使用goroutine更改示例?,go,web-crawler,goroutine,Go,Web Crawler,Goroutine,我发现了一个很好的网页无效链接检查器。但是如何使用goroutine更改它以获得完整的示例呢?该网页为:。代码动态地将要搜索的url添加到pending切片中。但是我用goroutine做这件事有一些困难 package main import ( "crypto/tls" "errors" "fmt" "golang.org/x/net/html" "io" "net/http" "net/url" "strings" "

我发现了一个很好的网页无效链接检查器。但是如何使用goroutine更改它以获得完整的示例呢?该网页为:。代码动态地将要搜索的url添加到
pending
切片中。但是我用goroutine做这件事有一些困难

package main
import (
    "crypto/tls"
    "errors"
    "fmt"
    "golang.org/x/net/html"
    "io"
    "net/http"
    "net/url"
    "strings"
    "time"
)
// Shared crawl state. This version is single-threaded; before calling
// crawlPage from multiple goroutines, these must be protected by a
// mutex (or replaced with channels), since slices are not safe for
// concurrent append/read.
var alreadyCrawledList []string // URLs already fetched and parsed
var pending []string            // FIFO queue of URLs still to be visited
var brokenLinks []string        // URLs that answered with a non-200 status
const localHostWithPort = "localhost:8080" // host:port of the local test server being crawled
// IsLinkInPendingQueue reports whether link is already waiting in the
// pending queue, so callers can avoid enqueueing a URL twice.
func IsLinkInPendingQueue(link string) bool {
    for i := range pending {
        if pending[i] == link {
            return true
        }
    }
    return false
}
// IsLinkAlreadyCrawled reports whether link has been fetched before,
// so the crawler never visits the same page twice.
func IsLinkAlreadyCrawled(link string) bool {
    for i := range alreadyCrawledList {
        if alreadyCrawledList[i] == link {
            return true
        }
    }
    return false
}
// AddLinkInAlreadyCrawledList records link as visited so future
// IsLinkAlreadyCrawled checks skip it.
func AddLinkInAlreadyCrawledList(link string) {
    alreadyCrawledList = append(alreadyCrawledList, link)
}
// AddLinkInPendingQueue appends link to the tail of the FIFO queue of
// URLs still to be crawled.
func AddLinkInPendingQueue(link string) {
    pending = append(pending, link)
}
// AddLinkInBrokenLinksQueue records link as broken (it responded with
// a non-200 status) for the summary printed at the end of the crawl.
func AddLinkInBrokenLinksQueue(link string) {
    brokenLinks = append(brokenLinks, link)
}
// main drives the crawl: it seeds the pending queue with the local
// root URL, drains the queue one page at a time (crawlPage pushes any
// newly discovered links back onto it), then prints every broken link
// found and the total elapsed time.
func main() {
    start := time.Now()
    AddLinkInPendingQueue("http://" + localHostWithPort)
    for len(pending) > 0 {
        // Pop the head of the queue; crawlPage may grow the tail.
        x := pending[0]
        pending = pending[1:]
        if err := crawlPage(x); err != nil {
            // BUG FIX: the original called t.Errorf here, but t (a
            // *testing.T) does not exist in main and the file cannot
            // compile; report the error to stdout instead.
            fmt.Println("Crawl error:", err)
        }
    }
    duration := time.Since(start)
    fmt.Println("________________")
    // BUG FIX: count was assigned with `count = 0` although the earlier
    // count was scoped to the crawl loop's for-statement; it must be
    // declared here with := for the file to compile.
    count := 0
    for _, l := range brokenLinks {
        count++
        fmt.Println(count, "Broken. | ", l)
    }
    fmt.Println("Time taken:", duration)
}
// crawlPage fetches uri, records it as crawled, and enqueues every
// in-domain link found on the page that has not been seen before.
// A non-200 response puts uri on the broken-links list and returns an
// error. TLS verification is disabled because the target is a local
// test server (self-signed certificate).
func crawlPage(uri string) error {
    if IsLinkAlreadyCrawled(uri) {
        fmt.Println("Already visited: Ignoring uri | ", uri)
        return nil
    }
    transport := &http.Transport{
        TLSClientConfig: &tls.Config{
            InsecureSkipVerify: true,
        },
    }
    client := http.Client{Transport: transport}
    resp, err := client.Get(uri)
    if err != nil {
        fmt.Println("Got error: ", err.Error())
        return err
    }
    // BUG FIX: Close must be deferred before the non-200 early return
    // below; the original deferred it after that return, leaking the
    // response body (and its connection) for every broken link.
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        AddLinkInBrokenLinksQueue(uri)
        return errors.New(fmt.Sprintf("Got %v instead of 200", resp.StatusCode))
    }
    links := ParseLinks(resp.Body)
    links = ConvertLinksToLocalHost(links)
    for _, link := range links {
        if !InOurDomain(link) {
            continue
        }
        absolute := FixURL(link, uri)
        // Don't enqueue a page twice, and never enqueue the page itself.
        if !IsLinkAlreadyCrawled(absolute) && !IsLinkInPendingQueue(absolute) && absolute != uri {
            AddLinkInPendingQueue(absolute)
        }
    }
    AddLinkInAlreadyCrawledList(uri)
    return nil
}
// InOurDomain reports whether link belongs to the site being crawled.
// Relative links (any scheme other than http/https, including none)
// count as in-domain; absolute http(s) links qualify only when their
// host equals localHostWithPort. Unparseable links are out of domain.
func InOurDomain(link string) bool {
    parsed, err := url.Parse(link)
    if err != nil {
        return false
    }
    switch parsed.Scheme {
    case "http", "https":
        return parsed.Host == localHostWithPort
    default:
        return true
    }
}
// ConvertLinksToLocalHost rewrites the first occurrence of the
// production host name in each link to the local test-server address;
// links without that host pass through unchanged.
func ConvertLinksToLocalHost(links []string) []string {
    var rewritten []string
    for _, l := range links {
        rewritten = append(rewritten, strings.Replace(l, "leantricks.com", localHostWithPort, 1))
    }
    return rewritten
}
// FixURL resolves href (possibly relative) against base and returns
// the absolute URL as a string. If either href or base fails to parse,
// the empty string is returned.
func FixURL(href, base string) string {
    ref, err := url.Parse(href)
    if err != nil {
        return ""
    }
    baseURL, err := url.Parse(base)
    if err != nil {
        return ""
    }
    return baseURL.ResolveReference(ref).String()
}
// ParseLinks tokenizes an HTML document and returns the URLs referenced
// by <a> and <link> tags (href attribute) and <script> tags (src
// attribute). Tokenization stops at the first ErrorToken, which covers
// both io.EOF and malformed input; whatever was collected is returned.
func ParseLinks(httpBody io.Reader) []string {
    var links []string
    page := html.NewTokenizer(httpBody)
    for {
        tokenType := page.Next()
        if tokenType == html.ErrorToken {
            return links
        }

        token := page.Token()
        if tokenType != html.StartTagToken && tokenType != html.SelfClosingTagToken {
            continue
        }
        switch token.DataAtom.String() {
        case "a", "link":
            for _, attr := range token.Attr {
                if attr.Key == "href" {
                    links = append(links, attr.Val)
                }
            }
        case "script":
            // BUG FIX: <script> references its resource via src, not
            // href, so the original "script" case could never match an
            // attribute and script links were silently dropped.
            for _, attr := range token.Attr {
                if attr.Key == "src" {
                    links = append(links, attr.Val)
                }
            }
        }
    }
}

您可以并发地调用 crawlPage()，并用互斥锁保护
alreadyCrawledList
pending
brokenLinks
这几个共享变量（尽管这样性能并不理想）。另一方面，若要真正提升性能，则需要对代码做大量修改

我做了一个快速检查与4个链接,似乎有一半的持续时间。我用一个简单的http服务器及其应用程序编写了一个示例代码

谢谢,
-Anoop

我建议你看看这本著名的书中的一个并发网络爬虫的例子。这不是我想要的。因为需要搜索
BrokenLinks
,但不需要搜索已知结果@Maniankara@user3373877,你能再解释一下吗?从上面的链接中,您可以看到,我已经测试了也断开的链接:。此外,我还没有从这里修改功能:这是我想要的。goroutine和它使用的通道。