How to customize http.Client or http.Transport in Go to retry after a timeout?

I want to implement a custom http.Transport for the standard http.Client that automatically retries when the client times out.

Also, for some reason, a custom http.Transport is a must. I have already checked hashicorp/go-retryablehttp, but it does not let me use my own http.Transport.

Here is my attempt at the custom transport:

type CustomTransport struct {
    http.RoundTripper
    // ... private fields
}

func NewCustomTransport(upstream *http.Transport) *CustomTransport {
    upstream.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    // ... other custom settings for the transport
    return &CustomTransport{upstream}
}

func (ct *CustomTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) {
    req.Header.Set("Secret", "blah blah blah")
    // ... other custom settings for each request

    for i := 1; i ... // the rest of the retry loop was cut off when the page was scraped
}

You do not need a custom http.Client or anything of the kind. You simply need to wrap the retrieval in a retry; there are plenty of modules that provide this:

package main

import (
    "io"
    "log"
    "net/http"
    "os"
    "time"

    "github.com/avast/retry-go"
)

func main() {

    r, err := fetchDataWithRetries("http://nonexistant.example.com")
    if err != nil {
        log.Printf("Error fetching data: %s", err)
        os.Exit(1)
    }
    defer r.Body.Close()
    io.Copy(os.Stdout, r.Body)
}

// fetchDataWithRetries is your wrapped retrieval.
// It works with a static configuration for the retries,
// but obviously, you can generalize this function further.
func fetchDataWithRetries(url string) (r *http.Response, err error) {
    retry.Do(
        // The actual function that does "stuff"
        func() error {
            log.Printf("Retrieving data from '%s'", url)
            r, err = http.Get(url)
            return err
        },
        // A function to decide whether you actually want to
        // retry or not. In this case, it would make sense
        // to actually stop retrying, since the host does not exist.
        // Return true if you want to retry, false if not.
        retry.RetryIf(
            func(error) bool {
                log.Printf("Retrieving data: %s", err)
                log.Printf("Deciding whether to retry")
                return true
            }),
        retry.OnRetry(func(try uint, orig error) {
            log.Printf("Retrying to fetch data. Try: %d", try+2)
        }),
        retry.Attempts(3),
        // Basically, we are setting up a delay
        // which is randomized between 2 and 4 seconds.
        retry.Delay(3*time.Second),
        retry.MaxJitter(1*time.Second),
    )

    return
}
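If a customized transport really is a must, as the question says, the same wrapping still works; the retries simply go through a dedicated http.Client instead of http.Get. Below is a rough sketch under that assumption: the transport settings are the ones from the question, while fetchWithClient and the three attempts are illustrative choices, not part of the original answer.

package main

import (
    "crypto/tls"
    "io"
    "log"
    "net/http"
    "os"

    "github.com/avast/retry-go"
)

// client carries the customized transport from the question;
// every retry goes through it instead of http.DefaultClient.
var client = &http.Client{
    Transport: &http.Transport{
        TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
        // ... other custom settings for the transport
    },
}

func main() {
    r, err := fetchWithClient("https://httpbin.org/delay/10")
    if err != nil {
        log.Fatalf("fetch failed: %s", err)
    }
    defer r.Body.Close()
    io.Copy(os.Stdout, r.Body)
}

// fetchWithClient is the same wrapping idea as above, but with
// per-request headers and the custom client.
func fetchWithClient(url string) (r *http.Response, err error) {
    err = retry.Do(
        func() error {
            req, reqErr := http.NewRequest(http.MethodGet, url, nil)
            if reqErr != nil {
                return reqErr
            }
            req.Header.Set("Secret", "blah blah blah")
            r, reqErr = client.Do(req)
            return reqErr
        },
        retry.Attempts(3),
    )
    return
}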

Note that the Timeout field of http.Client is more or less obsolete. Best practice nowadays is to use http.Request.Context() for the timeout.
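For illustration, here is a minimal sketch of that practice. The deadline lives on the request's context rather than on the client; the URL and the eight-second deadline are placeholders, not values from the answer.

package main

import (
    "context"
    "io"
    "log"
    "net/http"
    "os"
    "time"
)

func main() {
    // Give this single request its own deadline instead of setting http.Client.Timeout.
    ctx, cancel := context.WithTimeout(context.Background(), 8*time.Second)
    defer cancel()

    req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://httpbin.org/delay/10", nil)
    if err != nil {
        log.Fatal(err)
    }

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        // When the deadline fires first, the error wraps context.DeadlineExceeded.
        log.Fatalf("request failed: %s", err)
    }
    defer resp.Body.Close()
    io.Copy(os.Stdout, resp.Body)
}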

Thanks to @Flimzy for the inspiration! I tried using a context to control the timeout instead of the http.Client way. Here is the code:

func (ct *CustomTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) {
    req.Header.Set("Secret", "blah blah blah")
    // ... other custom settings for each request

    for i := 1; i ... // the rest of the retry loop was cut off when the page was scraped; see the sketch after the comments below
}

From the comments:

There is no way to do this in all cases, because many HTTP requests simply cannot be retried, particularly when they change data on the server and are not idempotent. (see the sketch below)

Why do you think hashicorp/go-retryablehttp does not let you use your own transport?

It is not clear what problem you are reporting: your code says to retry five times, and your output shows it retrying five times. What is not working as expected?

@Adrian If you look at the timestamps, it did not retry the remaining four requests. The remaining four requests were never actually sent.

Note that the Timeout field of http.Client is more or less obsolete. Best practice nowadays is to use http.Request.Context() for the timeout.

Thank you for your answer! My goal is to keep all callers consistent and avoid changes across the whole codebase.
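The body of the retry loop in the follow-up above was lost when the page was scraped. As a rough sketch only, not the poster's actual code, a context-based RoundTrip could give each attempt its own deadline and, as the first comment suggests, skip automatic retries for non-idempotent requests. The maxRetry and attemptTimeout fields, the retryable helper, and the cancelOnClose wrapper are assumptions for illustration; retrying inside RoundTrip is only safe when the request body can be replayed (a nil body, or GetBody set).

package transport

import (
    "context"
    "io"
    "log"
    "net/http"
    "time"
)

// CustomTransport mirrors the type from the question; maxRetry and
// attemptTimeout are assumed fields added for this sketch.
type CustomTransport struct {
    http.RoundTripper
    maxRetry       int
    attemptTimeout time.Duration
}

// cancelOnClose keeps an attempt's context alive until the caller closes
// the response body, then releases the attempt's timer.
type cancelOnClose struct {
    io.ReadCloser
    cancel context.CancelFunc
}

func (c cancelOnClose) Close() error {
    c.cancel()
    return c.ReadCloser.Close()
}

// retryable is a simple method-based idempotency check, as raised in the
// comments: only "safe" methods are retried automatically.
func retryable(req *http.Request) bool {
    switch req.Method {
    case http.MethodGet, http.MethodHead, http.MethodOptions, http.MethodTrace:
        return true
    default:
        return false
    }
}

func (ct *CustomTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) {
    req.Header.Set("Secret", "blah blah blah")

    // Non-idempotent requests go through exactly once.
    if !retryable(req) {
        return ct.RoundTripper.RoundTrip(req)
    }

    for i := 1; i <= ct.maxRetry; i++ {
        // Each attempt gets its own deadline, derived from the caller's
        // context, instead of relying on http.Client.Timeout.
        ctx, cancel := context.WithTimeout(req.Context(), ct.attemptTimeout)
        attempt := req.Clone(ctx)

        resp, err = ct.RoundTripper.RoundTrip(attempt)
        if err == nil {
            // Keep this attempt's context alive while the caller reads the body.
            resp.Body = cancelOnClose{ReadCloser: resp.Body, cancel: cancel}
            return resp, nil
        }
        cancel()
        // A real implementation would also check that err is really a timeout before retrying.
        log.Printf("#%d got timeout, will retry - %s", i, err)
    }
    return nil, err
}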
Log output from the context-based version: each attempt now gets its own deadline of roughly eight seconds, so all five attempts are actually sent, about forty seconds in total:

2020-07-16T00:06:12.788+0800    DEBUG   begin to get "https://httpbin.org/delay/10"
2020-07-16T00:06:20.794+0800    WARN    #1 got timeout will retry - context deadline exceeded
2020-07-16T00:06:28.794+0800    WARN    #2 got timeout will retry - context deadline exceeded
2020-07-16T00:06:36.799+0800    WARN    #3 got timeout will retry - context deadline exceeded
2020-07-16T00:06:44.803+0800    WARN    #4 got timeout will retry - context deadline exceeded
2020-07-16T00:06:52.809+0800    WARN    #5 got timeout will retry - context deadline exceeded
2020-07-16T00:06:52.809+0800    DEBUG   got final result: context deadline exceeded
2020-07-16T00:06:52.809+0800    WARN    client got error: Get "https://httpbin.org/delay/10": context deadline exceeded
2020-07-16T00:06:52.809+0800    DEBUG   end to get "https://httpbin.org/delay/10", time cost: 40.019334668s