這是一篇較早建立的文章，其中的資訊可能已經有所發展或是發生改變。
// Command main compares response bodies fetched via the standard
// net/http client (test1) and the third-party mozillazg/request
// library (test2, test3), demonstrating that the request library does
// not transparently decompress gzip-encoded bodies: test3 adds the
// manual gzip decompression that makes its output match test1's.
package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"reflect"

	"github.com/mozillazg/request"
)

// test1 fetches url with the standard net/http client, which handles
// gzip transparently, and returns the raw response body.
func test1(url string) (bt []byte, err error) {
	resp, err := http.Get(url)
	if err != nil {
		return
	}
	// Close the body so the underlying connection can be reused
	// (the original code leaked it).
	defer resp.Body.Close()
	bt, err = ioutil.ReadAll(resp.Body)
	return
}

// test2 fetches url with the mozillazg/request library and returns the
// body exactly as received — if the server gzip-compressed it, the
// bytes stay compressed (the source of the "garbled output" the
// surrounding article describes).
func test2(url string) (bt []byte, err error) {
	c := new(http.Client)
	req := request.NewRequest(c)
	resp, err := req.Get(url)
	if err != nil {
		return
	}
	defer resp.Body.Close()
	bt, err = ioutil.ReadAll(resp.Body)
	return
}

// test3 is test2 plus manual gzip decompression: when the response
// advertises Content-Encoding: gzip, the body is wrapped in a gzip
// reader before reading, so the returned bytes are plain text.
func test3(url string) (bt []byte, err error) {
	c := new(http.Client)
	req := request.NewRequest(c)
	resp, err := req.Get(url)
	if err != nil {
		return
	}
	defer resp.Body.Close()

	var reader io.ReadCloser
	if resp.Header.Get("Content-Encoding") == "gzip" {
		reader, err = gzip.NewReader(resp.Body)
		if err != nil {
			return
		}
		// The gzip reader must be closed as well (checksum
		// verification happens on Close); the original leaked it.
		defer reader.Close()
	} else {
		reader = resp.Body
	}
	bt, err = ioutil.ReadAll(reader)
	return
}

// main fetches each test URL three ways and prints whether the byte
// slices match, showing test1 == test3 but test1 != test2 for
// gzip-compressed responses.
func main() {
	urls := [...]string{"https://www.baidu.com", "https://httpbin.org/status/418"}
	for _, url := range urls {
		fmt.Println("\n----------\n")
		fmt.Println("url =", url)
		b1, _ := test1(url)
		b2, _ := test2(url)
		fmt.Println(b1)
		fmt.Println(b2)
		fmt.Println(reflect.DeepEqual(b1, b2))
		fmt.Println("\n----------\n")
		b3, _ := test3(url)
		fmt.Println(b3)
		fmt.Println(reflect.DeepEqual(b1, b3))
	}
}
亂碼產生的原因是網站使用了 gzip 對回應進行壓縮，而 request 庫沒有對其進行相應的解壓；手動解壓後問題就解決了。