
Using a proxy to get an OpenAI response returns "malformed HTTP response", how to fix?

Open · Eumenides1 opened this issue 10 months ago • 1 comment

This code returns "https://ai-yyds.com/v1/chat/completions": malformed HTTP response, but I can get a response from the same endpoint with Postman. Why?

package main

import (
	"context"
	"flag"
	"fmt"
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

var flagDebugHTTP = flag.Bool("debug-http", true, "enable debugging of HTTP requests and responses")

// DebugTransport wraps HTTP proxying plus request/response debugging
type DebugTransport struct {
	ProxyURL *url.URL
}

// RoundTrip executes a single HTTP transaction and prints the request/response details
func (d *DebugTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	// Dump the outgoing request to the console
	if *flagDebugHTTP {
		dump, err := httputil.DumpRequestOut(req, true)
		if err != nil {
			log.Fatalf("Error dumping request: %v", err)
		}
		fmt.Printf("%s\n\n", dump)
	}

	// Configure the proxy and send the request
	proxy := http.ProxyURL(d.ProxyURL)
	transport := &http.Transport{Proxy: proxy}
	return transport.RoundTrip(req)
}

func main() {
	// Configure the proxy
	proxyUrl, err := url.Parse("https://ai-yyds.com")
	if err != nil {
		log.Fatalf("proxy url error %v\n", err)
	}

	// Use the DebugTransport
	httpClient := &http.Client{
		Transport: &DebugTransport{
			ProxyURL: proxyUrl,
		},
	}

	flag.Parse()
	var opts []openai.Option
	if *flagDebugHTTP {
		// Use the custom HTTP client
		opts = append(opts, openai.WithHTTPClient(httpClient))
		// opts = append(opts, openai.WithModel("gpt-4-1106-preview"))
	}

	ctx := context.Background()
	llm, err := openai.New(opts...)
	if err != nil {
		panic(err)
	}
	prompt := "What would be a good company name for a company that makes colorful socks?"
	completion, err := llms.GenerateFromSinglePrompt(ctx,
		llm,
		prompt,
		llms.WithTemperature(0.8),
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion)
}

Eumenides1 · Apr 06 '24 14:04

Maybe you should check your proxy service. It works for me when I try this with a local proxy service.

CrazyWr · Apr 11 '24 03:04
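
For reference, a minimal sketch of the two setups discussed above. It assumes either a local HTTP proxy (the 127.0.0.1:7890 address is a placeholder) or that ai-yyds.com is an OpenAI-compatible relay that should be set as the client's base URL via openai.WithBaseURL rather than as a Transport proxy; the helper function names are illustrative, and the OpenAI token is still read from the OPENAI_API_KEY environment variable. This is a sketch under those assumptions, not a confirmed fix.

package main

import (
	"context"
	"fmt"
	"log"
	"net/http"
	"net/url"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

// newLLMViaLocalProxy routes requests through a local HTTP proxy, as the comment
// above suggests. The proxy address below is a placeholder assumption.
func newLLMViaLocalProxy() (*openai.LLM, error) {
	proxyURL, err := url.Parse("http://127.0.0.1:7890") // placeholder: your local proxy
	if err != nil {
		return nil, err
	}
	httpClient := &http.Client{
		Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)},
	}
	return openai.New(openai.WithHTTPClient(httpClient))
}

// newLLMViaBaseURL assumes ai-yyds.com is an OpenAI-compatible relay rather than
// an HTTP proxy, and points the client's base URL at it instead of the Transport.
func newLLMViaBaseURL() (*openai.LLM, error) {
	return openai.New(openai.WithBaseURL("https://ai-yyds.com/v1"))
}

func main() {
	llm, err := newLLMViaLocalProxy() // or newLLMViaBaseURL()
	if err != nil {
		log.Fatal(err)
	}
	completion, err := llms.GenerateFromSinglePrompt(context.Background(), llm,
		"What would be a good company name for a company that makes colorful socks?",
		llms.WithTemperature(0.8),
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion)
}

If the host behind https://ai-yyds.com is not actually an HTTP proxy, passing it to http.ProxyURL is a likely source of the error, which is why the base-URL variant is worth trying.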