Skip to content

Commit

Permalink
Merge pull request #1004 from Abirdcfly/httpdebug
Browse files Browse the repository at this point in the history
feat: add http debug client for llm call
  • Loading branch information
bjwswang committed Apr 10, 2024
2 parents 088a3ca + 3442609 commit 5248b4f
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 2 deletions.
41 changes: 41 additions & 0 deletions pkg/langchainwrap/debug_transport.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
// Inspired by https://github.com/tmc/langchaingo/pull/702
package langchainwrap

import (
"net/http"
"net/http/httputil"

"k8s.io/klog/v2"
)

// DebugHTTPClient is an http.Client that logs each request and response with
// full contents (headers and bodies) when klog verbosity level 5 is enabled
// on the request context's logger. It wraps http.DefaultTransport and sets
// no Timeout, so callers should bound requests via the request context.
var DebugHTTPClient = &http.Client{ //nolint:gochecknoglobals
	Transport: &logTransport{http.DefaultTransport},
}

// logTransport is an http.RoundTripper decorator that dumps the full request
// and response around the wrapped Transport's exchange (see RoundTrip).
type logTransport struct {
	// Transport performs the actual HTTP exchange; DebugHTTPClient wires
	// in http.DefaultTransport here.
	Transport http.RoundTripper
}

// RoundTrip implements http.RoundTripper. When verbosity level 5 is enabled
// on the request context's logger, it logs the outgoing request and the
// incoming response with full contents using httputil.DumpRequestOut and
// httputil.DumpResponse; otherwise it delegates to the wrapped Transport
// without overhead beyond the verbosity check.
func (t *logTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	// Resolve the context logger once instead of on every use.
	logger := klog.FromContext(req.Context())
	if logger.V(5).Enabled() {
		dump, err := httputil.DumpRequestOut(req, true)
		if err != nil {
			return nil, err
		}
		logger.V(5).Info(string(dump))
	}
	resp, err := t.Transport.RoundTrip(req)
	if err != nil {
		return nil, err
	}
	if logger.V(5).Enabled() {
		// DumpResponse reads and replaces resp.Body, so the caller can
		// still consume it on success.
		dump, err := httputil.DumpResponse(resp, true)
		if err != nil {
			// The caller never sees this response, so close the body
			// here to avoid leaking the underlying connection.
			resp.Body.Close()
			return nil, err
		}
		logger.V(5).Info(string(dump))
	}
	return resp, nil
}
4 changes: 2 additions & 2 deletions pkg/langchainwrap/llm.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ func GetLangchainLLM(ctx context.Context, llm *v1alpha1.LLM, c client.Client, mo
}
model = models[0]
}
return openai.New(openai.WithToken(apiKey), openai.WithBaseURL(llm.Get3rdPartyLLMBaseURL()), openai.WithModel(model), openai.WithCallback(log.KLogHandler{LogLevel: 3}))
return openai.New(openai.WithToken(apiKey), openai.WithBaseURL(llm.Get3rdPartyLLMBaseURL()), openai.WithModel(model), openai.WithCallback(log.KLogHandler{LogLevel: 3}), openai.WithHTTPClient(DebugHTTPClient))
case llms.Gemini:
if model == "" {
models := llm.GetModelList()
Expand Down Expand Up @@ -104,7 +104,7 @@ func GetLangchainLLM(ctx context.Context, llm *v1alpha1.LLM, c client.Client, mo
if os.Getenv(GatewayUseExternalURLEnv) == "true" {
gatewayURL = gateway.ExternalAPIServer
}
return openai.New(openai.WithModel(modelName), openai.WithBaseURL(gatewayURL), openai.WithToken("fake"), openai.WithCallback(log.KLogHandler{LogLevel: 3}))
return openai.New(openai.WithModel(modelName), openai.WithBaseURL(gatewayURL), openai.WithToken("fake"), openai.WithCallback(log.KLogHandler{LogLevel: 3}), openai.WithHTTPClient(DebugHTTPClient))
}
return nil, fmt.Errorf("unknown provider type")
}

0 comments on commit 5248b4f

Please sign in to comment.