
Commit

Polish
anunnakian committed May 1, 2024
1 parent 8dd12fd commit 37ad3d5
Showing 6 changed files with 4 additions and 27 deletions.
docs/docs/integrations/language-models/open-ai.md (2 changes: 1 addition & 1 deletion)
@@ -71,9 +71,9 @@ OpenAiChatModel model = OpenAiChatModel.builder()
.proxy(...)
.logRequests(...)
.logResponses(...)
+ .logStreamingResponses(...)
.tokenizer(...)
.customHeaders(...)
- .isStreaming(...)
.build();
```
See the description of some of the parameters above [here](https://platform.openai.com/docs/api-reference/chat/create).
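As a reading aid, here is a minimal sketch of how the documented builder is typically used once the flag is gone; the API key lookup, model name, and prompt are illustrative assumptions and not part of this commit:

```java
import dev.langchain4j.model.openai.OpenAiChatModel;

public class OpenAiChatModelExample {

    public static void main(String[] args) {
        // Plain synchronous chat model; no isStreaming flag is involved anymore.
        OpenAiChatModel model = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY")) // assumed environment variable
                .modelName("gpt-3.5-turbo")
                .temperature(0.0)
                .logRequests(true)
                .logResponses(true)
                .build();

        // The full answer is returned in one piece.
        System.out.println(model.generate("What is the capital of Germany?"));
    }
}
```

Streaming use cases are served by the separate OpenAiStreamingChatModel, which is presumably why the boolean switch was dropped from the synchronous builder here.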
@@ -25,7 +25,6 @@ class OllamaOpenAiStreamingChatModelIT extends AbstractOllamaLanguageModelInfras
.temperature(0.0)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

@Test
@@ -74,8 +74,7 @@ public OpenAiChatModel(String baseUrl,
Boolean logRequests,
Boolean logResponses,
Tokenizer tokenizer,
- Map<String, String> customHeaders,
- boolean isStreaming) {
+ Map<String, String> customHeaders) {

baseUrl = getOrDefault(baseUrl, OPENAI_URL);
if (OPENAI_DEMO_API_KEY.equals(apiKey)) {
@@ -178,11 +177,7 @@ public int estimateTokenCount(List<ChatMessage> messages) {
}

public static OpenAiChatModel withApiKey(String apiKey) {
- return withApiKey(apiKey, false);
- }
-
- public static OpenAiChatModel withApiKey(String apiKey, boolean isStreaming) {
- return builder().apiKey(apiKey).isStreaming(isStreaming).build();
+ return builder().apiKey(apiKey).build();
}

public static OpenAiChatModelBuilder builder() {
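The second hunk removes the boolean overload of withApiKey, leaving only the single-argument factory. A minimal sketch of the surviving call (the environment variable and prompt are illustrative assumptions):

```java
import dev.langchain4j.model.openai.OpenAiChatModel;

public class WithApiKeyExample {

    public static void main(String[] args) {
        // Only the single-argument factory remains; it delegates to the regular builder.
        OpenAiChatModel model = OpenAiChatModel.withApiKey(System.getenv("OPENAI_API_KEY"));

        System.out.println(model.generate("1 + 1 = ?"));
    }
}
```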
@@ -42,7 +42,6 @@ class OpenAiStreamingChatModelIT {
.temperature(0.0)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

StreamingChatLanguageModel visionModel = OpenAiChatModel.builder()
@@ -53,7 +52,6 @@ class OpenAiStreamingChatModelIT {
.temperature(0.0)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

ToolSpecification calculator = ToolSpecification.builder()
@@ -311,7 +309,6 @@ void should_execute_multiple_tools_in_parallel_then_stream_answer() throws Excep
.temperature(0.0)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

UserMessage userMessage = userMessage("2+2=? 3+3=?");
@@ -423,7 +420,6 @@ void should_stream_valid_json() throws Exception {
.responseFormat("json_object")
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

// when
@@ -576,7 +572,6 @@ void should_use_enum_as_model_name() {
.modelName(GPT_3_5_TURBO)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

String question = "What is the capital of Germany?";
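All five hunks above drop the same .isStreaming(true) line from streaming tests. With the flag gone, streaming goes through the dedicated OpenAiStreamingChatModel and a StreamingResponseHandler; a minimal sketch of that usage outside the test harness follows (model name, prompt, and the crude sleep at the end are illustrative assumptions):

```java
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;

public class StreamingExample {

    public static void main(String[] args) throws InterruptedException {
        OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY")) // assumed environment variable
                .modelName("gpt-3.5-turbo")
                .temperature(0.0)
                .logRequests(true)
                .logResponses(true)
                .build();

        model.generate("Tell me a joke", new StreamingResponseHandler<AiMessage>() {

            @Override
            public void onNext(String token) {
                System.out.print(token); // tokens arrive incrementally
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("\nDone: " + response.finishReason());
            }

            @Override
            public void onError(Throwable error) {
                error.printStackTrace();
            }
        });

        Thread.sleep(30_000); // crude wait so the asynchronous stream can finish in a demo main()
    }
}
```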
@@ -39,17 +39,8 @@ static Stream<StreamingChatLanguageModel> models() {
.organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build(),
- Azure
-
-
-
-
-
-
-
- .builder()
+ AzureOpenAiStreamingChatModel.builder()
.endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
.apiKey(System.getenv("AZURE_OPENAI_KEY"))
.logRequestsAndResponses(true)
@@ -254,7 +245,6 @@ void should_execute_multiple_tools_sequentially_then_answer() throws Exception {
.modelName(GPT_3_5_TURBO_0613) // this model can only call tools sequentially
.temperature(0.0)
.logRequests(true)
- .isStreaming(true)
.logResponses(true)
.build();

@@ -354,7 +344,6 @@ void should_execute_multiple_tools_in_parallel_then_answer() throws Exception {
.temperature(0.0)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build();

ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
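The last hunk's context line builds a MessageWindowChatMemory. For readers unfamiliar with it, a small sketch of how such a sliding-window memory is typically used on its own (the messages are made up for illustration):

```java
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;

public class ChatMemoryExample {

    public static void main(String[] args) {
        // Keeps only the 10 most recent messages; older ones are evicted automatically.
        ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        chatMemory.add(UserMessage.from("2+2=? 3+3=?"));
        chatMemory.add(AiMessage.from("2+2=4 and 3+3=6."));

        // Whatever is still inside the window is what gets sent to the model on the next call.
        chatMemory.messages().forEach(System.out::println);
    }
}
```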
@@ -39,7 +39,6 @@ static Stream<StreamingChatLanguageModel> models() {
.temperature(0.0)
.logRequests(true)
.logResponses(true)
- .isStreaming(true)
.build(),
MistralAiStreamingChatModel.builder()
.apiKey(System.getenv("MISTRAL_AI_API_KEY"))
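Both of the last two files feed parameterized tests from a models() factory that returns one StreamingChatLanguageModel per provider, which is what makes a per-builder isStreaming flag redundant. A simplified sketch of that pattern, reconstructed from the hunks above (the single-class grouping and the exact parameter set are assumptions):

```java
import java.util.stream.Stream;

import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.mistralai.MistralAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;

class StreamingModelsSketch {

    // Each provider contributes its own streaming implementation.
    static Stream<StreamingChatLanguageModel> models() {
        return Stream.of(
                OpenAiStreamingChatModel.builder()
                        .apiKey(System.getenv("OPENAI_API_KEY"))
                        .temperature(0.0)
                        .logRequests(true)
                        .logResponses(true)
                        .build(),
                MistralAiStreamingChatModel.builder()
                        .apiKey(System.getenv("MISTRAL_AI_API_KEY"))
                        .temperature(0.0)
                        .logRequests(true)
                        .logResponses(true)
                        .build()
        );
    }
}
```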
