Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
90 commits
Select commit Hold shift + click to select a range
26c8c61
feat(dotAI): replace OpenAIClient with LangChain4J abstraction layer …
ihoffmann-dot Mar 24, 2026
48f268b
feat(dotAI): remove legacy config support, require providerConfig
ihoffmann-dot Mar 24, 2026
0e56d8c
test(dotAI): add unit and integration tests for LangChain4J client layer
ihoffmann-dot Mar 25, 2026
5179db3
fix(dotAI): change BOOL+hidden params to STRING in dotAI.yml
ihoffmann-dot Mar 25, 2026
86057b1
fix(dotAI): remove legacy hidden params from dotAI.yml
ihoffmann-dot Mar 25, 2026
3a5387c
fix(dotAI): update dotAI.yml description to reflect LangChain4J integ…
ihoffmann-dot Mar 26, 2026
ebbeaf1
fix(dotAI): remove legacy OpenAI model validation from AIAppValidator
ihoffmann-dot Mar 26, 2026
3de8a67
fix(dotAI): update /completions/config to reflect providerConfig-base…
ihoffmann-dot Mar 26, 2026
cfdd3cf
fix(dotAI): support maxCompletionTokens for o-series OpenAI models
ihoffmann-dot Mar 27, 2026
bfa54d1
fix(dotAI): replace legacy getApiKey guard with isEnabled in ImageRes…
ihoffmann-dot Mar 30, 2026
6a1213d
refactor(dotAI): remove dead OpenAI model-fetch flow from AIModels
ihoffmann-dot Mar 30, 2026
80820a6
fix(dotAI): handle base64 image responses for models that don't retur…
ihoffmann-dot Mar 30, 2026
07f7bc1
fix(dotAI): send text content (not token IDs) to LangChain4J embeddin…
ihoffmann-dot Mar 30, 2026
d79b37c
fix(dotAI): skip token encoding guard when model not in jtokkit registry
ihoffmann-dot Mar 30, 2026
46028c2
fix(dotAI): add missing IPUtils import in AIModelsTest
ihoffmann-dot Mar 31, 2026
abe115e
refactor(dotAI): PR review comments fixes
ihoffmann-dot Apr 1, 2026
e01cb46
refactor(dotAI): extract build helper in LangChain4jModelFactory to r…
ihoffmann-dot Apr 1, 2026
d9078de
refactor(dotAI): remove unused loadModels, activateModels and getAvai…
ihoffmann-dot Apr 1, 2026
ba7f173
refactor(dotAI): convert ProviderConfig to Immutables interface
ihoffmann-dot Apr 1, 2026
805fc7d
fix(dotAI): update unit tests for ProviderConfig Immutables migration
ihoffmann-dot Apr 2, 2026
14c417f
fix(dotAI): remove broken test methods for removed AIModels methods
ihoffmann-dot Apr 2, 2026
433c9c8
fix(dotAI): address Claude bot security and correctness review comments
ihoffmann-dot Apr 2, 2026
91ca878
fix(dotAI): remove maximumSize from model caches, keep TTL only
ihoffmann-dot Apr 2, 2026
92fae06
refactor(dotAI): remove dead AIModels and AIModelFallbackStrategy
ihoffmann-dot Apr 2, 2026
fe8cb0d
fix(dotAI): address remaining PR review comments
ihoffmann-dot Apr 6, 2026
52085fb
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 6, 2026
30b0a59
fix(dotAI): address remaining PR review comments
ihoffmann-dot Apr 6, 2026
598dac9
fix(dotAI): migrate integration tests to providerConfig flow
ihoffmann-dot Apr 6, 2026
4dd52e3
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 6, 2026
f754c93
feat(dotAI): add Azure OpenAI provider support
ihoffmann-dot Apr 6, 2026
48cf56c
test(dotAI): add Azure OpenAI factory tests, fix Netty/Reactor versio…
ihoffmann-dot Apr 6, 2026
025bb0c
style(dotAI): remove section comment from LangChain4jModelFactory
ihoffmann-dot Apr 6, 2026
111a0af
fix(dotAI): restore comment
ihoffmann-dot Apr 7, 2026
61a4c35
refactor(dotAI): address remaining PR review comments
ihoffmann-dot Apr 8, 2026
c5f873a
refactor(dotAI): address remaining PR review comments
ihoffmann-dot Apr 8, 2026
9c5509c
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 8, 2026
31cb86e
test(dotAI): fix WireMock stubs and CompletionsAPI for LangChain4J co…
ihoffmann-dot Apr 9, 2026
b44c35b
feat(dotAI): validate required ProviderConfig fields in LangChain4jMo…
ihoffmann-dot Apr 9, 2026
1d537af
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 9, 2026
039aae8
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 9, 2026
1dae27f
feat(dotAI): validate required Azure OpenAI fields in LangChain4jMode…
ihoffmann-dot Apr 9, 2026
17fd32a
test(dotAI): add missing validation tests to LangChain4jModelFactoryTest
ihoffmann-dot Apr 9, 2026
da008c0
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 9, 2026
18c5b44
fix(dotAI): remove legacy fields from config endpoint, fix providerCo…
ihoffmann-dot Apr 10, 2026
5fcd079
feat(dotAI): remove model/temperature params from workflow actionlets
ihoffmann-dot Apr 10, 2026
b1145dc
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 13, 2026
62d2e05
fix(dotAI): address PR review comments (dimensions, maxTokens, isEnab…
ihoffmann-dot Apr 13, 2026
111db46
feat(dotAI): implement streaming chat via LangChain4J StreamingChatModel
ihoffmann-dot Apr 13, 2026
ba763c6
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 13, 2026
d4b6855
feat(dotAI): add Azure OpenAI streaming chat support
ihoffmann-dot Apr 13, 2026
07252a4
fix(dotAI): use plain ObjectMapper for providerConfig redaction to av…
ihoffmann-dot Apr 14, 2026
924f6b2
fix(dotAI): use plain ObjectMapper in AppConfig, add isEnabled diagno…
ihoffmann-dot Apr 14, 2026
7fd1b4f
fix(dotAI): check providerConfig instead of apiKey for config warning…
ihoffmann-dot Apr 14, 2026
f3e1d0f
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 14, 2026
86e1776
fix(dotAI): allow unquoted control chars in providerConfig JSON parsing
ihoffmann-dot Apr 14, 2026
4fbb6a4
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 14, 2026
27cc6ee
fix(ai): strip control chars from providerConfig before JSON parse
ihoffmann-dot Apr 14, 2026
a39bf1c
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 14, 2026
b1dea4c
test(ai): add AppConfigTest for providerConfig JSON parsing with embe…
ihoffmann-dot Apr 14, 2026
4aaa0fd
fix(ai): sanitize providerConfig at construction time so all consumer…
ihoffmann-dot Apr 14, 2026
9e61911
debug(ai): log providerConfig snippet around parse error position
ihoffmann-dot Apr 14, 2026
1822dc8
fix(ai): restore DotObjectMapperProvider and remove diagnostic logging
ihoffmann-dot Apr 14, 2026
9e6a393
fix(ai): address PR review comments on LangChain4J integration
ihoffmann-dot Apr 15, 2026
c4421e5
fix(ai): replace new ObjectMapper() with DotObjectMapperProvider in C…
ihoffmann-dot Apr 15, 2026
febd3d7
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 15, 2026
187c493
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 15, 2026
92be76c
fix(ai): update Postman stubs and collection for LangChain4J migration
ihoffmann-dot Apr 15, 2026
fb72963
fix(ai): make imageSize optional in dotAI.yml
ihoffmann-dot Apr 15, 2026
df7d464
fix(ai): update WireMock body patterns for LangChain4J JSON spacing
ihoffmann-dot Apr 15, 2026
9acf22e
fix(ai): relax embedding count assertion to greaterThan(0)
ihoffmann-dot Apr 15, 2026
aefd7da
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 15, 2026
7587479
fix(ai): relax search result assertions to handle identical WireMock …
ihoffmann-dot Apr 15, 2026
2684167
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 15, 2026
0ca23c2
fix(ai): add missing openAiResponse fields and relax seo assertion
ihoffmann-dot Apr 16, 2026
ca551a5
fix(ai): update config endpoint and test to reflect new providerConfi…
ihoffmann-dot Apr 16, 2026
ff423a1
fix(ai-tests): convert SSE stubs to JSON, add stream-specific variant…
ihoffmann-dot Apr 16, 2026
6b094dd
fix(ai-tests): fix WireMock stub priority and pattern for AIViewToolT…
ihoffmann-dot Apr 16, 2026
d6b5788
fix(dotAI): apply PR review fixes (SSE double newline, null modelName…
ihoffmann-dot Apr 17, 2026
258dc17
Merge branch 'main' into dot-ai-langchain-integration
fabrizzio-dotCMS Apr 17, 2026
1632c7e
Merge branch 'main' into dot-ai-langchain-integration
fabrizzio-dotCMS Apr 17, 2026
af6109b
fix(dotAI): address PR review comments - stale logs, NPE, model overr…
ihoffmann-dot Apr 17, 2026
c79ac52
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 17, 2026
d426c74
fix(dotAI): address PR review comments - temperature guard, dead toke…
ihoffmann-dot Apr 17, 2026
2a3fa03
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 17, 2026
9525fc8
Merge branch 'main' into dot-ai-langchain-integration
fabrizzio-dotCMS Apr 17, 2026
4dbf79e
fix(dotAI): warn on temperature <= 0 in prompt()
ihoffmann-dot Apr 17, 2026
871cbda
fix(dotAI): add missing Logger import in CompletionsAPIImpl
ihoffmann-dot Apr 17, 2026
931000c
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 17, 2026
9e82c57
Merge branch 'main' into dot-ai-langchain-integration
ihoffmann-dot Apr 20, 2026
d17ac92
Merge branch 'dot-ai-langchain-integration' into dot-ai-langchain-azu…
ihoffmann-dot Apr 20, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 48 additions & 5 deletions bom/application/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
<graalvm.polyglot.version>25.0.1</graalvm.polyglot.version>
<micrometer.version>1.13.10</micrometer.version>
<opensearch.version>3.3.0</opensearch.version>
<langchain4j.version>1.0.0</langchain4j.version>
</properties>
<dependencyManagement>

Expand Down Expand Up @@ -70,13 +71,55 @@
<scope>import</scope>
</dependency>

<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-bom</artifactId>
<version>${langchain4j.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>


<!-- reactor-core: pin to 3.4.41 to resolve conflict between existing deps (3.3.16)
and langchain4j-azure-open-ai (3.4+). Required for ContextView. -->
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<version>3.4.41</version>
</dependency>
<!-- Netty: pin to 4.1.118.Final to resolve conflict between pgjdbc-ng (4.1.63)
and langchain4j-azure-open-ai (4.1.118). Required for DefaultHeaders$ValueValidator. -->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-common</artifactId>
<version>4.1.118.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId>
<version>4.1.118.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport</artifactId>
<version>4.1.118.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-resolver</artifactId>
<version>4.1.118.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec</artifactId>
<version>4.1.118.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-handler</artifactId>
<version>4.1.118.Final</version>
</dependency>


<!--
Expand Down
10 changes: 10 additions & 0 deletions dotCMS/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -499,6 +499,16 @@
<groupId>io.vavr</groupId>
<artifactId>vavr</artifactId>
</dependency>
<dependency>
<!-- LangChain4J OpenAI provider: Chat, Embedding, Image models via OpenAI API -->
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
</dependency>
<dependency>
<!-- LangChain4J Azure OpenAI provider: Chat, Embedding, Image models via Azure OpenAI Service -->
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-azure-open-ai</artifactId>
</dependency>
<dependency>
<groupId>jakarta.inject</groupId>
<artifactId>jakarta.inject-api</artifactId>
Expand Down
1 change: 1 addition & 0 deletions dotCMS/src/main/java/com/dotcms/ai/AiKeys.java
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ public class AiKeys {
public static final String COUNT = "count";
public static final String INPUT = "input";
public static final String RESPONSE_FORMAT = "response_format";
public static final String B64_JSON = "b64_json";

private AiKeys() {}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,15 @@ public class AsyncEmbeddingsCallStrategy implements EmbeddingsCallStrategy {

@Override
public void bulkEmbed(final List<String> inodes, final EmbeddingsForm embeddingsForm) {
    // Asynchronous strategy: hand the bulk-embedding job to the shared AI thread
    // pool and return immediately instead of blocking the calling thread.
    // NOTE(review): the stale pre-rename submit line (OPEN_AI_THREAD_POOL_KEY) was
    // diff residue — only the renamed AI_THREAD_POOL_KEY submission is kept.
    DotConcurrentFactory.getInstance()
            .getSubmitter(AI_THREAD_POOL_KEY)
            .submit(new BulkEmbeddingsRunner(inodes, embeddingsForm));
}

@Override
public void embed(final EmbeddingsAPIImpl embeddingsAPI,
                  final Contentlet contentlet,
                  final String content,
                  final String indexName) {
    // Asynchronous strategy: embed a single contentlet's text on the shared AI
    // thread pool so indexing does not block the caller.
    // NOTE(review): removed the stale duplicate submit that still used the
    // deleted OPEN_AI_THREAD_POOL_KEY constant (unified-diff residue).
    DotConcurrentFactory.getInstance()
            .getSubmitter(AI_THREAD_POOL_KEY)
            .submit(new EmbeddingsRunner(embeddingsAPI, contentlet, content, indexName));
}

}
29 changes: 22 additions & 7 deletions dotCMS/src/main/java/com/dotcms/ai/api/CompletionsAPIImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import com.dotmarketing.business.web.WebAPILocator;
import com.dotmarketing.exception.DotRuntimeException;
import com.dotmarketing.util.Config;
import com.dotmarketing.util.Logger;
import com.dotmarketing.util.UtilMethods;
import com.dotmarketing.util.json.JSONArray;
import com.dotmarketing.util.json.JSONObject;
Expand Down Expand Up @@ -73,6 +74,9 @@ public JSONObject prompt(final String systemPrompt,
final Model model = config.resolveModelOrThrow(modelIn, AIModelType.TEXT)._2;
final JSONObject json = new JSONObject();

if (temperature <= 0) {
Logger.warn(this.getClass(), "Temperature is " + temperature + ". Set a positive value in providerConfig if unintended.");
}
json.put(AiKeys.TEMPERATURE, temperature);
buildMessages(systemPrompt, userPrompt, json);

Expand Down Expand Up @@ -130,10 +134,10 @@ public void summarizeStream(final CompletionsForm summaryRequest, final OutputSt

@Override
public JSONObject raw(final JSONObject json, final String userId) {
    // Sends the raw request JSON to the configured AI provider and parses the
    // provider's response into a JSONObject. Both request and response are logged
    // lazily: debugLogger only evaluates the supplier when AI debug logging is on.
    // NOTE(review): the duplicated "OpenAI request/response" logger calls were
    // pre-rename diff residue — only the provider-neutral "AI ..." lines remain.
    config.debugLogger(this.getClass(), () -> "AI request:" + json.toString(2));

    final String response = sendRequest(config, json, userId).getResponse();
    config.debugLogger(this.getClass(), () -> "AI response:" + response);

    return new JSONObject(response);
}
Expand Down Expand Up @@ -226,10 +230,18 @@ private ResolvedModel resolveModel(final CompletionsForm completionsForm) {
.collect(Collectors.toList());

if (UtilMethods.isSet(models)) {
final Tuple2<AIModel, Model> modelTuple = config
.resolveModelOrThrow(completionsForm.model, AIModelType.TEXT);

return new ResolvedModel(modelTuple._2.getName(), modelTuple._1.getMaxTokens());
if (UtilMethods.isSet(completionsForm.model)) {
final Tuple2<AIModel, Model> modelTuple = config
.resolveModelOrThrow(completionsForm.model, AIModelType.TEXT);
final int maxTokens = modelTuple._1.getMaxTokens() > 0
? modelTuple._1.getMaxTokens()
: DEFAULT_AI_MAX_NUMBER_OF_TOKENS_VALUE.get();
return new ResolvedModel(modelTuple._2.getName(), maxTokens);
}
final int maxTokens = aiModel.getMaxTokens() > 0
? aiModel.getMaxTokens()
: DEFAULT_AI_MAX_NUMBER_OF_TOKENS_VALUE.get();
return new ResolvedModel(aiModel.getCurrentModel(), maxTokens);
} else if (UtilMethods.isSet(completionsForm.model)) {
return new ResolvedModel(completionsForm.model, DEFAULT_AI_MAX_NUMBER_OF_TOKENS_VALUE.get());
} else {
Expand Down Expand Up @@ -304,12 +316,15 @@ private String reduceStringToTokenSize(final String incomingString, final int ma

private JSONObject buildRequestJson(final CompletionsForm form) {
final AIModel aiModel = config.getModel();
final int effectiveMaxTokens = aiModel.getMaxTokens() > 0
? aiModel.getMaxTokens()
: DEFAULT_AI_MAX_NUMBER_OF_TOKENS_VALUE.get();
final int promptTokens = countTokens(form.prompt);

final JSONArray messages = new JSONArray();
final String textPrompt = reduceStringToTokenSize(
form.prompt,
aiModel.getMaxTokens() - form.responseLengthTokens - promptTokens);
effectiveMaxTokens - form.responseLengthTokens - promptTokens);

messages.add(Map.of(AiKeys.ROLE, AiKeys.USER, AiKeys.CONTENT, textPrompt));

Expand Down
2 changes: 1 addition & 1 deletion dotCMS/src/main/java/com/dotcms/ai/api/EmbeddingsAPI.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
*/
public interface EmbeddingsAPI {

String OPEN_AI_THREAD_POOL_KEY = "OpenAIThreadPool";
String AI_THREAD_POOL_KEY = "AIThreadPool";

void shutdown();

Expand Down
34 changes: 18 additions & 16 deletions dotCMS/src/main/java/com/dotcms/ai/api/EmbeddingsAPIImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ public int deleteByQuery(@NotNull final String deleteQuery, final Optional<Strin
@Override
public void shutdown() {
    // Best-effort shutdown of the shared AI thread pool; Try.run swallows any
    // failure so shutdown never propagates an exception to the caller.
    // NOTE(review): dropped the stale duplicate call that still referenced the
    // removed OPEN_AI_THREAD_POOL_KEY constant (unified-diff residue).
    Try.run(() -> DotConcurrentFactory.getInstance().shutdown(AI_THREAD_POOL_KEY));
}

@Override
Expand Down Expand Up @@ -196,7 +196,7 @@ public boolean generateEmbeddingsForContent(@NotNull final Contentlet contentlet
return false;
}

DotConcurrentFactory.getInstance().getSubmitter(OPEN_AI_THREAD_POOL_KEY).submit(new EmbeddingsRunner(this, contentlet, parsed.get(), indexName));
DotConcurrentFactory.getInstance().getSubmitter(AI_THREAD_POOL_KEY).submit(new EmbeddingsRunner(this, contentlet, parsed.get(), indexName));

return true;
}
Expand Down Expand Up @@ -343,9 +343,11 @@ public Tuple2<Integer, List<Float>> pullOrGenerateEmbeddings(final String conten
.getEncoding()
.map(encoding -> encoding.encode(content))
.orElse(List.of());
final int tokenCount = tokens.isEmpty() ? content.split("\\s+").length : tokens.size();
if (tokens.isEmpty()) {
config.debugLogger(this.getClass(), () -> String.format("No tokens for content ID '%s' were encoded: %s", contentId, content));
return Tuple.of(0, List.of());
config.debugLogger(this.getClass(), () -> String.format(
"Encoding unavailable for content ID '%s', using word count (%d) as token estimate",
contentId, tokenCount));
}

final Tuple3<String, Integer, List<Float>> dbEmbeddings =
Expand All @@ -358,13 +360,13 @@ public Tuple2<Integer, List<Float>> pullOrGenerateEmbeddings(final String conten
return Tuple.of(dbEmbeddings._2, dbEmbeddings._3);
}

final Tuple2<Integer, List<Float>> openAiEmbeddings = Tuple.of(
tokens.size(),
sendTokensToOpenAI(contentId, tokens, userId));
saveEmbeddingsForCache(content, openAiEmbeddings);
EMBEDDING_CACHE.put(hashed, openAiEmbeddings);
final Tuple2<Integer, List<Float>> embeddings = Tuple.of(
tokenCount,
generateEmbeddings(contentId, content, userId));
saveEmbeddingsForCache(content, embeddings);
EMBEDDING_CACHE.put(hashed, embeddings);

return openAiEmbeddings;
return embeddings;
}

@CloseDBIfOpened
Expand Down Expand Up @@ -434,20 +436,20 @@ private void saveEmbeddingsForCache(final String content, final Tuple2<Integer,
*
* @return A {@link List} of {@link Float} values representing the embeddings.
*/
private List<Float> sendTokensToOpenAI(final String contentId,
@NotNull final List<Integer> tokens,
private List<Float> generateEmbeddings(final String contentId,
@NotNull final String content,
final String userId) {
final JSONObject json = new JSONObject();
json.put(AiKeys.MODEL, config.getEmbeddingsModel().getCurrentModel());
json.put(AiKeys.INPUT, tokens);
config.debugLogger(this.getClass(), () -> String.format("Content tokens for content ID '%s': %s", contentId, tokens));
json.put(AiKeys.INPUT, content);
config.debugLogger(this.getClass(), () -> String.format("Generating embeddings for content ID '%s'", contentId));
final String responseString = AIProxyClient.get()
.callToAI(JSONObjectAIRequest.quickEmbeddings(config, json, userId))
.getResponse();
config.debugLogger(this.getClass(), () -> String.format("OpenAI Response for content ID '%s': %s",
config.debugLogger(this.getClass(), () -> String.format("AI Response for content ID '%s': %s",
contentId, responseString.replace("\n", BLANK)));
final JSONObject jsonResponse = Try.of(() -> new JSONObject(responseString)).getOrElseThrow(e -> {
Logger.error(this, "OpenAI Response String is not a valid JSON", e);
Logger.error(this, "AI Response String is not a valid JSON", e);
config.debugLogger(this.getClass(), () -> String.format("Invalid JSON Response: %s", responseString));
return new DotCorruptedDataException(e);
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
*/
public interface EmbeddingsCallStrategy {

String OPEN_AI_THREAD_POOL_KEY = "OpenAIThreadPool";
String AI_THREAD_POOL_KEY = "AIThreadPool";
/**
* Embeds contentlets based on the provided inodes and form data.
*
Expand Down
30 changes: 21 additions & 9 deletions dotCMS/src/main/java/com/dotcms/ai/api/OpenAIImageAPIImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,11 @@
import io.vavr.control.Try;

import javax.servlet.http.HttpServletRequest;
import java.io.ByteArrayInputStream;
import java.net.URI;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Base64;
import java.util.Date;

public class OpenAIImageAPIImpl implements ImageAPI {
Expand Down Expand Up @@ -60,7 +63,7 @@ public JSONObject sendRequest(final JSONObject jsonObject) {

String responseString = "";
try {
responseString = doRequest(config.getApiImageUrl(), jsonObject);
responseString = doRequest(jsonObject);

JSONObject returnObject = new JSONObject(responseString);
if (returnObject.containsKey(AiKeys.ERROR)) {
Expand Down Expand Up @@ -99,21 +102,30 @@ public JSONObject sendTextPrompt(final String textPrompt) {
}

private JSONObject createTempFile(final JSONObject imageResponse) {
final String url = imageResponse.optString(AiKeys.URL);
if (UtilMethods.isEmpty(() -> url)) {
Logger.warn(this.getClass(), "imageResponse does not include URL:" + imageResponse);
throw new DotRuntimeException("Image Response does not include URL:" + imageResponse);
}

try {
final String fileName = generateFileName(imageResponse.getString(AiKeys.ORIGINAL_PROMPT));
imageResponse.put("tempFileName", fileName);

final DotTempFile file = tempFileApi.createTempFileFromUrl(fileName, getRequest(), new URL(url), 20);
final String url = imageResponse.optString(AiKeys.URL);
final String b64 = imageResponse.optString(AiKeys.B64_JSON);
final DotTempFile file;

if (!UtilMethods.isEmpty(() -> url)) {
file = tempFileApi.createTempFileFromUrl(fileName, getRequest(), URI.create(url).toURL(), 20);
} else if (!UtilMethods.isEmpty(() -> b64)) {
final byte[] imageBytes = Base64.getDecoder().decode(b64);
file = tempFileApi.createTempFile(fileName, getRequest(), new ByteArrayInputStream(imageBytes));
} else {
Logger.warn(this.getClass(), "imageResponse does not include URL or base64 data:" + imageResponse);
throw new DotRuntimeException("Image Response does not include URL or base64 data:" + imageResponse);
}

imageResponse.put(AiKeys.RESPONSE, file.id);
imageResponse.put("tempFile", file.file.getAbsolutePath());

return imageResponse;
} catch (DotRuntimeException e) {
throw e;
} catch (Exception e) {
imageResponse.put(AiKeys.RESPONSE, e.getMessage());
imageResponse.put(AiKeys.ERROR, e.getMessage());
Expand Down Expand Up @@ -173,7 +185,7 @@ private String generateFileName(final String originalPrompt) {
}

@VisibleForTesting
String doRequest(final String urlIn, final JSONObject json) {
String doRequest(final JSONObject json) {
return AIProxyClient.get()
.callToAI(JSONObjectAIRequest.quickImage(config, json, UtilMethods.extractUserIdOrNull(user)))
.getResponse();
Expand Down
Loading
Loading