Skip to content

Commit 5effe69

Browse files
saurabh-ossclaude
and committed
refactor: implement LLMProvider as proper Describable ExtensionPoint
LLMProvider is now an abstract class implementing ExtensionPoint and Describable<LLMProvider>. Each provider has: - @DataBoundConstructor with sensible defaults (endpoint, modelId) - An @Extension DescriptorImpl with getDisplayName() and doTestConnection() - Its own config.jelly rendered inline via f:hetero-radio Common fields (endpoint, modelId, apiKeyCredentialId) live in LLMProvider base. Common LLM call settings (temperature, timeout, maxTokens) remain in ForgeAIGlobalConfiguration and are read at call time — no duplication. ForgeAIGlobalConfiguration now stores a single LLMProvider instance selected via f:hetero-radio. Removed: providerType string, llmEndpoint, modelId, apiKeyCredentialId, doFillProviderTypeItems, resolveApiKey, doFillApiKeyCredentialIdItems (moved to LLMProviderDescriptor base class). LLMProviderFactory is kept as a deprecated pass-through for backward compatibility but is no longer the canonical way to obtain a provider. Third-party plugins can now add new LLM backends by extending LLMProvider and annotating their Descriptor with @Extension — no changes required here. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent 82993d3 commit 5effe69

10 files changed

Lines changed: 350 additions & 290 deletions

File tree

src/main/java/io/forgeai/jenkins/config/ForgeAIGlobalConfiguration.java

Lines changed: 35 additions & 116 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,17 @@
11
package io.forgeai.jenkins.config;
22

3-
import com.cloudbees.plugins.credentials.CredentialsMatchers;
4-
import com.cloudbees.plugins.credentials.CredentialsProvider;
5-
import com.cloudbees.plugins.credentials.common.StandardListBoxModel;
6-
import com.cloudbees.plugins.credentials.domains.DomainRequirement;
73
import hudson.Extension;
8-
import hudson.security.ACL;
94
import hudson.util.FormValidation;
10-
import hudson.util.ListBoxModel;
5+
import io.forgeai.jenkins.llm.LLMProvider;
6+
import io.forgeai.jenkins.llm.LLMProvider.LLMProviderDescriptor;
7+
import io.forgeai.jenkins.llm.OpenAICompatibleProvider;
118
import jenkins.model.GlobalConfiguration;
129
import jenkins.model.Jenkins;
1310
import net.sf.json.JSONObject;
1411
import org.jenkinsci.Symbol;
15-
import org.jenkinsci.plugins.plaincredentials.StringCredentials;
1612
import org.kohsuke.stapler.DataBoundSetter;
17-
import org.kohsuke.stapler.QueryParameter;
1813
import org.kohsuke.stapler.StaplerRequest;
1914
import org.kohsuke.stapler.verb.POST;
20-
import io.forgeai.jenkins.llm.*;
21-
22-
import java.util.Collections;
2315

2416
/**
2517
* Global configuration page for ForgeAI Pipeline Intelligence.
@@ -29,29 +21,28 @@
2921
@Symbol("forgeAI")
3022
public class ForgeAIGlobalConfiguration extends GlobalConfiguration {
3123

32-
// ── LLM Provider Settings ──────────────────────────────────────────
33-
private String providerType = "openai"; // openai | anthropic | ollama
34-
private String llmEndpoint = "https://api.openai.com/";
35-
private String modelId = "gpt-4o";
36-
private String apiKeyCredentialId = "";
37-
private double temperature = 0.2;
24+
// ── LLM Provider (selected via hetero-radio) ───────────────────
25+
private LLMProvider provider = new OpenAICompatibleProvider();
26+
27+
// ── Common LLM call settings ───────────────────────────────────
28+
private double temperature = 0.2;
3829
private int timeoutSeconds = 120;
39-
private int maxTokens = 4096;
30+
private int maxTokens = 4096;
4031

41-
// ── Feature Toggles ────────────────────────────────────────────────
42-
private boolean enableCodeReview = true;
32+
// ── Feature Toggles ────────────────────────────────────────────
33+
private boolean enableCodeReview = true;
4334
private boolean enableVulnerabilityAnalysis = true;
44-
private boolean enableArchitectureDrift = true;
45-
private boolean enableTestGapAnalysis = true;
46-
private boolean enableReleaseReadiness = true;
47-
private boolean enableCommitIntelligence = true;
48-
private boolean enableDependencyRisk = true;
49-
private boolean enablePipelineAdvisor = true;
50-
51-
// ── Reporting ──────────────────────────────────────────────────────
52-
private boolean publishHtmlReport = true;
53-
private boolean failOnCritical = false;
54-
private int criticalThreshold = 3; // fail if score < 3/10
35+
private boolean enableArchitectureDrift = true;
36+
private boolean enableTestGapAnalysis = true;
37+
private boolean enableReleaseReadiness = true;
38+
private boolean enableCommitIntelligence = true;
39+
private boolean enableDependencyRisk = true;
40+
private boolean enablePipelineAdvisor = true;
41+
42+
// ── Reporting ──────────────────────────────────────────────────
43+
private boolean publishHtmlReport = true;
44+
private boolean failOnCritical = false;
45+
private int criticalThreshold = 3;
5546
private String customSystemPrompt = "";
5647

5748
public ForgeAIGlobalConfiguration() {
@@ -62,115 +53,43 @@ public static ForgeAIGlobalConfiguration get() {
6253
return GlobalConfiguration.all().get(ForgeAIGlobalConfiguration.class);
6354
}
6455

65-
// ── Persist ────────────────────────────────────────────────────────
6656
@Override
6757
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
6858
req.bindJSON(this, json);
6959
save();
7060
return true;
7161
}
7262

73-
// ── Resolve the API key from Jenkins Credentials store ────────────
74-
public String resolveApiKey() {
75-
if (apiKeyCredentialId == null || apiKeyCredentialId.isBlank()) return "";
76-
StringCredentials cred = CredentialsMatchers.firstOrNull(
77-
CredentialsProvider.lookupCredentialsInItemGroup(
78-
StringCredentials.class,
79-
Jenkins.get(),
80-
ACL.SYSTEM2,
81-
Collections.<DomainRequirement>emptyList()),
82-
CredentialsMatchers.withId(apiKeyCredentialId));
83-
return cred != null ? cred.getSecret().getPlainText() : "";
84-
}
85-
86-
// ── Validation helpers shown in real-time in the UI ────────────────
87-
@POST
88-
public FormValidation doCheckLlmEndpoint(@QueryParameter String value) {
89-
Jenkins.get().checkPermission(Jenkins.ADMINISTER);
90-
if (value == null || value.isBlank()) return FormValidation.error("Endpoint is required");
91-
if (!value.startsWith("http")) return FormValidation.error("Must start with http:// or https://");
92-
return FormValidation.ok();
93-
}
94-
95-
@POST
96-
public FormValidation doCheckModelId(@QueryParameter String value) {
97-
Jenkins.get().checkPermission(Jenkins.ADMINISTER);
98-
if (value == null || value.isBlank()) return FormValidation.error("Model ID is required");
99-
return FormValidation.ok();
63+
/** Returns all registered LLMProvider descriptors for the hetero-radio widget. */
64+
public java.util.List<LLMProviderDescriptor> getProviderDescriptors() {
65+
return LLMProvider.all();
10066
}
10167

10268
@POST
103-
public FormValidation doTestConnection(@QueryParameter String providerType,
104-
@QueryParameter String llmEndpoint,
105-
@QueryParameter String modelId,
106-
@QueryParameter String apiKeyCredentialId) {
69+
public FormValidation doTestConnection() {
10770
Jenkins.get().checkPermission(Jenkins.ADMINISTER);
71+
if (provider == null) return FormValidation.error("No provider configured.");
10872
try {
109-
// Temporarily build a provider for the test
110-
ForgeAIGlobalConfiguration temp = new ForgeAIGlobalConfiguration();
111-
temp.setProviderType(providerType);
112-
temp.setLlmEndpoint(llmEndpoint);
113-
temp.setModelId(modelId);
114-
temp.setApiKeyCredentialId(apiKeyCredentialId);
115-
116-
LLMProvider provider = LLMProviderFactory.create(temp);
117-
if (provider.healthCheck()) {
118-
return FormValidation.ok("Connection successful — %s is reachable.", provider.displayName());
119-
} else {
120-
return FormValidation.error("Health-check failed. Verify endpoint, API key, and model ID.");
121-
}
73+
return provider.healthCheck()
74+
? FormValidation.ok("Connection successful — %s is reachable.", provider.displayName())
75+
: FormValidation.error("Health-check failed. Verify endpoint, API key, and model ID.");
12276
} catch (Exception e) {
12377
return FormValidation.error("Connection test failed: " + e.getMessage());
12478
}
12579
}
12680

127-
/** Populate the API-key credential dropdown. */
128-
@POST
129-
public ListBoxModel doFillApiKeyCredentialIdItems(@QueryParameter String apiKeyCredentialId) {
130-
if (!Jenkins.get().hasPermission(Jenkins.ADMINISTER)) {
131-
return new StandardListBoxModel().includeCurrentValue(apiKeyCredentialId);
132-
}
133-
return new StandardListBoxModel()
134-
.includeEmptyValue()
135-
.includeMatchingAs(
136-
ACL.SYSTEM2,
137-
Jenkins.get(),
138-
StringCredentials.class,
139-
Collections.<DomainRequirement>emptyList(),
140-
CredentialsMatchers.always())
141-
.includeCurrentValue(apiKeyCredentialId);
142-
}
143-
144-
/** Provider type dropdown. */
145-
public ListBoxModel doFillProviderTypeItems() {
146-
ListBoxModel items = new ListBoxModel();
147-
items.add("OpenAI / OpenAI-Compatible (LM Studio, vLLM, LocalAI)", "openai");
148-
items.add("Anthropic Claude", "anthropic");
149-
items.add("Ollama (Local)", "ollama");
150-
return items;
151-
}
152-
153-
// ── Getters & Setters (DataBoundSetter for Jenkins persistence) ───
154-
155-
public String getProviderType() { return providerType; }
156-
@DataBoundSetter public void setProviderType(String v) { this.providerType = v; }
157-
158-
public String getLlmEndpoint() { return llmEndpoint; }
159-
@DataBoundSetter public void setLlmEndpoint(String v) { this.llmEndpoint = v; }
160-
161-
public String getModelId() { return modelId; }
162-
@DataBoundSetter public void setModelId(String v) { this.modelId = v; }
81+
// ── Getters & Setters ──────────────────────────────────────────
16382

164-
public String getApiKeyCredentialId() { return apiKeyCredentialId; }
165-
@DataBoundSetter public void setApiKeyCredentialId(String v) { this.apiKeyCredentialId = v; }
83+
public LLMProvider getProvider() { return provider; }
84+
@DataBoundSetter public void setProvider(LLMProvider v) { this.provider = v; }
16685

16786
public double getTemperature() { return temperature; }
16887
@DataBoundSetter public void setTemperature(double v) { this.temperature = v; }
16988

170-
public int getTimeoutSeconds() { return timeoutSeconds; }
89+
public int getTimeoutSeconds() { return timeoutSeconds > 0 ? timeoutSeconds : 120; }
17190
@DataBoundSetter public void setTimeoutSeconds(int v) { this.timeoutSeconds = v; }
17291

173-
public int getMaxTokens() { return maxTokens; }
92+
public int getMaxTokens() { return maxTokens > 0 ? maxTokens : 4096; }
17493
@DataBoundSetter public void setMaxTokens(int v) { this.maxTokens = v; }
17594

17695
public boolean isEnableCodeReview() { return enableCodeReview; }

src/main/java/io/forgeai/jenkins/llm/AnthropicProvider.java

Lines changed: 50 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -3,51 +3,48 @@
33
import com.google.gson.Gson;
44
import com.google.gson.JsonArray;
55
import com.google.gson.JsonObject;
6+
import hudson.Extension;
67
import hudson.util.Secret;
8+
import io.forgeai.jenkins.config.ForgeAIGlobalConfiguration;
79
import okhttp3.*;
10+
import org.jenkinsci.Symbol;
11+
import org.kohsuke.stapler.DataBoundConstructor;
12+
import org.kohsuke.stapler.QueryParameter;
13+
import org.kohsuke.stapler.verb.POST;
814

915
import java.io.IOException;
1016
import java.util.concurrent.TimeUnit;
1117

12-
/**
13-
* Native Anthropic Claude API provider.
14-
* Uses the Messages API (v1/messages) directly.
15-
*/
16-
public class AnthropicProvider implements LLMProvider {
18+
public class AnthropicProvider extends LLMProvider {
1719

1820
private static final long serialVersionUID = 1L;
1921
private static final String DEFAULT_ENDPOINT = "https://api.anthropic.com/";
2022
private static final String API_VERSION = "2023-06-01";
2123
private static final MediaType JSON_MEDIA = MediaType.get("application/json; charset=utf-8");
2224
private static final Gson GSON = new Gson();
2325

24-
private final String endpoint;
25-
private final Secret apiKey;
26-
private final String model;
27-
private final double temperature;
28-
private final int timeoutSeconds;
29-
30-
public AnthropicProvider(String endpoint, String apiKey, String model,
31-
double temperature, int timeoutSeconds) {
32-
this.endpoint = (endpoint == null || endpoint.isBlank()) ? DEFAULT_ENDPOINT
33-
: (endpoint.endsWith("/") ? endpoint : endpoint + "/");
34-
this.apiKey = Secret.fromString(apiKey);
35-
this.model = model;
36-
this.temperature = temperature;
37-
this.timeoutSeconds = timeoutSeconds;
26+
@DataBoundConstructor
27+
public AnthropicProvider() {
28+
this.endpoint = DEFAULT_ENDPOINT;
29+
this.modelId = "claude-opus-4-7";
3830
}
3931

4032
@Override
4133
public String complete(String systemPrompt, String userPrompt, int maxTokens) throws LLMException {
34+
ForgeAIGlobalConfiguration cfg = ForgeAIGlobalConfiguration.get();
35+
Secret apiKey = Secret.fromString(resolveApiKey());
36+
String ep = (endpoint == null || endpoint.isBlank()) ? DEFAULT_ENDPOINT
37+
: (endpoint.endsWith("/") ? endpoint : endpoint + "/");
38+
4239
OkHttpClient client = new OkHttpClient.Builder()
43-
.connectTimeout(timeoutSeconds, TimeUnit.SECONDS)
44-
.readTimeout(timeoutSeconds * 2L, TimeUnit.SECONDS)
40+
.connectTimeout(cfg.getTimeoutSeconds(), TimeUnit.SECONDS)
41+
.readTimeout(cfg.getTimeoutSeconds() * 2L, TimeUnit.SECONDS)
4542
.build();
4643

4744
JsonObject body = new JsonObject();
48-
body.addProperty("model", model);
45+
body.addProperty("model", modelId);
4946
body.addProperty("max_tokens", maxTokens);
50-
body.addProperty("temperature", temperature);
47+
body.addProperty("temperature", cfg.getTemperature());
5148
body.addProperty("system", systemPrompt);
5249

5350
JsonArray messages = new JsonArray();
@@ -57,10 +54,8 @@ public String complete(String systemPrompt, String userPrompt, int maxTokens) th
5754
messages.add(userMsg);
5855
body.add("messages", messages);
5956

60-
String url = endpoint + "v1/messages";
61-
6257
Request request = new Request.Builder()
63-
.url(url)
58+
.url(ep + "v1/messages")
6459
.post(RequestBody.create(body.toString(), JSON_MEDIA))
6560
.addHeader("Content-Type", "application/json")
6661
.addHeader("x-api-key", Secret.toString(apiKey))
@@ -70,8 +65,7 @@ public String complete(String systemPrompt, String userPrompt, int maxTokens) th
7065
try (Response response = client.newCall(request).execute()) {
7166
String responseBody = response.body() != null ? response.body().string() : "";
7267
if (!response.isSuccessful()) {
73-
throw new LLMException(
74-
"Anthropic API returned HTTP " + response.code() + ": " + responseBody,
68+
throw new LLMException("Anthropic API returned HTTP " + response.code() + ": " + responseBody,
7569
response.code(), displayName());
7670
}
7771
JsonObject json = GSON.fromJson(responseBody, JsonObject.class);
@@ -92,15 +86,39 @@ public String complete(String systemPrompt, String userPrompt, int maxTokens) th
9286
@Override
9387
public boolean healthCheck() {
9488
try {
95-
String result = complete("You are a health-check bot.", "Reply OK", 10);
96-
return result != null && !result.isBlank();
89+
return complete("You are a health-check bot.", "Reply OK", 10) != null;
9790
} catch (LLMException e) {
9891
return false;
9992
}
10093
}
10194

10295
@Override
103-
public String displayName() {
104-
return "Anthropic Claude (" + model + ")";
96+
public String displayName() { return "Anthropic Claude (" + modelId + ")"; }
97+
98+
@Extension
99+
@Symbol("anthropic")
100+
public static class DescriptorImpl extends LLMProviderDescriptor {
101+
102+
@Override
103+
public String getDisplayName() { return "Anthropic Claude"; }
104+
105+
@POST
106+
public hudson.util.FormValidation doTestConnection(
107+
@QueryParameter String endpoint,
108+
@QueryParameter String modelId,
109+
@QueryParameter String apiKeyCredentialId) {
110+
jenkins.model.Jenkins.get().checkPermission(jenkins.model.Jenkins.ADMINISTER);
111+
try {
112+
AnthropicProvider p = new AnthropicProvider();
113+
p.setEndpoint(endpoint);
114+
p.setModelId(modelId);
115+
p.setApiKeyCredentialId(apiKeyCredentialId);
116+
return p.healthCheck()
117+
? hudson.util.FormValidation.ok("Connection successful — Anthropic API reachable.")
118+
: hudson.util.FormValidation.error("Health-check failed. Verify endpoint, API key, and model.");
119+
} catch (Exception e) {
120+
return hudson.util.FormValidation.error("Connection test failed: " + e.getMessage());
121+
}
122+
}
105123
}
106124
}

0 commit comments

Comments
 (0)