Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion api/v1alpha1/olsconfig_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ type ProviderSpec struct {
// Provider type
// +kubebuilder:validation:Required
// +required
// +kubebuilder:validation:Enum=azure_openai;bam;openai;watsonx;rhoai_vllm;rhelai_vllm;fake_provider
// +kubebuilder:validation:Enum=azure_openai;openai;watsonx;rhoai_vllm;rhelai_vllm;fake_provider
// +operator-sdk:csv:customresourcedefinitions:type=spec,displayName="Provider Type"
Type string `json:"type"`
// Azure OpenAI deployment name
Expand Down
1 change: 0 additions & 1 deletion bundle/manifests/ols.openshift.io_olsconfigs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,6 @@ spec:
description: Provider type
enum:
- azure_openai
- bam
- openai
- watsonx
- rhoai_vllm
Expand Down
1 change: 0 additions & 1 deletion config/crd/bases/ols.openshift.io_olsconfigs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,6 @@ spec:
description: Provider type
enum:
- azure_openai
- bam
- openai
- watsonx
- rhoai_vllm
Expand Down
2 changes: 1 addition & 1 deletion internal/controller/appserver/assets_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ var _ = Describe("App server assets", func() {
Name: "testProvider",
URL: testURL,
CredentialsPath: "/etc/apikeys/test-secret",
Type: "bam",
Type: "openai",
Models: []utils.ModelConfig{
{
Name: "testModel",
Expand Down
143 changes: 142 additions & 1 deletion internal/controller/lcore/assets_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ func TestBuildLlamaStackYAML_SupportedProvider(t *testing.T) {

func TestBuildLlamaStackYAML_UnsupportedProvider(t *testing.T) {
// Test unsupported providers
unsupportedProviders := []string{"watsonx", "bam", "rhoai_vllm", "rhelai_vllm"}
unsupportedProviders := []string{"rhoai_vllm", "rhelai_vllm"}

for _, providerType := range unsupportedProviders {
t.Run(providerType, func(t *testing.T) {
Expand Down Expand Up @@ -255,6 +255,147 @@ func TestBuildLlamaStackYAML_AzureProvider(t *testing.T) {
t.Logf("Successfully validated Llama Stack YAML with Azure provider (%d bytes)", len(yamlOutput))
}

func TestBuildLlamaStackYAML_WatsonxProvider(t *testing.T) {
	// Fake secret holding the API token that the Watsonx provider reads.
	apiSecret := &corev1.Secret{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "watsonx-secret",
			Namespace: "test-namespace",
		},
		Data: map[string][]byte{
			"apitoken": []byte("test-api-key"),
		},
	}

	// Fake client pre-loaded with the secret so credential lookup succeeds.
	sch := runtime.NewScheme()
	_ = corev1.AddToScheme(sch)
	_ = olsv1alpha1.AddToScheme(sch)
	cl := fake.NewClientBuilder().
		WithScheme(sch).
		WithObjects(apiSecret).
		Build()

	// Reconciler under test, wired to the fake client.
	rec := utils.NewTestReconciler(
		cl,
		zap.New(zap.UseDevMode(true)),
		sch,
		"test-namespace",
	)

	// CR declaring a single Watsonx provider with one model.
	olsCR := &olsv1alpha1.OLSConfig{
		Spec: olsv1alpha1.OLSConfigSpec{
			LLMConfig: olsv1alpha1.LLMSpec{
				Providers: []olsv1alpha1.ProviderSpec{
					{
						Name:            "watsonx",
						Type:            "watsonx",
						URL:             "https://us-south.ml.cloud.ibm.com",
						WatsonProjectID: "my-project-id",
						Models: []olsv1alpha1.ModelSpec{
							{
								Name:              "ibm/granite-13b-chat-v2",
								ContextWindowSize: 8192,
							},
						},
						CredentialsSecretRef: corev1.LocalObjectReference{
							Name: "watsonx-secret",
						},
					},
				},
			},
		},
	}

	// Render the Llama Stack configuration.
	rendered, err := buildLlamaStackYAML(rec, context.Background(), olsCR)
	if err != nil {
		t.Fatalf("buildLlamaStackYAML returned error for Watsonx provider: %v", err)
	}

	// The output must be parseable YAML.
	var parsed map[string]interface{}
	if err = yaml.Unmarshal([]byte(rendered), &parsed); err != nil {
		t.Fatalf("buildLlamaStackYAML produced invalid YAML: %v", err)
	}

	// Navigate to the inference provider list.
	provSection, ok := parsed["providers"].(map[string]interface{})
	if !ok {
		t.Fatalf("providers section not found or invalid type")
	}
	inferenceList, ok := provSection["inference"].([]interface{})
	if !ok || len(inferenceList) == 0 {
		t.Fatalf("inference providers not found or empty")
	}

	// Locate the Watsonx entry (skipping e.g. the sentence-transformers one).
	var watsonx map[string]interface{}
	for _, entry := range inferenceList {
		m, isMap := entry.(map[string]interface{})
		if isMap && m["provider_type"] == "remote::watsonx" {
			watsonx = m
			break
		}
	}
	if watsonx == nil {
		t.Fatalf("Watsonx provider not found in inference providers")
	}

	// Sanity-check the provider_type of the entry we matched.
	if watsonx["provider_type"] != "remote::watsonx" {
		t.Errorf("Expected provider_type 'remote::watsonx', got '%v'", watsonx["provider_type"])
	}

	cfg, ok := watsonx["config"].(map[string]interface{})
	if !ok {
		t.Fatalf("provider config not found or invalid type")
	}

	// Every Watsonx-specific key must be present in the provider config:
	// api_key, project_id, and base_url (Llama Stack uses base_url, not url).
	for _, key := range []string{"api_key", "project_id", "base_url"} {
		if _, present := cfg[key]; !present {
			t.Errorf("Expected field '%s' not found in Watsonx provider config", key)
		}
	}

	// api_key must be an env-var placeholder of the form ${env.<NAME>_API_KEY}.
	apiKey, isString := cfg["api_key"].(string)
	switch {
	case !isString || apiKey == "":
		t.Errorf("api_key field is missing or empty")
	case !strings.HasPrefix(apiKey, "${env.") || !strings.HasSuffix(apiKey, "_API_KEY}"):
		t.Errorf("api_key doesn't have correct env var format, got: %s", apiKey)
	}

	// project_id must round-trip the value declared in the CR.
	if projectID, isStr := cfg["project_id"].(string); isStr {
		if projectID != "my-project-id" {
			t.Errorf("Expected project_id 'my-project-id', got '%s'", projectID)
		}
	} else {
		t.Errorf("project_id field is missing or invalid type")
	}

	t.Logf("Successfully validated Llama Stack YAML with Watsonx provider (%d bytes)", len(rendered))
}

// Helper function to check if a string contains a substring
func contains(s, substr string) bool {
return len(s) >= len(substr) && (s == substr || len(s) > len(substr) && findSubstring(s, substr))
Expand Down
24 changes: 20 additions & 4 deletions internal/controller/lcore/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -242,14 +242,30 @@ func buildLlamaStackInferenceProviders(_ reconciler.Reconciler, _ context.Contex
}
providerConfig["config"] = config

case "watsonx", "rhoai_vllm", "rhelai_vllm", "bam":
// These providers are not supported by Llama Stack
case "watsonx":
providerConfig["provider_type"] = "remote::watsonx"
config := map[string]interface{}{}

// Set environment variable name for API key
config["api_key"] = fmt.Sprintf("${env.%s_API_KEY}", envVarName)

// Watsonx-specific fields
if provider.WatsonProjectID != "" {
config["project_id"] = provider.WatsonProjectID
}
if provider.URL != "" {
config["base_url"] = provider.URL
}
providerConfig["config"] = config

case "rhoai_vllm", "rhelai_vllm":
// These providers are not currently supported by Llama Stack
// They are handled directly by lightspeed-stack (LCS), not Llama Stack
return nil, fmt.Errorf("provider type '%s' (provider '%s') is not currently supported by Llama Stack. Supported types: openai, azure_openai", provider.Type, provider.Name)
return nil, fmt.Errorf("provider type '%s' (provider '%s') is not currently supported by Llama Stack. Supported types: openai, azure_openai, watsonx", provider.Type, provider.Name)

default:
// Unknown provider type
return nil, fmt.Errorf("unknown provider type '%s' (provider '%s'). Supported types: openai, azure_openai", provider.Type, provider.Name)
return nil, fmt.Errorf("unknown provider type '%s' (provider '%s'). Supported types: openai, azure_openai, watsonx", provider.Type, provider.Name)
}

providers = append(providers, providerConfig)
Expand Down
2 changes: 1 addition & 1 deletion internal/controller/utils/test_fixtures.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ func GetDefaultOLSConfigCR() *olsv1alpha1.OLSConfig {
Providers: []olsv1alpha1.ProviderSpec{
{
Name: "testProvider",
Type: "bam",
Type: "openai",
URL: "https://testURL",
Models: []olsv1alpha1.ModelSpec{
{
Expand Down
3 changes: 1 addition & 2 deletions internal/controller/utils/types.go
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,7 @@ type ProviderConfig struct {
// Provider API URL
URL string `json:"url,omitempty"`
// Path to the file containing API provider credentials in the app server container.
// default to "bam_api_key.txt"
CredentialsPath string `json:"credentials_path,omitempty" default:"bam_api_key.txt"`
CredentialsPath string `json:"credentials_path,omitempty"`
// List of models from the provider
Models []ModelConfig `json:"models,omitempty"`
// Provider type
Expand Down