@@ -74,15 +74,16 @@ func buildLlamaStackCoreConfig(_ reconciler.Reconciler, _ *olsv1alpha1.OLSConfig
7474 return map [string ]interface {}{
7575 "version" : "2" ,
7676 "image_name" : "minimal-viable-llama-stack-configuration" ,
77- // Minimal APIs for RAG + MCP: agents (for MCP), files, inference, safety (required by agents), tool_runtime, vector_io
78- // Commented out: datasetio, eval, post_training, scoring, telemetry (not available in this Llama Stack version)
79- "apis" : []string {"agents" /* "datasetio", "eval", */ , "files" , "inference" /* , "post_training", */ , "safety" /* , "scoring", "telemetry" */ , "tool_runtime" , "vector_io" },
77+ // Minimal APIs for RAG + MCP: agents (for MCP), files, inference, safety (required by agents), tool_runtime, vector_io
78+ // Commented out: datasetio, eval, post_training, prompts, scoring, telemetry
79+ // (not needed for basic RAG + MCP, or not available in this Llama Stack version)
80+ "apis" : []string {"agents" /* "datasetio", "eval", */ , "files" , "inference" /* , "post_training", */ , "safety" /* , "scoring", "telemetry"*/ , "tool_runtime" , "vector_io" },
8081 "benchmarks" : []interface {}{},
8182 "container_image" : nil ,
8283 "datasets" : []interface {}{},
8384 "external_providers_dir" : nil ,
8485 "inference_store" : map [string ]interface {}{
85- "db_path" : "/tmp/llama-stack /inference_store.db" ,
86+ "db_path" : ".llama/distributions/ollama /inference_store.db" ,
8687 "type" : "sqlite" ,
8788 },
8889 "logging" : nil ,
@@ -205,15 +206,23 @@ func buildLlamaStackInferenceProviders(_ reconciler.Reconciler, _ context.Contex
205206 envVarName := utils .ProviderNameToEnvVarName (provider .Name )
206207
207208 // Map OLSConfig provider types to Llama Stack provider types
208- // Note: Only providers supported by Llama Stack are included
209209 switch provider .Type {
210- case "openai" :
211- providerConfig ["provider_type" ] = "remote::openai"
210+ case "openai" , "rhoai_vllm" , "rhelai_vllm" :
212211 config := map [string ]interface {}{}
213-
214- // Set environment variable name for API key
215- // Llama Stack will substitute ${env.VAR_NAME} with the actual env var value
216- config ["api_key" ] = fmt .Sprintf ("${env.%s_API_KEY}" , envVarName )
212+ // Determine the appropriate Llama Stack provider type
213+ // - OpenAI uses remote::openai (validates against OpenAI model whitelist)
214+ // - vLLM uses remote::vllm (accepts any custom model names)
215+ if provider .Type == "openai" {
216+ providerConfig ["provider_type" ] = "remote::openai"
217+ // Set API key from environment variable
218+ // Llama Stack will substitute ${env.VAR_NAME} with the actual env var value
219+ config ["api_key" ] = fmt .Sprintf ("${env.%s_API_KEY}" , envVarName )
220+ } else {
221+ providerConfig ["provider_type" ] = "remote::vllm"
222+ // Set API key from environment variable
223+ // Llama Stack will substitute ${env.VAR_NAME} with the actual env var value
224+ config ["api_token" ] = fmt .Sprintf ("${env.%s_API_KEY}" , envVarName )
225+ }
217226
218227 // Add custom URL if specified
219228 if provider .URL != "" {
@@ -246,14 +255,14 @@ func buildLlamaStackInferenceProviders(_ reconciler.Reconciler, _ context.Contex
246255 }
247256 providerConfig ["config" ] = config
248257
249- case "watsonx" , "rhoai_vllm" , "rhelai_vllm" , " bam" :
258+ case "watsonx" , "bam" :
250259 // These providers are not supported by Llama Stack
251260 // They are handled directly by lightspeed-stack (LCS), not Llama Stack
252- return nil , fmt .Errorf ("provider type '%s' (provider '%s') is not currently supported by Llama Stack. Supported types: openai, azure_openai" , provider .Type , provider .Name )
261+ return nil , fmt .Errorf ("provider type '%s' (provider '%s') is not currently supported by Llama Stack. Supported types: openai, azure_openai, rhoai_vllm, rhelai_vllm " , provider .Type , provider .Name )
253262
254263 default :
255264 // Unknown provider type
256- return nil , fmt .Errorf ("unknown provider type '%s' (provider '%s'). Supported types: openai, azure_openai" , provider .Type , provider .Name )
265+ return nil , fmt .Errorf ("unknown provider type '%s' (provider '%s'). Supported types: openai, azure_openai, rhoai_vllm, rhelai_vllm " , provider .Type , provider .Name )
257266 }
258267
259268 providers = append (providers , providerConfig )
0 commit comments