diff --git a/README.md b/README.md
index 0587832..a60a7eb 100644
--- a/README.md
+++ b/README.md
@@ -73,10 +73,10 @@ subx-cli config set ai.provider openai
 # Azure OpenAI setup
 export AZURE_OPENAI_API_KEY="your-azure-api-key"
 export AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com"
-export AZURE_OPENAI_DEPLOYMENT_ID="your-deployment-id"
 export AZURE_OPENAI_API_VERSION="2025-04-01-preview"
+# Note: Azure OpenAI deployment ID is now configured via `ai.model` instead of a separate field
 subx-cli config set ai.provider azure-openai
-subx-cli config set ai.deployment_id "your-deployment-id"
+subx-cli config set ai.model "your-deployment-id"
 subx-cli config set ai.api_version "2025-04-01-preview"

 # Configure VAD settings
diff --git a/README.zh-TW.md b/README.zh-TW.md
index aa70c48..1bce1a4 100644
--- a/README.zh-TW.md
+++ b/README.zh-TW.md
@@ -67,10 +67,10 @@ subx-cli config set ai.provider openai
 # Azure OpenAI 設定
 export AZURE_OPENAI_API_KEY="your-azure-api-key"
 export AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com"
-export AZURE_OPENAI_DEPLOYMENT_ID="your-deployment-id"
 export AZURE_OPENAI_API_VERSION="2025-04-01-preview"
+# 注意:Azure OpenAI 部署識別符現已通過 `ai.model` 設定,而非獨立欄位
 subx-cli config set ai.provider azure-openai
-subx-cli config set ai.deployment_id "your-deployment-id"
+subx-cli config set ai.model "your-deployment-id"
 subx-cli config set ai.api_version "2025-04-01-preview"

 # 配置 VAD 設定
diff --git a/docs/configuration-guide.md b/docs/configuration-guide.md
index 718b06f..5a763b6 100644
--- a/docs/configuration-guide.md
+++ b/docs/configuration-guide.md
@@ -71,10 +71,9 @@ base_url = "https://openrouter.ai/api/v1"
 [ai]
 provider = "azure-openai"
 api_key = "your-azure-api-key"
+model = "your-deployment-id" # Use the Azure OpenAI deployment name here
 base_url = "https://your-resource.openai.azure.com"
-deployment_id = "your-deployment-id"
 api_version = "2025-04-01-preview"
-model = "gpt-4o"
 ```

 ## Format Configuration (`[formats]`)
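
The `[ai]` table above is now all Azure OpenAI needs: the deployment name rides in `model`, and there is no separate `deployment_id` key. A minimal sketch of how that table deserializes, assuming the `serde` and `toml` crates and a trimmed-down stand-in for the real config struct (not the crate's actual `AIConfig`):

```rust
// Hedged sketch: a trimmed-down struct mirroring the [ai] table documented above.
// Field names follow the documented keys; the real AIConfig has more fields.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct AiSection {
    provider: String,
    api_key: String,
    /// After this change, `model` carries the Azure OpenAI deployment name.
    model: String,
    base_url: String,
    api_version: Option<String>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let toml_src = r#"
        provider = "azure-openai"
        api_key = "your-azure-api-key"
        model = "your-deployment-id"   # deployment name, not a model family name
        base_url = "https://your-resource.openai.azure.com"
        api_version = "2025-04-01-preview"
    "#;
    let ai: AiSection = toml::from_str(toml_src)?;
    assert_eq!(ai.model, "your-deployment-id"); // no separate deployment_id key anymore
    println!("{ai:?}");
    Ok(())
}
```
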
diff --git a/src/config/field_validator.rs b/src/config/field_validator.rs
index 8913e6f..f98f526 100644
--- a/src/config/field_validator.rs
+++ b/src/config/field_validator.rs
@@ -79,9 +79,6 @@ pub fn validate_field(key: &str, value: &str) -> Result<()> {
         }

         // Azure OpenAI specific fields
-        "ai.deployment_id" => {
-            validate_non_empty_string(value, "Azure OpenAI deployment ID")?;
-        }
         "ai.api_version" => {
             validate_non_empty_string(value, "Azure OpenAI API version")?;
         }
@@ -207,7 +204,6 @@ pub fn get_field_description(key: &str) -> &'static str {
         "ai.retry_attempts" => "Number of retry attempts for AI requests",
         "ai.retry_delay_ms" => "Delay between retry attempts in milliseconds",
         "ai.request_timeout_seconds" => "Request timeout in seconds",
-        "ai.deployment_id" => "Azure OpenAI deployment ID (required for azure-openai)",
         "ai.api_version" => "Azure OpenAI API version (optional, defaults to latest)",

         "sync.default_method" => "Synchronization method ('auto', 'vad', or 'manual')",
diff --git a/src/config/mod.rs b/src/config/mod.rs
index bfe82d0..8415936 100644
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -163,10 +163,6 @@ pub struct AIConfig {
     /// For slow networks or complex requests, you may need to increase this value.
     pub request_timeout_seconds: u64,

-    /// Azure OpenAI deployment ID (required for azure-openai provider)
-    #[serde(default)]
-    pub deployment_id: Option<String>,
-
     /// Azure OpenAI API version (optional, defaults to latest)
     #[serde(default)]
     pub api_version: Option<String>,
@@ -187,7 +183,6 @@ impl Default for AIConfig {
             // Set to 120 seconds to handle slow networks and complex AI requests
             // This is especially important for users with high-latency connections
             request_timeout_seconds: 120,
-            deployment_id: None,
             api_version: None,
         }
     }
diff --git a/src/config/service.rs b/src/config/service.rs
index 26c4b83..5f1c625 100644
--- a/src/config/service.rs
+++ b/src/config/service.rs
@@ -284,15 +284,16 @@ impl ProductionConfigService {
             debug!("ProductionConfigService: Found AZURE_OPENAI_ENDPOINT environment variable");
             app_config.ai.base_url = endpoint;
         }
+        if let Some(version) = self.env_provider.get_var("AZURE_OPENAI_API_VERSION") {
+            debug!("ProductionConfigService: Found AZURE_OPENAI_API_VERSION environment variable");
+            app_config.ai.api_version = Some(version);
+        }
+        // Special handling for Azure OpenAI deployment ID environment variable
         if let Some(deployment) = self.env_provider.get_var("AZURE_OPENAI_DEPLOYMENT_ID") {
             debug!(
                 "ProductionConfigService: Found AZURE_OPENAI_DEPLOYMENT_ID environment variable"
             );
-            app_config.ai.deployment_id = Some(deployment);
-        }
-        if let Some(version) = self.env_provider.get_var("AZURE_OPENAI_API_VERSION") {
-            debug!("ProductionConfigService: Found AZURE_OPENAI_API_VERSION environment variable");
-            app_config.ai.api_version = Some(version);
+            app_config.ai.model = deployment;
         }

         // Validate the configuration
@@ -371,13 +372,6 @@ impl ProductionConfigService {
                 let v = value.parse().unwrap(); // Validation already done
                 config.ai.request_timeout_seconds = v;
             }
-            ["ai", "deployment_id"] => {
-                if !value.is_empty() {
-                    config.ai.deployment_id = Some(value.to_string());
-                } else {
-                    config.ai.deployment_id = None;
-                }
-            }
             ["ai", "api_version"] => {
                 if !value.is_empty() {
                     config.ai.api_version = Some(value.to_string());
diff --git a/src/config/validator.rs b/src/config/validator.rs
index fc7ed32..39734ab 100644
--- a/src/config/validator.rs
+++ b/src/config/validator.rs
@@ -97,17 +97,6 @@ pub fn validate_ai_config(ai_config: &AIConfig) -> Result<()> {
     validate_ai_model(&ai_config.model)?;
     validate_temperature(ai_config.temperature)?;
     validate_positive_number(ai_config.max_tokens as f64)?;
-    if let Some(dep) = &ai_config.deployment_id {
-        if dep.trim().is_empty() {
-            return Err(SubXError::config(
-                "Azure OpenAI deployment_id must not be empty",
-            ));
-        }
-    } else {
-        return Err(SubXError::config(
-            "Azure OpenAI deployment_id is required".to_string(),
-        ));
-    }
     if let Some(ver) = &ai_config.api_version {
         if ver.trim().is_empty() {
             return Err(SubXError::config(
@@ -352,7 +341,7 @@ mod tests {
         let mut ai_config = AIConfig::default();
         ai_config.provider = "azure-openai".to_string();
         ai_config.api_key = Some("azure-key-123".to_string());
-        ai_config.deployment_id = Some("dep123".to_string());
+        ai_config.model = "dep123".to_string();
         ai_config.api_version = Some("2025-04-01-preview".to_string());
         assert!(validate_ai_config(&ai_config).is_ok());
     }
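
The environment-variable path mirrors the file path: `AZURE_OPENAI_DEPLOYMENT_ID` is still read, but it now overwrites `ai.model` instead of a dedicated field, and config validation no longer requires a `deployment_id`. A self-contained sketch of that override order, using a plain `HashMap` as a stand-in for the crate's env provider (the type and function names here are illustrative, not subx-cli APIs):

```rust
// Hedged sketch of the override behaviour shown in service.rs above: the
// AZURE_OPENAI_DEPLOYMENT_ID variable now lands in `ai.model` rather than a
// dedicated field. `EnvLookup` and `apply_azure_env` are illustrative
// stand-ins, not the crate's real `env_provider` API.
use std::collections::HashMap;

#[derive(Debug, Default)]
struct AiSettings {
    model: String,
    api_version: Option<String>,
}

type EnvLookup = HashMap<&'static str, String>;

fn apply_azure_env(env: &EnvLookup, ai: &mut AiSettings) {
    if let Some(version) = env.get("AZURE_OPENAI_API_VERSION") {
        ai.api_version = Some(version.clone());
    }
    // The deployment ID variable is still honoured, but it overrides `model`.
    if let Some(deployment) = env.get("AZURE_OPENAI_DEPLOYMENT_ID") {
        ai.model = deployment.clone();
    }
}

fn main() {
    let mut env = EnvLookup::new();
    env.insert("AZURE_OPENAI_DEPLOYMENT_ID", "my-gpt4o-deployment".to_string());
    env.insert("AZURE_OPENAI_API_VERSION", "2025-04-01-preview".to_string());

    let mut ai = AiSettings::default();
    apply_azure_env(&env, &mut ai);
    assert_eq!(ai.model, "my-gpt4o-deployment");
    assert_eq!(ai.api_version.as_deref(), Some("2025-04-01-preview"));
}
```
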
diff --git a/src/core/factory.rs b/src/core/factory.rs
index 5176501..c782cbc 100644
--- a/src/core/factory.rs
+++ b/src/core/factory.rs
@@ -340,9 +340,8 @@ mod tests {
         let mut config = crate::config::Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("azure-key-123".to_string());
-        config.ai.deployment_id = Some("dep123".to_string());
+        config.ai.model = "dep123".to_string();
         config.ai.api_version = Some("2025-04-01-preview".to_string());
-        config.ai.model = "gpt-test".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
         let result = create_ai_provider(&config.ai);
         assert!(result.is_ok());
diff --git a/src/services/ai/azure_openai.rs b/src/services/ai/azure_openai.rs
index ba1c265..d57074b 100644
--- a/src/services/ai/azure_openai.rs
+++ b/src/services/ai/azure_openai.rs
@@ -12,7 +12,7 @@ use reqwest::Client;
 use serde_json::{Value, json};
 use std::time::Duration;
 use tokio::time;
-use url::Url;
+use url::{ParseError, Url};

 /// Azure OpenAI client implementation
 #[derive(Debug)]
@@ -21,7 +21,6 @@ pub struct AzureOpenAIClient {
     api_key: String,
     model: String,
     base_url: String,
-    deployment_id: String,
     api_version: String,
     temperature: f32,
     max_tokens: u32,
@@ -39,7 +38,6 @@ impl AzureOpenAIClient {
         api_key: String,
         model: String,
         base_url: String,
-        deployment_id: String,
         api_version: String,
         temperature: f32,
         max_tokens: u32,
@@ -56,7 +54,6 @@ impl AzureOpenAIClient {
             api_key,
             model,
             base_url: base_url.trim_end_matches('/').to_string(),
-            deployment_id,
             api_version,
             temperature,
             max_tokens,
@@ -74,34 +71,43 @@ impl AzureOpenAIClient {
             .filter(|key| !key.trim().is_empty())
             .ok_or_else(|| SubXError::config("Missing Azure OpenAI API Key".to_string()))?
             .clone();
-        let deployment_id = config
-            .deployment_id
-            .clone()
-            .ok_or_else(|| SubXError::config("Missing Azure OpenAI deployment ID".to_string()))?;
+        // Use the model value as the deployment identifier; ensure it's provided
+        let deployment_name = config.model.clone();
+        if deployment_name.trim().is_empty() {
+            return Err(SubXError::config(
+                "Missing Azure OpenAI deployment name in model field".to_string(),
+            ));
+        }
         let api_version = config
             .api_version
             .clone()
             .unwrap_or_else(|| DEFAULT_AZURE_API_VERSION.to_string());

-        // Validate base URL format
-        let parsed = Url::parse(&config.base_url)
-            .map_err(|e| SubXError::config(format!("Invalid Azure OpenAI endpoint: {}", e)))?;
+        // Validate base URL format, handle missing host specially
+        let parsed = match Url::parse(&config.base_url) {
+            Ok(u) => u,
+            Err(ParseError::EmptyHost) => {
+                return Err(SubXError::config(
+                    "Azure OpenAI endpoint missing host".to_string(),
+                ));
+            }
+            Err(e) => {
+                return Err(SubXError::config(format!(
+                    "Invalid Azure OpenAI endpoint: {}",
+                    e
+                )));
+            }
+        };
         if !matches!(parsed.scheme(), "http" | "https") {
             return Err(SubXError::config(
                 "Azure OpenAI endpoint must use http or https".to_string(),
             ));
         }
-        if parsed.host().is_none() {
-            return Err(SubXError::config(
-                "Azure OpenAI endpoint missing host".to_string(),
-            ));
-        }

         Ok(Self::new_with_all(
             api_key,
             config.model.clone(),
             config.base_url.clone(),
-            deployment_id,
             api_version,
             config.temperature,
             config.max_tokens,
@@ -163,7 +169,7 @@ impl AzureOpenAIClient {
     async fn chat_completion(&self, messages: Vec<Value>) -> crate::Result<String> {
         let url = format!(
             "{}/openai/deployments/{}/chat/completions?api-version={}",
-            self.base_url, self.deployment_id, self.api_version
+            self.base_url, self.model, self.api_version
         );
         let mut req = self
             .client
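
With the field gone, the deployment segment of the request URL comes straight from `model`, and the constructor trims any trailing slash off the endpoint. A small sketch of the resulting URL shape (the helper name is made up; the path layout matches the `format!` call in the hunk above):

```rust
// Hedged sketch of the endpoint shape used by chat_completion above; the
// helper name is illustrative, but the path layout follows the diff:
// the `model` value is used as the Azure deployment segment.
fn azure_chat_completions_url(base_url: &str, model: &str, api_version: &str) -> String {
    format!(
        "{}/openai/deployments/{}/chat/completions?api-version={}",
        base_url.trim_end_matches('/'), // the constructor also trims a trailing slash
        model,
        api_version
    )
}

fn main() {
    let url = azure_chat_completions_url(
        "https://example.openai.azure.com/",
        "my-gpt4o-deployment",
        "2025-04-01-preview",
    );
    assert_eq!(
        url,
        "https://example.openai.azure.com/openai/deployments/my-gpt4o-deployment/chat/completions?api-version=2025-04-01-preview"
    );
}
```
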
@@ -241,69 +247,63 @@ mod tests {
     use crate::config::Config;

     #[test]
-    fn test_azure_openai_client_creation_success() {
+    fn test_azure_openai_from_config_and_url_construction() {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
         config.ai.api_version = Some("2025-04-01-preview".to_string());

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(
-            result.is_ok(),
-            "Failed to create Azure OpenAI client: {:?}",
-            result.err()
+        let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
+        let url = format!(
+            "{}/openai/deployments/{}/chat/completions?api-version={}",
+            client.base_url, client.model, client.api_version
         );
+        assert!(url.contains("deployment-name"));
     }

     #[test]
-    fn test_azure_openai_client_creation_with_defaults() {
+    fn test_missing_model_error() {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
-        // api_version will default to DEFAULT_AZURE_API_VERSION

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(
-            result.is_ok(),
-            "Failed to create Azure OpenAI client with defaults: {:?}",
-            result.err()
-        );
+        let err = AzureOpenAIClient::from_config(&config.ai)
+            .unwrap_err()
+            .to_string();
+        assert!(err.contains("Missing Azure OpenAI deployment name in model field"));
     }

     #[test]
-    fn test_azure_openai_client_missing_api_key() {
+    fn test_azure_openai_client_creation_with_defaults() {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
-        config.ai.api_key = None;
-        config.ai.model = "gpt-test".to_string();
+        config.ai.api_key = Some("test-api-key".to_string());
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
+        // api_version defaults to DEFAULT_AZURE_API_VERSION

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_err());
-        let error_msg = result.err().unwrap().to_string();
-        assert!(error_msg.contains("Missing Azure OpenAI API Key"));
+        let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
+        assert_eq!(
+            client.api_version,
+            super::DEFAULT_AZURE_API_VERSION.to_string()
+        );
     }

     #[test]
-    fn test_azure_openai_client_missing_deployment_id() {
+    fn test_azure_openai_client_missing_api_key() {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
-        config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.api_key = None;
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = None;

         let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_err());
-        let error_msg = result.err().unwrap().to_string();
-        assert!(error_msg.contains("Missing Azure OpenAI deployment ID"));
+        let err = result.unwrap_err().to_string();
+        assert!(err.contains("Missing Azure OpenAI API Key"));
     }

     #[test]
@@ -311,14 +311,12 @@
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "invalid-url".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());

         let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_err());
-        let error_msg = result.err().unwrap().to_string();
-        assert!(error_msg.contains("Invalid Azure OpenAI endpoint"));
+        let err = result.unwrap_err().to_string();
+        assert!(err.contains("Invalid Azure OpenAI endpoint"));
     }

     #[test]
@@ -326,14 +324,12 @@
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "ftp://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());

         let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_err());
-        let error_msg = result.err().unwrap().to_string();
-        assert!(error_msg.contains("must use http or https"));
+        let err = result.unwrap_err().to_string();
+        assert!(err.contains("must use http or https"));
     }

     #[test]
@@ -341,33 +337,29 @@
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());

         let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_err());
-        let error_msg = result.err().unwrap().to_string();
-        // Print the actual error message for debugging
-        println!("Actual error message: {}", error_msg);
-        assert!(error_msg.contains("empty host") || error_msg.contains("missing host"));
+        let err = result.unwrap_err().to_string();
+        assert!(err.contains("missing host"));
     }

     #[test]
-    fn test_azure_openai_with_custom_deployment_and_version() {
-        let mock_deployment = "custom-deployment-123";
+    fn test_azure_openai_with_custom_model_and_version() {
+        let mock_model = "custom-model-123";
         let mock_version = "2023-12-01-preview";

         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock_model.to_string();
         config.ai.base_url = "https://custom.openai.azure.com".to_string();
-        config.ai.deployment_id = Some(mock_deployment.to_string());
         config.ai.api_version = Some(mock_version.to_string());

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_ok());
+        let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
+        assert_eq!(client.model, mock_model);
+        assert_eq!(client.api_version, mock_version);
     }

     #[test]
@@ -375,12 +367,14 @@
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com/".to_string(); // Trailing slash
-        config.ai.deployment_id = Some("test-deployment".to_string());

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_ok(), "Should handle trailing slash in base URL");
+        let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
+        assert_eq!(
+            client.base_url,
+            "https://example.openai.azure.com".to_string()
+        );
     }

     #[test]
@@ -388,30 +382,31 @@ mod tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
         config.ai.temperature = 0.8;
         config.ai.max_tokens = 2000;

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_ok());
+        let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
+        assert!((client.temperature - 0.8).abs() < f32::EPSILON);
+        assert_eq!(client.max_tokens, 2000);
     }

     #[test]
-    fn test_azure_openai_with_custom_retry_settings() {
+    fn test_azure_openai_with_custom_retry_and_timeout() {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
         config.ai.retry_attempts = 5;
         config.ai.retry_delay_ms = 2000;
         config.ai.request_timeout_seconds = 180;

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_ok());
+        let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
+        assert_eq!(client.retry_attempts, 5);
+        assert_eq!(client.retry_delay_ms, 2000);
+        assert_eq!(client.request_timeout_seconds, 180);
     }

     #[test]
@@ -420,7 +415,6 @@ let client = AzureOpenAIClient::new_with_all(
             "test-api-key".to_string(),
             "gpt-test".to_string(),
             "https://example.openai.azure.com".to_string(),
-            "test-deployment".to_string(),
             "2025-04-01-preview".to_string(),
             0.7,
             4000,
@@ -428,8 +422,6 @@
             1000,
             120,
         );
-
-        // Just verify the client was created successfully
         assert!(format!("{:?}", client).contains("AzureOpenAIClient"));
     }

@@ -438,27 +430,13 @@
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("".to_string()); // Empty string
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "deployment-name".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());

-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_err());
-        let error_msg = result.err().unwrap().to_string();
-        assert!(error_msg.contains("Missing Azure OpenAI API Key"));
-    }
-
-    #[test]
-    fn test_azure_openai_valid_http_url() {
-        let mut config = Config::default();
-        config.ai.provider = "azure-openai".to_string();
-        config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
-        config.ai.base_url = "http://localhost:8080".to_string(); // HTTP for local testing
-        config.ai.deployment_id = Some("test-deployment".to_string());
-
-        let result = AzureOpenAIClient::from_config(&config.ai);
-        assert!(result.is_ok(), "Should accept HTTP URLs for local testing");
+        let err = AzureOpenAIClient::from_config(&config.ai)
+            .unwrap_err()
+            .to_string();
+        assert!(err.contains("Missing Azure OpenAI API Key"));
     }

 }
diff --git a/src/services/ai/openai.rs b/src/services/ai/openai.rs
index 72de7d3..42f4c61 100644
--- a/src/services/ai/openai.rs
+++ b/src/services/ai/openai.rs
@@ -143,7 +143,6 @@ mod tests {
             retry_attempts: 2,
             retry_delay_ms: 150,
             request_timeout_seconds: 60,
-            deployment_id: None,
             api_version: None,
         };
         let client = OpenAIClient::from_config(&config).unwrap();
@@ -166,7 +165,6 @@ mod tests {
             retry_attempts: 2,
             retry_delay_ms: 150,
             request_timeout_seconds: 30,
-            deployment_id: None,
             api_version: None,
         };
         let err = OpenAIClient::from_config(&config).unwrap_err();
diff --git a/src/services/ai/openrouter.rs b/src/services/ai/openrouter.rs
index b5f8c4c..8a8879b 100644
--- a/src/services/ai/openrouter.rs
+++ b/src/services/ai/openrouter.rs
@@ -514,7 +514,6 @@ mod tests {
             retry_attempts: 3,
             retry_delay_ms: 150,
             request_timeout_seconds: 120,
-            deployment_id: None,
             api_version: None,
         };

@@ -540,7 +539,6 @@ mod tests {
             retry_attempts: 2,
             retry_delay_ms: 100,
             request_timeout_seconds: 30,
-            deployment_id: None,
             api_version: None,
         };

@@ -568,7 +566,6 @@ mod tests {
             retry_attempts: 2,
             retry_delay_ms: 100,
             request_timeout_seconds: 30,
-            deployment_id: None,
             api_version: None,
         };

diff --git a/tests/azure_openai_api_integration_tests.rs b/tests/azure_openai_api_integration_tests.rs
index 616c2f4..c8d55ff 100644
--- a/tests/azure_openai_api_integration_tests.rs
+++ b/tests/azure_openai_api_integration_tests.rs
@@ -46,9 +46,8 @@ mod azure_openai_api_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -75,9 +74,8 @@ mod azure_openai_api_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -104,9 +102,8 @@ mod azure_openai_api_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -129,9 +126,8 @@ mod azure_openai_api_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("Bearer test-token".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -155,9 +151,8 @@ mod azure_openai_api_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -183,9 +178,8 @@ mod azure_openai_factory_tests {
         let mut config = config_service.config_mut();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "test-deployment".to_string();
         config.ai.base_url = "https://example.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
         config.ai.api_version = Some("2025-04-01-preview".to_string());
     }

diff --git a/tests/azure_openai_deployment_tests.rs b/tests/azure_openai_deployment_tests.rs
index 6f2103e..a392301 100644
--- a/tests/azure_openai_deployment_tests.rs
+++ b/tests/azure_openai_deployment_tests.rs
@@ -37,9 +37,8 @@ mod azure_openai_deployment_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = custom_deployment.to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(custom_deployment.to_string());
         config.ai.api_version = Some(custom_version.to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -65,9 +64,8 @@ mod azure_openai_deployment_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -95,9 +93,8 @@ mod azure_openai_deployment_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());
         config.ai.retry_attempts = 1; // Single retry attempt
         config.ai.retry_delay_ms = 5; // Very short delay
@@ -124,9 +121,8 @@ mod azure_openai_deployment_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());
         config.ai.request_timeout_seconds = 5; // Minimal timeout

@@ -151,9 +147,8 @@ mod azure_openai_deployment_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -178,9 +173,8 @@ mod azure_openai_deployment_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("Bearer sk-test123".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
diff --git a/tests/azure_openai_environment_tests.rs b/tests/azure_openai_environment_tests.rs
index f890721..c445dd8 100644
--- a/tests/azure_openai_environment_tests.rs
+++ b/tests/azure_openai_environment_tests.rs
@@ -25,7 +25,7 @@ mod azure_openai_environment_tests {
         assert_eq!(config.ai.provider, "azure-openai");
         assert_eq!(config.ai.api_key, Some("test-azure-api-key".to_string()));
         assert_eq!(config.ai.base_url, "https://test.openai.azure.com");
-        assert_eq!(config.ai.deployment_id, Some("test-deployment".to_string()));
+        assert_eq!(config.ai.model, "test-deployment".to_string());
         assert_eq!(
             config.ai.api_version,
             Some("2025-01-01-preview".to_string())
diff --git a/tests/azure_openai_error_handling_tests.rs b/tests/azure_openai_error_handling_tests.rs
index 3c7d5fe..3509fdb 100644
--- a/tests/azure_openai_error_handling_tests.rs
+++ b/tests/azure_openai_error_handling_tests.rs
@@ -33,9 +33,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -61,9 +60,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());
         config.ai.retry_attempts = 3;
         config.ai.retry_delay_ms = 10; // Fast retry for testing
@@ -94,9 +92,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());
         config.ai.retry_attempts = 3;
         config.ai.retry_delay_ms = 10; // Fast retry for testing
@@ -124,9 +121,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());
         config.ai.request_timeout_seconds = 1; // 1 second timeout
         config.ai.retry_attempts = 0; // No retries to test timeout directly
@@ -148,9 +144,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -172,9 +167,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -196,9 +190,8 @@ mod azure_openai_error_handling_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = "test-deployment".to_string();
         config.ai.base_url = "https://invalid-nonexistent-host-12345.openai.azure.com".to_string();
-        config.ai.deployment_id = Some("test-deployment".to_string());
         config.ai.retry_attempts = 1; // Quick fail for testing
         config.ai.retry_delay_ms = 10; // Fast retry
         config.ai.request_timeout_seconds = 1; // Short timeout
diff --git a/tests/azure_openai_response_parsing_tests.rs b/tests/azure_openai_response_parsing_tests.rs
index 4bd0e5c..fe7abcf 100644
--- a/tests/azure_openai_response_parsing_tests.rs
+++ b/tests/azure_openai_response_parsing_tests.rs
@@ -53,9 +53,8 @@ mod azure_openai_parsing_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -83,9 +82,8 @@ mod azure_openai_parsing_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
@@ -120,9 +118,8 @@ mod azure_openai_parsing_tests {
         let mut config = Config::default();
         config.ai.provider = "azure-openai".to_string();
         config.ai.api_key = Some("test-api-key".to_string());
-        config.ai.model = "gpt-test".to_string();
+        config.ai.model = mock.deployment_id().to_string();
         config.ai.base_url = mock.base_url();
-        config.ai.deployment_id = Some(mock.deployment_id().to_string());
         config.ai.api_version = Some(mock.api_version().to_string());

         let client = AzureOpenAIClient::from_config(&config.ai).unwrap();
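
For existing configuration files, the only manual step this change implies is moving the old `deployment_id` value into `model`. A hedged sketch of a one-off migration helper, assuming the `toml` crate is available; nothing like this ships with subx-cli, it is only meant to illustrate the mapping:

```rust
// Hedged sketch of a hypothetical migration for existing config files, since
// the [ai] table no longer has a deployment_id key. Assumes the `toml` crate;
// the function name and error handling are illustrative, not subx-cli APIs.
fn migrate_ai_table(old: &str) -> Result<String, toml::de::Error> {
    let mut doc: toml::value::Table = toml::from_str(old)?;
    if let Some(ai) = doc.get_mut("ai").and_then(|v| v.as_table_mut()) {
        // Move the old deployment_id value into model, then drop the old key.
        if let Some(dep) = ai.remove("deployment_id") {
            ai.insert("model".to_string(), dep);
        }
    }
    Ok(toml::to_string(&doc).expect("a table that was just parsed can be re-serialized"))
}

fn main() {
    let old = r#"
[ai]
provider = "azure-openai"
api_key = "your-azure-api-key"
base_url = "https://your-resource.openai.azure.com"
deployment_id = "your-deployment-id"
api_version = "2025-04-01-preview"
"#;
    let migrated = migrate_ai_table(old).unwrap();
    assert!(migrated.contains("model = \"your-deployment-id\""));
    assert!(!migrated.contains("deployment_id"));
    println!("{migrated}");
}
```
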