/** Maximum number of completion tokens the service may generate.
  * Optional; the description notes a minimum of 0. Serialized into the
  * request payload under the wire name `max_tokens`.
  */
val maxTokens: ServiceParam[Int] = new ServiceParam[Int](
  this, "maxTokens",
  "The maximum number of tokens to generate. Has minimum of 0.",
  isRequired = false) {
  override val payloadName: String = "max_tokens"
}
/** Server-side cache control for the request.
  * 0 = no cache, 1 = prompt-prefix caching enabled, 2 = full cache.
  * Optional; serialized under the wire name `cache_level`.
  */
val cacheLevel: ServiceParam[Int] = new ServiceParam[Int](
  this, "cacheLevel",
  "can be used to disable any server-side caching, 0=no cache, 1=prompt prefix enabled, 2=full cache",
  isRequired = false) {
  override val payloadName: String = "cache_level"
}
// NOTE(review): the previous description text actually described OpenAI's
// frequency_penalty (penalizing by existing token frequency / repeating lines
// verbatim). Corrected to presence_penalty semantics: penalize tokens that
// have appeared at all, encouraging new topics. Range per the description
// is [-2, 2]. Wire name `presence_penalty` is unchanged.
this, "presencePenalty",
"How much to penalize new tokens based on whether they appear in the text so far." +
  " Increases the likelihood of the model to talk about new topics. Has minimum of -2 and maximum of 2.",
isRequired = false) {
  override val payloadName: String = "presence_penalty"
}
// NOTE(review): the previous description text actually described OpenAI's
// presence_penalty (penalizing tokens that merely appear / encouraging new
// topics). Corrected to frequency_penalty semantics: penalize proportionally
// to existing frequency, discouraging verbatim repetition. Range per the
// OpenAI API is [-2, 2]. Wire name `frequency_penalty` is unchanged.
this, "frequencyPenalty",
"How much to penalize new tokens based on their existing frequency in the text so far." +
  " Decreases the likelihood of the model to repeat the same line verbatim. Has minimum of -2 and maximum of 2.",
isRequired = false) {
  override val payloadName: String = "frequency_penalty"
}
// Number of server-side generations from which only the best is returned.
// Streaming of intermediate progress is disabled when best_of > 1; the
// description notes a maximum of 128. Optional; wire name `best_of`.
this, "bestOf",
"How many generations to create server side, and display only the best." +
  " Will not stream intermediate progress if best_of > 1. Has maximum value of 128.",
isRequired = false) {
  override val payloadName: String = "best_of"
}
/** Builds the internal transformer pipeline for this stage.
  *
  * When running on Microsoft Fabric against the default OpenAI endpoint, a
  * best-effort deployment-status check (`getModelStatus`) runs first; any
  * failure is logged as a warning (e.g. when executing in Fabric's system
  * context) and never aborts construction of the transformer.
  *
  * @param schema input DataFrame schema forwarded to the parent implementation
  * @return the pipeline model produced by the superclass
  */
override protected def getInternalTransformer(schema: StructType): PipelineModel = {
  if (PlatformDetails.runningOnFabric() && usingDefaultOpenAIEndpoint) {
    import scala.util.control.NonFatal
    try {
      getModelStatus(getDeploymentName)
    } catch {
      // Was `case e: Throwable`: catching Throwable swallows fatal JVM errors
      // (OutOfMemoryError, InterruptedException, ...). NonFatal keeps the
      // best-effort logging behavior while letting fatal errors propagate.
      case NonFatal(e) => logWarning(
        "Could not get model status, you are likely running in the system context of Fabric", e)
    }
  }
  super.getInternalTransformer(schema)
}
0 commit comments