Skip to content

Commit dad315c

Browse files
authored
feat: use oci as default when available (#201)
1 parent cb74428 commit dad315c

File tree

2 files changed

+5
-8
lines changed

2 files changed

+5
-8
lines changed

skynet/modules/ttt/llm_selector.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
oci_model_id,
1919
oci_service_endpoint,
2020
openai_api_base_url,
21-
use_oci,
2221
)
2322
from skynet.logs import get_logger
2423
from skynet.modules.ttt.summaries.v1.models import Processors
@@ -48,11 +47,10 @@ def get_job_processor(customer_id: str, job_id: Optional[str] = None) -> Process
4847
elif api_type == CredentialsType.AZURE_OPENAI.value:
4948
return Processors.AZURE
5049

51-
# OCI doesn't have a secret since it's provisioned for the instance as a whole.
52-
if use_oci or api_type == CredentialsType.OCI.value:
53-
if oci_available:
54-
return Processors.OCI
55-
log.warning(f'OCI is not available, falling back to local processing for customer {customer_id}')
50+
if oci_available:
51+
return Processors.OCI
52+
53+
log.warning(f'OCI is not available, falling back to local processing for customer {customer_id}')
5654

5755
return Processors.LOCAL
5856

skynet/modules/ttt/processor.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212

1313
from skynet.constants import response_prefix
1414

15-
from skynet.env import llama_n_ctx, use_oci
15+
from skynet.env import llama_n_ctx, modules, use_oci
1616
from skynet.logs import get_logger
1717
from skynet.modules.ttt.assistant.constants import assistant_rag_question_extractor
1818
from skynet.modules.ttt.assistant.utils import get_assistant_chat_messages
@@ -32,7 +32,6 @@
3232
summary_text,
3333
)
3434
from skynet.modules.ttt.summaries.v1.models import DocumentPayload, HintType, Job, JobType, Processors
35-
from skynet.env import modules
3635

3736
log = get_logger(__name__)
3837

0 commit comments

Comments (0)