src/llama_cpp_agent/providers — 1 file changed, +2 -2

@@ -151,7 +151,7 @@ def create_completion(
         if grammar in self.grammar_cache:
             grammar = self.grammar_cache[grammar]
         else:
-            self.grammar_cache[grammar] = LlamaGrammar.from_string(grammar)
+            self.grammar_cache[grammar] = LlamaGrammar.from_string(grammar, verbose=self.llama_model.verbose)
             grammar = self.grammar_cache[grammar]

         settings_dictionary = deepcopy(settings.as_dict())
@@ -179,7 +179,7 @@ def create_chat_completion(
         if grammar in self.grammar_cache:
             grammar = self.grammar_cache[grammar]
         else:
-            self.grammar_cache[grammar] = LlamaGrammar.from_string(grammar)
+            self.grammar_cache[grammar] = LlamaGrammar.from_string(grammar, verbose=self.llama_model.verbose)
             grammar = self.grammar_cache[grammar]
         settings_dictionary = deepcopy(settings.as_dict())
         settings_dictionary["max_tokens"] = settings_dictionary.pop("n_predict")
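
For context, both hunks touch the same grammar-caching pattern: compile each grammar string once with llama-cpp-python's LlamaGrammar.from_string, pass the model's verbose flag through so grammar-parsing output follows the model's own verbosity, and reuse the compiled object on later calls. Below is a minimal sketch of that pattern; it assumes llama-cpp-python's Llama and LlamaGrammar APIs, and the provider class and helper names are illustrative, not the repository's actual layout.

from llama_cpp import Llama, LlamaGrammar


class LlamaCppPythonProvider:
    """Illustrative wrapper around a llama-cpp-python model (names assumed)."""

    def __init__(self, llama_model: Llama):
        self.llama_model = llama_model
        self.grammar_cache: dict[str, LlamaGrammar] = {}

    def _compiled_grammar(self, grammar: str) -> LlamaGrammar:
        # Compile each GBNF grammar string once and reuse the compiled object
        # on subsequent completion calls.
        if grammar not in self.grammar_cache:
            # Propagate the model's verbose flag so grammar parsing only logs
            # when the model itself is verbose (the change made in this diff).
            self.grammar_cache[grammar] = LlamaGrammar.from_string(
                grammar, verbose=self.llama_model.verbose
            )
        return self.grammar_cache[grammar]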