diff --git a/parser/src/constraint.rs b/parser/src/constraint.rs
index f7d38462..079dbaec 100644
--- a/parser/src/constraint.rs
+++ b/parser/src/constraint.rs
@@ -2,6 +2,7 @@ use anyhow::{ensure, Result};
 use toktrie::{Splice, StepArg, StepResult, TokenId};
 
 use crate::{
+    loginfo,
     output::{ParserOutput, Reporter},
     TokenParser,
 };
@@ -115,6 +116,16 @@ impl Constraint {
     /// The splice is never returned when ff_tokens are disabled in InferenceCapabilities.
     /// After this returns, commit_token() must be called with the sampled token if any.
     pub fn compute_mask(&mut self) -> Result<&StepResult> {
+        loginfo!(
+            self.parser.logger,
+            "\ncompute_mask() {}",
+            if self.delayed_stop {
+                "delayed stop"
+            } else {
+                ""
+            }
+        );
+
         if !self.started {
             self.started = true;
             self.parser.start_without_prompt();
@@ -149,6 +160,8 @@ impl Constraint {
     /// This commits the sampled token (if any), and sees if this forces any more tokens
     /// on the output (if ff_tokens are enabled in InferenceCapabilities).
     pub fn commit_token(&mut self, sampled_token: Option<TokenId>) -> Result<CommitResult> {
+        loginfo!(self.parser.logger, "\ncommit_token({:?})", sampled_token);
+
         ensure!(
             self.step_arg.is_none(),
             "commit_token() called twice or without compute_bias()"
diff --git a/parser/src/tokenparser.rs b/parser/src/tokenparser.rs
index 400af7ea..8fa72c3f 100644
--- a/parser/src/tokenparser.rs
+++ b/parser/src/tokenparser.rs
@@ -306,8 +306,6 @@ impl TokenParser {
             None
         };
 
-        infoln!(self, "\n");
-
         let r = self.mid_process_inner(arg);
 
         if self.test_trace {
@@ -361,7 +359,12 @@ impl TokenParser {
 
         infoln!(
             self,
-            "post tokens: bt={} {}",
+            "{}: bt={} {}",
+            if self.no_bias_this_mid_process {
+                "commit_token"
+            } else {
+                "compute_mask"
+            },
             arg.backtrack,
             trie.tokens_dbg(&arg.tokens)
         );
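
Hedged sketch (not part of the patch above): the calling convention the new log lines trace, as described by the doc comments in constraint.rs — compute_mask() first, then commit_token() with the sampled token, if any. Only those two calls come from the Constraint API shown in the diff; the `sample` closure and `step_once` helper are hypothetical stand-ins for whatever the embedding inference engine does with the returned mask, and `Constraint`/`CommitResult` are assumed to be in scope from this crate.

    use anyhow::Result;
    use toktrie::{StepResult, TokenId};

    // One decode step. With the patch applied, compute_mask() emits
    // "\ncompute_mask() ..." and commit_token() emits "\ncommit_token(..)"
    // via loginfo!, so each step shows up as a pair of log entries.
    fn step_once(
        constraint: &mut Constraint,
        // hypothetical sampler: picks a token under the returned mask, or None
        sample: impl FnOnce(&StepResult) -> Option<TokenId>,
    ) -> Result<CommitResult> {
        let mask = constraint.compute_mask()?;
        let sampled = sample(mask);
        constraint.commit_token(sampled)
    }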