diff --git a/parser/llguidance.h b/parser/llguidance.h
index d6c51ec0..6646c1cc 100644
--- a/parser/llguidance.h
+++ b/parser/llguidance.h
@@ -242,6 +242,11 @@ int32_t llg_compute_mask(struct LlgConstraint *cc, struct LlgMaskResult *res_p);
  */
 int32_t llg_commit_token(struct LlgConstraint *cc, LlgToken token, struct LlgCommitResult *res_p);
 
+/**
+ * Clone the constraint
+ */
+struct LlgConstraint *llg_clone_constraint(const struct LlgConstraint *cc);
+
 /**
  * Construct a new tokenizer from the given TokenizerInit
  */
@@ -249,6 +254,12 @@ struct LlgTokenizer *llg_new_tokenizer(const struct LlgTokenizerInit *tok_init,
                                        char *error_string,
                                        size_t error_string_len);
 
+/**
+ * Clone a tokenizer.
+ * This increments a reference count and does a small allocation.
+ */
+struct LlgTokenizer *llg_clone_tokenizer(const struct LlgTokenizer *tok);
+
 /**
  * Tokenize the given bytes and return the tokens.
  * Always returns the number of tokens that would be written to output_tokens
diff --git a/parser/src/ffi.rs b/parser/src/ffi.rs
index e29ec5dc..69b89bab 100644
--- a/parser/src/ffi.rs
+++ b/parser/src/ffi.rs
@@ -210,6 +210,7 @@ pub struct LlgConstraintInit {
     pub limits: ParserLimits,
 }
 
+#[derive(Clone)]
 pub struct LlgConstraint {
     local_error: Option<String>,
     last_logs: String,
@@ -511,6 +512,12 @@ pub extern "C" fn llg_commit_token(
     cc.get_error_code()
 }
 
+/// Clone the constraint
+#[no_mangle]
+pub extern "C" fn llg_clone_constraint(cc: &LlgConstraint) -> *mut LlgConstraint {
+    Box::into_raw(Box::new(cc.clone()))
+}
+
 /// Construct a new tokenizer from the given TokenizerInit
 #[no_mangle]
 pub extern "C" fn llg_new_tokenizer(
@@ -535,6 +542,15 @@ pub extern "C" fn llg_new_tokenizer(
     }
 }
 
+/// Clone a tokenizer.
+/// This increments a reference count and does a small allocation.
+#[no_mangle]
+pub extern "C" fn llg_clone_tokenizer(tok: &LlgTokenizer) -> *mut LlgTokenizer {
+    Box::into_raw(Box::new(LlgTokenizer {
+        token_env: tok.token_env.clone(),
+    }))
+}
+
 /// Tokenize the given bytes and return the tokens.
 /// Always returns the number of tokens that would be written to output_tokens
 /// if output_tokens_len was large enough.