"""Fill-mask inference example using an OpenVINO-optimized BERT model.

Loads a masked-language model via optimum-intel's OVModelForMaskedLM,
wraps it in a transformers `fill-mask` pipeline, and prints the top
predictions for a single masked sentence.
"""

from optimum.intel import OVModelForMaskedLM
from transformers import AutoTokenizer, pipeline

# model_id should be set to either a local directory or a model available
# on the HuggingFace hub.
model_id = "helenai/google-bert-bert-base-uncased-ov"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = OVModelForMaskedLM.from_pretrained(model_id)

# The pipeline fills in `tokenizer.mask_token` (e.g. "[MASK]" for BERT)
# with the model's highest-scoring candidate tokens.
pipe = pipeline("fill-mask", model=model, tokenizer=tokenizer)
result = pipe(f"I am a {tokenizer.mask_token} model")
print(result)