I'm getting the error below when loading the DeepFloyd IF stage 1 pipeline with diffusers.
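For reference, this is the stage 1 setup cell that triggers it (cell lines 1-2 are not visible in the traceback; I'm assuming the `DiffusionPipeline` import):

```python
from diffusers import DiffusionPipeline
import torch

# stage 1
stage_1 = DiffusionPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
stage_1.enable_xformers_memory_efficient_attention()  # remove line if torch.__version__ >= 2.0.0
stage_1.enable_model_cpu_offload()
```

Full traceback: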
UnboundLocalError Traceback (most recent call last)
Cell In[4], line 6
3 import torch
5 # stage 1
----> 6 stage_1 = DiffusionPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
7 stage_1.enable_xformers_memory_efficient_attention() # remove line if torch.__version__ >= 2.0.0
8 stage_1.enable_model_cpu_offload()
File ~/anaconda3/lib/python3.11/site-packages/diffusers/pipelines/pipeline_utils.py:1105, in DiffusionPipeline.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
1102 loaded_sub_model = passed_class_obj[name]
1103 else:
1104 # load sub model
-> 1105 loaded_sub_model = load_sub_model(
1106 library_name=library_name,
1107 class_name=class_name,
1108 importable_classes=importable_classes,
1109 pipelines=pipelines,
1110 is_pipeline_module=is_pipeline_module,
1111 pipeline_class=pipeline_class,
1112 torch_dtype=torch_dtype,
1113 provider=provider,
1114 sess_options=sess_options,
1115 device_map=device_map,
1116 max_memory=max_memory,
1117 offload_folder=offload_folder,
1118 offload_state_dict=offload_state_dict,
1119 model_variants=model_variants,
1120 name=name,
1121 from_flax=from_flax,
1122 variant=variant,
1123 low_cpu_mem_usage=low_cpu_mem_usage,
1124 cached_folder=cached_folder,
1125 )
1126 logger.info(
1127 f"Loaded {name} as {class_name} from {name}
subfolder of {pretrained_model_name_or_path}."
1128 )
1130 init_kwargs[name] = loaded_sub_model # UNet(...), # DiffusionSchedule(...)
File ~/anaconda3/lib/python3.11/site-packages/diffusers/pipelines/pipeline_utils.py:472, in load_sub_model(library_name, class_name, importable_classes, pipelines, is_pipeline_module, pipeline_class, torch_dtype, provider, sess_options, device_map, max_memory, offload_folder, offload_state_dict, model_variants, name, from_flax, variant, low_cpu_mem_usage, cached_folder)
470 # check if the module is in a subdirectory
471 if os.path.isdir(os.path.join(cached_folder, name)):
--> 472 loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)
473 else:
474 # else load from the root directory
475 loaded_sub_model = load_method(cached_folder, **loading_kwargs)
File ~/anaconda3/lib/python3.11/site-packages/transformers/tokenization_utils_base.py:1854, in PreTrainedTokenizerBase.from_pretrained(cls, pretrained_model_name_or_path, cache_dir, force_download, local_files_only, token, revision, *init_inputs, **kwargs)
1851 else:
1852 logger.info(f"loading file {file_path} from cache at {resolved_vocab_files[file_id]}")
-> 1854 return cls._from_pretrained(
1855 resolved_vocab_files,
1856 pretrained_model_name_or_path,
1857 init_configuration,
1858 *init_inputs,
1859 token=token,
1860 cache_dir=cache_dir,
1861 local_files_only=local_files_only,
1862 _commit_hash=commit_hash,
1863 _is_local=is_local,
1864 **kwargs,
1865 )
File ~/anaconda3/lib/python3.11/site-packages/transformers/tokenization_utils_base.py:2017, in PreTrainedTokenizerBase._from_pretrained(cls, resolved_vocab_files, pretrained_model_name_or_path, init_configuration, token, cache_dir, local_files_only, _commit_hash, _is_local, *init_inputs, **kwargs)
2015 # Instantiate tokenizer.
2016 try:
-> 2017 tokenizer = cls(*init_inputs, **init_kwargs)
2018 except OSError:
2019 raise OSError(
2020 "Unable to load vocabulary from file. "
2021 "Please check that the provided vocabulary is accessible and not corrupted."
2022 )
File ~/anaconda3/lib/python3.11/site-packages/transformers/models/t5/tokenization_t5.py:194, in T5Tokenizer.__init__(self, vocab_file, eos_token, unk_token, pad_token, extra_ids, additional_special_tokens, sp_model_kwargs, legacy, **kwargs)
191 self.vocab_file = vocab_file
192 self._extra_ids = extra_ids
--> 194 self.sp_model = self.get_spm_processor()
File ~/anaconda3/lib/python3.11/site-packages/transformers/models/t5/tokenization_t5.py:200, in T5Tokenizer.get_spm_processor(self)
198 with open(self.vocab_file, "rb") as f:
199 sp_model = f.read()
--> 200 model_pb2 = import_protobuf()
201 model = model_pb2.ModelProto.FromString(sp_model)
202 if not self.legacy:
File ~/anaconda3/lib/python3.11/site-packages/transformers/convert_slow_tokenizer.py:40, in import_protobuf()
38 else:
39 from transformers.utils import sentencepiece_model_pb2_new as sentencepiece_model_pb2
---> 40 return sentencepiece_model_pb2
UnboundLocalError: cannot access local variable 'sentencepiece_model_pb2' where it is not associated with a value
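The last frame suggests `sentencepiece_model_pb2` is only assigned inside a conditional in `import_protobuf()` (presumably guarded by a protobuf availability check), so when none of those branches run, the `return` on line 40 references a name that was never bound. A minimal sketch of that Python failure mode, with hypothetical names rather than the actual transformers code:

```python
def import_protobuf_sketch(protobuf_available: bool):
    # The name is only bound when the branch runs; there is no fallback assignment.
    if protobuf_available:
        sentencepiece_model_pb2 = object()  # stand-in for the real module import
    # When protobuf_available is False, the local was never assigned,
    # so this return raises the same UnboundLocalError.
    return sentencepiece_model_pb2


import_protobuf_sketch(False)
# UnboundLocalError: cannot access local variable 'sentencepiece_model_pb2'
# where it is not associated with a value
```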