# Upload fine-tuned image-captioning models (BLIP, ViT-GPT2, GIT) to the
# Hugging Face Hub. Each push_* function loads a locally saved model and its
# preprocessing artifacts, then pushes both to the corresponding Hub repo.
from transformers import (
AutoTokenizer,
BlipForConditionalGeneration,
BlipProcessor,
GitForCausalLM,
GitProcessor,
VisionEncoderDecoderModel,
ViTImageProcessor,
)
def push_blip(
    local_dir: str = "saved_model_phase2",
    repo_id: str = "pchandragrid/blip-caption-model",
) -> None:
    """Load the locally saved BLIP captioning model and processor from
    *local_dir* and push both artifacts to the Hub repo *repo_id*."""
    artifacts = (
        BlipForConditionalGeneration.from_pretrained(local_dir),
        BlipProcessor.from_pretrained(local_dir),
    )
    for artifact in artifacts:
        artifact.push_to_hub(repo_id)
def push_vit_gpt2(
    local_dir: str = "saved_vit_gpt2",
    repo_id: str = "pchandragrid/vit-gpt2-caption-model",
) -> None:
    """Load the locally saved ViT-GPT2 encoder-decoder model, its image
    processor, and its tokenizer from *local_dir*, then push all three
    artifacts to the Hub repo *repo_id*."""
    artifacts = (
        VisionEncoderDecoderModel.from_pretrained(local_dir),
        ViTImageProcessor.from_pretrained(local_dir),
        AutoTokenizer.from_pretrained(local_dir),
    )
    for artifact in artifacts:
        artifact.push_to_hub(repo_id)
def push_git(
    local_dir: str = "saved_git_model",
    repo_id: str = "pchandragrid/git-caption-model",
) -> None:
    """Load the locally saved GIT captioning model and processor from
    *local_dir* and push both artifacts to the Hub repo *repo_id*."""
    artifacts = (
        GitForCausalLM.from_pretrained(local_dir),
        GitProcessor.from_pretrained(local_dir),
    )
    for artifact in artifacts:
        artifact.push_to_hub(repo_id)
if __name__ == "__main__":
    # Push all three fine-tuned captioning models to the Hugging Face Hub.
    # Fix: removed the stray trailing " |" table-extraction artifact that
    # followed the final print() call and made the file a SyntaxError.
    push_blip()
    push_vit_gpt2()
    push_git()
    print("Uploaded: BLIP, ViT-GPT2, and GIT models.")