"""Generate a text completion with GPT-Neo 1.3B.

NOTE(review): the original script claimed to "generate an NSFW image".
A causal language model cannot produce images at all — it only emits
text — so the misleading prompt, variable names, and printed labels
were corrected to describe what the code actually does.
"""

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# GPT-Neo checkpoints use the GPT-Neo architecture, not GPT-2; loading
# them through GPT2LMHeadModel/GPT2Tokenizer mismatches the weight layout.
# The Auto* classes select the right architecture from the checkpoint's
# own config, so this also keeps working if the model name changes.
model_name = "EleutherAI/gpt-neo-1.3B"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference mode: disables dropout etc.

# The tokenizer already exposes the EOS id directly; re-encoding the
# "<|endoftext|>" literal by hand is fragile and redundant.
model.config.eos_token_id = tokenizer.eos_token_id

# Plain text prompt — the model continues this string.
prompt_text = "Once upon a time"
input_ids = tokenizer.encode(prompt_text, return_tensors="pt")

# no_grad: generation is inference-only, so skip autograd bookkeeping.
with torch.no_grad():
    output = model.generate(input_ids, max_length=150)

generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
print("Generated text:")
print(generated_text)