MH Sampling: Python Implementation
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the pretrained model and tokenizer
model_name = 'gpt2'
model = GPT2LMHeadModel.from_pretrained(model_name)
tokenizer = GPT2Tokenizer.from_pretrained(model_name)

# Set the generation length and sampling temperature
max_length = 100
temperature = 0.7

# Input prompt
input_text = "The weather is really nice today, "

# Encode the prompt into input IDs
input_ids = tokenizer.encode(input_text, return_tensors='pt')

# Generate text with GPT-2; do_sample=True is required for temperature to take effect
output = model.generate(input_ids, max_length=max_length, do_sample=True,
                        temperature=temperature, num_return_sequences=1)

# Decode the first generated sequence back into text
output_text = tokenizer.decode(output[0], skip_special_tokens=True)
print(output_text)
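
Note that the code above performs ordinary GPT-2 sampling, not Metropolis-Hastings. For the basic MH sampling named in the title, a minimal sketch is given below; it assumes a standard-normal target density and a symmetric Gaussian random-walk proposal, and all names and default parameters (target_log_prob, metropolis_hastings, proposal_std) are illustrative rather than taken from the original resource.

import numpy as np

def target_log_prob(x):
    # Log-density of the (unnormalized) target: a standard normal here.
    return -0.5 * x ** 2

def metropolis_hastings(n_samples=10000, proposal_std=1.0, x0=0.0, seed=0):
    rng = np.random.default_rng(seed)
    samples = np.empty(n_samples)
    x = x0
    log_p_x = target_log_prob(x)
    for i in range(n_samples):
        # Symmetric Gaussian random-walk proposal.
        x_new = x + rng.normal(scale=proposal_std)
        log_p_new = target_log_prob(x_new)
        # Accept with probability min(1, p(x_new) / p(x)); the proposal
        # ratio cancels because the random walk is symmetric.
        if np.log(rng.uniform()) < log_p_new - log_p_x:
            x, log_p_x = x_new, log_p_new
        samples[i] = x
    return samples

samples = metropolis_hastings()
print(samples.mean(), samples.std())  # should be close to 0 and 1

The chain keeps the current state whenever a proposal is rejected, which is what makes the accepted/rejected sequence a valid sample from the target distribution in the long run.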