@tg-bomze · Last active November 3, 2020 18:15
# pip3 install transformers==2.8.0
import torch
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the Russian GPT-3 large model released by Sberbank AI.
tokenizer = AutoTokenizer.from_pretrained("sberbank-ai/rugpt3large_based_on_gpt2")
model = AutoModelWithLMHead.from_pretrained("sberbank-ai/rugpt3large_based_on_gpt2")
model.eval()
model = model.to('cuda')

def generate_text(prompt, length):
    # Greedy decoding: append the most likely next token `length` times.
    for _ in range(length):
        indexed_tokens = tokenizer.encode(prompt)
        tokens_tensor = torch.tensor([indexed_tokens]).to('cuda')
        with torch.no_grad():
            outputs = model(tokens_tensor)
        predictions = outputs[0]
        predicted_index = torch.argmax(predictions[0, -1, :]).item()
        prompt = tokenizer.decode(indexed_tokens + [predicted_index])
    return prompt

prompt = "Вопрос: В чём заключён смысл жизни, вселенной и всего такого? Ответ:"  # prompt (Russian: "Question: What is the meaning of life, the universe and everything? Answer:")
length = 50  # number of tokens to generate
print(generate_text(prompt, length))
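The loop above re-encodes the full prompt on every step. The same greedy decoding can also be done with the library's built-in generate() helper; below is a minimal sketch, assuming the transformers 2.8.0 generate() API where max_length counts the prompt tokens plus the newly generated ones.

# Sketch: greedy decoding via model.generate(); max_length includes the prompt tokens.
input_ids = tokenizer.encode(prompt, return_tensors='pt').to('cuda')
with torch.no_grad():
    output_ids = model.generate(input_ids, max_length=input_ids.shape[1] + length)
print(tokenizer.decode(output_ids[0]))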