Sentence and word tokenization of a user-supplied paragraph
import nltk
from nltk.tokenize import sent_tokenize, word_tokenize

nltk.download('punkt', quiet=True)  # both tokenizers need the Punkt models; download once if missing

example_text = input("Enter the text: ")
print("Sentence Tokens:")
print(sent_tokenize(example_text))  # split the paragraph into sentences
print("Word Tokens:")
print(word_tokenize(example_text))  # split into word and punctuation tokens
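
As a quick usage check, here is a minimal non-interactive variant (the sample paragraph is illustrative, not part of the original program), with the expected tokens shown in comments:

from nltk.tokenize import sent_tokenize, word_tokenize

sample = "NLTK is a Python library. It makes tokenization easy."
print(sent_tokenize(sample))
# ['NLTK is a Python library.', 'It makes tokenization easy.']
print(word_tokenize(sample))
# ['NLTK', 'is', 'a', 'Python', 'library', '.', 'It', 'makes', 'tokenization', 'easy', '.']

Note that word_tokenize treats punctuation marks as separate tokens, which is why the periods appear as their own list entries.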