# Sentence and word tokenization of a user-supplied paragraph
from nltk.tokenize import sent_tokenize, word_tokenize


def _show_tokens(text: str) -> None:
    """Print the NLTK sentence tokens and word tokens of *text*.

    Uses ``sent_tokenize`` / ``word_tokenize`` (requires the NLTK
    ``punkt`` model to be downloaded).
    """
    print("Sentence Tokens:")
    print(sent_tokenize(text))
    print("Word Tokens:")
    print(word_tokenize(text))


if __name__ == "__main__":
    # Guarded entry point: importing this module no longer blocks on stdin.
    # The original script repeated the prompt/tokenize/print section twice
    # verbatim; prompt and report once instead.
    _show_tokens(input("Enter the text: "))