import glob
import os

import nltk

# Requires the NLTK tokenizer models: run nltk.download('punkt') once beforehand.
# glob does not expand "~", so expand the home directory explicitly;
# the "**" pattern is needed for recursive=True to actually recurse.
root_dir = os.path.expanduser("~/data/")
txt_path_list = glob.glob(os.path.join(root_dir, '**', '*.txt'), recursive=True)
print(txt_path_list)

for this_file in txt_path_list:
    with open(this_file, 'r', encoding='utf8') as f, \
         open(this_file + '.tokenized', 'w', encoding='utf8') as fw:
        for line in f:
            # Tokenize each line and write the tokens back out, space-separated.
            fw.write(' '.join(nltk.word_tokenize(line)) + '\n')