import re
import numpy as np

WINDOW_SIZE = 2

def create_vocabulary(training_data):
    """Tokenize the training data and return a sorted list of its words."""
    all_words = ' '.join(training_data).lower()
    all_words = all_words.replace('.', '')
    all_words = all_words.split()
    vocab = list(set(all_words))
    vocab.sort()
    return vocab
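
# For reference: with the sample sentence defined at the bottom of this script
# ('The dog chased the cat around the garden.'), create_vocabulary returns
# ['around', 'cat', 'chased', 'dog', 'garden', 'the'].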

def one_hot(word, vocab, vocab_size):
    """Return the one-hot encoded vector for a word."""
    one_hot = [0] * vocab_size
    pos = vocab.index(word)
    one_hot[pos] = 1
    one_hot = np.array(one_hot)
    return one_hot

def create_vector_word_map(vocab, vocab_size):
    """Return a dictionary that maps one-hot vectors back to words."""
    vec_to_word = {str(one_hot(word, vocab, vocab_size)): word
                   for word in vocab}
    return vec_to_word
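
# Example with a hypothetical two-word vocabulary: one_hot('dog', ['cat', 'dog'], 2)
# returns np.array([0, 1]), so create_vector_word_map stores it under the string
# key '[0 1]' and maps that key back to 'dog'.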

def encode_training_data(training_data, vocab_size, window_size):
    """Encode the center and outside words as one-hot vectors."""
    # Note: relies on the module-level `vocab` built further down in the script.
    encoded_training_data = []
    for sentence in training_data:
        # Tokenize the sentence
        tokens = re.sub(r'[^\w\s]', '', sentence).lower().split()
        # Encode each center word and its surrounding context words
        for word_pos, word in enumerate(tokens):
            center_word = one_hot(word, vocab, vocab_size)
            for outside_pos in range(word_pos - window_size,
                                     word_pos + window_size + 1):
                if (outside_pos >= 0) and (outside_pos < len(tokens)) \
                        and (outside_pos != word_pos):
                    outside_word = one_hot(tokens[outside_pos],
                                           vocab,
                                           vocab_size)
                    encoded_training_data.append([center_word, outside_word])
    return encoded_training_data
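
# Example: for the sample sentence at the bottom of this script, the tokens are
# ['the', 'dog', 'chased', 'the', 'cat', 'around', 'the', 'garden']. With a
# window size of 2, the center word 'dog' (position 1) is paired with 'the'
# (position 0), 'chased' (position 2) and 'the' (position 3); window positions
# that fall outside the sentence are skipped.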

def print_training_encodings(encoded_training_data, vocab, vec_to_word):
    """Print the encoding for each (center word, outside word) pair."""
    max_len = len(max(vocab, key=len))
    for num, (cw_vector, ow_vector) in enumerate(encoded_training_data):
        cw = vec_to_word[str(cw_vector)]
        ow = vec_to_word[str(ow_vector)]
        print(f'Center Word #{num}: {cw:<{max_len}} {cw_vector}')
        print(f'Outside Word:   {ow:<{max_len}} {ow_vector}')

# Create training data
training_data = ['The dog chased the cat around the garden.']

# Encode training data
vocab = create_vocabulary(training_data)
vocab_size = len(vocab)
vec_to_word = create_vector_word_map(vocab, vocab_size)
encoded_training_data = encode_training_data(training_data,
                                              vocab_size,
                                              window_size=WINDOW_SIZE)

# Print out results
print_training_encodings(encoded_training_data, vocab, vec_to_word)
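
# Optional sanity check: each one-hot vector should map back to its word via
# vec_to_word (assumes 'dog' is in the vocabulary built from the sample sentence).
assert vec_to_word[str(one_hot('dog', vocab, vocab_size))] == 'dog'

# With the sample sentence, the first printed pair looks roughly like:
#   Center Word #0: the    [0 0 0 0 0 1]
#   Outside Word:   dog    [0 0 0 1 0 0]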