word2vec/demo-analogy.sh


make
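# Download the text8 corpus (the first 10^8 bytes of a cleaned English
# Wikipedia dump from mattmahoney.net) on the first run; gzip can unpack the
# single-member .zip archive directly, which is why the .zip is saved as text8.gz.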
if [ ! -e text8 ]; then
  wget http://mattmahoney.net/dc/text8.zip -O text8.gz
  gzip -d text8.gz -f
fi
echo -----------------------------------------------------------------------------------------------------
echo Note that for the word analogy task to perform well, the model should be trained on a much larger data set
echo Example input: paris france berlin
echo -----------------------------------------------------------------------------------------------------
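# Train skip-gram vectors (-cbow 0) with hierarchical softmax (-hs 1) and no
# negative sampling (-negative 0): 200-dimensional vectors, a 5-word window,
# 1e-3 subsampling of frequent words, 12 threads, binary output in vectors.bin.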
time ./word2vec -train text8 -output vectors.bin -cbow 0 -size 200 -window 5 -negative 0 -hs 1 -sample 1e-3 -threads 12 -binary 1
./word-analogy vectors.bin
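# word-analogy loads vectors.bin and then prompts for analogy queries, three
# words per line ("a b c" asks: a is to b as c is to ?). A sketch of a
# non-interactive run, assuming the tool reads queries from standard input and
# stops on EXIT; on a well-trained model the top answers should include germany:
#
#   printf 'paris france berlin\nEXIT\n' | ./word-analogy vectors.bin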