-
Notifications
You must be signed in to change notification settings - Fork 0
/
jenkins.sh
executable file
·91 lines (76 loc) · 3.49 KB
/
jenkins.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
#!/usr/bin/env bash
# Jenkins CI setup: install Python dependencies and NLTK data, fetch the
# third-party NLP tools (Stanford CoreNLP/parser/tagger, SENNA), then run
# the NLTK test suite under coverage and pylint.
# Run from the script's own directory; abort if that fails, since every
# relative path below would otherwise be wrong.
cd "$(dirname "$0")" || exit 1
# Install NLTK's Python dependencies (matplotlib needs the external flags
# on the old pip used by this CI image).
pip install --upgrade -r pip-req.txt --allow-external matplotlib --allow-unverified matplotlib
pip install --upgrade https://github.com/PyCQA/pylint/archive/master.zip
# Download all NLTK data packages; tolerate failure but log the exit code.
python -c "import nltk; nltk.download('all')" || echo "NLTK data download failed: $?"
# Download external dependencies into ${HOME}/third.
# Quote ${HOME} (may contain spaces) and abort if pushd fails, since every
# later download would otherwise land in the wrong directory.
pushd "${HOME}" || exit 1
mkdir -p 'third'
pushd 'third' || exit 1
# Download Stanford CoreNLP (pinned version; the commented line shows how to
# discover the latest release dynamically).
#stanford_corenlp_package_zip_name=$(curl -s 'https://stanfordnlp.github.io/CoreNLP/' | grep -o 'stanford-corenlp-full-.*\.zip' | head -n1)
stanford_corenlp_package_zip_name="stanford-corenlp-full-2017-06-09.zip"
# Strip the .zip suffix to get the extracted directory name.
[[ ${stanford_corenlp_package_zip_name} =~ (.+)\.zip ]]
stanford_corenlp_package_name=${BASH_REMATCH[1]}
if [[ ! -d "${stanford_corenlp_package_name}" ]]; then
  # Only unzip/link when the download succeeded, so a transient network
  # failure cannot leave a broken 'stanford-corenlp' symlink behind.
  # Use https like the other Stanford downloads below.
  if wget -nv "https://nlp.stanford.edu/software/${stanford_corenlp_package_zip_name}"; then
    unzip "${stanford_corenlp_package_zip_name}"
    rm -f -- "${stanford_corenlp_package_zip_name}"
    ln -sf "${stanford_corenlp_package_name}" 'stanford-corenlp'
  fi
fi
# Download the Stanford parser (pinned version; the commented line shows how
# to discover the latest release dynamically).
#stanford_parser_package_zip_name=$(curl -s 'https://nlp.stanford.edu/software/lex-parser.shtml' | grep -o 'stanford-parser-full-.*\.zip' | head -n1)
stanford_parser_package_zip_name="stanford-parser-full-2017-06-09.zip"
# Strip the .zip suffix to get the extracted directory name.
[[ ${stanford_parser_package_zip_name} =~ (.+)\.zip ]]
stanford_parser_package_name=${BASH_REMATCH[1]}
if [[ ! -d "${stanford_parser_package_name}" ]]; then
  # Only unzip/link when the download succeeded, so a transient network
  # failure cannot leave a broken 'stanford-parser' symlink behind.
  if wget -nv "https://nlp.stanford.edu/software/${stanford_parser_package_zip_name}"; then
    unzip "${stanford_parser_package_zip_name}"
    rm -f -- "${stanford_parser_package_zip_name}"
    ln -sf "${stanford_parser_package_name}" 'stanford-parser'
  fi
fi
# Download the Stanford POS tagger (pinned version; the commented line shows
# how to discover the latest release dynamically).
#stanford_tagger_package_zip_name=$(curl -s 'https://nlp.stanford.edu/software/tagger.shtml' | grep -o 'stanford-postagger-full-.*\.zip' | head -n1)
stanford_tagger_package_zip_name="stanford-postagger-full-2017-06-09.zip"
# Strip the .zip suffix to get the extracted directory name.
[[ ${stanford_tagger_package_zip_name} =~ (.+)\.zip ]]
stanford_tagger_package_name=${BASH_REMATCH[1]}
if [[ ! -d "${stanford_tagger_package_name}" ]]; then
  # Only unzip/link when the download succeeded, so a transient network
  # failure cannot leave a broken 'stanford-postagger' symlink behind.
  if wget -nv "https://nlp.stanford.edu/software/${stanford_tagger_package_zip_name}"; then
    unzip "${stanford_tagger_package_zip_name}"
    rm -f -- "${stanford_tagger_package_zip_name}"
    ln -sf "${stanford_tagger_package_name}" 'stanford-postagger'
  fi
fi
# Download SENNA; scrape the current tarball name from the download page.
# Escape the dot in the pattern so e.g. 'senna-v30tgz' cannot match.
senna_file_name=$(curl -s 'https://ronan.collobert.com/senna/download.html' | grep -o 'senna-v.*\.tgz' | head -n1)
senna_folder_name='senna'
if [[ ! -d "${senna_folder_name}" ]]; then
  # Guard against an empty scrape (curl failed or page layout changed);
  # only extract when the download itself succeeded.
  if [[ -n "${senna_file_name}" ]] && wget -nv "https://ronan.collobert.com/senna/${senna_file_name}"; then
    tar -xvzf "${senna_file_name}"
    rm -f -- "${senna_file_name}"
  fi
fi
# Set up the environment variables NLTK uses to locate the Stanford tools
# and SENNA. Capture pwd once instead of spawning a subshell per export,
# and quote every value in case the path contains spaces.
third_dir=$(pwd)
export CLASSPATH="${third_dir}/${stanford_corenlp_package_name}"
export CLASSPATH="${CLASSPATH}:${third_dir}/${stanford_parser_package_name}"
export CLASSPATH="${CLASSPATH}:${third_dir}/${stanford_tagger_package_name}"
export STANFORD_CORENLP="${third_dir}/stanford-corenlp"
export STANFORD_PARSER="${third_dir}/stanford-parser"
export STANFORD_MODELS="${third_dir}/stanford-postagger/models"
export STANFORD_POSTAGGER="${third_dir}/stanford-postagger"
export SENNA="${third_dir}/senna"
# Return to the original working directory (undo both pushd calls).
popd
popd
# Report the environment, then run the NLTK test suite under coverage,
# scrub the xunit report to valid UTF-8, and run pylint.
echo "---- CLASSPATH: ----"
echo "${CLASSPATH}"
echo "---- MODELS: ----"
echo "${STANFORD_MODELS}"
echo "---- NLTK runtests.py ----"
# Run the test suite with coverage measurement.
coverage erase
coverage run --source=nltk nltk/test/runtests.py -v --with-xunit
# Quote the omit pattern so the shell cannot glob-expand it against the
# current directory before coverage sees it.
coverage xml --omit='nltk/test/*'
# Strip invalid UTF-8 sequences so Jenkins can parse the xunit report.
iconv -c -f utf-8 -t utf-8 nosetests.xml > nosetests_scrubbed.xml
# Create a default pylint configuration file.
touch ~/.pylintrc
pylint -f parseable nltk > pylintoutput
# Always exit 0: the Jenkins build result is decided by the published
# reports, not by this script's exit status.
true