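# Makefile for building the xmpp.org web site with Pelican and deploying the
# generated output (see the upload and github targets below).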
PY=python
PELICAN=pelican
PELICANOPTS=
BASEDIR=$(CURDIR)
INPUTDIR=$(BASEDIR)/content
OUTPUTDIR=$(BASEDIR)/output
CONFFILE=$(BASEDIR)/pelicanconf.py
PUBLISHCONF=$(BASEDIR)/publishconf.py
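
# Deployment settings. The values below are placeholders; point them at your
# real SSH/FTP/S3/Cloud Files/Dropbox targets before using the upload rules.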
FTP_HOST=localhost
FTP_USER=anonymous
FTP_TARGET_DIR=/
SSH_HOST=localhost
SSH_PORT=22
SSH_USER=root
SSH_TARGET_DIR=/var/www
S3_BUCKET=my_s3_bucket
CLOUDFILES_USERNAME=my_rackspace_username
CLOUDFILES_API_KEY=my_rackspace_api_key
CLOUDFILES_CONTAINER=my_cloudfiles_container
DROPBOX_DIR=~/Dropbox/Public/
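
# Set DEBUG=1 on the command line (e.g. "make DEBUG=1 html") to append
# Pelican's -D debug flag to PELICANOPTS for every build below.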
DEBUG ?= 0
ifeq ($(DEBUG), 1)
PELICANOPTS += -D
endif

help:
	@echo 'Makefile for a pelican Web site'
	@echo ''
	@echo 'Usage:'
	@echo '   make html                        (re)generate the web site'
	@echo '   make clean                       remove the generated files'
	@echo '   make regenerate                  regenerate files upon modification'
	@echo '   make publish                     generate using production settings'
	@echo '   make serve [PORT=8000]           serve site at http://localhost:8000'
	@echo '   make devserver [PORT=8000]       start/restart develop_server.sh'
	@echo '   make stopserver                  stop local server'
	@echo '   make ssh_upload                  upload the web site via SSH'
	@echo '   make rsync_upload                upload the web site via rsync+ssh'
	@echo '   make dropbox_upload              upload the web site via Dropbox'
	@echo '   make ftp_upload                  upload the web site via FTP'
	@echo '   make s3_upload                   upload the web site via S3'
	@echo '   make cf_upload                   upload the web site via Cloud Files'
	@echo '   make github                      upload the web site via gh-pages'
	@echo ''
	@echo 'Set the DEBUG variable to 1 to enable debugging, e.g. make DEBUG=1 html'
	@echo ''
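
# Build and preview targets. "regenerate" runs Pelican with -r so the site is
# rebuilt whenever content changes; "serve" and "devserver" preview the
# generated output locally (PORT defaults to 8000).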
html:
	$(PELICAN) $(INPUTDIR) -o $(OUTPUTDIR) -s $(CONFFILE) $(PELICANOPTS)

clean:
	[ ! -d $(OUTPUTDIR) ] || rm -rf $(OUTPUTDIR)

regenerate:
	$(PELICAN) -r $(INPUTDIR) -o $(OUTPUTDIR) -s $(CONFFILE) $(PELICANOPTS)

serve:
ifdef PORT
	cd $(OUTPUTDIR) && $(PY) -m pelican.server $(PORT)
else
	cd $(OUTPUTDIR) && $(PY) -m pelican.server
endif

devserver:
ifdef PORT
	$(BASEDIR)/develop_server.sh restart $(PORT)
else
	$(BASEDIR)/develop_server.sh restart
endif

stopserver:
	kill -9 `cat pelican.pid`
	kill -9 `cat srv.pid`
	@echo 'Stopped Pelican and SimpleHTTPServer processes running in background.'

publish:
	$(PELICAN) $(INPUTDIR) -o $(OUTPUTDIR) -s $(PUBLISHCONF) $(PELICANOPTS)

ssh_upload: publish
	scp -P $(SSH_PORT) -r $(OUTPUTDIR)/* $(SSH_USER)@$(SSH_HOST):$(SSH_TARGET_DIR)

rsync_upload: publish
	rsync -e "ssh -p $(SSH_PORT)" -P -rvz --delete $(OUTPUTDIR)/ $(SSH_USER)@$(SSH_HOST):$(SSH_TARGET_DIR) --cvs-exclude

dropbox_upload: publish
	cp -r $(OUTPUTDIR)/* $(DROPBOX_DIR)

ftp_upload: publish
	lftp ftp://$(FTP_USER)@$(FTP_HOST) -e "mirror -R $(OUTPUTDIR) $(FTP_TARGET_DIR) ; quit"

s3_upload: publish
	s3cmd sync $(OUTPUTDIR)/ s3://$(S3_BUCKET) --acl-public --delete-removed

cf_upload: publish
	cd $(OUTPUTDIR) && swift -v -A https://auth.api.rackspacecloud.com/v1.0 -U $(CLOUDFILES_USERNAME) -K $(CLOUDFILES_API_KEY) upload -c $(CLOUDFILES_CONTAINER) .
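
# Continuous deployment: on Travis CI (non-pull-request builds only) this
# imports the generated output onto gh-pages with ghp-import, rebuilds the XEP
# extension pages from xsf/xeps using xsf-tools, and force-pushes gh-pages
# using the GH_TOKEN secret.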
github: publish
ifeq ($(TRAVIS_PULL_REQUEST), false)
	ghp-import -n $(OUTPUTDIR)
	mkdir /tmp/xep-images
	curl -o /tmp/xep-images/xmpp-text.pdf https://xmpp.org/images/xmpp-text.pdf
	curl -o /tmp/xep-images/xmpp.pdf https://xmpp.org/images/xmpp.pdf
	git clone https://github.com/xsf/xeps /tmp/xeps
	git clone https://github.com/xsf/xsf-tools /tmp/xsf-tools
	PYTHONPATH=$$PYTHONPATH:/usr/local/lib/python2.7/dist-packages/ /tmp/xsf-tools/build.py -d -x /tmp/xeps -o /home/travis/build/xsf/xmpp.org/output/extensions --imagespath /tmp/xep-images
	@git push -fq https://${GH_TOKEN}@github.com/$(TRAVIS_REPO_SLUG).git gh-pages > /dev/null
endif

.PHONY: html help clean regenerate serve devserver publish ssh_upload rsync_upload dropbox_upload ftp_upload s3_upload cf_upload github