-
Notifications
You must be signed in to change notification settings - Fork 2
/
config_example.py
147 lines (109 loc) · 3.29 KB
/
config_example.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
'''
Sets up global variables for the app.
'''
import os
import sys
import socket
import getpass

from dotenv import load_dotenv

# Pull overrides from a local .env file before any env vars are read below.
load_dotenv()

"""
Server resource paths and hosting options
"""
# Application root: env override, else the directory containing this file.
approot = os.getenv('APPROOT')
if not approot:
    approot = os.path.dirname(os.path.abspath(__file__))
sys.path.append(approot)

DEVELOPMENT = False

# Public hostname the app is served from; empty string means no host part.
HOST = '' #'localhost' # AWS example: 'ec2-54-175-135-17.compute-1.amazonaws.com' #socket.getfqdn()
if 'local' in HOST:
    HOST = 'localhost'
PORT = 8888

# Base URL used when generating absolute links; omit the port on plain :80.
URL_PREFIX = 'http://' + HOST
if PORT != 80:
    URL_PREFIX = URL_PREFIX + ':' + str(PORT)
URL_PREFIX = URL_PREFIX + '/'

# $USER is unset in some environments (cron, containers); fall back to the
# platform lookup instead of raising KeyError at import time.
user = os.environ.get('USER') or getpass.getuser()

# Content types served for uploaded image blobs.
mime_dictionary = {
    ".jpg": "image/jpeg",
    ".jpeg": "image/jpeg",
    ".gif": "image/gif",
    ".png": "image/png"
}
"""
AWS Options
"""
# If this is set to true, user running the app must have AWS CLI config set up
# for an IAM user that has permission to write to the s3 bucket named
# APPNAME-blobs
# All uploaded images and videos will be saved to that bucket
USE_AWS_S3 = False
"""
Classifier params
"""
classifier_type = 'linear svm'
lin_svm = dict(
C = 1.0,
dual = True,
verbose = True
)
threshold = -1.0
# How many of the predictions to ask about in one round
query_num = 200
# the active query strategy
active_query_strategy='most_confident'
'''
Flask App
'''
APPNAME = 'kaizen'
DEBUG = True
CSRF_ENABLED = True
SECRET_KEY = 'robots are people too'

# Static asset locations, all rooted at this file's directory.
basedir = os.path.abspath(os.path.dirname(__file__))
BLOB_DIR = os.path.join(basedir, 'app', 'static', 'blobs')
DATASET_DIR = os.path.join(basedir, 'app', 'static', 'datasets')
CACHE_DIR = os.path.join(basedir, 'app', 'static', 'cache')
# NOTE(review): logs are rooted at approot (env-overridable) while the other
# static dirs use basedir — confirm this asymmetry is intentional.
LOG_DIR = os.path.join(approot, 'app', 'static', 'logs')
LOG_FILE = os.path.join(LOG_DIR, APPNAME + '.log')

#Remote DB ex: SQLALCHEMY_DATABASE_URI = 'postgresql://'+user+'@localhost/'+APPNAME
# Local Postgres connection settings (password defaults to the username).
POSTGRES = {
    'user': os.getenv("USER"),
    'pw': os.getenv("USER"),
    'db': APPNAME + '-local',
    'host': 'localhost',
    'port': '5432',
}
SQLALCHEMY_DATABASE_URI = (
    'postgresql://%(user)s:%(pw)s@%(host)s:%(port)s/%(db)s' % POSTGRES
)
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
SQLALCHEMY_TRACK_MODIFICATIONS = False

# Celery uses the same Postgres instance as both broker and result store.
BROKER_URL = "sqla+" + SQLALCHEMY_DATABASE_URI
CELERY_RESULT_BACKEND = "db+" + SQLALCHEMY_DATABASE_URI

# mail server settings
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USERNAME = None
MAIL_PASSWORD = None

# administrator list
ADMINS = ['[email protected]']
USER_ENABLE_CONFIRM_EMAIL = False
USER_ENABLE_EMAIL = False
# Used to retrieve meta-data from ec2 machines
def ec2_metadata(tag):
    """Return the EC2 instance metadata value for *tag* as a string.

    Queries the link-local metadata service at 169.254.169.254 (the original
    code had a literal ``<ip-address>`` placeholder, so it could never
    succeed).  Returns an empty string when the service is unreachable
    (e.g. not running on EC2), because a failed ``curl -s`` emits no output.
    """
    # --connect-timeout 1 keeps non-EC2 hosts from hanging on the request.
    md_cmd = ('curl -s --connect-timeout 1 '
              'http://169.254.169.254/latest/meta-data/')
    return os.popen(md_cmd + tag).read()
# set logging level to 2 to suppress caffe output
os.environ['GLOG_minloglevel'] = '2'

# Commented-out example of auto-detecting GPU availability from the EC2
# instance type (g* instances carry GPUs):
#USE_GPU = False
#instance_type = ec2_metadata('instance-type')
#EC2 = instance_type != ''
#if instance_type.startswith("g"):

# Hard-coded GPU settings for this example configuration.
USE_GPU = True
GPU_DEVICE_IDS = [0]
EC2 = False
# Ensure each static subdirectory exists.  On EC2 the instance store under
# /mnt is used for bulk storage: create a per-user space there once, then
# symlink each subdirectory into the app tree.
for d in (BLOB_DIR, DATASET_DIR, CACHE_DIR, LOG_DIR):  # 'd' avoids shadowing builtin dir()
    sub = os.path.basename(d)
    if not os.path.exists(d):
        if EC2:
            # Create and take ownership of the space the symlink targets
            # actually live in.  (Previously this made/chowned /mnt/$USER,
            # which nothing below used, so the non-sudo mkdir of
            # /mnt/$USER-space could fail on a root-owned /mnt.)
            os.system('sudo mkdir -p /mnt/$USER-space')
            os.system('sudo chown $USER /mnt/$USER-space')
            os.system('mkdir -p /mnt/$USER-space/' + sub)
            os.system('ln -sf /mnt/$USER-space/' + sub + ' ' + d)
        else:
            # makedirs: the app/static parents may not exist yet either.
            os.makedirs(d)