# tests.py — forked from mixxorz/behave-django (155 lines, 5.99 KB)
"""
Test suite for behave-django. See features folder for implementation.
Run it by
- ``python setup.py -q test -v`` or
- ``python manage.py test`` or
- ``python tests.py`` (preferred)
"""
from django.core.management import call_command
from mock import patch
from os import linesep as LF
from subprocess import PIPE, Popen
import django
import os
import unittest
from imp import reload
def run_silently(command):
    """Execute *command* in a subprocess, capturing all console output.

    The command string is split on whitespace (no shell involved).
    Returns a ``(exit_status, output)`` tuple, where *output* is the
    stripped concatenation of stdout and stderr.
    """
    proc = Popen(command.split(), stdout=PIPE, stderr=PIPE, stdin=PIPE)
    out, err = proc.communicate()
    combined = out.decode('UTF-8') + LF + err.decode('UTF-8')
    return proc.returncode, combined.strip()
def run_management_command(command, *args, **kwargs):
    """Run a Django management command via ``call_command``.

    ``django.setup()`` is invoked first; on Django versions that do not
    provide it (before 1.7) the resulting ``AttributeError`` is ignored.
    """
    try:
        django.setup()
    except AttributeError:
        # Django < 1.7 has no setup() hook; nothing to initialise.
        pass
    call_command(command, *args, **kwargs)
class BehaveDjangoTestCase(unittest.TestCase):
    """End-to-end tests for the ``behave`` Django management command.

    Exercises the command both through real subprocesses (flake8, the
    command's ``--help`` output) and through mocked-out internals
    (``behave_main``, ``sys.exit``, the test runner class).
    """
    def setUp(self):
        # NOTE: this may potentially have side-effects, making tests pass
        # that would otherwise fail, because it *always* overrides which
        # settings module is used.
        os.environ['DJANGO_SETTINGS_MODULE'] = 'test_project.settings'
    def test_flake8(self):
        """The code base must be flake8-clean (exit status 0)."""
        exit_status, output = run_silently('flake8')
        assert exit_status == 0
    def test_additional_management_command_options(self):
        """``--use-existing-database`` must appear in the command's help."""
        exit_status, output = run_silently('python manage.py behave --help')
        assert exit_status == 0
        assert (LF + ' --use-existing-database' + LF) in output
    @patch('behave_django.management.commands.behave.behave_main', return_value=0)  # noqa
    @patch('sys.exit')
    def test_command_should_exit_zero_if_passing(self,
                                                 mock_sys_exit,
                                                 mock_behave_main):
        """A passing behave run must not trigger ``sys.exit``."""
        # If the exit status returned by behave_main is 0, make sure sys.exit
        # does not get called
        run_management_command('behave', dry_run=True)
        assert not mock_sys_exit.called
    @patch('behave_django.management.commands.behave.behave_main', return_value=1)  # noqa
    @patch('sys.exit')
    def test_command_should_exit_nonzero_if_failing(self,
                                                    mock_sys_exit,
                                                    mock_behave_main):
        """A failing behave run must propagate its status via ``sys.exit``."""
        # If the exit status returned by behave_main is anything other than 0,
        # make sure sys.exit gets called with the exit code
        # Dry run to not create the database for faster tests
        run_management_command('behave', dry_run=True)
        mock_sys_exit.assert_called_once_with(1)
    @patch('behave_django.management.commands.behave.behave_main', return_value=0)  # noqa
    @patch('behave_django.management.commands.behave.ExistingDatabaseTestRunner')  # noqa
    def test_dont_create_db_with_dryrun(self,
                                        mock_existing_database_runner,
                                        mock_behave_main):
        """``--dry-run`` must use the existing-database runner (no DB setup)."""
        run_management_command('behave', dry_run=True)
        mock_behave_main.assert_called_once_with(args=[])
        mock_existing_database_runner.assert_called_once_with()
    @patch('behave_django.management.commands.behave.behave_main', return_value=0)  # noqa
    @patch('behave_django.management.commands.behave.ExistingDatabaseTestRunner')  # noqa
    def test_dont_create_db_with_useexistingdb(self,
                                               mock_existing_database_runner,
                                               mock_behave_main):
        """``--use-existing-database`` must also skip database creation."""
        run_management_command('behave', use_existing_database=True)
        mock_behave_main.assert_called_once_with(args=[])
        mock_existing_database_runner.assert_called_once_with()
    def test_should_accept_behave_arguments(self):
        """Behave-native flags (e.g. ``--format``) must be passed through."""
        from behave_django.management.commands.behave import Command
        command = Command()
        args = command.get_behave_args(
            argv=['manage.py', 'behave',
                  '--format', 'progress',
                  '--settings', 'test_project.settings',
                  'features/running-tests.feature'])
        assert '--format' in args
        assert 'progress' in args
    def test_should_not_include_non_behave_arguments(self):
        """Django-only flags (e.g. ``--settings``) must be filtered out."""
        from behave_django.management.commands.behave import Command
        command = Command()
        args = command.get_behave_args(
            argv=['manage.py', 'behave',
                  '--format', 'progress',
                  '--settings', 'test_project.settings',
                  'features/running-tests.feature'])
        assert '--settings' not in args
        assert 'test_project.settings' not in args
    def test_should_return_positional_args(self):
        """Positional arguments (feature paths) must be kept."""
        from behave_django.management.commands.behave import Command
        command = Command()
        args = command.get_behave_args(
            argv=['manage.py', 'behave',
                  '--format', 'progress',
                  '--settings', 'test_project.settings',
                  'features/running-tests.feature'])
        assert 'features/running-tests.feature' in args
    def test_no_arguments_should_not_cause_issues(self):
        """A bare ``manage.py behave`` must yield an empty argument list."""
        from behave_django.management.commands.behave import Command
        command = Command()
        args = command.get_behave_args(
            argv=['manage.py', 'behave'])
        assert args == []
    def test_command_import_dont_patch_behave_options(self):
        """Importing the command must not mutate behave's global options."""
        # We reload the tested imports because they
        # could have been imported by previous tests.
        import behave.configuration
        reload(behave.configuration)
        behave_options_backup = [
            (first, second.copy())
            for (first, second) in behave.configuration.options
        ]
        import behave_django.management.commands.behave
        reload(behave_django.management.commands.behave)
        assert behave.configuration.options == behave_options_backup
# Allow running the suite directly: ``python tests.py`` (the preferred way).
if __name__ == '__main__':
    unittest.main()