Update integration tests to include project in payload #476

Open · wants to merge 2 commits into base: master
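Every change below follows the same pattern: each test first creates a project and then passes that project's URL in the `project` field of the dataset-creation payload, instead of creating the dataset without one (or, in test_projects.py, with an `owner` field), presumably because datasets are now expected to be created inside a project. A minimal sketch of the pattern, assuming an authenticated pycrunch `site` session and an illustrative dataset name:

```python
from pycrunch.shoji import as_entity

# Create a project to hold the test dataset.
project = site.projects.create(as_entity({"name": "foo"}))

# Pass the project's entity URL (project.self) in the dataset payload.
ds = site.datasets.create(
    as_entity({"name": "my_test_dataset", "project": project.self})
).refresh()
```

The test that uses scrunch's `shoji_entity_wrapper` helper (test_projects.py) does the same thing, passing `project.url` from the scrunch `Project` wrapper instead of `project.self`.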
3 changes: 2 additions & 1 deletion integration/test_backfill.py
@@ -12,8 +12,9 @@

 class TestBackFill(BaseIntegrationTestCase):
     def _prepare_ds(self, values):
+        project = self.site.projects.create(as_entity({"name": "foo"}))
         ds = self.site.datasets.create(
-            as_entity({"name": "test_backfill_values"})).refresh()
+            as_entity({"name": "test_backfill_values", "project": project.self})).refresh()
         # We need a numeric PK
         pk = ds.variables.create(
             as_entity(
7 changes: 6 additions & 1 deletion integration/test_dataset.py
@@ -525,7 +525,12 @@ def test_append_dataset_with_variables_list_filters_and_exclusion(self):

 class TestCategories(BaseIntegrationTestCase):
     def test_edit_category(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_edit_category"})).refresh()
+        project = self.site.projects.create(
+            as_entity({"name": "foo"})
+        )
+        ds = self.site.datasets.create(
+            as_entity({"name": "test_edit_category", "project": project.self})
+        ).refresh()

         categories = [
             {"id": 1, "name": "One", "missing": False, "numeric_value": None},
33 changes: 24 additions & 9 deletions integration/test_expressions.py
@@ -31,16 +31,19 @@ def _create_mr_dataset(self, name, rows):
                 }]
             },
         }
-        ds = self.site.datasets.create({
-            'element': 'shoji:entity',
-            'body': {
+        project = self.site.projects.create(
+            as_entity({"name": "foo"})
+        )
+        ds = self.site.datasets.create(
+            as_entity({
                 'name': name,
                 'table': {
                     'element': 'crunch:table',
                     'metadata': _dataset_metadata
                 },
-            }
-        }).refresh()
+                'project': project.self,
+            })
+        ).refresh()
         Importer().append_rows(ds, rows)
         scrunch_dataset = get_mutable_dataset(ds.body.id, self.site)
         return ds, scrunch_dataset
@@ -109,7 +112,10 @@ def test_multiple_response_any_add_filter_subvar(self):
         ds.delete()

     def test_categorical_array_any_add_filter(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_any_categorical_add_filter"})).refresh()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity(
+            {"name": "test_any_categorical_add_filter", "project": project.self}
+        )).refresh()
         ds.variables.create(as_entity({
             "name": "Categorical Var",
             "alias": "categorical_var",
@@ -213,7 +219,10 @@ def test_categorical_array_any_add_filter(self):
         ds.delete()

     def test_categorical_array_any_w_bracket_subvar(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_any_categorical_w_bracket_add_filter"})).refresh()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity(
+            {"name": "test_any_categorical_w_bracket_add_filter", "project": project.self}
+        )).refresh()
         cat_var = ds.variables.create(as_entity({
             "name": "Categorical Var",
             "alias": "categorical_var",
@@ -366,7 +375,10 @@ def test_append_dataset_any_filter_multiple_response_single_subvar(self):
         ds_to_append.delete()

     def test_categorical_any_add_filter_value(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_any_categorical_filter"})).refresh()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity(
+            {"name": "test_any_categorical_filter", "project": project.self}
+        )).refresh()
         categories = [
             {"id": 1, "name": "One", "missing": False, "numeric_value": None},
             {"id": 2, "name": "Two", "missing": False, "numeric_value": None},
@@ -398,7 +410,10 @@ def test_categorical_any_add_filter_value(self):
         ds.delete()

     def test_categorical_any_add_filter_multiple_values(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_any_categorical_filter_multiple_values"})).refresh()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity(
+            {"name": "test_any_categorical_filter_multiple_values", "project": project.self}
+        )).refresh()
         categories = [
             {"id": 1, "name": "One", "missing": False, "numeric_value": None},
             {"id": 2, "name": "Two", "missing": False, "numeric_value": None},
4 changes: 3 additions & 1 deletion integration/test_folders.py
@@ -5,7 +5,7 @@

 """

-from pycrunch.shoji import Catalog
+from pycrunch.shoji import Catalog, as_entity

 from fixtures import NEWS_DATASET, BaseIntegrationTestCase
 from scrunch import get_dataset
@@ -69,6 +69,7 @@ def setup_folders(ds):
 class TestFolders(BaseIntegrationTestCase):
     def setUp(self):
         super(TestFolders, self).setUp()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
         self._ds = self.site.datasets.create({
             'element': 'shoji:entity',
             'body': {
@@ -77,6 +78,7 @@ def setUp(self):
                 'element': 'crunch:table',
                 'metadata': NEWS_DATASET
             },
+            'project': project.self,
         }
     }).refresh()
     ds = self._ds
13 changes: 9 additions & 4 deletions integration/test_projects.py
@@ -72,25 +72,30 @@ def test_move_dataset(self):
         fo = get_user(username_2)
         fo_site = connect(fo.email, password_2, self.host)

+        project = self.new_project("foo")
+        fo_project = fo_site.projects.create(shoji_entity_wrapper({"name": "foo"}))
+
         # These two datasets are created by the default logged user
         _ds1 = self.site.datasets.create(shoji_entity_wrapper({
-            'name': 'test_move_dataset1'
+            'name': 'test_move_dataset1',
+            'project': project.url,
         })).refresh()
         _ds2 = self.site.datasets.create(shoji_entity_wrapper({
-            'name': 'test_move_dataset2'
+            'name': 'test_move_dataset2',
+            'project': project.url,
         })).refresh()

         # This dataset is created and owned by the other user
         _ds4 = fo_site.datasets.create(shoji_entity_wrapper({
             'name': 'test_move_dataset4',
-            'owner': fo.url
+            'project': fo_project.self,
         })).refresh()

         ds1 = get_dataset(_ds1.body.id)
         ds2 = get_dataset(_ds2.body.id)
         ds4 = get_dataset(_ds4.body.id, connection=fo_site)

-        ds2.add_user(fo, edit=True)
+        project.add_user(fo, edit=True)

         # Create a hierarchy A -> B
         pa = self.new_project("test_move_dataset_A")
16 changes: 10 additions & 6 deletions integration/test_recodes.py
@@ -14,23 +14,25 @@
 from scrunch.streaming_dataset import get_streaming_dataset
 from scrunch.mutable_dataset import get_mutable_dataset
 from pycrunch.importing import Importer
+from pycrunch.shoji import as_entity


 class TestRecodes(BaseIntegrationTestCase):
     def test_recodes(self):
         raise self.skipTest("Temporarily disabling for API update")
         # Create a dataset for usage
-        ds = self.site.datasets.create({
-            'element': 'shoji:entity',
-            'body': {
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(
+            as_entity({
                 'name': 'test_recodes',
                 'table': {
                     'element': 'crunch:table',
                     'metadata': NEWS_DATASET
                 },
-                'streaming': 'streaming'
-            }
-        }).refresh()
+                'streaming': 'streaming',
+                'project': project.self,
+            })
+        ).refresh()
         dataset = get_streaming_dataset(ds.body.id, self.site)
         print("Dataset %s created" % dataset.id)

@@ -171,6 +173,7 @@ def prepare_ds(self):
                 ]
             }
         }
+        project = self.site.projects.create(as_entity({"name": "foo"}))
         ds_payload = {
             'element': 'shoji:entity',
             'body': {
@@ -179,6 +182,7 @@ def prepare_ds(self):
                     'element': 'crunch:table',
                     'metadata': metadata
                 },
+                'project': project.self,
             }
         }

17 changes: 12 additions & 5 deletions integration/test_scripts.py
@@ -22,8 +22,10 @@ def new_project(self, name):
         return Project(res)

     def test_define_view_strict_subvariable_syntax(self):
-        project = self.new_project("test_view_strict_subvariable")
-        ds = self.site.datasets.create(as_entity({"name": "test_dataset_script"})).refresh()
+        proj = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity(
+            {"name": "test_dataset_script", "project": proj.self}
+        )).refresh()
         categories = [
             {"id": 2, "name": "Home"},
             {"id": 3, "name": "Work"},
@@ -66,15 +68,18 @@ def test_define_view_strict_subvariable_syntax(self):
         """.format(ds.body.id)

         scrunch_dataset = get_mutable_dataset(ds.body.id, self.site)
+        project = self.new_project("test_view_strict_subvariable")
         project.move_here([scrunch_dataset])
         resp = project.execute(script_body, strict_subvariable_syntax=True)
         wait_progress(resp, self.site.session)
         view = scrunch_dataset.views.get_by_name("My view")
         assert view.project.name == project.name

     def test_define_view_strict_subvariable_syntax_error(self):
-        project = self.new_project("test_view_strict_subvariable_false")
-        ds = self.site.datasets.create(as_entity({"name": "test_dataset_script_false"})).refresh()
+        proj = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity(
+            {"name": "test_dataset_script_false", "project": proj.self}
+        )).refresh()
         categories = [
             {"id": 2, "name": "Home"},
             {"id": 3, "name": "Work"},
@@ -118,6 +123,7 @@ def test_define_view_strict_subvariable_syntax_error(self):

         try:
             scrunch_dataset = get_mutable_dataset(ds.body.id, self.site)
+            project = self.new_project("test_view_strict_subvariable_false")
             project.move_here([scrunch_dataset])
             resp = project.execute(script_body)
             with pytest.raises(TaskError) as err:
@@ -133,7 +139,8 @@ def test_define_view_strict_subvariable_syntax_error(self):

 class TestDatasetScripts(BaseIntegrationTestCase):
     def _create_ds(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_script"})).refresh()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(as_entity({"name": "test_script", "project": project.self})).refresh()
         variable = ds.variables.create(
             as_entity(
                 {
5 changes: 4 additions & 1 deletion integration/test_views.py
@@ -14,7 +14,10 @@ class TestViews(BaseIntegrationTestCase):
     ]

     def _create_ds(self):
-        ds = self.site.datasets.create(as_entity({"name": "test_script"})).refresh()
+        project = self.site.projects.create(as_entity({"name": "foo"}))
+        ds = self.site.datasets.create(
+            as_entity({"name": "test_script", "project": project.self})
+        ).refresh()

         for alias, v_type in self.FIXTURE_VARIABLES:
             var_body = as_entity({
4 changes: 2 additions & 2 deletions scrunch/datasets.py
@@ -3410,7 +3410,8 @@ def create_tmp_ds(self, csv_file):
             "table": {
                 "element": "crunch:table",
                 "metadata": metadata
-            }
+            },
+            "project": self.dataset.project.url,

Inline review comment (Contributor Author): @jjdelc do you think it's okay to use the same project as the source dataset?

         })).refresh()
         try:
             importing.importer.append_csv_string(tmp_ds, csv_file)
@@ -3508,4 +3509,3 @@ def execute(self, csv_file):
                 folders_by_name[folder_name].entity.delete()
             # Always delete the tmp dataset no matter what
             tmp_ds.delete()
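
On the question raised in the review comment above: the change reuses the source dataset's project (`self.dataset.project.url`) for the temporary dataset created during the CSV import. A hypothetical alternative, not part of this PR, would be a throwaway project that is cleaned up together with the temporary dataset. A rough sketch, assuming an authenticated pycrunch `site` session plus the `metadata` and `csv_file` values that `create_tmp_ds` already builds; the names here are illustrative:

```python
from pycrunch import importing
from pycrunch.shoji import as_entity

# Scratch project that exists only for the duration of the import.
scratch_project = site.projects.create(as_entity({"name": "tmp_import_project"}))

tmp_ds = site.datasets.create(as_entity({
    "name": "tmp_import_dataset",
    "table": {
        "element": "crunch:table",
        "metadata": metadata,
    },
    # Entity URL of the scratch project, mirroring the test changes above.
    "project": scratch_project.self,
})).refresh()

try:
    importing.importer.append_csv_string(tmp_ds, csv_file)
finally:
    # Delete the temporary dataset and its scratch project no matter what.
    tmp_ds.delete()
    scratch_project.delete()
```

Whether that is preferable to reusing the source dataset's project is exactly the open question from the thread.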