Updating code to use pandas<=1.5.3 and numpy<=1.24.2
- Changed np.object to object
- Fixed timestamp .astype('datetime64[ms]') conversion to work with pandas<=1.5.3 and numpy<=1.24.2
- Updated setup.py to follow the new setuptools rules (uses extras_require instead of tests_require)
Samuel Rohr committed Mar 29, 2023
1 parent 392a7f5 commit 8170356
Showing 6 changed files with 41 additions and 34 deletions.
7 changes: 5 additions & 2 deletions arctic/serialization/numpy_records.py
@@ -64,8 +64,11 @@ def _multi_index_to_records(index, empty_index):
     index_tz = []
     for i in index.levels:
         if isinstance(i, DatetimeIndex):
-            tmp = get_timezone(i.tz)
-            index_tz.append(str(tmp) if tmp is not None else None)
+            if i.tz is None:
+                index_tz.append(None)
+            else:
+                tmp = get_timezone(i.tz)
+                index_tz.append(str(tmp) if tmp is not None else None)
         else:
             index_tz.append(None)

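Note on the numpy_records.py change above: pandas' get_timezone helper rejects a None tz on newer versions, which is presumably what this guard works around, so the level's tz is checked first and naive levels are recorded as None directly. A minimal sketch of the guarded behaviour, using str(level.tz) in place of arctic's get_timezone import (the example index is made up):

    import pandas as pd
    from pandas import DatetimeIndex

    # Made-up MultiIndex with one tz-aware and one tz-naive datetime level.
    idx = pd.MultiIndex.from_product([
        pd.date_range('2023-01-01', periods=2, tz='UTC'),
        pd.date_range('2023-01-01', periods=2),   # naive level: .tz is None
    ])

    index_tz = []
    for level in idx.levels:
        if isinstance(level, DatetimeIndex):
            # Guard first: only ask for a timezone name when the level has one.
            if level.tz is None:
                index_tz.append(None)
            else:
                index_tz.append(str(level.tz))
        else:
            index_tz.append(None)

    print(index_tz)   # ['UTC', None]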
4 changes: 2 additions & 2 deletions arctic/store/_version_store_utils.py
@@ -20,9 +20,9 @@ def _split_arrs(array_2d, slices):
     but avoids fancy indexing
     """
     if len(array_2d) == 0:
-        return np.empty(0, dtype=np.object)
+        return np.empty(0, dtype=object)

-    rtn = np.empty(len(slices) + 1, dtype=np.object)
+    rtn = np.empty(len(slices) + 1, dtype=object)
     start = 0
     for i, s in enumerate(slices):
         rtn[i] = array_2d[start:s]
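For context on the dtype change: the np.object alias was deprecated in NumPy 1.20 and removed in 1.24, so dtype=np.object now raises an AttributeError; the builtin object is the drop-in replacement. A quick illustration (array contents are arbitrary):

    import numpy as np

    # numpy>=1.24 removed the np.object alias; the builtin `object` (or
    # np.object_) declares the same "array of arbitrary Python objects" dtype.
    rtn = np.empty(3, dtype=object)
    rtn[0] = np.arange(2)
    rtn[1] = "any python object"
    rtn[2] = {"key": 1}
    print(rtn.dtype)   # object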
3 changes: 2 additions & 1 deletion arctic/tickstore/tickstore.py
@@ -721,9 +721,10 @@ def _pandas_to_bucket(df, symbol, initial_image):
             rtn[COLUMNS][col] = col_data
         rtn[INDEX] = Binary(
             lz4_compressHC(np.concatenate(
-                ([recs[index_name][0].astype('datetime64[ms]').view('uint64')],
+                ([np.array(recs[index_name][0]).astype('datetime64[ms]').view('uint64')],
                  np.diff(
                      recs[index_name].astype('datetime64[ms]').view('uint64')))).tostring()))

         return rtn, final_image

     @staticmethod
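About the np.array(...) wrapper above: with newer pandas/numpy the first element pulled out of the record array apparently no longer supports the astype/view chain as a bare scalar, so it is routed through an ndarray first. The packing itself stores the first timestamp in epoch milliseconds followed by the deltas; a small sketch with made-up timestamps:

    import numpy as np

    # Made-up nanosecond index, similar to what DataFrame.to_records() yields.
    ts = np.array(['2023-03-29T00:00:00', '2023-03-29T00:00:01', '2023-03-29T00:00:03'],
                  dtype='datetime64[ns]')

    # Milliseconds since the epoch, viewed as uint64 (what the bucket INDEX stores).
    ms = ts.astype('datetime64[ms]').view('uint64')

    # First value followed by the diffs, so a reader can rebuild it with cumsum.
    packed = np.concatenate(([ms[0]], np.diff(ms)))
    print(packed)                           # [1680048000000 1000 2000]
    assert np.all(np.cumsum(packed) == ms)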
53 changes: 28 additions & 25 deletions setup.py
@@ -71,42 +71,45 @@ def run_tests(self):
     cmdclass={'test': PyTest},
     setup_requires=["numpy<1.19.0",
                     "setuptools-git",
-                    ],
+                    ],
     install_requires=["decorator",
                       "enum-compat",
                       "mock",
                       "mockextras",
-                      "pandas<1.1.0",
-                      "numpy<1.19.0",
+                      "pandas<=1.5.3",
+                      "numpy<=1.24.2",
                       "pymongo>=3.6.0, <= 3.11.0",
                       "pytz",
                       "tzlocal",
                       "lz4",
-                      ],
+                      ],
     # Note: pytest >= 4.1.0 is not compatible with pytest-cov < 2.6.1.
     # deprecated
-    tests_require=["mock<=2.0.0",
-                   "mockextras",
-                   "pytest",
-                   "pytest-cov",
-                   "pytest-server-fixtures",
-                   "pytest-timeout",
-                   "pytest-xdist<=1.26.1",
-                   "tomli<2; python_version=='3.6'",
-                   "lz4",
-                   "tzlocal<=1.4; python_version<='3.6'"
-                   ],
+    extras_require={
+        "tests": [
+            "mock<=2.0.0",
+            "mockextras",
+            "pytest",
+            "pytest-cov",
+            "pytest-server-fixtures",
+            "pytest-timeout",
+            "pytest-xdist<=1.26.1",
+            "tomli<2; python_version=='3.6'",
+            "lz4",
+            "tzlocal<=1.4; python_version<='3.6'"
+        ],
+    },
     entry_points={'console_scripts': [
-        'arctic_init_library = arctic.scripts.arctic_init_library:main',
-        'arctic_list_libraries = arctic.scripts.arctic_list_libraries:main',
-        'arctic_delete_library = arctic.scripts.arctic_delete_library:main',
-        'arctic_enable_sharding = arctic.scripts.arctic_enable_sharding:main',
-        'arctic_copy_data = arctic.scripts.arctic_copy_data:main',
-        'arctic_create_user = arctic.scripts.arctic_create_user:main',
-        'arctic_prune_versions = arctic.scripts.arctic_prune_versions:main',
-        'arctic_fsck = arctic.scripts.arctic_fsck:main',
-    ]
-    },
+        'arctic_init_library = arctic.scripts.arctic_init_library:main',
+        'arctic_list_libraries = arctic.scripts.arctic_list_libraries:main',
+        'arctic_delete_library = arctic.scripts.arctic_delete_library:main',
+        'arctic_enable_sharding = arctic.scripts.arctic_enable_sharding:main',
+        'arctic_copy_data = arctic.scripts.arctic_copy_data:main',
+        'arctic_create_user = arctic.scripts.arctic_create_user:main',
+        'arctic_prune_versions = arctic.scripts.arctic_prune_versions:main',
+        'arctic_fsck = arctic.scripts.arctic_fsck:main',
+    ]
+    },
     classifiers=[
         "Development Status :: 4 - Beta",
         "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
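On the setup.py change: setuptools has deprecated tests_require (the old `setup.py test` workflow), so the same dependency list now lives under an extras_require key and is installed on demand. A minimal sketch of the pattern, with an illustrative package name:

    from setuptools import setup

    setup(
        name="example-package",                 # illustrative, not the real name
        install_requires=["pandas<=1.5.3", "numpy<=1.24.2"],
        extras_require={
            # Installed with:  pip install ".[tests]"
            "tests": ["pytest", "pytest-cov"],
        },
    )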
2 changes: 1 addition & 1 deletion tests/unit/store/test_version_store_utils.py
@@ -9,7 +9,7 @@

 def test_split_arrs_empty():
     split = _split_arrs(np.empty(0), [])
-    assert np.all(split == np.empty(0, dtype=np.object))
+    assert np.all(split == np.empty(0, dtype=object))


 def test_split_arrs():
6 changes: 3 additions & 3 deletions tests/unit/tickstore/test_tickstore.py
@@ -40,7 +40,7 @@ def test_mongo_date_range_query():
         call({'sy': 's1', 's': dt(2014, 1, 1, 0, 0, tzinfo=mktz())}, {'e': 1}),
         call({'sy': 's2', 's': dt(2014, 1, 1, 12, 0, tzinfo=mktz())}, {'e': 1})]

-    assert query == {'s': {'$gte': dt(2014, 1, 1, 12, 0, tzinfo=mktz()), '$lte': dt(2014, 1, 3, 0, 0, tzinfo=mktz())}}
+    assert query == {'s': {'$gte': dt(2014, 1, 1, 12, 0, tzinfo=mktz("UTC")), '$lte': dt(2014, 1, 3, 0, 0, tzinfo=mktz())}}


 def test_mongo_date_range_query_asserts():
@@ -92,7 +92,7 @@ def test_tickstore_to_bucket_with_image():
     assert get_coldata(bucket[COLUMNS]['A']) == ([124, 125], [1, 1, 0, 0, 0, 0, 0, 0])
     assert get_coldata(bucket[COLUMNS]['B']) == ([27.2], [0, 1, 0, 0, 0, 0, 0, 0])
     assert get_coldata(bucket[COLUMNS]['D']) == ([0], [1, 0, 0, 0, 0, 0, 0, 0])
-    index = [dt.fromtimestamp(int(i/1000)).replace(tzinfo=mktz(tz)) for i in
+    index = [dt.fromtimestamp(int(i/1000)).astimezone(mktz(tz)).replace(tzinfo=mktz(tz)) for i in
              list(np.cumsum(np.frombuffer(decompress(bucket[INDEX]), dtype='uint64')))]
     assert index == [i['index'] for i in data]
     assert bucket[COLUMNS]['A'][DTYPE] == 'int64'
@@ -157,7 +157,7 @@ def test_tickstore_pandas_to_bucket_image():
     assert np.isnan(values[1])
     assert values[0] == 1 and values[2] == 1
     assert rowmask == [1, 1, 1, 0, 0, 0, 0, 0]
-    index = [dt.fromtimestamp(int(i/1000)).replace(tzinfo=mktz(tz)) for i in
+    index = [dt.fromtimestamp(int(i/1000)).astimezone(mktz(tz)).replace(tzinfo=mktz(tz)) for i in
              list(np.cumsum(np.frombuffer(decompress(bucket[INDEX]), dtype='uint64')))]
     assert index == tick_index
     assert bucket[COLUMNS]['A'][DTYPE] == 'int64'
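The test changes above touch how the expected index is rebuilt from epoch milliseconds: datetime.fromtimestamp() returns a naive datetime in the machine's local time, and .replace(tzinfo=...) only relabels that wall-clock time, whereas .astimezone(...) actually converts the instant, which is presumably why the conversion now happens before the tzinfo is pinned. A small illustration of the difference (offset and timestamp are made up):

    from datetime import datetime, timedelta, timezone

    ts = 1680048000                            # made-up epoch seconds
    target = timezone(timedelta(hours=9))      # stand-in for mktz(tz)

    # Relabel the naive local wall-clock time vs. truly convert the instant.
    relabelled = datetime.fromtimestamp(ts).replace(tzinfo=target)
    converted = datetime.fromtimestamp(ts, tz=timezone.utc).astimezone(target)

    print(relabelled.isoformat())
    print(converted.isoformat())
    # The two only agree when the machine's local zone matches the target zone,
    # so converting before pinning tzinfo makes the expected index machine-independent.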
