Skip to content

Commit

Permalink
Merge branch 'devel' into robust_us
Browse files Browse the repository at this point in the history
  • Loading branch information
cwehmeyer committed Jun 7, 2016
2 parents be3f399 + d968f89 commit 899d150
Show file tree
Hide file tree
Showing 24 changed files with 117 additions and 117 deletions.
1 change: 1 addition & 0 deletions devtools/ci/jenkins/update_versions_json.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
#!/usr/bin/env python
# author: marscher
# purpose: update version.json file on new software release.
from __future__ import print_function
Expand Down
7 changes: 5 additions & 2 deletions devtools/ci/travis/after_success.sh
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
#!/bin/bash
if [ "$TRAVIS_PULL_REQUEST" = true ]; then

# The pull request number if the current job is a pull request, "false" if it's not a pull request.
if [[ ! "$TRAVIS_PULL_REQUEST" == "false" ]]; then
echo "This is a pull request. No deployment will be done."; exit 0
fi


# For builds not triggered by a pull request this is the name of the branch currently being built;
# whereas for builds triggered by a pull request this is the name of the branch targeted by the pull request (in many cases this will be master).
if [ "$TRAVIS_BRANCH" != "devel" ]; then
echo "No deployment on BRANCH='$TRAVIS_BRANCH'"; exit 0
fi
Expand Down
2 changes: 1 addition & 1 deletion devtools/ci/travis/dev_pkgs_del_old.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
)
to_delete = []

while len(sorted_by_version) > N_KEEP:
while len(sorted_by_version) > n_keep:
to_delete.append(sorted_by_version.pop())

# remove old releases from anaconda.org
Expand Down
1 change: 0 additions & 1 deletion devtools/conda-recipe/bld.bat
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,4 @@ if not defined APPVEYOR (
cmd /E:ON /V:ON /C %APPVEYOR_BUILD_FOLDER%\devtools\ci\appveyor\run_with_env.cmd "%PYTHON%" setup.py install
)
set build_status=%ERRORLEVEL%
"%PYTHON%" devtools\conda-recipe\dev_version.py
if %build_status% == 1 exit 1
1 change: 0 additions & 1 deletion devtools/conda-recipe/build.sh
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
#!/bin/bash
$PYTHON setup.py install
$PYTHON devtools/conda-recipe/dev_version.py
5 changes: 0 additions & 5 deletions devtools/conda-recipe/dev_version.py

This file was deleted.

2 changes: 1 addition & 1 deletion devtools/conda-recipe/meta.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package:
name: pyemma-dev
#version: 0
version: {{ environ.get('GIT_DESCRIBE_TAG','')+environ.get('GIT_BUILD_STR', 'unknown')[1:] }}
source:
path: ../..

Expand Down
10 changes: 10 additions & 0 deletions doc/source/CHANGELOG.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,16 @@
Changelog
=========

2.2.1 ()
--------

**New features**:
- ...

**Fixes**:

- clustering: fixed KMeans with minRMSD metric. #814

2.2 (5-17-16)
-------------

Expand Down
16 changes: 8 additions & 8 deletions pyemma/coordinates/clustering/src/kmeans.c
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ static PyObject *cluster(PyObject *self, PyObject *args) {
int i, j;
int closest_center_index;
npy_intp dims[2];
float (*distance)(float*, float*, size_t, float*, float*);
float (*distance)(float*, float*, size_t, float*, float*, float*);
PyObject* return_new_centers;
debug = 0;
if(debug) printf("KMEANS: \n----------- cluster called ----------\n");
Expand Down Expand Up @@ -160,7 +160,7 @@ static PyObject *cluster(PyObject *self, PyObject *args) {
for (i = 0; i < N_frames; i++) {
mindist = FLT_MAX;
for(j = 0; j < N_centers; ++j) {
d = distance(&chunk[i*dim], centers[j], dim, buffer_a, buffer_b);
d = distance(&chunk[i*dim], centers[j], dim, buffer_a, buffer_b, NULL);
if(d<mindist) {
mindist = d;
closest_center_index = j;
Expand Down Expand Up @@ -220,7 +220,7 @@ static PyObject* costFunction(PyObject *self, PyObject *args) {
PyObject *ret_cost;
Py_ssize_t dim, n_frames;
PyArrayObject *np_data, *np_centers;
float (*distance)(float*, float*, size_t, float*, float*);
float (*distance)(float*, float*, size_t, float*, float*, float*);
float *buffer_a, *buffer_b;

k = 0; r = 0; i = 0; j = 0; value = 0.0; d = 0.0;
Expand Down Expand Up @@ -250,7 +250,7 @@ static PyObject* costFunction(PyObject *self, PyObject *args) {
for(r = 0; r < k; r++) {
centers = PyArray_DATA(PyList_GetItem(np_centers,r));
for(i = 0; i < n_frames; i++) {
value += pow(distance(&data[i*dim], &centers[0], dim, buffer_a, buffer_b), 2);
value += pow(distance(&data[i*dim], &centers[0], dim, buffer_a, buffer_b, NULL), 2);
}
}
ret_cost = Py_BuildValue("f", value);
Expand Down Expand Up @@ -281,7 +281,7 @@ static PyObject* initCentersKMpp(PyObject *self, PyObject *args) {
float *buffer_a, *buffer_b;
void *arr_data;
float *squared_distances;
float (*distance)(float*, float*, size_t, float*, float*);
float (*distance)(float*, float*, size_t, float*, float*, float*);

ret_init_centers = Py_BuildValue("");
py_callback_result = NULL;
Expand Down Expand Up @@ -365,7 +365,7 @@ static PyObject* initCentersKMpp(PyObject *self, PyObject *args) {
/* squared_distances[i] = distance(x_j, x_i)*distance(x_j, x_i) */
for(i = 0; i < n_frames; i++) {
if(i != first_center_index) {
d = pow(distance(&data[i*dim], &data[first_center_index*dim], dim, buffer_a, buffer_b), 2);
d = pow(distance(&data[i*dim], &data[first_center_index*dim], dim, buffer_a, buffer_b, NULL), 2);
squared_distances[i] = d;
/* build up dist_sum which keeps the sum of all squared distances */
dist_sum += d;
Expand Down Expand Up @@ -407,7 +407,7 @@ static PyObject* initCentersKMpp(PyObject *self, PyObject *args) {
for(j = 0; j < n_trials; j++) {
if(next_center_candidates[j] == -1) break;
if(next_center_candidates[j] != i) {
d = pow(distance(&data[i*dim], &data[next_center_candidates[j]*dim], dim, buffer_a, buffer_b), 2);
d = pow(distance(&data[i*dim], &data[next_center_candidates[j]*dim], dim, buffer_a, buffer_b, NULL), 2);
if(d < squared_distances[i]) {
next_center_candidates_potential[j] += d;
} else {
Expand Down Expand Up @@ -464,7 +464,7 @@ static PyObject* initCentersKMpp(PyObject *self, PyObject *args) {
/* the new one. */
for(i = 0; i < n_frames; i++) {
if(!taken_points[i]) {
d = pow(distance(&data[i*dim], &data[best_candidate*dim], dim, buffer_a, buffer_b), 2);
d = pow(distance(&data[i*dim], &data[best_candidate*dim], dim, buffer_a, buffer_b, NULL), 2);
if(d < squared_distances[i]) {
dist_sum += d - squared_distances[i];
squared_distances[i] = d;
Expand Down
15 changes: 13 additions & 2 deletions pyemma/coordinates/data/_base/datasource.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,19 @@ def filenames(self, filename_list):

# ensure all trajs have same dim
if not np.unique(ndims).size == 1:
raise ValueError("input data has different dimensions!"
" Dimensions are = %s" % zip(filename_list, ndims))
# group files by their dimensions to give user indicator
ndims = np.array(ndims)
filename_list = np.asarray(filename_list)
sort_inds = np.argsort(ndims)
import itertools, operator
res = {}
for dim, files in itertools.groupby(zip(ndims[sort_inds], filename_list[sort_inds]),
operator.itemgetter(0)):
res[dim] = list(f[1] for f in files)

raise ValueError("Input data has different dimensions ({dims})!"
" Files grouped by dimensions: {groups}".format(dims=res.keys(),
groups=res))

self._ndim = ndims[0]
self._lengths = lengths
Expand Down
5 changes: 4 additions & 1 deletion pyemma/coordinates/data/featurization/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,10 @@ def _describe_atom(topology, index):
:return:
"""
at = topology.atom(index)
return "%s %i %s %i" % (at.residue.name, at.residue.resSeq, at.name, at.index)
if topology.n_chains > 1:
return "%s %i %s %i %i" % (at.residue.name, at.residue.resSeq, at.name, at.index, at.residue.chain.index )
else:
return "%s %i %s %i" % (at.residue.name, at.residue.resSeq, at.name, at.index)


def _catch_unhashable(x):
Expand Down
42 changes: 37 additions & 5 deletions pyemma/coordinates/tests/test_featurizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

# from pyemma.coordinates.data import featurizer as ft
from pyemma.coordinates.data.featurization.featurizer import MDFeaturizer, CustomFeature
from pyemma.coordinates.data.featurization.util import _parse_pairwise_input
from pyemma.coordinates.data.featurization.util import _parse_pairwise_input, _describe_atom
from six.moves import range
import pkg_resources

Expand Down Expand Up @@ -63,10 +63,10 @@
ATOM 001 CA ASN A 01 1.0000 0.000 0.0000 1.00 0.000 C
ATOM 002 MW ACE A 02 2.0000 0.000 0.0000 1.00 0.000 X
ATOM 003 CA ASN A 03 3.0000 0.000 0.0000 1.00 0.000 C
ATOM 004 MW ACE A 04 4.0000 0.000 0.0000 1.00 0.000 X
ATOM 005 CA ASN A 05 5.0000 0.000 0.0000 1.00 0.000 C
ATOM 006 MW ACE A 06 6.0000 0.000 0.0000 1.00 0.000 X
ATOM 007 CA ASN A 07 7.0000 0.000 0.0000 1.00 0.000 C
ATOM 004 MW ACE B 04 4.0000 0.000 0.0000 1.00 0.000 X
ATOM 005 CA ASN B 05 5.0000 0.000 0.0000 1.00 0.000 C
ATOM 006 MW ACE B 06 6.0000 0.000 0.0000 1.00 0.000 X
ATOM 007 CA ASN B 07 7.0000 0.000 0.0000 1.00 0.000 C
"""

def verbose_assertion_minrmsd(ref_Y, test_Y, test_obj):
Expand Down Expand Up @@ -744,6 +744,38 @@ def test_two_redundants_overlap(self):
)))
assert np.allclose(dist_list, _parse_pairwise_input(group1, group2, self.feat._logger))

class TestUtils(unittest.TestCase):
@classmethod
def setUpClass(cls):
import tempfile
cls.bogus_geom_pdbfile = tempfile.mkstemp(suffix=".pdb")[1]
print(cls.bogus_geom_pdbfile)
with open(cls.bogus_geom_pdbfile, 'w') as fh:
fh.write(bogus_geom_pdbfile)
super(TestUtils, cls).setUpClass()


@classmethod
def tearDownClass(cls):
try:
os.unlink(cls.bogus_geom_pdbfile)
except EnvironmentError:
pass

super(TestUtils, cls).tearDownClass()

@classmethod
def setUp(self):
self.traj = mdtraj.load(self.bogus_geom_pdbfile)

def test_describe_atom(self):
str1 = _describe_atom(self.traj.topology, 0)
str2 = _describe_atom(self.traj.topology,self.traj.n_atoms-1)
assert len(str1.split()) >=4
assert len(str2.split()) >=4
assert str1.split()[-1] == '0'
assert str2.split()[-1] == '1'

class TestStaticMethods(unittest.TestCase):

def setUp(self):
Expand Down
3 changes: 3 additions & 0 deletions pyemma/coordinates/tests/test_kmeans.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,5 +172,8 @@ def test_kmeans_convex_hull(self):
self.assertGreaterEqual(np.inner(np.array([0, -144337500, -102061250], dtype=float), res) + 353560531, 0)
self.assertGreaterEqual(np.inner(np.array([0, 0, -10000], dtype=float), res) + 17321, 0)

def test_with_n_jobs_minrmsd(self):
kmeans = cluster_kmeans(np.random.rand(500,3), 10, metric='minRMSD')

if __name__ == "__main__":
unittest.main()
13 changes: 12 additions & 1 deletion pyemma/coordinates/tests/test_numpyfilereader.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,18 @@ def test_usecols(self):
with it:
for x in it:
np.testing.assert_equal(x, self.d2[:, cols])


def test_different_shapes_value_error(self):
with tempfile.NamedTemporaryFile(delete=False, suffix='.npy') as f:
x=np.zeros((3, 42))
np.save(f.name, x)
myfiles = self.files2d[:]
myfiles.insert(1, f.name)

with self.assertRaises(ValueError) as cm:
NumPyFileReader(myfiles)
self.assertIn("different dimensions", cm.exception.args[0])
print (cm.exception.args)


if __name__ == "__main__":
Expand Down
13 changes: 7 additions & 6 deletions pyemma/msm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,13 +90,14 @@
# Low-level MSM functions (imported from msmtools)
# backward compatibility to PyEMMA 1.2.x
# TODO: finally remove this stuff...
import warnings as _warnings
from pyemma.util.exceptions import PyEMMA_DeprecationWarning as _dep_warning
with _warnings.catch_warnings():
_warnings.filterwarnings('ignore', category=_dep_warning)
from . import analysis, estimation, generation, dtraj, flux
from pyemma.util._ext.shimmodule import ShimModule
analysis = ShimModule(src='pyemma.msm.analysis', mirror='msmtools.analysis')
estimation = ShimModule(src='pyemma.msm.estimation', mirror='msmtools.estimation')
generation = ShimModule(src='pyemma.msm.generation', mirror='msmtools.generation')
dtraj = ShimModule(src='pyemma.msm.dtraj', mirror='msmtools.dtraj')
io = dtraj
del _warnings, _dep_warning
flux = ShimModule(src='pyemma.msm.flux', mirror='msmtools.flux')
del ShimModule
######################################################
from msmtools.analysis.dense.pcca import PCCA

Expand Down
12 changes: 0 additions & 12 deletions pyemma/msm/analysis/__init__.py

This file was deleted.

12 changes: 0 additions & 12 deletions pyemma/msm/dtraj/__init__.py

This file was deleted.

11 changes: 0 additions & 11 deletions pyemma/msm/estimation/__init__.py

This file was deleted.

11 changes: 0 additions & 11 deletions pyemma/msm/flux/__init__.py

This file was deleted.

11 changes: 0 additions & 11 deletions pyemma/msm/generation/__init__.py

This file was deleted.

10 changes: 0 additions & 10 deletions pyemma/msm/tests/test_msm_lowlevel_deprecation.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
from pyemma.util.exceptions import PyEMMA_DeprecationWarning


@unittest.skipIf(sys.version_info.major == 2, "disabled on py2 for nosetest stupidness")
class TestShowDeprecationWarningOnLowLevelAPIUsage(unittest.TestCase):

@classmethod
Expand Down Expand Up @@ -37,15 +36,6 @@ def test_analysis(self):
self.assertIsInstance(cm[0].message, PyEMMA_DeprecationWarning)
self.assertIn('analysis', cm[0].message.args[0])

@unittest.skipIf(sys.version_info.major == 2, "not on py2")
def test_warn_was_called(self):
shim_mod = sys.modules['pyemma.msm.analysis']
with mock.patch.object(shim_mod, '_warn') as m:
from pyemma.msm import analysis
analysis.is_transition_matrix

m.assert_called_once()

def test_estimation(self):
with warnings.catch_warnings(record=True) as cm:
warnings.simplefilter("always")
Expand Down
Loading

0 comments on commit 899d150

Please sign in to comment.