diff --git a/qutip/core/superop_reps.py b/qutip/core/superop_reps.py
index 4413cfafb5..029aeab2cc 100644
--- a/qutip/core/superop_reps.py
+++ b/qutip/core/superop_reps.py
@@ -166,9 +166,10 @@ def kraus_to_choi(kraus_ops: list[Qobj]) -> Qobj:
     choi_dims = [kraus_ops[0].dims] * 2
     # transform a list of Qobj matrices list[sum_ij k_ij |i>> = sum_I k_I |I>>
-    kraus_vectors = np.asarray(
-        [np.reshape(kraus_op.full(), len_op, "F") for kraus_op in kraus_ops]
-    )
+    kraus_vectors = np.asarray([
+        np.reshape(kraus_op.full(), len_op, order="F")
+        for kraus_op in kraus_ops
+    ])
     # sum_{I} |k_I|^2 |I>><<I|
                 "ijk,j->ik",
-                self.noise.reshape(-1, N, self.num),
+                self.noise.reshape([-1, N, self.num]),
                 np.arange(N - 0.5, 0, -1),
             )
             * self.dt
@@ -132,11 +132,11 @@ def dW(self, dt):
         if noise.shape[0] % N:
             noise = noise[: -(noise.shape[0] % N)]
         out = np.empty((noise.shape[0] // N, 2, self.num), dtype=float)
-        out[:, 0, :] = noise.reshape(-1, N, self.num).sum(axis=1)
+        out[:, 0, :] = noise.reshape([-1, N, self.num]).sum(axis=1)
         out[:, 1, :] = (
             np.einsum(
                 "ijk,j->ik",
-                self.noise.reshape(-1, N, self.num),
+                self.noise.reshape([-1, N, self.num]),
                 np.arange(N - 0.5, 0, -1),
             )
             * self.dt
diff --git a/qutip/tests/core/data/test_expect.py b/qutip/tests/core/data/test_expect.py
index c43da02def..47b8c79ce0 100644
--- a/qutip/tests/core/data/test_expect.py
+++ b/qutip/tests/core/data/test_expect.py
@@ -49,7 +49,7 @@ class TestExpectSuper(BinaryOpMixin):
     def op_numpy(self, op, state):
         n = np.sqrt(state.shape[0]).astype(int)
         out_shape = (n, n)
-        return np.trace(np.reshape(op@state, newshape=out_shape))
+        return np.trace(np.reshape(op@state, out_shape))
 
     _dim = 100
     _super_ket = pytest.param((_dim, 1), id="super_ket")
diff --git a/qutip/tests/core/data/test_reshape.py b/qutip/tests/core/data/test_reshape.py
index 5f23b06b83..f9e2947baa 100644
--- a/qutip/tests/core/data/test_reshape.py
+++ b/qutip/tests/core/data/test_reshape.py
@@ -20,7 +20,7 @@ def op_numpy(self, matrix):
 class TestColumnStack(UnaryOpMixin):
     def op_numpy(self, matrix):
         out_shape = (matrix.shape[0]*matrix.shape[1], 1)
-        return np.reshape(matrix, newshape=out_shape, order='F')
+        return np.reshape(matrix, out_shape, order='F')
 
     specialisations = [
         pytest.param(data.column_stack_csr, CSR, CSR),
@@ -32,7 +32,7 @@ def op_numpy(self, matrix):
 class TestColumnUnstack(UnaryOpMixin):
     def op_numpy(self, matrix, rows):
         out_shape = (rows, matrix.shape[0]*matrix.shape[1]//rows)
-        return np.reshape(matrix, newshape=out_shape, order='F')
+        return np.reshape(matrix, out_shape, order='F')
 
     shapes = [
         (pytest.param((10, 1), id="ket"), ),
@@ -83,7 +83,7 @@ def generate_incorrect_rows_raises(self, metafunc):
 class TestReshape(UnaryOpMixin):
     def op_numpy(self, matrix, rows, columns):
         out_shape = (rows, columns)
-        return np.reshape(matrix, newshape=out_shape, order='C')
+        return np.reshape(matrix, out_shape, order='C')
 
     # All matrices should have the same number of elements in total, so we can
     # use the same (rows, columns) parametrisation for each input.