
Commit

Merge branch 'master' of https://github.com/qutip/qutip into misc.numpy2.1_win_issue

Ericgig committed Aug 20, 2024
2 parents ff692d8 + e86f131 commit 81c8e03
Showing 5 changed files with 17 additions and 14 deletions.
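
All five files make the same kind of change to np.reshape calls: the shape is passed as an explicit positional sequence instead of the newshape= keyword or unpacked arguments, and order= is spelled out as a keyword rather than passed positionally. Given the branch name, the motivation appears to be NumPy 2.1 compatibility, where the newshape argument is deprecated in favour of shape. A minimal sketch of the two call styles (not part of the commit):

    import numpy as np

    a = np.arange(6)

    # Style removed by this commit (newshape= is deprecated in NumPy 2.1):
    #     np.reshape(a, newshape=(2, 3), order="F")
    # Portable style used on the added lines: shape positional, order as a keyword.
    b = np.reshape(a, (2, 3), order="F")
    print(b)   # [[0 2 4]
               #  [1 3 5]]
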
11 changes: 7 additions & 4 deletions qutip/core/superop_reps.py
@@ -166,9 +166,10 @@ def kraus_to_choi(kraus_ops: list[Qobj]) -> Qobj:
     choi_dims = [kraus_ops[0].dims] * 2
     # transform a list of Qobj matrices list[sum_ij k_ij |i><j|]
     # into an array of array vectors sum_ij k_ij |i, j>> = sum_I k_I |I>>
-    kraus_vectors = np.asarray(
-        [np.reshape(kraus_op.full(), len_op, "F") for kraus_op in kraus_ops]
-    )
+    kraus_vectors = np.asarray([
+        np.reshape(kraus_op.full(), len_op, order="F")
+        for kraus_op in kraus_ops
+    ])
     # sum_{I} |k_I|^2 |I>><<I|
     choi_array = np.tensordot(
         kraus_vectors, kraus_vectors.conj(), axes=([0], [0])
@@ -205,7 +206,9 @@ def _super_tofrom_choi(q_oper):
     d1 = np.prod(flatten(new_dims[1]))
     s0 = np.prod(dims[0][0])
     s1 = np.prod(dims[1][1])
-    data = data.reshape([s0, s1, s0, s1]).transpose(3, 1, 2, 0).reshape(d0, d1)
+    data = (
+        data.reshape([s0, s1, s0, s1]).transpose(3, 1, 2, 0).reshape([d0, d1])
+    )
     return Qobj(data,
                 dims=new_dims,
                 superrep='super' if q_oper.superrep == 'choi' else 'choi',
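
For context on the kraus_to_choi hunk above: the comments describe column-stacking each Kraus operator K into a vector |K>> and summing the outer products |K>><<K| over the Kraus index, which is exactly what the asarray/tensordot pair computes. A self-contained sketch with a hypothetical single-qubit bit-flip channel (the operators and probability are illustrative, not taken from QuTiP):

    import numpy as np

    p = 0.25
    kraus_ops = [np.sqrt(1 - p) * np.eye(2),
                 np.sqrt(p) * np.array([[0., 1.], [1., 0.]])]

    len_op = kraus_ops[0].size                    # d*d = 4
    # column-stack each K into |K>> (Fortran order = stack the columns)
    kraus_vectors = np.asarray([
        np.reshape(k, len_op, order="F") for k in kraus_ops
    ])
    # sum over the Kraus index of |K>><<K|
    choi = np.tensordot(kraus_vectors, kraus_vectors.conj(), axes=([0], [0]))
    print(choi.shape)       # (4, 4)
    print(np.trace(choi))   # ~2.0: equals d for a trace-preserving map
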
2 changes: 1 addition & 1 deletion qutip/partial_transpose.py
@@ -55,7 +55,7 @@ def _partial_transpose_dense(rho, mask):
     Very fast for dense problems.
     """
     nsys = len(mask)
-    pt_dims = np.arange(2 * nsys).reshape(2, nsys).T
+    pt_dims = np.arange(2 * nsys).reshape([2, nsys]).T
     pt_idx = np.concatenate([[pt_dims[n, mask[n]] for n in range(nsys)],
                              [pt_dims[n, 1 - mask[n]] for n in range(nsys)]])

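
The pt_dims/pt_idx bookkeeping above builds an axis permutation that swaps the row and column axes of every masked subsystem, which is what a partial transpose does on the density matrix reshaped into per-subsystem axes. A small worked example for two subsystems with only the second one transposed (the mask value is illustrative):

    import numpy as np

    mask = [0, 1]          # transpose only the second subsystem
    nsys = len(mask)

    pt_dims = np.arange(2 * nsys).reshape([2, nsys]).T   # [[0, 2], [1, 3]]
    pt_idx = np.concatenate([[pt_dims[n, mask[n]] for n in range(nsys)],
                             [pt_dims[n, 1 - mask[n]] for n in range(nsys)]])
    print(pt_idx)          # [0 3 2 1]: subsystem 2's row/column axes are swapped
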
10 changes: 5 additions & 5 deletions qutip/solver/sode/_noise.py
@@ -65,7 +65,7 @@ def __init__(self, noise, tlist, n_sc_ops, heterodyne, is_measurement):
                     "Noise is not of the expected shape: "
                     f"{(n_sc_ops/2, 2, len(tlist)-1)}"
                 )
-            noise = np.reshape(noise, (n_sc_ops, len(tlist)-1), "C")
+            noise = np.reshape(noise, (n_sc_ops, len(tlist)-1), order="C")
         else:
             if noise.shape != (n_sc_ops, len(tlist)-1):
                 raise ValueError(
@@ -107,7 +107,7 @@ def dw(self, dt):
         Ito integral I(i).
         """
         N = int(np.round(dt / self.dt))
-        return self.noise.reshape(-1, N, self.num).sum(axis=1)
+        return self.noise.reshape([-1, N, self.num]).sum(axis=1)

     def dz(self, dt):
         """
@@ -117,7 +117,7 @@ def dz(self, dt):
         return (
             np.einsum(
                 "ijk,j->ik",
-                self.noise.reshape(-1, N, self.num),
+                self.noise.reshape([-1, N, self.num]),
                 np.arange(N - 0.5, 0, -1),
             )
             * self.dt
@@ -132,11 +132,11 @@ def dW(self, dt):
         if noise.shape[0] % N:
             noise = noise[: -(noise.shape[0] % N)]
         out = np.empty((noise.shape[0] // N, 2, self.num), dtype=float)
-        out[:, 0, :] = noise.reshape(-1, N, self.num).sum(axis=1)
+        out[:, 0, :] = noise.reshape([-1, N, self.num]).sum(axis=1)
         out[:, 1, :] = (
             np.einsum(
                 "ijk,j->ik",
-                self.noise.reshape(-1, N, self.num),
+                self.noise.reshape([-1, N, self.num]),
                 np.arange(N - 0.5, 0, -1),
             )
             * self.dt
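
The reshape([-1, N, self.num]).sum(axis=1) pattern above groups the stored fine-grained Wiener increments into blocks of N consecutive steps and sums each block, i.e. it coarse-grains the noise to the requested step dt. A toy example, independent of the solver classes (the sizes below are made up):

    import numpy as np

    rng = np.random.default_rng(0)
    num = 2          # number of stochastic operators
    n_steps = 12     # fine steps stored in the noise array
    N = 3            # fine steps per coarse step

    noise = rng.normal(0.0, 0.1, size=(n_steps, num))
    coarse = noise.reshape([-1, N, num]).sum(axis=1)     # shape (4, 2)

    # same as summing each block of N rows explicitly
    assert np.allclose(coarse[0], noise[:N].sum(axis=0))
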
2 changes: 1 addition & 1 deletion qutip/tests/core/data/test_expect.py
@@ -49,7 +49,7 @@ class TestExpectSuper(BinaryOpMixin):
     def op_numpy(self, op, state):
         n = np.sqrt(state.shape[0]).astype(int)
         out_shape = (n, n)
-        return np.trace(np.reshape(op@state, newshape=out_shape))
+        return np.trace(np.reshape(op@state, out_shape))

     _dim = 100
     _super_ket = pytest.param((_dim, 1), id="super_ket")
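
The reference op_numpy above treats the state as a vectorised density matrix: applying the superoperator gives another vectorised matrix, which is reshaped back to n x n so its trace can be taken (the trace is the same under either stacking convention). A toy version, independent of the QuTiP data layer:

    import numpy as np

    n = 2
    rho = np.array([[0.75, 0.1], [0.1, 0.25]])
    state = rho.reshape((n * n, 1))    # vectorised density matrix
    op = np.eye(n * n)                 # identity superoperator

    out_shape = (n, n)
    print(np.trace(np.reshape(op @ state, out_shape)))   # 1.0, the trace of rho
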
6 changes: 3 additions & 3 deletions qutip/tests/core/data/test_reshape.py
@@ -20,7 +20,7 @@ def op_numpy(self, matrix):
 class TestColumnStack(UnaryOpMixin):
     def op_numpy(self, matrix):
         out_shape = (matrix.shape[0]*matrix.shape[1], 1)
-        return np.reshape(matrix, newshape=out_shape, order='F')
+        return np.reshape(matrix, out_shape, order='F')

     specialisations = [
         pytest.param(data.column_stack_csr, CSR, CSR),
@@ -32,7 +32,7 @@ def op_numpy(self, matrix):
 class TestColumnUnstack(UnaryOpMixin):
     def op_numpy(self, matrix, rows):
         out_shape = (rows, matrix.shape[0]*matrix.shape[1]//rows)
-        return np.reshape(matrix, newshape=out_shape, order='F')
+        return np.reshape(matrix, out_shape, order='F')

     shapes = [
         (pytest.param((10, 1), id="ket"), ),
@@ -83,7 +83,7 @@ def generate_incorrect_rows_raises(self, metafunc):
 class TestReshape(UnaryOpMixin):
     def op_numpy(self, matrix, rows, columns):
         out_shape = (rows, columns)
-        return np.reshape(matrix, newshape=out_shape, order='C')
+        return np.reshape(matrix, out_shape, order='C')

     # All matrices should have the same number of elements in total, so we can
     # use the same (rows, columns) parametrisation for each input.
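
The test references above define column-stacking as a Fortran-order reshape to a single column and column-unstacking as its inverse. A small example of what those references compute:

    import numpy as np

    matrix = np.array([[1, 2],
                       [3, 4]])

    stacked = np.reshape(matrix, (matrix.shape[0] * matrix.shape[1], 1), order='F')
    print(stacked.ravel())     # [1 3 2 4]: the columns stacked on top of each other

    unstacked = np.reshape(stacked, (2, 2), order='F')
    print(np.array_equal(unstacked, matrix))   # True
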
