
Increase the usage of augmented assignment statements

Open · elfring opened this issue 4 years ago • 5 comments

:eyes: Some source code analysis tools can help to find opportunities for improving software components. :thought_balloon: I propose to increase the usage of augmented assignment statements accordingly.
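
For instance, a statement of the form x = x + y becomes x += y. A minimal standalone sketch of the pattern (hypothetical variable names, mirroring the hunks below):

import numpy as np

total_min = np.ones(4)
mask = np.array([True, False, True, False])

# Before: compute a new array and rebind the name.
total_min = total_min - mask.astype(np.float64)

# After: the augmented form proposed below; for NumPy arrays this
# updates the existing buffer in place.
total_min -= mask.astype(np.float64)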

diff --git a/stumpy/aampdist_snippets.py b/stumpy/aampdist_snippets.py
index 5584e44..c69771a 100644
--- a/stumpy/aampdist_snippets.py
+++ b/stumpy/aampdist_snippets.py
@@ -237,7 +237,7 @@ def aampdist_snippets(
     for i in range(k):
         mask = snippets_profiles[i] <= total_min
         snippets_fractions[i] = np.sum(mask) / total_min.shape[0]
-        total_min = total_min - mask.astype(np.float64)
+        total_min -= mask.astype(np.float64)
         slices = _get_mask_slices(mask)
         snippets_regimes_list.append(slices)
 
diff --git a/stumpy/chains.py b/stumpy/chains.py
index f111762..ebd87f4 100644
--- a/stumpy/chains.py
+++ b/stumpy/chains.py
@@ -125,7 +125,7 @@ def allc(IL, IR):
                 else:
                     j = IR[j]
                     L[j] = -1
-                    L[i] = L[i] + 1
+                    L[i] += 1
                     C.append(j)
             S.update([tuple(C)])
     C = atsc(IL, IR, L.argmax())
diff --git a/stumpy/mstump.py b/stumpy/mstump.py
index a9341c1..f788cc5 100644
--- a/stumpy/mstump.py
+++ b/stumpy/mstump.py
@@ -331,7 +331,7 @@ def subspace(T, m, subseq_idx, nn_idx, k, include=None, discords=False, normaliz
     disc_neighbors = _discretize(neighbors[S], bins)
     n_val = np.unique(disc_subseqs - disc_neighbors).shape[0]
     bit_size = n_bit * (T.shape[0] * m * 2 - k * m)
-    bit_size = bit_size + k * m * np.log2(n_val) + n_val * n_bit
+    bit_size += k * m * np.log2(n_val) + n_val * n_bit
 
     return S
 
@@ -684,7 +684,7 @@ def _compute_PI(d, idx, D, D_prime, range_start, P, I):
     """
     D_prime[:] = 0.0
     for i in range(d):
-        D_prime = D_prime + np.sqrt(D[i])
+        D_prime += np.sqrt(D[i])
 
         min_index = np.argmin(D_prime)
         pos = idx - range_start
diff --git a/stumpy/scraamp.py b/stumpy/scraamp.py
index 93ddf71..6bedb78 100644
--- a/stumpy/scraamp.py
+++ b/stumpy/scraamp.py
@@ -143,7 +143,7 @@ def _prescraamp(
                 I[j + k] = i + k
         QT_j = QT_j_prime
         for k in range(1, min(s, i + 1, j + 1)):
-            QT_j = QT_j - T_B[i - k + m] * T_A[j - k + m] + T_B[i - k] * T_A[j - k]
+            QT_j -= T_B[i - k + m] * T_A[j - k + m] - T_B[i - k] * T_A[j - k]
             if (
                 not T_A_subseq_isfinite[i - k] or not T_B_subseq_isfinite[j - k]
             ):  # pragma: no cover
diff --git a/stumpy/scrump.py b/stumpy/scrump.py
index bc2edd5..fcd1b37 100644
--- a/stumpy/scrump.py
+++ b/stumpy/scrump.py
@@ -133,7 +133,7 @@ def _prescrump(
                 I[j + k] = i + k
         QT_j = QT_j_prime
         for k in range(1, min(s, i + 1, j + 1)):
-            QT_j = QT_j - T_B[i - k + m] * T_A[j - k + m] + T_B[i - k] * T_A[j - k]
+            QT_j -= T_B[i - k + m] * T_A[j - k + m] - T_B[i - k] * T_A[j - k]
             D_squared = core._calculate_squared_distance(
                 m,
                 QT_j,
diff --git a/stumpy/snippets.py b/stumpy/snippets.py
index 623ae99..ac4e113 100644
--- a/stumpy/snippets.py
+++ b/stumpy/snippets.py
@@ -262,7 +262,7 @@ def snippets(
     for i in range(k):
         mask = snippets_profiles[i] <= total_min
         snippets_fractions[i] = np.sum(mask) / total_min.shape[0]
-        total_min = total_min - mask.astype(np.float64)
+        total_min -= mask.astype(np.float64)
         slices = _get_mask_slices(mask)
         snippets_regimes_list.append(slices)
 
diff --git a/stumpy/stimp.py b/stumpy/stimp.py
index 4f60688..e3fd265 100644
--- a/stumpy/stimp.py
+++ b/stumpy/stimp.py
@@ -109,7 +109,7 @@ def _normalize_pan(pan, ms, bfs_indices, n_processed):
     """
     idx = bfs_indices[:n_processed]
     norm = 1.0 / np.sqrt(2 * ms[:n_processed])
-    pan[idx] = pan[idx] * norm[:, np.newaxis]
+    pan[idx] *= norm[:, np.newaxis]
 
 
 def _contrast_pan(pan, threshold, bfs_indices, n_processed):
diff --git a/stumpy/stump.py b/stumpy/stump.py
index fd457a4..e722237 100644
--- a/stumpy/stump.py
+++ b/stumpy/stump.py
@@ -176,7 +176,7 @@ def _compute_diagonal(
                 #     * (T_A[i + m - 1] - μ_Q_m_1[i])
                 #     - (T_B[i + k - 1] - M_T_m_1[i + k]) * (T_A[i - 1] - μ_Q_m_1[i])
                 # )
-                cov = cov + constant * (
+                cov += constant * (
                     cov_a[i + k] * cov_b[i] - cov_c[i + k] * cov_d[i]
                 )
 
diff --git a/tests/naive.py b/tests/naive.py
index a3a77e9..03e4ef9 100644
--- a/tests/naive.py
+++ b/tests/naive.py
@@ -1132,7 +1132,7 @@ def mpdist_snippets(
     for i in range(k):
         mask = snippets_profiles[i] <= total_min
         snippets_fractions[i] = np.sum(mask) / total_min.shape[0]
-        total_min = total_min - mask.astype(float)
+        total_min -= mask.astype(float)
         slices = _get_mask_slices(mask)
         snippets_regimes_list.append(slices)
 
@@ -1218,7 +1218,7 @@ def aampdist_snippets(
     for i in range(k):
         mask = snippets_profiles[i] <= total_min
         snippets_fractions[i] = np.sum(mask) / total_min.shape[0]
-        total_min = total_min - mask.astype(float)
+        total_min -= mask.astype(float)
         slices = _get_mask_slices(mask)
         snippets_regimes_list.append(slices)
 
@@ -1359,7 +1359,7 @@ def normalize_pan(pan, ms, bfs_indices, n_processed):
     idx = bfs_indices[:n_processed]
     for i in range(n_processed):
         norm = 1.0 / np.sqrt(2 * ms[i])
-        pan[idx] = pan[idx] * norm
+        pan[idx] *= norm
 
 
 def contrast_pan(pan, threshold, bfs_indices, n_processed):
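
Note for reviewers: the two forms are not always interchangeable. For NumPy arrays, augmented assignment mutates the operand in place, and the regrouped arithmetic in the scraamp.py and scrump.py hunks evaluates in a different order. A minimal standalone sketch (not STUMPY code) of both effects:

import numpy as np

# 1) Aliasing: x -= y updates the existing buffer, while x = x - y
# rebinds the name to a new array and leaves other references untouched.
buf = np.zeros(3)
alias = buf
alias = alias + 1.0
print(buf)           # [0. 0. 0.]  (buf unchanged)
alias = buf
alias += 1.0
print(buf)           # [1. 1. 1.]  (shared buffer mutated)

# 2) Rounding: q = q - a + b evaluates (q - a) + b, whereas the
# rewritten q -= a - b evaluates q - (a - b); floating-point addition
# is not associative, so the last bits can differ.
q, a, b = 1.0, 1e16, 1e16
print(q - a + b)     # 0.0
print(q - (a - b))   # 1.0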

elfring · Nov 21 '21 13:11

@elfring Apologies for the delayed response, as I didn’t seem to receive a GitHub notification for this. I understand your proposal, but it is unclear to me what benefits there are to gain from using augmented assignment statements. Hopefully, I don’t come across as argumentative; I genuinely want to understand how this will make the package better. Personally, I find augmented assignments to be less readable.

seanlaw · Dec 19 '21 02:12

I understand your proposal

Thanks for such positive feedback.

but it is unclear to me what benefits there are to gain from using augmented assignment statements.

:thought_balloon: I find that the rationale of Python Enhancement Proposal 203, "Augmented Assignments" (from 2000-07-13), also indicates motivation for another bit of collateral evolution. :thinking:

Hopefully, I don’t come across as argumentative

Your enquiry is fine.

and I genuinely want to understand how this will make the package better.

I dare to point out another software transformation approach.

Personally, I find augmented assignments to be less readable

I am curious if this view can evolve somehow.

elfring · Dec 19 '21 08:12

I am curious if this view can evolve somehow.

Sure. Would you mind submitting a PR for this to be reviewed? I would welcome it.

Some source code analysis tools can help to find opportunities for improving software components

What other tools are you thinking of? We currently use flake8 and black.

seanlaw · Dec 19 '21 12:12

Would you mind submitting a PR for this to be reviewed?

:thought_balloon: Can the chances grow to integrate the shown source code adjustments directly (eventually with support from other contributors)?

What other tools are you thinking of?

  • I presented data processing results from one of my Perl command examples (which I published together with related issues).
  • Semantic patch languages (based on computation tree logic variants); a minimal Python sketch of the underlying idea follows after this list.
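
To make the idea concrete in Python terms, here is a minimal, purely illustrative sketch of a checker that flags statements of the form x = x <op> y as candidates for augmented assignment (this is not one of the tools referred to above):

import ast

# Hypothetical input, mirroring patterns from the diff above.
SOURCE = """\
total_min = total_min - mask
L[i] = L[i] + 1
pan[idx] = pan[idx] * norm
other = total_min - mask
"""

class AugmentedAssignFinder(ast.NodeVisitor):
    # Flags statements of the form x = x <op> y that could be written
    # x <op>= y. Comparing the unparsed text (Python 3.9+) ignores side
    # effects in subscripts, so this is a heuristic, not a proof of
    # semantic equivalence.
    def visit_Assign(self, node):
        if (
            len(node.targets) == 1
            and isinstance(node.value, ast.BinOp)
            and ast.unparse(node.targets[0]) == ast.unparse(node.value.left)
        ):
            op = type(node.value.op).__name__  # e.g. 'Sub', 'Add', 'Mult'
            print(f"line {node.lineno}: candidate for augmented {op}")
        self.generic_visit(node)

AugmentedAssignFinder().visit(ast.parse(SOURCE))
# line 1: candidate for augmented Sub
# line 2: candidate for augmented Add
# line 3: candidate for augmented Mult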

We currently use flake8 and black

Would you like to influence the further evolution of such software analysis tools (besides my feature requests)? :thinking:

elfring · Dec 19 '21 13:12

Please submit an appropriate PR.

seanlaw · Dec 19 '21 13:12