From 67dea115a914654c50c81add7c827a62e0fea669 Mon Sep 17 00:00:00 2001
From: Antony Lee
Date: Tue, 12 Nov 2019 12:32:15 +0100
Subject: [PATCH] np.concatenate cleanups.

Replace np.concatenate by iterable unpacking when the arrays are short
(so performance doesn't matter) and legibility is improved, or by
np.roll(), or by np.append.
---
 examples/specialty_plots/radar_chart.py     |  4 +--
 lib/matplotlib/bezier.py                    | 16 +++++------
 lib/matplotlib/mlab.py                      |  5 ++--
 lib/matplotlib/patches.py                   |  3 +-
 lib/matplotlib/tests/test_ticker.py         |  4 +--
 lib/mpl_toolkits/axisartist/angle_helper.py | 32 ++++++---------------
 6 files changed, 21 insertions(+), 43 deletions(-)

diff --git a/examples/specialty_plots/radar_chart.py b/examples/specialty_plots/radar_chart.py
index 47a0bc36cb10..4989b6daf11e 100644
--- a/examples/specialty_plots/radar_chart.py
+++ b/examples/specialty_plots/radar_chart.py
@@ -66,8 +66,8 @@ def _close_line(self, line):
             x, y = line.get_data()
             # FIXME: markers at x[0], y[0] get doubled-up
             if x[0] != x[-1]:
-                x = np.concatenate((x, [x[0]]))
-                y = np.concatenate((y, [y[0]]))
+                x = np.append(x, x[0])
+                y = np.append(y, y[0])
             line.set_data(x, y)
 
         def set_varlabels(self, labels):
diff --git a/lib/matplotlib/bezier.py b/lib/matplotlib/bezier.py
index e1771abee20d..da10a72e70f0 100644
--- a/lib/matplotlib/bezier.py
+++ b/lib/matplotlib/bezier.py
@@ -268,8 +268,6 @@ def split_path_inout(path, inside, tolerance=0.01, reorder_inout=False):
 
     ctl_points_old = ctl_points
 
-    concat = np.concatenate
-
     iold = 0
     i = 1
 
@@ -277,7 +275,7 @@ def split_path_inout(path, inside, tolerance=0.01, reorder_inout=False):
         iold = i
         i += len(ctl_points) // 2
         if inside(ctl_points[-2:]) != begin_inside:
-            bezier_path = concat([ctl_points_old[-2:], ctl_points])
+            bezier_path = np.concatenate([ctl_points_old[-2:], ctl_points])
             break
         ctl_points_old = ctl_points
     else:
@@ -302,15 +300,15 @@ def split_path_inout(path, inside, tolerance=0.01, reorder_inout=False):
     verts_right = right[:]
 
     if path.codes is None:
-        path_in = Path(concat([path.vertices[:i], verts_left]))
-        path_out = Path(concat([verts_right, path.vertices[i:]]))
+        path_in = Path(np.concatenate([path.vertices[:i], verts_left]))
+        path_out = Path(np.concatenate([verts_right, path.vertices[i:]]))
 
     else:
-        path_in = Path(concat([path.vertices[:iold], verts_left]),
-                       concat([path.codes[:iold], codes_left]))
+        path_in = Path(np.concatenate([path.vertices[:iold], verts_left]),
+                       np.concatenate([path.codes[:iold], codes_left]))
 
-        path_out = Path(concat([verts_right, path.vertices[i:]]),
-                        concat([codes_right, path.codes[i:]]))
+        path_out = Path(np.concatenate([verts_right, path.vertices[i:]]),
+                        np.concatenate([codes_right, path.codes[i:]]))
 
     if reorder_inout and not begin_inside:
         path_in, path_out = path_out, path_in
diff --git a/lib/matplotlib/mlab.py b/lib/matplotlib/mlab.py
index 3023269226c0..01b360738ab3 100644
--- a/lib/matplotlib/mlab.py
+++ b/lib/matplotlib/mlab.py
@@ -572,9 +572,8 @@ def _spectral_helper(x, y=None, NFFT=None, Fs=None, detrend_func=None,
 
     if sides == 'twosided':
         # center the frequency range at zero
-        freqs = np.concatenate((freqs[freqcenter:], freqs[:freqcenter]))
-        result = np.concatenate((result[freqcenter:, :],
-                                 result[:freqcenter, :]), 0)
+        freqs = np.roll(freqs, -freqcenter, axis=0)
+        result = np.roll(result, -freqcenter, axis=0)
     elif not pad_to % 2:
         # get the last value correctly, it is negative otherwise
         freqs[-1] *= -1
diff --git a/lib/matplotlib/patches.py b/lib/matplotlib/patches.py
index 3b59c235430f..76eb4677a389 100644
--- a/lib/matplotlib/patches.py
+++ b/lib/matplotlib/patches.py
@@ -2342,8 +2342,7 @@ def transmute(self, x0, y0, width, height, mutation_size):
                                                        width, height,
                                                        mutation_size)
             # Add a trailing vertex to allow us to close the polygon correctly
-            saw_vertices = np.concatenate([np.array(saw_vertices),
-                                           [saw_vertices[0]]], axis=0)
+            saw_vertices = np.concatenate([saw_vertices, [saw_vertices[0]]])
             codes = ([Path.MOVETO]
                      + [Path.CURVE3, Path.CURVE3] * ((len(saw_vertices)-1)//2)
                      + [Path.CLOSEPOLY])
diff --git a/lib/matplotlib/tests/test_ticker.py b/lib/matplotlib/tests/test_ticker.py
index 7c6fe6a6112d..d15209cace11 100644
--- a/lib/matplotlib/tests/test_ticker.py
+++ b/lib/matplotlib/tests/test_ticker.py
@@ -357,9 +357,7 @@ def test_minor(self, lims, expected_low_ticks):
         else:
             # subsample
             _LogitHelper.assert_almost_equal(
-                np.sort(np.concatenate((major_ticks, minor_ticks))),
-                expected_ticks,
-            )
+                sorted([*major_ticks, *minor_ticks]), expected_ticks)
 
     def test_minor_attr(self):
         loc = mticker.LogitLocator(nbins=100)
diff --git a/lib/mpl_toolkits/axisartist/angle_helper.py b/lib/mpl_toolkits/axisartist/angle_helper.py
index ce8fe5d00fbe..235317a2be09 100644
--- a/lib/mpl_toolkits/axisartist/angle_helper.py
+++ b/lib/mpl_toolkits/axisartist/angle_helper.py
@@ -20,19 +20,11 @@ def select_step_degree(dv):
     second_limits_ = np.array(minsec_limits_) / 3600
     second_factors = [3600.] * len(second_limits_)
 
-    degree_limits = np.concatenate([second_limits_,
-                                    minute_limits_,
-                                    degree_limits_])
+    degree_limits = [*second_limits_, *minute_limits_, *degree_limits_]
+    degree_steps = [*minsec_steps_, *minsec_steps_, *degree_steps_]
+    degree_factors = [*second_factors, *minute_factors, *degree_factors]
 
-    degree_steps = np.concatenate([minsec_steps_,
-                                   minsec_steps_,
-                                   degree_steps_])
-
-    degree_factors = np.concatenate([second_factors,
-                                     minute_factors,
-                                     degree_factors])
-
-    n = degree_limits.searchsorted(dv)
+    n = np.searchsorted(degree_limits, dv)
 
     step = degree_steps[n]
     factor = degree_factors[n]
@@ -54,19 +46,11 @@ def select_step_hour(dv):
     second_limits_ = np.array(minsec_limits_) / 3600
     second_factors = [3600.] * len(second_limits_)
 
-    hour_limits = np.concatenate([second_limits_,
-                                  minute_limits_,
-                                  hour_limits_])
-
-    hour_steps = np.concatenate([minsec_steps_,
-                                 minsec_steps_,
-                                 hour_steps_])
-
-    hour_factors = np.concatenate([second_factors,
-                                   minute_factors,
-                                   hour_factors])
+    hour_limits = [*second_limits_, *minute_limits_, *hour_limits_]
+    hour_steps = [*minsec_steps_, *minsec_steps_, *hour_steps_]
+    hour_factors = [*second_factors, *minute_factors, *hour_factors]
 
-    n = hour_limits.searchsorted(dv)
+    n = np.searchsorted(hour_limits, dv)
 
     step = hour_steps[n]
     factor = hour_factors[n]
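
Note (not part of the patch): the commit message relies on np.append, np.roll, and
iterable unpacking being value-for-value equivalent to the np.concatenate calls they
replace. The standalone sketch below spells out those equivalences; the sample arrays
and the variable names x, freqs, k, and limits are made up for illustration only.

    import numpy as np

    # 1) Appending a single trailing element (as in radar_chart.py):
    x = np.array([3.0, 1.0, 4.0])           # sample data, made up
    assert np.array_equal(np.append(x, x[0]),
                          np.concatenate((x, [x[0]])))

    # 2) Rotating an array so that index k moves to the front (as in mlab.py):
    freqs = np.arange(8.0)                   # sample data, made up
    k = 3
    assert np.array_equal(np.roll(freqs, -k, axis=0),
                          np.concatenate((freqs[k:], freqs[:k])))

    # 3) Joining short sequences by unpacking into a plain list
    #    (as in angle_helper.py); np.searchsorted accepts the list directly:
    limits = [*np.array([0.1, 0.2]), *[1.5, 3.0], *[7.0, 13.0]]
    assert np.array_equal(limits,
                          np.concatenate([[0.1, 0.2], [1.5, 3.0], [7.0, 13.0]]))
    assert np.searchsorted(limits, 2.0) == np.asarray(limits).searchsorted(2.0)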