
Commit 5faf62e

Support numpy 1.5 and make code PEP8 compliant.
1 parent 36bff9f commit 5faf62e

4 files changed: +126 -84 lines changed


nibabel/streamlines/array_sequence.py

Lines changed: 9 additions & 9 deletions
@@ -22,7 +22,7 @@ class ArraySequence(object):
         same for every ndarray.
     """

-    BUFFER_SIZE = 87382*4  # About 4 Mb if item shape is 3 (e.g. 3D points).
+    BUFFER_SIZE = 87382 * 4  # About 4 Mb if item shape is 3 (e.g. 3D points).

     def __init__(self, iterable=None):
         """
@@ -72,7 +72,7 @@ def __init__(self, iterable=None):

                 offsets.append(offset)
                 lengths.append(len(e))
-                self._data[offset:offset+len(e)] = e
+                self._data[offset:offset + len(e)] = e
                 offset += len(e)

         self._offsets = np.asarray(offsets)
@@ -148,14 +148,14 @@ def extend(self, elements):
         next_offset = self._data.shape[0]

         if is_array_sequence(elements):
-            self._data.resize((self._data.shape[0]+sum(elements._lengths),
+            self._data.resize((self._data.shape[0] + sum(elements._lengths),
                                self._data.shape[1]))

             offsets = []
             for offset, length in zip(elements._offsets, elements._lengths):
                 offsets.append(next_offset)
-                chunk = elements._data[offset:offset+length]
-                self._data[next_offset:next_offset+length] = chunk
+                chunk = elements._data[offset:offset + length]
+                self._data[next_offset:next_offset + length] = chunk
                 next_offset += length

             self._lengths = np.r_[self._lengths, elements._lengths]
@@ -182,8 +182,8 @@ def copy(self):
         offsets = []
         for offset, length in zip(self._offsets, self._lengths):
             offsets.append(next_offset)
-            chunk = self._data[offset:offset+length]
-            seq._data[next_offset:next_offset+length] = chunk
+            chunk = self._data[offset:offset + length]
+            seq._data[next_offset:next_offset + length] = chunk
             next_offset += length

         seq._offsets = np.asarray(offsets)
@@ -212,7 +212,7 @@ def __getitem__(self, idx):
         """
         if isinstance(idx, (int, np.integer)):
             start = self._offsets[idx]
-            return self._data[start:start+self._lengths[idx]]
+            return self._data[start:start + self._lengths[idx]]

         elif isinstance(idx, (slice, list)):
             seq = ArraySequence()
@@ -241,7 +241,7 @@ def __iter__(self):
                             " len(self._lengths) != len(self._offsets)")

         for offset, lengths in zip(self._offsets, self._lengths):
-            yield self._data[offset: offset+lengths]
+            yield self._data[offset: offset + lengths]

     def __len__(self):
         return len(self._offsets)
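
The changes in this file are whitespace-only PEP8 fixes (spaces around binary operators) in the offset/length bookkeeping. For context, here is a minimal standalone sketch of the layout those lines manipulate; this is a toy class for illustration, not nibabel's actual ArraySequence: every element is stored in one flat buffer, and element i is the slice _data[offsets[i]:offsets[i] + lengths[i]].

import numpy as np

# Minimal sketch (not the real ArraySequence) of the offsets/lengths layout.
class ToySequence(object):
    def __init__(self, arrays):
        arrays = [np.asarray(a) for a in arrays]
        # All arrays are packed into a single flat 2D buffer.
        self._data = np.concatenate(arrays, axis=0)
        self._lengths = np.array([len(a) for a in arrays])
        # Offsets are the running sum of the preceding lengths.
        self._offsets = np.r_[0, np.cumsum(self._lengths)[:-1]]

    def __getitem__(self, idx):
        start = self._offsets[idx]
        return self._data[start:start + self._lengths[idx]]

points = [np.random.rand(4, 3), np.random.rand(2, 3), np.random.rand(7, 3)]
seq = ToySequence(points)
assert np.array_equal(seq[1], points[1])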

nibabel/streamlines/tests/test_array_sequence.py

Lines changed: 7 additions & 5 deletions
@@ -18,12 +18,12 @@ def setup():
     global SEQ_DATA
     rng = np.random.RandomState(42)
     SEQ_DATA['rng'] = rng
-    SEQ_DATA['data'] = generate_data(nb_arrays=10, common_shape=(3,), rng=rng)
+    SEQ_DATA['data'] = generate_data(nb_arrays=5, common_shape=(3,), rng=rng)
     SEQ_DATA['seq'] = ArraySequence(SEQ_DATA['data'])


 def generate_data(nb_arrays, common_shape, rng):
-    data = [rng.rand(*(rng.randint(10, 50),) + common_shape)
+    data = [rng.rand(*(rng.randint(3, 20),) + common_shape)
            for _ in range(nb_arrays)]
     return data

@@ -79,7 +79,7 @@ def test_creating_arraysequence_from_list(self):
         for ndim in range(0, N+1):
             common_shape = tuple([SEQ_DATA['rng'].randint(1, 10)
                                   for _ in range(ndim-1)])
-            data = generate_data(nb_arrays=10, common_shape=common_shape,
+            data = generate_data(nb_arrays=5, common_shape=common_shape,
                                  rng=SEQ_DATA['rng'])
             check_arr_seq(ArraySequence(data), data)

@@ -213,7 +213,7 @@ def test_arraysequence_getitem(self):
         SEQ_DATA['rng'].shuffle(indices)
         seq_view = SEQ_DATA['seq'][indices]
         check_arr_seq_view(seq_view, SEQ_DATA['seq'])
-        check_arr_seq(seq_view, np.asarray(SEQ_DATA['data'])[indices])
+        check_arr_seq(seq_view, [SEQ_DATA['data'][i] for i in indices])

         # Get slice (this will create a view).
         seq_view = SEQ_DATA['seq'][::2]
@@ -224,7 +224,9 @@ def test_arraysequence_getitem(self):
         selection = np.array([False, True, True, False, True])
         seq_view = SEQ_DATA['seq'][selection]
         check_arr_seq_view(seq_view, SEQ_DATA['seq'])
-        check_arr_seq(seq_view, np.asarray(SEQ_DATA['data'])[selection])
+        check_arr_seq(seq_view,
+                      [SEQ_DATA['data'][i]
+                       for i, keep in enumerate(selection) if keep])

         # Test invalid indexing
         assert_raises(TypeError, SEQ_DATA['seq'].__getitem__, 'abc')
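
The substantive change in these tests is replacing np.asarray(SEQ_DATA['data'])[indices] with plain list comprehensions: building an object array from a list of arrays of different lengths and fancy-indexing it is not dependable on old numpy releases (the commit targets numpy 1.5), so the expected values are now computed with pure-Python indexing. A small illustrative sketch, using made-up ragged data rather than the test fixtures:

import numpy as np

# Ragged data: arrays of different lengths, like generate_data() produces.
data = [np.random.rand(n, 3) for n in (4, 2, 7, 5, 3)]
indices = [3, 0, 2]

# Old approach: relies on numpy building an object array from the ragged list
# and supporting fancy indexing on it, which is not reliable on old numpy.
# expected = np.asarray(data)[indices]

# New approach: plain Python indexing, which behaves the same everywhere.
expected = [data[i] for i in indices]

# Boolean selections are handled the same way.
selection = np.array([False, True, True, False, True])
kept = [data[i] for i, keep in enumerate(selection) if keep]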

nibabel/streamlines/tractogram.py

Lines changed: 4 additions & 3 deletions
@@ -295,9 +295,10 @@ def apply_affine(self, affine, lazy=False):
             return self

         BUFFER_SIZE = 10000000  # About 128 Mb since pts shape is 3.
-        for i in range(0, len(self.streamlines._data), BUFFER_SIZE):
-            pts = self.streamlines._data[i:i+BUFFER_SIZE]
-            self.streamlines._data[i:i+BUFFER_SIZE] = apply_affine(affine, pts)
+        for start in range(0, len(self.streamlines._data), BUFFER_SIZE):
+            end = start + BUFFER_SIZE
+            pts = self.streamlines._data[start:end]
+            self.streamlines._data[start:end] = apply_affine(affine, pts)

         # Update the affine that brings back the streamlines to RASmm.
         self._affine_to_rasmm = np.dot(self._affine_to_rasmm,
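
This hunk renames the i:i+BUFFER_SIZE slices to explicit start/end bounds; the loop transforms the flat point buffer in place, BUFFER_SIZE rows at a time, so apply_affine never has to hold a second full-size copy. A minimal standalone sketch of the same pattern, using nibabel.affines.apply_affine on a plain array rather than a Tractogram (the tiny buffer size, sample points, and scaling affine below are made up for illustration; the real code uses BUFFER_SIZE = 10000000):

import numpy as np
from nibabel.affines import apply_affine

BUFFER_SIZE = 10                         # Tiny chunk size so the loop iterates.
points = np.random.rand(25, 3)           # Stand-in for streamlines._data.
affine = np.diag([2.0, 2.0, 2.0, 1.0])   # Hypothetical scaling affine.

# Transform the (N, 3) buffer chunk by chunk, writing results back in place.
for start in range(0, len(points), BUFFER_SIZE):
    end = start + BUFFER_SIZE
    points[start:end] = apply_affine(affine, points[start:end])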
