8acf04da9110e1fdb5846c4bf511c8d9f29a6565,librosa/util.py,,pad_center,#,86
Before Change
    kwargs.setdefault("mode", "constant")
    lpad = (size - len(data))/2
    return np.pad(data, (lpad, size - len(data) - lpad), **kwargs)

def fix_length(y, n, **kwargs):
    """Fix the length of a one-dimensional array ``y`` to exactly ``n``.
After Change
    lpad = (size - n)/2
    lengths = [(0, 0)] * d
    lengths[axis] = (lpad, size - n - lpad)
    if lpad < 0:
        raise ValueError("Target size {:d} is smaller than input {:d}".format(size, n))
    return np.pad(data, lengths, **kwargs)

def fix_length(y, n, **kwargs):
    """Fix the length of a one-dimensional array ``y`` to exactly ``n``.
In pattern: SUPERPATTERN
Frequency: 3
Non-data size: 4
Instances
Project Name: librosa/librosa
Commit Name: 8acf04da9110e1fdb5846c4bf511c8d9f29a6565
Time: 2014-11-06
Author: brian.mcfee@nyu.edu
File Name: librosa/util.py
Class Name:
Method Name: pad_center
Project Name: kevinzakka/recurrent-visual-attention
Commit Name: 520e8fb57b890a7249334d9e90c9ad209d0b849f
Time: 2018-02-10
Author: kevinarmandzakka@gmail.com
File Name: modules.py
Class Name: retina
Method Name: extract_patch
Project Name: librosa/librosa
Commit Name: 489494cdbbd3bf9f75b37c58e15ffbbbc1dd0188
Time: 2015-01-03
Author: brian.mcfee@nyu.edu
File Name: librosa/util.py
Class Name:
Method Name: fix_length