Untitled

a guest
Oct 22nd, 2019
from scipy.signal import firls
import librosa
import numpy as np
import torch
import torch.nn.functional as F

elc = np.array([[31.5, -29.9],   # freq (Hz), response (dB)
                [63, -23.9],
                [100, -19.8],
                [200, -13.8],
                [400, -7.8],
                [800, -1.9],
                [1000, 0.],
                [2000, 5.6],
                [3150, 9.0],
                [4000, 10.5],
                [5000, 11.7],
                [6300, 12.2],
                [7100, 12.0],
                [8000, 11.4],
                [9000, 10.1],
                [10000, 8.1],
                [12500, 0],
                [14000, -5.3],
                [16000, -11.7],
                [20000, -22.2],
                [31500, -42.7]])

def equal_filter(n_tap, sr):
    """Returns a linear-phase FIR filter that simulates the equal-loudness contour
    (suppresses low frequencies, boosts the region around 3 kHz).
    """
    assert n_tap % 2 == 1, "n_tap should be an odd number, otherwise it's odd.."
    # keep only the contour points at or below the Nyquist frequency
    freq_idx = int(np.sum(elc[:, 0] <= sr // 2))
    freq = elc[:freq_idx, 0]
    desired = 10 ** (elc[:freq_idx, 1] / 20.)  # dB -> linear amplitude
    # note: firls reads `freq` as pairs of band edges, so it must contain an even number of points
    return firls(n_tap, freq, desired, fs=sr)

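# A quick sanity check of the filter's response (a sketch, not part of the original
# paste; the 2048-point grid and the use of scipy.signal.freqz are illustration choices):
#
#     from scipy.signal import freqz
#     w, h = freqz(equal_filter(n_tap=9, sr=44100), worN=2048, fs=44100)
#     response_db = 20 * np.log10(np.abs(h) + 1e-12)
#
# With only 9 taps the response follows the contour only coarsely; more taps track it
# more closely at the cost of a longer filter.
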
if __name__ == '__main__':

    SR = 44100
    len_filter = 9
    # librosa.load returns (signal, sample_rate); keep only the float32 signal
    audio_signal, _ = librosa.load('some_audio_file.wav', sr=SR, mono=True)
    audio_signal = torch.from_numpy(audio_signal)

    # get the filter taps (firls returns float64; cast to float32 to match the audio)
    elc_filter = torch.from_numpy(equal_filter(n_tap=len_filter, sr=SR)).float()

    # flip it to use torch's conv1d function, which does NOT flip the kernel
    # (for this symmetric linear-phase filter the flip is a no-op, but it keeps the intent explicit)
    elc_filter = torch.flip(elc_filter, dims=(0,))

    # make it 3d for F.conv1d compatibility
    elc_filter = torch.reshape(elc_filter, (1, 1, -1))  # (out_ch, in_ch, filter_length)

    # make the input a batch for F.conv1d compatibility
    batch_audio = torch.reshape(audio_signal, (1, 1, -1))  # now (1, 1, time), ready for F.conv1d

    # 'same'-style padding keeps the output the same length as the input
    perceptual_simulated_batch_audio = F.conv1d(batch_audio, elc_filter, padding=len_filter // 2)
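
    # Optional follow-up (a sketch, not part of the original paste; the `soundfile`
    # dependency and the output filename below are assumptions made for illustration).
    # With stride 1 and padding of len_filter // 2, the output length equals the input length.
    assert perceptual_simulated_batch_audio.shape == batch_audio.shape

    # back to a plain 1-D float32 array, then to disk for listening
    import soundfile as sf
    filtered = perceptual_simulated_batch_audio.squeeze().numpy()
    sf.write('some_audio_file_elc.wav', filtered, SR)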