"from scipy.linalg import _cblas" — DLL load failed while importing _fblas

33 Views Asked by At

I encountered a problem while running the following import:

from pykalman import KalmanFilter

The error I get is:

Cell In[26], line 5
  3 import pandas as pd
  4 import matplotlib.pyplot as plt
----> 5 from pykalman import KalmanFilter
  6 from hmmlearn import hmm
File C:\ProgramData\Anaconda3\lib\site-packages\pykalman\__init__.py:10
  1 '''
  2 =============
  3 Kalman Module
  (...)
  7 spaces.
  8 '''
---> 10 from .standard import KalmanFilter
 11 from .unscented import AdditiveUnscentedKalmanFilter, 
UnscentedKalmanFilter
 13 __all__ = [
 14     "KalmanFilter",
 15     "AdditiveUnscentedKalmanFilter",
(...)
 18     "sqrt"
 19 ]

File C:\ProgramData\Anaconda3\lib\site-packages\pykalman\standard.py:12
  9 import warnings
 11 import numpy as np
---> 12 from scipy import linalg
 14 from .utils import array1d, array2d, check_random_state, \
 15     get_params, log_multivariate_normal_density, preprocess_arguments
 17 # Dimensionality of each Kalman Filter parameter for a single time step

File C:\ProgramData\Anaconda3\lib\site-packages\scipy\linalg\__init__.py:194
  1 """
  2 ====================================
  3 Linear algebra (:mod:`scipy.linalg`)
 (...)
191 
192 """  # noqa: E501
--> 194 from .misc import *
195 from .basic import *
196 from .decomp import *

File C:\ProgramData\Anaconda3\lib\site-packages\scipy\linalg\misc.py:3
  1 import numpy as np
  2 from numpy.linalg import LinAlgError
 ----> 3 from .blas import get_blas_funcs
  4 from .lapack import get_lapack_funcs
  6 __all__ = ['LinAlgError', 'LinAlgWarning', 'norm']

File C:\ProgramData\Anaconda3\lib\site-packages\scipy\linalg\blas.py:213
210 import numpy as _np
211 import functools
--> 213 from scipy.linalg import _fblas
214 try:
215     from scipy.linalg import _cblas

ImportError: DLL load failed while importing _fblas: The specified module could not be found.

I have pip-installed numpy-1.22.4+mkl-cp38-cp38-win_amd64.whl in python/Scripts, but the problem still exists. Thanks for your attention.

0

There are 0 best solutions below