import os
import numpy as np
from scipy.signal import butter, lfilter
import csv
import matplotlib.pyplot as plt
from scipy import signal
from brainflow.board_shim import BoardShim, BrainFlowInputParams, LogLevels, BoardIds
from brainflow.data_filter import DataFilter, FilterTypes, AggOperations, WindowFunctions, DetrendOperations
from sklearn.cluster import KMeans
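# NOTE: lowpass() is called further down but its definition was missing from
# this snippet. The sketch below is my assumption of what it looks like (a
# Butterworth low-pass built from scipy.signal), inferred from the call
# signature lowpass(signal, cutoff_hz, fs, order).
def lowpass(sig, fc, fs, butter_filt_order):
    # Normalize the cutoff by the Nyquist frequency (fs/2) and filter
    b, a = butter(butter_filt_order, np.array(fc) / (fs / 2), btype='low')
    return lfilter(b, a, sig, axis=0)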
# Options to read: 'EEG-IO', 'EEG-VV', 'EEG-VR', 'EEG-MB'
data_folder = 'EEG-IO'
# Parameters and filter settings
fs = 250.0
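# nfft is used by get_psd_welch() below but was never defined in this snippet;
# assuming the usual BrainFlow recipe: nearest power of two to the sampling rate.
nfft = DataFilter.get_nearest_power_of_two(int(fs))  # 256 for fs = 250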
# Reading data files
file_idx = 0
list_of_files = [f for f in os.listdir(data_folder) if os.path.isfile(os.path.join(data_folder, f)) and '_data' in f]  # keep only files whose names contain '_data'; os.listdir order is arbitrary
print(list_of_files)
file_sig = list_of_files[file_idx] # Data File
file_stim = list_of_files[file_idx].replace('_data', '_labels')  # label file: swap '_data' for '_labels'
print("Reading: ", file_sig, file_stim)
# Loading data
if data_folder == 'EEG-IO' or data_folder == 'EEG-MB':
    data_sig = np.loadtxt(open(os.path.join(data_folder, file_sig), "rb"), delimiter=";", skiprows=1, usecols=(0, 1, 2))  # rows: samples; columns: (timestamp, channel 1, channel 2)
elif data_folder == 'EEG-VR' or data_folder == 'EEG-VV':
    data_sig = np.loadtxt(open(os.path.join(data_folder, file_sig), "rb"), delimiter=",", skiprows=5, usecols=(0, 1, 2))
    data_sig = data_sig[0:(int(200*fs)+1), :]  # keep the first 200 s -- not needed for the previous two datasets
    data_sig = data_sig[:, 0:3]  # timestamp column plus the two EEG channels
    data_sig[:, 0] = np.array(range(0, len(data_sig)))/fs  # rebuild the time axis from the sample index
############ Calculating PSD ############
index, ch = data_sig.shape  # number of samples, number of columns
# print(index)
feature_vectors = [[], []]
feature_vectorsa = [[], []]
feature_vectorsb = [[], []]
feature_vectorsc = [[], []]
x = 1
while x > 0 and x < 3:  # iterate over the two EEG channels (columns 1 and 2)
    if x == 1:
        data_sig[:, 1] = lowpass(data_sig[:, 1], 10, fs, 4)
    elif x == 2:
        data_sig[:, 2] = lowpass(data_sig[:, 2], 10, fs, 4)
    for y in range(500, 19328, 500):  # slide a 500-sample (2 s) window through the recording
        if x == 1:
            DataFilter.detrend(data_sig[y-500:y, 1], DetrendOperations.LINEAR.value)
            psd = DataFilter.get_psd_welch(data_sig[y-500:y, 1], nfft, nfft//2, 250,
                                           WindowFunctions.BLACKMAN_HARRIS.value)
            # Delta 1-4 Hz
            band_power_delta = DataFilter.get_band_power(psd, 1.0, 4.0)
            # Theta 4-8 Hz
            band_power_theta = DataFilter.get_band_power(psd, 4.0, 8.0)
            # Alpha 8-12 Hz
            band_power_alpha = DataFilter.get_band_power(psd, 8.0, 12.0)
            # Beta 12-30 Hz
            band_power_beta = DataFilter.get_band_power(psd, 12.0, 30.0)
            feature_vectors[x].insert(y, [band_power_delta, band_power_theta, band_power_alpha, band_power_beta])
            feature_vectorsa[x].insert(y, [band_power_delta, band_power_theta])
        elif x == 2:
            DataFilter.detrend(data_sig[y-500:y, 2], DetrendOperations.LINEAR.value)
            psd = DataFilter.get_psd_welch(data_sig[y-500:y, 2], nfft, nfft//2, 250,
                                           WindowFunctions.BLACKMAN_HARRIS.value)
            # Delta 1-4 Hz
            band_power_delta = DataFilter.get_band_power(psd, 1.0, 4.0)
            # Theta 4-8 Hz
            band_power_theta = DataFilter.get_band_power(psd, 4.0, 8.0)
            # Alpha 8-12 Hz
            band_power_alpha = DataFilter.get_band_power(psd, 8.0, 12.0)
            # Beta 12-30 Hz
            band_power_beta = DataFilter.get_band_power(psd, 12.0, 30.0)
            # feature_vectorsc[x].insert(y, [band_power_delta, band_power_theta, band_power_alpha, band_power_beta])
            # feature_vectorsd[x].insert(y, [band_power_delta, band_power_theta])
    x = x + 1
print(feature_vectorsa)
powers = np.log10(np.asarray(feature_vectors, dtype=float))
powers1 = np.log10(np.asarray(feature_vectorsa, dtype=float))
# powers2 = np.log10(np.asarray(feature_vectorsb))
# powers3 = np.log10(np.asarray(feature_vectorsc))
print(powers.shape)
print(powers1.shape)
This is super confusing. When I run my code, I keep getting this error:

ValueError: setting an array element with a sequence. The requested array has an inhomogeneous shape after 1 dimensions. The detected shape was (2,) + inhomogeneous part.
Traceback:

File "/Users/mikaelhaji/Downloads/EEG-EyeBlinks/read_data.py", line 170, in <module>
    powers = np.log10(np.asarray(feature_vectors, dtype=float))
File "/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/numpy/core/_asarray.py", line 102, in asarray
    return array(a, dtype, copy=False, order=order)
ValueError: setting an array element with a sequence. The requested array has an inhomogeneous shape after 1 dimensions. The detected shape was (2,) + inhomogeneous part.
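For reference, here is a minimal standalone sketch (my own reduction, not part of the script above) that reproduces the same message: np.asarray(..., dtype=float) fails whenever the nested lists are ragged, i.e. the sub-lists have different lengths.

import numpy as np

# Two sub-lists of different lengths -- NumPy cannot pack them into a
# rectangular float array, so this raises the same ValueError as above.
ragged = [[], [[1.0, 2.0], [3.0, 4.0]]]
np.asarray(ragged, dtype=float)  # ValueError: setting an array element with a sequence.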
If anyone has any ideas/answers as to why this might be happening, please let me know.

Thanks in advance.