This page collects typical usage examples of the Python function mne.connectivity.spectral_connectivity. If you are unsure what spectral_connectivity does or how to call it, the curated code examples below should help.
Twenty code examples of spectral_connectivity are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
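Before the examples, here is a minimal sketch of a typical call on synthetic data. It is not taken from the examples below, and it assumes an older MNE release in which the function still lives in mne.connectivity (in recent versions the same functionality has moved to the separate mne-connectivity package and returns a container object rather than a plain array); the data shape and keyword arguments mirror those used throughout the examples.

# Minimal sketch: coherence between three synthetic signals in the alpha band,
# assuming an older MNE release that still ships mne.connectivity.
import numpy as np
from mne.connectivity import spectral_connectivity

sfreq = 250.                           # sampling frequency in Hz (arbitrary here)
rng = np.random.RandomState(42)
data = rng.randn(30, 3, 500)           # shape (n_epochs, n_signals, n_times)

con, freqs, times, n_epochs, n_tapers = spectral_connectivity(
    data, method='coh', mode='multitaper', sfreq=sfreq,
    fmin=8., fmax=13., faverage=True, mt_adaptive=False, n_jobs=1)

# With indices=None, con has shape (n_signals, n_signals, n_bands) and only
# its lower triangle is filled; faverage=True collapses the band to one bin.
print(con[:, :, 0])

This lower-triangular, band-averaged output is why several examples below slice con_matrix[:, :, 0], and why Example 1 symmetrizes the matrix with con_matrix + np.transpose(con_matrix) before exporting it.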
Example 1: compute_and_save_spectral_connectivity
def compute_and_save_spectral_connectivity(data, con_method, sfreq, fmin, fmax, index=0, mode='multitaper', export_to_matlab=False):
    import sys, os
    from mne.connectivity import spectral_connectivity
    import numpy as np
    from scipy.io import savemat

    print data.shape

    if len(data.shape) < 3:
        if con_method in ['coh', 'cohy', 'imcoh']:
            data = data.reshape(1, data.shape[0], data.shape[1])
        elif con_method in ['pli', 'plv', 'ppc', 'pli2_unbiased', 'wpli', 'wpli2_debiased']:
            print "warning, only works with epoched time series"
            sys.exit()

    if mode == 'multitaper':
        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method, sfreq=sfreq, fmin=fmin, fmax=fmax,
            faverage=True, tmin=None, mode='multitaper', mt_adaptive=False, n_jobs=1)
        con_matrix = np.array(con_matrix[:, :, 0])
    elif mode == 'cwt_morlet':
        frequencies = np.arange(fmin, fmax, 1)
        n_cycles = frequencies / 7.
        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method, sfreq=sfreq, faverage=True, tmin=None,
            mode='cwt_morlet', cwt_frequencies=frequencies, cwt_n_cycles=n_cycles, n_jobs=1)
        con_matrix = np.mean(np.array(con_matrix[:, :, 0, :]), axis=2)
    else:
        print "Error, mode = %s not implemented" % (mode)
        return []

    print con_matrix.shape
    print np.min(con_matrix), np.max(con_matrix)

    conmat_file = os.path.abspath("conmat_" + str(index) + "_" + con_method + ".npy")
    np.save(conmat_file, con_matrix)

    if export_to_matlab == True:
        conmat_matfile = os.path.abspath("conmat_" + str(index) + "_" + con_method + ".mat")
        savemat(conmat_matfile, {"conmat": con_matrix + np.transpose(con_matrix)})

    return conmat_file
Author: annapasca, Project: neuropype_ephy, Lines: 53, Source: spectral.py
Example 2: calc_coh
def calc_coh(subject, conditions, task, meg_electordes_names, meg_electrodes_data, tmin=0, tmax=2.5, sfreq=1000, fmin=55, fmax=110, bw=15, n_jobs=6):
    input_file = op.join(ELECTRODES_DIR, subject, task, 'electrodes_data_trials.mat')
    output_file = op.join(ELECTRODES_DIR, subject, task, 'electrodes_coh.npy')
    d = sio.loadmat(input_file)

    # Remove and sort the electrodes according to the meg_electordes_names
    electrodes = get_electrodes_names(subject, task)
    electrodes_to_remove = set(electrodes) - set(meg_electordes_names)
    indices_to_remove = [electrodes.index(e) for e in electrodes_to_remove]
    electrodes = scipy.delete(electrodes, indices_to_remove).tolist()
    electrodes_indices = np.array([electrodes.index(e) for e in meg_electordes_names])
    electrodes = np.array(electrodes)[electrodes_indices].tolist()
    assert(np.all(electrodes == meg_electordes_names))

    for cond, data in enumerate([d[conditions[0]], d[conditions[1]]]):
        data = scipy.delete(data, indices_to_remove, 1)
        data = data[:, electrodes_indices, :]
        data = downsample_data(data)
        data = data[:, :, :meg_electrodes_data.shape[2]]
        if cond == 0:
            coh_mat = np.zeros((data.shape[1], data.shape[1], 2))

        con_cnd, _, _, _, _ = spectral_connectivity(
            data, method='coh', mode='multitaper', sfreq=sfreq,
            fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
            tmin=tmin, tmax=tmax)
        con_cnd = np.mean(con_cnd, axis=2)
        coh_mat[:, :, cond] = con_cnd

    np.save(output_file[:-4], coh_mat)
    return con_cnd
Author: ofek-schechner, Project: mmvt, Lines: 29, Source: meg_electrodes.py
Example 3: calc_electrodes_coh
def calc_electrodes_coh(subject, conditions, mat_fname, t_max, from_t_ind, to_t_ind, sfreq=1000, fmin=55, fmax=110, bw=15,
                        dt=0.1, window_len=0.1, n_jobs=6):
    from mne.connectivity import spectral_connectivity
    import time

    input_file = op.join(SUBJECTS_DIR, subject, 'electrodes', mat_fname)
    d = sio.loadmat(input_file)
    output_file = op.join(MMVT_DIR, subject, 'electrodes_coh.npy')
    windows = np.linspace(0, t_max - dt, t_max / dt)
    for cond, data in enumerate([d[cond] for cond in conditions]):
        if cond == 0:
            coh_mat = np.zeros((data.shape[1], data.shape[1], len(windows), 2))
            # coh_mat = np.load(output_file)
            # continue
        ds_data = downsample_data(data)
        ds_data = ds_data[:, :, from_t_ind:to_t_ind]
        now = time.time()
        for win, tmin in enumerate(windows):
            print('cond {}, tmin {}'.format(cond, tmin))
            utils.time_to_go(now, win + 1, len(windows))
            con_cnd, _, _, _, _ = spectral_connectivity(
                ds_data, method='coh', mode='multitaper', sfreq=sfreq,
                fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                tmin=tmin, tmax=tmin + window_len)
            con_cnd = np.mean(con_cnd, axis=2)
            coh_mat[:, :, win, cond] = con_cnd
            # plt.matshow(con_cnd)
            # plt.show()

    np.save(output_file[:-4], coh_mat)
    return coh_mat
Author: pelednoam, Project: mmvt, Lines: 31, Source: connections.py
Example 4: multiple_windowed_spectral_proc
def multiple_windowed_spectral_proc(ts_file, sfreq, freq_band, con_method):
    import numpy as np
    import os
    from mne.connectivity import spectral_connectivity

    all_data = np.load(ts_file)
    print all_data.shape
    #print sfreq
    print freq_band

    if len(all_data.shape) != 4:
        print "Warning, all_data should have 4 dimensions: nb_trials * nb_windows * nb_nodes * nb_timepoints"
        return []

    all_con_matrices = []
    for i in range(all_data.shape[0]):
        trial_con_matrices = []
        for j in range(all_data.shape[1]):
            cur_data = all_data[i, j, :, :]
            print cur_data.shape
            data = cur_data.reshape(1, cur_data.shape[0], cur_data.shape[1])
            con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                data, method=con_method, mode='multitaper', sfreq=sfreq,
                fmin=freq_band[0], fmax=freq_band[1], faverage=True, tmin=None,
                mt_adaptive=False, n_jobs=1)
            print con_matrix.shape
            con_matrix = np.array(con_matrix[:, :, 0])
            print con_matrix.shape
            print np.min(con_matrix), np.max(con_matrix)
            trial_con_matrices.append(con_matrix)
        all_con_matrices.append(trial_con_matrices)

    np_all_con_matrices = np.array(all_con_matrices)
    print np_all_con_matrices.shape

    conmat_file = os.path.abspath("multiple_windowed_conmat_" + con_method + ".npy")
    np.save(conmat_file, np_all_con_matrices)
    return conmat_file
Author: annapasca, Project: neuropype_ephy, Lines: 57, Source: spectral.py
Example 5: calc_meg_electrodes_coh
def calc_meg_electrodes_coh(subject, tmin=0, tmax=2.5, sfreq=1000, fmin=55, fmax=110, bw=15, n_jobs=6):
    input_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts.npy')
    output_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts_coh.npy')
    data = np.load(input_file)
    for cond in range(data.shape[3]):
        data_cond = data[:, :, :, cond]
        if cond == 0:
            coh_mat = np.zeros((data_cond.shape[1], data_cond.shape[1], 2))

        con_cnd, _, _, _, _ = spectral_connectivity(
            data_cond, method='coh', mode='multitaper', sfreq=sfreq,
            fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
            tmin=tmin, tmax=tmax)
        con_cnd = np.mean(con_cnd, axis=2)
        coh_mat[:, :, cond] = con_cnd

    np.save(output_file[:-4], coh_mat)
    return con_cnd
Author: ofek-schechner, Project: mmvt, Lines: 16, Source: meg_electrodes.py
Example 6: multiple_spectral_proc
def multiple_spectral_proc(ts_file, sfreq, freq_band, con_method):
    import numpy as np
    import os
    from mne.connectivity import spectral_connectivity

    all_data = np.load(ts_file)
    print all_data.shape
    #print sfreq
    print freq_band

    if len(all_data.shape) != 3:
        print "Warning, all_data should have several samples"
        return []

    conmat_files = []
    for i in range(all_data.shape[0]):
        cur_data = all_data[i, :, :]
        data = cur_data.reshape(1, cur_data.shape[0], cur_data.shape[1])
        print data.shape
        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method, mode='multitaper', sfreq=sfreq,
            fmin=freq_band[0], fmax=freq_band[1], faverage=True, tmin=None,
            mt_adaptive=False, n_jobs=1)
        con_matrix = np.array(con_matrix[:, :, 0])
        print con_matrix.shape
        print np.min(con_matrix), np.max(con_matrix)

        conmat_file = os.path.abspath("conmat_" + con_method + "_" + str(i) + ".npy")
        np.save(conmat_file, con_matrix)
        conmat_files.append(conmat_file)

    return conmat_files
Author: annapasca, Project: neuropype_ephy, Lines: 44, Source: spectral.py
Example 7: calc_meg_electrodes_coh_windows
def calc_meg_electrodes_coh_windows(subject, tmin=0, tmax=2.5, sfreq=1000,
                                    freqs=((8, 12), (12, 25), (25, 55), (55, 110)), bw=15, dt=0.1, window_len=0.2, n_jobs=6):
    input_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts.npy')
    output_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts_coh_windows_{}.npy'.format(window_len))
    data = np.load(input_file)
    windows = np.linspace(tmin, tmax - dt, tmax / dt)
    for cond in range(data.shape[3]):
        data_cond = data[:, :, :, cond]
        if cond == 0:
            coh_mat = np.zeros((data_cond.shape[1], data_cond.shape[1], len(windows), len(freqs), 2))

        for freq_ind, (fmin, fmax) in enumerate(freqs):
            for win, tmin in enumerate(windows):
                con_cnd, _, _, _, _ = spectral_connectivity(
                    data[:, :, :, cond], method='coh', mode='multitaper', sfreq=sfreq,
                    fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                    tmin=tmin, tmax=tmin + window_len)
                con_cnd = np.mean(con_cnd, axis=2)
                coh_mat[:, :, win, freq_ind, cond] = con_cnd

    np.save(output_file[:-4], coh_mat)
    return con_cnd
Author: ofek-schechner, Project: mmvt, Lines: 21, Source: meg_electrodes.py
Example 8: calc_electrodes_coh_windows
def calc_electrodes_coh_windows(subject, input_fname, conditions, bipolar, meg_electordes_names, meg_electrodes_data, tmin=0, tmax=2.5, sfreq=1000,
                                freqs=((8, 12), (12, 25), (25, 55), (55, 110)), bw=15, dt=0.1, window_len=0.2, n_jobs=6):
    output_file = op.join(ELECTRODES_DIR, subject, task, 'electrodes_coh_{}windows_{}.npy'.format('bipolar_' if bipolar else '', window_len))
    if input_fname[-3:] == 'mat':
        d = sio.loadmat(matlab_input_file)
        conds_data = [d[conditions[0]], d[conditions[1]]]
        electrodes = get_electrodes_names(subject, task)
    elif input_fname[-3:] == 'npz':
        d = np.load(input_fname)
        conds_data = d['data']
        conditions = d['conditions']
        electrodes = d['names'].tolist()
        pass

    indices_to_remove, electrodes_indices = electrodes_tp_remove(electrodes, meg_electordes_names)
    windows = np.linspace(tmin, tmax - dt, tmax / dt)
    for cond, data in enumerate(conds_data):
        data = scipy.delete(data, indices_to_remove, 1)
        data = data[:, electrodes_indices, :]
        data = downsample_data(data)
        data = data[:, :, :meg_electrodes_data.shape[2]]
        if cond == 0:
            coh_mat = np.zeros((data.shape[1], data.shape[1], len(windows), len(freqs), 2))
            # coh_mat = np.load(output_file)
            # continue
        now = time.time()
        for freq_ind, (fmin, fmax) in enumerate(freqs):
            for win, tmin in enumerate(windows):
                try:
                    print('cond {}, tmin {}'.format(cond, tmin))
                    utils.time_to_go(now, win + 1, len(windows))
                    con_cnd, _, _, _, _ = spectral_connectivity(
                        data, method='coh', mode='multitaper', sfreq=sfreq,
                        fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                        tmin=tmin, tmax=tmin + window_len)
                    con_cnd = np.mean(con_cnd, axis=2)
                    coh_mat[:, :, win, freq_ind, cond] = con_cnd
                except:
                    print('Error with freq {} and win {}'.format(freq_ind, win))

    np.save(output_file[:-4], coh_mat)
Author: ofek-schechner, Project: mmvt, Lines: 40, Source: meg_electrodes.py
Example 9: word_connectivity
def word_connectivity(wordEpochs, indices, step=2):
    wordconnectivity = numpy.empty((int(wordEpochs.shape[0] / step), wordEpochs.shape[1], wordEpochs.shape[1], len(cwt_frequencies), epochLength * samplingRate))
    # this array is wordEpochs x chans x chans x freqs x timeSamples
    print 'wordconnectivity', wordconnectivity.shape
    total = wordEpochs.shape[0] - wordEpochs.shape[0] % step
    for i in range(0, total / step):
        word = wordEpochs[step * i:step * (i + 1)]
        if i == 0:
            print 'word', word.shape
        if step == 1:
            word = word.reshape((1, word.shape[0], word.shape[1]))
        if i == 0:
            if step == 1:
                print 'reshaped word', word.shape
            print 'cwt_frequencies', cwt_frequencies.shape
            print 'cwt_n_cycles', cwt_n_cycles.shape
        if i % 200 == 0:
            print 'Epoch %d/%d (%d)' % (i, total / step, total)
        wordconnectivity[i], freqs, times, _, _ = spectral_connectivity(
            word, indices=indices,
            method='coh', mode='cwt_morlet', sfreq=samplingRate,
            cwt_frequencies=cwt_frequencies, cwt_n_cycles=cwt_n_cycles, n_jobs=NJOBS, verbose='WARNING')
    return(wordconnectivity)
Author: vansky, Project: meg_playground, Lines: 22, Source: meg_coherence_linerunner.py
Example 10: seed_target_indices
# Use 'MEG 2343' as seed
seed_ch = 'MEG 2343'
picks_ch_names = [raw.ch_names[i] for i in picks]
# Create seed-target indices for connectivity computation
seed = picks_ch_names.index(seed_ch)
targets = np.arange(len(picks))
indices = seed_target_indices(seed, targets)
# Define wavelet frequencies and number of cycles
cwt_freqs = np.arange(7, 30, 2)
cwt_n_cycles = cwt_freqs / 7.
# Run the connectivity analysis using 2 parallel jobs
sfreq = raw.info['sfreq'] # the sampling frequency
con, freqs, times, _, _ = spectral_connectivity(
    epochs, indices=indices,
    method='wpli2_debiased', mode='cwt_morlet', sfreq=sfreq,
    cwt_freqs=cwt_freqs, cwt_n_cycles=cwt_n_cycles, n_jobs=1)
# Mark the seed channel with a value of 1.0, so we can see it in the plot
con[np.where(indices[1] == seed)] = 1.0
# Show topography of connectivity from seed
title = 'WPLI2 - Visual - Seed %s' % seed_ch
layout = mne.find_layout(epochs.info, 'meg') # use full layout
tfr = AverageTFR(epochs.info, con, times, freqs, len(epochs))
tfr.plot_topo(fig_facecolor='w', font_color='k', border='k')
Author: Eric89GXL, Project: mne-python, Lines: 30, Source: plot_cwt_sensor_connectivity.py
Example 11: spectral_connectivity
# Pick MEG gradiometers
picks = mne.pick_types(raw.info, meg='grad', eeg=False, stim=False, eog=True,
                       exclude='bads')

# Create epochs for the visual condition
event_id, tmin, tmax = 3, -0.2, 0.5
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=dict(grad=4000e-13, eog=150e-6))

# Compute connectivity for band containing the evoked response.
# We exclude the baseline period
fmin, fmax = 3., 9.
sfreq = raw.info['sfreq']  # the sampling frequency
tmin = 0.0  # exclude the baseline period
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(epochs,
    method='pli', mode='multitaper', sfreq=sfreq,
    fmin=fmin, fmax=fmax, faverage=True, tmin=tmin,
    mt_adaptive=False, n_jobs=2)

# the epochs contain an EOG channel, which we remove now
ch_names = epochs.ch_names
idx = [ch_names.index(name) for name in ch_names if name.startswith('MEG')]
con = con[idx][:, idx]

# con is a 3D array where the last dimension is size one since we averaged
# over frequencies in a single band. Here we make it 2D
con = con[:, :, 0]

# Now, visualize the connectivity in 3D
try:
    from enthought.mayavi import mlab
except:
Author: BushraR, Project: mne-python, Lines: 32, Source: plot_sensor_connectivity.py
Example 12: spectra
pick_ori="normal", return_generator=True)

# Now we are ready to compute the coherence in the alpha and beta band.
# fmin and fmax specify the lower and upper freq. for each band, resp.
fmin = (8., 13.)
fmax = (13., 30.)
sfreq = raw.info['sfreq']  # the sampling frequency

# Now we compute connectivity. To speed things up, we use 2 parallel jobs
# and use mode='fourier', which uses a FFT with a Hanning window
# to compute the spectra (instead of multitaper estimation, which has a
# lower variance but is slower). By using faverage=True, we directly
# average the coherence in the alpha and beta band, i.e., we will only
# get 2 frequency bins
coh, freqs, times, n_epochs, n_tapers = spectral_connectivity(stcs,
    method='coh', mode='fourier', indices=indices,
    sfreq=sfreq, fmin=fmin, fmax=fmax, faverage=True, n_jobs=2)

print 'Frequencies in Hz over which coherence was averaged for alpha: '
print freqs[0]
print 'Frequencies in Hz over which coherence was averaged for beta: '
print freqs[1]

# Generate a SourceEstimate with the coherence. This is simple since we
# used a single seed. For more than one seeds we would have to split coh.
# Note: We use a hack to save the frequency axis as time
tmin = np.mean(freqs[0])
tstep = np.mean(freqs[1]) - tmin
coh_stc = mne.SourceEstimate(coh, vertices=stc.vertno, tmin=1e-3 * tmin,
                             tstep=1e-3 * tstep, subject='sample')
Author: dichaelen, Project: mne-python, Lines: 30, Source: plot_mne_inverse_coherence_epochs.py
Example 13: epochs_snr
def epochs_snr(epochs, n_perm=10, fmin=0, fmax=300, tmin=None, tmax=None,
               kind='coh', normalize_coherence=True):
    '''
    Computes the coherence between the mean of subsets of trials. This can
    be used to assess signal stability in response to a stimulus (repeated or
    otherwise).

    Parameters
    ----------
    epochs : instance of Epochs
        The data on which to calculate coherence. Coherence will be calculated
        between the mean of random splits of trials
    n_perm : int
        The number of permutations to run
    fmin : float
        The minimum coherence frequency
    fmax : float
        The maximum coherence frequency
    tmin : float
        Start time for coherence estimation
    tmax : float
        Stop time for coherence estimation
    kind : 'coh' | 'corr'
        Whether to use coherence or correlation as the similarity statistic
    normalize_coherence : bool
        If True, subtract the grand mean coherence across permutations and
        channels from the output matrix. This is a way to "baseline" your
        coherence values to show deviations from the global means

    Outputs
    -------
    permutations : np.array, shape (n_perms, n_signals, n_freqs)
        A collection of coherence values for each permutation.
    coh_freqs : np.array, shape (n_freqs,)
        The frequency values in the coherence analysis
    '''
    sfreq = epochs.info['sfreq']
    nep, n_chan, ntime = epochs._data.shape
    permutations = []
    for iperm in tqdm(xrange(n_perm)):
        # Split our epochs into two random groups, take mean of each
        t1, t2 = np.split(np.random.permutation(np.arange(nep)), [nep / 2.])
        mn1, mn2 = [epochs[this_ixs]._data.mean(0)
                    for this_ixs in [t1, t2]]

        # Now compute coherence between the two
        this_similarity = []
        for ch, this_mean1, this_mean2 in zip(epochs.ch_names, mn1, mn2):
            this_means = np.vstack([this_mean1, this_mean2])
            if kind == 'coh':
                sim, coh_freqs, _, _, _ = spectral_connectivity(
                    this_means[None, :, :], sfreq=sfreq, fmin=fmin, fmax=fmax,
                    tmin=tmin, tmax=tmax, mt_adaptive=True, verbose=0)
                sim = sim[1, 0, :].squeeze()
            elif kind == 'corr':
                sim, _ = sp.stats.pearsonr(vals1, vals2)
            this_similarity.append(sim)
        permutations.append(this_similarity)
    permutations = np.array(permutations)

    if normalize_coherence is True:
        # Normalize coherence values by their grand average
        permutations -= permutations.mean((0, 1))

    if kind == 'coh':
        return permutations, coh_freqs
    elif kind == 'corr':
        return permutations
Author: kingjr, Project: ecogtools, Lines: 69, Source: strf.py
Example 14: snr_epochs
def snr_epochs(epochs, n_perm=10, fmin=1, fmax=300, tmin=None, tmax=None,
               kind='coh', normalize_coherence=False):
    '''
    Computes the coherence between the mean of subsets of epochs. This can
    be used to assess signal stability in response to a stimulus (repeated or
    otherwise).

    Parameters
    ----------
    epochs : instance of Epochs
        The data on which to calculate coherence. Coherence will be calculated
        between the mean of random splits of trials
    n_perm : int
        The number of permutations to run
    fmin : float
        The minimum coherence frequency
    fmax : float
        The maximum coherence frequency
    tmin : float
        Start time for coherence estimation
    tmax : float
        Stop time for coherence estimation
    kind : 'coh' | 'corr'
        Specifies the similarity statistic.
        If corr, calculate correlation between the mean of subsets of epochs.
        If coh, then calculate the coherence.
    normalize_coherence : bool
        If True, subtract the grand mean coherence across permutations and
        channels from the output matrix. This is a way to "baseline" your
        coherence values to show deviations from the global means

    Outputs
    -------
    permutations : np.array, shape (n_perms, n_signals, n_freqs)
        A collection of coherence values for each permutation.
    coh_freqs : np.array, shape (n_freqs,)
        The frequency values in the coherence analysis
    '''
    sfreq = epochs.info['sfreq']
    epochs = epochs.crop(tmin, tmax, copy=True)
    nep, n_chan, ntime = epochs._data.shape

    # Run permutations
    permutations = []
    for iperm in tqdm(xrange(n_perm)):
        # Split our epochs into two random groups, take mean of each
        t1, t2 = np.split(np.random.permutation(np.arange(nep)), [nep / 2.])
        mn1, mn2 = [epochs[this_ixs]._data.mean(0)
                    for this_ixs in [t1, t2]]

        # Now compute similarity between the two
        this_similarity = []
        for ch, this_mean1, this_mean2 in zip(epochs.ch_names, mn1, mn2):
            this_means = np.vstack([this_mean1, this_mean2])
            if kind == 'coh':
                # import ecogtools as et; et.embed()
                this_means = this_means[np.newaxis, :, :]
                ixs = ([0], [1])
                sim, coh_freqs, _, _, _ = spectral_connectivity(
                    this_means, sfreq=sfreq, method='coh', fmin=fmin,
                    fmax=fmax, tmin=tmin, tmax=tmax, indices=ixs, verbose=0)
                sim = sim.squeeze()
            elif kind == 'corr':
                sim, _ = pearsonr(*this_means)
            else:
                raise ValueError('Unknown similarity type: {0}'.format(kind))
            this_similarity.append(sim)
        permutations.append(this_similarity)
    permutations = np.array(permutations)

    if normalize_coherence is True:
        # Normalize coherence values to their grand average
        permutations -= permutations.mean((0, 1))

    if kind == 'coh':
        return permutations, coh_freqs
    elif kind == 'corr':
        return permutations
Author: monfera, Project: ecogtools, Lines: 79, Source: strf.py
Example 15: test_spectral_connectivity
def test_spectral_connectivity(method, mode):
    """Test frequency-domain connectivity methods."""
    # Use a case known to have no spurious correlations (it would be bad if
    # tests could randomly fail):
    rng = np.random.RandomState(0)
    trans_bandwidth = 2.
    sfreq = 50.
    n_signals = 3
    n_epochs = 8
    n_times = 256
    tmin = 0.
    tmax = (n_times - 1) / sfreq

    data = rng.randn(n_signals, n_epochs * n_times)
    times_data = np.linspace(tmin, tmax, n_times)

    # simulate connectivity from 5Hz..15Hz
    fstart, fend = 5.0, 15.0
    data[1, :] = filter_data(data[0, :], sfreq, fstart, fend,
                             filter_length='auto', fir_design='firwin2',
                             l_trans_bandwidth=trans_bandwidth,
                             h_trans_bandwidth=trans_bandwidth)
    # add some noise, so the spectrum is not exactly zero
    data[1, :] += 1e-2 * rng.randn(n_times * n_epochs)
    data = data.reshape(n_signals, n_epochs, n_times)
    data = np.transpose(data, [1, 0, 2])

    # First we test some invalid parameters:
    pytest.raises(ValueError, spectral_connectivity, data, method='notamethod')
    pytest.raises(ValueError, spectral_connectivity, data,
                  mode='notamode')

    # test invalid fmin fmax settings
    pytest.raises(ValueError, spectral_connectivity, data, fmin=10,
                  fmax=10 + 0.5 * (sfreq / float(n_times)))
    pytest.raises(ValueError, spectral_connectivity, data, fmin=10, fmax=5)
    pytest.raises(ValueError, spectral_connectivity, data, fmin=(0, 11),
                  fmax=(5, 10))
    pytest.raises(ValueError, spectral_connectivity, data, fmin=(11,),
                  fmax=(12, 15))

    # define some frequencies for cwt
    cwt_freqs = np.arange(3, 24.5, 1)

    if method == 'coh' and mode == 'multitaper':
        # only check adaptive estimation for coh to reduce test time
        check_adaptive = [False, True]
    else:
        check_adaptive = [False]

    if method == 'coh' and mode == 'cwt_morlet':
        # so we also test using an array for num cycles
        cwt_n_cycles = 7. * np.ones(len(cwt_freqs))
    else:
        cwt_n_cycles = 7.

    for adaptive in check_adaptive:
        if adaptive:
            mt_bandwidth = 1.
        else:
            mt_bandwidth = None

        con, freqs, times, n, _ = spectral_connectivity(
            data, method=method, mode=mode, indices=None, sfreq=sfreq,
            mt_adaptive=adaptive, mt_low_bias=True,
            mt_bandwidth=mt_bandwidth, cwt_freqs=cwt_freqs,
            cwt_n_cycles=cwt_n_cycles)

        assert (n == n_epochs)
        assert_array_almost_equal(times_data, times)

        if mode == 'multitaper':
            upper_t = 0.95
            lower_t = 0.5
        else:  # mode == 'fourier' or mode == 'cwt_morlet'
            # other estimates have higher variance
            upper_t = 0.8
            lower_t = 0.75

        # test the simulated signal
        gidx = np.searchsorted(freqs, (fstart, fend))
        bidx = np.searchsorted(freqs,
                               (fstart - trans_bandwidth * 2,
                                fend + trans_bandwidth * 2))
        if method == 'coh':
            assert np.all(con[1, 0, gidx[0]:gidx[1]] > upper_t), \
                con[1, 0, gidx[0]:gidx[1]].min()
            # we see something for zero-lag
            assert (np.all(con[1, 0, :bidx[0]] < lower_t))
            assert np.all(con[1, 0, bidx[1]:] < lower_t), \
                con[1, 0, bidx[1]:].max()
        elif method == 'cohy':
            # imaginary coh will be zero
            check = np.imag(con[1, 0, gidx[0]:gidx[1]])
            assert np.all(check < lower_t), check.max()
            # we see something for zero-lag
            assert np.all(np.abs(con[1, 0, gidx[0]:gidx[1]]) > upper_t)
            assert np.all(np.abs(con[1, 0, :bidx[0]]) < lower_t)
            assert np.all(np.abs(con[1, 0, bidx[1]:]) < lower_t)
#......... the rest of this function is omitted .........
Author: Eric89GXL, Project: mne-python, Lines: 101, Source: test_spectral.py
Example 16: apply_inverse_epochs
# Compute inverse solution and for each epoch. By using "return_generator=True"
# stcs will be a generator object instead of a list. This allows us so to
# compute the coherence without having to keep all source estimates in memory.
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)

# Now we are ready to compute the coherence in the gamma band.
# fmin and fmax specify the lower and upper freq. for each band, resp.
sfreq = raw.info['sfreq']  # the sampling frequency
cwt_frequencies = np.array([40])

coh, freqs, times, n_epochs, n_tapers = spectral_connectivity(stcs,
    method='plv', mode='cwt_morlet', indices=indices,
    sfreq=sfreq, cwt_frequencies=cwt_frequencies, cwt_n_cycles=7, faverage=True, n_jobs=2)

#tmin = np.mean(freqs[0])
#tstep = np.mean(freqs[1]) - tmin
coh_new = coh.squeeze()
coh_stc = mne.SourceEstimate(coh_new, vertices=stc.vertno, tmin=tmin,  # tmin=1e-3 * tmin,
                             tstep=1e-3 * 1, subject=subject)
                             # tstep=1e-3 * tstep, subject=subject)
coh_stc.save(subject_path + '/ROI_' + subject + '_' + trigger, ftype='stc')

# Now we can visualize the coherence using the plot method
#brain = coh_stc.plot(subject, 'inflated', 'lh', fmin=0.25, fmid=0.4,
#                     fmax=0.65, time_label='Coherence %0.1f ms', time_viewer=True,
#                     subjects_dir=subjects_dir)
#brain.show_view('lateral')
Author: dongqunxi, Project: GrangerCausality, Lines: 30, Source: ROI_time_freq_version2.py
Example 17: test_spectral_connectivity
def test_spectral_connectivity(main_path="/mnt/Data/Projet-Karim", con_method='coh', freq_bands=[[15., 40.]], freq_band_names=['beta']):
    import os
    from mne.connectivity import spectral_connectivity
    from mne.io import RawFIF

    subj_path = os.path.join(main_path, 'balai')
    print subj_path

    fif_files = [f for f in os.listdir(subj_path) if f.endswith("fif")]
    print fif_files

    for fif_f in fif_files:
        basename = os.path.splitext(fif_f)[0]
        raw = RawFIF(os.path.join(subj_path, fif_f), preload=True)
        print raw
        print len(raw.ch_names)

        sfreq = raw.info['sfreq']
        select_sensors, = np.where(np.array([ch_name[0] == 'M' for ch_name in raw.ch_names], dtype='bool') == True)

        ### save electrode locations
        sens_loc = [raw.info['chs'][i]['loc'][:3] for i in select_sensors]
        sens_loc = np.array(sens_loc)
        loc_filename = os.path.join(subj_path, basename + "_correct_channel_coords.txt")
        np.savetxt(loc_filename, sens_loc, fmt='%s')
        print sens_loc

        ### save electrode names
        sens_names = np.array([raw.ch_names[pos] for pos in select_sensors], dtype="str")
        names_filename = os.path.join(subj_path, basename + "_correct_channel_names.txt")
        np.savetxt(names_filename, sens_names, fmt='%s')

        #start, stop = raw.time_as_index([0, 100])
        data, times = raw[select_sensors, :]
        print np.max(data, axis=0)

        for i, freq_band in enumerate(freq_band_names):
            con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                data.reshape(1, data.shape[0], data.shape[1]), method=con_method, mode='multitaper',
                sfreq=sfreq, fmin=freq_bands[i][0], fmax=freq_bands[i][1],
                faverage=True, tmin=None, mt_adaptive=False, n_jobs=1)
            #print con
            con_matrix = np.array(con_matrix[:, :, 0])
            print con_matrix.shape
            print np.min(con_matrix), np.max(con_matrix)
            #0/0
            #print data_filtered.shape
            #print data-data
            #print np.max(data-data_filtered,axis = 0)
            #0/0
            np_filename = os.path.join(subj_path, basename + "_" + con_method + "_" + freq_band + ".npy")
            np.save(np_filename, con_matrix)
Author: annapasca, Project: neuropype_ephy, Lines: 69, Source: spectral.py
Example 18: epoched_multiple_spectral_proc
def epoched_multiple_spectral_proc(ts_file, sfreq, freq_band_name, freq_band, con_method, epoch_window_length):
    import numpy as np
    import os
    from mne.connectivity import spectral_connectivity

    all_data = np.load(ts_file)
    print all_data.shape
    #print sfreq
    print freq_band
    print freq_band_name

    if len(all_data.shape) != 3:
        print "Warning, all_data should have several samples"
        return []

    conmat_files = []
    for i in range(all_data.shape[0]):
        cur_data = all_data[i, :, :]
        print cur_data.shape

        if epoch_window_length == None:
            data = cur_data.reshape(1, cur_data.shape[0], cur_data.shape[1])
        else:
            nb_splits = cur_data.shape[1] // (epoch_window_length * sfreq)
            print "epoching data with {}s by window, resulting in {} epochs".format(epoch_window_length, nb_splits)
            list_epoched_data = np.array_split(cur_data, nb_splits, axis=1)
            print len(list_epoched_data)
            data = np.array(list_epoched_data)
            print data.shape

        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method,
            mode='multitaper', sfreq=sfreq,
            fmin=freq_band[0], fmax=freq_band[1],
            faverage=True, tmin=None,
            mt_adaptive=False, n_jobs=1)
        print con_matrix.shape
        con_matrix = np.array(con_matrix[:, :, 0])
        print con_matrix.shape
        print np.min(con_matrix), np.max(con_matrix)

        conmat_file = os.path.abspath("conmat_" + con_method + "_" + str(i) + ".npy")
        np.save(conmat_file, con_matrix)
        conmat_files.append(conmat_file)

    return conmat_files
Author: annapasca, Project: neuropype_ephy, Lines: 67, Source: spectral.py
Example 19: memory
######################################################
# Now we are ready to compute the connectivity in the alpha band. Notice
# from the status messages, how mne-python: 1) reads an epoch from the raw
# file, 2) applies SSP and baseline correction, 3) computes the inverse to
# obtain a source estimate, 4) averages the source estimate to obtain a
# time series for each label, 5) includes the label time series in the
# connectivity computation, and then moves to the next epoch. This
# behaviour is because we are using generators and allows us to
# compute connectivity in a computationally efficient manner where the amount
# of memory (RAM) needed is independent from the number of epochs.
# #fmin = 4.
# #fmax = 8.
sfreq = raw.info['sfreq']  # the sampling frequency
con_methods = ['plv', 'pli']
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(label_ts,
    method=con_methods, mode='fourier', sfreq=sfreq, fmin=fmin,
    fmax=fmax, faverage=True)
print con

# con is a 3D array, get the connectivity for the first (and only) freq. band for each method
con_res = dict()
for method, c in zip(con_methods, con):
    con_res[method] = c[:, :, 0]
print con_res

#### Save the connectivity matrix as a text file for further averaging
#np.savetxt(coh_fname, con_res['coh'], delimiter=',')
np.savetxt(plv_fname, con_res['plv'], delimiter=',')
np.savetxt(pli_fname, con_res['pli'], delimiter=',')
##################################################################################################
###################### OR #######################
####### Read from text - coherence matrix file and plot the connectivity circle. :)
Author: CandidaUstine, Project: MCW_MEG, Lines: 31, Source: plot_mne_inverse_coherence_epochs.py
Example 20: test_spectral_connectivity
def test_spectral_connectivity():
    """Test frequency-domain connectivity methods"""
    # Use a case known to have no spurious correlations (it would be bad if
    # nosetests could randomly fail):
    np.random.seed(0)
    sfreq = 50.
    n_signals = 3
    n_epochs = 10
    n_times = 500
    tmin = 0.
    tmax = (n_times - 1) / sfreq

    data = np.random.randn(n_epochs, n_signals, n_times)
    times_data = np.linspace(tmin, tmax, n_times)

    # simulate connectivity from 5Hz..15Hz
    fstart, fend = 5.0, 15.0
    for i in range(n_epochs):
        data[i, 1, :] = band_pass_filter(data[i, 0, :], sfreq, fstart, fend)
        # add some noise, so the spectrum is not exactly zero
        data[i, 1, :] += 1e-2 * np.random.randn(n_times)

    # First we test some invalid parameters:
    assert_raises(ValueError, spectral_connectivity, data, method='notamethod')
    assert_raises(ValueError, spectral_connectivity, data,
                  mode='notamode')

    # test invalid fmin fmax settings
    assert_raises(ValueError, spectral_connectivity, data, fmin=10,
                  fmax=10 + 0.5 * (sfreq / float(n_times)))
    assert_raises(ValueError, spectral_connectivity, data, fmin=10, fmax=5)
    assert_raises(ValueError, spectral_connectivity, data, fmin=