Table S6

[1]:
import scipy.stats
import numpy as np
from statsmodels.stats.multicomp import pairwise_tukeyhsd
from statsmodels.sandbox.stats.multicomp import get_tukey_pvalue
import joblib
import pandas as pd
[2]:
decoding_results = pd.read_hdf("../data/Figure5Revision.h5", key="data")
[3]:
def get_data(decoding_results, task, decoders, methods, window, modality, num_neurons):
    # Jointly trained models store results for both modalities:
    # index 0 corresponds to calcium ("ca"), index 1 to Neuropixels ("np").
    if modality == "ca":
        index = 0
    elif modality == "np":
        index = 1
    else:
        raise ValueError(f"Unknown modality: {modality}")
    accs = []
    keys = []
    for decoder, method in zip(decoders, methods):
        key = f"{modality}_{method}_{window}"
        if "joint" in method:
            # One entry per seed: take the absolute error for this modality
            # and average it.
            seeds = decoding_results[task][decoder][key][num_neurons]
            acc = [abs(s[index]).mean() for s in seeds]
        else:
            # Single-modality results: average the absolute error over the last axis.
            acc = abs(np.array(decoding_results[task][decoder][key][num_neurons])).mean(
                axis=-1
            )
        accs.append(acc)
        keys.append([f"{key}_{decoder}"] * len(acc))
    return np.concatenate(accs), np.concatenate(keys)


def concat_neurons(decoding_results, task, decoder, method, window, modality, n=1000):
    # Same modality indexing as in get_data above.
    if modality == "ca":
        index = 0
    elif modality == "np":
        index = 1
    else:
        raise ValueError(f"Unknown modality: {modality}")
    key = f"{modality}_{method}_{window}"
    accs = []
    if "joint" in method:
        seeds = decoding_results[task][decoder][key][n]
        accs.append([abs(s[index]).mean() for s in seeds])
    else:
        accs.append(
            abs(np.array(decoding_results[task][decoder][key][n])).mean(axis=-1)
        )
    return np.concatenate(accs)
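
As a quick sanity check (illustrative only, not part of the original analysis), the helpers can be called directly to inspect the pooled decoding errors and the group label attached to each seed:

[ ]:
# Illustrative sanity check, assuming the HDF layout loaded above:
# one error value per seed, plus a matching group key for the Tukey test.
errs, labels = get_data(
    decoding_results, "frame_err", ["knn"], ["cebra"], "330", "np", 1000
)
print(errs.shape, labels[0])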

ANOVA for CEBRA, CEBRA-joint, and baseline, 330 ms (10-frame window), 1000 neurons:

[4]:
np_total_stats = scipy.stats.f_oneway(
    concat_neurons(decoding_results, "frame_err", "knn", "cebra", "330", "np"),
    concat_neurons(decoding_results, "frame_err", "knn", "cebra_joint", "330", "np"),
    concat_neurons(decoding_results, "frame_err", "knn", "baseline", "330", "np"),
    concat_neurons(decoding_results, "frame_err", "bayes", "baseline", "330", "np"),
)


print(f"NP total stats \n {np_total_stats}")
NP total stats
 F_onewayResult(statistic=20.21719889707136, pvalue=1.0857679658422556e-05)
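
For reporting, the ANOVA degrees of freedom can be recovered from the same group arrays. This is an illustrative sketch, not part of the original notebook:

[ ]:
# Recompute the group arrays used above and report F with its degrees of freedom.
groups = [
    concat_neurons(decoding_results, "frame_err", "knn", "cebra", "330", "np"),
    concat_neurons(decoding_results, "frame_err", "knn", "cebra_joint", "330", "np"),
    concat_neurons(decoding_results, "frame_err", "knn", "baseline", "330", "np"),
    concat_neurons(decoding_results, "frame_err", "bayes", "baseline", "330", "np"),
]
df_between = len(groups) - 1                       # k - 1
df_within = sum(len(g) for g in groups) - len(groups)  # N - k
print(
    f"F({df_between}, {df_within}) = {np_total_stats.statistic:.2f}, "
    f"p = {np_total_stats.pvalue:.2e}"
)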
[5]:
num_neurons = [1000]
for i in num_neurons:
    print(f"For {i} neurons from np recording (330ms):")

    np_data, np_keys = get_data(
        decoding_results,
        "frame_err",
        ["knn", "knn", "knn", "bayes"],
        ["cebra", "cebra_joint", "baseline", "baseline"],
        "330",
        "np",
        i,
    )

    stats = pairwise_tukeyhsd(
        np_data.flatten(),
        np_keys,
    )
    print(stats)
For 1000 neurons from np recording (330ms):
                Multiple Comparison of Means - Tukey HSD, FWER=0.05
====================================================================================
        group1                group2         meandiff p-adj   lower    upper  reject
------------------------------------------------------------------------------------
np_baseline_330_bayes    np_baseline_330_knn   1.8628 0.5277  -1.9939  5.7195  False
np_baseline_330_bayes       np_cebra_330_knn  -6.3068 0.0013 -10.1635 -2.4501   True
np_baseline_330_bayes np_cebra_joint_330_knn  -6.4403 0.0011  -10.297 -2.5836   True
  np_baseline_330_knn       np_cebra_330_knn  -8.1696 0.0001 -12.0263 -4.3129   True
  np_baseline_330_knn np_cebra_joint_330_knn  -8.3031 0.0001 -12.1597 -4.4464   True
     np_cebra_330_knn np_cebra_joint_330_knn  -0.1334 0.9996  -3.9901  3.7232  False
------------------------------------------------------------------------------------
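
If a machine-readable version of this table is needed, the pairwise results can be collected into a DataFrame. This is an illustrative sketch; it assumes the `TukeyHSDResults` object exposes `groupsunique`, `meandiffs`, `confint`, and `reject`, and that the pair ordering matches the printed summary.

[ ]:
from itertools import combinations

# Rebuild the group pairs in the same order as the summary table (assumption:
# pairs are enumerated over the sorted unique groups, as in the printed output).
pairs = list(combinations(stats.groupsunique, 2))
tukey_df = pd.DataFrame(
    {
        "group1": [g1 for g1, _ in pairs],
        "group2": [g2 for _, g2 in pairs],
        "meandiff": stats.meandiffs,
        "lower": stats.confint[:, 0],
        "upper": stats.confint[:, 1],
        "reject": stats.reject,
    }
)
print(tukey_df)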