diff --git a/.gitignore b/.gitignore
index f5488d5..d9dde27 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,4 @@ dist/*
.pytest_cache/*
venv/
docs/_build
+docs/sg_execution_times.rst
diff --git a/README.md b/README.md
index 0e6ff8b..9689196 100644
--- a/README.md
+++ b/README.md
@@ -59,12 +59,12 @@ This will work on clean systems, but if you encounter issues you may need to run
Requirements
-+ python>=3.6
++ python>=3.9
-+ pandas>=0.18.0
-+ seaborn>=0.7.1
-+ matplotlib>=1.5.1
-+ scipy>=0.17.1
-+ numpy>=1.10.4
-+ future
++ pandas>=2.0.0
++ seaborn>=0.12.0
++ matplotlib>=3.5.0
++ scipy>=1.10.0
++ numpy>=2.0.0
++ joblib>=1.3.0
+ pytest (for development)
If installing from github (instead of pip), you must also install the requirements:
diff --git a/benchmark_cluster.py b/benchmark_cluster.py
new file mode 100644
index 0000000..34a214a
--- /dev/null
+++ b/benchmark_cluster.py
@@ -0,0 +1,43 @@
+import quail
+import time
+import numpy as np
+import pandas as pd
+
+def run_benchmark():
+ print("Generating data...")
+ # 20 subjects, 10 lists each, 16 items
+ n_subj = 20
+ n_lists = 10
+ list_len = 16
+
+ # Build presentation and recall data
+ # Pres: every list presents the same items, '0' through '15', in order
+ pres = [[[str(k) for k in range(list_len)] for j in range(n_lists)] for i in range(n_subj)]
+ # Rec: a random permutation of the presented items (perfect recall, shuffled order)
+ rec = [[[str(k) for k in np.random.permutation(range(list_len))] for j in range(n_lists)] for i in range(n_subj)]
+
+ # Features: each item carries a scalar feature 'val' equal to its list position
+ features = [{'item': str(k), 'val': float(k)} for k in range(list_len)]
+ # Expand features so every list of every subject reuses the same per-item dicts
+ pres_feat = [[features for j in range(n_lists)] for i in range(n_subj)]
+
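+ # Shape assumption: pres and rec are nested as [subject][list][item], the layout
+ # quail.Egg expects, and features mirrors pres with one feature dict per item.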
+ egg = quail.Egg(pres=pres, rec=rec, features=pres_feat)
+
+ print("Starting fingerprint analysis...")
+ start = time.time()
+ # Compute the memory fingerprint with permutation correction (the expensive part),
+ # using only 10 permutations to keep the benchmark quick
+ res = egg.analyze('fingerprint', n_perms=10, permute=True, parallel=False)
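+ # NOTE: the permute/n_perms/parallel keyword names are assumed from quail's
+ # analyze API; adjust them if the installed version exposes a different signature.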
+ end = time.time()
+
+ print(f"Analysis complete in {end - start:.2f} seconds.")
+ print(f"Result shape: {res.data.shape}")
+
+if __name__ == '__main__':
+ run_benchmark()
diff --git a/docs/_build/doctrees/api.doctree b/docs/_build/doctrees/api.doctree
index ee1e291..a21e92e 100644
Binary files a/docs/_build/doctrees/api.doctree and b/docs/_build/doctrees/api.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/crack_egg.doctree b/docs/_build/doctrees/auto_examples/crack_egg.doctree
index a7bf222..22f29b4 100644
Binary files a/docs/_build/doctrees/auto_examples/crack_egg.doctree and b/docs/_build/doctrees/auto_examples/crack_egg.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/create_egg.doctree b/docs/_build/doctrees/auto_examples/create_egg.doctree
index 666c8b0..0ec1a5e 100644
Binary files a/docs/_build/doctrees/auto_examples/create_egg.doctree and b/docs/_build/doctrees/auto_examples/create_egg.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/create_multisubject_egg.doctree b/docs/_build/doctrees/auto_examples/create_multisubject_egg.doctree
index 30aef90..a845d08 100644
Binary files a/docs/_build/doctrees/auto_examples/create_multisubject_egg.doctree and b/docs/_build/doctrees/auto_examples/create_multisubject_egg.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/decode_speech.doctree b/docs/_build/doctrees/auto_examples/decode_speech.doctree
index 10579c6..b355db4 100644
Binary files a/docs/_build/doctrees/auto_examples/decode_speech.doctree and b/docs/_build/doctrees/auto_examples/decode_speech.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/fingerprint_optimalpresenter.doctree b/docs/_build/doctrees/auto_examples/fingerprint_optimalpresenter.doctree
index 6f3f12e..5c89552 100644
Binary files a/docs/_build/doctrees/auto_examples/fingerprint_optimalpresenter.doctree and b/docs/_build/doctrees/auto_examples/fingerprint_optimalpresenter.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/index.doctree b/docs/_build/doctrees/auto_examples/index.doctree
index 8630c0e..353c9b3 100644
Binary files a/docs/_build/doctrees/auto_examples/index.doctree and b/docs/_build/doctrees/auto_examples/index.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_accuracy.doctree b/docs/_build/doctrees/auto_examples/plot_accuracy.doctree
index 8462e23..cbf5950 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_accuracy.doctree and b/docs/_build/doctrees/auto_examples/plot_accuracy.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_existing_axes.doctree b/docs/_build/doctrees/auto_examples/plot_existing_axes.doctree
index 4ea4031..2c39509 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_existing_axes.doctree and b/docs/_build/doctrees/auto_examples/plot_existing_axes.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_fingerprint.doctree b/docs/_build/doctrees/auto_examples/plot_fingerprint.doctree
index 2a70bb8..3f9b246 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_fingerprint.doctree and b/docs/_build/doctrees/auto_examples/plot_fingerprint.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_lagcrp.doctree b/docs/_build/doctrees/auto_examples/plot_lagcrp.doctree
index a8b7523..44ff65d 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_lagcrp.doctree and b/docs/_build/doctrees/auto_examples/plot_lagcrp.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_pfr.doctree b/docs/_build/doctrees/auto_examples/plot_pfr.doctree
index 50c11c0..e9235f6 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_pfr.doctree and b/docs/_build/doctrees/auto_examples/plot_pfr.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_pnr.doctree b/docs/_build/doctrees/auto_examples/plot_pnr.doctree
index 96c5bd1..5505ed3 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_pnr.doctree and b/docs/_build/doctrees/auto_examples/plot_pnr.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_spc.doctree b/docs/_build/doctrees/auto_examples/plot_spc.doctree
index f942a10..0b88b92 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_spc.doctree and b/docs/_build/doctrees/auto_examples/plot_spc.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/plot_tempclust.doctree b/docs/_build/doctrees/auto_examples/plot_tempclust.doctree
deleted file mode 100644
index 1a92b24..0000000
Binary files a/docs/_build/doctrees/auto_examples/plot_tempclust.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/auto_examples/plot_temporal.doctree b/docs/_build/doctrees/auto_examples/plot_temporal.doctree
index 127c211..0264942 100644
Binary files a/docs/_build/doctrees/auto_examples/plot_temporal.doctree and b/docs/_build/doctrees/auto_examples/plot_temporal.doctree differ
diff --git a/docs/_build/doctrees/auto_examples/recmat2egg.doctree b/docs/_build/doctrees/auto_examples/recmat2egg.doctree
index 685cd78..f072511 100644
Binary files a/docs/_build/doctrees/auto_examples/recmat2egg.doctree and b/docs/_build/doctrees/auto_examples/recmat2egg.doctree differ
diff --git a/docs/_build/doctrees/environment.pickle b/docs/_build/doctrees/environment.pickle
index 048fc31..b8a36f1 100644
Binary files a/docs/_build/doctrees/environment.pickle and b/docs/_build/doctrees/environment.pickle differ
diff --git a/docs/_build/doctrees/index.doctree b/docs/_build/doctrees/index.doctree
index 005abed..3ac229b 100644
Binary files a/docs/_build/doctrees/index.doctree and b/docs/_build/doctrees/index.doctree differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_0.png
deleted file mode 100644
index 921703e..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_1.png
deleted file mode 100644
index bcda101..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_14_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_14_0.png
deleted file mode 100644
index 1e6f0ef..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_14_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_16_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_16_0.png
deleted file mode 100644
index 10151d0..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_16_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_18_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_18_0.png
deleted file mode 100644
index f203eaa..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_18_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_0.png
deleted file mode 100644
index 1d819ea..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_1.png
deleted file mode 100644
index 1be6ddc..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_0.png
deleted file mode 100644
index 06ee0b5..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_1.png
deleted file mode 100644
index 70840f6..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_0.png
deleted file mode 100644
index 2fcc599..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_1.png
deleted file mode 100644
index 6ef4942..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_2.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_2.png
deleted file mode 100644
index d53c958..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_2.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_3.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_3.png
deleted file mode 100644
index d7a2c79..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_3.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_0.png
deleted file mode 100644
index 20a29af..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_1.png
deleted file mode 100644
index a0cd659..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_2.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_2.png
deleted file mode 100644
index 515e183..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_2.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_3.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_3.png
deleted file mode 100644
index e10e27b..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_3.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_5_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_5_0.png
deleted file mode 100644
index 414e7b8..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_5_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_7_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_7_0.png
deleted file mode 100644
index 620981b..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_7_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_9_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_9_0.png
deleted file mode 100644
index 3f48b4f..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_9_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_0.png
deleted file mode 100644
index 2af412e..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_1.png
deleted file mode 100644
index 0822a09..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_0.png
deleted file mode 100644
index 7cf2a16..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_1.png
deleted file mode 100644
index 9da2cec..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_0.png
deleted file mode 100644
index 7157b9c..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_1.png
deleted file mode 100644
index 95b80dc..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_0.png
deleted file mode 100644
index 42d60a5..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_1.png
deleted file mode 100644
index 13b34b3..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_0.png
deleted file mode 100644
index c1b954c..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_1.png
deleted file mode 100644
index 90d306b..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_0.png
deleted file mode 100644
index e323955..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_1.png
deleted file mode 100644
index 5f68ba0..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_37_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_37_0.png
deleted file mode 100644
index a35777f..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_37_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_39_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_39_0.png
deleted file mode 100644
index b7a7719..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_39_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_2_0.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_2_0.png
deleted file mode 100644
index 33f481e..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_2_0.png and /dev/null differ
diff --git a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_6_1.png b/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_6_1.png
deleted file mode 100644
index 18348c4..0000000
Binary files a/docs/_build/doctrees/nbsphinx/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_6_1.png and /dev/null differ
diff --git a/docs/_build/doctrees/quail.Egg.doctree b/docs/_build/doctrees/quail.Egg.doctree
index f8a2b20..13a8199 100644
Binary files a/docs/_build/doctrees/quail.Egg.doctree and b/docs/_build/doctrees/quail.Egg.doctree differ
diff --git a/docs/_build/doctrees/quail.analyze.doctree b/docs/_build/doctrees/quail.analyze.doctree
index a17cd51..4a4e811 100644
Binary files a/docs/_build/doctrees/quail.analyze.doctree and b/docs/_build/doctrees/quail.analyze.doctree differ
diff --git a/docs/_build/doctrees/quail.crack_egg.doctree b/docs/_build/doctrees/quail.crack_egg.doctree
index aa81b47..871252d 100644
Binary files a/docs/_build/doctrees/quail.crack_egg.doctree and b/docs/_build/doctrees/quail.crack_egg.doctree differ
diff --git a/docs/_build/doctrees/quail.decode_speech.doctree b/docs/_build/doctrees/quail.decode_speech.doctree
index cc5a54f..68e5abb 100644
Binary files a/docs/_build/doctrees/quail.decode_speech.doctree and b/docs/_build/doctrees/quail.decode_speech.doctree differ
diff --git a/docs/_build/doctrees/quail.load_egg.doctree b/docs/_build/doctrees/quail.load_egg.doctree
index c119fa5..1ed376d 100644
Binary files a/docs/_build/doctrees/quail.load_egg.doctree and b/docs/_build/doctrees/quail.load_egg.doctree differ
diff --git a/docs/_build/doctrees/quail.load_example_data.doctree b/docs/_build/doctrees/quail.load_example_data.doctree
index f51cb2b..98a6291 100644
Binary files a/docs/_build/doctrees/quail.load_example_data.doctree and b/docs/_build/doctrees/quail.load_example_data.doctree differ
diff --git a/docs/_build/doctrees/quail.plot.doctree b/docs/_build/doctrees/quail.plot.doctree
index 531dc52..c481189 100644
Binary files a/docs/_build/doctrees/quail.plot.doctree and b/docs/_build/doctrees/quail.plot.doctree differ
diff --git a/docs/_build/doctrees/quail.recmat2egg.doctree b/docs/_build/doctrees/quail.recmat2egg.doctree
index 037bf10..2bc343c 100644
Binary files a/docs/_build/doctrees/quail.recmat2egg.doctree and b/docs/_build/doctrees/quail.recmat2egg.doctree differ
diff --git a/docs/_build/doctrees/quail.stack_eggs.doctree b/docs/_build/doctrees/quail.stack_eggs.doctree
index ecad55a..4396111 100644
Binary files a/docs/_build/doctrees/quail.stack_eggs.doctree and b/docs/_build/doctrees/quail.stack_eggs.doctree differ
diff --git a/docs/_build/doctrees/tutorial.doctree b/docs/_build/doctrees/tutorial.doctree
index 23072b3..05e0152 100644
Binary files a/docs/_build/doctrees/tutorial.doctree and b/docs/_build/doctrees/tutorial.doctree differ
diff --git a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/advanced_plotting-checkpoint.doctree b/docs/_build/doctrees/tutorial/.ipynb_checkpoints/advanced_plotting-checkpoint.doctree
deleted file mode 100644
index 5b266e7..0000000
Binary files a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/advanced_plotting-checkpoint.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/basic_analyze_and_plot-checkpoint.doctree b/docs/_build/doctrees/tutorial/.ipynb_checkpoints/basic_analyze_and_plot-checkpoint.doctree
deleted file mode 100644
index dc99200..0000000
Binary files a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/basic_analyze_and_plot-checkpoint.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/egg-checkpoint.doctree b/docs/_build/doctrees/tutorial/.ipynb_checkpoints/egg-checkpoint.doctree
deleted file mode 100644
index 773c372..0000000
Binary files a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/egg-checkpoint.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/fingerprint-checkpoint.doctree b/docs/_build/doctrees/tutorial/.ipynb_checkpoints/fingerprint-checkpoint.doctree
deleted file mode 100644
index 8ff3992..0000000
Binary files a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/fingerprint-checkpoint.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/optimal presenter-checkpoint.doctree b/docs/_build/doctrees/tutorial/.ipynb_checkpoints/optimal presenter-checkpoint.doctree
deleted file mode 100644
index 5d56506..0000000
Binary files a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/optimal presenter-checkpoint.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/speech_decoding-checkpoint.doctree b/docs/_build/doctrees/tutorial/.ipynb_checkpoints/speech_decoding-checkpoint.doctree
deleted file mode 100644
index e97b525..0000000
Binary files a/docs/_build/doctrees/tutorial/.ipynb_checkpoints/speech_decoding-checkpoint.doctree and /dev/null differ
diff --git a/docs/_build/doctrees/tutorial/advanced_plotting.doctree b/docs/_build/doctrees/tutorial/advanced_plotting.doctree
index 47a8a49..63a0a1a 100644
Binary files a/docs/_build/doctrees/tutorial/advanced_plotting.doctree and b/docs/_build/doctrees/tutorial/advanced_plotting.doctree differ
diff --git a/docs/_build/doctrees/tutorial/basic_analyze_and_plot.doctree b/docs/_build/doctrees/tutorial/basic_analyze_and_plot.doctree
index 3bba439..4e701d2 100644
Binary files a/docs/_build/doctrees/tutorial/basic_analyze_and_plot.doctree and b/docs/_build/doctrees/tutorial/basic_analyze_and_plot.doctree differ
diff --git a/docs/_build/doctrees/tutorial/egg.doctree b/docs/_build/doctrees/tutorial/egg.doctree
index dd1cfd9..9132552 100644
Binary files a/docs/_build/doctrees/tutorial/egg.doctree and b/docs/_build/doctrees/tutorial/egg.doctree differ
diff --git a/docs/_build/doctrees/tutorial/fingerprint.doctree b/docs/_build/doctrees/tutorial/fingerprint.doctree
index f47cf99..1899998 100644
Binary files a/docs/_build/doctrees/tutorial/fingerprint.doctree and b/docs/_build/doctrees/tutorial/fingerprint.doctree differ
diff --git a/docs/_build/doctrees/tutorial/optimal presenter.doctree b/docs/_build/doctrees/tutorial/optimal presenter.doctree
index 19015b8..70ed27e 100644
Binary files a/docs/_build/doctrees/tutorial/optimal presenter.doctree and b/docs/_build/doctrees/tutorial/optimal presenter.doctree differ
diff --git a/docs/_build/doctrees/tutorial/speech_decoding.doctree b/docs/_build/doctrees/tutorial/speech_decoding.doctree
index cb8a64b..80ac95f 100644
Binary files a/docs/_build/doctrees/tutorial/speech_decoding.doctree and b/docs/_build/doctrees/tutorial/speech_decoding.doctree differ
diff --git a/docs/_build/html/.buildinfo b/docs/_build/html/.buildinfo
index c34f821..3330c1f 100644
--- a/docs/_build/html/.buildinfo
+++ b/docs/_build/html/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: c7fe727da017c84d722f3c6469a0ea04
+config: 06f8dbfe2189432395f7cdc548d7e758
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs/_build/html/_downloads/auto_examples_jupyter.zip b/docs/_build/html/_downloads/auto_examples_jupyter.zip
deleted file mode 100644
index 3b14983..0000000
Binary files a/docs/_build/html/_downloads/auto_examples_jupyter.zip and /dev/null differ
diff --git a/docs/_build/html/_downloads/auto_examples_python.zip b/docs/_build/html/_downloads/auto_examples_python.zip
deleted file mode 100644
index 82c94f3..0000000
Binary files a/docs/_build/html/_downloads/auto_examples_python.zip and /dev/null differ
diff --git a/docs/_build/html/_downloads/crack_egg.ipynb b/docs/_build/html/_downloads/crack_egg.ipynb
deleted file mode 100644
index 7f097d9..0000000
--- a/docs/_build/html/_downloads/crack_egg.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Crack Egg\n\n\nThis an example of how to crack an egg (take a slice of subjects/lists from it)\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n#import\nimport quail\n\n#load data\negg = quail.load('example')\n\n#crack egg\ncracked_egg = quail.crack_egg(egg, subjects=[0], lists=[0])\n\ncracked_egg.info()\n\npres = cracked_egg.get_pres_items().to_numpy()[0]\nrec = cracked_egg.get_rec_items().to_numpy()[0]\n\ndef distmat(egg, feature, distdict):\n f = [xi[feature] for xi in egg.get_pres_features()]\n return cdist(f, f, distdict[feature])\n\n\nfor idx in range(len(rec)-1):\n ind1 = np.where(pres==rec[idx])[0][0]\n ind2 = np.where(pres==rec[idx+1])[0][0]\n dists = dist[ind1, :]\n cdist = dist[ind1, ind2]\n rank = np.mean(np.where(np.sort(dists)[::-1] == cdist))"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/crack_egg.py b/docs/_build/html/_downloads/crack_egg.py
deleted file mode 100644
index 3966b6e..0000000
--- a/docs/_build/html/_downloads/crack_egg.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Crack Egg
-=============================
-
-This an example of how to crack an egg (take a slice of subjects/lists from it)
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-#import
-import quail
-
-#load data
-egg = quail.load('example')
-
-#crack egg
-cracked_egg = quail.crack_egg(egg, subjects=[0], lists=[0])
-
-cracked_egg.info()
-
-pres = cracked_egg.get_pres_items().to_numpy()[0]
-rec = cracked_egg.get_rec_items().to_numpy()[0]
-
-def distmat(egg, feature, distdict):
- f = [xi[feature] for xi in egg.get_pres_features()]
- return cdist(f, f, distdict[feature])
-
-
-for idx in range(len(rec)-1):
- ind1 = np.where(pres==rec[idx])[0][0]
- ind2 = np.where(pres==rec[idx+1])[0][0]
- dists = dist[ind1, :]
- cdist = dist[ind1, ind2]
- rank = np.mean(np.where(np.sort(dists)[::-1] == cdist))
diff --git a/docs/_build/html/_downloads/create_egg.ipynb b/docs/_build/html/_downloads/create_egg.ipynb
deleted file mode 100644
index d8a571b..0000000
--- a/docs/_build/html/_downloads/create_egg.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Create an egg\n\n\nAn egg is made up of two primary pieces of data: `pres`, which are the\nwords/stimuli that were presented to a subject and `rec`, which are the\nwords/stimuli that were recalled by the subject.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\nimport quail\n\n\n# generate some fake data\nnext_presented = ['CAT', 'DOG', 'SHOE', 'HORSE']\nnext_recalled = ['HORSE', 'DOG', 'CAT']\n\nnext_features = [{\n 'category' : 'animal',\n 'size' : 'bigger',\n 'starting letter' : 'C',\n 'length' : 3\n },\n {\n 'category' : 'animal',\n 'size' : 'bigger',\n 'starting letter' : 'D',\n 'length' : 3\n },\n {\n 'category' : 'object',\n 'size' : 'smaller',\n 'starting letter' : 'S',\n 'length' : 4\n },\n {\n 'category' : 'animal',\n 'size' : 'bigger',\n 'starting letter' : 'H',\n 'length' : 5\n }\n]\ndist_funcs = {\n 'category' : 'lambda a, b: int(a!=b)',\n 'size' : 'lambda a, b: int(a!=b)',\n 'starting letter' : 'lambda a, b: int(a!=b)',\n 'length' : 'lambda a, b: np.linalg.norm(np.subtract(a,b))'\n}\negg = quail.Egg(pres=[next_presented], rec=[next_recalled], features=[next_features], dist_funcs=dist_funcs)\n\negg.analyze('lagcrp').plot()"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/create_egg.py b/docs/_build/html/_downloads/create_egg.py
deleted file mode 100644
index c7c8dd9..0000000
--- a/docs/_build/html/_downloads/create_egg.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Create an egg
-=============================
-
-An egg is made up of two primary pieces of data: `pres`, which are the
-words/stimuli that were presented to a subject and `rec`, which are the
-words/stimuli that were recalled by the subject.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-import quail
-
-
-# generate some fake data
-next_presented = ['CAT', 'DOG', 'SHOE', 'HORSE']
-next_recalled = ['HORSE', 'DOG', 'CAT']
-
-next_features = [{
- 'category' : 'animal',
- 'size' : 'bigger',
- 'starting letter' : 'C',
- 'length' : 3
- },
- {
- 'category' : 'animal',
- 'size' : 'bigger',
- 'starting letter' : 'D',
- 'length' : 3
- },
- {
- 'category' : 'object',
- 'size' : 'smaller',
- 'starting letter' : 'S',
- 'length' : 4
- },
- {
- 'category' : 'animal',
- 'size' : 'bigger',
- 'starting letter' : 'H',
- 'length' : 5
- }
-]
-dist_funcs = {
- 'category' : 'lambda a, b: int(a!=b)',
- 'size' : 'lambda a, b: int(a!=b)',
- 'starting letter' : 'lambda a, b: int(a!=b)',
- 'length' : 'lambda a, b: np.linalg.norm(np.subtract(a,b))'
-}
-egg = quail.Egg(pres=[next_presented], rec=[next_recalled], features=[next_features], dist_funcs=dist_funcs)
-
-egg.analyze('lagcrp').plot()
diff --git a/docs/_build/html/_downloads/create_multisubject_egg.ipynb b/docs/_build/html/_downloads/create_multisubject_egg.ipynb
deleted file mode 100644
index d44e46c..0000000
--- a/docs/_build/html/_downloads/create_multisubject_egg.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Create a multisubject egg\n\n\nAn egg is made up of two primary pieces of data: `pres`, which are the\nwords/stimuli that were presented to a subject and `rec`, which are the\nwords/stimuli that were recalled by the subject.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\nimport quail\nimport numpy as np\n\n# presented words\npresented_words = [[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']],[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']]]\n\n# recalled words\nrecalled_words = [[['bat', 'cat', 'goat', 'hat'],['animal', 'horse', 'zoo']],[['bat', 'cat', 'goat'],['animal', 'horse']]]\n\n# create egg\negg = quail.Egg(pres=presented_words, rec=recalled_words)\n\n# analyze and plot\negg.analyze('accuracy').plot(plot_style='violin', title='Average Recall Accuracy')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/create_multisubject_egg.py b/docs/_build/html/_downloads/create_multisubject_egg.py
deleted file mode 100644
index 6f84c9d..0000000
--- a/docs/_build/html/_downloads/create_multisubject_egg.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Create a multisubject egg
-=============================
-
-An egg is made up of two primary pieces of data: `pres`, which are the
-words/stimuli that were presented to a subject and `rec`, which are the
-words/stimuli that were recalled by the subject.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-import quail
-import numpy as np
-
-# presented words
-presented_words = [[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']],[['cat', 'bat', 'hat', 'goat'],['zoo', 'animal', 'zebra', 'horse']]]
-
-# recalled words
-recalled_words = [[['bat', 'cat', 'goat', 'hat'],['animal', 'horse', 'zoo']],[['bat', 'cat', 'goat'],['animal', 'horse']]]
-
-# create egg
-egg = quail.Egg(pres=presented_words, rec=recalled_words)
-
-# analyze and plot
-egg.analyze('accuracy').plot(plot_style='violin', title='Average Recall Accuracy')
diff --git a/docs/_build/html/_downloads/decode_speech.ipynb b/docs/_build/html/_downloads/decode_speech.ipynb
deleted file mode 100644
index b412b4a..0000000
--- a/docs/_build/html/_downloads/decode_speech.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Decode speech\n\n\nThis example plots free recall accuracy for a single subject.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n#import\nimport quail\n\n# decode speech\nrecall_data = quail.decode_speech('../data/sample.wav', save=True,\n speech_context=['DONKEY', 'PETUNIA'],\n keypath='/Users/andyheusser/Documents/cdl/credentials/efficient-learning-553bf474f805.json')\n\n# print results\nprint(recall_data)"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/decode_speech.py b/docs/_build/html/_downloads/decode_speech.py
deleted file mode 100644
index df8fa3e..0000000
--- a/docs/_build/html/_downloads/decode_speech.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Decode speech
-=============================
-
-This example plots free recall accuracy for a single subject.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-#import
-import quail
-
-# decode speech
-recall_data = quail.decode_speech('../data/sample.wav', save=True,
- speech_context=['DONKEY', 'PETUNIA'],
- keypath='/Users/andyheusser/Documents/cdl/credentials/efficient-learning-553bf474f805.json')
-
-# print results
-print(recall_data)
diff --git a/docs/_build/html/_downloads/fingerprint_optimalpresenter.ipynb b/docs/_build/html/_downloads/fingerprint_optimalpresenter.ipynb
deleted file mode 100644
index b716fac..0000000
--- a/docs/_build/html/_downloads/fingerprint_optimalpresenter.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Optimal presenter\n\n\nAn example of how to reorder stimuli with the optimal presenter class\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\nimport numpy as np\nimport quail\nfrom quail import Fingerprint, OptimalPresenter\n\n# generate some fake data\nnext_presented = ['CAT', 'DOG', 'SHOE', 'BAT']\nnext_recalled = ['DOG', 'CAT', 'BAT', 'SHOE']\n\nnext_features = [{\n 'category' : 'animal',\n 'size' : 'bigger',\n 'starting letter' : 'C',\n 'length' : 3\n },\n {\n 'category' : 'animal',\n 'size' : 'bigger',\n 'starting letter' : 'D',\n 'length' : 3\n },\n {\n 'category' : 'object',\n 'size' : 'smaller',\n 'starting letter' : 'S',\n 'length' : 4\n },\n {\n 'category' : 'animal',\n 'size' : 'bigger',\n 'starting letter' : 'B',\n 'length' : 3\n }]\n\negg = quail.Egg(pres=[next_presented], rec=[next_recalled], features=[next_features])\n\n# initialize fingerprint\nfingerprint = Fingerprint(init=egg)\n\n# initialize presenter\nparams = {\n 'fingerprint' : fingerprint}\npresenter = OptimalPresenter(params=params, strategy='stabilize')\n\n# update the fingerprint\nfingerprint.update(egg)\n\n# reorder next list\nreordered_egg = presenter.order(egg, method='permute', nperms=100)"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/fingerprint_optimalpresenter.py b/docs/_build/html/_downloads/fingerprint_optimalpresenter.py
deleted file mode 100644
index 2c979b6..0000000
--- a/docs/_build/html/_downloads/fingerprint_optimalpresenter.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Optimal presenter
-=============================
-
-An example of how to reorder stimuli with the optimal presenter class
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-import numpy as np
-import quail
-from quail import Fingerprint, OptimalPresenter
-
-# generate some fake data
-next_presented = ['CAT', 'DOG', 'SHOE', 'BAT']
-next_recalled = ['DOG', 'CAT', 'BAT', 'SHOE']
-
-next_features = [{
- 'category' : 'animal',
- 'size' : 'bigger',
- 'starting letter' : 'C',
- 'length' : 3
- },
- {
- 'category' : 'animal',
- 'size' : 'bigger',
- 'starting letter' : 'D',
- 'length' : 3
- },
- {
- 'category' : 'object',
- 'size' : 'smaller',
- 'starting letter' : 'S',
- 'length' : 4
- },
- {
- 'category' : 'animal',
- 'size' : 'bigger',
- 'starting letter' : 'B',
- 'length' : 3
- }]
-
-egg = quail.Egg(pres=[next_presented], rec=[next_recalled], features=[next_features])
-
-# initialize fingerprint
-fingerprint = Fingerprint(init=egg)
-
-# initialize presenter
-params = {
- 'fingerprint' : fingerprint}
-presenter = OptimalPresenter(params=params, strategy='stabilize')
-
-# update the fingerprint
-fingerprint.update(egg)
-
-# reorder next list
-reordered_egg = presenter.order(egg, method='permute', nperms=100)
diff --git a/docs/_build/html/_downloads/plot_accuracy.ipynb b/docs/_build/html/_downloads/plot_accuracy.ipynb
deleted file mode 100644
index fa592d4..0000000
--- a/docs/_build/html/_downloads/plot_accuracy.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Plot free recall accuracy\n\n\nThis example plots free recall accuracy for a single subject.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n#import\nimport quail\n\n#load data\negg = quail.load('example')\n\n#analysis\nfegg = egg.analyze('accuracy', listgroup=['condition1']*4+['condition2']*4)\n\n#plot by list\nfegg.plot(plot_style='violin', title='Average Recall Accuracy')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_accuracy.py b/docs/_build/html/_downloads/plot_accuracy.py
deleted file mode 100644
index 91c2197..0000000
--- a/docs/_build/html/_downloads/plot_accuracy.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot free recall accuracy
-=============================
-
-This example plots free recall accuracy for a single subject.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-#import
-import quail
-
-#load data
-egg = quail.load('example')
-
-#analysis
-fegg = egg.analyze('accuracy', listgroup=['condition1']*4+['condition2']*4)
-
-#plot by list
-fegg.plot(plot_style='violin', title='Average Recall Accuracy')
diff --git a/docs/_build/html/_downloads/plot_existing_axes.ipynb b/docs/_build/html/_downloads/plot_existing_axes.ipynb
deleted file mode 100644
index e0f3343..0000000
--- a/docs/_build/html/_downloads/plot_existing_axes.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Plot free recall accuracy in an existing ax object\n\n\nThis example plots free recall accuracy in an existing Matplotlib Axes object.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n#import\nimport quail\nimport matplotlib.pyplot as plt\n\n#load data\negg = quail.load('example')\n\n#analysis\nfegg = egg.analyze('accuracy', listgroup=['condition1']*4+['condition2']*4)\n\n#plot by list\nfig = plt.figure()\nax = fig.add_subplot(2,1,1)\nfegg.plot(plot_style='violin', title='Average Recall Accuracy', ax=ax)"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_existing_axes.py b/docs/_build/html/_downloads/plot_existing_axes.py
deleted file mode 100644
index eb687ee..0000000
--- a/docs/_build/html/_downloads/plot_existing_axes.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot free recall accuracy in an existing ax object
-=============================
-
-This example plots free recall accuracy in an existing Matplotlib Axes object.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-#import
-import quail
-import matplotlib.pyplot as plt
-
-#load data
-egg = quail.load('example')
-
-#analysis
-fegg = egg.analyze('accuracy', listgroup=['condition1']*4+['condition2']*4)
-
-#plot by list
-fig = plt.figure()
-ax = fig.add_subplot(2,1,1)
-fegg.plot(plot_style='violin', title='Average Recall Accuracy', ax=ax)
diff --git a/docs/_build/html/_downloads/plot_fingerprint.ipynb b/docs/_build/html/_downloads/plot_fingerprint.ipynb
deleted file mode 100644
index c0349c2..0000000
--- a/docs/_build/html/_downloads/plot_fingerprint.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Plot memory fingerprint\n\n\nThis example plots a fingerprint. Briefly, a fingerprint\ncan be described as a summary of how a subject organizes information with\nrespect to the multiple features of the stimuli. In addition to presentation\nand recall data, a features object is passed to the Egg class. It is comprised\nof a dictionary for each presented stimulus that contains feature dimensions and\nvalues for each stimulus.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n#import\nimport quail\n\n#load data\negg = quail.load('example')\n\n# analyze and plot\negg.analyze('fingerprint', listgroup=['average']*8, features=['temporal']).plot(title='Memory Fingerprint')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_fingerprint.py b/docs/_build/html/_downloads/plot_fingerprint.py
deleted file mode 100644
index 76f47e0..0000000
--- a/docs/_build/html/_downloads/plot_fingerprint.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot memory fingerprint
-=============================
-
-This example plots a fingerprint. Briefly, a fingerprint
-can be described as a summary of how a subject organizes information with
-respect to the multiple features of the stimuli. In addition to presentation
-and recall data, a features object is passed to the Egg class. It is comprised
-of a dictionary for each presented stimulus that contains feature dimensions and
-values for each stimulus.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-#import
-import quail
-
-#load data
-egg = quail.load('example')
-
-# analyze and plot
-egg.analyze('fingerprint', listgroup=['average']*8, features=['temporal']).plot(title='Memory Fingerprint')
diff --git a/docs/_build/html/_downloads/plot_lagcrp.ipynb b/docs/_build/html/_downloads/plot_lagcrp.ipynb
deleted file mode 100644
index f46db0e..0000000
--- a/docs/_build/html/_downloads/plot_lagcrp.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Plot Lag-CRP\n\n\nThis example plots a Lag-CRP as described in Kahana et al (1996).\nGiven the recall of a stimulus in position n, this plot shows the probability of\nrecalling stimuli in neighboring stimulus positions (n+/-5).\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n# load data\negg = quail.load('example')\n\n# analyze and plot\negg.analyze('lagcrp', listgroup=['average']*8).plot(title='Lag-CRP')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_lagcrp.py b/docs/_build/html/_downloads/plot_lagcrp.py
deleted file mode 100644
index f1fb2b9..0000000
--- a/docs/_build/html/_downloads/plot_lagcrp.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot Lag-CRP
-=============================
-
-This example plots a Lag-CRP as described in Kahana et al (1996).
-Given the recall of a stimulus in position n, this plot shows the probability of
-recalling stimuli in neighboring stimulus positions (n+/-5).
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-# load data
-egg = quail.load('example')
-
-# analyze and plot
-egg.analyze('lagcrp', listgroup=['average']*8).plot(title='Lag-CRP')
diff --git a/docs/_build/html/_downloads/plot_pfr.ipynb b/docs/_build/html/_downloads/plot_pfr.ipynb
deleted file mode 100644
index 6fe30b9..0000000
--- a/docs/_build/html/_downloads/plot_pfr.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n# Plot probability of first recall\n\n\nThis example plots the probability of an item being recalled first given its\nlist position.\n\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n#load data\negg = quail.load_example_data()\n\n# analysis\nanalyzed_data = quail.analyze(egg, analysis='pfr', listgroup=['average']*8)\n\n# plot\nquail.plot(analyzed_data, title='Probability of First Recall')"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.11",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_pfr.py b/docs/_build/html/_downloads/plot_pfr.py
deleted file mode 100644
index d8a4cf2..0000000
--- a/docs/_build/html/_downloads/plot_pfr.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot probability of first recall
-=============================
-
-This example plots the probability of an item being recalled first given its
-list position.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-#load data
-egg = quail.load_example_data()
-
-# analysis
-analyzed_data = quail.analyze(egg, analysis='pfr', listgroup=['average']*8)
-
-# plot
-quail.plot(analyzed_data, title='Probability of First Recall')
diff --git a/docs/_build/html/_downloads/plot_pnr.ipynb b/docs/_build/html/_downloads/plot_pnr.ipynb
deleted file mode 100644
index 95a5898..0000000
--- a/docs/_build/html/_downloads/plot_pnr.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Plot probability of nth recall\n\n\nThis example plots the probability of an item being recalled nth given its\nlist position.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n#load data\negg = quail.load_example_data()\n\n# analysis\nanalyzed_data = quail.analyze(egg, analysis='pnr', listgroup=['average']*8,\n position=5)\n\n# plot\nquail.plot(analyzed_data, title='Probability of Recall')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_pnr.py b/docs/_build/html/_downloads/plot_pnr.py
deleted file mode 100644
index 6abf805..0000000
--- a/docs/_build/html/_downloads/plot_pnr.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot probability of nth recall
-=============================
-
-This example plots the probability of an item being recalled nth given its
-list position.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-#load data
-egg = quail.load_example_data()
-
-# analysis
-analyzed_data = quail.analyze(egg, analysis='pnr', listgroup=['average']*8,
- position=5)
-
-# plot
-quail.plot(analyzed_data, title='Probability of Recall')
diff --git a/docs/_build/html/_downloads/plot_spc.ipynb b/docs/_build/html/_downloads/plot_spc.ipynb
deleted file mode 100644
index 4d244bf..0000000
--- a/docs/_build/html/_downloads/plot_spc.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Plot serial position curve\n\n\nThis example plots the probability of recall success as a function of serial\nposition during stimulus encoding.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n#load data\negg = quail.load_example_data()\n\n# analyze and plot\negg.analyze('spc', listgroup=['average']*8).plot(title='Serial Position Curve')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_spc.py b/docs/_build/html/_downloads/plot_spc.py
deleted file mode 100644
index da03266..0000000
--- a/docs/_build/html/_downloads/plot_spc.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot serial position curve
-=============================
-
-This example plots the probability of recall success as a function of serial
-position during stimulus encoding.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-#load data
-egg = quail.load_example_data()
-
-# analyze and plot
-egg.analyze('spc', listgroup=['average']*8).plot(title='Serial Position Curve')
diff --git a/docs/_build/html/_downloads/plot_tempclust.ipynb b/docs/_build/html/_downloads/plot_tempclust.ipynb
deleted file mode 100644
index 2d0dfe2..0000000
--- a/docs/_build/html/_downloads/plot_tempclust.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n# Plot temporal clustering\n\n\nThis example plots temporal clustering, the extent to which subject tend to\nrecall neighboring items sequentially.\n\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n#load data\negg = quail.load_example_data()\n\n#analysis\nanalyzed_data = quail.analyze(egg, analysis='temporal', listgroup=['early']*8+['late']*8)\n\n#plot\nquail.plot(analyzed_data, title='Temporal Clustering')"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.11",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_tempclust.py b/docs/_build/html/_downloads/plot_tempclust.py
deleted file mode 100644
index 15e6e33..0000000
--- a/docs/_build/html/_downloads/plot_tempclust.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot temporal clustering
-=============================
-
-This example plots temporal clustering, the extent to which subject tend to
-recall neighboring items sequentially.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-#load data
-egg = quail.load_example_data()
-
-#analysis
-analyzed_data = quail.analyze(egg, analysis='temporal', listgroup=['early']*8+['late']*8)
-
-#plot
-quail.plot(analyzed_data, title='Temporal Clustering')
diff --git a/docs/_build/html/_downloads/plot_temporal.ipynb b/docs/_build/html/_downloads/plot_temporal.ipynb
deleted file mode 100644
index b09c96e..0000000
--- a/docs/_build/html/_downloads/plot_temporal.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "nbformat_minor": 0,
- "nbformat": 4,
- "cells": [
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "%matplotlib inline"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- },
- {
- "source": [
- "\n# Plot temporal clustering\n\n\nThis example plots temporal clustering, the extent to which subject tend to\nrecall neighboring items sequentially.\n\n\n"
- ],
- "cell_type": "markdown",
- "metadata": {}
- },
- {
- "execution_count": null,
- "cell_type": "code",
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n#load data\negg = quail.load_example_data()\n\n#analysis\nanalyzed_data = quail.analyze(egg, analysis='temporal', listgroup=['early']*4+['late']*4)\n\n#plot\nquail.plot(analyzed_data, title='Temporal Clustering')"
- ],
- "outputs": [],
- "metadata": {
- "collapsed": false
- }
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 2",
- "name": "python2",
- "language": "python"
- },
- "language_info": {
- "mimetype": "text/x-python",
- "nbconvert_exporter": "python",
- "name": "python",
- "file_extension": ".py",
- "version": "2.7.11",
- "pygments_lexer": "ipython2",
- "codemirror_mode": {
- "version": 2,
- "name": "ipython"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/plot_temporal.py b/docs/_build/html/_downloads/plot_temporal.py
deleted file mode 100644
index a7226e8..0000000
--- a/docs/_build/html/_downloads/plot_temporal.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Plot temporal clustering
-=============================
-
-This example plots temporal clustering, the extent to which subject tend to
-recall neighboring items sequentially.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-#load data
-egg = quail.load_example_data()
-
-#analysis
-analyzed_data = quail.analyze(egg, analysis='temporal', listgroup=['early']*4+['late']*4)
-
-#plot
-quail.plot(analyzed_data, title='Temporal Clustering')
diff --git a/docs/_build/html/_downloads/recmat2egg.ipynb b/docs/_build/html/_downloads/recmat2egg.ipynb
deleted file mode 100644
index b15483c..0000000
--- a/docs/_build/html/_downloads/recmat2egg.ipynb
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n# Make egg out of recall matrix\n\n\nThis example shows how to make an egg out of a precomputed recall matrix so that\nthe analysis and plotting functions can be used.\n\n\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "# Code source: Andrew Heusser\n# License: MIT\n\n# import\nimport quail\n\n# create fake recall matrix\nrecmat = [[[5,4,3,0,1], [3,1,2,0]]]\n\n# create egg\negg = quail.recmat2egg(recmat, list_length=6)"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.5"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
diff --git a/docs/_build/html/_downloads/recmat2egg.py b/docs/_build/html/_downloads/recmat2egg.py
deleted file mode 100644
index b66af02..0000000
--- a/docs/_build/html/_downloads/recmat2egg.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-=============================
-Make egg out of recall matrix
-=============================
-
-This example shows how to make an egg out of a precomputed recall matrix so that
-the analysis and plotting functions can be used.
-
-"""
-
-# Code source: Andrew Heusser
-# License: MIT
-
-# import
-import quail
-
-# create fake recall matrix
-recmat = [[[5,4,3,0,1], [3,1,2,0]]]
-
-# create egg
-egg = quail.recmat2egg(recmat, list_length=6)
diff --git a/docs/_build/html/_images/advanced_plotting_11_0.png b/docs/_build/html/_images/advanced_plotting_11_0.png
index 878d9aa..1c05ba5 100644
Binary files a/docs/_build/html/_images/advanced_plotting_11_0.png and b/docs/_build/html/_images/advanced_plotting_11_0.png differ
diff --git a/docs/_build/html/_images/advanced_plotting_11_1.png b/docs/_build/html/_images/advanced_plotting_11_1.png
deleted file mode 100644
index 94282d0..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_11_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_14_0.png b/docs/_build/html/_images/advanced_plotting_14_0.png
index 2bf0738..aa71795 100644
Binary files a/docs/_build/html/_images/advanced_plotting_14_0.png and b/docs/_build/html/_images/advanced_plotting_14_0.png differ
diff --git a/docs/_build/html/_images/advanced_plotting_16_0.png b/docs/_build/html/_images/advanced_plotting_16_0.png
index 3978b53..cd6bb16 100644
Binary files a/docs/_build/html/_images/advanced_plotting_16_0.png and b/docs/_build/html/_images/advanced_plotting_16_0.png differ
diff --git a/docs/_build/html/_images/advanced_plotting_18_0.png b/docs/_build/html/_images/advanced_plotting_18_0.png
index de9bf13..d30ee9c 100644
Binary files a/docs/_build/html/_images/advanced_plotting_18_0.png and b/docs/_build/html/_images/advanced_plotting_18_0.png differ
diff --git a/docs/_build/html/_images/advanced_plotting_18_1.png b/docs/_build/html/_images/advanced_plotting_18_1.png
deleted file mode 100644
index 0f80c36..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_18_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_20_0.png b/docs/_build/html/_images/advanced_plotting_20_0.png
deleted file mode 100644
index 998c439..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_20_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_20_1.png b/docs/_build/html/_images/advanced_plotting_20_1.png
deleted file mode 100644
index 5752353..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_20_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_23_0.png b/docs/_build/html/_images/advanced_plotting_23_0.png
deleted file mode 100644
index 0949f4b..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_23_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_23_1.png b/docs/_build/html/_images/advanced_plotting_23_1.png
deleted file mode 100644
index 614d7e3..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_23_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_26_0.png b/docs/_build/html/_images/advanced_plotting_26_0.png
deleted file mode 100644
index ff70025..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_26_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_26_1.png b/docs/_build/html/_images/advanced_plotting_26_1.png
index c1d76ac..8122b86 100644
Binary files a/docs/_build/html/_images/advanced_plotting_26_1.png and b/docs/_build/html/_images/advanced_plotting_26_1.png differ
diff --git a/docs/_build/html/_images/advanced_plotting_26_2.png b/docs/_build/html/_images/advanced_plotting_26_2.png
deleted file mode 100644
index 2f53b84..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_26_2.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_28_0.png b/docs/_build/html/_images/advanced_plotting_28_0.png
deleted file mode 100644
index 5a746bf..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_28_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_28_1.png b/docs/_build/html/_images/advanced_plotting_28_1.png
deleted file mode 100644
index 5387d2f..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_28_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_28_2.png b/docs/_build/html/_images/advanced_plotting_28_2.png
deleted file mode 100644
index 4106b90..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_28_2.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_5_0.png b/docs/_build/html/_images/advanced_plotting_5_0.png
deleted file mode 100644
index ff9dd47..0000000
Binary files a/docs/_build/html/_images/advanced_plotting_5_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/advanced_plotting_7_0.png b/docs/_build/html/_images/advanced_plotting_7_0.png
index 4bc919c..2d85572 100644
Binary files a/docs/_build/html/_images/advanced_plotting_7_0.png and b/docs/_build/html/_images/advanced_plotting_7_0.png differ
diff --git a/docs/_build/html/_images/advanced_plotting_9_0.png b/docs/_build/html/_images/advanced_plotting_9_0.png
index 9c9e126..3cda272 100644
Binary files a/docs/_build/html/_images/advanced_plotting_9_0.png and b/docs/_build/html/_images/advanced_plotting_9_0.png differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_14_0.png b/docs/_build/html/_images/basic_analyze_and_plot_14_0.png
deleted file mode 100644
index 5164e62..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_14_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_19_0.png b/docs/_build/html/_images/basic_analyze_and_plot_19_0.png
deleted file mode 100644
index 4d52fc0..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_19_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_23_0.png b/docs/_build/html/_images/basic_analyze_and_plot_23_0.png
deleted file mode 100644
index 4188a13..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_23_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_27_0.png b/docs/_build/html/_images/basic_analyze_and_plot_27_0.png
deleted file mode 100644
index b8c8c08..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_27_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_29_0.png b/docs/_build/html/_images/basic_analyze_and_plot_29_0.png
deleted file mode 100644
index c1b954c..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_29_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_33_0.png b/docs/_build/html/_images/basic_analyze_and_plot_33_0.png
deleted file mode 100644
index 04e0d72..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_33_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_35_0.png b/docs/_build/html/_images/basic_analyze_and_plot_35_0.png
deleted file mode 100644
index e323955..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_35_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/basic_analyze_and_plot_39_0.png b/docs/_build/html/_images/basic_analyze_and_plot_39_0.png
deleted file mode 100644
index b7a7719..0000000
Binary files a/docs/_build/html/_images/basic_analyze_and_plot_39_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/fingerprint1.png b/docs/_build/html/_images/fingerprint1.png
deleted file mode 100644
index 0b66451..0000000
Binary files a/docs/_build/html/_images/fingerprint1.png and /dev/null differ
diff --git a/docs/_build/html/_images/fingerprint25.png b/docs/_build/html/_images/fingerprint25.png
deleted file mode 100644
index 3383348..0000000
Binary files a/docs/_build/html/_images/fingerprint25.png and /dev/null differ
diff --git a/docs/_build/html/_images/fingerprint3.png b/docs/_build/html/_images/fingerprint3.png
deleted file mode 100644
index 5f84617..0000000
Binary files a/docs/_build/html/_images/fingerprint3.png and /dev/null differ
diff --git a/docs/_build/html/_images/fingerprint4.png b/docs/_build/html/_images/fingerprint4.png
deleted file mode 100644
index c5d1edf..0000000
Binary files a/docs/_build/html/_images/fingerprint4.png and /dev/null differ
diff --git a/docs/_build/html/_images/plot_fingerprint.png b/docs/_build/html/_images/plot_fingerprint.png
index 4c22c13..251204e 100644
Binary files a/docs/_build/html/_images/plot_fingerprint.png and b/docs/_build/html/_images/plot_fingerprint.png differ
diff --git a/docs/_build/html/_images/plot_lagcrp.png b/docs/_build/html/_images/plot_lagcrp.png
index 3fd16fb..9967591 100644
Binary files a/docs/_build/html/_images/plot_lagcrp.png and b/docs/_build/html/_images/plot_lagcrp.png differ
diff --git a/docs/_build/html/_images/sphx_glr_crack_egg_thumb.png b/docs/_build/html/_images/sphx_glr_crack_egg_thumb.png
index cbc8e0f..1bcc781 100644
Binary files a/docs/_build/html/_images/sphx_glr_crack_egg_thumb.png and b/docs/_build/html/_images/sphx_glr_crack_egg_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_create_egg_thumb.png b/docs/_build/html/_images/sphx_glr_create_egg_thumb.png
index cbc8e0f..1bcc781 100644
Binary files a/docs/_build/html/_images/sphx_glr_create_egg_thumb.png and b/docs/_build/html/_images/sphx_glr_create_egg_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_create_multisubject_egg_thumb.png b/docs/_build/html/_images/sphx_glr_create_multisubject_egg_thumb.png
index cbc8e0f..1bcc781 100644
Binary files a/docs/_build/html/_images/sphx_glr_create_multisubject_egg_thumb.png and b/docs/_build/html/_images/sphx_glr_create_multisubject_egg_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_decode_speech_thumb.png b/docs/_build/html/_images/sphx_glr_decode_speech_thumb.png
index cbc8e0f..1bcc781 100644
Binary files a/docs/_build/html/_images/sphx_glr_decode_speech_thumb.png and b/docs/_build/html/_images/sphx_glr_decode_speech_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_fingerprint_optimalpresenter_thumb.png b/docs/_build/html/_images/sphx_glr_fingerprint_optimalpresenter_thumb.png
index 233f8e6..1bcc781 100644
Binary files a/docs/_build/html/_images/sphx_glr_fingerprint_optimalpresenter_thumb.png and b/docs/_build/html/_images/sphx_glr_fingerprint_optimalpresenter_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_accuracy_001.png b/docs/_build/html/_images/sphx_glr_plot_accuracy_001.png
index a774ca3..0c7cf04 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_accuracy_001.png and b/docs/_build/html/_images/sphx_glr_plot_accuracy_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_accuracy_thumb.png b/docs/_build/html/_images/sphx_glr_plot_accuracy_thumb.png
index 392278e..acffb44 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_accuracy_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_accuracy_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_existing_axes_001.png b/docs/_build/html/_images/sphx_glr_plot_existing_axes_001.png
index 47b6ca7..afdb876 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_existing_axes_001.png and b/docs/_build/html/_images/sphx_glr_plot_existing_axes_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_existing_axes_thumb.png b/docs/_build/html/_images/sphx_glr_plot_existing_axes_thumb.png
index 513f479..3f88eea 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_existing_axes_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_existing_axes_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_fingerprint_001.png b/docs/_build/html/_images/sphx_glr_plot_fingerprint_001.png
index e8d0d32..251204e 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_fingerprint_001.png and b/docs/_build/html/_images/sphx_glr_plot_fingerprint_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_fingerprint_thumb.png b/docs/_build/html/_images/sphx_glr_plot_fingerprint_thumb.png
index 50fcc09..ae7a213 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_fingerprint_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_fingerprint_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_lagcrp_001.png b/docs/_build/html/_images/sphx_glr_plot_lagcrp_001.png
index bb7e152..9967591 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_lagcrp_001.png and b/docs/_build/html/_images/sphx_glr_plot_lagcrp_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_lagcrp_thumb.png b/docs/_build/html/_images/sphx_glr_plot_lagcrp_thumb.png
index 9ada197..8a0c750 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_lagcrp_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_lagcrp_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_pfr_001.png b/docs/_build/html/_images/sphx_glr_plot_pfr_001.png
index 3532e5c..3b6e555 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_pfr_001.png and b/docs/_build/html/_images/sphx_glr_plot_pfr_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_pfr_thumb.png b/docs/_build/html/_images/sphx_glr_plot_pfr_thumb.png
index ada934f..227af70 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_pfr_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_pfr_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_pnr_001.png b/docs/_build/html/_images/sphx_glr_plot_pnr_001.png
index 5d6ca16..5586627 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_pnr_001.png and b/docs/_build/html/_images/sphx_glr_plot_pnr_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_pnr_thumb.png b/docs/_build/html/_images/sphx_glr_plot_pnr_thumb.png
index 68864e0..9369335 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_pnr_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_pnr_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_spc_001.png b/docs/_build/html/_images/sphx_glr_plot_spc_001.png
index 8c58b95..d3c8ba8 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_spc_001.png and b/docs/_build/html/_images/sphx_glr_plot_spc_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_spc_thumb.png b/docs/_build/html/_images/sphx_glr_plot_spc_thumb.png
index 7ab83f3..d249266 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_spc_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_spc_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_tempclust_001.png b/docs/_build/html/_images/sphx_glr_plot_tempclust_001.png
deleted file mode 100644
index 1e0682f..0000000
Binary files a/docs/_build/html/_images/sphx_glr_plot_tempclust_001.png and /dev/null differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_temporal_001.png b/docs/_build/html/_images/sphx_glr_plot_temporal_001.png
index 86f47e6..a6eda92 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_temporal_001.png and b/docs/_build/html/_images/sphx_glr_plot_temporal_001.png differ
diff --git a/docs/_build/html/_images/sphx_glr_plot_temporal_thumb.png b/docs/_build/html/_images/sphx_glr_plot_temporal_thumb.png
index 0e4af1e..e70d512 100644
Binary files a/docs/_build/html/_images/sphx_glr_plot_temporal_thumb.png and b/docs/_build/html/_images/sphx_glr_plot_temporal_thumb.png differ
diff --git a/docs/_build/html/_images/sphx_glr_recmat2egg_thumb.png b/docs/_build/html/_images/sphx_glr_recmat2egg_thumb.png
index cbc8e0f..1bcc781 100644
Binary files a/docs/_build/html/_images/sphx_glr_recmat2egg_thumb.png and b/docs/_build/html/_images/sphx_glr_recmat2egg_thumb.png differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_0.png
deleted file mode 100644
index 921703e..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_1.png
deleted file mode 100644
index bcda101..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_11_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_14_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_14_0.png
deleted file mode 100644
index 1e6f0ef..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_14_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_16_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_16_0.png
deleted file mode 100644
index 10151d0..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_16_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_18_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_18_0.png
deleted file mode 100644
index f203eaa..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_18_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_0.png
deleted file mode 100644
index 1d819ea..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_1.png
deleted file mode 100644
index 1be6ddc..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_20_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_0.png
deleted file mode 100644
index 06ee0b5..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_1.png
deleted file mode 100644
index 70840f6..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_23_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_0.png
deleted file mode 100644
index 2fcc599..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_1.png
deleted file mode 100644
index 6ef4942..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_2.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_2.png
deleted file mode 100644
index d53c958..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_2.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_3.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_3.png
deleted file mode 100644
index d7a2c79..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_26_3.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_0.png
deleted file mode 100644
index 20a29af..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_1.png
deleted file mode 100644
index a0cd659..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_2.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_2.png
deleted file mode 100644
index 515e183..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_2.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_3.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_3.png
deleted file mode 100644
index e10e27b..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_28_3.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_5_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_5_0.png
deleted file mode 100644
index 414e7b8..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_5_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_7_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_7_0.png
deleted file mode 100644
index 620981b..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_7_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_9_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_9_0.png
deleted file mode 100644
index 3f48b4f..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_advanced_plotting-checkpoint_9_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_0.png
deleted file mode 100644
index 2af412e..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_1.png
deleted file mode 100644
index 0822a09..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_14_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_0.png
deleted file mode 100644
index 7cf2a16..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_1.png
deleted file mode 100644
index 9da2cec..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_19_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_0.png
deleted file mode 100644
index 7157b9c..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_1.png
deleted file mode 100644
index 95b80dc..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_23_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_0.png
deleted file mode 100644
index 42d60a5..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_1.png
deleted file mode 100644
index 13b34b3..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_27_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_0.png
deleted file mode 100644
index c1b954c..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_1.png
deleted file mode 100644
index 90d306b..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_29_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_0.png
deleted file mode 100644
index e323955..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_1.png
deleted file mode 100644
index 5f68ba0..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_35_1.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_37_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_37_0.png
deleted file mode 100644
index a35777f..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_37_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_39_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_39_0.png
deleted file mode 100644
index b7a7719..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_basic_analyze_and_plot-checkpoint_39_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_2_0.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_2_0.png
deleted file mode 100644
index 33f481e..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_2_0.png and /dev/null differ
diff --git a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_6_1.png b/docs/_build/html/_images/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_6_1.png
deleted file mode 100644
index 18348c4..0000000
Binary files a/docs/_build/html/_images/tutorial_.ipynb_checkpoints_optimal_presenter-checkpoint_6_1.png and /dev/null differ
diff --git a/docs/_build/html/_modules/index.html b/docs/_build/html/_modules/index.html
index fc9f0f7..8ca6fb2 100644
--- a/docs/_build/html/_modules/index.html
+++ b/docs/_build/html/_modules/index.html
@@ -1,44 +1,35 @@
Overview: module code — quail 0.2.0 documentation
-#!/usr/bin/env python
-from __future__ import division
-import numpy as np
-import pandas as pd
-import warnings
-from .helpers import *
-
-def analyze_chunk(data, subjgroup=None, subjname='Subject', listgroup=None, listname='List', analysis=None, analysis_type=None, pass_features=False, **kwargs):
- """
- Private function that groups data by subject/list number and performs analysis for a chunk of data.
-
- Parameters
- ----------
- data : Egg data object
- The data to be analyzed
-
- subjgroup : list of strings or ints
- String/int variables indicating how to group over subjects. Must be
- the length of the number of subjects
-
- subjname : string
- Name of the subject grouping variable
-
- listgroup : list of strings or ints
- String/int variables indicating how to group over list. Must be
- the length of the number of lists
-
- listname : string
- Name of the list grouping variable
-
- analysis : function
- This function analyzes data and returns it.
-
- pass_features : bool
- Logical indicating whether the analyses uses the features field of the Egg
-
- Returns
- ----------
- analyzed_data : Pandas DataFrame
- DataFrame containing the analysis results
-
- """
-    # if no grouping, set default to iterate over each list independently
-    subjgroup = subjgroup if subjgroup else data.pres.index.levels[0].values
-    listgroup = listgroup if listgroup else data.pres.index.levels[1].values
-
-    # create a dictionary for grouping
-    subjdict = {subj : data.pres.index.levels[0].values[subj==np.array(subjgroup)] for subj in set(subjgroup)}
-    # listdict = {lst : data.pres.index.levels[1].values[lst==np.array(listgroup)] for lst in set(listgroup)}
-
-    # allow for lists of listgroup arguments
-    if all(isinstance(el, list) for el in listgroup):
-        listdict = [{lst : data.pres.index.levels[1].values[lst==np.array(listgrpsub)] for lst in set(listgrpsub)} for listgrpsub in listgroup]
-    else:
-        listdict = [{lst : data.pres.index.levels[1].values[lst==np.array(listgroup)] for lst in set(listgroup)} for subj in subjdict]
-
-    # perform the analysis
-    def perform_analysis(subj, lst):
-
-        # get data slice for presentation and recall
-        pres_slice = data.pres.loc[[(s, l) for s in subjdict[subj] for l in listdict[subj][lst] if all(~pd.isnull(data.pres.loc[(s, l)]))]]
-        pres_slice.list_length = data.list_length
-
-        rec_slice = data.rec.loc[[(s, l) for s in subjdict[subj] for l in listdict[subj][lst] if all(~pd.isnull(data.pres.loc[(s, l)]))]]
-
-        # if features are needed for analysis, get the features for this slice of data
-        if pass_features:
-            feature_slice = data.features.loc[[(s, l) for s in subjdict[subj] for l in listdict[subj][lst] if all(~pd.isnull(data.pres.loc[(s, l)]))]]
-
-        # generate indices
-        index = pd.MultiIndex.from_arrays([[subj], [lst]], names=[subjname, listname])
-
-        # perform analysis for each data chunk
-        if pass_features:
-            return pd.DataFrame([analysis(pres_slice, rec_slice, feature_slice, data.dist_funcs, **kwargs)], index=index, columns=[feature for feature in feature_slice[0].as_matrix()[0].keys()])
-        else:
-            return pd.DataFrame([analysis(pres_slice, rec_slice, **kwargs)], index=index)
-
-    # create list of chunks to process
-    a = []
-    b = []
-    for subj in subjdict:
-        for lst in listdict[0]:
-            a.append(subj)
-            b.append(lst)
-
-    # handle parallel kwarg
-    parallel = kwargs['parallel']
-    del kwargs['parallel']
-
-    # if we're running permutation tests, use multiprocessing
-    if parallel == True:
-        import multiprocessing
-        from pathos.multiprocessing import ProcessingPool as Pool
-        p = Pool(multiprocessing.cpu_count())
-        analyzed_data = p.map(perform_analysis, a, b)
-    else:
-        analyzed_data = [perform_analysis(ai, bi) for ai, bi in zip(a, b)]
-
-    # concatenate slices
-    analyzed_data = pd.concat(analyzed_data)
-
-    analyzed_data.attrs = {
-        'analysis_type' : analysis_type,
-        'list_length' : data.list_length
-    }
-
-    for key in kwargs:
-        analyzed_data.attrs[key] = kwargs[key]
-
-    return analyzed_data
-
-# recall matrix
-def recall_matrix(presented, recalled):
- """
- Computes recall matrix given list of presented and list of recalled words
-
- Parameters
- ----------
- presented : list of list of strings
- presentedWords are the words presented in the experiment, in order, grouped by list
-
- recalled : list of list of strings
- recalledWords are the words recalled by the subject, in order, grouped by list
-
- Returns
- ----------
- recall_matrix : list of lists of ints
- each integer represents the presentation position of the recalled word in a given list in order of recall
- 0s represent recalled words not presented
- negative ints represent words recalled from previous lists
-
- """
-
-    def recall_pos(pres_list, rec_list):
-        pres_list = list(pres_list)
-        rec_list = list(rec_list)
-        result = np.zeros(len(pres_list)) if len(pres_list) >= len(rec_list) else np.zeros(len(rec_list))
-        result.fill(np.nan)
-        for idx, rec_word in enumerate(rec_list):
-            if rec_word in pres_list:
-                if type(rec_word) is str:
-                    result[idx] = int(pres_list.index(rec_word) + 1)
-        return result
-
-    result = []
-    for pres_list, rec_list in zip(presented.values, recalled.values):
-        result.append(recall_pos(pres_list, rec_list))
-    return result
-
-def compute_distances(pres_list, feature_list, dist_funcs):
- """
- Compute distances between list words along n feature dimensions
-
- Parameters
- ----------
- pres_list : list
- list of presented words
- feature_list : list
- list of feature dicts for presented words
- dist_funcs : dict
- dict of distance functions for each feature
-
- Returns
- ----------
- distances : dict
- dict of distance matrices for each feature
- """
-
-    # initialize dist dict
-    distances = {}
-
-    # for each feature in dist_funcs
-    for feature in dist_funcs:
-
-        # initialize dist matrix
-        dists = np.zeros((len(pres_list), len(pres_list)))
-
-        # for each word in the list
-        for idx1, item1 in enumerate(pres_list):
-
-            # for each word in the list
-            for idx2, item2 in enumerate(pres_list):
-
-                # compute the distance between word 1 and word 2 along some feature dimension
-                dists[idx1, idx2] = dist_funcs[feature](feature_list[idx1][feature], feature_list[idx2][feature])
-
-        # set that distance matrix to the value of a dict where the feature name is the key
-        distances[feature] = dists
-
-    return distances
-
-def compute_feature_weights(pres_list, rec_list, feature_list, distances):
- """
- Compute clustering scores along a set of feature dimensions
-
- Parameters
- ----------
- pres_list : list
- list of presented words
- rec_list : list
- list of recalled words
- feature_list : list
- list of feature dicts for presented words
- distances : dict
- dict of distance matrices for each feature
-
- Returns
- ----------
- weights : list
- list of clustering scores for each feature dimension
- """
-
-    # initialize the weights object for just this list
-    weights = {}
-    for feature in feature_list[0]:
-        weights[feature] = []
-
-    # return default list if there is not enough data to compute the fingerprint
-    if len(rec_list) <= 2:
-        print('Not enough recalls to compute fingerprint, returning default fingerprint.. (everything is .5)')
-        for feature in feature_list[0]:
-            weights[feature] = .5
-        return [weights[key] for key in weights]
-
-    # initialize past word list
-    past_words = []
-    past_idxs = []
-
-    # loop over words
-    for i in range(len(rec_list)-1):
-
-        # grab current word
-        c = rec_list[i]
-
-        # grab the next word
-        n = rec_list[i + 1]
-
-        # if both recalled words are in the encoding list and haven't been recalled before
-        if (c in pres_list and n in pres_list) and (c not in past_words and n not in past_words):
-
-            # for each feature
-            for feature in feature_list[0]:
-
-                # get the distance vector for the current word
-                dists = distances[feature][pres_list.index(c), :]
-
-                # distance between current and next word
-                cdist = dists[pres_list.index(n)]
-
-                # filter dists removing the words that have already been recalled
-                dists_filt = np.array([dist for idx, dist in enumerate(dists) if idx not in past_idxs])
-
-                # get indices
-                avg_rank = np.mean(np.where(np.sort(dists_filt)[::-1] == cdist)[0] + 1)
-
-                # compute the weight
-                weights[feature].append(avg_rank / len(dists_filt))
-
-            # keep track of what has been recalled already
-            past_idxs.append(pres_list.index(c))
-            past_words.append(c)
-
-    # average over the cluster scores for a particular dimension
-    for feature in weights:
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", category=RuntimeWarning)
-            weights[feature] = np.nanmean(weights[feature])
-
-    return [weights[key] for key in weights]
-
-# def single_perm(p, r, f, distances):
-# r_real = compute_feature_weights(p, r, f, distances)
-# perm = list(np.random.permutation(r))
-# r_perm = compute_feature_weights(p, perm, f, distances)
-# return [feature_perm < r_real[idx] for idx, feature_perm in enumerate(r_perm)]
-#
-# def permute_fingerprint_parallel(p, r, f, distances, n_perms=100):
-#
-# executor = concurrent.futures.ThreadPoolExecutor(10)
-# futures = [executor.submit(single_perm, p, r, f, distances) for perm in range(n_perms)]
-# concurrent.futures.wait(futures)
-#
-# results = [perm.result() for perm in futures]
-#
-# print(np.sum(np.array(results), axis=0) / n_perms)
-#
-# return np.sum(np.array(results), axis=0) / n_perms
-
-def permute_fingerprint_serial(p, r, f, distances, n_perms=100):
-
-    r_perms = []
-    r_real = compute_feature_weights(p, r, f, distances)
-
-    for iperm in range(n_perms):
-        r_perm = list(np.random.permutation(r))
-        r_perms.append(compute_feature_weights(p, r_perm, f, distances))
-
-    r_perms_bool = []
-    for perm in r_perms:
-        r_perm_bool = []
-        for idx, feature_perm in enumerate(perm):
-            r_perm_bool.append(feature_perm < r_real[idx])
-        r_perms_bool.append(r_perm_bool)
-
-    return np.sum(np.array(r_perms_bool), axis=0) / n_perms
-
-# accuracy analysis
-def accuracy_helper(pres_slice, rec_slice):
- """
- Computes proportion of words recalled
-
- Parameters
- ----------
- pres_slice : Pandas Dataframe
- chunk of presentation data to be analyzed
- rec_slice : Pandas Dataframe
- chunk of recall data to be analyzed
-
- Returns
- ----------
- prop_recalled : numpy array
- proportion of words recalled
-
- """
-
-    # compute recall_matrix for data slice
-    recall = recall_matrix(pres_slice, rec_slice)
-
-    # simple function that returns 1 if item encoded in position n is in recall list
-    def compute_acc(lst):
-        return len([i for i in np.unique(lst) if i > 0]) / (pres_slice.list_length)
-
-    # get spc for each row in recall matrix
-    acc_matrix = [compute_acc(lst) for lst in recall]
-
-    # average over rows
-    prop_recalled = np.mean(acc_matrix, axis=0)
-
-    return prop_recalled
-
-# serial position curve
-def spc_helper(pres_slice, rec_slice):
- """
- Computes probability of a word being recalled (in the appropriate recall list), given its presentation position
-
- Parameters
- ----------
- pres_slice : Pandas Dataframe
- chunk of presentation data to be analyzed
- rec_slice : Pandas Dataframe
- chunk of recall data to be analyzed
-
- Returns
- ----------
- prop_recalled : numpy array
- each number represents the probability of recall for a word presented in given position/index
-
- """
-
-    # compute recall_matrix for data slice
-    recall = recall_matrix(pres_slice, rec_slice)
-
-    # get spc for each row in recall matrix
-    spc_matrix = [[1 if pos in lst else 0 for pos in range(1, len(lst) + 1)] for lst in recall]
-
-    # average over rows
-    prop_recalled = np.mean(spc_matrix, axis=0)
-
-    return prop_recalled
-
-# probability of nth recall
-def pnr_helper(pres_slice, rec_slice, n):
-
- """
- Computes probability of a word being recalled nth (in the appropriate recall
- list), given its presentation position. Note: zero indexed
-
- Parameters
- ----------
- pres_slice : Pandas Dataframe
- chunk of presentation data to be analyzed
- rec_slice : Pandas Dataframe
- chunk of recall data to be analyzed
-
- Returns
- ----------
- prob_recalled : numpy array
- each number represents the probability of nth recall for a word presented in given position/index
-
- """
-
-    # compute recall_matrix for data slice
-    recall = recall_matrix(pres_slice, rec_slice)
-
-    # simple function that returns 1 if item encoded in position n is recalled first
-    def pos_recalled_first(pos, lst, n):
-        return 1 if pos == lst[n] else 0
-
-    # get pfr for each row in recall matrix
-    pnr_matrix = [[pos_recalled_first(pos, lst, n) for pos in range(1, len(lst) + 1)] for lst in recall]
-
-    # average over rows
-    prob_recalled = np.mean(pnr_matrix, axis=0)
-
-    return prob_recalled
-
-# lag-crp
-def lagcrp_helper(pres_slice, rec_slice):
- """
- Computes probabilities for each transition distance (probability that a word recalled will be a given distance--in presentation order--from the previous recalled word)
-
- Parameters
- ----------
- pres_slice : Pandas Dataframe
- chunk of presentation data to be analyzed
- rec_slice : Pandas Dataframe
- chunk of recall data to be analyzed
-
- Returns
- ----------
- prob_recalled : numpy array
- each float is the probability of transition distance (distnaces indexed by position, from -(n-1) to (n-1), excluding zero
-
- """
-
-    # compute recall_matrix for data slice
-    recall = recall_matrix(pres_slice, rec_slice)
-
-    def check_pair(a, b):
-        if (a > 0 and b > 0) and (a != b):
-            return True
-        else:
-            return False
-
-    def compute_actual(recall_list, list_length):
-        arr = pd.Series(data=np.zeros((list_length)*2), index=list(range(-list_length, 0)) + list(range(1, list_length + 1)))
-        recalled = []
-        for trial in range(0, list_length - 1):
-            a = recall_list[trial]
-            b = recall_list[trial + 1]
-            if check_pair(a, b) and (a not in recalled) and (b not in recalled):
-                arr[b - a] += 1
-            recalled.append(a)
-        return arr
-
-    def compute_possible(recall_list, list_length):
-        arr = pd.Series(data=np.zeros((list_length)*2), index=list(range(-list_length, 0)) + list(range(1, list_length + 1)))
-        recalled = []
-        for trial in recall_list:
-
-            if np.isnan(trial):
-                pass
-            else:
-
-                low_bound = int(1 - trial)
-                up_bound = int(list_length - trial)
-
-                chances = list(range(low_bound, 0)) + list(range(1, up_bound + 1))
-                # ALL transitions
-
-                # remove transitions not possible
-                for each in recalled:
-                    if each - trial in chances:
-                        chances.remove(each - trial)
-
-                # update array with possible transitions
-                arr[chances] += 1
-
-            recalled.append(trial)
-
-        return arr
-
-    ########
-
-    list_crp = []
-    for n_list in recall:
-        actual = compute_actual(n_list, pres_slice.list_length)
-        possible = compute_possible(n_list, pres_slice.list_length)
-        crp = [0.0 if j == 0 else i / j for i, j in zip(actual, possible)]
-        crp.insert(int(len(crp) / 2), np.nan)
-        list_crp.append(crp)
-
-    prob_recalled = np.mean(list_crp, axis=0)
-
-    return prob_recalled
-
-# temporal clustering analysis
-def temporal_helper(pres_slice, rec_slice, permute=False, n_perms=1000):
- """
- Computes temporal clustering score
-
- Parameters
- ----------
- pres_slice : Pandas Dataframe
- chunk of presentation data to be analyzed
- rec_slice : Pandas Dataframe
- chunk of recall data to be analyzed
-
- Returns
- ----------
- score : float
- a number representing temporal clustering
-
- """
-
-    # initialize temporal clustering list
-    temporal_clustering = []
-
-    # define distance function for temporal clustering
-    dist_funcs = {
-        'temporal' : lambda a, b: np.abs(a - b)
-    }
-
-    # define features (just positions for temporal clustering)
-    f = [{'temporal': i} for i in range(pres_slice.list_length + 1)]
-
-    # loop over lists
-    for p, r in zip(pres_slice.as_matrix(), rec_slice.as_matrix()):
-
-        # turn arrays into lists
-        p = list(p)
-        r = list(filter(lambda ri: isinstance(ri, str), list(r)))
-
-        if len(r) > 1:
-
-            # compute distances
-            distances = compute_distances(p, f, dist_funcs)
-
-            # add optional bootstrapping
-            if permute:
-                temporal_clustering.append(permute_fingerprint_serial(p, r, f, distances, n_perms=n_perms))
-            else:
-                temporal_clustering.append(compute_feature_weights(p, r, f, distances))
-        else:
-            temporal_clustering.append([np.nan] * len(f[0].keys()))
-
-    # return average over rows
-    return np.nanmean(temporal_clustering, axis=0)
-
-# fingerprint analysis
-def fingerprint_helper(pres_slice, rec_slice, feature_slice, dist_funcs, permute=False, n_perms=1000):
- """
- Computes clustering along a set of feature dimensions
-
- Parameters
- ----------
- pres_slice : Pandas Dataframe
- chunk of presentation data to be analyzed
- rec_slice : Pandas Dataframe
- chunk of recall data to be analyzed
- feature_slice : Pandas Dataframe
- chunk of features data to be analyzed
- dist_funcs : dict
- Dictionary of distance functions for feature clustering analyses
-
- Returns
- ----------
- probabilities : numpy array
- each number represents clustering along a different feature dimension
-
- """
-    import time
-
-    # compute fingerprint for each list within a chunk
-    fingerprint_matrix = []
-
-    for p, r, f in zip(pres_slice.as_matrix(), rec_slice.as_matrix(), feature_slice.as_matrix()):
-
-        # turn arrays into lists
-        p = list(p)
-        f = list(f)
-        r = list(filter(lambda ri: isinstance(ri, str), list(r)))
-
-        if len(r) > 1:
-
-            # compute distances
-            distances = compute_distances(p, f, dist_funcs)
-
-            # add optional bootstrapping
-            if permute:
-                fingerprint_matrix.append(permute_fingerprint_serial(p, r, f, distances, n_perms=n_perms))
-            else:
-                fingerprint_matrix.append(compute_feature_weights(p, r, f, distances))
-        else:
-            fingerprint_matrix.append([np.nan] * len(f[0].keys()))
-
-    # return average over rows
-    return np.mean(fingerprint_matrix, axis=0)
-
-# fingerprint + temporal clustering analysis
-def fingerprint_temporal_helper(pres_slice, rec_slice, feature_slice, dist_funcs, permute=True, n_perms=1000):
-    """
-    Computes clustering along a set of feature dimensions
-
-    Parameters
-    ----------
-    pres_slice : Pandas Dataframe
-        chunk of presentation data to be analyzed
-    rec_slice : Pandas Dataframe
-        chunk of recall data to be analyzed
-    feature_slice : Pandas Dataframe
-        chunk of features data to be analyzed
-    dist_funcs : dict
-        Dictionary of distance functions for feature clustering analyses
-
-    Returns
-    ----------
-    probabilities : numpy array
-        each number represents clustering along a different feature dimension
-
-    """
-    # compute fingerprint for each list within a chunk
-    fingerprint_matrix = []
-
-    for p, r, f in zip(pres_slice.as_matrix(), rec_slice.as_matrix(), feature_slice.as_matrix()):
-
-        # turn arrays into lists
-        p = list(p)
-        f = list(f)
-        r = list(filter(lambda ri: isinstance(ri, str), list(r)))
-
-        # add in temporal clustering
-        nf = []
-        for idx, fi in enumerate(f):
-            fi['temporal'] = idx
-            nf.append(fi)
-
-        dist_funcs_copy = dist_funcs.copy()
-        dist_funcs_copy['temporal'] = lambda a, b: np.abs(a - b)
-
-        # if there is at least 1 transition
-        if len(r) > 1:
-
-            # compute distances
-            distances = compute_distances(p, nf, dist_funcs_copy)
-
-            # add optional bootstrapping
-            if permute:
-                fingerprint_matrix.append(permute_fingerprint_serial(p, r, nf, distances, n_perms=n_perms))
-            else:
-                fingerprint_matrix.append(compute_feature_weights(p, r, nf, distances))
-        else:
-            fingerprint_matrix.append([np.nan] * len(nf[0].keys()))
-
-    return np.nanmean(fingerprint_matrix, axis=0)
-
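fingerprint_temporal_helper above scores temporal clustering through the same distance machinery as the stimulus features, simply by injecting serial position as one more feature before computing weights. An illustration of that bookkeeping with made-up features (the dicts and distance table here are examples, not the library's internal layout):

    import numpy as np

    features = [{'category': 'animal', 'length': 5},
                {'category': 'tool', 'length': 7}]

    # add a 'temporal' feature equal to each item's study position
    features = [dict(f, temporal=idx) for idx, f in enumerate(features)]

    dist_funcs = {
        'category': lambda a, b: int(a != b),    # match / mismatch
        'length': lambda a, b: np.abs(a - b),    # numeric distance
        'temporal': lambda a, b: np.abs(a - b),  # serial-position distance
    }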
-# main analysis function
-
[docs]def analyze(data, subjgroup=None, listgroup=None, subjname='Subject',
-            listname='List', analysis=None, n=0, permute=False, n_perms=1000,
-            parallel=False):
- """
- General analysis function that groups data by subject/list number and performs analysis.
-
- Parameters
- ----------
- data : Egg data object
- The data to be analyzed
-
- subjgroup : list of strings or ints
- String/int variables indicating how to group over subjects. Must be
- the length of the number of subjects
-
- subjname : string
- Name of the subject grouping variable
-
- listgroup : list of strings or ints
- String/int variables indicating how to group over list. Must be
- the length of the number of lists
-
- listname : string
- Name of the list grouping variable
-
- analysis : string
- This is the analysis you want to run. Can be accuracy, spc, pfr,
- temporal or fingerprint
-
- n : int
- Optional argument for pnr analysis. Defines encoding position of item
- to run pnr. Default is 0, and it is zero indexed
-
- permute : bool
- Optional argument for fingerprint/temporal cluster analyses. Determines
- whether to correct clustering scores by shuffling recall order for each list
- to create a distribution of clustering scores (for each feature). The
- "corrected" clustering score is the proportion of clustering scores in
- that random distribution that were lower than the clustering score for
- the observed recall sequence. Default is False.
-
- n_perms : int
- Optional argument for fingerprint/temporal cluster analyses. Number of
- permutations to run for "corrected" clustering scores. Default is 1000 (
- per recall list).
-
- parallel : bool
- Option to use multiprocessing (this can help speed up the permutations
- tests in the clustering calculations)
-
- Returns
- ----------
- analyzed_data : Pandas DataFrame
- DataFrame containing the analysis results
-
- """
-
-    # make sure an analysis is specified
-    if analysis is None:
-        raise ValueError('You must pass an analysis type.')
-
-    # check if subject/list grouping variables exist on the egg
-    if hasattr(data, 'subjgroup'):
-        if data.subjgroup is not None:
-            subjgroup = data.subjgroup
-
-    if hasattr(data, 'subjname'):
-        if data.subjname is not None:
-            subjname = data.subjname
-
-    if hasattr(data, 'listgroup'):
-        if data.listgroup is not None:
-            listgroup = data.listgroup
-
-    if hasattr(data, 'listname'):
-        if data.listname is not None:
-            listname = data.listname
-
-    if type(data) != list:
-        data = [data]
-
-    if type(analysis) != list:
-        analysis = [analysis]
-
-    result = [[] for d in range(len(data))]
-
-    for idx, d in enumerate(data):
-        for a in analysis:
-
-            if a is 'accuracy':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=accuracy_helper,
-                                  analysis_type='accuracy',
-                                  pass_features=False,
-                                  parallel=parallel)
-            elif a is 'spc':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=spc_helper,
-                                  analysis_type='spc',
-                                  pass_features=False,
-                                  parallel=parallel)
-            elif a is 'pfr':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=pnr_helper,
-                                  analysis_type='pfr',
-                                  pass_features=False,
-                                  n=0,
-                                  parallel=parallel)
-            elif a is 'pnr':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=pnr_helper,
-                                  analysis_type='pnr',
-                                  pass_features=False,
-                                  n=n,
-                                  parallel=parallel)
-            elif a is 'lagcrp':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=lagcrp_helper,
-                                  analysis_type='lagcrp',
-                                  pass_features=False,
-                                  parallel=parallel)
-                # set indices for lagcrp
-                r.columns = range(-int((len(r.columns)-1)/2), int((len(r.columns)-1)/2)+1)
-            elif a is 'fingerprint':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=fingerprint_helper,
-                                  analysis_type='fingerprint',
-                                  pass_features=True,
-                                  permute=permute,
-                                  n_perms=n_perms,
-                                  parallel=parallel)
-            elif a is 'temporal':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=temporal_helper,
-                                  analysis_type='temporal',
-                                  permute=permute,
-                                  n_perms=n_perms,
-                                  parallel=parallel)
-            elif a is 'fingerprint_temporal':
-                r = analyze_chunk(d, subjgroup=subjgroup,
-                                  listgroup=listgroup,
-                                  subjname=subjname,
-                                  listname=listname,
-                                  analysis=fingerprint_temporal_helper,
-                                  analysis_type='fingerprint_temporal',
-                                  pass_features=True,
-                                  permute=permute,
-                                  n_perms=n_perms,
-                                  parallel=parallel)
-
-            result[idx].append(r)
-
-    # return analysis result
-    if len(data) > 1 and len(analysis) > 1:
-        return result
-    elif len(data) > 1 and len(analysis) == 1:
-        return [item[0] for item in result]
-    elif len(data) == 1 and len(analysis) > 1:
-        return [item for item in result[0]]
-    else:
-        return result[0][0]
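As the permute/n_perms docstring above describes, the corrected clustering score compares the observed score against a null distribution built by shuffling recall order within each list. A minimal sketch of that correction, assuming some score(recall_sequence) function (hypothetical here, standing in for the fingerprint computation):

    import numpy as np

    def corrected_score(recall_sequence, score, n_perms=1000, seed=0):
        rng = np.random.default_rng(seed)
        observed = score(recall_sequence)
        null = [score(list(rng.permutation(recall_sequence))) for _ in range(n_perms)]
        # proportion of shuffled scores that fall below the observed score
        return np.mean([s < observed for s in null])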
[docs]def analyze(egg, subjgroup=None, listgroup=None, subjname='Subject', listname='List', analysis=None, position=0, permute=False, n_perms=1000, parallel=False, match='exact', distance='euclidean', features=None, ts=None):
-    """
+    """
    General analysis function that groups data by subject/list number and performs analysis.

    Parameters
@@ -219,7 +217,7 @@
Source code for quail.analysis.analysis
        raise ValueError('Analysis not recognized. Choose one of the following: '
                         'accuracy, spc, pfr, lag-crp, fingerprint, temporal')
-    from ..egg import FriedEgg
+    from ..egg import FriedEgg

    if hasattr(egg, 'subjgroup'):
        if egg.subjgroup is not None:
@@ -252,26 +250,26 @@
-def _analyze_chunk(data, subjgroup=None, subjname='Subject', listgroup=None,
+def _analyze_chunk(data, subjgroup=None, subjname='Subject', listgroup=None, listname='List', analysis=None, analysis_type=None, pass_features=False, features=None, parallel=False, **kwargs):
-    """
+    """
    Private function that groups data by subject/list number and performs
    analysis for a chunk of data.
@@ -309,16 +307,16 @@
    subjdict = {subj: data.pres.index.levels[0].values[subj == np.array(subjgroup)] for subj in set(subjgroup)}

    if all(isinstance(el, list) for el in listgroup):
+        # listgroup was specified per subject; keep the legacy per-subject
+        # mapping here (the chunk iteration below still uses listdict[0])
+        listdict = [{lst: data.pres.index.levels[1].values[lst == np.array(listgrpsub)] for lst in set(listgrpsub)} for listgrpsub in listgroup]
    else:
-        listdict = [{lst: data.pres.index.levels[1].values[lst == np.array(listgroup)] for lst in set(listgroup)} for subj in subjdict]
-
-    chunks = [(subj, lst) for subj in subjdict for lst in listdict[0]]
+        # shared list grouping: build one mapping and reuse it for every subject
+        ld = {lst: data.pres.index.levels[1].values[lst == np.array(listgroup)] for lst in set(listgroup)}
+        listdict = {subj: ld for subj in subjdict}
+
+    if isinstance(listdict, dict):
+        # shared list grouping (dict keyed by subject group):
+        # use the specific lists for each subject group
+        chunks = [(subj, lst) for subj in subjdict for lst in listdict[subj]]
+    else:
+        # nested list grouping (list of dicts); legacy behavior used listdict[0]
+        chunks = [(subj, lst) for subj in subjdict for lst in listdict[0]]

    if parallel:
-        import multiprocessing
-        from pathos.multiprocessing import ProcessingPool as Pool
-        p = Pool(multiprocessing.cpu_count())
-        res = p.map(_analysis, chunks)
+        import multiprocessing
+        from pathos.multiprocessing import ProcessingPool as Pool
+        p = Pool(multiprocessing.cpu_count())
+        try:
+            res = p.map(_analysis, chunks)
+        finally:
+            p.close()
+            p.join()
+            p.clear()
    else:
        res = [_analysis(c) for c in chunks]
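The try/finally added around the pathos pool guarantees that worker processes are shut down even when a chunk analysis raises, so failed permutation tests no longer leak processes. The pattern in isolation (assumes pathos is installed; _analysis and chunks stand in for the real callable and work items):

    import multiprocessing
    from pathos.multiprocessing import ProcessingPool as Pool

    def run_parallel(_analysis, chunks):
        p = Pool(multiprocessing.cpu_count())
        try:
            return p.map(_analysis, chunks)
        finally:
            # always release the workers, even if map() raised
            p.close()
            p.join()
            p.clear()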
@@ -364,7 +381,7 @@
[docs]def decode_speech(path, keypath=None, save=False, speech_context=None,
-                  sample_rate=44100, max_alternatives=1, language_code='en-US',
-                  enable_word_time_offsets=True, return_raw=False):
- """
- Decode speech for a file or folder and return results
-
- This function wraps the Google Speech API and ffmpeg to decode speech for
- free recall experiments. Note: in order for this to work, you must have a
- Google Speech account, a google speech credentials file referenced in your
- _bash_profile, and ffmpeg installed on your computer. See our readthedocs
- for more information on how to set this up:
- http://cdl-quail.readthedocs.io/en/latest/.
+
[docs]def decode_speech(path, model_size='base', save=False, return_raw=False, **kwargs):
+    """
+    Decode speech for a file or folder and return results using OpenAI Whisper.

    Parameters
    ----------
    path : str
        Path to a wav file, or a folder of wav files.
- keypath : str
- Google Cloud Speech API key filepath. This is a JSON file containing
- credentials that was generated when creating a service account key.
- If None, assumes you have a local key that is set with an environmental
- variable. See the speech decoding tutorial for details.
+    model_size : str
+        Whisper model size: 'tiny', 'base', 'small', 'medium', 'large'.
+        Default is 'base'.

    save : boolean
- False by default, but if set to true, will save a pickle with the results
- object from google speech, and a text file with the decoded words.
-
- speech_context : list of str
- This allows you to give some context to the speech decoding algorithm.
- For example, this could be the words studied on a given list, or all
- words in an experiment.
-
- sample_rate : float
- The sample rate of your audio files (default is 44100).
-
- max_alternatives : int
- You can specify the speech decoding to return multiple guesses to the
- decoding. This will be saved in the results object (default is 1).
-
- language_code : str
- Decoding language code. Default is en-US. See here for more details:
- https://cloud.google.com/speech/docs/languages
-
- enable_word_time_offsets : bool
- Returns timing information s(onsets/offsets) for each word (default is
- True).
+        False by default. If true, saves results object (pickle) and text transcript.

    return_raw : boolean
- Intead of returning the parsed results objects (i.e. the words), you can
- return the raw reponse object. This has more details about the decoding,
- such as confidence.
+ If True, returns the full Whisper result dictionary.
+ If False (default), returns a list of (WORD, START, END) tuples.
+
+ **kwargs : dict
+        Additional arguments passed to whisper.transcribe (e.g. language).

    Returns
    ----------
    words : list of str, or list of lists of str
- The results of the speech decoding. This will be a list if only one file
- is input, or a list of lists if more than one file is decoded.
-
- raw : google speech object, or list of objects
- You can optionally return the google speech object instead of the parsed
- results by using the return_raw flag.
-
+        The results of the speech decoding.
    """
+
+    if not HAS_WHISPER:
+        raise ImportError("openai-whisper not installed. pip install openai-whisper")
-    # SUBFUNCTIONS
-    def decode_file(file_path, client, speech_context, sample_rate,
-                    max_alternatives, enable_word_time_offsets):
-
-        def recognize(chunk, file_path):
-            """
-            Subfunction that loops over audio segments to recognize speech
-            """
-            # export as flac
-            chunk.export(file_path + ".flac", format="flac", bitrate="44.1k")
-
-            # open flac file
-            with open(file_path + ".flac", 'rb') as sc:
-                speech_content = sc.read()
-
-            # initialize speech sample
-            sample = types.RecognitionAudio(content=speech_content)
-
-            # run speech decoding
-            try:
-                result = client.recognize(opts, sample)
-            except ValueError as e:
-                print(e)
-                result = None
-
-            return result
-
-        opts = {}
-        opts['encoding'] = enums.RecognitionConfig.AudioEncoding.FLAC
-        opts['language_code'] = language_code
-        opts['sample_rate_hertz'] = sample_rate
-        opts['max_alternatives'] = max_alternatives
-        opts['enable_word_time_offsets'] = enable_word_time_offsets
-        if speech_context:
-            opts['speech_contexts'] = [types.SpeechContext(phrases=speech_context)]
-
-        # read in wav
-        audio = AudioSegment.from_wav(file_path)
-
-        # segment into 1 minute chunks
-        if len(audio) > 60000:
-            segments = list(range(0, len(audio), 60000))
-            if segments[-1] < len(audio):
-                segments.append(len(audio) - 1)
-            print('Audio clip is longer than 1 minute. Splitting into %d one minute segments...' % (len(segments) - 1))
-            audio_chunks = []
-            for i in range(len(segments) - 1):
-                audio_chunks.append(audio[segments[i]:segments[i + 1]])
-        else:
-            audio_chunks = [audio]
-
-        # loop over audio segments
-        results = []
-        for idx, chunk in enumerate(audio_chunks):
-            results.append(recognize(chunk, file_path + str(idx)))
-
-        # return list of results
-        return results
-
-    def parse_response(results):
-        """Parses response from google speech"""
-
-        words = []
-
-        for result in results[0].results:
-            alternative = result.alternatives[0]
-            print('Transcript: {}'.format(alternative.transcript))
-            print('Confidence: {}'.format(alternative.confidence))
-
-            for word_info in alternative.words:
-                word = word_info.word
-                start_time = word_info.start_time
-                end_time = word_info.end_time
-                print('Word: {}, start_time: {}, end_time: {}'.format(
-                    word,
-                    start_time.seconds + start_time.nanos * 1e-9,
-                    end_time.seconds + end_time.nanos * 1e-9))
-                words.append((str(word).upper(), start_time.seconds + start_time.nanos * 1e-9,
-                              end_time.seconds + end_time.nanos * 1e-9))
-        return words
-
- # def parse_response(results):
- # """Parses response from google speech"""
- # words = []
- # for idx, result in enumerate(results):
- # if result is None:
- # warnings.warn('No speech was decoded for segment %d' % (idx+1))
- # words.append(None)
- # else:
- # try:
- # for segment in result:
- # for chunk in segment.transcript.split(' '):
- # if chunk != '':
- # words.append(str(chunk).upper())
- # except:
- # warnings.warn('Error parsing response for segment %d' % (idx+1))
- #
- # return words
-
- # MAIN #####################################################################
-
- # initialize speech client
-    if keypath:
-        credentials = service_account.Credentials.from_service_account_file(keypath)
-        scoped_credentials = credentials.with_scopes(['https://www.googleapis.com/auth/cloud-platform'])
-        client = speech.SpeechClient(credentials=scoped_credentials)
-    else:
-        client = speech.SpeechClient()
+    # Load model
+    print(f"Loading Whisper model: {model_size}...")
+    model = whisper.load_model(model_size)
+    print("Model loaded.")

    # make a list of files
    files = []
-    if path.endswith(".wav"):
+    if path.endswith(".wav") or path.endswith(".mp3") or path.endswith(".m4a") or path.endswith(".flac"):
        files = [path]
-    else:
+    elif os.path.isdir(path):
        listdirectory = os.listdir(path)
        for filename in listdirectory:
-            if filename.endswith(".wav"):
-                files.append(path + filename)
-
-    # initialize list of words
-    words = []
-    raw = []
+            if filename.lower().endswith((".wav", ".mp3", ".m4a", ".flac")):
+                files.append(os.path.join(path, filename))
+    else:
+        raise ValueError("Path must be an audio file or directory of audio files.")
+
+    # initialize results
+    results = []
+
    # loop over files
    for i, f in enumerate(files):
-
-        # print progress
-        print('Decoding file ' + str(i+1) + ' of ' + str(len(files)))
-
+        print('Decoding file ' + str(i+1) + ' of ' + str(len(files)) + f": {f}")
+        start = time.time()
+
        try:
-
-            # start timer
-            start = time.time()
-
-            # decode file
-            results = decode_file(f, client, speech_context, sample_rate,
-                                  max_alternatives, enable_word_time_offsets)
-
-            # parsing response
-            parsed_results = parse_response(results)
-
-            # save the processed file
-            words.append(parsed_results)
-
-            # save the processed file
-            raw.append(results)
-
+            # decode; Whisper accepts a file path directly, and
+            # word_timestamps=True is needed for per-word onsets/offsets
+            result = model.transcribe(f, word_timestamps=True, **kwargs)
+
+            if return_raw:
+                parsed = result
+            else:
+                # parse into the (WORD, START, END) format used by legacy quail
+                parsed = []
+                for segment in result['segments']:
+                    # make sure word-level timestamps are available
+                    if 'words' in segment:
+                        for w in segment['words']:
+                            parsed.append((w['word'].strip().upper(), w['start'], w['end']))
+                    else:
+                        # fallback if word-level timestamps are missing (shouldn't
+                        # happen with word_timestamps=True): split the segment text
+                        # and reuse the approximate segment-level times
+                        text = segment['text'].strip().upper()
+                        for t_word in text.split():
+                            parsed.append((t_word, segment['start'], segment['end']))
+
+            # save
            if save:
-                # save the raw response in a pickle
-                pickle.dump(results, open(f + ".p", "wb"))
-
-                # save a text file with just the words
-                pd.DataFrame(parsed_results).to_csv(f + '.txt', header=False,
-                                                    index=False)
-
-            # print when finished
-            print('Finished file ' + str(i+1) + ' of ' + str(len(files)) + ' in ' +
-                  str(round(time.time() - start, 2)) + ' seconds.')
-
-        # handle when something goes wrong
-        except ValueError as e:
+                # save raw pickle
+                with open(f + ".p", "wb") as pfile:
+                    pickle.dump(result, pfile)
+
+                # save text
+                if not return_raw:
+                    pd.DataFrame(parsed).to_csv(f + '.txt', header=False, index=False)
+                else:
+                    with open(f + '.txt', 'w') as tfile:
+                        tfile.write(result['text'])
+
+            results.append(parsed)
+
+            print('Finished in ' + str(round(time.time() - start, 2)) + ' seconds.')
-            words.append("Error")
-            print(e)
-            print('Decoding of file ' + str(i) + ' failed. Moving on to next file.')
+        except Exception as e:
+            print(f"Error decoding {f}: {e}")
+            results.append("Error")

-    if return_raw:
-        if len(words) > 1:
-            return raw
-        else:
-            return raw[0]
+    if len(results) == 1:
+        return results[0]
    else:
-        if len(words) > 1:
-            return words
-        else:
-            return words[0]
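Typical usage of the rewritten decoder, assuming openai-whisper is installed, decode_speech is exposed at the package level as in prior releases, and 'recall.wav' is a file you supply:

    import quail

    # returns a list of (WORD, START, END) tuples for a single file
    words = quail.decode_speech('recall.wav', model_size='base', save=True)

    # forward Whisper options, e.g. force English decoding, and keep the raw result
    raw = quail.decode_speech('recall.wav', return_raw=True, language='en')
    print(raw['text'])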
[docs]class Egg(object):
+    """
    Data object for the quail package

    An Egg data object contains the data you need to analyze free recall experiments.
@@ -152,7 +150,7 @@
Source code for quail.egg
presented together in one block (or list). Each item within the list can be a string representing the stimulus or a dictionary representing the stimuli and its features. If dictionaries are passed, identify the stimulus
- name using the 'stimulus' key and a string label. To represent additional
+ name using the 'item' key and a string label. To represent additional stimulus features, use any text (str) label as the key and a value of the following types: string, int, float, list, array.
@@ -163,7 +161,7 @@
Source code for quail.egg
presented together in one block (or list). Each item within the list can be a string representing the stimulus or a dictionary representing the stimuli and its features. If dictionaries are passed, identify the stimulus
- name using the 'stimulus' key and a string label. To represent additional
+ name using the 'item' key and a string label. To represent additional stimulus features, use any text (str) label as the key and a value of the following types: string, int, float, list, array.
@@ -246,7 +244,7 @@
    rec = fill_missing(rec)

    # if pres is strings, reformat
-    if type(pres[0][0][0]) is not dict:
-        pres = [[[{'item': x} for x in y] for y in z] for z in pres]
+    if len(pres) > 0 and len(pres[0]) > 0 and len(pres[0][0]) > 0 and type(pres[0][0][0]) is not dict:
+        pres = [[[{'item': x} for x in y] for y in z] for z in pres]

-    # if pres is strings, reformat
-    if type(rec[0][0][0]) is not dict:
+    # if rec is strings, reformat (guarding against empty lists)
+    if len(rec) > 0 and len(rec[0]) > 0 and len(rec[0][0]) > 0 and type(rec[0][0][0]) is not dict:
        rec = [[[{'item': x} for x in y] for y in z] for z in rec]
+    elif rec and rec[0] and rec[0][0] and (len(rec[0][0]) == 0):
+        # an empty recall list ([[[]]]) is already list-of-list-of-list, and
+        # fill_missing may already have padded it, so nothing to reformat here
+        pass

    # if item is missing from pres, add it
-    if 'item' not in pres[0][0][0]:
+    if len(pres) > 0 and len(pres[0]) > 0 and len(pres[0][0]) > 0 and 'item' not in pres[0][0][0]:
        [[[x.update({'item': i}) for i, x in enumerate(y)] for y in z] for z in pres]
-    if 'temporal' not in pres[0][0][0]:
+    if len(pres) > 0 and len(pres[0]) > 0 and len(pres[0][0]) > 0 and 'temporal' not in pres[0][0][0]:
        [[[x.update({'temporal': i}) for i, x in enumerate(y)] for y in z] for z in pres]

    # if item is missing from rec, add it
-    if 'item' not in rec[0][0][0]:
+    if len(rec) > 0 and len(rec[0]) > 0 and len(rec[0][0]) > 0 and 'item' not in rec[0][0][0]:
        [[[x.update({'item': i}) for i, x in enumerate(y)] for y in z] for z in rec]
-    if 'temporal' not in rec[0][0][0]:
+    if len(rec) > 0 and len(rec[0]) > 0 and len(rec[0][0]) > 0 and 'temporal' not in rec[0][0][0]:
        [[[x.update({'temporal': i}) for i, x in enumerate(y)] for y in z] for z in rec]

    # attach features and dist funcs if they are passed
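The guarded reformatting above wraps plain-string stimuli in per-item dictionaries before anything else touches them; the added length checks only keep empty pres/rec inputs from raising IndexError. The transformation on its own, with toy data:

    pres = [[['cat', 'dog'], ['shoe', 'sock']]]   # one subject, two lists

    if pres and pres[0] and pres[0][0] and not isinstance(pres[0][0][0], dict):
        pres = [[[{'item': x} for x in y] for y in z] for z in pres]

    # pres[0][0] is now [{'item': 'cat'}, {'item': 'dog'}]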
@@ -303,9 +307,9 @@
Source code for quail.egg
    self.dist_funcs = default_dist_funcs(dist_funcs, pres[0][0][0])

    # attach the rest of the variables
-    self.pres = list2pd(pres).applymap(lambda x: {'item': np.nan} if pd.isnull(x) else x)
+    self.pres = list2pd(pres).map(lambda x: {'item': np.nan} if pd.isnull(x) else x)
    self.feature_names = list(self.get_pres_features()[0][0][0])
-    self.rec = list2pd(rec).applymap(lambda x: {'item': np.nan} if pd.isnull(x) else x)
+    self.rec = list2pd(rec).map(lambda x: {'item': np.nan} if pd.isnull(x) else x)
    self.subjgroup = subjgroup
    self.subjname = subjname
    self.listgroup = listgroup
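pandas deprecated DataFrame.applymap in favor of DataFrame.map (added in pandas 2.1), which applies a function elementwise with the same semantics, hence the swap above. An equivalent standalone example:

    import numpy as np
    import pandas as pd

    df = pd.DataFrame([[{'item': 'cat'}, np.nan]])

    # elementwise: replace missing cells with a placeholder dict
    cleaned = df.map(lambda x: {'item': np.nan} if pd.isnull(x) else x)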
@@ -322,58 +326,57 @@
[docs]def get_pres_items(self):
-        """
+    def get_pres_items(self):
+        """
        Returns a df of presented items
        """
-        return self.pres.applymap(lambda x: x['item'])
+        return self.pres.map(lambda x: x['item'])
-

[docs]def get_pres_features(self, features=None):
-        """
+    def get_pres_features(self, features=None):
+        """
        Returns a df of features for presented items
        """
        if features is None:
            features = self.dist_funcs.keys()
        elif not isinstance(features, list):
            features = [features]
-        return self.pres.applymap(lambda x: {k: v for k, v in x.items() if k in features} if x is not None else None)

[docs]def get_rec_features(self, features=None):
-        """
+    def get_rec_features(self, features=None):
+        """
        Returns a df of features for recalled items
        """
        if features is None:
            features = self.dist_funcs.keys()
        elif not isinstance(features, list):
            features = [features]
-        return self.rec.applymap(lambda x: {k: v for k, v in x.items() if k != 'item'} if x is not None else None)
[docs]def info(self):
-        """
+    def info(self):
+        """
        Print info about the data egg
        """
        print('Number of subjects: ' + str(self.n_subjects))
        print('Number of lists per subject: ' + str(self.n_lists))
        print('Number of words per list: ' + str(self.list_length))
        print('Date created: ' + str(self.date_created))
-        print('Meta data: ' + str(self.meta))
+        print('Meta data: ' + str(self.meta))
-

[docs]def save(self, fname, compression='blosc'):
-        """
+    def save(self, fname, compression='zlib'):
+        """
        Save method for the Egg object

        The data will be saved as a 'egg' file, which is a dictionary containing
-        the elements of a Egg saved in the hd5 format using
-        `deepdish`.
+        the elements of a Egg saved using `joblib`.

        Parameters
        ----------
@@ -383,8 +386,7 @@
Source code for quail.egg
            it will be appended.

        compression : str
-            The kind of compression to use. See the deepdish documentation for
-            options: http://deepdish.readthedocs.io/en/latest/api_io.html#deepdish.io.save
+            options: https://joblib.readthedocs.io/en/latest/generated/joblib.dump.html
        """
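Serialization now goes through joblib rather than deepdish/HDF5, with the compression argument forwarded to joblib.dump. A hedged round-trip sketch; the exact dictionary of fields written by Egg.save may differ from this toy stand-in:

    import joblib

    state = {'pres': [[['cat', 'dog']]], 'rec': [[['dog']]]}
    joblib.dump(state, 'my_data.egg', compress=('zlib', 3))

    restored = joblib.load('my_data.egg')
    assert restored['rec'] == [[['dog']]]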
@@ -408,10 +410,10 @@
[docs]def crack(self, subjects=None, lists=None):
-        """
+    def crack(self, subjects=None, lists=None):
+        """
        Wraps crack_egg function to take an egg and returns a subset of the subjects

        Parameters
@@ -427,77 +429,100 @@
Source code for quail.egg
        new_egg : Egg data object
            A mega egg comprised of the input eggs stacked together
        """
-        return crack_egg(self, subjects, lists)
[docs]class FriedEgg(object):
+    """
    Object containing the results of a quail analysis

    Parameters
    ----------
+ data : Pandas.DataFrame
+ Dataframes containing result of an analysis
+
+ analysis : str
+ The type of analysis (e.g. lag-crp)
+
+ list_length : int
+ Length of the lists
+
+ n_lists : int
+ Number of lists
+
+ n_subjects : int
+ Number of subjects
+
+ position : int
+ Position argument (for pnr/pfr)
+
+ date_created : str
+ Date the egg was created
+
+ meta : dict
+        Meta data

    Attributes
    ----------
-    data : List of Pandas.DataFrame
-        List of Dataframes containing result of an analysis
+    data : Pandas.DataFrame
+        Dataframe containing result of an analysis
-    type : str
+    analysis : str
        The type of analysis (e.g. lag-crp)
    """
-
-    def plot(self, **kwargs):
+    def plot(self, **kwargs):
        return plot(self, **kwargs)
-    def get_data(self):
-        """
+    def get_data(self):
+        """
        Return a copy of the data
        """
        return self.data.copy()

-    def save(self, fname, compression='blosc'):
-        """
+    def save(self, fname, compression='zlib'):
+        """
        Save method for the FriedEgg object

        The data will be saved as a 'fegg' file, which is a dictionary containing
-        the elements of a FriedEgg saved in the hd5 format using
-        `deepdish`.
+        the elements of a FriedEgg saved using `joblib`.

        Parameters
        ----------
@@ -507,8 +532,7 @@
Source code for quail.egg
            it will be appended.

        compression : str
-            The kind of compression to use. See the deepdish documentation for
-            options: http://deepdish.readthedocs.io/en/latest/api_io.html#deepdish.io.save
+            options: https://joblib.readthedocs.io/en/latest/generated/joblib.dump.html
        """
@@ -520,15 +544,16 @@
    subjindex = [idx for idx, subj in enumerate(all_data)] if not subjindex else subjindex

-    def make_multi_index(listindex, sub_num):
+    def make_multi_index(listindex, sub_num):
        return pd.MultiIndex.from_tuples([(sub_num, lst) for lst in listindex], names=['Subject', 'List'])

+    if len(all_data) == 0 or (len(all_data) > 0 and len(all_data[0]) == 0):
+        # with no data there is nothing useful to index; an Egg built from an
+        # empty rec still gets its index shape from pres, so just return an
+        # empty frame here
+        return pd.DataFrame()
+
    listindex = list(listindex)
    subjindex = list(subjindex)

-    subs_list_of_dfs = [pd.DataFrame(sub_data, index=make_multi_index(listindex[sub_num], subjindex[sub_num])) for sub_num, sub_data in enumerate(all_data)]
+    subs_list_of_dfs = [pd.DataFrame(sub_data, index=make_multi_index(listindex[sub_num], subjindex[sub_num])) for sub_num, sub_data in enumerate(all_data) if sub_data]
+
+    if not subs_list_of_dfs:
+        return pd.DataFrame()

    return pd.concat(subs_list_of_dfs)
-def format2tidy(df, subjname, listname, subjgroup, analysis=None, position=0):
+def format2tidy(df, subjname, listname, subjgroup, analysis=None, position=0):
    melted_df = pd.melt(df.T)
    melted_df[subjname] = ""
    for idx, sub in enumerate(melted_df['Subject'].unique()):
@@ -182,8 +189,8 @@
[docs]def recmat2egg(recmat, list_length=None):
+    """
    Creates egg data object from zero-indexed recall matrix

    Parameters
@@ -201,15 +208,15 @@
Source code for quail.helpers
    egg : Egg data object
        egg data object computed from the recall matrix
    """
-    from .egg import Egg as Egg
+    from .egg import Egg as Egg

    pres = [[[str(word) for word in list(range(0, list_length))] for reclist in recsub] for recsub in recmat]
    rec = [[[str(word) for word in reclist if word is not None] for reclist in recsub] for recsub in recmat]

    return Egg(pres=pres, rec=rec)
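recmat2egg builds string presentation lists 0..list_length-1 and keeps only non-None recalls, so a zero-indexed recall matrix converts directly. An illustrative call with made-up data (assuming the function is exposed at the package level):

    import quail

    # 1 subject, 2 lists of length 4; None marks the end of recall
    recmat = [[[2, 0, 1, None], [3, 1, None, None]]]
    egg = quail.recmat2egg(recmat, list_length=4)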
-def default_dist_funcs(dist_funcs, feature_example):
-    """
+def default_dist_funcs(dist_funcs, feature_example):
+    """
    Fills in default distance metrics for fingerprint analyses
    """
@@ -228,8 +235,8 @@
[docs]def stack_eggs(eggs, meta='concatenate'):
+    '''
    Takes a list of eggs, stacks them and reindexes the subject number

    Parameters
@@ -248,12 +255,12 @@
Source code for quail.helpers
        A mega egg comprised of the input eggs stacked together
    '''
-    from .egg import Egg
+    from .egg import Egg

    pres = [egg.pres.loc[sub, :].values.tolist() for egg in eggs for sub in egg.pres.index.levels[0].values.tolist()]
    rec = [egg.rec.loc[sub, :].values.tolist() for egg in eggs for sub in egg.rec.index.levels[0].values.tolist()]

-    if meta is 'concatenate':
+    if meta == 'concatenate':
        new_meta = {}
        for egg in eggs:
            for key in egg.meta:
@@ -263,13 +270,13 @@
[docs]def crack_egg(egg, subjects=None, lists=None):
+    '''
    Takes an egg and returns a subset of the subjects or lists

    Parameters
@@ -289,7 +296,7 @@
Source code for quail.helpers
        A sliced egg, good on a salad
    '''
-    from .egg import Egg
+    from .egg import Egg

    if hasattr(egg, 'features'):
        all_have_features = egg.features is not None
@@ -320,8 +327,8 @@
Source code for quail.helpers
    return Egg(pres=pres, rec=rec, **opts)

-def df2list(df):
-    """
+def df2list(df):
+    """
    Convert a MultiIndex df to list

    Parameters
@@ -345,8 +352,8 @@
Source code for quail.helpers
    lst = [df.loc[sub, :].values.tolist() for sub in subjects]
    return lst

-def fill_missing(x):
-    """
+def fill_missing(x):
+    """
    Fills in missing lists (assumes end lists are missing)
    """
@@ -365,16 +372,16 @@
Source code for quail.helpers
        subs.append(new_sub)
    return subs

-def parse_egg(egg):
-    """Parses an egg and returns fields"""
+def parse_egg(egg):
+    """Parses an egg and returns fields"""
    pres_list = egg.get_pres_items().values[0]
    rec_list = egg.get_rec_items().values[0]
    feature_list = egg.get_pres_features().values[0]
    dist_funcs = egg.dist_funcs
    return pres_list, rec_list, feature_list, dist_funcs

-def merge_pres_feats(pres, features):
-    """
+def merge_pres_feats(pres, features):
+    """
    Helper function to merge pres and features to support legacy features argument
    """
@@ -390,15 +397,15 @@
Source code for quail.helpers
        sub.append(exp)
    return sub

-def check_nan(x):
+def check_nan(x):
    y = pd.isnull(x)
    if type(y) is bool:
        return y
    else:
        return False

-def r2z(r):
-    """
+def r2z(r):
+    """
    Function that calculates the Fisher z-transformation

    Parameters
@@ -416,8 +423,8 @@
Source code for quail.helpers
    with np.errstate(invalid='ignore', divide='ignore'):
        return 0.5 * (np.log(1 + r) - np.log(1 - r))
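r2z (above) and z2r (just below) are the standard Fisher transforms, z = arctanh(r) and r = tanh(z); averaging correlations in z space and mapping back avoids the bias of averaging r directly. A quick check of the round trip:

    import numpy as np

    r = np.array([0.1, 0.5, 0.9])
    z = 0.5 * (np.log(1 + r) - np.log(1 - r))            # r2z, i.e. np.arctanh(r)
    r_back = (np.exp(2 * z) - 1) / (np.exp(2 * z) + 1)   # z2r, i.e. np.tanh(z)
    assert np.allclose(r, r_back)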
-def z2r(z):
-    """
+def z2r(z):
+    """
    Function that calculates the inverse Fisher z-transformation

    Parameters
@@ -435,7 +442,7 @@
"""try:
-        egg = FriedEgg(**dd.io.load(filepath))
+        egg = FriedEgg(**joblib.load(filepath))
    except ValueError as e:
        print(e)
        # if error, try loading old format
@@ -195,8 +195,8 @@
"""try:
-        egg = Egg(**dd.io.load(filepath))
+        egg = Egg(**joblib.load(filepath))
    except:
        # if error, try loading old format
        with open(filepath, 'rb') as f:
@@ -231,9 +231,9 @@
Source code for quail.load
    else:
        return egg

-def loadEL(dbpath=None, recpath=None, remove_subs=None, wordpool=None, groupby=None, experiments=None,
-           filters=None):
-    '''
+def loadEL(dbpath=None, recpath=None, remove_subs=None, wordpool=None, groupby=None, experiments=None,
+           filters=None):  # pragma: no cover
+    '''
    Function that loads sql files generated by autoFR Experiment
    '''
@@ -245,8 +245,8 @@
Source code for quail.load
############################################################################# subfunctions #############################################################
-    def db2df(db, filter_func=None):
-        '''
+    def db2df(db, filter_func=None):
+        '''
        Loads db file and converts to dataframe
        '''
        db_url = "sqlite:///" + db
@@ -286,7 +286,7 @@
Source code for quail.load
# flatten nested list so we just have a list of the trialdata recorded# each time psiturk.recordTrialData(trialdata) was called.
-    def isNotNumber(s):
+    def isNotNumber(s):
        try:
            float(s)
            return False
@@ -331,7 +331,7 @@
Source code for quail.load
        return data_frame

    # custom filter to clean db file
-    def experimenter_filter(data_frame):
+    def experimenter_filter(data_frame):
        data = []
        indexes = []
        for line in data_frame.iterrows():
@@ -349,7 +349,7 @@
# this function takes the data frame and returns subject specific data based on the subid variable
-    def filterData(data_frame, subid):
+    def filterData(data_frame, subid):
        filtered_stim_data = data_frame[data_frame['stimulus'].notnull() & data_frame['listNumber'].notnull()]
        filtered_stim_data = filtered_stim_data[filtered_stim_data['trial_type'] == 'single-stim']
        filtered_stim_data = filtered_stim_data[filtered_stim_data['uniqueid'] == subid]
        return filtered_stim_data

-    def createStimDict(data):
+    def createStimDict(data):
        stimDict = []
        for index, row in data.iterrows():
            try:
@@ -429,7 +429,7 @@
Source code for quail.load
        return stimDict

    # this function loads in the recall data into an array of arrays, where each array represents a list of words
-    def loadRecallData(subid):
+    def loadRecallData(subid):
        recalledWords = []
        for i in range(0, 16):
            try:
@@ -461,7 +461,7 @@
Source code for quail.load
        return recalledWords

    # this function computes accuracy for a series of lists
-    def computeListAcc(stimDict, recalledWords):
+    def computeListAcc(stimDict, recalledWords):
        accVec = []
        for i in range(0, 16):
            stim = [stim['text'] for stim in stimDict if stim['listnum'] == i]
@@ -476,7 +476,7 @@
[docs]def load_example_data(dataset='automatic'):
+    """
    Loads example data

-    The example data is an egg containing 30 subjects who completed a free
+    The automatic and manual example data are eggs containing 30 subjects who completed a free
    recall experiment as described here: https://psyarxiv.com/psh48/. The
    subjects studied 8 lists of 16 words each and then performed a free recall test.

+    The naturalistic example data is an egg containing 17 subjects who viewed and verbally
+    recounted an episode of the BBC series Sherlock, as described here:
+    https://www.nature.com/articles/nn.4450. We fit a topic model to hand-annotated
+    text-descriptions of scenes from the video and used the model to transform both the
+    scene descriptions and manual transcriptions of each subject's verbal recall. We then
+    used a Hidden Markov Model to segment the video model and the recall models, by subject,
+    into k events.
+
    Parameters
    ----------
    dataset : str
-        The dataset to load. Can be 'automatic' or 'manual'. The free recall
+        The dataset to load. Can be 'automatic', 'manual', or 'naturalistic'. The free recall
        audio recordings for the 'automatic' dataset were transcribed by Google
-        Cloud Speech and the 'manual' dataset was transcribed by humans.
+        Cloud Speech and the 'manual' dataset was transcribed by humans. The 'naturalistic'
+        dataset was transcribed by humans and transformed as described above.

    Returns
    ----------
@@ -646,18 +655,23 @@
Source code for quail.load
"""# can only be auto or manual
-    assert dataset in ['automatic', 'manual'], "Dataset can only be automatic or manual"
+    assert dataset in ['automatic', 'manual', 'naturalistic'], "Dataset can only be automatic, manual, or naturalistic"
+
+    if dataset == 'naturalistic':
+        # open naturalistic egg
+        egg = Egg(**joblib.load(os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + '.egg'))
+    else:
        # open pickled egg
-    try:
-        with open(os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + '.egg', 'rb') as handle:
-            egg = pickle.load(handle)
-    except:
-        f = dd.io.load(os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + '.egg')
-        egg = Egg(pres=f['pres'], rec=f['rec'], dist_funcs=f['dist_funcs'],
-                  subjgroup=f['subjgroup'], subjname=f['subjname'],
-                  listgroup=f['listgroup'], listname=f['listname'],
-                  date_created=f['date_created'])
+        try:
+            with open(os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + '.egg', 'rb') as handle:
+                egg = pickle.load(handle)
+        except:
+            f = joblib.load(os.path.dirname(os.path.abspath(__file__)) + '/data/' + dataset + '.egg')
+            egg = Egg(pres=f['pres'], rec=f['rec'], dist_funcs=f['dist_funcs'],
+                      subjgroup=f['subjgroup'], subjname=f['subjname'],
+                      listgroup=f['listgroup'], listname=f['listname'],
+                      date_created=f['date_created'])

    return egg.crack()
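Loading one of the bundled datasets and running an analysis end to end, assuming the installed version ships the example data and exposes these functions at the package level:

    import quail

    egg = quail.load_example_data(dataset='automatic')
    egg.info()

    fegg = quail.analyze(egg, analysis='lagcrp')
    fegg.plot()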
[docs]def plot(results, subjgroup=None, subjname='Subject Group', listgroup=None, listname='List', subjconds=None, listconds=None, plot_type=None,
-         plot_style=None, title=None, legend=True, xlim=None, ylim=None,
+         plot_style=None, title=None, legend=None, xlim=None, ylim=None,
          save_path=None, show=True, ax=None, **kwargs):
-    """
+    """
    General plot function that groups data by subject/list number and performs analysis.

    Parameters
@@ -196,134 +194,147 @@
        results.data = results.data.loc[idx[subjconds, :], :]

        # filter subjgroup
-        subjgroup = filter(lambda x: x in subjconds, subjgroup)
+        subjgroup = list(filter(lambda x: x in subjconds, subjgroup))

    if listconds:
        # make sure its a list
@@ -357,10 +368,25 @@
Source code for quail.plot
    # convert to tidy and format for plotting
    tidy_data = format2tidy(results.data, subjname, listname, subjgroup, analysis=results.analysis, position=results.position)

+    # auto-suppress the legend if there is only one group and the user
+    # did not explicitly ask for one
+    if legend is None:
+        legend = True
+        try:
+            if plot_type == 'list':
+                # check unique list names
+                if len(tidy_data[listname].unique()) <= 1:
+                    legend = False
+            elif plot_type == 'subject':
+                if len(tidy_data[subjname].unique()) <= 1:
+                    legend = False
+        except:
+            pass
+
    if not ax == None:
        kwargs['ax'] = ax

-    #plot!
+    # plot!
    if results.analysis == 'accuracy':
        ax = plot_acc(tidy_data, plot_style, plot_type, listname, subjname, **kwargs)
    elif results.analysis == 'temporal':
@@ -383,10 +409,8 @@