Skip to content

Commit

Permalink
first commit with npy files using lfs
Browse files Browse the repository at this point in the history
  • Loading branch information
Basile CONFAVREUX committed Jan 11, 2024
0 parents commit c46a008
Show file tree
Hide file tree
Showing 58 changed files with 11,443 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
*.psd filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
141 changes: 141 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
# log folders
sbi-logs/

# Files
*.npz

# Images
*.png
*.svg

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

notebooks/tmp/
170 changes: 170 additions & 0 deletions Analysis/Example.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "1f52d2e4",
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2 "
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "5ab04fd6",
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import random\n",
"import os\n",
"from matplotlib import rcParams\n",
"import h5py\n",
"import hashlib\n",
"import time\n",
"from typing import List\n",
"import aux_analysis as aux\n",
"from sklearn import manifold\n",
"from sklearn import decomposition\n",
"from synapsbi.utils import apply_n_conditions, load_and_merge\n",
"from sbi.analysis import pairplot"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "f6ae1552",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"retrieved 354300/354300 simulations\n",
"retrieved 10/10 simulations\n"
]
}
],
"source": [
"save_dir = \"../data_synapsesbi/bg_IF_EEEIIEII_6pPol/\"\n",
"\n",
"# load all the rules simulated in the paper (pi0 -> pi3)\n",
"dataset_all = load_and_merge(save_dir,\n",
" (\"bg_IF_EEEIIEII_6pPol_all.npy\",))\n",
"\n",
"#load the new rules simulated in the tutorial\n",
"dataset_tuto = load_and_merge(save_dir,\n",
" (\"pi3_r5to10Hz_metrics.npy\",))"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "076e9773",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"In the original dataset:\n",
"60004 simulations out of 354300 are plausible, aka 16.935930002822467 %\n",
"52474 simulations out of 354300 have rates between 5 and 10Hz, aka 14.810612475303413 %\n",
"6959 simulations out of 354300 are plausible and have rates between 5 and 10Hz, aka 1.9641546711826137 %\n",
"\n",
"For the newly sampled rules:\n",
"4 simulations out of 10 are plausible, aka 40.0 %\n",
"7 simulations out of 10 have rates between 5 and 10Hz, aka 70.0 %\n",
"3 simulations out of 10 are plausible and have rates between 5 and 10Hz, aka 30.0 %\n"
]
}
],
"source": [
"# Set of conditions for plausibility from the paper, if you are using MLP rules, remove the weif and wiif conditions\n",
"cond_r = (\"rate\", 1, 50)\n",
"cond_cv = (\"cv_isi\", 0.7, 1000)\n",
"cond_sf = (\"spatial_Fano\", 0.5, 2.5)\n",
"cond_tf = (\"temporal_Fano\", 0.5, 2.5)\n",
"cond_ac = (\"auto_cov\", 0, 0.1)\n",
"cond_fft = (\"fft\", 0, 1)\n",
"cond_wb = (\"w_blow\", 0, 0.1)\n",
"cond_srt = (\"std_rate_temporal\", 0, 0.5)\n",
"cond_srs = (\"std_rate_spatial\", 0, 5)\n",
"cond_scv = (\"std_cv\", 0, 0.2)\n",
"cond_wc = (\"w_creep\", 0, 0.05)\n",
"cond_ri = (\"rate_i\", 1, 50)\n",
"cond_weef =(\"weef\", 0 ,0.5)\n",
"cond_weif =(\"weif\", 0 ,0.5)\n",
"cond_wief =(\"wief\", 0 ,5)\n",
"cond_wiif =(\"wiif\", 0 ,5)\n",
"\n",
"cond_plausible = (cond_r,cond_ri,\n",
" cond_wb,cond_wc,cond_weef,cond_weif, cond_wief, cond_wiif,\n",
" cond_ac,cond_cv,cond_fft,cond_srt,cond_srs,cond_sf,cond_tf)\n",
"\n",
"cond_r5to10 = ((\"rate\", 5, 10),)\n",
"\n",
"cond_plausible_r5to10 = ((\"rate\", 5, 10),cond_ri,\n",
"                  cond_wb,cond_wc,cond_weef,cond_weif, cond_wief, cond_wiif,\n",
"                  cond_ac,cond_cv,cond_fft,cond_srt,cond_srs,cond_sf,cond_tf)\n",
"\n",
"cond_plausible_all = apply_n_conditions(dataset_all, cond_plausible)\n",
"cond_r5to10_all = apply_n_conditions(dataset_all, cond_r5to10)\n",
"cond_plausible_r5to10_all = apply_n_conditions(dataset_all, cond_plausible_r5to10)\n",
"\n",
"cond_plausible_tuto = apply_n_conditions(dataset_tuto, cond_plausible)\n",
"cond_r5to10_tuto = apply_n_conditions(dataset_tuto, cond_r5to10)\n",
"cond_plausible_r5to10_tuto = apply_n_conditions(dataset_tuto, cond_plausible_r5to10)\n",
"\n",
"print(\"In the original dataset:\")\n",
"print(np.sum(cond_plausible_all), \"simulations out of\", len(dataset_all), \"are plausible, aka\", np.sum(cond_plausible_all)/len(dataset_all)*100,\"%\")\n",
"print(np.sum(cond_r5to10_all), \"simulations out of\", len(dataset_all), \"have rates between 5 and 10Hz, aka\", np.sum(cond_r5to10_all)/len(dataset_all)*100,\"%\")\n",
"print(np.sum(cond_plausible_r5to10_all), \"simulations out of\", len(dataset_all), \"are plausible and have rates between 5 and 10Hz, aka\", np.sum(cond_plausible_r5to10_all)/len(dataset_all)*100,\"%\")\n",
"\n",
"print(\"\\nFor the newly sampled rules:\")\n",
"print(np.sum(cond_plausible_tuto), \"simulations out of\", len(dataset_tuto), \"are plausible, aka\", np.sum(cond_plausible_tuto)/len(dataset_tuto)*100,\"%\")\n",
"print(np.sum(cond_r5to10_tuto), \"simulations out of\", len(dataset_tuto), \"have rates between 5 and 10Hz, aka\", np.sum(cond_r5to10_tuto)/len(dataset_tuto)*100,\"%\")\n",
"print(np.sum(cond_plausible_r5to10_tuto), \"simulations out of\", len(dataset_tuto), \"are plausible and have rates between 5 and 10Hz, aka\", np.sum(cond_plausible_r5to10_tuto)/len(dataset_tuto)*100,\"%\")"
]
},
{
"cell_type": "markdown",
"id": "70df64dc",
"metadata": {},
"source": [
"Congratulations, you are done with the tutorial, thank you for going through it :)\n",
"\n",
"Obviously to get more reliable percentages for the \"new\" posterior, we would need much more than 10 samples, but this requires many more spiking network simulations.\n",
"\n",
"You can check the analysis carried out in the paper, as well as the figures in the other notebooks of this folder.\n",
"\n",
"Don't hesitate to contact us if you have any further questions."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
Loading

0 comments on commit c46a008

Please sign in to comment.