diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 0000000000..6984730035
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,63 @@
+version: 2
+
+jobs:
+ build:
+ docker:
+ - image: circleci/python:3.6
+ environment:
+ DISTRIB: "conda"
+ PYTHON_VERSION: "3.6"
+ NUMPY_VERSION: "*"
+ SCIPY_VERSION: "*"
+ SCIKIT_LEARN_VERSION: "*"
+ MATPLOTLIB_VERSION: "*"
+
+ steps:
+ - checkout
+ # Get rid of existing virtualenvs on circle ci as they conflict with conda.
+ # Trick found here:
+ # https://discuss.circleci.com/t/disable-autodetection-of-project-or-application-of-python-venv/235/10
+ - run: cd && rm -rf ~/.pyenv && rm -rf ~/virtualenvs
+ # We need to remove conflicting texlive packages.
+ - run: sudo -E apt-get -yq remove texlive-binaries --purge
+ # Installing required packages for the `make -C doc check` command to work.
+ - run: sudo -E apt-get -yq update
+ - run: sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install dvipng texlive-latex-base texlive-latex-extra
+ - restore_cache:
+ key: v1-packages+datasets-{{ .Branch }}
+ - run: wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh
+ - run: chmod +x ~/miniconda.sh && ~/miniconda.sh -b
+ - run:
+ name: Setup conda path in env variables
+ command: |
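+ # CircleCI sources $BASH_ENV at the start of each later step, so conda stays on PATH.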
+ echo 'export PATH="$HOME/miniconda3/bin:$PATH"' >> $BASH_ENV
+ - run:
+ name: Create conda env
+ command: |
+ conda create -n testenv python=3.6 numpy scipy scikit-learn matplotlib pandas \
+ lxml mkl sphinx pillow -yq
+ conda install -n testenv nibabel -c conda-forge -yq
+ - run:
+ name: Running CircleCI test (make html)
+ command: |
+ source activate testenv
+ pip install -e .
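+ # pipefail is necessary to propagate the exit code of make through the pipe to tee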
+ set -o pipefail && cd doc && make html-strict 2>&1 | tee ~/log.txt
+ no_output_timeout: 5h
+ - save_cache:
+ key: v1-packages+datasets-{{ .Branch }}
+ paths:
+ - $HOME/nilearn_data
+ - $HOME/miniconda3
+
+ - store_artifacts:
+ path: doc/_build/html
+ - store_artifacts:
+ path: coverage
+ - store_artifacts:
+ path: $HOME/log.txt
+ destination: log.txt
+
+
+
+
diff --git a/.gitignore b/.gitignore
index aa06e3863d..4beefc864a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,3 +33,5 @@ tags
*.tgz
.idea/
+
+doc/themes/nilearn/static/jquery.js
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 9efc43c123..7572544ac5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -20,27 +20,33 @@ matrix:
include:
# Oldest supported versions (with neurodebian)
- env: DISTRIB="conda" PYTHON_VERSION="2.7"
- NUMPY_VERSION="1.8.2" SCIPY_VERSION="0.14"
- SCIKIT_LEARN_VERSION="0.15.1" MATPLOTLIB_VERSION="1.3.1"
- PANDAS_VERSION="0.13.0" NIBABEL_VERSION="2.0.2" COVERAGE="true"
+ NUMPY_VERSION="1.11.2" SCIPY_VERSION="0.17"
+ SCIKIT_LEARN_VERSION="0.18" MATPLOTLIB_VERSION="1.5.1"
+ PANDAS_VERSION="0.18.0" NIBABEL_VERSION="2.0.2" COVERAGE="true"
# Oldest supported versions without matplotlib
- env: DISTRIB="conda" PYTHON_VERSION="2.7"
- NUMPY_VERSION="1.8.2" SCIPY_VERSION="0.14"
- SCIKIT_LEARN_VERSION="0.15"
+ NUMPY_VERSION="1.11.2" SCIPY_VERSION="0.17"
+ SCIKIT_LEARN_VERSION="0.18"
# Fake Ubuntu Xenial (Travis doesn't support Xenial yet)
- env: DISTRIB="conda" PYTHON_VERSION="2.7"
- NUMPY_VERSION="1.11" SCIPY_VERSION="0.17"
- SCIKIT_LEARN_VERSION="0.17"
+ NUMPY_VERSION="1.13" SCIPY_VERSION="0.19"
+ SCIKIT_LEARN_VERSION="0.18.1"
NIBABEL_VERSION="2.0.2"
# Python 3.4 with intermediary versions
- env: DISTRIB="conda" PYTHON_VERSION="3.4"
- NUMPY_VERSION="1.8" SCIPY_VERSION="0.14"
- SCIKIT_LEARN_VERSION="0.15" MATPLOTLIB_VERSION="1.4"
+ NUMPY_VERSION="1.11.2" SCIPY_VERSION="0.17"
+ SCIKIT_LEARN_VERSION="0.18" MATPLOTLIB_VERSION="1.5.1"
# Most recent versions
- env: DISTRIB="conda" PYTHON_VERSION="3.5"
NUMPY_VERSION="*" SCIPY_VERSION="*" PANDAS_VERSION="*"
SCIKIT_LEARN_VERSION="*" MATPLOTLIB_VERSION="*" COVERAGE="true"
- # FLAKE8 linting on diff wrt common ancestor with upstream/master
+ LXML_VERSION="*"
+ - env: DISTRIB="conda" PYTHON_VERSION="3.7"
+ NUMPY_VERSION="*" SCIPY_VERSION="*" PANDAS_VERSION="*"
+ SCIKIT_LEARN_VERSION="*" MATPLOTLIB_VERSION="*" COVERAGE="true"
+ LXML_VERSION="*"
+
+ # FLAKE8 linting on diff wrt common ancestor with upstream/master
# Note: the python value is only there to trigger allow_failures
- python: 2.7
env: DISTRIB="conda" PYTHON_VERSION="2.7" FLAKE8_VERSION="*" SKIP_TESTS="true"
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 516858ed0c..c67751492c 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -24,6 +24,8 @@ particular:
* Andres Hoyos Idrobo
* Salma Bougacha
* Mehdi Rahim
+* Sylvain Lanuzel
+* `Kshitij Chawla `_
Many of them also contributed outside of Parietal, notably:
@@ -43,6 +45,8 @@ Mehdi Rahim, Philippe Gervais where payed by the `NiConnect
project, funded by the French `Investissement d'Avenir
`_.
+Nilearn is also supported by `DigiCosme `_ |digicosme logo|
+
.. _citing:
Citing nilearn
@@ -69,3 +73,7 @@ guarantee the future of the toolkit, if you use it, please cite it.
See the scikit-learn documentation on `how to cite
`_.
+
+.. |digicosme logo| image:: logos/digi-saclay-logo-small.png
+ :height: 25
+ :alt: DigiCosme Logo
\ No newline at end of file
diff --git a/README.rst b/README.rst
index 382a062817..7885ed878b 100644
--- a/README.rst
+++ b/README.rst
@@ -40,13 +40,13 @@ The required dependencies to use the software are:
* Python >= 2.7,
* setuptools
-* Numpy >= 1.6.1
-* SciPy >= 0.14
-* Scikit-learn >= 0.15
+* Numpy >= 1.11
+* SciPy >= 0.17
+* Scikit-learn >= 0.18
* Nibabel >= 2.0.2
If you are using nilearn plotting functionalities or running the
-examples, matplotlib >= 1.1.1 is required.
+examples, matplotlib >= 1.5.1 is required.
If you want to run the tests, you need nose >= 1.2.1 and coverage >= 3.6.
diff --git a/circle.yml b/circle.yml
deleted file mode 100644
index e9ab38b13f..0000000000
--- a/circle.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-machine:
- environment:
- PATH: /home/ubuntu/miniconda2/bin:$PATH
-
-dependencies:
- cache_directories:
- - "~/nilearn_data"
-
- pre:
- # Get rid of existing virtualenvs on circle ci as they conflict with conda.
- # Trick found here:
- # https://discuss.circleci.com/t/disable-autodetection-of-project-or-application-of-python-venv/235/10
- - cd && rm -rf ~/.pyenv && rm -rf ~/virtualenvs
- # We need to remove conflicting texlive packages.
- - sudo -E apt-get -yq remove texlive-binaries --purge
- # Installing required packages for `make -C doc check command` to work.
- - sudo -E apt-get -yq update
- - sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install dvipng texlive-latex-base texlive-latex-extra
-
- override:
- # Moving to nilearn directory before performing the installation.
- - cd ~/nilearn
- - source continuous_integration/install.sh:
- environment:
- DISTRIB: "conda"
- PYTHON_VERSION: "3.5"
- NUMPY_VERSION: "*"
- SCIPY_VERSION: "*"
- SCIKIT_LEARN_VERSION: "*"
- MATPLOTLIB_VERSION: "*"
- - conda install sphinx coverage pillow pandas -y -n testenv
-
- # Generating html documentation (with warnings as errors)
- # we need to do this here so the datasets will be cached
- - source continuous_integration/circle_ci_test_doc.sh:
- timeout: 2500 # seconds
-
-test:
- override:
- # override is needed otherwise nosetests is run by default
- - echo "Documentation has been built in the 'dependencies' step. No additional test to run"
-
-general:
- artifacts:
- - "doc/_build/html"
- - "coverage"
- - "~/log.txt"
diff --git a/continuous_integration/circle_ci_test_doc.sh b/continuous_integration/circle_ci_test_doc.sh
deleted file mode 100644
index 3d74fc78c3..0000000000
--- a/continuous_integration/circle_ci_test_doc.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!bin/bash
-
-# on circle ci, each command run with it's own execution context so we have to
-# activate the conda testenv on a per command basis. That's why we put calls to
-# python (conda) in a dedicated bash script and we activate the conda testenv
-# here.
-source activate testenv
-
-# pipefail is necessary to propagate exit codes
-set -o pipefail && cd doc && make html-strict 2>&1 | tee ~/log.txt
diff --git a/continuous_integration/install.sh b/continuous_integration/install.sh
index 7615f7f87c..512cbdf2f3 100755
--- a/continuous_integration/install.sh
+++ b/continuous_integration/install.sh
@@ -35,7 +35,8 @@ print_conda_requirements() {
# - for scikit-learn, SCIKIT_LEARN_VERSION is used
TO_INSTALL_ALWAYS="pip nose"
REQUIREMENTS="$TO_INSTALL_ALWAYS"
- TO_INSTALL_MAYBE="python numpy scipy matplotlib scikit-learn pandas flake8"
+ TO_INSTALL_MAYBE="python numpy scipy matplotlib scikit-learn pandas \
+flake8 lxml"
for PACKAGE in $TO_INSTALL_MAYBE; do
# Capitalize package name and add _VERSION
PACKAGE_VERSION_VARNAME="${PACKAGE^^}_VERSION"
@@ -61,10 +62,10 @@ create_new_conda_env() {
# Use the miniconda installer for faster download / install of conda
# itself
- wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh \
+ wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
-O ~/miniconda.sh
chmod +x ~/miniconda.sh && ~/miniconda.sh -b
- export PATH=$HOME/miniconda2/bin:$PATH
+ export PATH=$HOME/miniconda3/bin:$PATH
echo $PATH
conda update --quiet --yes conda
diff --git a/doc/conf.py b/doc/conf.py
index 475928cf07..baa278440e 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -14,9 +14,30 @@
import sys
import os
+import shutil
import sphinx
from distutils.version import LooseVersion
+# jquery is included in plotting package data because it is needed for
+# interactive plots. It is also needed by the documentation, so we copy
+# it to the themes/nilearn/static folder.
+shutil.copy(
+ os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ 'nilearn', 'plotting', 'data', 'js', 'jquery.min.js'),
+ os.path.join(os.path.dirname(__file__), 'themes', 'nilearn', 'static',
+ 'jquery.js'))
+
+
+# -- Parallel computing ------------------------------------------------------
+try:
+ from sklearn.utils import parallel_backend, cpu_count
+ # cpu_count must be called, and parallel_backend expects a backend name
+ # first ('threading' here is an assumed choice).
+ parallel_backend('threading', n_jobs=max(cpu_count(), 4))
+except Exception:
+ pass
+
+# ----------------------------------------------------------------------------
+
+
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
@@ -294,10 +315,10 @@
'reference_url' : {
'nilearn': None,
'matplotlib': 'http://matplotlib.org',
- 'numpy': 'http://docs.scipy.org/doc/numpy-1.6.0',
- 'scipy': 'http://docs.scipy.org/doc/scipy-0.11.0/reference',
+ 'numpy': 'http://docs.scipy.org/doc/numpy-1.11.0',
+ 'scipy': 'http://docs.scipy.org/doc/scipy-0.17.0/reference',
'nibabel': 'http://nipy.org/nibabel',
- 'sklearn': 'http://scikit-learn.org/0.17/',
+ 'sklearn': 'http://scikit-learn.org/0.18/',
'pandas': 'http://pandas.pydata.org'}
}
@@ -319,6 +340,8 @@ def touch_example_backreferences(app, what, name, obj, options, lines):
# Add the 'copybutton' javascript, to hide/show the prompt in code
# examples
+
+
def setup(app):
app.add_javascript('copybutton.js')
app.connect('autodoc-process-docstring', touch_example_backreferences)
diff --git a/doc/connectivity/functional_connectomes.rst b/doc/connectivity/functional_connectomes.rst
index 4e96db8e0e..e816d1a081 100644
--- a/doc/connectivity/functional_connectomes.rst
+++ b/doc/connectivity/functional_connectomes.rst
@@ -205,29 +205,37 @@ In the case of the MSDL atlas
with MNI coordinates for each region (see for instance example:
:ref:`sphx_glr_auto_examples_03_connectivity_plot_probabilistic_atlas_extraction.py`).
+.. image:: ../auto_examples/03_connectivity/images/sphx_glr_plot_probabilistic_atlas_extraction_002.png
+ :target: ../auto_examples/03_connectivity/plot_probabilistic_atlas_extraction.html
+
..
For doctesting
>>> from nilearn import datasets
>>> atlas_filename = datasets.fetch_atlas_msdl().maps # doctest: +SKIP
-For another atlas this information can be computed for each region with
-the :func:`nilearn.plotting.find_xyz_cut_coords` function
-(see example:
-:ref:`sphx_glr_auto_examples_03_connectivity_plot_multi_subject_connectome.py`)::
+As you can see, the correlation matrix gives a very "full" graph: every
+node is connected to every other one. This is because it also captures
+indirect connections. In the next section we will see how to focus on
+direct connections only.
- >>> from nilearn import image, plotting
- >>> atlas_region_coords = [plotting.find_xyz_cut_coords(img) for img in image.iter_img(atlas_filename)] # doctest: +SKIP
+A functional connectome: extracting coordinates of regions
+==========================================================
+For atlases that do not provide region coordinates, center coordinates
+can be computed for each region of a hard parcellation or probabilistic atlas.
+ * For hard parcellation atlases (e.g. :func:`nilearn.datasets.fetch_atlas_destrieux_2009`),
+ use the :func:`nilearn.plotting.find_parcellation_cut_coords`
+ function. See example:
+ :ref:`sphx_glr_auto_examples_03_connectivity_plot_atlas_comparison.py`
+ * For probabilistic atlases (e.g. :func:`nilearn.datasets.fetch_atlas_msdl`), use the
+ :func:`nilearn.plotting.find_probabilistic_atlas_cut_coords` function.
+ See example: :ref:`sphx_glr_auto_examples_03_connectivity_plot_multi_subject_connectome.py`::
-.. image:: ../auto_examples/03_connectivity/images/sphx_glr_plot_probabilistic_atlas_extraction_002.png
- :target: ../auto_examples/03_connectivity/plot_probabilistic_atlas_extraction.html
+ >>> from nilearn import plotting
+ >>> atlas_region_coords = plotting.find_probabilistic_atlas_cut_coords(atlas_filename) # doctest: +SKIP
-As you can see, the correlation matrix gives a very "full" graph: every
-node is connected to every other one. This is because it also captures
-indirect connections. In the next section we will see how to focus on
-only direct connections.
|
diff --git a/doc/decoding/decoding_intro.rst b/doc/decoding/decoding_intro.rst
index bf7a6b4b36..4b92f5ff76 100644
--- a/doc/decoding/decoding_intro.rst
+++ b/doc/decoding/decoding_intro.rst
@@ -224,10 +224,10 @@ in a `K-Fold strategy
>>> cv = 2
There is a specific function,
-:func:`sklearn.cross_validation.cross_val_score` that computes for you
+:func:`sklearn.model_selection.cross_val_score` that computes for you
the score for the different folds of cross-validation::
- >>> from sklearn.cross_validation import cross_val_score # doctest: +SKIP
+ >>> from sklearn.model_selection import cross_val_score # doctest: +SKIP
>>> cv_scores = cross_val_score(svc, fmri_masked, target, cv=5) # doctest: +SKIP
`cv=5` stipulates a 5-fold cross-validation. Note that this function is located
@@ -267,7 +267,7 @@ caveats, and guidelines*, Neuroimage 2017).
Here, in the Haxby example, we are going to leave a session out, in order
to have a test set independent from the train set. For this, we are going
to use the session label, present in the behavioral data file, and
-:class:`sklearn.cross_validation.LeaveOneLabelOut`.
+:class:`sklearn.model_selection.LeaveOneGroupOut`.
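+
+A minimal sketch of this strategy (assuming ``session`` holds the session
+labels read from the behavioral file)::
+
+ >>> from sklearn.model_selection import LeaveOneGroupOut, cross_val_score # doctest: +SKIP
+ >>> cv = LeaveOneGroupOut() # doctest: +SKIP
+ >>> cv_scores = cross_val_score(svc, fmri_masked, target, cv=cv, groups=session) # doctest: +SKIP
+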
.. note::
@@ -320,9 +320,9 @@ at chance, is to use a *"dummy"* classifier,
**Permutation testing**: A more controlled way, but slower, is to do
permutation testing on the labels, with
-:func:`sklearn.cross_validation.permutation_test_score`::
+:func:`sklearn.model_selection.permutation_test_score`::
- >>> from sklearn.cross_validation import permutation_test_score
+ >>> from sklearn.model_selection import permutation_test_score
>>> null_cv_scores = permutation_test_score(svc, fmri_masked, target, cv=cv) # doctest: +SKIP
|
diff --git a/doc/decoding/estimator_choice.rst b/doc/decoding/estimator_choice.rst
index 0b4a33388c..78490a0f6b 100644
--- a/doc/decoding/estimator_choice.rst
+++ b/doc/decoding/estimator_choice.rst
@@ -115,7 +115,7 @@ not perform as well on new data.
:scale: 60
With scikit-learn nested cross-validation is done via
-:class:`sklearn.grid_search.GridSearchCV`. It is unfortunately time
+:class:`sklearn.model_selection.GridSearchCV`. It is unfortunately time
consuming, but the ``n_jobs`` argument can spread the load on multiple
CPUs.
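+
+A minimal sketch (hypothetical parameter grid; ``fmri_masked`` and ``target``
+as in the decoding introduction)::
+
+ >>> from sklearn.model_selection import GridSearchCV # doctest: +SKIP
+ >>> from sklearn.svm import SVC # doctest: +SKIP
+ >>> grid = GridSearchCV(SVC(kernel='linear'), param_grid={'C': [.1, 1., 10.]}, cv=5, n_jobs=4) # doctest: +SKIP
+ >>> grid.fit(fmri_masked, target) # doctest: +SKIP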
diff --git a/doc/decoding/searchlight.rst b/doc/decoding/searchlight.rst
index dc7e419fb5..a5a8ddba97 100644
--- a/doc/decoding/searchlight.rst
+++ b/doc/decoding/searchlight.rst
@@ -113,7 +113,7 @@ Kriegskorte et al. use a 5.6mm radius because it yielded the best detection
performance in their simulation.
.. literalinclude:: ../../examples/02_decoding/plot_haxby_searchlight.py
- :start-after: import nilearn.decoding
+ :start-after: cv = KFold(n_splits=4)
:end-before: # F-scores computation
Visualization
diff --git a/doc/developers/group_sparse_covariance.rst b/doc/developers/group_sparse_covariance.rst
index da3cf7767e..56e15e1c45 100644
--- a/doc/developers/group_sparse_covariance.rst
+++ b/doc/developers/group_sparse_covariance.rst
@@ -135,7 +135,7 @@ Synthetic dataset
=================
For testing purposes, a function for synthesis of signals based on
sparse precision matrices has been written:
-`nilearn._utils.testing.generate_group_sparse_gaussian_graphs`.
+`nilearn._utils.data_gen.generate_group_sparse_gaussian_graphs`.
Synthesizing such signals is a hard problem that wasn't solved in the
present implementation. It is hopefully good enough.
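+
+A minimal sketch of calling it (private API; the exact signature and return
+values may differ across versions)::
+
+ >>> from nilearn._utils.data_gen import generate_group_sparse_gaussian_graphs # doctest: +SKIP
+ >>> signals, precisions, topology = generate_group_sparse_gaussian_graphs( # doctest: +SKIP
+ ...     n_subjects=5, n_features=30, density=0.1, random_state=0) # doctest: +SKIP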
@@ -166,7 +166,7 @@ precise location of zeros. Two different sparsity patterns with the
same number of zeros can lead to two significantly different sparsity
level in precision matrices. In practice, it means that for a given
value of the `density` parameter in
-`nilearn._utils.testing.generate_group_sparse_gaussian_graphs`,
+`nilearn._utils.data_gen.generate_group_sparse_gaussian_graphs`,
the actual number of zeros in the precision matrices can fluctuate
widely depending on the random number generation.
diff --git a/doc/images/papaya_stat_map_plot_screenshot.png b/doc/images/papaya_stat_map_plot_screenshot.png
new file mode 100644
index 0000000000..7950348745
Binary files /dev/null and b/doc/images/papaya_stat_map_plot_screenshot.png differ
diff --git a/doc/images/papaya_stat_map_plot_screenshot_notebook.png b/doc/images/papaya_stat_map_plot_screenshot_notebook.png
new file mode 100644
index 0000000000..b703dc597b
Binary files /dev/null and b/doc/images/papaya_stat_map_plot_screenshot_notebook.png differ
diff --git a/doc/images/plotly_connectome_plot.png b/doc/images/plotly_connectome_plot.png
new file mode 100644
index 0000000000..e56d9b47eb
Binary files /dev/null and b/doc/images/plotly_connectome_plot.png differ
diff --git a/doc/images/plotly_markers_plot.png b/doc/images/plotly_markers_plot.png
new file mode 100644
index 0000000000..be0e34d3cb
Binary files /dev/null and b/doc/images/plotly_markers_plot.png differ
diff --git a/doc/images/plotly_surface_atlas_plot.png b/doc/images/plotly_surface_atlas_plot.png
new file mode 100644
index 0000000000..44058d0f66
Binary files /dev/null and b/doc/images/plotly_surface_atlas_plot.png differ
diff --git a/doc/images/plotly_surface_plot.png b/doc/images/plotly_surface_plot.png
new file mode 100644
index 0000000000..3a9b357009
Binary files /dev/null and b/doc/images/plotly_surface_plot.png differ
diff --git a/doc/images/plotly_surface_plot_notebook_screenshot.png b/doc/images/plotly_surface_plot_notebook_screenshot.png
new file mode 100644
index 0000000000..38f72c8c3c
Binary files /dev/null and b/doc/images/plotly_surface_plot_notebook_screenshot.png differ
diff --git a/doc/install_doc_component.html b/doc/install_doc_component.html
index a81b5869f5..c6584d3961 100644
--- a/doc/install_doc_component.html
+++ b/doc/install_doc_component.html
@@ -46,14 +46,14 @@
We recommend that you install a complete
64 bit scientific Python
distribution like Anaconda
+ <a href="https://www.anaconda.com/download/" target="_blank">Anaconda</a>
. Since it meets all the requirements of nilearn, it will save
you time and trouble. You could also check PythonXY
as an alternative.
Nilearn requires a Python installation and the following
- dependencies: ipython, scikit-learn, matplotlib and nibabel.
+ dependencies: ipython, scipy, scikit-learn, matplotlib and nibabel.
Second: open a Command Prompt
(Press "Win-R", type "cmd" and press "Enter". This will open
@@ -72,18 +72,18 @@
Nilearn requires a Python installation and the following
- dependencies: ipython, scikit-learn, matplotlib and nibabel.
+ dependencies: ipython, scipy, scikit-learn, matplotlib and nibabel.
Second: open a Terminal
(Navigate to /Applications/Utilities and double-click on
@@ -113,7 +113,7 @@
Install or ask your system administrator to install the following
packages using the distribution package manager: ipython
- , scikit-learn (sometimes called sklearn,
+ , scipy, scikit-learn (sometimes called sklearn,
or python-sklearn), matplotlib (sometimes
called python-matplotlib) and nibabel
(sometimes called python-nibabel).
diff --git a/doc/logos/digi-saclay-logo-small.png b/doc/logos/digi-saclay-logo-small.png
new file mode 100644
index 0000000000..2190fc5a51
Binary files /dev/null and b/doc/logos/digi-saclay-logo-small.png differ
diff --git a/doc/manipulating_images/manipulating_images.rst b/doc/manipulating_images/manipulating_images.rst
index e338d985b8..57993de8ad 100644
--- a/doc/manipulating_images/manipulating_images.rst
+++ b/doc/manipulating_images/manipulating_images.rst
@@ -146,6 +146,8 @@ Computing and applying spatial masks
Relevant functions:
* compute a mask from EPI images: :func:`nilearn.masking.compute_epi_mask`
+* compute a grey-matter mask using the MNI template:
+ :func:`nilearn.masking.compute_gray_matter_mask`.
* compute a mask from images with a flat background:
:func:`nilearn.masking.compute_background_mask`
* compute for multiple sessions/subjects:
@@ -166,16 +168,18 @@ can be computed from the data:
the brain stands out of a constant background. This is typically the
case when working on statistic maps output after a brain extraction
- :func:`nilearn.masking.compute_epi_mask` for EPI images
+- :func:`nilearn.masking.compute_gray_matter_mask` to compute a
+ gray-matter mask using the MNI template.
-.. figure:: ../auto_examples/01_plotting/images/sphx_glr_plot_visualization_002.png
- :target: ../auto_examples/01_plotting/plot_visualization.html
- :align: right
- :scale: 50%
.. literalinclude:: ../../examples/01_plotting/plot_visualization.py
- :start-after: # Extracting a brain mask
+ :start-after: # Simple computation of a mask from the fMRI data
:end-before: # Applying the mask to extract the corresponding time series
+.. figure:: ../auto_examples/01_plotting/images/sphx_glr_plot_visualization_002.png
+ :target: ../auto_examples/01_plotting/plot_visualization.html
+ :scale: 50%
+
.. _mask_4d_2_3d:
diff --git a/doc/manipulating_images/masker_objects.rst b/doc/manipulating_images/masker_objects.rst
index c1dad572aa..ac8e774678 100644
--- a/doc/manipulating_images/masker_objects.rst
+++ b/doc/manipulating_images/masker_objects.rst
@@ -133,56 +133,37 @@ Alternatively, the mask computation parameters can still be modified.
See the :class:`NiftiMasker` documentation for a complete list of
mask computation parameters.
-As a first example, we will now automatically build a mask from a dataset.
-We will here use the Haxby dataset because it provides the original mask that
-we can compare the data-derived mask against.
-
-Generate a mask with default parameters and visualize it (it is in the
-`mask_img_` attribute of the masker):
+The mask can be retrieved and visualized from the `mask_img_` attribute
+of the masker:
.. literalinclude:: ../../examples/04_manipulating_images/plot_mask_computation.py
- :start-after: # Simple mask extraction from EPI images
+ :start-after: # We need to specify an 'epi' mask_strategy, as this is raw EPI data
:end-before: # Generate mask with strong opening
-.. figure:: ../auto_examples/04_manipulating_images/images/sphx_glr_plot_mask_computation_002.png
- :target: ../auto_examples/04_manipulating_images/plot_mask_computation.html
- :scale: 50%
-
-Changing mask parameters: opening, cutoff
-..........................................
-
-We can then fine-tune the outline of the mask by increasing the number of
-opening steps (`opening=10`) using the `mask_args` argument of the
-:class:`NiftiMasker`. This effectively performs erosion and dilation operations
-on the outer voxel layers of the mask, which can for example remove remaining
-skull parts in the image.
-
-.. literalinclude:: ../../examples/04_manipulating_images/plot_mask_computation.py
- :start-after: # Generate mask with strong opening
- :end-before: # Generate mask with a high lower cutoff
-
-
-.. figure:: ../auto_examples/04_manipulating_images/images/sphx_glr_plot_mask_computation_003.png
+.. figure:: ../auto_examples/04_manipulating_images/images/sphx_glr_plot_mask_computation_004.png
:target: ../auto_examples/04_manipulating_images/plot_mask_computation.html
:scale: 50%
+Different masking strategies
+.............................
-Looking at the :func:`nilearn.masking.compute_epi_mask` called by the
-:class:`NiftiMasker` object, we see two interesting parameters:
-`lower_cutoff` and `upper_cutoff`. These set the grey-value bounds in
-which the masking algorithm will search for its threshold
-(0 being the minimum of the image and 1 the maximum). We will here increase
-the lower cutoff to enforce selection of those
-voxels that appear as bright in the EPI image.
+The `mask_strategy` argument controls how the mask is computed (see the
+sketch after the list):
+
+* `background`: detects a continuous background
+* `epi`: suitable for EPI images
+* `template`: uses an MNI grey-matter template
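+
+A minimal sketch of choosing a strategy (assuming raw EPI images in
+``func_filename``)::
+
+ >>> from nilearn.input_data import NiftiMasker # doctest: +SKIP
+ >>> masker = NiftiMasker(mask_strategy='epi') # doctest: +SKIP
+ >>> masker.fit(func_filename) # doctest: +SKIP
+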
-.. literalinclude:: ../../examples/04_manipulating_images/plot_mask_computation.py
- :start-after: # Generate mask with a high lower cutoff
- :end-before: ###############################################################################
+Extra mask parameters: opening, cutoff...
+..........................................
+The underlying function is :func:`nilearn.masking.compute_epi_mask`,
+called through the `mask_args` argument of the :class:`NiftiMasker`.
+Controlling these arguments sets the fine aspects of the mask. See the
+function's documentation, or :doc:`the NiftiMasker example
+<../auto_examples/04_manipulating_images/plot_mask_computation>`.
-.. figure:: ../auto_examples/04_manipulating_images/images/sphx_glr_plot_mask_computation_004.png
+.. figure:: ../auto_examples/04_manipulating_images/images/sphx_glr_plot_mask_computation_005.png
:target: ../auto_examples/04_manipulating_images/plot_mask_computation.html
:scale: 50%
@@ -196,17 +177,30 @@ preparation::
>>> from nilearn import input_data
>>> masker = input_data.NiftiMasker()
- >>> masker
- NiftiMasker(detrend=False, high_pass=None, low_pass=None, mask_args=None,
- mask_img=None, mask_strategy='background',
- memory=Memory(cachedir=None), memory_level=1, sample_mask=None,
+ >>> masker # doctest: +ELLIPSIS
+ NiftiMasker(detrend=False, dtype=None, high_pass=None, low_pass=None,
+ mask_args=None, mask_img=None, mask_strategy='background',
+ memory=Memory(...), memory_level=1, sample_mask=None,
sessions=None, smoothing_fwhm=None, standardize=False, t_r=None,
target_affine=None, target_shape=None, verbose=0)
+.. note::
+
+ From scikit-learn 0.20, the argument `cachedir` is deprecated in
+ favour of `location`, so `cachedir` may not appear exactly as shown here.
+
The meaning of each parameter is described in the documentation of
:class:`NiftiMasker` (click on the name :class:`NiftiMasker`), here we
comment on the most important.
+.. topic:: **`dtype` argument**
+
+ Forcing your data to a `dtype` of **float32** can help save memory
+ and often provides good-enough numerical precision.
+ You can force this cast by setting `dtype` to 'auto'.
+ In the future this cast will be the default behaviour.
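+
+ A minimal sketch of the cast (assuming a functional image in ``func_img``)::
+
+ >>> from nilearn.input_data import NiftiMasker # doctest: +SKIP
+ >>> masker = NiftiMasker(dtype='auto') # doctest: +SKIP
+ >>> masker.fit_transform(func_img).dtype # doctest: +SKIP
+ dtype('float32')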
+
+
.. seealso::
If you do not want to use the :class:`NiftiMasker` to perform these
@@ -308,14 +302,14 @@ Inverse transform: unmasking data
Once voxel signals have been processed, the result can be visualized as
images after unmasking (masked-reduced data transformed back into
-the original whole-brain space). This step is present in almost all
-the :ref:`examples ` provided in nilearn. Below you will find
+the original whole-brain space). This step is present in many
+:ref:`examples ` provided in nilearn. Below you will find
an excerpt of :ref:`the example performing Anova-SVM on the Haxby data
`):
.. literalinclude:: ../../examples/02_decoding/plot_haxby_anova_svm.py
:start-after: # Look at the SVC's discriminating weights
- :end-before: # Create the figure
+ :end-before: # Use the mean image as a background
|
diff --git a/doc/modules/reference.rst b/doc/modules/reference.rst
index a03b51fb29..a3b0f3856e 100644
--- a/doc/modules/reference.rst
+++ b/doc/modules/reference.rst
@@ -76,6 +76,7 @@ uses.
fetch_atlas_aal
fetch_atlas_basc_multiscale_2015
fetch_atlas_allen_2011
+ fetch_atlas_pauli_2017
fetch_coords_dosenbach_2010
fetch_abide_pcp
fetch_adhd
@@ -88,7 +89,7 @@ uses.
fetch_miyawaki2008
fetch_nyu_rest
fetch_surf_nki_enhanced
- fetch_surf_fsaverage5
+ fetch_surf_fsaverage
fetch_atlas_surf_destrieux
fetch_atlas_talairach
fetch_oasis_vbm
@@ -96,6 +97,8 @@ uses.
fetch_cobre
fetch_neurovault
fetch_neurovault_ids
+ fetch_neurovault_auditory_computation_task
+ fetch_neurovault_motor_task
get_data_dirs
load_mni152_template
load_mni152_brain_mask
@@ -224,6 +227,8 @@ uses.
compute_epi_mask
compute_multi_epi_mask
+ compute_gray_matter_mask
+ compute_multi_gray_matter_mask
compute_background_mask
compute_multi_background_mask
intersect_masks
@@ -305,6 +310,8 @@ uses.
find_cut_slices
find_xyz_cut_coords
+ find_parcellation_cut_coords
+ find_probabilistic_atlas_cut_coords
plot_anat
plot_img
plot_epi
@@ -317,6 +324,11 @@ uses.
plot_surf
plot_surf_roi
plot_surf_stat_map
+ view_surf
+ view_img_on_surf
+ view_connectome
+ view_markers
+ view_stat_map
show
**Classes**:
diff --git a/doc/plotting/index.rst b/doc/plotting/index.rst
index 90f24d68b1..3684fa9ad6 100644
--- a/doc/plotting/index.rst
+++ b/doc/plotting/index.rst
@@ -97,6 +97,11 @@ different heuristics to find cutting coordinates.
|hack|
Plotting a connectome
+ Functions that automatically extract region coordinates from
+ brain parcellations, useful with :func:`plot_connectome`,
+ are demonstrated in
+ **Example:** :ref:`sphx_glr_auto_examples_03_connectivity_plot_atlas_comparison.py`
+
|plot_prob_atlas| :func:`plot_prob_atlas`
|hack|
Plotting 4D probabilistic atlas maps
@@ -226,6 +231,23 @@ Different display modes
================= =========================================================
+Available Colormaps
+===================
+
+The Nilearn plotting library ships with a set of extra colormaps, shown in
+the image below:
+
+.. image:: ../auto_examples/01_plotting/images/sphx_glr_plot_colormaps_001.png
+ :target: ../auto_examples/01_plotting/plot_colormaps.html
+ :scale: 50
+
+These colormaps can be used like any other matplotlib colormap, as in the
+sketch below the image.
+
+.. image:: ../auto_examples/01_plotting/images/sphx_glr_plot_colormaps_002.png
+ :target: ../auto_examples/01_plotting/plot_colormaps.html
+ :scale: 50
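+
+A minimal sketch of using one of them (``cold_hot`` here) with plain
+matplotlib, assuming that importing ``nilearn.plotting`` registers the
+colormaps::
+
+ >>> import numpy as np # doctest: +SKIP
+ >>> import matplotlib.pyplot as plt # doctest: +SKIP
+ >>> from nilearn import plotting # doctest: +SKIP
+ >>> plt.imshow(np.random.rand(10, 10), cmap='cold_hot') # doctest: +SKIP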
+
+
.. _display_modules:
Adding overlays, edges, contours, contour fillings and markers
@@ -255,7 +277,7 @@ plot, and has methods to add overlays, contours or edge maps::
:scale: 50
.. |plot_overlay| image:: ../auto_examples/01_plotting/images/sphx_glr_plot_overlay_002.png
- :target: ../auto_examples/_01_plotting/plot_overlay.html
+ :target: ../auto_examples/01_plotting/plot_overlay.html
:scale: 50
================= =========================================================
@@ -327,6 +349,8 @@ that can be used to save the plot to an image file::
# Don't forget to close the display
>>> display.close() # doctest: +SKIP
+.. _surface-plotting:
+
Surface plotting
================
@@ -335,9 +359,6 @@ on a brain surface.
.. versionadded:: 0.3
-NOTE: These functions works for only with matplotlib higher than 1.3.1.
-
-
.. |plot_surf_roi| image:: ../auto_examples/01_plotting/images/sphx_glr_plot_surf_atlas_001.png
:target: ../auto_examples/01_plotting/plot_surf_atlas.html
:scale: 50
@@ -363,3 +384,123 @@ NOTE: These functions works for only with matplotlib higher than 1.3.1.
:ref:`sphx_glr_auto_examples_01_plotting_plot_surf_stat_map.py`
===================== ===================================================================
+
+
+.. _interactive-plotting:
+
+Interactive plots
+=================
+
+Nilearn also has functions for making interactive plots that can be
+seen in a web browser.
+
+.. versionadded:: 0.5
+
+ Interactive plotting is new in nilearn 0.5
+
+For 3D surface plots of statistical maps or surface atlases, use
+:func:`view_img_on_surf` and :func:`view_surf`. Both produce a 3D plot on the
+cortical surface. The difference is that :func:`view_surf` takes as input a
+surface map and a cortical mesh, whereas :func:`view_img_on_surf` takes as input
+a volume statistical map, and projects it on the cortical surface before making
+the plot.
+
+For 3D plots of a connectome, use :func:`view_connectome`. To see only markers,
+use :func:`view_markers`.
+
+
+.. _interactive-surface-plotting:
+
+3D Plots of statistical maps or atlases on the cortical surface
+---------------------------------------------------------------
+
+:func:`view_img_on_surf`: Surface plot using a 3D statistical map::
+
+ >>> from nilearn import plotting, datasets # doctest: +SKIP
+ >>> img = datasets.fetch_localizer_button_task()['tmaps'][0] # doctest: +SKIP
+ >>> view = plotting.view_img_on_surf(img, threshold='90%', surf_mesh='fsaverage') # doctest: +SKIP
+
+If you are running a notebook, displaying ``view`` will embed an interactive
+plot (this is the case for all interactive plots produced by nilearn's "view"
+functions):
+
+.. image:: ../images/plotly_surface_plot_notebook_screenshot.png
+
+If you are not using a notebook, you can open the plot in a browser like this::
+
+ >>> view.open_in_browser() # doctest: +SKIP
+
+This will open this 3D plot in your web browser:
+
+.. image:: ../images/plotly_surface_plot.png
+
+
+Or you can save it to an html file::
+
+ >>> view.save_as_html("surface_plot.html") # doctest: +SKIP
+
+
+:func:`view_surf`: Surface plot using a surface map and a cortical mesh::
+
+ >>> from nilearn import plotting, datasets # doctest: +SKIP
+ >>> destrieux = datasets.fetch_atlas_surf_destrieux() # doctest: +SKIP
+ >>> fsaverage = datasets.fetch_surf_fsaverage() # doctest: +SKIP
+ >>> view = plotting.view_surf(fsaverage['infl_left'], destrieux['map_left'], # doctest: +SKIP
+ ... cmap='gist_ncar', symmetric_cmap=False) # doctest: +SKIP
+ ...
+ >>> view.open_in_browser() # doctest: +SKIP
+
+
+.. image:: ../images/plotly_surface_atlas_plot.png
+
+.. _interactive-connectome-plotting:
+
+3D Plots of connectomes
+-----------------------
+
+:func:`view_connectome`: 3D plot of a connectome (``correlation_matrix`` being
+a region-by-region connectivity matrix and ``coords`` the region coordinates)::
+
+ >>> view = plotting.view_connectome(correlation_matrix, coords, threshold='90%') # doctest: +SKIP
+ >>> view.open_in_browser() # doctest: +SKIP
+
+
+.. image:: ../images/plotly_connectome_plot.png
+
+
+.. _interactive-markers-plotting:
+
+3D Plots of markers
+-------------------
+
+:func:`view_markers`: showing markers (e.g. seed locations) in 3D::
+
+ >>> from nilearn import plotting # doctest: +SKIP
+ >>> dmn_coords = [(0, -52, 18), (-46, -68, 32), (46, -68, 32), (1, 50, -5)] # doctest: +SKIP
+ >>> view = plotting.view_markers( # doctest: +SKIP
+ ...     dmn_coords, ['red', 'cyan', 'magenta', 'orange'], marker_size=10) # doctest: +SKIP
+ >>> view.open_in_browser() # doctest: +SKIP
+
+
+
+.. image:: ../images/plotly_markers_plot.png
+
+
+.. _interactive-stat-map-plotting:
+
+Interactive visualization of statistical map slices
+---------------------------------------------------
+
+:func:`view_stat_map`: open stat map in a Papaya viewer (https://github.com/rii-mango/Papaya)::
+
+ >>> from nilearn import plotting, datasets # doctest: +SKIP
+ >>> img = datasets.fetch_localizer_button_task()['tmaps'][0] # doctest: +SKIP
+ >>> view = plotting.view_stat_map(img, threshold=2, vmax=4) # doctest: +SKIP
+
+In a Jupyter notebook, you can view the image like this:
+
+.. image:: ../images/papaya_stat_map_plot_screenshot_notebook.png
+
+Or you can open a viewer in your web browser if you are not in the
+notebook::
+
+ >>> view.open_in_browser() # doctest: +SKIP
diff --git a/doc/themes/nilearn/layout.html b/doc/themes/nilearn/layout.html
index 9d2ce2e1e0..d4c42eb9ff 100644
--- a/doc/themes/nilearn/layout.html
+++ b/doc/themes/nilearn/layout.html
@@ -105,10 +105,10 @@
for(i in sections){
if(sections[i] > pos){
break;
- };
- if($('a.internal[href$="' + i + '"]').is(':visible')){
+ }
+ if($('a.internal[href$="' + i + '"]').is(':visible')){
current_section = i;
- };
+ }
}
$('a.internal[href$="' + current_section + '"]').addClass('active');
});
diff --git a/doc/themes/nilearn/static/copybutton.js b/doc/themes/nilearn/static/copybutton.js
index b56d9b2f00..925d44f743 100644
--- a/doc/themes/nilearn/static/copybutton.js
+++ b/doc/themes/nilearn/static/copybutton.js
@@ -5,7 +5,7 @@ $(document).ready(function() {
var div = $('.highlight-python .highlight,' +
'.highlight-python3 .highlight,' +
'.highlight-pycon .highlight,' +
- '.highlight-default .highlight')
+ '.highlight-default .highlight');
var pre = div.find('pre');
// get the styles from the current theme
@@ -21,14 +21,14 @@ $(document).ready(function() {
'border-width': border_width, 'color': border_color, 'text-size': '75%',
'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em',
'border-radius': '0 3px 0 0'
- }
+ };
// create and add the button to all the code blocks that contain >>>
div.each(function(index) {
var jthis = $(this);
if (jthis.find('.gp').length > 0) {
var button = $('>>>');
- button.css(button_styles)
+ button.css(button_styles);
button.attr('title', hide_text);
button.data('hidden', 'false');
jthis.prepend(button);
diff --git a/doc/themes/nilearn/static/jquery.js b/doc/themes/nilearn/static/jquery.js
deleted file mode 100644
index 16ad06c5ac..0000000000
--- a/doc/themes/nilearn/static/jquery.js
+++ /dev/null
@@ -1,4 +0,0 @@
-/*! jQuery v1.7.2 jquery.com | jquery.org/license */
-(... remaining minified jQuery v1.7.2 source omitted ...)