[med-svn] [pycorrfit] 01/02: Imported Upstream version 0.8.0-2~beta

Alex Mestiashvili malex-guest at moszumanska.debian.org
Fri Nov 29 02:01:46 UTC 2013


This is an automated email from the git hooks/post-receive script.

malex-guest pushed a commit to branch master
in repository pycorrfit.

commit 4a257fe19771650c0cdd65c58c4d9b9e54f3c6e5
Author: Alexandre Mestiashvili <alex at biotec.tu-dresden.de>
Date:   Fri Nov 29 02:56:01 2013 +0100

    Imported Upstream version 0.8.0-2~beta
---
 .gitignore                                         |   66 +
 ChangeLog.txt                                      |  134 ++
 MANIFEST.in                                        |    5 +
 PKG-INFO                                           |   33 +
 PyCorrFit_doc.pdf                                  |  Bin 0 -> 521911 bytes
 README.md                                          |   23 +
 bin/pycorrfit                                      |   11 +
 doc-src/Bibliography.bib                           | 1834 ++++++++++++++++++++
 doc-src/Images/PyCorrFit_Screenshot_Main.png       |  Bin 0 -> 81892 bytes
 doc-src/Images/PyCorrFit_icon.png                  |  Bin 0 -> 12651 bytes
 doc-src/Images/PyCorrFit_icon.svg                  |  109 ++
 doc-src/Images/PyCorrFit_icon_dark.svg             |  109 ++
 doc-src/Images/PyCorrFit_logo.svg                  |  121 ++
 doc-src/Images/PyCorrFit_logo_dark.pdf             |  Bin 0 -> 9646 bytes
 doc-src/Images/PyCorrFit_logo_dark.png             |  Bin 0 -> 4412 bytes
 doc-src/Images/PyCorrFit_logo_dark.svg             |  121 ++
 doc-src/Images/SchwilleLogo.jpg                    |  Bin 0 -> 14580 bytes
 doc-src/Images/TU_Logo_SW.pdf                      |  Bin 0 -> 4424 bytes
 doc-src/PyCorrFit_doc.tex                          |  180 ++
 doc-src/PyCorrFit_doc_content.tex                  |  596 +++++++
 doc-src/PyCorrFit_doc_models.tex                   |  391 +++++
 .../ExampleFunc_CS_2D+2D+S+T.txt                   |   85 +
 external_model_functions/ExampleFunc_CS_3D+S+T.txt |   81 +
 .../ExampleFunc_Exp_correlated_noise.txt           |   16 +
 .../ExampleFunc_SFCS_1C_2D_Autocorrelation.txt     |   24 +
 .../ExampleFunc_SFCS_1C_2D_Cross-correlation.txt   |   32 +
 .../ExampleFunc_TIRF_zOnly.txt                     |   38 +
 .../Model_AC_3D+T_confocal.txt                     |   20 +
 .../Model_Flow_AC_3D_confocal.txt                  |   41 +
 .../Model_Flow_CC_Backward_3D_confocal.txt         |   49 +
 .../Model_Flow_CC_Forward_3D_confocal.txt          |   48 +
 setup.cfg                                          |    5 +
 setup.py                                           |   51 +
 src/PyCorrFit.py                                   |  147 ++
 src/__init__.py                                    |   25 +
 src/doc.py                                         |  344 ++++
 src/edclasses.py                                   |  221 +++
 src/frontend.py                                    | 1519 ++++++++++++++++
 src/icon.py                                        |  246 +++
 src/leastsquaresfit.py                             |  356 ++++
 src/misc.py                                        |  275 +++
 src/models/MODEL_TIRF_1C.py                        |  239 +++
 src/models/MODEL_TIRF_2D2D.py                      |  133 ++
 src/models/MODEL_TIRF_3D2D.py                      |  138 ++
 src/models/MODEL_TIRF_3D2Dkin_Ries.py              |  401 +++++
 src/models/MODEL_TIRF_3D3D.py                      |  144 ++
 src/models/MODEL_TIRF_gaussian_1C.py               |  131 ++
 src/models/MODEL_TIRF_gaussian_3D2D.py             |  242 +++
 src/models/MODEL_TIRF_gaussian_3D3D.py             |  259 +++
 src/models/MODEL_classic_gaussian_2D.py            |  297 ++++
 src/models/MODEL_classic_gaussian_3D.py            |  309 ++++
 src/models/MODEL_classic_gaussian_3D2D.py          |  160 ++
 src/models/__init__.py                             |  357 ++++
 src/openfile.py                                    |  721 ++++++++
 src/page.py                                        |  999 +++++++++++
 src/plotting.py                                    |  457 +++++
 src/readfiles/__init__.py                          |  226 +++
 src/readfiles/read_ASC_ALV_6000.py                 |  157 ++
 src/readfiles/read_CSV_PyCorrFit.py                |  134 ++
 src/readfiles/read_FCS_Confocor3.py                |  363 ++++
 src/readfiles/read_SIN_correlator_com.py           |  240 +++
 src/readfiles/read_mat_ries.py                     |  220 +++
 src/tools/__init__.py                              |  102 ++
 src/tools/average.py                               |  344 ++++
 src/tools/background.py                            |  498 ++++++
 src/tools/batchcontrol.py                          |  177 ++
 src/tools/chooseimport.py                          |  250 +++
 src/tools/comment.py                               |   79 +
 src/tools/datarange.py                             |  262 +++
 src/tools/example.py                               |   76 +
 src/tools/globalfit.py                             |  299 ++++
 src/tools/info.py                                  |  315 ++++
 src/tools/overlaycurves.py                         |  383 ++++
 src/tools/parmrange.py                             |  146 ++
 src/tools/plotexport.py                            |   70 +
 src/tools/simulation.py                            |  445 +++++
 src/tools/statistics.py                            |  574 ++++++
 src/tools/trace.py                                 |  103 ++
 src/usermodel.py                                   |  305 ++++
 79 files changed, 18111 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a43b898
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,66 @@
+*.py[cod]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.tox
+nosetests.xml
+
+# Translations
+*.mo
+
+# Mr Developer
+.mr.developer.cfg
+.project
+.pydevproject
+
+# Latex
+*.aux
+*.glo
+*.idx
+*.log
+*.toc
+*.ist
+*.acn
+*.acr
+*.alg
+*.bbl
+*.blg
+*.dvi
+*.glg
+*.gls
+*.ilg
+*.ind
+*.lof
+*.lot
+*.maf
+*.mtc
+*.mtc1
+*.out
+*.synctex.gz
+*.pdf
+*.bak
+
+# ~
+*.py~
+*.md~
diff --git a/ChangeLog.txt b/ChangeLog.txt
new file mode 100644
index 0000000..ae8ca66
--- /dev/null
+++ b/ChangeLog.txt
@@ -0,0 +1,134 @@
+0.8.0-2beta
+- Thanks to Alex Mestiashvili for providing initial setup.py files (@mestia)
+- Bugfixes
+   - Some ConfoCor files were not imported
+   - The cpp was not calculated correctly in case of background correction (#45)
+   - Enabled averaging of single pages (#58)
+- Improvements of the user interface
+   - The menus have been reordered (#47, #50)
+   - The fitting panel has been optimized (#49)
+   - The slider simulation got a reset button (#51)
+   - The Help menu contains documentation and wiki (#56)
+   - Model functions that are somehow redundant have been removed from the menu,
+     but are still supported
+   - The model doc strings were fully converted to unicode
+   - Several text messages were modified for better coherence
+   - The background correction tool is more intuitive
+   - Statistics panel improvements (#43)
+   - Run information is included in the Data set title
+   - The page counter starts at "1" instead of "0" (#44)
+0.8.0
+- Filename/title of each tab now shows up in the notebook (#39)
+- Statistics tool can plot parameters, and page selection with the Overlay
+  tool is possible (#31)
+0.7.9
+- Support for Mac OS X
+- Enhancements:
+   - Export file format (.csv) layout improved
+   - Model function info text in UTF-8
+   - Improved warning message when opening sessions from future versions
+   - New feature lets user set the range for the fitting parameters
+- Bugfixes:
+   - Cleaned minor tracebacks and exceptions created by the frontend
+   - Mac version now works as expected, but .app bundling failed
+   - LaTeX plotting features now support more characters such as "[]{}^"
+0.7.8
+- Enhancements:
+   - Averages can now be calculated from user-selected pages
+   - Pages selected in the Overlay tool are now automatically set
+     for computation of average and for global fitting
+   - Source pages are now displayed in average title
+   - Graph normalization with particle numbers is now possible
+- Bugfixes:
+   - Errors during fitting with weights equal to zero
+   - Overlay tool displayed last curve after all pages have been removed
+   - Global fit did not work with weights
+- Session saving now uses 20-digit accuracy
+- CSV export now uses tab-delimited data for easier Excel import
+- Added version checking for session management
+0.7.7
+- Fixed: Tool windows could not be closed (or moved on MS Windows)
+- Fixed: .csv export failed in some cases where no weights were used
+- Enhancement: The user is now asked before a page is closed
+- Enhancement: Tool "Page Info" and in exported .csv files, variables and
+               values are now separated by a tab stop instead of a "="
+- Fixed: Opening a session with an empty page failed in some cases
+- Fixed: Tool "Statistics" missed to output the column "filename/title"
+         if that key value is empty - replaced empty strings with "NoName"
+- Enhancement: Tool "Overlay" now asks the user to check kept curves
+               instead of showing the curves to be removed
+- Enhancement: Tool "Overlay" now has a "Cancel" button
+0.7.6
+- Improved handling
+   - Tools are now sorted according to a standard work-flow
+   - Renamed "Curve selection" to "Overlay tool" - this is more intuitive
+   - Tools will now stay open or may be opened when there are no open pages (#25)
+   - Filenames and runs are now displayed on each page (also added filename/title tag) (#23)
+   - Notebook: moved close button to each tab to prevent accidental closing of tabs
+- Improved tool "Statistics" (#21)
+   - Fixed the case where "useless" data was produced - we now write "NaN" data
+     and removed the warning message accordingly
+   - Row-wise ordering according to page numbers (#22)
+   - Column-wise ordering is now more intuitive
+     (Fitted parameters with errors first)
+   - Some columns are now checked by default
+   - PyCorrFit remembers checked parameters for a page (not saved in session)
+- Improved tool "Overlay" (#23)
+   - New feature: Overlay shows run number of each file (upon import);
+     the run (or index) of an experimental file is unique to PyCorrFit
+   - Upon import, filenames and runs are displayed
+   - In a session, the filename/title is displayed
+- Web address of PyCorrFit changed
+  from "fcstools.dyndns.org/pycorrfit" to "pycorrfit.craban.de"
+- Minor bugfixes: Batch control, Global fitting, import dialog
+0.7.5
+ - Added model functions to documentation.
+ - Weights from fitting are now exported in .csv files.
+ - Rework of info panel for fitting
+ - Fixed unintuitive behavior of session saving:
+   The fitting parameters were read from the frontend. This could have
+   led to saving false fit metadata.
+ - During fitting, units are now displayed as "human readable" (#17).
+ - Slider simulation now also uses human readable units (#17).
+ - Ensured support for Ubuntu 12.10 and 13.04
+ - Fixed: new line (\n) characters for LaTeX plotting on Windows
+0.7.4
+ - New tool: Colorful curve selection
+ - Import data: Curve selection possible
+ - Average: Crop average according to current page.
+ - Fixed: Page now displays Chi-squared of global fit.
+ - Fixed: Chi-squared and parameters of global fitting are now stored in sessions.
+0.7.3
+ - Issue closed. External weights from averages saved in session (#11).
+ - Solved minor bugs
+ - Added estimation of errors of fit (Issue #12/#14)
+ - Fixed: Some .fcs files containing averages were not imported.
+0.7.2
+ - Bugfix: Issue #10; we now have a goodness-of-fit measure when weighted
+    fitting is performed
+ - Bugfix: Weights for fitting not properly calculated (sqrt(std)).
+ - Bugfix: Batch control IndexError with Info window opened
+ - Tool Statistics: Sort values according to page numbers
+ - Tool global: Added weighted fitting
+ - Residuals: When weighted fitting is used, weighted residuals are plotted
+ - Average: Variances from averaging can be used for weighted fitting
+0.7.1
+ - Feature: Added Python shell
+ - Bugfix: Saving image was not performed using WXAgg
+ - Bugfix: Notebook pages were drag'n'droppable
+ - Update function now works in its own thread
+ - Code cleanup: documentation of model functions
+ - Added program icon
+0.7.0
+ - File import dialog was enhanced (#4, #5 - subsequently #7, #8):
+   - Now there is only one "load data" dialog in the file menu.
+   - The model function is chosen for each type of data that is 
+      to be imported (AC, CC, etc.).
+   - Files that do not contain data are pointed out to the
+     user and the program continues with the other files.
+ - Bugfix: Channel selection window causes crash on file import (#1).
+ - Bugfix: Hidden feature changes fixed parameters during fitting (#2).
+ - Feature: Convert TIR model function parameters lambda and NA to sigma (#3).
+ - Code cleanup: Opening data files is now handled differently internally.
+0.6.9
+ - Initial GitHub commit
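
The weighted-fitting entries above (0.7.2 and 0.7.8) amount to dividing each
residual by the standard deviation of the corresponding data point before
least-squares minimization. Below is a minimal Python sketch of that idea,
with a made-up 2D diffusion model and noise level; it is an illustration
only, not the code in src/leastsquaresfit.py:

    import numpy as np
    from scipy.optimize import leastsq

    def model(tau, n, taud):
        # 2D diffusion autocorrelation: G(tau) = 1 / (n * (1 + tau/taud))
        return 1.0 / (n * (1.0 + tau / taud))

    def residuals(params, tau, data, std):
        # Weighted residuals: dividing by each point's standard deviation
        # turns the sum of squares into a chi-square, so noisy points
        # contribute less to the fit.
        return (data - model(tau, *params)) / std

    rng = np.random.default_rng(0)
    tau = np.logspace(-5, 0, 100)    # lag times in seconds
    std = np.full_like(tau, 0.01)    # assumed per-point standard deviations
    data = model(tau, 4.0, 1e-3) + rng.normal(0.0, std)

    popt, ier = leastsq(residuals, x0=(1.0, 1e-2), args=(tau, data, std))
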
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..b3fdf25
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,5 @@
+include doc-src/*
+include external_model_functions/*
+include README.md
+include ChangeLog.txt
+include PyCorrFit_doc.pdf
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..86dda8a
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,33 @@
+Metadata-Version: 1.0
+Name: pycorrfit
+Version: 0.8.0
+Summary: UNKNOWN
+Home-page: https://github.com/paulmueller/PyCorrFit
+Author: Paul Mueller
+Author-email: paul.mueller@biotec.tu-dresden.de
+License: GPL v2
+Description: ![PyCorrFit](https://raw.github.com/paulmueller/PyCorrFit/master/doc-src/Images/PyCorrFit_logo_dark.png)
+        =========
+        
+        This repository contains the source code of PyCorrFit - a scientific tool for fitting
+        correlation curves on a logarithmic plot.
+        
+        In current biomedical research, fluorescence correlation spectroscopy (FCS) is applied
+        to characterize molecular dynamic processes in vitro and in living cells. Commercial
+        FCS setups only permit data analysis that is limited to a specific instrument by
+        the use of in-house file formats or a finite number of implemented correlation
+        model functions. PyCorrFit is a general-purpose FCS evaluation software that,
+        amongst other formats, supports the established Zeiss ConfoCor3 ~.fcs file format.
+        PyCorrFit comes with several built-in model functions, covering a wide range of
+        applications in standard confocal FCS. In addition, it contains equations dealing
+        with different excitation geometries like total internal reflection (TIR). For more
+        information, visit the official homepage at http://pycorrfit.craban.de.
+        
+        
+        - [Download the latest version](https://github.com/paulmueller/PyCorrFit/releases)  
+        - [Documentation](https://github.com/paulmueller/PyCorrFit/raw/master/PyCorrFit_doc.pdf)
+        - [Run PyCorrFit from source](https://github.com/paulmueller/PyCorrFit/wiki/Running-PyCorrFit-from-source)
+        - [Write model functions](https://github.com/paulmueller/PyCorrFit/wiki/Writing-model-functions)
+        - [Need help?](https://github.com/paulmueller/PyCorrFit/wiki/Creating-a-new-issue)
+        
+Platform: UNKNOWN
diff --git a/PyCorrFit_doc.pdf b/PyCorrFit_doc.pdf
new file mode 100644
index 0000000..ca6d5fa
Binary files /dev/null and b/PyCorrFit_doc.pdf differ
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..cacf31a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,23 @@
+![PyCorrFit](https://raw.github.com/paulmueller/PyCorrFit/master/doc-src/Images/PyCorrFit_logo_dark.png)
+=========
+
+This repository contains the source code of PyCorrFit - a scientific tool for fitting
+correlation curves on a logarithmic plot.
+
+In current biomedical research, fluorescence correlation spectroscopy (FCS) is applied
+to characterize molecular dynamic processes in vitro and in living cells. Commercial
+FCS setups only permit data analysis that is limited to a specific instrument by
+the use of in-house file formats or a finite number of implemented correlation
+model functions. PyCorrFit is a general-purpose FCS evaluation software that,
+amongst other formats, supports the established Zeiss ConfoCor3 ~.fcs file format.
+PyCorrFit comes with several built-in model functions, covering a wide range of
+applications in standard confocal FCS. In addition, it contains equations dealing
+with different excitation geometries like total internal reflection (TIR). For more
+information, visit the official homepage at http://pycorrfit.craban.de.
+
+
+- [Download the latest version](https://github.com/paulmueller/PyCorrFit/releases)  
+- [Documentation](https://github.com/paulmueller/PyCorrFit/raw/master/PyCorrFit_doc.pdf)
+- [Run PyCorrFit from source](https://github.com/paulmueller/PyCorrFit/wiki/Running-PyCorrFit-from-source)
+- [Write model functions](https://github.com/paulmueller/PyCorrFit/wiki/Writing-model-functions)
+- [Need help?](https://github.com/paulmueller/PyCorrFit/wiki/Creating-a-new-issue)
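
The model functions mentioned above are closed-form autocorrelation curves;
for standard confocal FCS with 3D diffusion the curve is
G(tau) = 1/N * (1 + tau/tau_D)^-1 * (1 + tau/(S^2*tau_D))^-1/2,
where N is the particle number, tau_D the diffusion time and S the
axial-to-lateral ratio of the detection volume. A minimal Python sketch of
fitting this model with scipy follows; the data are synthetic and this is
not PyCorrFit's internal API:

    import numpy as np
    from scipy.optimize import curve_fit

    def g_confocal_3d(tau, n, taud, sp):
        # G(tau) = 1/n * 1/(1 + tau/taud) * 1/sqrt(1 + tau/(sp**2 * taud))
        return 1.0 / (n * (1.0 + tau / taud)
                      * np.sqrt(1.0 + tau / (sp**2 * taud)))

    # Synthetic stand-in for a measured correlation curve (values made up).
    rng = np.random.default_rng(0)
    tau = np.logspace(-6, 0, 200)    # lag times in seconds
    g = g_confocal_3d(tau, 5.0, 1e-4, 5.0) + rng.normal(0.0, 0.002, tau.size)

    popt, pcov = curve_fit(g_confocal_3d, tau, g, p0=(1.0, 1e-3, 4.0))
    perr = np.sqrt(np.diag(pcov))    # 1-sigma parameter errors
    print("N, tau_D, S:", popt, "+-", perr)
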
diff --git a/bin/pycorrfit b/bin/pycorrfit
new file mode 100644
index 0000000..07a5d62
--- /dev/null
+++ b/bin/pycorrfit
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+if [ -f "/usr/share/pyshared/pycorrfit/PyCorrFit.py" ]
+then
+    python /usr/share/pyshared/pycorrfit/PyCorrFit.py
+elif [ -f "/usr/local/lib/python2.7/dist-packages/pycorrfit/PyCorrFit.py" ]
+then
+    python /usr/local/lib/python2.7/dist-packages/pycorrfit/PyCorrFit.py
+else
+    echo "Could not find PyCorrFit.py. Please notify the author."
+fi
diff --git a/doc-src/Bibliography.bib b/doc-src/Bibliography.bib
new file mode 100755
index 0000000..59d521c
--- /dev/null
+++ b/doc-src/Bibliography.bib
@@ -0,0 +1,1834 @@
+% This file was created with JabRef 2.5.
+% Encoding: UTF-8
+
+@ARTICLE{Aragon1976,
+  author = {S. R. Aragon and R. Pecora},
+  title = {Fluorescence correlation spectroscopy as a probe of molecular dynamics},
+  journal = {The Journal of Chemical Physics},
+  year = {1976},
+  volume = {64},
+  pages = {1791-1803},
+  number = {4},
+  doi = {10.1063/1.432357},
+  owner = {paul},
+  publisher = {AIP},
+  timestamp = {2012.11.02},
+  url = {http://link.aip.org/link/?JCP/64/1791/1}
+}
+
+@ARTICLE{Ashkin1970,
+  author = {Ashkin, A.},
+  title = {Acceleration and Trapping of Particles by Radiation Pressure},
+  journal = {Physical Review Letters},
+  year = {1970},
+  volume = {24},
+  pages = {156--159},
+  month = {Jan},
+  doi = {10.1103/PhysRevLett.24.156},
+  issue = {4},
+  owner = {paul},
+  publisher = {American Physical Society},
+  timestamp = {2012.11.13},
+  url = {http://link.aps.org/doi/10.1103/PhysRevLett.24.156}
+}
+
+@ARTICLE{Axelrod1984,
+  author = {Axelrod, D and Burghardt, T P and Thompson, N L},
+  title = {Total Internal Reflection Fluorescence},
+  journal = {Annual Review of Biophysics and Biomolecular Structure},
+  year = {1984},
+  volume = {13},
+  pages = {247--268},
+  number = {1},
+  month = jun,
+  booktitle = {Annual Review of Biophysics and Bioengineering},
+  comment = {doi: 10.1146/annurev.bb.13.060184.001335},
+  doi = {10.1146/annurev.bb.13.060184.001335},
+  issn = {0084-6589},
+  owner = {paul},
+  publisher = {Annual Reviews},
+  timestamp = {2012.02.14},
+  url = {http://dx.doi.org/10.1146/annurev.bb.13.060184.001335}
+}
+
+@ARTICLE{Bag2012,
+  author = {Bag, Nirmalya and Sankaran, Jagadish and Paul, Alexandra and Kraut,
+	Rachel S. and Wohland, Thorsten},
+  title = {Calibration and Limits of Camera-Based Fluorescence Correlation Spectroscopy:
+	A Supported Lipid Bilayer Study},
+  journal = {ChemPhysChem},
+  year = {2012},
+  volume = {13},
+  pages = {2784--2794},
+  number = {11},
+  doi = {10.1002/cphc.201200032},
+  issn = {1439-7641},
+  keywords = {fluorescence spectroscopy, membrane, multiplexing, point spread function,
+	total internal reflection},
+  owner = {paul},
+  publisher = {WILEY-VCH Verlag},
+  timestamp = {2012.09.20},
+  url = {http://dx.doi.org/10.1002/cphc.201200032}
+}
+
+@ARTICLE{Bestvater2010,
+  author = {Felix Bestvater and Zahir Seghiri and Moon Sik Kang and Nadine Gr\"{o}ner
+	and Ji Young Lee and Kang-Bin Im and Malte Wachsmuth},
+  title = {EMCCD-based spectrally resolved fluorescence correlation spectroscopy},
+  journal = {Optics Express},
+  year = {2010},
+  volume = {18},
+  pages = {23818--23828},
+  number = {23},
+  month = {Nov},
+  abstract = {We present an implementation of fluorescence correlation spectroscopy
+	with spectrally resolved detection based on a combined commercial
+	confocal laser scanning/fluorescence correlation spectroscopy microscope.
+	We have replaced the conventional detection scheme by a prism-based
+	spectrometer and an electron-multiplying charge-coupled device camera
+	used to record the photons. This allows us to read out more than
+	80,000 full spectra per second with a signal-to-noise ratio and a
+	quantum efficiency high enough to allow single photon counting. We
+	can identify up to four spectrally different quantum dots in vitro
+	and demonstrate that spectrally resolved detection can be used to
+	characterize photophysical properties of fluorophores by measuring
+	the spectral dependence of quantum dot fluorescence emission intermittence.
+	Moreover, we can confirm intracellular cross-correlation results
+	as acquired with a conventional setup and show that spectral flexibility
+	can help to optimize the choice of the detection windows.},
+  doi = {10.1364/OE.18.023818},
+  keywords = {CCD, charge-coupled device; Confocal microscopy; Spectroscopy, fluorescence
+	and luminescence},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.11.07},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-18-23-23818}
+}
+
+@ARTICLE{Blom2009,
+  author = {Blom, Hans and Chmyrov, Andriy and Hassler, Kai and Davis, Lloyd
+	M. and Widengren, Jerker},
+  title = {Triplet-State Investigations of Fluorescent Dyes at Dielectric Interfaces
+	Using Total Internal Reflection Fluorescence Correlation Spectroscopy},
+  journal = {The Journal of Physical Chemistry A},
+  year = {2009},
+  volume = {113},
+  pages = {5554-5566},
+  number = {19},
+  doi = {10.1021/jp8110088},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/jp8110088},
+  owner = {paul},
+  timestamp = {2012.11.02},
+  url = {http://pubs.acs.org/doi/abs/10.1021/jp8110088}
+}
+
+@ARTICLE{Blom2002,
+  author = {Hans Blom and Mathias Johansson and Anna-Sara Hedman and Liselotte
+	Lundberg and Anders Hanning and Sverker H{\aa}rd and Rudolf Rigler},
+  title = {Parallel Fluorescence Detection of Single Biomolecules in Microarrays
+	by a Diffractive-Optical-Designed 2 x 2 Fan-Out Element},
+  journal = {Applied Optics},
+  year = {2002},
+  volume = {41},
+  pages = {3336--3342},
+  number = {16},
+  month = {Jun},
+  abstract = {We have developed a multifocal diffractive-optical fluorescence correlation
+	spectroscopy system for parallel excitation and detection of single
+	tetramethylrhodamine biomolecules in microarrays. Multifocal excitation
+	was made possible through the use of a 2 {\texttimes} 2 fan-out diffractive-optical
+	element with uniform intensity in all foci. Characterization of the
+	2 {\texttimes} 2 fan-out diffractive-optical element shows formation
+	of almost perfect Gaussian foci of submicrometer lateral diameter,
+	as analyzed by thermal motion of tetramethylrhodamine dye molecules
+	in solution. Results of parallel excitation and detection in a high-density
+	microarray of circular wells show single-biomolecule sensitivity
+	in all four foci simultaneously.},
+  doi = {10.1364/AO.41.003336},
+  keywords = {Diffractive optics; Confocal microscopy; Fluorescence microscopy;
+	Fluorescence, laser-induced},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.11.07},
+  url = {http://ao.osa.org/abstract.cfm?URI=ao-41-16-3336}
+}
+
+@ARTICLE{Brinkmeier1999,
+  author = {M. Brinkmeier and K. Dörre and J. Stephan and M. Eigen},
+  title = {Two-beam cross-correlation:  a method to characterize transport phenomena
+	in micrometer-sized structures.},
+  journal = {Analytical Chemistry},
+  year = {1999},
+  volume = {71},
+  pages = {609--616},
+  number = {3},
+  month = {Feb},
+  abstract = {To determine flow properties, namely, the velocity and angle of the
+	flow in microstructured channels, an experimental realization based
+	on fluorescence correlation spectroscopy is described. For this purpose,
+	two micrometer-sized spatially separated volume elements have been
+	created. The cross-correlation signal from these has been recorded
+	and evaluated mathematically. In addition to previous results, two-beam
+	cross-correlation allows for fast and easy determination of even
+	small (down to 200 μm/s) flow velocities, as well as simultaneous
+	measurement of diffusion properties of single dye molecules within
+	a rather short detection time of 5-100 s and an error rate of less
+	than 20\%. The spatial flow resolution is around 1-2 μm, limited
+	by the diameter of the volume element. Furthermore, vectorial flow
+	data can be obtained and evaluated. A discussion of the theoretical
+	background and an experimental verification of the theoretical results
+	is performed. The feasibility of fast and easy data processing is
+	shown if the flow time is the only desired information. Possible
+	applications of this precise and simple method are the determination
+	of transportation effects within artificial microstructures for CE
+	and HPLC, fast chemical kinetics, and high-throughput screening.},
+  doi = {10.1021/ac980820i},
+  institution = {Max-Planck-Institut für biophysikalische Chemie, Am Fassberg, D-37077
+	Göttingen, Germany.},
+  language = {eng},
+  medline-pst = {ppublish},
+  owner = {paul},
+  pmid = {21662718},
+  timestamp = {2012.11.07},
+  url = {http://dx.doi.org/10.1021/ac980820i}
+}
+
+@ARTICLE{Brutzer2012,
+  author = {Brutzer, Hergen and Schwarz, Friedrich W. and Seidel, Ralf},
+  title = {Scanning Evanescent Fields Using a pointlike Light Source and a Nanomechanical
+	DNA Gear},
+  journal = {Nano Letters},
+  year = {2012},
+  volume = {12},
+  pages = {473-478},
+  number = {1},
+  doi = {10.1021/nl203876w},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/nl203876w},
+  owner = {paul},
+  timestamp = {2012.08.09},
+  url = {http://pubs.acs.org/doi/abs/10.1021/nl203876w}
+}
+
+@ARTICLE{Buchholz2012,
+  author = {Jan Buchholz and Jan Wolfgang Krieger and G\'{a}bor Mocs\'{a}r and
+	Bal\'{a}zs Kreith and Edoardo Charbon and Gy\"{o}rgy V\'{a}mosi and
+	Udo Kebschull and J\"{o}rg Langowski},
+  title = {FPGA implementation of a 32x32 autocorrelator array for analysis
+	of fast image series},
+  journal = {Optics Express},
+  year = {2012},
+  volume = {20},
+  pages = {17767--17782},
+  number = {16},
+  month = {Jul},
+  abstract = {With the evolving technology in CMOS integration, new classes of 2D-imaging
+	detectors have recently become available. In particular, single photon
+	avalanche diode (SPAD) arrays allow detection of single photons at
+	high acquisition rates (≥ 100kfps), which is about two
+	orders of magnitude higher than with currently available cameras.
+	Here we demonstrate the use of a SPAD array for imaging fluorescence
+	correlation spectroscopy (imFCS), a tool to create 2D maps of the
+	dynamics of fluorescent molecules inside living cells. Time-dependent
+	fluorescence fluctuations, due to fluorophores entering and leaving
+	the observed pixels, are evaluated by means of autocorrelation analysis.
+	The multi-τ correlation algorithm is an appropriate choice,
+	as it does not rely on the full data set to be held in memory. Thus,
+	this algorithm can be efficiently implemented in custom logic. We
+	describe a new implementation for massively parallel multi-τ
+	correlation hardware. Our current implementation can calculate 1024
+	correlation functions at a resolution of 10 μs in real-time
+	and therefore correlate real-time image streams from high speed single
+	photon cameras with thousands of pixels.},
+  doi = {10.1364/OE.20.017767},
+  keywords = {Detectors; Arrays; Cameras; Correlators ; Fluorescence microscopy;
+	Three-dimensional microscopy; Spectroscopy, fluorescence and luminescence;
+	Avalanche photodiodes (APDs)},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.10.24},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-20-16-17767}
+}
+
+@PHDTHESIS{Burkhardt2010,
+  author = {Burkhardt, Markus},
+  title = {Electron multiplying CCD – based detection in Fluorescence Correlation
+	Spectroscopy and measurements in living zebrafish embryos},
+  school = {Biophysics, BIOTEC, Technische Universität Dresden, Tatzberg 47–51,
+	01307 Dresden, Germany},
+  year = {2010},
+  note = {\url{http://nbn-resolving.de/urn:nbn:de:bsz:14-qucosa-61021}},
+  owner = {paul},
+  timestamp = {2012.10.24}
+}
+
+@ARTICLE{Burkhardt:06,
+  author = {Markus Burkhardt and Petra Schwille},
+  title = {Electron multiplying CCD based detection for spatially resolved fluorescence
+	correlation spectroscopy},
+  journal = {Optics Express},
+  year = {2006},
+  volume = {14},
+  pages = {5013--5020},
+  number = {12},
+  month = {Jun},
+  abstract = {Fluorescence correlation spectroscopy (FCS) is carried out with an
+	electron multiplying CCD (EMCCD). This new strategy is compared to
+	standard detection by an avalanche photo diode showing good agreement
+	with respect to the resulting autocorrelation curves. Applying different
+	readout modes, a time resolution of 20 {\textmu}s can be achieved,
+	which is sufficient to resolve the diffusion of free dye in solution.
+	The advantages of implementing EMCCD cameras in wide-field ultra
+	low light imaging, as well as in multi-spot confocal laser scanning
+	microscopy, can consequently also be exploited for spatially resolved
+	FCS. First proof-of-principle FCS measurements with two excitation
+	volumes demonstrate the advantage of the flexible CCD area detection.},
+  doi = {10.1364/OE.14.005013},
+  keywords = {CCD, charge-coupled device; Medical optics and biotechnology; Fluorescence,
+	laser-induced},
+  publisher = {OSA},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-14-12-5013}
+}
+
+@ARTICLE{Chiantia2006,
+  author = {Chiantia, Salvatore and Ries, Jonas and Kahya, Nicoletta and Schwille,
+	Petra},
+  title = {Combined AFM and Two-Focus SFCS Study of Raft-Exhibiting Model Membranes},
+  journal = {ChemPhysChem},
+  year = {2006},
+  volume = {7},
+  pages = {2409--2418},
+  number = {11},
+  doi = {10.1002/cphc.200600464},
+  issn = {1439-7641},
+  keywords = {fluorescent probes, force measurements, membranes, sphingolipids},
+  owner = {paul},
+  publisher = {WILEY-VCH Verlag},
+  timestamp = {2012.10.24},
+  url = {http://dx.doi.org/10.1002/cphc.200600464}
+}
+
+@ARTICLE{Dertinger2007,
+  author = {Dertinger, Thomas and Pacheco, Victor and von der Hocht, Iris and
+	Hartmann, Rudolf and Gregor, Ingo and Enderlein, Jörg},
+  title = {Two-Focus Fluorescence Correlation Spectroscopy: A New Tool for Accurate
+	and Absolute Diffusion Measurements},
+  journal = {ChemPhysChem},
+  year = {2007},
+  volume = {8},
+  pages = {433--443},
+  number = {3},
+  doi = {10.1002/cphc.200600638},
+  issn = {1439-7641},
+  keywords = {diffusion coefficients, fluorescence spectroscopy, fluorescent dyes,
+	time-resolved spectroscopy},
+  owner = {paul},
+  publisher = {WILEY-VCH Verlag},
+  timestamp = {2012.02.14},
+  url = {http://dx.doi.org/10.1002/cphc.200600638}
+}
+
+@ARTICLE{Einstein1905,
+  author = {Einstein, A.},
+  title = {Über die von der molekularkinetischen Theorie der Wärme geforderte
+	Bewegung von in ruhenden Flüssigkeiten suspendierten Teilchen},
+  journal = {Annalen der Physik},
+  year = {1905},
+  volume = {322},
+  pages = {549--560},
+  number = {8},
+  doi = {10.1002/andp.19053220806},
+  issn = {1521-3889},
+  owner = {paul},
+  publisher = {WILEY-VCH Verlag},
+  timestamp = {2012.11.02},
+  url = {http://dx.doi.org/10.1002/andp.19053220806}
+}
+
+@ARTICLE{Elson1974,
+  author = {Elson, Elliot L. and Magde, Douglas},
+  title = {Fluorescence correlation spectroscopy. I. Conceptual basis and theory},
+  journal = {Biopolymers},
+  year = {1974},
+  volume = {13},
+  pages = {1--27},
+  number = {1},
+  doi = {10.1002/bip.1974.360130102},
+  issn = {1097-0282},
+  owner = {paul},
+  publisher = {Wiley Subscription Services, Inc., A Wiley Company},
+  timestamp = {2012.09.24},
+  url = {http://dx.doi.org/10.1002/bip.1974.360130102}
+}
+
+@ARTICLE{Enderlein1999,
+  author = {J\"{o}rg Enderlein and Thomas Ruckstuhl and Stefan Seeger},
+  title = {Highly Efficient Optical Detection of Surface-Generated Fluorescence},
+  journal = {Applied Optics},
+  year = {1999},
+  volume = {38},
+  pages = {724--732},
+  number = {4},
+  month = {Feb},
+  abstract = {We present a theoretical study of a new highly efficient system for
+	optical light collection, designed for ultrasensitive fluorescence
+	detection of surface-bound molecules. The main core of the system
+	is a paraboloid glass segment acting as a mirror for collecting the
+	fluorescence. A special feature of the system is its ability to sample
+	not only fluorescence that is emitted below the angle of total internal
+	reflection (the critical angle) but also particularly the light above
+	the critical angle. As shown, this is especially advantageous for
+	collecting the fluorescence of surface-bound molecules. A comparison
+	is made with conventional high-aperture microscope objectives. Furthermore,
+	it is shown that the system allows not only for highly efficient
+	light collection but also for confocal imaging of the detection region,
+	which is of great importance for rejecting scattered light in potential
+	applications such as the detection of only a few molecules.},
+  doi = {10.1364/AO.38.000724},
+  keywords = {Geometric optical design; Microscopy; Detection; Fluorescence microscopy},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.11.02},
+  url = {http://ao.osa.org/abstract.cfm?URI=ao-38-4-724}
+}
+
+@ARTICLE{Hansen1998,
+  author = {Hansen, Richard L and Harris, Joel M},
+  title = {Measuring Reversible Adsorption Kinetics of Small Molecules at Solid/Liquid
+	Interfaces by Total Internal Reflection Fluorescence Correlation
+	Spectroscopy},
+  journal = {Analytical Chemistry},
+  year = {1998},
+  volume = {70},
+  pages = {4247--4256},
+  number = {20},
+  doi = {10.1021/ac980925l},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://pubs.acs.org/doi/abs/10.1021/ac980925l}
+}
+
+@ARTICLE{Hashmi2007,
+  author = {Sara M. Hashmi and Michael Loewenberg and Eric R. Dufresne},
+  title = {Spatially extended FCS for visualizing and quantifying high-speed
+	multiphase flows in microchannels},
+  journal = {Optics Express},
+  year = {2007},
+  volume = {15},
+  pages = {6528--6533},
+  number = {10},
+  month = {May},
+  abstract = {We report the development of spatially extended fluorescence correlation
+	spectroscopy for visualizing and quantifying multiphase flows in
+	microchannels. We employ simultaneous detection with a high-speed
+	camera across the width of the channel, enabling investigation of
+	the dynamics of the flow at short time scales. We take advantage
+	of the flow to scan the sample past the fixed illumination, capturing
+	frames up to 100 KHz. At these rates, we can resolve the motion of
+	sub-micron particles at velocities up to the order of 1 cm/s. We
+	visualize flows with kymographs and quantify velocity profiles by
+	cross-correlations within the focal volume. We demonstrate the efficacy
+	of our approach by measuring the depth-resolved velocity profile
+	of suspensions of sub-micron diameter silica particles flowing up
+	to 1.5 mm/s.},
+  doi = {10.1364/OE.15.006528},
+  keywords = {Velocimetry; Flow diagnostics; Fluorescence, laser-induced},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.11.07},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-15-10-6528}
+}
+
+@ARTICLE{Hassler2005,
+  author = {Hassler, Kai and Anhut, Tiemo and Rigler, Rudolf and G\"{o}sch, Michael
+	and Lasser, Theo},
+  title = {High Count Rates with Total Internal Reflection Fluorescence Correlation
+	Spectroscopy},
+  journal = {Biophysical Journal},
+  year = {2005},
+  volume = {88},
+  pages = {L01--L03},
+  number = {1},
+  month = jan,
+  doi = {10.1529/biophysj.104.053884},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(05)73079-4 DOI - 10.1529/biophysj.104.053884},
+  timestamp = {2012.05.02},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349505730794}
+}
+
+@ARTICLE{Hassler2005a,
+  author = {Kai Hassler and Marcel Leutenegger and Per Rigler and Ramachandra
+	Rao and Rudolf Rigler and Michael G\"{o}sch and Theo Lasser},
+  title = {Total internal reflection fluorescence correlation spectroscopy (TIR-FCS)
+	with low background and high count-rate per molecule},
+  journal = {Optics Express},
+  year = {2005},
+  volume = {13},
+  pages = {7415--7423},
+  number = {19},
+  month = {Sep},
+  abstract = {We designed a fluorescence correlation spectroscopy (FCS) system for
+	measurements on surfaces. The system consists of an objective-type
+	total internal reflection fluorescence (TIRF) microscopy setup, adapted
+	to measure FCS. Here, the fluorescence exciting evanescent wave is
+	generated by epi-illumination through the periphery of a high NA
+	oil-immersion objective. The main advantages with respect to conventional
+	FCS systems are an improvement in terms of counts per molecule (cpm)
+	and a high signal to background ratio. This is demonstrated by investigating
+	diffusion as well as binding and release of single molecules on a
+	glass surface. Furthermore, the size and shape of the molecule detection
+	efficiency (MDE) function was calculated, using a wave-vectorial
+	approach and taking into account the influence of the dielectric
+	interface on the emission properties of fluorophores.},
+  doi = {10.1364/OPEX.13.007415},
+  keywords = {Spectroscopy, fluorescence and luminescence; Spectroscopy, surface;
+	Fluorescence, laser-induced},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.09.21},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-13-19-7415}
+}
+
+@OTHER{HaunertG,
+  author = {Gerhard Haunert},
+  howpublished = {Personal communication},
+  note = {Acal BFi Germany GmbH},
+  owner = {paul},
+  timestamp = {2012.10.11},
+  year = {2012}
+}
+
+@ARTICLE{Haupts1998,
+  author = {Haupts, Ulrich and Maiti, Sudipta and Schwille, Petra and Webb, Watt
+	W.},
+  title = {Dynamics of fluorescence fluctuations in green fluorescent protein
+	observed by fluorescence correlation spectroscopy},
+  journal = {Proceedings of the National Academy of Sciences},
+  year = {1998},
+  volume = {95},
+  pages = {13573-13578},
+  number = {23},
+  abstract = {We have investigated the pH dependence of the dynamics of conformational
+	fluctuations of green fluorescent protein mutants EGFP (F64L/S65T)
+	and GFP-S65T in small ensembles of molecules in solution by using
+	fluorescence correlation spectroscopy (FCS). FCS utilizes time-resolved
+	measurements of fluctuations in the molecular fluorescence emission
+	for determination of the intrinsic dynamics and thermodynamics of
+	all processes that affect the fluorescence. Fluorescence excitation
+	of a bulk solution of EGFP decreases to zero at low pH (pKa = 5.8)
+	paralleled by a decrease of the absorption at 488 nm and an increase
+	at 400 nm. Protonation of the hydroxyl group of Tyr-66, which is
+	part of the chromophore, induces these changes. When FCS is used
+	the fluctuations in the protonation state of the chromophore are
+	time resolved. The autocorrelation function of fluorescence emission
+	shows contributions from two chemical relaxation processes as well
+	as diffusional concentration fluctuations. The time constant of the
+	fast, pH-dependent chemical process decreases with pH from 300 μs
+	at pH 7 to 45 μs at pH 5, while the time-average fraction of molecules
+	in a nonfluorescent state increases to 80% in the same range. A second,
+	pH-independent, process with a time constant of 340 μs and an associated
+	fraction of 13% nonfluorescent molecules is observed between pH 8
+	and 11, possibly representing an internal proton transfer process
+	and associated conformational rearrangements. The FCS data provide
+	direct measures of the dynamics and the equilibrium properties of
+	the protonation processes. Thus FCS is a convenient, intrinsically
+	calibrated method for pH measurements in subfemtoliter volumes with
+	nanomolar concentrations of EGFP.},
+  doi = {10.1073/pnas.95.23.13573},
+  eprint = {http://www.pnas.org/content/95/23/13573.full.pdf+html},
+  owner = {paul},
+  timestamp = {2012.11.01},
+  url = {http://www.pnas.org/content/95/23/13573.abstract}
+}
+
+@ARTICLE{Haustein2007,
+  author = {Haustein, Elke and Schwille, Petra},
+  title = {Fluorescence Correlation Spectroscopy: Novel Variations of an Established
+	Technique},
+  journal = {Annual Review of Biophysics and Biomolecular Structure},
+  year = {2007},
+  volume = {36},
+  pages = {151-169},
+  number = {1},
+  doi = {10.1146/annurev.biophys.36.040306.132612},
+  eprint = {http://www.annualreviews.org/doi/pdf/10.1146/annurev.biophys.36.040306.132612},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://www.annualreviews.org/doi/abs/10.1146/annurev.biophys.36.040306.132612}
+}
+
+@ARTICLE{Helmers2003,
+  author = {Heinz Helmers and Markus Schellenberg},
+  title = {CMOS vs. CCD sensors in speckle interferometry},
+  journal = {Optics \& Laser Technology},
+  year = {2003},
+  volume = {35},
+  pages = {587 - 595},
+  number = {8},
+  doi = {10.1016/S0030-3992(03)00078-1},
+  issn = {0030-3992},
+  keywords = {CCD sensors},
+  owner = {paul},
+  timestamp = {2012.10.06},
+  url = {http://www.sciencedirect.com/science/article/pii/S0030399203000781}
+}
+
+@ARTICLE{Holekamp2008,
+  author = {Terrence F. Holekamp and Diwakar Turaga and Timothy E. Holy},
+  title = {Fast Three-Dimensional Fluorescence Imaging of Activity in Neural
+	Populations by Objective-Coupled Planar Illumination Microscopy},
+  journal = {Neuron},
+  year = {2008},
+  volume = {57},
+  pages = {661 - 672},
+  number = {5},
+  doi = {10.1016/j.neuron.2008.01.011},
+  issn = {0896-6273},
+  keywords = {SYSBIO},
+  owner = {paul},
+  timestamp = {2012.11.13},
+  url = {http://www.sciencedirect.com/science/article/pii/S0896627308000445}
+}
+
+@ARTICLE{Humpolickova2006,
+  author = {Jana Humpol\'{i}\v{c}kov\'{a} and Ellen Gielen and Ale\v{s} Benda
+	and Veronika Fagulova and Jo Vercammen and Martin vandeVen and Martin
+	Hof and Marcel Ameloot and Yves Engelborghs},
+  title = {Probing Diffusion Laws within Cellular Membranes by Z-Scan Fluorescence
+	Correlation Spectroscopy},
+  journal = {Biophysical Journal},
+  year = {2006},
+  volume = {91},
+  pages = {L23 - L25},
+  number = {3},
+  doi = {10.1529/biophysj.106.089474},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.10.25},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349506717878}
+}
+
+@ARTICLE{Jin2004,
+  author = {Jin, S. and Huang, P. and Park, J. and Yoo, J. Y. and Breuer, K.
+	S.},
+  title = {Near-surface velocimetry using evanescent wave illumination},
+  journal = {Experiments in Fluids},
+  year = {2004},
+  volume = {37},
+  pages = {825-833},
+  affiliation = {School of Mechanical and Aerospace Engineering Seoul National University
+	Seoul 151-742 Korea},
+  doi = {10.1007/s00348-004-0870-7},
+  issn = {0723-4864},
+  issue = {6},
+  keyword = {Technik},
+  owner = {paul},
+  publisher = {Springer Berlin / Heidelberg},
+  timestamp = {2012.02.14},
+  url = {http://dx.doi.org/10.1007/s00348-004-0870-7}
+}
+
+@ARTICLE{Kannan2006,
+  author = {Kannan, Balakrishnan and Har, Jia Yi and Liu, Ping and Maruyama,
+	Ichiro and Ding, Jeak Ling and Wohland, Thorsten},
+  title = {Electron Multiplying Charge-Coupled Device Camera Based Fluorescence
+	Correlation Spectroscopy},
+  journal = {Analytical Chemistry},
+  year = {2006},
+  volume = {78},
+  pages = {3444-3451},
+  number = {10},
+  doi = {10.1021/ac0600959},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/ac0600959},
+  owner = {paul},
+  timestamp = {2012.11.07},
+  url = {http://pubs.acs.org/doi/abs/10.1021/ac0600959}
+}
+
+@INCOLLECTION{Kohl2005,
+  author = {Kohl, Tobias and Schwille, Petra},
+  title = {Fluorescence Correlation Spectroscopy with Autofluorescent Proteins},
+  booktitle = {Microscopy Techniques},
+  publisher = {Springer Berlin / Heidelberg},
+  year = {2005},
+  editor = {Rietdorf, Jens},
+  volume = {95},
+  series = {Advances in Biochemical Engineering/Biotechnology},
+  pages = {1316-1317},
+  affiliation = {Pastor-Sander-Bogen 92 37083 Göttingen Germany},
+  doi = {10.1007/b102212},
+  isbn = {978-3-540-23698-6},
+  keyword = {Chemistry and Materials Science},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://dx.doi.org/10.1007/b102212}
+}
+
+@ARTICLE{Korson1969,
+  author = {Korson, Lawrence and Drost-Hansen, Walter and Millero, Frank J.},
+  title = {Viscosity of water at various temperatures},
+  journal = {The Journal of Physical Chemistry},
+  year = {1969},
+  volume = {73},
+  pages = {34-39},
+  number = {1},
+  doi = {10.1021/j100721a006},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/j100721a006},
+  owner = {paul},
+  timestamp = {2012.10.29},
+  url = {http://pubs.acs.org/doi/abs/10.1021/j100721a006}
+}
+
+@BOOK{LandauLifshitsStatPhys,
+  title = {{Statistical Physics, Third Edition, Part 1: Volume 5 (Course of
+	Theoretical Physics, Volume 5)}},
+  publisher = {Butterworth-Heinemann},
+  year = {1980},
+  author = {Landau, L. D. and Lifshitz, E. M.},
+  edition = {3},
+  month = jan,
+  abstract = {{A lucid presentation of statistical physics and thermodynamics which
+	develops from the general principles to give a large number of applications
+	of the theory.}},
+  citeulike-article-id = {1284487},
+  citeulike-linkout-0 = {http://www.amazon.ca/exec/obidos/redirect?tag=citeulike09-20\&path=ASIN/0750633727},
+  citeulike-linkout-1 = {http://www.amazon.de/exec/obidos/redirect?tag=citeulike01-21\&path=ASIN/0750633727},
+  citeulike-linkout-2 = {http://www.amazon.fr/exec/obidos/redirect?tag=citeulike06-21\&path=ASIN/0750633727},
+  citeulike-linkout-3 = {http://www.amazon.jp/exec/obidos/ASIN/0750633727},
+  citeulike-linkout-4 = {http://www.amazon.co.uk/exec/obidos/ASIN/0750633727/citeulike00-21},
+  citeulike-linkout-5 = {http://www.amazon.com/exec/obidos/redirect?tag=citeulike07-20\&path=ASIN/0750633727},
+  citeulike-linkout-6 = {http://www.worldcat.org/isbn/0750633727},
+  citeulike-linkout-7 = {http://books.google.com/books?vid=ISBN0750633727},
+  citeulike-linkout-8 = {http://www.amazon.com/gp/search?keywords=0750633727\&index=books\&linkCode=qs},
+  citeulike-linkout-9 = {http://www.librarything.com/isbn/0750633727},
+  day = {15},
+  howpublished = {Paperback},
+  isbn = {0750633727},
+  keywords = {fermi\_statistics, statistical\_physics},
+  owner = {paul},
+  posted-at = {2011-03-03 11:38:41},
+  priority = {2},
+  timestamp = {2012.02.03},
+  url = {http://www.worldcat.org/isbn/0750633727}
+}
+
+@ARTICLE{Leutenegger2012,
+  author = {Marcel Leutenegger and Christian Ringemann and Theo Lasser and Stefan
+	W. Hell and Christian Eggeling},
+  title = {Fluorescence correlation spectroscopy with a total internal reflection
+	fluorescence STED microscope (TIRF-STED-FCS)},
+  journal = {Optics Express},
+  year = {2012},
+  volume = {20},
+  pages = {5243--5263},
+  number = {5},
+  month = {Feb},
+  abstract = {We characterize a novel fluorescence microscope which combines the
+	high spatial discrimination of a total internal reflection epi-fluorescence
+	(epi-TIRF) microscope with that of stimulated emission depletion
+	(STED) nanoscopy. This combination of high axial confinement and
+	dynamic-active lateral spatial discrimination of the detected fluorescence
+	emission promises imaging and spectroscopy of the structure and function
+	of cell membranes at the macro-molecular scale. Following a full
+	theoretical description of the sampling volume and the recording
+	of images of fluorescent beads, we exemplify the performance and
+	limitations of the TIRF-STED nanoscope with particular attention
+	to the polarization state of the laser excitation light. We demonstrate
+	fluorescence correlation spectroscopy (FCS) with the TIRF-STED nanoscope
+	by observing the diffusion of dye molecules in aqueous solutions
+	and of fluorescent lipid analogs in supported lipid bilayers in the
+	presence of background signal. The nanoscope reduced the out-of-focus
+	background signal. A lateral resolution down to 40--50 nm was attained
+	which was ultimately limited by the low lateral signal-to-background
+	ratio inherent to the confocal epi-TIRF scheme. Together with the
+	estimated axial confinement of about 55 nm, our TIRF-STED nanoscope
+	achieved an almost isotropic and less than 1 attoliter small all-optically
+	induced measurement volume.},
+  doi = {10.1364/OE.20.005243},
+  keywords = {Diffraction; Fluorescence microscopy; Fluorescence},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.09.21},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-20-5-5243}
+}
+
+@ARTICLE{Lieto2003a,
+  author = {Lieto, Alena M. and Cush, Randall C. and Thompson, Nancy L.},
+  title = {Ligand-Receptor Kinetics Measured by Total Internal Reflection with
+	Fluorescence Correlation Spectroscopy},
+  journal = {Biophysical Journal},
+  year = {2003},
+  volume = {85},
+  pages = {3294--3302},
+  number = {5},
+  month = nov,
+  doi = {10.1016/S0006-3495(03)74748-1},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(03)74748-1 DOI - 10.1016/S0006-3495(03)74748-1},
+  timestamp = {2012.09.21},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349503747481}
+}
+
+@ARTICLE{Lieto2003,
+  author = {Lieto, Alena M. and Lagerholm, B. Christoffer and Thompson, Nancy
+	L.},
+  title = {Lateral Diffusion from Ligand Dissociation and Rebinding at Surfaces†},
+  journal = {Langmuir},
+  year = {2003},
+  volume = {19},
+  pages = {1782-1787},
+  number = {5},
+  doi = {10.1021/la0261601},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/la0261601},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://pubs.acs.org/doi/abs/10.1021/la0261601}
+}
+
+@ARTICLE{Lieto2004,
+  author = {Alena M. Lieto and Nancy L. Thompson},
+  title = {Total Internal Reflection with Fluorescence Correlation Spectroscopy:
+	Nonfluorescent Competitors},
+  journal = {Biophysical Journal},
+  year = {2004},
+  volume = {87},
+  pages = {1268 - 1278},
+  number = {2},
+  doi = {10.1529/biophysj.103.035030},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349504736061}
+}
+
+@ARTICLE{Magde1972,
+  author = {Magde, Douglas and Elson, Elliot and Webb, W. W.},
+  title = {Thermodynamic Fluctuations in a Reacting System - Measurement by
+	Fluorescence Correlation Spectroscopy},
+  journal = {Physical Review Letters},
+  year = {1972},
+  volume = {29},
+  pages = {705--708},
+  month = {Sep},
+  doi = {10.1103/PhysRevLett.29.705},
+  issue = {11},
+  owner = {paul},
+  publisher = {American Physical Society},
+  timestamp = {2012.11.01},
+  url = {http://link.aps.org/doi/10.1103/PhysRevLett.29.705}
+}
+
+@ARTICLE{Magde1974,
+  author = {Magde, Douglas and Elson, Elliot L. and Webb, Watt W.},
+  title = {Fluorescence correlation spectroscopy. II. An experimental realization},
+  journal = {Biopolymers},
+  year = {1974},
+  volume = {13},
+  pages = {29--61},
+  number = {1},
+  doi = {10.1002/bip.1974.360130103},
+  issn = {1097-0282},
+  owner = {paul},
+  publisher = {Wiley Subscription Services, Inc., A Wiley Company},
+  timestamp = {2012.09.21},
+  url = {http://dx.doi.org/10.1002/bip.1974.360130103}
+}
+
+@ARTICLE{Nitsche2004,
+  author = {Johannes M. Nitsche and Hou-Chien Chang and Paul A. Weber and Bruce
+	J. Nicholson},
+  title = {A Transient Diffusion Model Yields Unitary Gap Junctional Permeabilities
+	from Images of Cell-to-Cell Fluorescent Dye Transfer Between Xenopus
+	Oocytes},
+  journal = {Biophysical Journal},
+  year = {2004},
+  volume = {86},
+  pages = {2058 - 2077},
+  number = {4},
+  doi = {10.1016/S0006-3495(04)74267-8},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.11.08},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349504742678}
+}
+
+@ARTICLE{Ohsugi2009,
+  author = {Ohsugi, Yu and Kinjo, Masataka},
+  title = {Multipoint fluorescence correlation spectroscopy with total internal
+	reflection fluorescence microscope},
+  journal = {Journal of Biomedical Optics},
+  year = {2009},
+  volume = {14},
+  pages = {014030-014030-4},
+  number = {1},
+  doi = {10.1117/1.3080723},
+  owner = {paul},
+  timestamp = {2012.11.12},
+  url = {http://dx.doi.org/10.1117/1.3080723}
+}
+
+@ARTICLE{Ohsugi2006,
+  author = {Ohsugi, Yu and Saito, Kenta and Tamura, Mamoru and Kinjo, Masataka},
+  title = {Lateral mobility of membrane-binding proteins in living cells measured
+	by total internal reflection fluorescence correlation spectroscopy.},
+  journal = {Biophysical Journal},
+  year = {2006},
+  volume = {91},
+  pages = {3456--3464},
+  number = {9},
+  doi = {10.1529/biophysj.105.074625},
+  owner = {paul},
+  publisher = {Biophysical Society},
+  timestamp = {2012.02.14},
+  url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1614500&tool=pmcentrez&rendertype=abstract}
+}
+
+@ARTICLE{Palmer1987,
+  author = {A. G. Palmer and N. L. Thompson},
+  title = {Theory of sample translation in fluorescence correlation spectroscopy.},
+  journal = {Biophysical Journal},
+  year = {1987},
+  volume = {51},
+  pages = {339--343},
+  number = {2},
+  month = {Feb},
+  abstract = {New applications of the technique of fluorescence correlation spectroscopy
+	(FCS) require lateral translation of the sample through a focused
+	laser beam (Peterson, N.O., D.C. Johnson, and M.J. Schlesinger, 1986,
+	Biophys. J., 49:817-820). Here, the effect of sample translation
+	on the shape of the FCS autocorrelation function is examined in general.
+	It is found that if the lateral diffusion coefficients of the fluorescent
+	species obey certain conditions, then the FCS autocorrelation function
+	is a simple product of one function that depends only on transport
+	coefficients and another function that depends only on the rate constants
+	of chemical reactions that occur in the sample. This simple form
+	should allow manageable data analyses in new FCS experiments that
+	involve sample translation.},
+  doi = {10.1016/S0006-3495(87)83340-4},
+  keywords = {Kinetics; Lasers; Mathematics; Models, Theoretical; Spectrometry,
+	Fluorescence, methods},
+  language = {eng},
+  medline-pst = {ppublish},
+  owner = {paul},
+  pii = {S0006-3495(87)83340-4},
+  pmid = {3828464},
+  timestamp = {2012.11.02},
+  url = {http://dx.doi.org/10.1016/S0006-3495(87)83340-4}
+}
+
+@ARTICLE{Pero2006-06,
+  author = {Pero, JK and Haas, EM and Thompson, NL},
+  title = {Size dependence of protein diffusion very close to membrane surfaces:
+	measurement by total internal reflection with fluorescence correlation
+	spectroscopy.},
+  journal = {The Journal of Physical Chemistry. B},
+  year = {2006},
+  volume = {110},
+  pages = {10910-8},
+  number = {5},
+  doi = {10.1021/jp056990y},
+  issn = {1520-6106},
+  owner = {paul},
+  timestamp = {2012.09.21}
+}
+
+@ARTICLE{Petrasek2008,
+  author = {Petr\'{a}\v{s}ek, Zden\v{e}k and Schwille, Petra},
+  title = {Precise Measurement of Diffusion Coefficients using Scanning Fluorescence
+	Correlation Spectroscopy},
+  journal = {Biophysical Journal},
+  year = {2008},
+  volume = {94},
+  pages = {1437--1448},
+  number = {4},
+  month = feb,
+  doi = {10.1529/biophysj.107.108811},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(08)70660-X DOI - 10.1529/biophysj.107.108811},
+  timestamp = {2012.05.20},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S000634950870660X}
+}
+
+@INCOLLECTION{Petrov:2008,
+  author = {Petrov, E. P. and Schwille, P.},
+  title = {State of the Art and Novel Trends in Fluorescence Correlation Spectroscopy},
+  booktitle = {Standardization and Quality Assurance in Fluorescence Measurements
+	II},
+  publisher = {Springer Berlin Heidelberg},
+  year = {2008},
+  editor = {Resch-Genger, Ute},
+  volume = {6},
+  series = {Springer Series on Fluorescence},
+  pages = {145--197},
+  affiliation = {Biophysics, BIOTEC, Technische Universität Dresden, Tatzberg 47–51,
+	01307 Dresden, Germany},
+  doi = {10.1007/4243_2008_032},
+  isbn = {978-3-540-70571-0},
+  keyword = {Chemistry},
+  url = {http://dx.doi.org/10.1007/4243_2008_032}
+}
+
+@ARTICLE{Qian1991,
+  author = {Hong Qian and Elliot L. Elson},
+  title = {Analysis of confocal laser-microscope optics for 3-D fluorescence
+	correlation spectroscopy},
+  journal = {Applied Optics},
+  year = {1991},
+  volume = {30},
+  pages = {1185--1195},
+  number = {10},
+  month = apr,
+  abstract = {Quantitative fluorescence correlation spectroscopy (FCS) and fluorescence
+	photobleaching recovery (FPR) measurements in bulk solution require
+	a well characterized confocal laser microscope optical system. The
+	introduction of a characteristic function, the collection efficiency
+	function (CEF), provides a quantitative theoretical analysis of this
+	system, which yields an interpretation of the FCS and FPR measurements
+	in three dimensions. We demonstrate that when the proper field diaphragm
+	is introduced, the 3-D FCS measurements can be mimicked by a 2-D
+	theory with only minor error. The FPR characteristic recovery time
+	for diffusion is expected to be slightly longer than the corresponding
+	time measured by FCS in the same conditions. This is because the
+	profile of the laser beam used for photobleaching is not affected
+	by the field diaphragm. The CEF is also important for quantitative
+	analysis of standard scanning confocal microscopy when it is carried
+	out using a finite detection pinhole.},
+  doi = {10.1364/AO.30.001185},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.11.02},
+  url = {http://ao.osa.org/abstract.cfm?URI=ao-30-10-1185}
+}
+
+@ELECTRONIC{ImageJ,
+  author = {Rasband, W.S.},
+  year = {1997-2012},
+  title = {ImageJ},
+  organization = {U. S. National Institutes of Health},
+  note = {\url{http://imagej.nih.gov/ij/}},
+  owner = {paul},
+  timestamp = {2012.10.16}
+}
+
+@ARTICLE{Richter2006,
+  author = {Richter, Ralf P. and Bérat, Rémi and Brisson, Alain R.},
+  title = {Formation of Solid-Supported Lipid Bilayers: An Integrated View},
+  journal = {Langmuir},
+  year = {2006},
+  volume = {22},
+  pages = {3497--3505},
+  number = {8},
+  doi = {10.1021/la052687c},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/la052687c},
+  owner = {paul},
+  timestamp = {2012.11.12},
+  url = {http://pubs.acs.org/doi/abs/10.1021/la052687c}
+}
+
+@PHDTHESIS{Ries:08,
+  author = {Ries, E.},
+  title = {Advanced Fluorescence Correlation Techniques to Study Membrane Dynamics},
+  school = {Biophysics, BIOTEC, Technische Universität Dresden, Tatzberg 47–51,
+	01307 Dresden, Germany},
+  year = {2008},
+  note = {\url{http://nbn-resolving.de/urn:nbn:de:bsz:14-ds-1219846317196-73420}},
+  url = {http://nbn-resolving.de/urn:nbn:de:bsz:14-ds-1219846317196-73420}
+}
+
+@ARTICLE{Ries2009,
+  author = {Jonas Ries and Salvatore Chiantia and Petra Schwille},
+  title = {Accurate Determination of Membrane Dynamics with Line-Scan FCS},
+  journal = {Biophysical Journal},
+  year = {2009},
+  volume = {96},
+  pages = {1999--2008},
+  number = {5},
+  doi = {10.1016/j.bpj.2008.12.3888},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.11.08},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349509002112}
+}
+
+@ARTICLE{Ries2008390,
+  author = {Jonas Ries and Eugene P. Petrov and Petra Schwille},
+  title = {Total Internal Reflection Fluorescence Correlation Spectroscopy:
+	Effects of Lateral Diffusion and Surface-Generated Fluorescence},
+  journal = {Biophysical Journal},
+  year = {2008},
+  volume = {95},
+  pages = {390--399},
+  number = {1},
+  doi = {10.1529/biophysj.107.126193},
+  issn = {0006-3495},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349508703126}
+}
+
+@ARTICLE{Ries2008,
+  author = {Ries, Jonas and Schwille, Petra},
+  title = {New concepts for fluorescence correlation spectroscopy on membranes},
+  journal = {Physical Chemistry Chemical Physics},
+  year = {2008},
+  volume = {10},
+  pages = {--},
+  number = {24},
+  abstract = {Fluorescence correlation spectroscopy (FCS) is a powerful tool to
+	measure useful physical quantities such as concentrations, diffusion
+	coefficients, diffusion modes or binding parameters, both in model
+	and cell membranes. However, it can suffer from severe artifacts,
+	especially in non-ideal systems. Here we assess the potential and
+	limitations of standard confocal FCS on lipid membranes and present
+	recent developments which facilitate accurate and quantitative measurements
+	on such systems. In particular, we discuss calibration-free diffusion
+	and concentration measurements using z-scan FCS and two focus FCS
+	and present several approaches using scanning FCS to accurately measure
+	slow dynamics. We also show how surface confined FCS enables the
+	study of membrane dynamics even in presence of a strong cytosolic
+	background and how FCS with a variable detection area can reveal
+	submicroscopic heterogeneities in cell membranes.},
+  issn = {1463-9076},
+  owner = {paul},
+  publisher = {The Royal Society of Chemistry},
+  timestamp = {2012.02.14},
+  url = {http://dx.doi.org/10.1039/B718132A}
+}
+
+@ARTICLE{Rigler1993,
+  author = {Rigler, R. and Mets, {\"U}. and Widengren, J. and Kask, P.},
+  title = {Fluorescence correlation spectroscopy with high count rate and low
+	background: analysis of translational diffusion},
+  journal = {European Biophysics Journal},
+  year = {1993},
+  volume = {22},
+  pages = {169--175},
+  doi = {10.1007/BF00185777},
+  issn = {0175-7571},
+  issue = {3},
+  keywords = {Fluorescence correlation spectroscopy; Fluorescence intensity fluctuations;
+	Translational diffusion; Epifluorescence microscope; Silicon photon
+	counter},
+  language = {English},
+  owner = {paul},
+  publisher = {Springer-Verlag},
+  timestamp = {2012.11.02},
+  url = {http://dx.doi.org/10.1007/BF00185777}
+}
+
+@ARTICLE{Ruan2004,
+  author = {Ruan, Qiaoqiao and Cheng, Melanie A. and Levi, Moshe and Gratton,
+	Enrico and Mantulin, William W.},
+  title = {Spatial-Temporal Studies of Membrane Dynamics: Scanning Fluorescence
+	Correlation Spectroscopy (SFCS)},
+  journal = {Biophysical Journal},
+  year = {2004},
+  volume = {87},
+  pages = {1260--1267},
+  number = {2},
+  month = aug,
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(04)73605-X DOI - 10.1529/biophysj.103.036483},
+  timestamp = {2012.02.14},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S000634950473605X}
+}
+
+@ARTICLE{Sankaran2009,
+  author = {Sankaran, Jagadish and Manna, Manoj and Guo, Lin and Kraut, Rachel
+	and Wohland, Thorsten},
+  title = {Diffusion, Transport, and Cell Membrane Organization Investigated
+	by Imaging Fluorescence Cross-Correlation Spectroscopy},
+  journal = {Biophysical Journal},
+  year = {2009},
+  volume = {97},
+  pages = {2630--2639},
+  number = {9},
+  month = nov,
+  doi = {10.1016/j.bpj.2009.08.025},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(09)01387-3 DOI - 10.1016/j.bpj.2009.08.025},
+  timestamp = {2012.09.21},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349509013873}
+}
+
+@ARTICLE{Sankaran2010,
+  author = {Jagadish Sankaran and Xianke Shi and Liang Yoong Ho and Ernst H K
+	Stelzer and Thorsten Wohland},
+  title = {ImFCS: a software for imaging FCS data analysis and visualization.},
+  journal = {Optics Express},
+  year = {2010},
+  volume = {18},
+  pages = {25468--25481},
+  number = {25},
+  month = dec,
+  abstract = {The multiplexing of fluorescence correlation spectroscopy (FCS), especially
+	in imaging FCS using fast, sensitive array detectors, requires the
+	handling of large amounts of data. One can easily collect in excess
+	of 100,000 FCS curves a day, too many to be treated manually. Therefore,
+	ImFCS, an open-source software which relies on standard image files
+	was developed and provides a wide range of options for the calculation
+	of spatial and temporal auto- and cross-correlations, as well as
+	differences in Cross-Correlation Functions (ΔCCF). ImFCS permits
+	fitting of standard models to correlation functions and provides
+	optimized histograms of fitted parameters. Applications include the
+	measurement of diffusion and flow with Imaging Total Internal Reflection
+	FCS (ITIR-FCS) and Single Plane Illumination Microscopy FCS (SPIM-FCS)
+	in biologically relevant samples. As a compromise between ITIR-FCS
+	and SPIM-FCS, we extend the applications to Imaging Variable Angle-FCS
+	(IVA-FCS) where sub-critical oblique illumination provides sample
+	sectioning close to the cover slide.},
+  doi = {10.1364/OE.18.025468},
+  institution = {Singapore-MIT Alliance, National University of Singapore, E4-04-10,
+	4 Engineering Drive 3, 117576 Singapore.},
+  keywords = {Algorithms; Pattern Recognition, Automated, methods; Software; Spectrometry,
+	Fluorescence, methods},
+  language = {eng},
+  medline-pst = {ppublish},
+  owner = {paul},
+  pii = {208325},
+  pmid = {21164894},
+  timestamp = {2012.10.24}
+}
+
+@ARTICLE{SbalzariniSPT,
+  author = {I. F. Sbalzarini and P. Koumoutsakos},
+  title = {Feature Point Tracking and Trajectory Analysis for Video Imaging
+	in Cell Biology},
+  journal = {Journal of Structural Biology},
+  year = {2005},
+  volume = {151},
+  number = {2},
+  pages = {182--195},
+  doi = {10.1016/j.jsb.2005.06.002},
+  owner = {paul},
+  timestamp = {2012.10.16}
+}
+
+@ARTICLE{Schwille2000,
+  author = {Schwille, Petra and Kummer, Susanne and Heikal, Ahmed A. and Moerner,
+	W. E. and Webb, Watt W.},
+  title = {Fluorescence correlation spectroscopy reveals fast optical excitation-driven
+	intramolecular dynamics of yellow fluorescent proteins},
+  journal = {Proceedings of the National Academy of Sciences},
+  year = {2000},
+  volume = {97},
+  pages = {151--156},
+  number = {1},
+  abstract = {Fast excitation-driven fluctuations in the fluorescence emission of
+	yellow-shifted green fluorescent protein mutants T203Y and T203F,
+	with S65G/S72A, are discovered in the $10^{-6}$--$10^{-3}$-s time
+	range, by using fluorescence correlation spectroscopy at $10^{-8}$
+	M. This intensity-dependent flickering is conspicuous at high pH,
+	with rate constants independent of pH and viscosity with a minor
+	temperature effect. The mean flicker rate increases linearly with
+	excitation intensity for at least three decades, but the mean dark
+	fraction of the molecules undergoing these dynamics is independent
+	of illumination intensity over $\approx 6 \times 10^{2}$ to $5 \times
+	10^{6}$ W/cm$^2$. These results suggest that optical excitation establishes
+	an equilibration between two molecular states of different spectroscopic
+	properties that are coupled only via the excited state as a gateway.
+	This reversible excitation-driven transition has a quantum efficiency
+	of $\approx 10^{-3}$. Dynamics of external protonation, reversibly
+	quenching the fluorescence, are also observed at low pH in the 10-
+	to 100-$\mu$s time range. The independence of these two bright--dark
+	flicker processes implies the existence of at least two separate
+	dark states of these green fluorescent protein mutants. Time-resolved
+	fluorescence measurements reveal a single exponential decay of the
+	excited state population with 3.8-ns lifetime, after 500-nm excitation,
+	that is pH independent. Our fluorescence correlation spectroscopy
+	results are discussed in terms of recent theoretical studies that
+	invoke isomerization of the chromophore as a nonradiative channel
+	of the excited state relaxation.},
+  doi = {10.1073/pnas.97.1.151},
+  eprint = {http://www.pnas.org/content/97/1/151.full.pdf+html},
+  owner = {paul},
+  timestamp = {2012.09.24},
+  url = {http://www.pnas.org/content/97/1/151.abstract}
+}
+
+@ARTICLE{Schwille1997,
+  author = {Schwille, P. and Meyer-Almes, F.J. and Rigler, R.},
+  title = {Dual-color fluorescence cross-correlation spectroscopy for multicomponent
+	diffusional analysis in solution},
+  journal = {Biophysical Journal},
+  year = {1997},
+  volume = {72},
+  pages = {1878--1886},
+  number = {4},
+  month = apr,
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(97)78833-7 DOI - 10.1016/S0006-3495(97)78833-7},
+  timestamp = {2012.02.14},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349597788337}
+}
+
+@ARTICLE{Schatzel1990,
+  author = {K. Sch{\"a}tzel},
+  title = {Noise on photon correlation data. I. Autocorrelation functions},
+  journal = {Quantum Optics: Journal of the European Optical Society Part B},
+  year = {1990},
+  volume = {2},
+  pages = {287},
+  number = {4},
+  abstract = {An adequate analysis of photon correlation data requires knowledge
+	about the statistical accuracy of the measured data. For the model
+	of gamma-distributed intensities, that is including the effect of
+	a finite intercept, the full covariance matrix is calculated for
+	all the channels of the photon autocorrelation functions. A thorough
+	discussion of multiple sample time correlation illuminates the importance
+	of temporal averaging effects at large lag times. A practical estimation
+	scheme is given for the noise in photon correlation data from a multiple
+	sample time measurement.},
+  doi = {10.1088/0954-8998/2/4/002},
+  owner = {paul},
+  timestamp = {2012.11.02},
+  url = {http://stacks.iop.org/0954-8998/2/i=4/a=002}
+}
+
+@ARTICLE{Scomparin2009,
+  author = {Scomparin, C. and Lecuyer, S. and Ferreira, M. and Charitat, T. and
+	Tinland, B.},
+  title = {Diffusion in supported lipid bilayers: Influence of substrate and
+	preparation technique on the internal dynamics},
+  journal = {The European Physical Journal E: Soft Matter and Biological Physics},
+  year = {2009},
+  volume = {28},
+  pages = {211--220},
+  affiliation = {CNRS UPR 3118 CINAM 13288 Marseille Cedex 09 France},
+  doi = {10.1140/epje/i2008-10407-3},
+  issn = {1292-8941},
+  issue = {2},
+  keyword = {Physics and Astronomy},
+  owner = {paul},
+  publisher = {Springer Berlin / Heidelberg},
+  timestamp = {2012.10.22},
+  url = {http://dx.doi.org/10.1140/epje/i2008-10407-3}
+}
+
+@ARTICLE{Seu2007,
+  author = {Seu, Kalani J. and Pandey, Anjan P. and Haque, Farzin and Proctor,
+	Elizabeth A. and Ribbe, Alexander E. and Hovis, Jennifer S.},
+  title = {Effect of Surface Treatment on Diffusion and Domain Formation in
+	Supported Lipid Bilayers},
+  journal = {Biophysical Journal},
+  year = {2007},
+  volume = {92},
+  pages = {2445--2450},
+  number = {7},
+  month = apr,
+  doi = {10.1529/biophysj.106.099721},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(07)71049-4 DOI - 10.1529/biophysj.106.099721},
+  timestamp = {2012.10.22},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349507710494}
+}
+
+@ARTICLE{Shannon1984,
+  author = {Shannon, C.E.},
+  title = {Communication in the presence of noise},
+  journal = {Proceedings of the IEEE},
+  year = {1984},
+  volume = {72},
+  pages = {1192--1201},
+  number = {9},
+  month = sep,
+  doi = {10.1109/PROC.1984.12998},
+  issn = {0018-9219},
+  owner = {paul},
+  timestamp = {2012.11.12}
+}
+
+@ARTICLE{Skinner2005,
+  author = {Joseph P Skinner and Yan Chen and Joachim D Müller},
+  title = {Position-sensitive scanning fluorescence correlation spectroscopy.},
+  journal = {Biophysical Journal},
+  year = {2005},
+  volume = {89},
+  pages = {1288--1301},
+  number = {2},
+  month = aug,
+  abstract = {Fluorescence correlation spectroscopy (FCS) uses a stationary laser
+	beam to illuminate a small sample volume and analyze the temporal
+	behavior of the fluorescence fluctuations within the stationary observation
+	volume. In contrast, scanning FCS (SFCS) collects the fluorescence
+	signal from a moving observation volume by scanning the laser beam.
+	The fluctuations now contain both temporal and spatial information
+	about the sample. To access the spatial information we synchronize
+	scanning and data acquisition. Synchronization allows us to evaluate
+	correlations for every position along the scanned trajectory. We
+	use a circular scan trajectory in this study. Because the scan radius
+	is constant, the phase angle is sufficient to characterize the position
+	of the beam. We introduce position-sensitive SFCS (PSFCS), where
+	correlations are calculated as a function of lag time and phase.
+	We present the theory of PSFCS and derive expressions for diffusion,
+	diffusion in the presence of flow, and for immobilization. To test
+	PSFCS we compare experimental data with theory. We determine the
+	direction and speed of a flowing dye solution and the position of
+	an immobilized particle. To demonstrate the feasibility of the technique
+	for applications in living cells we present data of enhanced green
+	fluorescent protein measured in the nucleus of COS cells.},
+  doi = {10.1529/biophysj.105.060749},
+  institution = {School of Physics and Astronomy, University of Minnesota, Minneapolis,
+	55455, USA. josephs@physics.umn.edu},
+  keywords = {Algorithms; Image Enhancement, methods; Image Interpretation, Computer-Assisted,
+	methods; Information Storage and Retrieval, methods; Microscopy,
+	Confocal, methods; Reproducibility of Results; Sensitivity and Specificity;
+	Spectrometry, Fluorescence, methods},
+  language = {eng},
+  medline-pst = {ppublish},
+  owner = {paul},
+  pii = {S0006-3495(05)72776-4},
+  pmid = {15894645},
+  timestamp = {2012.10.28},
+  url = {http://dx.doi.org/10.1529/biophysj.105.060749}
+}
+
+@ARTICLE{Starr2001,
+  author = {Tammy E. Starr and Nancy L. Thompson},
+  title = {Total Internal Reflection with Fluorescence Correlation Spectroscopy:
+	Combined Surface Reaction and Solution Diffusion},
+  journal = {Biophysical Journal},
+  year = {2001},
+  volume = {80},
+  pages = {1575 - 1584},
+  number = {3},
+  doi = {10.1016/S0006-3495(01)76130-9},
+  issn = {0006-3495},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349501761309}
+}
+
+@ARTICLE{Sutherland1905,
+  author = {Sutherland, William},
+  title = {A dynamical theory of diffusion for non-electrolytes and the molecular
+	mass of albumin},
+  journal = {Philosophical Magazine Series 6},
+  year = {1905},
+  volume = {9},
+  pages = {781--785},
+  number = {54},
+  doi = {10.1080/14786440509463331},
+  eprint = {http://www.tandfonline.com/doi/pdf/10.1080/14786440509463331},
+  owner = {paul},
+  timestamp = {2012.11.14},
+  url = {http://www.tandfonline.com/doi/abs/10.1080/14786440509463331}
+}
+
+@ARTICLE{Tamm1985,
+  author = {Tamm, L.K. and McConnell, H.M.},
+  title = {Supported phospholipid bilayers},
+  journal = {Biophysical Journal},
+  year = {1985},
+  volume = {47},
+  pages = {105--113},
+  number = {1},
+  month = jan,
+  doi = {10.1016/S0006-3495(85)83882-0},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(85)83882-0 DOI - 10.1016/S0006-3495(85)83882-0},
+  timestamp = {2012.10.29},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349585838820}
+}
+
+@INCOLLECTION{Thomps:bookFCS2002,
+  author = {Thompson, Nancy},
+  title = {Fluorescence Correlation Spectroscopy},
+  booktitle = {Topics in Fluorescence Spectroscopy},
+  publisher = {Springer US},
+  year = {2002},
+  editor = {Geddes, Chris D. and Lakowicz, Joseph R.},
+  volume = {1},
+  series = {Topics in Fluorescence Spectroscopy},
+  pages = {337--378},
+  affiliation = {University of North Carolina at Chapel Hill Department of Chemistry
+	Chapel Hill North Carolina 27599-3290 USA},
+  doi = {10.1007/0-306-47057-8_6},
+  isbn = {978-0-306-47057-8},
+  keyword = {Biomedical and Life Sciences},
+  owner = {paul},
+  timestamp = {2012.01.10},
+  url = {http://dx.doi.org/10.1007/0-306-47057-8_6}
+}
+
+@ARTICLE{Thompson1983,
+  author = {N.L. Thompson and D. Axelrod},
+  title = {Immunoglobulin surface-binding kinetics studied by total internal
+	reflection with fluorescence correlation spectroscopy},
+  journal = {Biophysical Journal},
+  year = {1983},
+  volume = {43},
+  pages = {103--114},
+  number = {1},
+  doi = {10.1016/S0006-3495(83)84328-8},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349583843288}
+}
+
+@ARTICLE{Thompson1981,
+  author = {N.L. Thompson and T.P. Burghardt and D. Axelrod},
+  title = {Measuring surface dynamics of biomolecules by total internal reflection
+	fluorescence with photobleaching recovery or correlation spectroscopy},
+  journal = {Biophysical Journal},
+  year = {1981},
+  volume = {33},
+  pages = {435--454},
+  number = {3},
+  doi = {10.1016/S0006-3495(81)84905-3},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349581849053}
+}
+
+@ARTICLE{Thompson1997,
+  author = {Thompson, Nancy L. and Drake, Andrew W. and Chen, Lixin and Broek,
+	Willem Vanden},
+  title = {Equilibrium, Kinetics, Diffusion and Self-Association of Proteins
+	at Membrane Surfaces: Measurement by Total Internal Reflection Fluorescence
+	Microscopy},
+  journal = {Photochemistry and Photobiology},
+  year = {1997},
+  volume = {65},
+  pages = {39--46},
+  number = {1},
+  doi = {10.1111/j.1751-1097.1997.tb01875.x},
+  issn = {1751-1097},
+  owner = {paul},
+  publisher = {Blackwell Publishing Ltd},
+  timestamp = {2012.02.14},
+  url = {http://dx.doi.org/10.1111/j.1751-1097.1997.tb01875.x}
+}
+
+@ARTICLE{Thompson1997a,
+  author = {Nancy L Thompson and B Christoffer Lagerholm},
+  title = {Total internal reflection fluorescence: applications in cellular
+	biophysics},
+  journal = {Current Opinion in Biotechnology},
+  year = {1997},
+  volume = {8},
+  pages = {58--64},
+  number = {1},
+  doi = {10.1016/S0958-1669(97)80158-9},
+  issn = {0958-1669},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://www.sciencedirect.com/science/article/pii/S0958166997801589}
+}
+
+@ARTICLE{Toomre2001,
+  author = {Derek Toomre and Dietmar J. Manstein},
+  title = {Lighting up the cell surface with evanescent wave microscopy},
+  journal = {Trends in Cell Biology},
+  year = {2001},
+  volume = {11},
+  pages = {298--303},
+  number = {7},
+  doi = {10.1016/S0962-8924(01)02027-X},
+  issn = {0962-8924},
+  keywords = {green-fluorescent protein (GFP)},
+  owner = {paul},
+  timestamp = {2012.02.14},
+  url = {http://www.sciencedirect.com/science/article/pii/S096289240102027X}
+}
+
+@ARTICLE{Unruh2008,
+  author = {Unruh, Jay R. and Gratton, Enrico},
+  title = {Analysis of Molecular Concentration and Brightness from Fluorescence
+	Fluctuation Data with an Electron Multiplied CCD Camera},
+  journal = {Biophysical Journal},
+  year = {2008},
+  volume = {95},
+  pages = {5385--5398},
+  number = {11},
+  month = dec,
+  doi = {10.1529/biophysj.108.130310},
+  issn = {0006-3495},
+  owner = {paul},
+  publisher = {Cell Press},
+  refid = {S0006-3495(08)78962-8 DOI - 10.1529/biophysj.108.130310},
+  timestamp = {2012.09.21},
+  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349508789628}
+}
+
+@ARTICLE{Vacha2009,
+  author = {V\'{a}cha, Robert and Siu, Shirley W. I. and Petrov, Michal and Böckmann,
+	Rainer A. and Barucha-Kraszewska, Justyna and Jurkiewicz, Piotr and
+	Hof, Martin and Berkowitz, Max L. and Jungwirth, Pavel},
+  title = {Effects of Alkali Cations and Halide Anions on the DOPC Lipid Membrane},
+  journal = {The Journal of Physical Chemistry A},
+  year = {2009},
+  volume = {113},
+  pages = {7235--7243},
+  number = {26},
+  note = {PMID: 19290591},
+  doi = {10.1021/jp809974e},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/jp809974e},
+  owner = {paul},
+  timestamp = {2012.10.24},
+  url = {http://pubs.acs.org/doi/abs/10.1021/jp809974e}
+}
+
+@ELECTRONIC{VisserRol,
+  author = {G. Visser and J. Rolinski},
+  year = {2010},
+  title = {Basic Photophysics},
+  note = {Photobiological Sciences Online (KC Smith, ed.) American Society
+	for Photobiology \url{http://www.photobiology.info}.},
+  url = {http://www.photobiology.info/Visser-Rolinski.html},
+  owner = {paul},
+  timestamp = {2012.02.14}
+}
+
+@ARTICLE{Widengren1995,
+  author = {Widengren, Jerker and Mets, {\"U}lo and Rigler, Rudolf},
+  title = {Fluorescence correlation spectroscopy of triplet states in solution:
+	a theoretical and experimental study},
+  journal = {The Journal of Physical Chemistry},
+  year = {1995},
+  volume = {99},
+  pages = {13368--13379},
+  number = {36},
+  doi = {10.1021/j100036a009},
+  eprint = {http://pubs.acs.org/doi/pdf/10.1021/j100036a009},
+  owner = {paul},
+  timestamp = {2012.02.20},
+  url = {http://pubs.acs.org/doi/abs/10.1021/j100036a009}
+}
+
+@ARTICLE{Widengren1994,
+  author = {Widengren, Jerker and Rigler, Rudolf and Mets, {\"U}lo},
+  title = {Triplet-state monitoring by fluorescence correlation spectroscopy},
+  journal = {Journal of Fluorescence},
+  year = {1994},
+  volume = {4},
+  pages = {255--258},
+  affiliation = {Department of Medical Biochemistry and Biophysics Karolinska Institute
+	S-171 77 Stockholm Sweden},
+  doi = {10.1007/BF01878460},
+  issn = {1053-0509},
+  issue = {3},
+  keyword = {Biomedical and Life Sciences},
+  owner = {paul},
+  publisher = {Springer Netherlands},
+  timestamp = {2012.09.24},
+  url = {http://dx.doi.org/10.1007/BF01878460}
+}
+
+@ARTICLE{Wohland2001,
+  author = {Wohland, Thorsten and Rigler, Rudolf and Vogel, Horst},
+  title = {The Standard Deviation in Fluorescence Correlation Spectroscopy},
+  journal = {Biophysical Journal},
+  year = {2001},
+  volume = {80},
+  pages = {2987--2999},
+  number = {6},
+  month = jun,
+  doi = {10.1016/S0006-3495(01)76264-9},
+  issn = {0006-3495},
+  owner = {paul},
+  timestamp = {2012.09.08},
+  url = {http://www.sciencedirect.com/science/article/pii/S0006349501762649}
+}
+
+@ARTICLE{Wohland2010,
+  author = {Thorsten Wohland and Xianke Shi and Jagadish Sankaran and Ernst H.K.
+	Stelzer},
+  title = {Single Plane Illumination Fluorescence Correlation Spectroscopy (SPIM-FCS)
+	probes inhomogeneous three-dimensional environments},
+  journal = {Optics Express},
+  year = {2010},
+  volume = {18},
+  pages = {10627--10641},
+  number = {10},
+  month = may,
+  abstract = {The life sciences require new highly sensitive imaging tools, which
+	allow the quantitative measurement of molecular parameters within
+	a physiological three-dimensional (3D) environment. Therefore, we
+	combined single plane illumination microscopy (SPIM) with camera
+	based fluorescence correlation spectroscopy (FCS). SPIM-FCS provides
+	contiguous particle number and diffusion coefficient images with
+	a high spatial resolution in homo- and heterogeneous 3D specimens
+	and live zebrafish embryos. Our SPIM-FCS recorded up to 4096 spectra
+	within 56 seconds at a laser power of 60~$\mu$W without damaging
+	the embryo. This new FCS modality provides more measurements per
+	time and more, less photo-toxic measurements per sample than confocal
+	based methods. In essence, SPIM-FCS offers new opportunities to observe
+	biomolecular interactions quantitatively and functions in a highly
+	multiplexed manner within a physiologically relevant 3D environment.},
+  doi = {10.1364/OE.18.010627},
+  keywords = {Fluorescence microscopy; Three-dimensional microscopy; Spectroscopy,
+	fluorescence and luminescence},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.11.07},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-18-10-10627}
+}
+
+@ARTICLE{Yordanov2009,
+  author = {Stoyan Yordanov and Andreas Best and Hans-J\"{u}rgen Butt and Kaloian
+	Koynov},
+  title = {Direct studies of liquid flows near solid surfaces by total internal
+	reflection fluorescence cross-correlation spectroscopy},
+  journal = {Optics Express},
+  year = {2009},
+  volume = {17},
+  pages = {21149--21158},
+  number = {23},
+  month = nov,
+  abstract = {We present a new method to study flow of liquids near solid surface:
+	Total internal reflection fluorescence cross-correlation spectroscopy
+	(TIR-FCCS). Fluorescent tracers flowing with the liquid are excited
+	by evanescent light, produced by epi-illumination through the periphery
+	of a high numerical aperture oil-immersion objective. The time-resolved
+	fluorescence intensity signals from two laterally shifted observation
+	volumes, created by two confocal pinholes are independently measured.
+	The cross-correlation of these signals provides information of the
+	tracers' velocities. By changing the evanescent wave penetration
+	depth, flow profiling at distances less than 200 nm from the interface
+	can be performed. Due to the high sensitivity of the method fluorescent
+	species with different size, down to single dye molecules can be
+	used as tracers. We applied this method to study the flow of aqueous
+	electrolyte solutions near a smooth hydrophilic surface and explored
+	the effect of several important parameters, e.g. tracer size, ionic
+	strength, and distance between the observation volumes.},
+  doi = {10.1364/OE.17.021149},
+  keywords = {Velocimetry; Fluorescence, laser-induced; Spectroscopy, surface},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.09.21},
+  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-17-23-21149}
+}
+
+@ARTICLE{Yordanov2011,
+  author = {Stoyan Yordanov and Andreas Best and Klaus Weisshart and Kaloian
+	Koynov},
+  title = {Note: An easy way to enable total internal reflection-fluorescence
+	correlation spectroscopy (TIR-FCS) by combining commercial devices
+	for FCS and TIR microscopy},
+  journal = {Review of Scientific Instruments},
+  year = {2011},
+  volume = {82},
+  pages = {036105},
+  number = {3},
+  eid = {036105},
+  doi = {10.1063/1.3557412},
+  keywords = {fluorescence spectroscopy; optical microscopy},
+  numpages = {3},
+  owner = {paul},
+  publisher = {AIP},
+  timestamp = {2012.05.02},
+  url = {http://link.aip.org/link/?RSI/82/036105/1}
+}
+
+@ARTICLE{Zhang2007,
+  author = {Bo Zhang and Josiane Zerubia and Jean-Christophe Olivo-Marin},
+  title = {Gaussian approximations of fluorescence microscope point-spread function
+	models},
+  journal = {Applied Optics},
+  year = {2007},
+  volume = {46},
+  pages = {1819--1829},
+  number = {10},
+  month = apr,
+  abstract = {We comprehensively study the least-squares Gaussian approximations
+	of the diffraction-limited 2D-3D paraxial-nonparaxial point-spread
+	functions (PSFs) of the wide field fluorescence microscope (WFFM),
+	the laser scanning confocal microscope (LSCM), and the disk scanning
+	confocal microscope (DSCM). The PSFs are expressed using the Debye
+	integral. Under an $L^{\infty}$ constraint imposing peak matching,
+	optimal and near-optimal Gaussian parameters are derived for the
+	PSFs. With an $L^{1}$ constraint imposing energy conservation, an
+	optimal Gaussian parameter is derived for the 2D paraxial WFFM PSF.
+	We found that (1) the 2D approximations are all very accurate; (2)
+	no accurate Gaussian approximation exists for 3D WFFM PSFs; and (3)
+	with typical pinhole sizes, the 3D approximations are accurate for
+	the DSCM and nearly perfect for the LSCM. All the Gaussian parameters
+	derived in this study are in explicit analytical form, allowing their
+	direct use in practical applications.},
+  doi = {10.1364/AO.46.001819},
+  keywords = {Numerical approximation and analysis; Microscopy; Confocal microscopy;
+	Fluorescence microscopy; Three-dimensional microscopy},
+  owner = {paul},
+  publisher = {OSA},
+  timestamp = {2012.09.20},
+  url = {http://ao.osa.org/abstract.cfm?URI=ao-46-10-1819}
+}
+
+@BOOK{Rigler:FCSbook,
+  title = {Fluorescence Correlation Spectroscopy, Theory and Applications},
+  publisher = {Springer Berlin Heidelberg},
+  year = {2001},
+  editor = {R. Rigler and E.S. Elson},
+  edition = {1},
+  howpublished = {Paperback},
+  isbn = {978-3540674337},
+  owner = {paul},
+  timestamp = {2012.11.02}
+}
+
+@ELECTRONIC{AndorNeoSpec,
+  title = {Andor Technology, Neo sCMOS Specifications},
+  organization = {Andor Technology},
+  note = {\url{http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf}
+	(Oct. 2012)},
+  url = {http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf},
+  citeseerurl = {http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf},
+  owner = {paul},
+  timestamp = {2012.10.08}
+}
+
+@ELECTRONIC{HamamatsuOrcaSpec,
+  title = {Hamamatsu, ORCA-Flash4.0 CMOS datasheet},
+  organization = {Hamamatsu},
+  note = {\url{http://sales.hamamatsu.com/assets/pdf/hpspdf/e_flash4.pdf} (Oct.
+	2012)},
+  url = {http://sales.hamamatsu.com/assets/pdf/hpspdf/e_flash4.pdf},
+  citeseerurl = {http://sales.hamamatsu.com/assets/pdf/hpspdf/e_flash4.pdf},
+  owner = {paul},
+  timestamp = {2012.10.08}
+}
+
+@ELECTRONIC{InvitrogenDiO,
+  month = nov,
+  title = {Invitrogen, catalog number D-275 (DiO)},
+  note = {\url{http://products.invitrogen.com/ivgn/product/D275} (Oct. 2012)},
+  owner = {paul},
+  timestamp = {2012.10.18}
+}
+
+@ELECTRONIC{vaxavis,
+  title = {Dynamic viscosity of liquid water from 0 \degC},
+  note = {\url{http://www.vaxasoftware.com/doc_eduen/qui/viscoh2o.pdf} (Oct.
+	2012)},
+  owner = {paul},
+  timestamp = {2012.10.29}
+}
+
+@ELECTRONIC{WikipediaBrown,
+  title = {Brownian motion, Wikipedia - The Free Encyclopedia},
+  note = {\url{http://en.wikipedia.org/wiki/Brownian_motion} (Oct. 2012)},
+  owner = {paul},
+  timestamp = {2012.10.18}
+}
+
+@ELECTRONIC{AndorNeo,
+  year = {2011},
+  title = {Andor Technology, Neo sCMOS Hardware Guide},
+  organization = {Andor Technology},
+  owner = {paul},
+  timestamp = {2012.10.06}
+}
+
diff --git a/doc-src/Images/PyCorrFit_Screenshot_Main.png b/doc-src/Images/PyCorrFit_Screenshot_Main.png
new file mode 100755
index 0000000..913ef46
Binary files /dev/null and b/doc-src/Images/PyCorrFit_Screenshot_Main.png differ
diff --git a/doc-src/Images/PyCorrFit_icon.png b/doc-src/Images/PyCorrFit_icon.png
new file mode 100644
index 0000000..999f2a7
Binary files /dev/null and b/doc-src/Images/PyCorrFit_icon.png differ
diff --git a/doc-src/Images/PyCorrFit_icon.svg b/doc-src/Images/PyCorrFit_icon.svg
new file mode 100644
index 0000000..49f4144
--- /dev/null
+++ b/doc-src/Images/PyCorrFit_icon.svg
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="280"
+   height="280"
+   id="svg2"
+   version="1.1"
+   inkscape:version="0.48.3.1 r9886"
+   sodipodi:docname="Icon.svg"
+   inkscape:export-filename="/home/paul/Teufel/PyCorrFit/doc-src/Images/Icon.png"
+   inkscape:export-xdpi="82.285713"
+   inkscape:export-ydpi="82.285713">
+  <defs
+     id="defs4" />
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="2.8"
+     inkscape:cx="242.5741"
+     inkscape:cy="162.07305"
+     inkscape:document-units="px"
+     inkscape:current-layer="layer1"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1053"
+     inkscape:window-height="751"
+     inkscape:window-x="41"
+     inkscape:window-y="24"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Ebene 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-72.322502,-192.61096)">
+    <path
+       sodipodi:type="arc"
+       style="fill:#ffffff;fill-opacity:0.17210681;stroke:#ffffff;stroke-width:6.37775278;stroke-linecap:round;stroke-miterlimit:4;stroke-opacity:0.44510384;stroke-dasharray:none"
+       id="path3820"
+       sodipodi:cx="112.14286"
+       sodipodi:cy="46.627132"
+       sodipodi:rx="125"
+       sodipodi:ry="125.71429"
+       d="m 237.14286,46.627132 c 0,69.430088 -55.96441,125.714288 -125,125.714288 -69.035593,0 -125,-56.2842 -125,-125.714288 0,-69.430083 55.964407,-125.714286 125,-125.714286 69.03559,0 125,56.284203 125,125.714286 z"
+       transform="matrix(0.8109252,0,0,0.80631769,129.88302,299.077)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#008000;fill-opacity:1;stroke:#008000;stroke-width:11.25699711000000036;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-3"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,129.9134,45.15562)" />
+    <path
+       style="fill:none;stroke:#008000;stroke-width:7;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       d="m 83.272048,410.80442 c 0,0 78.208002,-28.60158 143.480012,-77.09641 68.08557,-50.5852 91.8236,-119.88089 91.8236,-119.88089"
+       id="path3780"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="csc" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#c48d0a;fill-opacity:1;stroke:#008000;stroke-width:11.25699711000000036;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,47.268604,93.89513)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#303dc7;fill-opacity:1;stroke:#008000;stroke-width:11.25699711000000036;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-1"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,195.09091,-23.441194)" />
+  </g>
+</svg>
diff --git a/doc-src/Images/PyCorrFit_icon_dark.svg b/doc-src/Images/PyCorrFit_icon_dark.svg
new file mode 100644
index 0000000..0f89d56
--- /dev/null
+++ b/doc-src/Images/PyCorrFit_icon_dark.svg
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="280"
+   height="280"
+   id="svg2"
+   version="1.1"
+   inkscape:version="0.48.3.1 r9886"
+   sodipodi:docname="Icon_dark.svg"
+   inkscape:export-filename="/home/paul/Teufel/PyCorrFit/source-doc/Images/Icon.png"
+   inkscape:export-xdpi="82.285713"
+   inkscape:export-ydpi="82.285713">
+  <defs
+     id="defs4" />
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.98994949"
+     inkscape:cx="174.6044"
+     inkscape:cy="111.97851"
+     inkscape:document-units="px"
+     inkscape:current-layer="layer1"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1053"
+     inkscape:window-height="751"
+     inkscape:window-x="41"
+     inkscape:window-y="3"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Ebene 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-72.322502,-192.61096)">
+    <path
+       sodipodi:type="arc"
+       style="fill:#000000;fill-opacity:0.17210681999999999;stroke:#000000;stroke-width:6.37775277999999979;stroke-linecap:round;stroke-miterlimit:4;stroke-opacity:0.44510385;stroke-dasharray:none"
+       id="path3820"
+       sodipodi:cx="112.14286"
+       sodipodi:cy="46.627132"
+       sodipodi:rx="125"
+       sodipodi:ry="125.71429"
+       d="m 237.14286,46.627132 c 0,69.430088 -55.96441,125.714288 -125,125.714288 -69.035593,0 -125,-56.2842 -125,-125.714288 0,-69.430083 55.964407,-125.714286 125,-125.714286 69.03559,0 125,56.284203 125,125.714286 z"
+       transform="matrix(0.8109252,0,0,0.80631769,129.88302,299.077)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#008000;fill-opacity:1;stroke:#008000;stroke-width:11.25699711000000036;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-3"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,129.9134,45.15562)" />
+    <path
+       style="fill:none;stroke:#008000;stroke-width:7;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       d="m 83.272048,410.80442 c 0,0 78.208002,-28.60158 143.480012,-77.09641 68.08557,-50.5852 91.8236,-119.88089 91.8236,-119.88089"
+       id="path3780"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="csc" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#c48d0a;fill-opacity:1;stroke:#008000;stroke-width:11.25699711000000036;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,47.268604,93.89513)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#303dc7;fill-opacity:1;stroke:#008000;stroke-width:11.25699711000000036;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-1"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,195.09091,-23.441194)" />
+  </g>
+</svg>
diff --git a/doc-src/Images/PyCorrFit_logo.svg b/doc-src/Images/PyCorrFit_logo.svg
new file mode 100644
index 0000000..2f0b86e
--- /dev/null
+++ b/doc-src/Images/PyCorrFit_logo.svg
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="787.474"
+   height="230.34308"
+   id="svg2"
+   version="1.1"
+   inkscape:version="0.48.3.1 r9886"
+   sodipodi:docname="PyCorrFit_logo.svg"
+   inkscape:export-filename="/home/paul/Arbeitsfläche/123.png"
+   inkscape:export-xdpi="82.285713"
+   inkscape:export-ydpi="82.285713">
+  <defs
+     id="defs4" />
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.35"
+     inkscape:cx="-44.858191"
+     inkscape:cy="46.809141"
+     inkscape:document-units="px"
+     inkscape:current-layer="layer1"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1053"
+     inkscape:window-height="751"
+     inkscape:window-x="49"
+     inkscape:window-y="24"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title />
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Ebene 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-79.58144,-210.26787)">
+    <path
+       sodipodi:type="arc"
+       style="fill:#ffffff;fill-opacity:0.17210681;stroke:#ffffff;stroke-width:6.37775278;stroke-linecap:round;stroke-miterlimit:4;stroke-opacity:0.44510384;stroke-dasharray:none"
+       id="path3820"
+       sodipodi:cx="112.14286"
+       sodipodi:cy="46.627132"
+       sodipodi:rx="125"
+       sodipodi:ry="125.71429"
+       d="m 237.14286,46.627132 c 0,69.430088 -55.96441,125.714288 -125,125.714288 -69.035593,0 -125,-56.2842 -125,-125.714288 0,-69.430083 55.964407,-125.714286 125,-125.714286 69.03559,0 125,56.284203 125,125.714286 z"
+       transform="matrix(0.8109252,0,0,0.80631769,129.88302,299.077)" />
+    <path
+       style="fill:none;stroke:#008000;stroke-width:7;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       d="m 83.272048,410.80442 c 0,0 78.208002,-28.60158 143.480012,-77.09641 68.08557,-50.5852 91.8236,-119.88089 91.8236,-119.88089"
+       id="path3780"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="csc" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#008000;fill-opacity:1;stroke:#008000;stroke-width:11.25699711;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-3"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,129.9134,45.15562)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#c48d0a;fill-opacity:1;stroke:#008000;stroke-width:11.25699711;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,47.268604,93.89513)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#303dc7;fill-opacity:1;stroke:#008000;stroke-width:11.25699711;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-1"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,195.09091,-23.441194)" />
+    <text
+       xml:space="preserve"
+       style="font-size:99.34378815px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#ffffff;fill-opacity:0.80473376;stroke:none;font-family:Sans"
+       x="337.98184"
+       y="364.08014"
+       id="text3793"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan3795"
+         x="337.98184"
+         y="364.08014"
+         style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;fill:#ffffff;fill-opacity:0.80473376;font-family:Courier New;-inkscape-font-specification:Courier New">PyCorrFit</tspan></text>
+  </g>
+</svg>
diff --git a/doc-src/Images/PyCorrFit_logo_dark.pdf b/doc-src/Images/PyCorrFit_logo_dark.pdf
new file mode 100644
index 0000000..820d265
Binary files /dev/null and b/doc-src/Images/PyCorrFit_logo_dark.pdf differ
diff --git a/doc-src/Images/PyCorrFit_logo_dark.png b/doc-src/Images/PyCorrFit_logo_dark.png
new file mode 100644
index 0000000..b4c3403
Binary files /dev/null and b/doc-src/Images/PyCorrFit_logo_dark.png differ
diff --git a/doc-src/Images/PyCorrFit_logo_dark.svg b/doc-src/Images/PyCorrFit_logo_dark.svg
new file mode 100644
index 0000000..20eb821
--- /dev/null
+++ b/doc-src/Images/PyCorrFit_logo_dark.svg
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="787.474"
+   height="230.34308"
+   id="svg2"
+   version="1.1"
+   inkscape:version="0.48.3.1 r9886"
+   sodipodi:docname="PyCorrFit_logo_dark.svg"
+   inkscape:export-filename="/home/paul/repos/PyCorrFit/doc-src/Images/PyCorrFit_logo_dark.png"
+   inkscape:export-xdpi="23.429344"
+   inkscape:export-ydpi="23.429344">
+  <defs
+     id="defs4" />
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.98994949"
+     inkscape:cx="414.12721"
+     inkscape:cy="90.812106"
+     inkscape:document-units="px"
+     inkscape:current-layer="layer1"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1280"
+     inkscape:window-height="724"
+     inkscape:window-x="0"
+     inkscape:window-y="24"
+     inkscape:window-maximized="1" />
+  <metadata
+     id="metadata7">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Ebene 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-79.58144,-210.26787)">
+    <path
+       sodipodi:type="arc"
+       style="fill:#000000;fill-opacity:0.17210681;stroke:#000000;stroke-width:6.37775278;stroke-linecap:round;stroke-miterlimit:4;stroke-opacity:0.44510384;stroke-dasharray:none"
+       id="path3820"
+       sodipodi:cx="112.14286"
+       sodipodi:cy="46.627132"
+       sodipodi:rx="125"
+       sodipodi:ry="125.71429"
+       d="m 237.14286,46.627132 c 0,69.430088 -55.96441,125.714288 -125,125.714288 -69.035593,0 -125,-56.2842 -125,-125.714288 0,-69.430083 55.964407,-125.714286 125,-125.714286 69.03559,0 125,56.284203 125,125.714286 z"
+       transform="matrix(0.8109252,0,0,0.80631769,129.88302,299.077)" />
+    <path
+       style="fill:none;stroke:#008000;stroke-width:7;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       d="m 83.272048,410.80442 c 0,0 78.208002,-28.60158 143.480012,-77.09641 68.08557,-50.5852 91.8236,-119.88089 91.8236,-119.88089"
+       id="path3780"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="csc" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#008000;fill-opacity:1;stroke:#008000;stroke-width:11.25699711;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-3"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,129.9134,45.15562)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#c48d0a;fill-opacity:1;stroke:#008000;stroke-width:11.25699711;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,47.268604,93.89513)" />
+    <path
+       sodipodi:type="arc"
+       style="fill:#303dc7;fill-opacity:1;stroke:#008000;stroke-width:11.25699711;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
+       id="path2985-1"
+       sodipodi:cx="161.6244"
+       sodipodi:cy="424.0473"
+       sodipodi:rx="44.446712"
+       sodipodi:ry="36.36549"
+       d="m 206.07112,424.0473 c 0,20.08411 -19.89947,36.36549 -44.44672,36.36549 -24.54724,0 -44.44671,-16.28138 -44.44671,-36.36549 0,-20.0841 19.89947,-36.36549 44.44671,-36.36549 24.54725,0 44.44672,16.28139 44.44672,36.36549 z"
+       transform="matrix(0.5624713,0,0,0.68746496,195.09091,-23.441194)" />
+    <text
+       xml:space="preserve"
+       style="font-size:99.34378815px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:0.80473376;stroke:none;font-family:Sans"
+       x="337.98184"
+       y="364.08014"
+       id="text3793"
+       sodipodi:linespacing="125%"><tspan
+         sodipodi:role="line"
+         id="tspan3795"
+         x="337.98184"
+         y="364.08014"
+         style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;fill:#000000;fill-opacity:0.80473376;font-family:Courier New;-inkscape-font-specification:Courier New">PyCorrFit</tspan></text>
+  </g>
+</svg>
diff --git a/doc-src/Images/SchwilleLogo.jpg b/doc-src/Images/SchwilleLogo.jpg
new file mode 100755
index 0000000..880a890
Binary files /dev/null and b/doc-src/Images/SchwilleLogo.jpg differ
diff --git a/doc-src/Images/TU_Logo_SW.pdf b/doc-src/Images/TU_Logo_SW.pdf
new file mode 100755
index 0000000..8309dca
Binary files /dev/null and b/doc-src/Images/TU_Logo_SW.pdf differ
diff --git a/doc-src/PyCorrFit_doc.tex b/doc-src/PyCorrFit_doc.tex
new file mode 100755
index 0000000..8f90844
--- /dev/null
+++ b/doc-src/PyCorrFit_doc.tex
@@ -0,0 +1,180 @@
+\documentclass[a4paper,12pt]{scrartcl}
+
+% apt-get install texlive-science
+%We work with PDF-LaTeX.
+%In Texmaker: Tools > PDFLaTeX (F6)
+
+\usepackage[utf8x]{inputenc}
+%\usepackage{tipa} % apt-get install tipa
+%For German text:
+%\usepackage[ngerman]{babel}
+%\usepackage{sistyle}
+%\SIstyle{German}
+%\usepackage{icomma} % comma as decimal separator (in math mode)
+		    % By default, LaTeX always inserts a little space after
+		    % a comma in math mode, which is wrong for numbers like
+		    % 3,45. With icomma: if the comma is followed by a space,
+		    % a space is typeset; if not, it is treated as an
+		    % operator, e.g. $f(x, y) = 3,45$
+
+%For English text:
+\usepackage[english]{babel}
+\usepackage{sistyle}
+
+
+\usepackage[top = 2cm, left = 2.5cm, right = 2cm, bottom = 2.5cm]{geometry}
+
+\usepackage{amsmath}
+\usepackage{amssymb}
+\usepackage{array}
+
+\usepackage{cite} %For citations and references
+\usepackage{url}
+\urlstyle{tt}
+
+%\usepackage{longtable} % multi-page tables
+%\usepackage{multirow} % merging columns/rows
+
+
+
+\usepackage{subfig} % combine several images into one figure
+
+%Nicer captions for figures and tables
+%Typeset captions with commas and bold labels
+\DeclareCaptionLabelFormat{mycaption}{#1 #2}
+\DeclareCaptionLabelSeparator{comma}{, }
+\captionsetup{font=small,labelfont=bf,labelformat=mycaption,labelsep=comma}
+\setcapindent{0pt} % indentation starting from the second line
+
+\newcommand{\mycaption}[2]{\caption[~#1]{\textbf{#1:} #2}}
+
+\usepackage{textcomp} % special characters
+\usepackage{wrapfig}
+
+\usepackage{fancyvrb}
+\usepackage[svgnames]{xcolor} %colors such as DarkBlue
+
+%%
+%%
+%% Definitions for nice links within the document
+  %%% graphicx: support for graphics
+  \usepackage[pdftex]{graphicx}
+
+
+  \pdfcompresslevel=9
+
+  %%% hyperref (hyperlinks in PDF): for more options or more detailed
+  %%%          explanations, see the documentation of the hyperref-package
+  \usepackage[%
+    %%% general options
+    pdftex=true,      %% sets up hyperref for use with the pdftex program
+    %plainpages=false, %% set it to false, if pdflatex complains: ``destination with same identifier already exists''
+    %
+    pdfstartview={XYZ 0 0 1.0} , %% opens the PDF at 100% zoom, i.e. original size
+    %%% extension options
+    backref,      %% adds a backlink text to the end of each item in the bibliography
+    pagebackref=false, %% if true, creates backward references as a list of page numbers in the bibliography
+    colorlinks=true,   %% turn on colored links (true is better for on-screen reading, false is better for printout versions)
+    linkcolor=DarkBlue,		%% change the link color
+    urlcolor=DarkBlue,		%% change the URL link color and other serious colors
+    anchorcolor = black,
+    citecolor = DarkGreen,
+    filecolor = black,
+    breaklinks=false,
+    %
+    %%% PDF-specific display options
+    bookmarks=true,          %% if true, generate PDF bookmarks (requires two passes of pdflatex)
+    bookmarksopen=true,     %% if true, show all PDF bookmarks expanded
+    bookmarksnumbered=false, %% if true, add the section numbers to the bookmarks
+    %pdfstartpage={1},        %% determines, on which page the PDF file is opened
+    %pdfpagemode=None         %% None, UseOutlines (=show bookmarks), UseThumbs (show thumbnails), FullScreen
+  ]{hyperref}
+  %%% provide all graphics (also) in this format, so you don't have
+  %%% to add the file extensions to the \includegraphics-command
+  %%% and/or you don't have to distinguish between generating
+  %%% dvi/ps (through latex) and pdf (through pdflatex)
+%  \DeclareGraphicsExtensions{.pdf}
+%%
+%%
+
+%\newcommand{\kommentar}[1]{\marginpar{\textcolor{red}{#1}}}  % comment command
+%\newcommand{\fehler}[3]{\SI{(#1}{}\SI{\pm #2)}{#3}} % error command
+
+%Location for possible image files (subfolders)
+\graphicspath{{bilder/}{messwerte/}{auswertung/}}
+
+%New commands
+\newcommand{\hyref}[2]{\hyperref[#2]{#1~\ref{#2}}} %Nicer link: instead of "link to figure \ref{im:bild}" -> "\hyref{link to figure}{im:bild}"
+
+\newcommand{\mytilde}{\raisebox{-0.9ex}{\~{ }}}
+
+\setcounter{page}{1}
+
+% Tell latex how to break the program names
+\hyphenation{Py-Corr-Fit Py-Scan-FCS}
+
+% For non-italic greek letters
+\usepackage{upgreek}
+
+\usepackage{doi}
+
+\begin{document}
+
+
+
+\vspace{-5em}
+\begin{flushright}
+	\includegraphics[angle=0,width=35mm]{Images/TU_Logo_SW} \hspace{1em}
+	\raisebox{-0.2em}{\includegraphics[angle=0,width=40mm]{Images/SchwilleLogo} }
+	\includegraphics[angle=0,width=40mm]{Images/PyCorrFit_logo_dark.pdf} 
+\end{flushright}
+%\vspace{-3em}
+\begin{center}\rule{\textwidth}{0.1ex}\par\end{center}
+%\vspace{-4em}
+%\begin{center}\rule{\textwidth}{0.1ex}\par\end{center}
+
+
+
+\vspace{1em}
+
+\noindent\begin{tabular}{ll}
+\textbf{PyCorrFit - Generic cross-platform FCS fitting tool}\\
+\textit{Software Guide} \\
+\\
+
+ Paul Müller \\
+ Biotechnology Center of the TU Dresden \\
+ \today 
+ 
+\end{tabular}
+
+\vspace{2em}
+
+\tableofcontents
+
+
+\newpage
+
+\graphicspath{{Images/}}
+\include{PyCorrFit_doc_content}
+
+
+\section*{Acknowledgements}
+\addcontentsline{toc}{section}{Acknowledgements}
+I thank André Scholich (TU Dresden, Germany) for initial proof reading of the manuscript and Grzegorz Chwastek, Franziska Thomas, and Thomas Weidemann (Biotec, TU Dresden, Germany) for critical feedback on PyCorrFit.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Bibliography
+
+\pagestyle{plain}             % page number only in the footer
+
+\bibliographystyle{plainurl}  % citation style: alphadin = [Nam88]; apt-get install bibtex-extras
+\bibliography{Bibliography}      % insert the BibTeX file name.bib here without the .bib extension
+%\nocite{*}                    % lists all entries of the .bib file when active
+
+
+
+\end{document}
diff --git a/doc-src/PyCorrFit_doc_content.tex b/doc-src/PyCorrFit_doc_content.tex
new file mode 100755
index 0000000..6935a36
--- /dev/null
+++ b/doc-src/PyCorrFit_doc_content.tex
@@ -0,0 +1,596 @@
+\section{Introduction}
+
+\subsection{Preface}
+\texttt{PyCorrFit} emerged from my work in the Schwille Lab\footnote{\url{http://www.biochem.mpg.de/en/rd/schwille/}} at the Biotechnology Center of the TU Dresden in 2011/2012. The program source code is available at GitHub\footnote{\url{https://github.com/paulmueller/PyCorrFit}}. Please do not hesitate to sign up and add a feature request. If you find a bug, please let me know via GitHub.\\
+
+\noindent \texttt{PyCorrFit} was written to simplify the work with experimentally obtained correlation curves. These can be processed independently of operating system, location, and time. PyCorrFit supports commonly used file formats and enables users to allocate and organize their data in a simple way.\\
+
+\noindent PyCorrFit is free software: you can redistribute it and/or modify it
+under the terms of the GNU General Public License as published 
+by the Free Software Foundation, either version 2 of the License, 
+or (at your option) any later version\footnote{\url{http://www.gnu.org/licenses/gpl.html}}.
+
+\subsubsection*{What PyCorrFit can do}
+\begin{itemize}
+\item Load correlation curves from numerous correlators
+\item Process these curves (e.g. background correction, see the Tools menu, \hyref{Section}{sec:tm})
+\item Fit a model function (many included) to an experimental curve
+\item Import user defined models for fitting
+\item Many batch processing features
+\item Save/load entire PyCorrFit sessions
+\end{itemize}
+
+\subsubsection*{What PyCorrFit is not}
+\begin{itemize}
+\item A multiple-$\tau$ correlator
+\item A software to operate hardware correlators
+\end{itemize}
+
+\subsection{System prerequisites}
+\subsubsection{Hardware}
+This documentation addresses the processing of correlation curves with PyCorrFit. PyCorrFit has been successfully used with the following setups:
+\begin{itemize}
+\item[1.]
+     APD: Photon Counting Device from PerkinElmer Optoelectronics, Model: \texttt{SPCM-CD3017}\\
+     Correlator: Flex02-01D/C from correlator.com with the shipped software \texttt{flex02-1dc.exe}.
+\item[2.]
+    APD: Photon Counting Device from PerkinElmer Optoelectronics\\
+    Correlator: ALV-6000
+\item[3.] LSM Confocor2 or Confocor3 setups from Zeiss, Germany.
+\end{itemize}
+
+\subsubsection{Software}
+\label{cha:soft}
+The latest version of PyCorrFit can be obtained from the internet at \url{http://pycorrfit.craban.de}.
+\begin{itemize}
+\item \textbf{Mac OS X}.
+Binary files for Mac OS X $>$10.6.8 are available from the download page but have not yet been fully tested for stability.
+\item \textbf{Windows}.
+For Windows XP or Windows 7, stand-alone binary executables are available from the download page. 
+\item \textbf{Linux}.
+There are executable binaries for widely used distributions (e.g. Ubuntu).
+\item \textbf{Sources}.
+The program was written in Python with cross-platform portability in mind. To run PyCorrFit on any other operating system, an installation of Python 2.7 is required. To obtain the latest source, visit PyCorrFit at GitHub (\url{https://github.com/paulmueller/PyCorrFit}). PyCorrFit depends on the following Python modules:\\
+\texttt{\\
+python-matplotlib ($\geq$ 1.0.1) \\
+python-numpy ($\geq$ 1.5.1) \\
+python-scipy ($\geq$ 0.8.0) \\
+python-sympy ($\geq$ 0.7.2) \\
+python-yaml \\
+python-wxtools \\
+python-wxgtk2.8-dbg \\
+}
+\\
+For older versions of Ubuntu, some of the above package versions are not listed in the package repository. To enable the use of PyCorrFit on those systems, the following tasks have to be performed:
+\begin{itemize}
+\item[ ] \textbf{matplotlib}. The tukss-ppa includes version 1.0.1. After adding the repository (\texttt{apt-add-repository ppa:tukss/ppa}), matplotlib can be installed as usual.
+\item[ ] \textbf{numpy}. The package from a later version of Ubuntu can be installed: \url{https://launchpad.net/ubuntu/+source/python-numpy/}
+\item[ ] \textbf{scipy}. The package from a later version of Ubuntu can be installed: \url{https://launchpad.net/ubuntu/+source/python-scipy/}
+\item[ ] \textbf{sympy}. To enable importing external model functions, sympy is required. It is available from \url{http://code.google.com/p/sympy/downloads/list}. Unpacking the archive and executing \texttt{python setup.py install} within the unpacked directory will install sympy.
+\end{itemize}
+\end{itemize}
+Alternatively, \texttt{python-pip} (\url{http://pypi.python.org/pypi/pip}) can be used to install up-to-date Python modules.
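+For example (PyPI package names assumed here; the wxPython GUI toolkit is usually easier to install via the system's package manager): \texttt{pip install numpy scipy sympy matplotlib PyYAML}.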
+
+\noindent \textbf{\LaTeX}.
+PyCorrFit can save correlation curves as images using matplotlib. It is also possible to use \LaTeX{} to render these plots. On Windows, installing MiKTeX with ``automatic package download'' will enable this feature. On Mac OS X, the MacTeX distribution can be used. On other systems, LaTeX, dvipng, Ghostscript, and the scientific LaTeX packages \texttt{texlive-science} and \texttt{texlive-math-extra} need to be installed.
+
+\subsection{Running PyCorrFit}
+\label{sec:run}
+\paragraph*{Windows}
+Download the executable file and double-click on the \texttt{PyCorrFit.exe} icon.
+\paragraph*{Linux/Ubuntu}
+Make sure the binary has the executable bit set, then simply double-click the binary \texttt{PyCorrFit}.
+\paragraph*{Mac OS X}
+After downloading the archive \texttt{PyCorrFit.zip}, the binary should be extracted automatically (if not, extract the archive manually); double-click it to run PyCorrFit.
+\paragraph*{From source}
+Invoke \texttt{python PyCorrFit.py} from the command line.
+
+
+\section{Working with PyCorrFit}
+
+\subsection{Workflow}
+\label{cha_graphint}
+\label{sec:PyCorrFitUserInterface}
+
+The following chapter introduces the general idea of how to start and accomplish a fitting project. FCS experiments produce different sets of experimental correlation functions which must be interpreted with appropriate physical models. Each correlation function refers to a single contiguous signal trace or ``run''. In \textit{PyCorrFit}, the user must assign a mathematical model function to each correlation function during the loading procedure. The assignment is irreversible in the sen [...]
+
+Let's briefly discuss a typical example: To determine the diffusion coefficient of a fluorescently labeled protein in free solution, one has to deal with two sets of autocorrelation data: measurements of a diffusion standard (e.g. free dye for which a diffusion coefficient has been published) to calibrate the detection volume and measurements of the protein sample. The protein sample may contain small amounts of slowly diffusing aggregates. While the calibration measurements can be fitte [...]
+
+
+\begin{enumerate}
+\item Create separate sessions for each type of sample and assign different model functions.
+\item Assign a one-component model to the dye measurements and a two-component model to the protein measurements when loading consecutively into the same session.
+\item Assign a two-component model for all data and, when appropriate, manually inactivate one component by fixing its contribution to 0\%.
+\end{enumerate}
+
+
+The first approach is straightforward; however, it requires homogeneous diffusion behavior for each data set. The second strategy has the advantage that the dye and the protein curves, as well as the obtained parameters, can be visually compared during the fitting analysis within the same session. In this case, batch fitting is still possible because it discriminates data sets assigned to different models. In the third case, simultaneous batch fitting is also possible. However, for each d [...]
+
+The fitting itself is usually explored with a representative data set. Here, the user has to decide on starting parameters, the range in which they should be varied, corrections like background, and other fitting options. Once the fit looks good, the chosen settings can be transferred at once to all other pages assigned to the same model using the \textit{Batch control} tool (\hyref{Section}{sec:tm.bc}). After flipping through the data for visual inspection, one may check the parameters a [...]
+
+\subsection{The \textit{main window}}
+
+%\hyref{Figure}{fig:PyCorrFitMain} shows the main window of PyCorrFit. It contains a menu bar to access all tools, a notebook with tabs, each tab representing a single curve, and a page - the content of the currently selected tab. 
+
+
+When the program is started as described in \hyref{section}{sec:run}, the \textit{main window} opens, depending on the platform (Windows, Linux, Mac OS X) together with a system terminal. The window title bar contains the version of \textit{PyCorrFit} and, if a session was re-opened or saved, the name of the fitting session. A menu bar provides access to many supporting tools and additional information as thoroughly described in \hyref{Section}{sec:mb}. 
+
+There are three gateways for experimental data into a pre-existing or a new \textit{PyCorrFit} session (\textit{File / Load data}, \textit{File / Open session}, and \textit{Current page / Import data}). When a session has been opened or correlation data have been loaded, each correlation curve is displayed on a separate page of a notebook. For quick identification of the active data set, a tab specifies the page number, the correlated channels (AC/CC), and the run number in case there ar [...]
+
+\begin{figure}[h]
+\centering
+\includegraphics[width=\linewidth]{PyCorrFit_Screenshot_Main.png}
+ \mycaption{user interface of PyCorrFit}{A circular scanning FCS (CS-FCS) curve of DiO on a supported lipid bilayer (glass substrate) is shown. The measurement yields a diffusion coefficient of \SI{0.28}{\mu m^2s^{-1}} ($F1=1$, so only one component is fitted). Note that a 2D diffusion model is used and not a 3D model (as shown in \hyref{figure}{fig:extxt}). \label{fig:PyCorrFitMain}}
+\end{figure}
+
+The page containing a correlation function is divided into two halves. On the left hand side, the page shows a pile of boxes containing values or fitting options associated with the current model and data set: 
+
+
+
+\begin{itemize}
+\item \textit{Data set}, a unique identifier for each correlation curve which is automatically assembled from different fields during the loading procedure (\hyref{Section}{sec:fm.ld}). This field can also be edited manually, which allows renaming or flagging certain data during the fitting analysis. 
+\item \textit{Model parameters} displays the values which determine the current shape of the assigned model function. Initially, starting values are loaded as they were defined in the model description (\hyref{Section}{sec:fm.im}). Small buttons allow a stepwise increase or decrease in units of 1/10\textsuperscript{th}. It is also possible to enter values directly. A checkbox is used to set the parameter status to ``varied'' (checked) or ``fixed'' (unchecked) during the fitting. A [...]
+\item \textit{Amplitude corrections} applies additional rescaling to amplitude related parameters like the number of particles $n$ or fractions thereof associated with different correlation times ($n_1$, $n_2$, etc.). Experimental values of non-correlated background intensity can be manually entered for each channel. In addition, the correlation curves can be normalized to facilitate a visual comparison of their time dependence.
+\item \textit{Fitting options} offers weighted fitting. The underlying idea is that data points with higher accuracy should also have a higher impact on model parameters. To derive weights, \textit{PyCorrFit} calculates the variance of the difference between the actual data and a smooth, empirical representation of the curve for a certain neighborhood. The number of neighboring data points at each side ($j > 0$) can be set. For such a smooth representation, a 5-knot spline function or the m [...]
+\end{itemize}
+On the right hand side are two graphics windows. The dimensionless correlation functions $G(\tau)$ are plotted against the lag time ($\tau$) on a logarithmic scale. Below, a second window shows the residuals, the actual numerical difference between the correlation data and the model function. Fitting with appropriate models will scatter the residuals symmetrically around zero ($x$-axis). When weighted fitting was performed, the weighted residuals are shown. A good fit will not leave residu [...]
+
+The main window can be rescaled as a whole to improve data representation. In addition, to zoom in, one can drag a rectangle within the plot area; a double click then restores the initial scale. Experimental data points are linked by grey lines, the state of the model function is shown in blue. When a weighted fit was applied, the variance of the fit is calculated for each data point and displayed in cyan.
+
+\section{The menu bar}
+\label{sec:mb}
+
+PyCorrFit is organized in panels which group certain functions. The menu bar organizes data management (File), data analysis (Tools), display of correlation functions (Current Page), numerical examples (Model), software settings (Preferences), and software metadata (Help). The documentation refers to version 0.8.1.
+
+\subsection{File menu}
+\label{sec:fm}
+The File menu organizes the import of theoretical models, experimental correlation data, and the opening and saving of entire \textit{PyCorrFit} fitting sessions. However, the numerical fit results are exported from the \textit{Statistics view} panel, which can be found under \textit{Tools} (\hyref{Section}{sec:tm.sv}).
+
+\subsubsection{File / Import model}
+\label{sec:fm.im}
+Correlation data must be fitted to models describing the underlying physical processes which give rise to a particular time dependence and magnitude of the recorded signal fluctuations. Models are mathematical expressions containing parameters with physical meaning, such as the molecular brightness or the dwell time in the illuminated volume. While a number of standard functions are built in, the user can define new expressions. Some examples can be found at GitHub in the \textit{Py [...]
+
+Model functions are imported as text files (*.txt) using a defined syntax:
+
+\begin{itemize}
+\item \textbf{Encoding}: PyCorrFit can interpret the standard Unicode character set (UTF-8).
+\item \textbf{Comments}: Lines starting with a hash (\#), empty lines, or lines containing only white space characters are ignored. The only exception is the first line starting with a hash followed by a white space and a short name of the model. This line is evaluated to complement the list of models in the dialogue \textit{Choose model} when loading the data.
+\item \textbf{Units}: PyCorrFit works with internal units for:
+
+\begin{itemize}
+\item Time: \SI{1}{ms}
+\item Distance: \SI{100}{nm}
+\item Diffusion coefficient: \SI{10}{\mu m^2s^{-1}} 
+\item Inverse time: \SI{1000}{s^{-1}} 
+\item Inverse area: \SI{100}{\mu m^{-2}} 
+\item Inverse volume: \SI{1000}{\mu m^{-3}} 
+\end{itemize}
+\item \textbf{Parameters:} To define a new model function, new parameters can be introduced. Parameters are defined by a sequence of strings separated by white spaces, containing the name, the dimension in square brackets, the equal sign, and a starting value which appears in the main window for fitting. For example: D [50 µm\textsuperscript{2}s\textsuperscript{{}-1}] = 50.00. It is important to note that when the dimensions differ from the internal units (10 µm\textsuperscript{2}s\textsupers [...]
+\item \textbf{Placeholder:} When defining composite mathematical expressions for correlation functions, one can use placeholders. Placeholders start with a lowercase `g'. For example, the standard Gaussian 3D diffusion in free solution may be written as
+
+\begin{itemize}
+\item \texttt{gTrp = 1+ T/(1-T)*exp(-tau/tautrip)}
+\item \texttt{gTwoD = 1/(1+tau/taudiff)}
+\item \texttt{gThrD = 1/sqrt(1+tau/(taudiff*S**2))}
+\end{itemize}
+\end{itemize}
+The individual parts are then combined in the last line of the *.txt file, where the correlation function is defined starting with an uppercase `G':
+
+\begin{equation}
+\texttt{G = 1/n * gTrp * gTwoD * gThrD} \notag
+\end{equation}
+For a reference of mathematical operators, see for example \href{http://www.tutorialspoint.com/python/python_basic_operators.htm}{www.tutorialspoint.com / python / python\_basic\_operators.htm}. For a more complex example, see the model function for circular scanning FCS in \hyref{figure}{fig:extxt}. 
+
+
+\begin{figure}
+% for case sensitiver Verbatim, we need the package fancyvrb
+\begin{Verbatim}[frame = single]
+
+# CS-FCS 3D+S+T (Confocal)
+
+# Circular Scanning FCS model function. 3D diffusion + Triplet.
+
+## Definition of parameters:
+# First, the parameters and their starting values for the model function
+# need to be defined. If the parameter has a unit of measurement, then it 
+# may be added separated by a white space before the "=" sign. The starting
+# value should be a floating point number. Floating point abbreviations 
+# like "1e-3" instead of "0.001" may be used.
+
+# Diffusion coefficient
+D [10 µm²/s] = 200.0
+# Structural parameter
+w = 5.0
+# Waist of the lateral detection area
+a [100 nm] = 1.0
+# Particle number
+n = 5.0
+# Scan radius
+R [100 nm] = 5.0
+# Frequency
+f [kHz] = 20.0
+# Triplet fraction
+T = 0.1
+# Triplet time
+tautrip [ms] = 0.001
+
+# The user may wish to substitute certain parts of the correlation function
+# with other values to keep the formula simple. This can be done by using the
+# prefix "g". All common mathematical functions, such as "sqrt()" or "exp()"
+# may be used. For convenience, "pi" and "e" are available as well.
+
+gTrip = 1. + T/(1-T)*exp(-tau/tautrip)
+gScan = exp(-(R*sin(pi*f*tau))**2/(a**2+D*tau))
+gTwoD = 1./(1.+D*tau/a**2)
+gOneD = 1./sqrt(1.+D*tau/(w*a)**2)
+gThrD = gTwoD * gOneD
+
+# The final line with the correlation function should start with a "G"
+# before the "=" sign.
+
+G = 1./n * gThrD * gScan * gTrip
+
+\end{Verbatim}
+\mycaption{user defined model function for PyCorrFit}{The working example shows a model function for circular scanning FCS.\label{fig:extxt}}
+\end{figure}
+
+
+\subsubsection{File / Load data}
+\label{sec:fm.ld}
+\textit{Load data} is the first way to import multiple correlation data sets into a \textit{PyCorrFit} session. The supported file formats can be found in a drop-down list of file endings in the pop-up dialog \textit{Open data files}:
+
+
+\begin{enumerate}
+\item All supported files \ \ \ \ \ \ [default]
+\item Confocor3 (*.fcs)\ \ \ \ \ \ [AIM 4.2, ZEN 2010, Zeiss, Germany]
+\item Correlator ALV6000 (*.ASC)\ \ [ALV Laser GmbH, Langen, Germany]
+\item Correlator.com (*.SIN)\ \ \ \ [www.correlator.com, USA]
+\item Matlab `Ries' (*.mat)\ \ \ \ \ \ [EMBL Heidelberg, Germany]
+\item PyCorrFit (*.csv)\ \ \ \ \ \ [Paul Müller, TU Dresden, Germany]
+\item Zip files (*.zip)\ \ \ \ \ \ [Paul Müller, TU Dresden, Germany]
+\end{enumerate}
+While (1)-(4) are file formats associated with commercial hardware, (5) refers to a MATLAB-based FCS evaluation software developed by Jonas Ries in the Schwille lab at TU Dresden, and (6) is a text file containing comma-separated values (csv) generated with PyCorrFit via the command \textit{Current Page / Save data}. Zip files are automatically decompressed and can be imported when matching one of the above-mentioned formats. In particular, loading zip files is a way to re-import  [...]
+
+When loading data, the user is prompted to assign fit models in the \textit{Choose Models} dialogue window. There, curves are sorted according to channel (for example AC1, AC2, CC12, and CC21, as a typical outcome of a dual-color cross-correlation experiment). For each channel, a fit model must be selected from the list (see Section 3.4 and Appendix xxx).
+
+If a file format is not yet listed, the correlation data can be converted into a compatible text file (*.csv) or into bundles of *.csv files within a compressed *.zip archive. For reformatting, the following points should be considered (a sketch of a minimal file follows the list):
+
+
+\begin{itemize}
+\item \textbf{Encoding}: \textit{PyCorrFit} uses the standard Unicode character set (UTF-8). However, since no special characters are needed to save experimental data, other encodings may also work. New line characters are {\textbackslash}r{\textbackslash}n (Windows).
+\item \textbf{Comments}: Lines starting with a hash (\#), empty lines, or lines containing only white space characters are ignored. Exceptions are the keywords listed below.
+\item \textbf{Units}: PyCorrFit works with units/values for:
+
+\begin{itemize}
+\item Time: 1 ms
+\item Intensity: 1 kHz
+\item Amplitude offset: G(0) = 0 (not 1)
+\end{itemize}
+\item \textbf{Keywords:} \textit{PyCorrFit} reads the first two columns containing numerical values. The first (non-hashed) table is recognized as the correlation data, with the lag times in the first and the correlation values in the second column. (In case the *.csv file has been generated with \textit{PyCorrFit}, up to three additional columns containing the fit function are ignored, see Section 3.1.6.) The table ends when the keyword \# BEGIN TRACE appears. Below this line the tim [...]
+\item \textbf{Tags:} Channel information can be entered using a defined syntax in the header. The keyword \# Type AC/CC Autocorrelation [uppercase?] assigns the tag `AC' and \# Type AC/CC Cross-correlation assigns the tag `CC' to the correlation curve. These strings are consistently displayed in the user interface of the respective data page in \textit{PyCorrFit}. If no data type is specified, autocorrelation is assumed. Tags may be specified with additional information like channel numbers, [...]
+\end{itemize}
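+A minimal sketch of such a file is shown below. The numerical values are illustrative only, and the exact column separator may vary (\textit{PyCorrFit} reads the first two numerical columns); the keywords are those described in the list above:
+\begin{Verbatim}[frame = single]
+# Type AC/CC Autocorrelation
+# lag time [ms]    correlation G(tau)
+0.001   0.4922
+0.002   0.4856
+0.004   0.4699
+# BEGIN TRACE
+# time [s]    intensity [kHz]
+0.1   112.4
+0.2   109.8
+\end{Verbatim}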
+
+\subsubsection{File / Open session}
+\label{sec:fm.os}
+This command is the second way to import data into PyCorrFit. In contrast to \textit{Load data}, it opens an entire fitting project, which was previously saved with \textit{PyCorrFit}. Sessions are bundles of files named *.fcsfit-session.zip. Sessions contain comments, model-assigned correlation data, and the current state of parameters for each data page (Section 3.1.6).
+
+\subsubsection{File / Comment session}
+\label{sec:fm.cs}
+This command opens a window to place text messages that can be used to annotate a fitting session.
+
+\subsubsection{File / Clear session}
+\label{sec:cls}
+This command closes all pages while PyCorrFit keeps running. The user is prompted to save the session under the same or a different name. At this stage, both options \textit{No} and \textit{Cancel} lead to clearance and a potential loss of recent modifications.
+
+\subsubsection{File / Save session}
+\label{sec:fm.ss}
+In addition to displaying and fitting individual curves, a strong feature of PyCorrFit is the ability to save an entire fitting project as a single session. Sessions allow the user to revisit and explore different models, fitting strategies, and data sets. Importantly, the work can be saved at any stage.
+
+The number of files bundled in a session varies depending on the number of data sets (pages), the number of models used, and what was done during the fitting. A detailed description can be found in the Readme.txt file attached to each session. For example, the numerical correlation and intensity data are saved separately as *.csv text files. However, in contrast to the \textit{Save data (*.csv)} command of the \textit{Current Page} menu, there are no metadata in the header, just tables c [...]
+
+\subsubsection{File / Exit}
+\label{sec:fm.e}
+This command closes down \textit{PyCorrFit}. The user is prompted to save the session under the same or a different name. At this stage \textit{No} leads to the loss of recent changes, while \textit{Cancel} keeps \textit{PyCorrFit} running.
+
+\subsection{Tools menu}
+\label{sec:tm}
+The \textit{Tools} menu provides access to a series of accessory panels which extend the capability of the main window. These accessory panels can stay open during the entire analysis. Open panels appear checked in the menu. Most operations can be executed across the entire data set with a single mouse click. 
+
+\subsubsection{Tools / Data range}
+\label{sec:tm.dr}
+This panel limits the range of lag times which are displayed in the main window. At the same time, it defines the range of points which are used for fitting. For example, this feature can be applied to remove dominant after-pulsing of the avalanche photo diodes (APDs), which may interfere with triplet blinking at short lag times. The user can \textit{Apply} the channel settings only to the current page or \textit{Apply to all pages}. In contrast to \textit{Batch [...]
+
+Power users, who frequently load and remove data sets, may take advantage of a checkbox to fix the channel selection for all newly loaded data sets.
+
+\subsubsection{Tools / Overlay curves}
+\label{sec:tm.oc}
+This window displays the correlation data (not the fit curves) of all pages in a single plot. The curves can be discriminated by color. If only one curve is selected it appears in red. Curves with ambiguous shape can easily be identified, selected, and removed by clicking \textit{Apply}. A warning dialogue lists the pages which will be kept.
+
+Data representation is synchronized with the page display in the \textit{Main window}. For example, narrowing the range of lag times with \textit{Data range} is immediately reflected in the \textit{Overlay curves} tool. The same holds for the normalization of the amplitudes to unity.
+
+Conversely, some tools directly respond to the selections made in the \textit{Overlay curves} tool: \textit{Global fitting}, \textit{Average curves}, and \textit{Statistics view} allow operations to be performed on an arbitrary selection of pages which can be specified by page number. Instead of typing the numbers manually, the curves may be selected within the \textit{Overlay curves} tool. The respective input fields are immediately updated.
+
+The tool is closed with the \textit{Cancel} button; all the listed data sets are kept. The selections transferred to the \textit{Global fitting}, \textit{Average curves}, and \textit{Statistics view} tools are kept as well.
+
+\subsubsection{Tools / Batch control}
+\label{sec:tm.bc}
+By default the current page is taken as a reference to perform automated fitting. A batch is defined as the ensemble of correlation data sets (pages) assigned to the same model function within a session. A session can therefore have several batches, even for the same data. 
+
+For fitting, it is crucial to carefully define the starting parameters, whether parameters should be fixed or varied, the range of values which make physical sense, and other options offered within the \textit{Main window}. By executing \textit{Apply to applicable pages}, these settings are transferred to all other pages assigned to the same fit model. Note that this includes the range of lag times (lag time channels) which may have been changed with the \textit{Data range} tool for ind [...]
+
+The button \textit{Fit applicable pages} then performs several cycles of fitting [how many cycles?] on all pages of the same batch. Alternatively, the user can define an external source of parameters as a reference, i.e. the first page of some \textit{Other session} (*.fcsfit-session.zip). However, this assumes a consistent assignment of model functions.
+
+\subsubsection{Tools / Global fitting}
+\label{sec:tm.gf}
+Global fitting is useful when experimental curves share the same values for certain physical parameters. For example, due to physical constraints in two-focus FCS, both autocorrelation curves and the cross-correlation curves should adopt the same values for the diffusion time \textit{taudiff} and the number of particles \textit{n}. A global fit can be applied such that \textit{n} and \textit{taudiff} are identical for all data sets. All curves are added to a single array. In contrast to f [...]
+
+\subsubsection{Tools / Average data}
+\label{sec:tm.ad}
+Often in FCS, the measurement time at a particular spot is divided into several runs. This approach is taken when occasional, global intensity changes are superimposed on the molecular fluctuations of interest. Then the user has to sort out the bad runs. After fitting, one may want to re-combine the data to export a cleaned, averaged correlation function. This can be done with the tool \textit{Average data}, for which a subset of curves has to be selected by typing the numbers into the inp [...]
+
+For averaging, there are constraints:
+
+
+\begin{enumerate}
+\item Since the correlation curves are averaged point by point, all curves must have the same number of lag time channels. Runs of different length cannot be averaged.
+\item The tool can only average data sets which are exclusively autocorrelation or cross-correlation.
+\item The user can check a box to restrict the selection to data sets with the same model as the current page. This may help to avoid mistakes when selecting pages.
+\end{enumerate}
+The averaged curve is shown on a separate page. The new \textit{Filename/title} receives the entry \textit{Average [numbers of pages]}. The assigned model is by default the same as for the individual pages. However, while averaging, the user can choose a different model from a drop-down list. 
+
+\subsubsection{Tools / Trace view}
+\label{sec:tm.tv}
+FCS theory makes assumptions about the thermodynamic state of the system. Signal fluctuations can only be analyzed when the system is at equilibrium or at a sufficiently stable steady state. Global instabilities on the time scale of the measurement itself, e.g. photo-bleaching, have a dramatic effect on the shape of the measured correlation curve. Therefore, it is common practice to check the correlated intensity trace for each curve. Trace view simply displays the signal trace for each cor [...]
+
+\subsubsection{Tools / Statistics view}
+\label{sec:tm.sv}
+The goal of a correlation analysis is to determine experimental parameter values with sufficient statistical significance. However, especially for large data sets, it can get quite laborious to check all of the individual values on each page. We designed the \textit{Statistics view} panel to review the state of parameters across the experimental batch (pages assigned to the same model) in a single plot, thereby facilitating the identification of outliers.
+
+The current page is taken as a reference for the type of model parameters which can be displayed. The user can choose different \textit{Plot parameters} from a drop-down list. A subset of pages within the batch can be explicitly defined by typing the page numbers into the input field or by highlighting in the \textit{Overlay curves} tool. Note that page numbers which refer to different models than the current page are ignored. 
+
+The \textit{Statistics view} panel contains a separate \textit{Export} box, where parameters can be selected (checked) and saved as a comma separated text file (*.csv). Only selected page numbers are included.
+
+\subsubsection{Tools / Page info}
+\label{sec:tm.pi}
+\textit{Page info} is the most verbose summary of a data set. The panel is synchronized with the current page. The following fields are listed:
+
+
+\begin{enumerate}
+\item Version of PyCorrFit
+\item Field values from the main window (filename/title, model specifications, page number, type of correlation, normalizations)
+\item Actual parameter values (as contained in the model function)
+\item Supplementary parameters (intensity, counts per particle, duration, etc.)
+\item Fitting-related information (Chi-square, channel selection, varied fit parameters).
+\item Model doc string (Section 3.1)
+\end{enumerate}
+The content of Page info is saved as a header when exporting correlation functions via the command \textit{Current page / Save data (*.csv)} (Section 3.3.2).
+
+\subsubsection{Tools / Slider simulation}
+\label{sec:tm.ss}
+This tool visualizes the impact of model parameters on the shape of the model function of the current page. Such insight may be useful for choosing proper starting values for fitting or for developing new model functions. For example, in case two parameters trade off against each other during fitting, one may explore to what extent changes in both values produce similar trends.
+
+Two variables (A and B) have to be assigned from a drop-down list of parameters associated with the current model function. For each of these, the \textit{Slider simulation} panel initially shows the starting value (x) at the middle position of a certain range (from 0.1*x to 1.9*x). The accessible range can be edited manually, and the actual value of the slider position is displayed on the right hand side of the panel. Dragging the slider to lower (left) or higher (right) values changes the [...]
+
+In addition, the variables A and B can be linked by a mathematical relation. For this, a mathematical operator can be selected from a small list and the option \textit{Fix relation} must be checked. Then, the variable B appears inactivated (greyed out) and the new variable combining values for A and B can be explored by dragging.
+
+\subsection{Current Page}
+\label{sec:cp}
+This menu compiles import and export operations referring exclusively to the active page in the main window. 
+
+\subsubsection{Current Page / Import Data}
+\label{sec:cp.id}
+This command is the third way to import data into a pre-existing session. Single files containing correlation data can be imported as long as they have the right format (Section 3.1.2). In contrast to \textit{Load data} from the \textit{File} menu, the model assignment and the state of the parameters remain. The purpose of this command is to compare different data sets to the very same model function for given parameter values. After successful import, the previous correlation data of [...]
+
+To avoid this loss, one could first generate a new page via the menu \textit{Models} (Section 3.4), select a model function, and import data there. This is also a way to assign the very same data to different models within the same session.
+
+\subsubsection{Current Page / Save data (*.csv)}
+\label{sec:cp.sd}
+For documentation with the graphics software of choice, correlation curves can be exported as a comma-separated table. A saved \textit{PyCorrFit} text file (*.csv) will contain a hashed header with metadata from the \textit{Page info} panel (Section 3.2.8), followed by the correlation and fitting values in tab-separated columns: \textit{Channel (tau [s])}, \textit{Experimental correlation}, \textit{Fitted correlation}, \textit{Residuals}, and \textit{Weights (fit)}. 
+
+Below the columns, there are again 5 rows of hashed comments followed by the intensity data in two columns: \textit{Time [s]} and \textit{Intensity trace [kHz]}. Note that there are no assemblies of “multiple runs”, since \textit{PyCorrFit} treats these as individual correlation functions. A *.csv file therefore contains only a single fitted correlation curve and one intensity trace for autocorrelation or two intensity traces for cross-correlation.
+
+\subsubsection{Current Page / Save correlation as image}
+\label{sec:cp.sc}
+For quick documentation, the correlation curve can be exported as a compressed bitmap (*.png). The plot contains a legend and the actual values and errors of the varied parameters, but not the fixed parameters. Note that on Windows, the variable tau cannot be displayed as a Unicode character.
+
+\subsubsection{Current Page / Save trace view as image}
+\label{sec:cp.st}
+For quick documentation, the intensity trace from the \textit{Trace view} panel can be exported as a compressed bitmap (*.png). 
+
+\subsubsection{Current Page / Close page}
+\label{sec:cp.cp}
+Closes the page; the data set is removed from the session. The page numbers of all other pages remain the same. The command is equivalent to the close button (x) in the tab. 
+
+\subsection{Models}
+
+When choosing a model from the \textit{Models} menu, a new page opens and the model function is plotted according to the set of starting values for the parameters as defined in the model description. The list contains all of the implemented model functions, which can be selected during \textit{File / Load data}. The parameters can be manipulated to explore different shapes; the tool \textit{Slider simulation} can also be used. Via \textit{Current page / Import data}, the model may  [...]
+
+Standard model functions for a confocal setup are:
+
+Confocal (Gaussian): 3D \ \ \ \ \ \ [Free diffusion in three dimensions]
+
+Confocal (Gaussian): T-3D \ \ \ \ \ \ [Triplet blinking and 3D diffusion]
+
+Confocal (Gaussian): T-3D-3D \ \ \ \ [Triplet with two diffusive components]
+
+Confocal (Gaussian): T-3D-3D-3D \ \ \ \ [Triplet with three diffusive components]
+
+Confocal (Gaussian): 2D \ \ \ \ \ \ [2D diffusion, e.g. in membranes]
+
+Confocal (Gaussian): T-2D \ \ \ \ \ \ [Triplet blinking and 2D diffusion]
+
+Confocal (Gaussian): T-2D-2D\ \ \ \ [Triplet with two diffusive components]
+
+Confocal (Gaussian): T-3D-2D \ \ \ \ [Triplet with mixed 3D and 2D diffusion]
+
+There is also a collection of models for FCS setups with TIR excitation:
+
+TIR (Gaussian/Exp.): 3D\ \ \ \ \ \ [3D diffusion]
+
+TIR (Gaussian/Exp.): T-3D-3D\ \ \ \ [Triplet with two diffusive components]
+
+TIR (Gaussian/Exp.): T-3D-2D\ \ \ \ [Triplet with mixed 3D and 2D diffusion]
+
+… 
+
+In addition, there may be user-defined model functions which have previously been imported via \textit{File / Import model} (Section 3.1.1).
+
+\subsection{Preferences}
+
+The preference menu is still short. If the user has a TeX distribution (e.g. MiKTeX for Windows) installed, checking the \textit{Latex} option will open a separate, TeX-formatted panel (\textit{Figure1}) via the \textit{Current page / Save […] as image} commands. The \textit{Figure1} panel contains some interactive options for display. From there, in a second step, the image can be exported as *.png.
+
+\textit{Verbose} generates a plot showing the spline function used for calculating the weights for each data point when performing a weighted fit. If \textit{Latex} is active, this plot will also be rendered with TeX. For obvious reasons, such a plot is not generated when using the iteratively improved \textit{Model function} or the actual \textit{Average} correlation curve for weighted fitting.
+
+Checking the option \textit{Show weights} will produce two lines showing the weights for each data point of the correlation function in the plot, as well as in the exported image. Note that the weights are always exported when using the \textit{Save data (*.csv)} command from the \textit{Current page} menu.
+
+\subsection{Help}
+
+The help menu mainly provides additional information. \textit{Software used} lists the exact version of \textit{Python} used to build the executable software. \textit{About} gives information about the participating developers, the license, and the documentation writers. \textit{Update} establishes a link to the GitHub website to check for a new release; it also provides a few web links associated with PyCorrFit. Finally, \textit{Shell} specifies … ???.
+
+\textit{Documentation} downloads this software guide from the GitHub website (PyCorrFit\_doc-1.pdf).
+
+\section{Hacker's corner}
+
+Additionally, new file formats can be implemented by extending the readfiles module of \textit{PyCorrFit}. First, edit the code of \_\_init\_\_.py and then add the script read\_FileFormat.py to the \textit{PyCorrFit} library. [to which library?, more info? Remove this paragraph?] 
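+A minimal sketch of the general shape of such a reader follows; the function name, its signature, and the keys of the returned dictionary are assumptions for illustration and must be checked against the existing readers in the readfiles module:
+\begin{Verbatim}[frame = single]
+# read_FileFormat.py -- hypothetical reader for a two-column text format
+import os
+import numpy as np
+
+def openFileFormat(dirname, filename):
+    lag = []      # lag times [ms]
+    corr = []     # correlation G(tau)
+    with open(os.path.join(dirname, filename)) as fd:
+        for line in fd:
+            if line.startswith("#") or not line.strip():
+                continue  # skip comments and empty lines
+            columns = line.split()
+            lag.append(float(columns[0]))
+            corr.append(float(columns[1]))
+    data = np.array([lag, corr]).T
+    # Keys assumed to mimic the built-in readers -- verify against
+    # the readfiles module before use.
+    return {"Correlation": [data],
+            "Trace": [None],
+            "Type": ["AC"],
+            "Filename": [filename]}
+\end{Verbatim}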
+
+External models will be imported with internal model function IDs starting at 7000. Models are checked upon import by the Python module sympy. If the import fails, it might be due to a syntax error or to an error within sympy, since this module is still under development. 
+
+
+
+\section{Theoretical background}
+
+
+\subsection{Derivation of FCS model functions}
+This section introduces the calculation of FCS model functions. It supplies some background information and points out general properties of correlation functions.
+	
+	\subsubsection{General Autocorrelation function for a single species}
+	FCS model functions describe how the signal $F(t)$, emitted from a certain observation volume, correlates with its own past (autocorrelation) or with some other signal (cross-correlation). The autocorrelation $G(\tau)$ of a signal $F(t)$ is computed as follows:
+	\newline
+	\newline
+	%\fbox{ {
+	\begin{minipage}{\textwidth}
+	%\textbf{Mathematical foundation - Autocorrelation function:}
+	\begin{equation}
+	G(\tau) = \frac{\langle \delta F(t) \delta F(t+\tau) \rangle}{\langle F(t) \rangle^2} = \frac{g(\tau)}{\langle F(t) \rangle^2}.
+	\end{equation}
+	\begin{itemize} \small
+	\item[$G(\tau)$] normalized autocorrelation curve
+	\item[$\tau$] lag time
+	\item[$\langle F \rangle$] the expectation value of $F(t)$. Applying the ergodic theorem, this can be rewritten as the time average \[ \langle F(t) \rangle = \lim_{T \rightarrow \infty}\frac{1}{T} \int_0^T F(t) \mathrm{d}t. \]	
+	\item[$\delta F(t)$] $= F(t) - \langle F(t) \rangle$ fluctuation of the fluorescence signal
+	\item[$g(\tau)$] non-normalized autocorrelation curve
+	\end{itemize}
+	\end{minipage}
+	%} 
+	%}
+	\newline
+	\newline
+	\newline
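+	The definition can be illustrated numerically. The following sketch (plain \texttt{numpy}; not part of PyCorrFit, which is not a correlator itself) estimates $G(\tau)$ for a signal sampled at constant time steps; multiple-$\tau$ binning, as used by hardware correlators, is omitted for clarity:
+\begin{Verbatim}[frame = single]
+import numpy as np
+
+def autocorrelate(F, max_lag):
+    """Estimate G(tau) for integer lags 1 .. max_lag."""
+    F = np.asarray(F, dtype=float)
+    dF = F - F.mean()                    # delta F(t)
+    G = np.empty(max_lag)
+    for k in range(1, max_lag + 1):
+        # <dF(t) dF(t+tau)> / <F(t)>^2
+        G[k - 1] = np.mean(dF[:-k] * dF[k:]) / F.mean() ** 2
+    return G
+
+np.random.seed(0)
+F = 10 + np.random.randn(100000)         # noisy steady-state signal
+print(autocorrelate(F, 5))               # ~0 for uncorrelated noise
+\end{Verbatim}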
+	The fluorescence signal is dependent on the size and shape of the detection volume (e.g. Gaussian shaped for confocal setups or exponentially decaying for TIRF setups), on the propagator of the diffusing dye (free diffusion, diffusion with flow, etc.), and on the brightness and concentration of the dye under observation\cite{Burkhardt2010}.  \\
+	\newline
+	%\fbox{ {
+	\begin{minipage}{\textwidth}
+	%\textbf{General Correlation function for a single species:}
+	\begin{equation}
+	G(\tau) = \frac{  q^2 C \int \! \mathrm{d}^3 r \int \! \mathrm{d}^3 r'  \, \Omega(\mathbf{r})\Phi(\mathbf{r}, \mathbf{r'}, \tau) \Omega(\mathbf{r'})  }{\langle F(t) \rangle^2}
+	\end{equation}
+	\begin{itemize} \small
+	\item[$q$] molecular brightness, dependent on the excitation intensity, the quantum yield (i.e. the emission properties and absorption cross sections of the dye), and the detection efficiency of the instrument.
+	\item[$\Omega$] 3D molecule detection function, dependent on the shape of the pinholes used for detection and the excitation laser profile, i.e. the point spread function (PSF).
+	\item[$\Phi$] diffusion propagator. The distribution of dyes in a liquid follows Fick's laws of diffusion. For free diffusion, this is a simple Gaussian distribution.
+	\item[$F$] fluorescence signal of the sample. It is defined as
+	\[ F(t) = q \int \! \mathrm{d}^3 r \, \Omega(\mathbf{r}) c(\mathbf{r}, t) \] with $c(\mathbf{r}, t)$ being dye distribution (particle concentration) inside the detection volume.
+		\item[$C$] average concentration of the dye following the dynamics of the propagator $\Phi$. Using the ergodic hypothesis and assuming a normalized molecule detection function (${V_\mathrm{eff} = \int \!\! d^3r \, \Omega(\mathbf{r}) = 1}$), the concentration computes to $ C = \langle F(t) \rangle / q$.
+	\end{itemize}
+	\end{minipage}
+	%} 
+	%}
+	
+	
+\subsubsection{General Autocorrelation function for multiple species}
+%Most experiments do not only include a single species of fluorescent dye. When considering a three dimensional detection volume with a freely diffusing dye, adding a lipid bilayer with a different fluorescent dye (diffusing in two dimensions inside the bilayer) will result in two distinct contributions to the fluorescence signal, namely 2D diffusion and 3D diffusion. For $n$ different species inside the detection volume, the autocorrelation function becomes:
+Most experiments include particles with more than one dynamic property. Labeled particles may have different sizes, or the temporal dynamics may include a triplet term. For $n$ different species inside the detection volume, the autocorrelation function becomes:
+	\newline
+	\newline
+	%\fbox{ {
+	\begin{minipage}{\textwidth}
+	%\textbf{General Correlation function for n species:}
+	\begin{equation}
+	G(\tau) = \frac{g(\tau)}{\langle F(t) \rangle^2} =  \frac{\sum_{i=1}^n \sum_{j=1}^n g_{ij}(\tau)}{\langle F(t) \rangle^2}
+	\end{equation}
+	\begin{equation}
+	g_{ij}(\tau) = q_i q_j \int \! \mathrm{d}^3 r \int \! \mathrm{d}^3 r'  \, \Omega(\mathbf{r})\Phi_{ij}(\mathbf{r}, \mathbf{r'}, \tau) \Omega(\mathbf{r'})  
+	\end{equation}
+	\begin{itemize} \small
+	\item[$g(\tau)$] non-normalized correlation function
+	\item[$g_{ij}(\tau)$] non-normalized cross-correlation between two species $i$ and $j$. For $n$ species, $i,j \in [1,...,n]$.
+	\item[$q_i$] molecular brightness of species $i$
+	\item[$\Omega$] 3D molecule detection function
+	\item[$\Phi_{ij}$] diffusion propagator computed from species $i$ with species $j$. If species $i$ and $j$ are independently diffusing, then $\Phi_{ij}$ is zero. 
+	$ C_{ij} \Phi_{ij}(\mathbf{r}, \mathbf{r'}, \tau) = \, \langle \delta c_i(\mathbf{r},0) \delta c_j(\mathbf{r'}, \tau) \rangle $ 
+	\item[$C_{ij}$] average concentration of objects following the dynamics of $\Phi_{ij}$. If $i=j$, $C_{ii}=C_i$ is the concentration of the dye $i$.
+	\end{itemize}
+	\end{minipage}
+	%} 
+	%}
+	\newline
+	\newline
+	If the propagators $\Phi_{ij}(x,y,z; x',y',z'; \tau)$ and the molecule detection function $\Omega(x,y,z)$ factorize into an axial ($z$) and a lateral ($x,y$) part, so will $g_{ij}(\tau)$:
+	\begin{equation}
+	g_{ij}(\tau) = q_i q_j \cdot g_{ij,z}(\tau) \cdot g_{ij,xy}(\tau)
+	\end{equation}
+	Following the example with a freely diffusing species $A$ and a laterally diffusing species $B$ inside a membrane at $z = z_0$, it can be concluded:
+	\begin{eqnarray*}
+	g_{AA}(\tau) = && q_A^2 \cdot g_{AA,z}(\tau) \cdot g_{AA,xy}(\tau) \\
+	g_{BB}(\tau) = && q_B^2 \cdot g_{BB,z_0}(\tau) \cdot g_{BB,xy}(\tau) \\
+	g_{AB}(\tau) = g_{BA} (\tau) = && q_A q_B \cdot g_{AB,z}(\tau) \cdot g_{AB,xy}(\tau)  \\
+	g(\tau) = && g_{AA}(\tau) + 2 g_{AB}(\tau) + g_{BB}(\tau)
+	\end{eqnarray*}
+	To obtain the normalized autocorrelation function, the average $\langle F(t) \rangle$ has to be calculated:
+	\begin{eqnarray*}
+	F(t) = && \sum_{i=1}^n F_i(t) \\
+	F_A(t) = && q_A \int \! \mathrm{d}^3 r \, \Omega(\mathbf{r}) C_A(\mathbf{r}, t) \\
+	F_B(t) = && q_B \int \! \mathrm{d}x \! \int \! \mathrm{d}y \, \Omega(x,y,z=z_0) C_B(x,y, t)  \\
+	\langle F(t) \rangle = && \langle F_A(t) \rangle + \langle F_B(t) \rangle
+	\end{eqnarray*}
+	Note that $C_B$ is a 2D concentration, whereas $C_A$ is a 3D concentration. Since there is no correlation between the two independently diffusing species $A$ and $B$, $g_{AB}(\tau)$ is zero. The normalized autocorrelation curve may now be calculated as follows:
+	\begin{eqnarray*}
+	G(\tau) = && \frac{g(\tau)}{\langle F(t) \rangle^2} \\
+	G(\tau) = && \frac{g_{AA}(\tau) + g_{BB}(\tau)}{(\langle F_A(t) \rangle + \langle F_B(t) \rangle)^2} \\
+	\end{eqnarray*}
+
+	\subsubsection{Cross-correlation}
+	Cross-correlation is a generalization of autocorrelation. Cross-correlation functions are derived in the same manner as autocorrelation functions. Here, signals recorded in two detection channels are cross-correlated to obtain the correlation function.
+	\begin{equation}
+	G_{XY}(\tau) = \frac{\langle \delta F_X(t) \delta F_Y(t+\tau) \rangle}{\langle F_X(t) \rangle \langle F_Y(t) \rangle}
+	\end{equation}
+A cross-correlation analysis of two species labeled by two types of dyes observed in two corresponding detection channels can be used for binding assays. Only complexes giving simultaneous signal in both channels contribute to the cross-correlation amplitude. Thus, a finite cross-correlation amplitude indicates co-diffusion.
+	
+   \subsubsection{Extension of the theory}
+	By modifying the propagator $\Phi$ and the detection volume $\Omega$, other effects, like triplet blinking or binding reactions, can be quantified. In many cases, analytical solutions to the above integrals are not straightforward and approximations have to be made. For example, the Gaussian shaped detection profile in confocal FCS is already an approximation. However, deviations from the true results are considered to be small \cite{Zhang2007}. \hyref{Section}{sec:mdls} introduces sever [...]
+
+
+\subsection{Non-linear least-squares fit}
+\label{cha:PyCorFit_leastsq}
+PyCorrFit uses the non-linear least-squares fitting capabilities from \texttt{scipy.optimize}. This package utilizes the Levenberg–Marquardt algorithm to minimize the sum of the squares. More information on this topic can be obtained from the online documentation of \texttt{leastsq}\footnote{\url{http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.leastsq.html##scipy.optimize.leastsq}}. 
+One can define a distance $d(G,H)$ between two discrete functions $G$ and $H$ with the discrete domain of definition $\tau_1 \dots \tau_n$ as the sum of squares:
+\begin{equation}
+d(G,H) = \sum_{i=1}^n \left[ G(\tau_i) - H(\tau_i) \right]^2
+\end{equation}
+The least-squares method minimizes this distance between the model function $G$ and the experimental values $H$ by modifying $k$ additional fitting parameters $\alpha_1, \dots, \alpha_k$:
+\begin{equation}
+\chi^2 = \min_{\alpha_1, \dots, \alpha_k} \sum_{i=1}^n \left[ G(\tau_i,\alpha_1, \dots, \alpha_k) - H(\tau_i) \right]^2
+\end{equation}
+The minimum distance $\chi^2$ is used to characterize the success of a fit. Note, that if the number of fitting parameters $k$ becomes too large, multiple values for $\chi^2$ can be found, depending on the starting values of the $k$ parameters.
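+As an illustration (a minimal sketch, not PyCorrFit's actual fitting code; the synthetic data and starting values are chosen arbitrarily), the 2D diffusion model $G(\tau) = \frac{1}{N}(1+\tau/\tau_\mathrm{diff})^{-1}$ can be fitted with \texttt{scipy.optimize.leastsq} like this:
+\begin{Verbatim}[frame = single]
+import numpy as np
+from scipy.optimize import leastsq
+
+def model(params, tau):
+    n, taudiff = params
+    return 1.0 / n / (1.0 + tau / taudiff)
+
+def residuals(params, tau, H):
+    return model(params, tau) - H        # G(tau_i) - H(tau_i)
+
+tau = np.logspace(-3, 2, 200)            # lag times [ms]
+np.random.seed(1)
+H = model((4.0, 0.5), tau) + 1e-3 * np.random.randn(tau.size)
+
+p0 = (1.0, 1.0)                          # starting values matter!
+popt, ier = leastsq(residuals, p0, args=(tau, H))
+chi2 = np.sum(residuals(popt, tau, H) ** 2)
+print(popt, chi2)
+\end{Verbatim}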
+
+
+\subsection{Weighted fitting}
+In certain cases, it is useful to introduce weights (standard deviations) $\sigma_i$ for the calculation of $\chi^2$. For example, very noisy parts of a correlation curve can distort the resulting fit. In PyCorrFit, weighting is implemented as follows:
+\begin{equation}
+\chi^2_\mathrm{weighted} = \min_{\alpha_1, \dots, \alpha_k} \sum_{i=1}^n  \frac{\left[ G(\tau_i,\alpha_1, \dots, \alpha_k) - H(\tau_i) \right]^2}{\sigma_i^2}
+\end{equation}
+PyCorrFit is able to calculate the weights $\sigma_i$ from the experimental data. The different approaches of this calculation of weights implemented in PyCorrFit are explained in \hyref{section}{cha_graphint}.
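+Continuing the sketch above, weighting amounts to dividing the residuals by the standard deviations \texttt{sigma} (here a hypothetical array) before \texttt{leastsq} squares and sums them:
+\begin{verbatim}
+def weighted_residuals(params, tau, data, sigma):
+    # leastsq squares and sums these terms, yielding chi^2_weighted
+    N, taudiff = params
+    return (model(tau, N, taudiff) - data) / sigma
+\end{verbatim}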
+
+
+\input{PyCorrFit_doc_models}
diff --git a/doc-src/PyCorrFit_doc_models.tex b/doc-src/PyCorrFit_doc_models.tex
new file mode 100644
index 0000000..9de3505
--- /dev/null
+++ b/doc-src/PyCorrFit_doc_models.tex
@@ -0,0 +1,391 @@
+\subsection{Implemented model functions}
+\label{sec:mdls}
+This is an overview of all the model functions that are currently\footnote{\today} implemented in PyCorrFit. To each model a unique model ID is assigned by PyCorrFit. Most of the following information is also accessible from within PyCorrFit using the \textbf{Page info} tool.
+
+\subsubsection{Confocal FCS}
+The confocal detection volume with the structural parameter 
+\begin{align}
+\mathit{SP}= \frac{z_0}{r_0}
+\end{align}
+has an effective size of
+\begin{align}
+V = \pi^{3/2} r_0^2 z_0
+\end{align}
+where $r_0$ is its lateral and $z_0$ its axial extension (in the case of 3D diffusion). Thus, the effective number of particles is defined as
+\begin{align}
+N = C V
+\end{align}
+with the concentration $C$ given implicitly in the model functions.
+The diffusion coefficient is calculated from the diffusion time $\tau_\mathrm{diff}$ using
+\begin{align}
+D = \frac{1}{4 \tau_\mathrm{diff}} \left( \frac{z_0}{\mathit{SP}} \right)^2 = \frac{r_0^2}{4 \tau_\mathrm{diff}}.
+\end{align}
+The parameters in the equation above need to be calibrated to obtain the diffusion coefficient. Usually, a reference dye with a known diffusion coefficient is used to determine the lateral extension $r_0$ of the detection volume, with the structural parameter fixed at e.g. $\mathit{SP}=4$.\\
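+For instance, assuming a calibration dye with a known diffusion coefficient of $D = 400\,\upmu\mathrm{m}^2/\mathrm{s}$ and a measured diffusion time of $\tau_\mathrm{diff} = 25\,\upmu\mathrm{s}$ (hypothetical values), the lateral extension evaluates to
+\begin{align}
+r_0 = \sqrt{4 D \tau_\mathrm{diff}} = \sqrt{4 \cdot 400\,\upmu\mathrm{m}^2/\mathrm{s} \cdot 25\,\upmu\mathrm{s}} = 0.2\,\upmu\mathrm{m}.
+\end{align}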
+\vspace{2em}
+
+
+% 2D diffusion
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{2D (Gauß)} \\ 
+ID & \textbf{6001} \\ 
+Descr. &  Two-dimensional diffusion with a Gaussian laser profile\cite{Aragon1976, Qian1991, Rigler1993}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})}
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal area \\ 
+$\tau_\mathrm{diff}$ &   Characteristic residence time in confocal area \\
+\end{tabular} \\
+\end{center}
+\vspace{2em}
+
+
+% 2D diffusion + triplett
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{2D+T (Gauß)} \\ 
+ID & \textbf{6002} \\ 
+Descr. &  Two-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Aragon1976, Qian1991, Rigler1993,Widengren1994, Widengren1995, Haupts1998}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})}  \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal area \\ 
+$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal area \\ 
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 3D diffusion
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D (Gauß)} \\ 
+ID & \textbf{6012} \\ 
+Descr. &  Three-dimensional free diffusion with a Gaussian laser profile (elliptical)\cite{Aragon1976, Qian1991, Rigler1993}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})} \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{diff})}}
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal volume \\ 
+$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal volume \\ 
+$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 3D diffusion + triplet
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+T (Gauß)} \\ 
+ID & \textbf{6011} \\ 
+Descr. &  Three-dimensional free diffusion with a Gaussian laser profile (elliptical), including a triplet component\cite{Widengren1994, Widengren1995, Haupts1998}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})} \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{diff})}} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal volume \\ 
+$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal volume \\ 
+$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet \\
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 2D+2D diffusion + triplett
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{2D+2D+T (Gauß)} \\ 
+ID & \textbf{6031} \\ 
+Descr. &  Two-component, two-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = A_0 + \frac{1}{N (F + \alpha (1-F))^2} \left[ \frac{F}{1+\tau/\tau_1} + \alpha^2 \frac{1-F}{ 1+\tau/\tau_2 } \right] \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right) 
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal area ($N = N_1+N_2$) \\ 
+$\tau_1$ &  Diffusion time of particle species 1 \\ 
+$\tau_2$ &  Diffusion time of particle species 2 \\ 
+$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
+$\alpha$ & Relative molecular brightness of particles 1 and 2 ($ \alpha = q_2/q_1$) \\
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 3D+2D diffusion + triplett
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+2D+T (Gauß)} \\ 
+ID & \textbf{6032} \\ 
+Descr. &  Two-component, two- and three-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = A_0 + \frac{1}{N (1 - F + \alpha F)^2} \left[ \frac{1-F}{1+\tau/\tau_\mathrm{2D}} + \frac{ \alpha^2 F}{ (1+\tau/\tau_\mathrm{3D}) } \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{3D})}} \right] \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right) 
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal volume ($N = N_\mathrm{2D}+N_\mathrm{3D}$) \\ 
+$\tau_\mathrm{2D}$ &  Diffusion time of surface bound particles \\ 
+$\tau_\mathrm{3D}$ &  Diffusion time of freely diffusing particles \\ 
+$F$ & Fraction of molecules of the freely diffusing species ($N_\mathrm{3D} = F N$) \\
+$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_\mathrm{3D}/q_\mathrm{2D}$) \\
+$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 3D+3D diffusion + triplett
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+3D+T (Gauß)} \\ 
+ID & \textbf{6030} \\ 
+Descr. &  Two-component three-dimensional free diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) &= A_0 + \frac{1}{N (F + \alpha (1-F))^2}  \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
+\notag &\times  \left[ \frac{F}{(1+\tau/\tau_1)}  \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_1)}} + \alpha^2 \frac{1-F}{ (1+\tau/\tau_2) }  \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_2)}} \right]
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal volume ($N = N_1+N_2$) \\ 
+$\tau_1$ &  Diffusion time of particle species 1 \\ 
+$\tau_2$ &  Diffusion time of particle species 2 \\ 
+$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
+$\alpha$ & Relative molecular brightness of particles 1 and 2 ($ \alpha = q_2/q_1$) \\
+$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+
+\subsubsection{Confocal TIR-FCS}
+The detection volume is axially confined by an evanescent field and has an effective size of
+\begin{align}
+V = \pi R_0^2 d_\mathrm{eva}
+\end{align} 
+where $R_0$ is the lateral extent of the detection volume and $d_\mathrm{eva}$ is the evanescent field depth\footnote{Where the field has decayed to $1/e$}. From the concentration $C$, the effective number of particles is $N=CV$.
+The decay constant $\kappa$ is the inverse of the depth $d_\mathrm{eva}$ :
+\begin{align}
+d_\mathrm{eva} = \frac{1}{\kappa}
+\end{align} 
+The model functions make use of the Faddeeva function (complex error function)\footnote{In user-defined model functions, the Faddeeva function is accessible through \texttt{wofz()}. For convenience, the function \texttt{wixi()} can be used, which takes only $\xi$ as an argument so that the imaginary unit $i$ can be omitted.}:
+\begin{align}
+w\!(i\xi) &= e^{\xi^2} \mathrm{erfc}(\xi) \\
+\notag &= e^{\xi^2} \cdot  \frac{2}{\sqrt{\pi}} \int_\xi^\infty \mathrm{e}^{-\alpha^2} \mathrm{d}\alpha \label{eq:faddeeva}
+\end{align} 
+The lateral detection area has the same shape as in confocal FCS. Thus, the correlation functions for two-dimensional diffusion from the confocal case apply and are not repeated here. \\
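+As an illustration (a sketch under the assumption that NumPy and SciPy are available; not taken from the PyCorrFit sources), the relation between \texttt{wixi()} and SciPy's \texttt{wofz()} can be expressed as:
+\begin{verbatim}
+import numpy as np
+from scipy.special import wofz
+
+def wixi(x):
+    # w(i*xi) = exp(xi^2) * erfc(xi); real-valued for real xi
+    return np.real(wofz(1j * np.asarray(x)))
+\end{verbatim}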
+\vspace{2em}
+
+
+% 3D diffusion (Gauß/exp)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D (Gauß/exp)} \\ 
+ID & \textbf{6013} \\ 
+Descr. &  Three-dimensional free diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction\cite{Starr2001, Hassler2005, Ohsugi2006}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = \frac{1}{C}  \frac{ \kappa^2}{ \pi (R_0^2 +4D\tau)}
+ \left( \sqrt{\frac{D \tau}{\pi}} + \frac{1 - 2 D \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D \tau} \kappa\right) \right)
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$C$ & Particle concentration in confocal volume \\ 
+$\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
+$R_0$ & Lateral extent of the detection volume \\
+$D$ & Diffusion coefficient  \\
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 2D+3D+T diffusion (Gauß/exp)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+2D+T (Gauß/exp)} \\ 
+ID & \textbf{6033} \\ 
+Descr. &  Two-component, two- and three-dimensional diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction, including a triplet component\cite{Starr2001, Hassler2005, Ohsugi2006, Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) &= A_0 + \frac{1}{N (1-F + \alpha F)^2} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
+& \notag \times  \left[
+\frac{1-F}{1+ 4 D_\mathrm{2D} \tau/R_0^2} + 
+\frac{\alpha^2 F \kappa}{1+ 4 D_\mathrm{3D} \tau/R_0^2} 
+\left( \sqrt{\frac{D_\mathrm{3D} \tau}{\pi}} + \frac{1 - 2 D_\mathrm{3D} \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_\mathrm{3D} \tau} \kappa\right) \right) \right]
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal volume ($N = N_\mathrm{2D}+N_\mathrm{3D}$) \\ 
+$D_\mathrm{2D}$ &  Diffusion coefficient of surface bound particles \\ 
+$D_\mathrm{3D}$ &  Diffusion coefficient of freely diffusing particles \\ 
+$F$ & Fraction of molecules of the freely diffusing species ($N_\mathrm{3D} = F N$) \\
+$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_\mathrm{3D}/q_\mathrm{2D}$) \\
+$R_0$ & Lateral extent of the detection volume \\
+$\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+% 3D+3D+T diffusion (Gauß/exp)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+3D+T (Gauß/exp)} \\ 
+ID & \textbf{6034} \\ 
+Descr. &  Two-component three-dimensional diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction, including a triplet component\cite{Starr2001, Hassler2005, Ohsugi2006, Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\
+\end{tabular}
+\begin{align}
+G(\tau) = &A_0 + \frac{1}{N (1-F + \alpha F)^2} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
+\notag \times  \Bigg[ \,\, & 
+\frac{F \kappa}{1+ 4 D_1 \tau/R_0^2} 
+\left( \sqrt{\frac{D_1 \tau}{\pi}} + \frac{1 - 2 D_1 \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_1 \tau} \kappa\right) \right) + \\
+ \notag + &
+\frac{(1-F) \alpha^2 \kappa}{1+ 4 D_2 \tau/R_0^2} 
+\left( \sqrt{\frac{D_2 \tau}{\pi}} + \frac{1 - 2 D_2 \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_2 \tau} \kappa\right) \right) \,\, \Bigg]
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$A_0$ & Offset \\ 
+$N$ & Effective number of particles in confocal volume ($N = N_1+N_2$) \\ 
+$D_1$ &  Diffusion coefficient of species 1 \\ 
+$D_2$ &  Diffusion coefficient of species 2 \\ 
+$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
+$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_2/q_1$) \\
+$R_0$ & Lateral extent of the detection volume \\
+$\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+\end{tabular}
+\end{center}
+\vspace{2em}
+
+
+
+\subsubsection{TIR-FCS with a square-shaped lateral detection volume}
+The detection volume is axially confined by an evanescent field of depth\footnote{Where the field has decayed to $1/e$} $d_\mathrm{eva} = 1 / \kappa$.
+The lateral detection area is a convolution of the point spread function of the microscope of size $\sigma$,
+\begin{align}
+\sigma = \sigma_0  \frac{\lambda}{\mathit{NA}},
+\end{align} 
+with a square of side length $a$.
+The model functions make use of the Faddeeva function (complex error function)\footnote{In user-defined model functions, the Faddeeva function is accessible through \texttt{wofz()}. For convenience, the function \texttt{wixi()} can be used, which takes only $\xi$ as an argument so that the imaginary unit $i$ can be omitted.}:
+\begin{align}
+w\!(i\xi) &= e^{\xi^2} \mathrm{erfc}(\xi) \\
+\notag &= e^{\xi^2} \cdot  \frac{2}{\sqrt{\pi}} \int_\xi^\infty \mathrm{e}^{-\alpha^2} \mathrm{d}\alpha
+\end{align} 
+\vspace{2em}
+
+
+% 2D TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{2D (□x$\upsigma$)} \\ 
+ID & \textbf{6000} \\ 
+Descr. &  Two-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function\cite{Ries2008390, Yordanov2011}\footnote{Note that reference \cite{Ries2008390} contains several unfortunate misprints.}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) = \frac{1}{C} \left[
+\frac{2 \sqrt{\sigma^2+D \tau}}{\sqrt{\pi} a^2}
+\left( \exp\left(-\frac{a^2}{4(\sigma^2+D \tau)}\right) - 1 \right) +
+\frac{1}{a} \, \mathrm{erf}\left(\frac{a}{2 \sqrt{\sigma^2+D \tau}}\right)
+\right]^2
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$C$ & Particle concentration in detection area \\ 
+$\sigma$ & Lateral size of the point spread function \\ 
+$a$ & Side length of the square-shaped detection area \\
+$D$ & Diffusion coefficient \\
+\end{tabular} \\
+\end{center}
+\vspace{2em}
+
+
+% 3D TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D (□x$\upsigma$/exp)} \\ 
+ID & \textbf{6010} \\ 
+Descr. &  Three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function, and an exponentially decaying profile in axial direction\cite{Ries2008390, Yordanov2011}. \\ 
+\end{tabular}
+\begin{align}
+G(\tau) =  \frac{\kappa^2}{C} &
+\left( \sqrt{\frac{D \tau}{\pi}} + \frac{1 - 2 D \tau \kappa^2}{2 \kappa} w\!\left(i \sqrt{D \tau} \kappa\right) \right) \times \\
+\notag  \times \Bigg[ & \frac{2 \sqrt{\sigma^2+D \tau}}{\sqrt{\pi} a^2}
+\left( \exp\left(-\frac{a^2}{4(\sigma^2+D \tau)}\right) - 1 \right) +
+\frac{1}{a} \, \mathrm{erf}\left(\frac{a}{2 \sqrt{\sigma^2+D \tau}}\right) \Bigg]^2
+\end{align} 
+\begin{center}
+\begin{tabular}{ll}
+$C$ & Particle concentration in detection volume \\ 
+$\sigma$ & Lateral size of the point spread function \\ 
+$a$ & Side length of the square-shaped detection area \\
+$\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
+$D$ & Diffusion coefficient \\
+\end{tabular} \\
+\end{center}
+\vspace{2em}
+
+
+% 2D+2D TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{2D+2D (□x$\upsigma$/exp)} \\ 
+ID & \textbf{6022} \\ 
+Descr. &  Two-component two-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function. \newline
+The correlation function is a superposition of two-dimensional model functions of the type \textbf{2D (□x$\upsigma$)} (6000)\cite{Ries2008390, Yordanov2011}. \\
+\end{tabular}
+\vspace{2em}
+
+
+% 3D+2D TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+2D (□x$\upsigma$/exp)} \\ 
+ID & \textbf{6020} \\ 
+Descr. &  Two-component two- and three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function, and an exponentially decaying profile in axial direction.  \newline
+The correlation function is a superposition of the two-dimensional model function \textbf{2D (□x$\upsigma$)} (6000) and the three-dimensional model function \textbf{3D (□x$\upsigma$/exp)} (6010)\cite{Ries2008390, Yordanov2011}.
+\end{tabular}
+\vspace{2em}
+
+
+% 3D+3D TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+3D (□x$\upsigma$/exp)} \\ 
+ID & \textbf{6023} \\ 
+Descr. &  Two-component three-dimensional free diffusion with a square-shaped lateral detection area taking into account the size of the point spread function, and an exponentially decaying profile in axial direction. \newline
+The correlation function is a superposition of three-dimensional model functions of the type \textbf{3D (□x$\upsigma$/exp)} (6010)\cite{Ries2008390, Yordanov2011}. \\
+\end{tabular}
+\vspace{2em}
+
+
+% 3D+2D+kin TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{3D+2D+kin (□x$\upsigma$/exp)} \\ 
+ID & \textbf{6021} \\ 
+Descr. &  Two-component two- and three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function, and an exponentially decaying profile in axial direction. This model covers binding and unbinding kinetics.  \newline 
+The correlation function for this model was introduced in \cite{Ries2008390}. Because approximations are made in the derivation, please verify that this model is applicable to your problem before using it.
+\end{tabular}
+\vspace{2em}
+
+
diff --git a/external_model_functions/ExampleFunc_CS_2D+2D+S+T.txt b/external_model_functions/ExampleFunc_CS_2D+2D+S+T.txt
new file mode 100755
index 0000000..f743c46
--- /dev/null
+++ b/external_model_functions/ExampleFunc_CS_2D+2D+S+T.txt
@@ -0,0 +1,85 @@
+# CS-FCS 2D+2D+S+T (Confocal)
+
+# Circular scanning FCS model function for two 2D-diffusing species
+# including triplet component.
+
+# Further reading:
+#  Precise Measurement of Diffusion Coefficients using Scanning 
+#  Fluorescence Correlation Spectroscopy
+#  Petrasek and Schwille, BiophysJ 2008, 1437-1448
+#  http://dx.doi.org/10.1529/biophysj.107.108811
+
+# Visit http://fcstools.dyndns.org/pyscanfcs/ for more information.
+# The first line of this file will be treated as the name of the model
+# inside PyCorrFit. PyCorrFit will enumerate user-imported models with IDs
+# starting at 7001. When you save a session, the user-defined models
+# like this one will be saved as well. Lines starting with a hash "#"
+# are treated as comments. Empty lines and lines with only white space
+# characters are ignored.
+
+# Note that if your code does not work, it might be that some variables
+# have another meaning. This includes using "n" instead of "N".
+# If you get a syntax error, it might be that your starting variables
+# are not set to reasonable starting values. PyCorrFit tests the
+# function with sympy (for safety) and evaluates it for
+# different values of tau.
+
+## Definition of parameters:
+# First, define the parameters and their starting values for your model
+# function. If the parameter has a unit of measurement, then it may be
+# added separated by a white space before the "=" sign. The starting
+# value should be a floating point number. You may use abbreviations
+# like "1e-3" instead of "0.001".
+# Note that PyCorrFit has its own unit system:
+#  unit of time        : 1 ms
+#  unit of inverse time: 1000 /s
+#  unit of distance    : 100 nm
+#  unit of diff.coeff  : 10 µm²/s
+#  unit of inverse area: 100 /µm²
+#  unit of inv. volume : 1000 /µm³
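+# Example: with these units, a value of D1 = 200.0 (in 10 µm²/s)
+# corresponds to a diffusion coefficient of 2000 µm²/s.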
+
+# Diffusion coefficient of first component
+D1 [10 µm²/s] = 200.0
+
+# Diffusion coefficient of second component
+D2 [10 µm²/s] = 20.0
+
+# Fraction of species One
+F1 = 1.0
+
+# Half waist of the lateral detection area (w0 = 2*a)
+a [100 nm] = 1.0
+
+# Particle number
+n = 5.0
+
+# Scan radius
+R [100 nm] = 3.850
+
+# Frequency
+f [kHz] = .2
+
+# Triplet fraction
+T = 0.1
+
+# Triplet time
+tautrip [ms] = 0.001
+
+# You may choose to substitute certain parts of the correlation function
+# with other values for easy reading. This can be done by using the
+# prefix "g". You may use all common mathematical functions,
+# such as "sqrt()" or "exp()". For convenience, "pi" and "e" may also
+# be used. If in doubt, always use float numbers with a dot "."
+# to be sure the program doesn't accidentally do integer division.
+
+gTriplet = 1. + T/(1-T)*exp(-tau/tautrip)
+gScan1 = exp(-(R*sin(pi*f*tau))**2/(a**2+D1*tau))
+gScan2 = exp(-(R*sin(pi*f*tau))**2/(a**2+D2*tau))
+gTwoD1 = F1/(1.+D1*tau/a**2)
+gTwoD2 = (1-F1)/(1.+D2*tau/a**2)
+
+# The final line with the correlation function should start with a "G"
+# before the "=" sign.
+
+G = 1./n * (gTwoD1 * gScan1 + gTwoD2 * gScan2) * gTriplet
+
diff --git a/external_model_functions/ExampleFunc_CS_3D+S+T.txt b/external_model_functions/ExampleFunc_CS_3D+S+T.txt
new file mode 100755
index 0000000..39b61bd
--- /dev/null
+++ b/external_model_functions/ExampleFunc_CS_3D+S+T.txt
@@ -0,0 +1,81 @@
+# CS-FCS 3D+S+T (Confocal)
+
+# Circular scanning FCS (3D diffusion with triplet).
+
+# Further reading:
+#  Precise Measurement of Diffusion Coefficients using Scanning 
+#  Fluorescence Correlation Spectroscopy
+#  Petrasek and Schwille, BiophysJ 2008, 1437-1448
+#  http://dx.doi.org/10.1529/biophysj.107.108811
+
+# Visit http://fcstools.dyndns.org/pycorrfit/ for more information.
+# The first line of this file will be treated as the name of the model
+# inside PyCorrFit. PyCorrFit will enumerate user-imported models with IDs
+# starting at 7001. When you save a session, the user-defined models
+# like this one will be saved as well. Lines starting with a hash "#"
+# are treated as comments. Empty lines and lines with only white space
+# characters are ignored.
+
+# Note that if your code does not work, it might be that some variables
+# have another meaning. This includes using "n" instead of "N".
+# If you get a syntax error, it might be that your starting variables
+# are not set to reasonable starting values. PyCorrFit tests the
+# function with sympy (for safety) and evaluates it for
+# different values of tau.
+
+## Definition of parameters:
+# First, define the parameters and their starting values for your model
+# function. If the parameter has a unit of measurement, then it may be
+# added separated by a white space before the "=" sign. The starting
+# value should be a floating point number. You may use abbreviations
+# like "1e-3" instead of "0.001".
+# Note that PyCorrFit has its own unit system:
+#  unit of time        : 1 ms
+#  unit of inverse time: 1000 /s
+#  unit of distance    : 100 nm
+#  unit of diff.coeff  : 10 µm²/s
+#  unit of inverse area: 100 /µm²
+#  unit of inv. volume : 1000 /µm³
+
+# Diffusion coefficient
+D [10 µm²/s] = 200.0
+
+# Structural parameter
+w = 5.0
+
+# Half waist of the lateral detection area (w0 = 2*a)
+a [100 nm] = 1.0
+
+# Particle number
+n = 5.0
+
+# Scan radius
+R [100 nm] = 5.0
+
+# Frequency
+f [kHz] = 20.0
+
+# Triplet fraction
+T = 0.1
+
+# Triplet time
+tautrip [ms] = 0.001
+
+# You may choose to substitute certain parts of the correlation function
+# with other values for easy reading. This can be done by using the
+# prefix "g". You may use all common mathematical functions,
+# such as "sqrt()" or "exp()". For convenience, "pi" and "e" may also
+# be used. If in doubt, always use float numbers with a dot "."
+# to be sure the program doesn't accidentally do integer division.
+
+gTrip = 1. + T/(1-T)*exp(-tau/tautrip)
+gScan = exp(-(R*sin(pi*f*tau))**2/(a**2+D*tau))
+gTwoD = 1./(1.+D*tau/a**2)
+gOneD = 1./sqrt(1.+D*tau/(w*a)**2)
+gThrD = gTwoD * gOneD
+
+# The final line with the correlation function should start with a "G"
+# before the "=" sign.
+
+G = 1./n * gThrD * gScan * gTrip
+
diff --git a/external_model_functions/ExampleFunc_Exp_correlated_noise.txt b/external_model_functions/ExampleFunc_Exp_correlated_noise.txt
new file mode 100755
index 0000000..fc84a32
--- /dev/null
+++ b/external_model_functions/ExampleFunc_Exp_correlated_noise.txt
@@ -0,0 +1,16 @@
+# Exponentially correlated noise
+# Model function for PyCorrFit.
+# http://fcstools.dyndns.org/pycorrfit/
+# This is a test function used to check decay times of exponentially
+# correlated noise.
+
+# Fraction
+Purity = 0.5
+
+# Exp time
+tauexp [ms] = 2.0
+
+gTrip = Purity/(1-Purity)*exp(-tau/tauexp)
+
+G = gTrip
+
diff --git a/external_model_functions/ExampleFunc_SFCS_1C_2D_Autocorrelation.txt b/external_model_functions/ExampleFunc_SFCS_1C_2D_Autocorrelation.txt
new file mode 100755
index 0000000..eeb9ed9
--- /dev/null
+++ b/external_model_functions/ExampleFunc_SFCS_1C_2D_Autocorrelation.txt
@@ -0,0 +1,24 @@
+# 2D SFCS AC
+# 2D one-component correlation function for perpendicular SFCS.
+# Model function for PyCorrFit.
+# http://fcstools.dyndns.org/pycorrfit/
+# http://fcstools.dyndns.org/pyscanfcs/
+# The detection profile is elliptical, as the focus passes the membrane
+# perpendicular to its axis of symmetry.
+# The axis ratio / structural parameter is defined as:
+# SP = semi-major-axis / semi-minor-axis (wz/w0)
+
+## Parameters
+# Number of particles
+Nob = 40.0
+# Diffusion time
+taudiff [ms] = 1.0
+# Axis ratio / structural parameter
+SP = 5
+
+gFirst = 1/sqrt(1+tau/taudiff)
+gSecond = 1/sqrt(1+tau/(taudiff*SP**2))
+
+# Correlation function
+G = 1/Nob * gFirst * gSecond
+
diff --git a/external_model_functions/ExampleFunc_SFCS_1C_2D_Cross-correlation.txt b/external_model_functions/ExampleFunc_SFCS_1C_2D_Cross-correlation.txt
new file mode 100755
index 0000000..af2f12a
--- /dev/null
+++ b/external_model_functions/ExampleFunc_SFCS_1C_2D_Cross-correlation.txt
@@ -0,0 +1,32 @@
+# 2D SFCS CC
+# 2D one-component correlation function for perpendicular SFCS.
+# Model function for PyCorrFit.
+# http://fcstools.dyndns.org/pycorrfit/
+# http://fcstools.dyndns.org/pyscanfcs/
+# The detection profile is elliptical, as the focus passes the membrane
+# perpendicular to its axis of symmetry.
+# The axis ratio / structural parameter is defined as:
+# SP = semi-major-axis / semi-minor-axis (wz/w0)
+# This model describes the cross-correlation for two-focus FCS
+
+## Parameters
+# Number of particles
+Nob = 40.0
+# Diffusion time
+taudiff [ms] = 1.0
+# axis ratio / structural parameter
+SP = 5
+# Beam waist radius
+w0 [100 nm] = 2.3
+# Distance between the foci
+d [100 nm] = 5.0
+
+gFirst = 1/sqrt(1+tau/taudiff)
+gSecond = 1/sqrt(1+tau/(taudiff*SP**2))
+
+gac = 1/Nob * gFirst * gSecond
+# Diffusion coefficient:
+gD = w0**2/(4*taudiff)
+gcc = exp(-d**2/(w0**2+4*gD*tau))
+
+G = gac*gcc
diff --git a/external_model_functions/ExampleFunc_TIRF_zOnly.txt b/external_model_functions/ExampleFunc_TIRF_zOnly.txt
new file mode 100755
index 0000000..d63a14e
--- /dev/null
+++ b/external_model_functions/ExampleFunc_TIRF_zOnly.txt
@@ -0,0 +1,38 @@
+# Axial diffusion (TIRF)
+# This model function describes fictitious one-dimensional diffusion
+# in TIR-FCS setups. It demonstrates the mathematical functions available
+# in PyCorrFit.
+# Visit http://fcstools.dyndns.org/pycorrfit/ for more information.
+# The first line of this file will be treated as the name of the model
+# inside PyCorrFit. PyCorrFit will enumerate user-imported models with IDs
+# starting at 7001. When you save a session, the user-defined models
+# like this one will be saved as well. Lines starting with a hash "#"
+# are treated as comments. Empty lines and lines with only white space
+# characters are ignored.
+
+## Definition of parameters:
+# First, define the parameters and their starting values for your model
+# function. If the parameter has a unit of measurement, then it may be
+# added separated by a white space before the "=" sign. The starting
+# value should be a floating point number. You may use abbreviations
+# like "1e-3" instead of "0.001".
+# Note that PyCorrFit has its own unit system:
+#  unit of time        : 1 ms
+#  unit of inverse time: 1000 /s
+#  unit of distance    : 100 nm
+#  unit of diff.coeff  : 10 µm²/s
+#  unit of inverse area: 100 /µm²
+#  unit of inv. volume : 1000 /µm³
+
+D [10 µm²/s] = 5e-5
+d [100 nm] = 1.0
+
+# The final line with the correlation function should start with a "G"
+# before the "=" sign. You may use all common mathematical functions,
+# such as "sqrt()" or "exp()". For convenience, "pi" and "e" may also
+# be used. If you need to use the faddeeva function you can do so by
+# typing "wofz()". A common used version with an imaginary argument is
+# also available: wofz(i*x) = wixi(x)
+
+G = (sqrt(D*tau/pi) - (2*D*tau/d**2 - 1)/(2/d) * wixi(sqrt(D*tau)/d))/d**2
+
diff --git a/external_model_functions/Model_AC_3D+T_confocal.txt b/external_model_functions/Model_AC_3D+T_confocal.txt
new file mode 100755
index 0000000..4decea4
--- /dev/null
+++ b/external_model_functions/Model_AC_3D+T_confocal.txt
@@ -0,0 +1,20 @@
+# 3D+T (Gauss)
+# Autocorrelation function for 3D diffusion + Triplet
+
+## Parameters
+# Particle number
+n = 10.0
+
+# Triplet fraction
+T = 0.2
+
+# Triplet time
+tautrip [ms] = 0.02
+
+# Diffusion time
+taudiff [ms] = 0.4
+
+# Structural parameter
+SP = 5
+
+G = 1/( n*(1+tau/taudiff) * sqrt(1 + tau/(SP**2*taudiff)) ) * ( 1+T/(1.-T)*exp(-tau/tautrip) )
diff --git a/external_model_functions/Model_Flow_AC_3D_confocal.txt b/external_model_functions/Model_Flow_AC_3D_confocal.txt
new file mode 100755
index 0000000..d889862
--- /dev/null
+++ b/external_model_functions/Model_Flow_AC_3D_confocal.txt
@@ -0,0 +1,41 @@
+# AC flow 3D (gauss)
+# Autocorrelation function including flow for confocal setups with
+# a free 3D diffusing species. 
+# This file was kindly provided by Thomas Kuckert, Schwille Lab, Biotec,
+# Tatzberg 47-51, 01307 Dresden, Germany.
+
+# For more information about this model function, see:
+#    Staroske, Wolfgang:
+#    In Vitro and In Vivo Applications of Fluorescence
+#    Cross-Correlation Spectroscopy, TU Dresden, Diss., June 2010
+#
+#    Brinkmeier, M. ; Dörre, K. ; Stephan, J. ; Eigen, M.: Two-beam cross-
+#    correlation: A method to characterize transport phenomena in micrometer-
+#    sized structures. In: Anal Chem 71 (1999), Feb, Nr. 3, 609–616.
+#    http://dx.doi.org/10.1021/ac980820i – DOI 10.1021/ac980820i
+
+## Parameters
+# Diffusion coefficient
+D [10 µm²/s] = 10.0
+
+# Structural parameter
+w = 6.5
+
+# Waist of the lateral detection area
+a [100 nm] = 3.25
+
+# Particle number
+n = 10.0
+
+# Flow velocity
+v [100 µm/s] = 0.5
+
+
+## Calculation of correlation function
+gFlow = exp(-((v**2) * (tau**2))/(a**2+4*D*tau))
+gTwoD = 1./(1.+4*D*tau/a**2)
+gOneD = 1./sqrt(1.+4*D*tau/(w*a)**2)
+gThrD = gTwoD * gOneD
+
+G = 1./n * gThrD * gFlow
+
diff --git a/external_model_functions/Model_Flow_CC_Backward_3D_confocal.txt b/external_model_functions/Model_Flow_CC_Backward_3D_confocal.txt
new file mode 100755
index 0000000..137ef67
--- /dev/null
+++ b/external_model_functions/Model_Flow_CC_Backward_3D_confocal.txt
@@ -0,0 +1,49 @@
+# CC bw flow 3D (gauss)
+# Backward cross-correlation function including flow for confocal setups with
+# a free 3D diffusing species. 
+# This file was kindly provided by Thomas Kuckert, Schwille Lab, Biotec,
+# Tatzberg 47-51, 01307 Dresden, Germany.
+
+# For more information about this model function, see:
+#    Staroske, Wolfgang:
+#    In Vitro and In Vivo Applications of Fluorescence
+#    Cross-Correlation Spectroscopy, TU Dresden, Diss., June 2010
+#
+#    Brinkmeier, M. ; Dörre, K. ; Stephan, J. ; Eigen, M.: Two-beam cross-
+#    correlation: A method to characterize transport phenomena in micrometer-
+#    sized structures. In: Anal Chem 71 (1999), Feb, Nr. 3, 609–616.
+#    http://dx.doi.org/10.1021/ac980820i – DOI 10.1021/ac980820i
+
+## Parameters
+# Diffusion coefficient
+D [10 µm²/s] = 10.0
+
+# Structural parameter
+w = 6.5
+
+# Waist of the lateral detection area
+a [100 nm] = 3.25
+
+# Particle number
+n = 10.0
+
+# Focal distance
+R [100 nm] = 5.0
+
+# Flow velocity
+v [100 µm/s] = 0.5
+
+# Angular difference between the flow direction and the foci vector
+alpha = 0.0000001
+
+## Calculation of correlation function
+gFlowT = (v**2)*(tau**2)+R**2
+gAng = 2*R*v*tau*cos(alpha)
+gC2Flow = exp(-(gFlowT+gAng)/(a**2+4*D*tau))
+gTwoD = 1./(1.+4*D*tau/a**2)
+gOneD = 1./sqrt(1.+4*D*tau/(w*a)**2)
+gThrD = gTwoD * gOneD
+
+
+G = 1./n * gThrD * gC2Flow
+
diff --git a/external_model_functions/Model_Flow_CC_Forward_3D_confocal.txt b/external_model_functions/Model_Flow_CC_Forward_3D_confocal.txt
new file mode 100755
index 0000000..4fd55f6
--- /dev/null
+++ b/external_model_functions/Model_Flow_CC_Forward_3D_confocal.txt
@@ -0,0 +1,48 @@
+# CC fw flow 3D (gauss)
+# Forward cross-correlation function including flow for confocal setups with
+# a free 3D diffusing species. 
+# This file was kindly provided by Thomas Kuckert, Schwille Lab, Biotec,
+# Tatzberg 47-51, 01307 Dresden, Germany.
+
+# For more information about this model function, see:
+#    Staroske, Wolfgang:
+#    In Vitro and In Vivo Applications of Fluorescence
+#    Cross-Correlation Spectroscopy, TU Dresden, Diss., June 2010
+#
+#    Brinkmeier, M. ; Dörre, K. ; Stephan, J. ; Eigen, M.: Two-beam cross-
+#    correlation: A method to characterize transport phenomena in micrometer-
+#    sized structures. In: Anal Chem 71 (1999), Feb, Nr. 3, 609–616.
+#    http://dx.doi.org/10.1021/ac980820i – DOI 10.1021/ac980820i
+
+## Parameters
+# Diffusion coefficient
+D [10 µm²/s] = 10.0
+
+# Structural parameter
+w = 6.5
+
+# Waist of the lateral detection area
+a [100 nm] = 3.25
+
+# Particle number
+n = 10.0
+
+# Focal distance
+R [100 nm] = 5.0
+
+# Flow velocity
+v [100 µm/s] = 0.5
+
+# Angular difference between the flow direction and the foci vector
+alpha = 0.000001
+
+## Calculation of correlation function
+gFlowT = (v**2)*(tau**2)+R**2
+gAng = 2*R*v*tau*cos(alpha)
+gC1Flow = exp(-(gFlowT-gAng)/(a**2+4*D*tau))
+gTwoD = 1./(1.+4*D*tau/a**2)
+gOneD = 1./sqrt(1.+4*D*tau/(w*a)**2)
+gThrD = gTwoD * gOneD
+
+G = 1./n * gThrD * gC1Flow
+
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..861a9f5
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build = 
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..e6fd5b6
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+from setuptools import setup, find_packages
+from os.path import join, dirname, realpath
+from warnings import warn
+
+# The next three lines are necessary for setup.py install to include
+# ChangeLog and Documentation of PyCorrFit
+from distutils.command.install import INSTALL_SCHEMES
+for scheme in INSTALL_SCHEMES.values():
+    scheme['data'] = scheme['purelib']
+
+
+# Get the version of PyCorrFit from the Changelog.txt
+StaticChangeLog = join(dirname(realpath(__file__)), "ChangeLog.txt")
+try:
+    clfile = open(StaticChangeLog, 'r')
+    version = clfile.readline().strip()
+    clfile.close()     
+except IOError:
+    warn("Could not find 'ChangeLog.txt'. PyCorrFit version is unknown.")
+    version = "0.0.0-unknown"
+
+setup(
+    name='pycorrfit',
+    author='Paul Mueller',
+    author_email='paul.mueller at biotec.tu-dresden.de',
+    url='https://github.com/paulmueller/PyCorrFit',
+    version=version,
+    packages=['pycorrfit',
+              'pycorrfit.models',
+              'pycorrfit.readfiles',
+              'pycorrfit.tools'],
+    package_dir={'pycorrfit': 'src',
+                 'pycorrfit.models': 'src/models',
+                 'pycorrfit.readfiles': 'src/readfiles',
+                 'pycorrfit.tools': 'src/tools'},
+    data_files=[('pycorrfit_doc', ['ChangeLog.txt', 'PyCorrFit_doc.pdf'])],
+    license="GPL v2",
+    long_description=open(join(dirname(__file__), 'README.md')).read(),
+    scripts=['bin/pycorrfit'],
+    include_package_data=True,
+    install_requires=[
+        "NumPy >= 1.5.1",
+        "SciPy >= 0.8.0",
+        "sympy >= 0.7.2",
+        "PyYAML >= 3.09",
+        "wxPython >= 2.8.10.1",
+        "matplotlib >= 1.1.0"]
+    )
+
+
diff --git a/src/PyCorrFit.py b/src/PyCorrFit.py
new file mode 100755
index 0000000..d93a2dc
--- /dev/null
+++ b/src/PyCorrFit.py
@@ -0,0 +1,147 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    A flexible tool for fitting and analyzing correlation curves.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 1000 /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 um^2/s
+    unit of inverse area: 100 /um^2
+    unit of inv. volume : 1000 /um^3
+
+
+    PyCorrFit is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published 
+    by the Free Software Foundation, either version 2 of the License, 
+    or (at your option) any later version.
+
+    PyCorrFit is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of 
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
+    See the GNU General Public License for more details. 
+
+    You should have received a copy of the GNU General Public License 
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+
+import csv
+from distutils.version import LooseVersion
+import sys
+# Import matplotlib a little early. This solves some problems with
+# save dialogs that are not created by the "WXAgg" backend.
+
+
+## On Windows XP I had problems with unicode characters.
+# I found this at 
+# http://stackoverflow.com/questions/5419/python-unicode-and-the-windows-console
+# and it helped (needs to be done before import of matplotlib):
+import platform
+if platform.system() == 'Windows':
+    reload(sys)
+    sys.setdefaultencoding('utf-8')
+    
+import matplotlib
+# We do catch warnings about performing this before matplotlib.backends stuff
+#matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets for dialogs
+import numpy as np                  # NumPy
+import os
+import scipy
+
+# A missing import hook prevented us from bundling PyCorrFit on Mac using
+# pyinstaller. The following imports solved that issue:
+try:
+    from scipy.sparse.csgraph import shortest_path
+    from scipy.sparse.csgraph import _validation
+except ImportError:
+    pass
+
+# Sympy is optional:
+try:
+    import sympy
+except ImportError:
+    print "Importing sympy failed! Checking of external model functions"
+    print "will not work!"
+    # We create a fake module sympy with a __version__ property.
+    # This way users can run PyCorrFit without having installed sympy.
+    class Fake(object):
+        def __init__(self):
+            self.__version__ = "0.0 unknown"
+            self.version = "0.0 unknown"
+    sympy = Fake()
+# We must not import wx here. frontend/gui does that. If we do import wx here,
+# somehow unicode characters will not be displayed correctly on windows.
+# import wx
+import yaml
+
+## Continue with the import:
+import doc
+import frontend as gui              # The actual program
+
+
+
+
+def CheckVersion(given, required, name):
+    """ For a given set of versions  str *required* and str *given*,
+    where version are usually separated by dots, print whether for
+    the module str *name* the required verion is met or not.
+    """
+    try:
+        req = LooseVersion(required)
+        giv = LooseVersion(given)
+    except:
+        print " WARNING: Could not verify version of "+name+"."
+        return
+    if req > giv:
+        print " WARNING: You are using "+name+" v. "+given+\
+              " | Required: "+name+" "+ required
+    else:
+        print " OK: "+name+" v. "+given+" | "+required+" required"
+
+
+## VERSION
+version = doc.__version__
+__version__ = version
+
+print gui.doc.info(version)
+
+## Check important module versions
+print "\n\nChecking module versions..."
+CheckVersion(csv.__version__, "1.0", "csv")
+CheckVersion(np.__version__, "1.5.1", "NumPy")
+CheckVersion(scipy.__version__, "0.8.0", "SciPy")
+CheckVersion(sympy.__version__, "0.7.2", "sympy")
+CheckVersion(gui.wx.__version__, "2.8.10.1", "wxPython")
+CheckVersion(yaml.__version__, "3.09", "PyYAML")
+
+## Command line ?
+
+
+## Start gui
+app = gui.wx.App(False)
+frame = gui.MyFrame(None, -1, version)
+# Before starting the main loop, check for possible session files
+# in the arguments.
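+# Example invocation (hypothetical session file name):
+#   python PyCorrFit.py measurement_fcsfit-session.zip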
+sysarg = sys.argv
+for arg in sysarg:
+    if arg.endswith("fcsfit-session.zip"):
+        print "\nLoading Session "+arg
+        frame.OnOpenSession(sessionfile=arg)
+    elif arg.startswith("python"):
+        pass
+    elif arg.endswith("PyCorrFit.py"):
+        pass
+    else:
+        print "I do not know what to do with this argument: "+arg
+# Now start the app
+app.MainLoop()
+
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000..5832ae1
--- /dev/null
+++ b/src/__init__.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+"""
+PyCorrFit
+Paul Müller, Biotec - TU Dresden
+
+In current biomedical research, fluorescence correlation spectroscopy (FCS)
+is applied to characterize molecular dynamic processes in vitro and in living
+cells. Commercial FCS setups only permit data analysis that is limited to a
+specific instrument by the use of in-house file formats or a finite number of
+implemented correlation model functions. PyCorrFit is a general-purpose FCS
+evaluation software that, amongst other formats, supports the established Zeiss
+ConfoCor3 ~.fcs file format. PyCorrFit comes with several built-in model
+functions, covering a wide range of applications in standard confocal FCS.
+In addition, it contains equations dealing with different excitation geometries
+like total internal reflection (TIR).
+"""
+
+import doc
+import models
+import readfiles
+
+__version__ = doc.__version__
+__author__ = "Paul Mueller"
+__email__ = "paul.mueller at biotec.tu-dresden.de"
+
diff --git a/src/doc.py b/src/doc.py
new file mode 100755
index 0000000..cd101b1
--- /dev/null
+++ b/src/doc.py
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module doc
+    *doc* is the documentation. Functions for various text output point here.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import sys
+import csv
+import matplotlib
+# We do catch warnings about performing this before matplotlib.backends stuff
+#matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets for dialogs
+import numpy
+import os
+import platform
+import scipy
+
+# This is a fake class for modules not available.
+class Fake(object):
+    def __init__(self):
+        self.__version__ = "N/A"
+        self.version = "N/A"
+
+try:
+    import sympy
+except ImportError:
+    print " Warning: module sympy not found!"
+    sympy = Fake()
+import tempfile
+try:
+    import urllib2
+except ImportError:
+    print " Warning: module urllib not found!"
+    urllib = Fake()
+try:
+    import webbrowser
+except ImportError:
+    print " Warning: module webbrowser not found!"
+    webbrowser = Fake()
+import wx
+import yaml
+
+import readfiles
+
+
+
+def description():
+    return """PyCorrFit displays and processes data
+from fluorescence correlation spectroscopy
+measurements. PyCorrFit is written in Python."""
+
+
+def GetLocationOfChangeLog(filename = "ChangeLog.txt"):
+    locations = list()
+    fname1 = os.path.realpath(__file__)
+    # Try one directory up
+    dir1 = os.path.dirname(fname1)+"/../"
+    locations.append(os.path.realpath(dir1))
+    # In case of distribution with .egg files (pip, easy_install)
+    dir2 = os.path.dirname(fname1)+"/../pycorrfit_doc/"
+    locations.append(os.path.realpath(dir2))
+    ## frozen binaries:
+    if hasattr(sys, 'frozen'):
+        try:
+            dir2 = sys._MEIPASS + "/doc/"
+        except AttributeError:
+            dir2 = "./"
+        locations.append(os.path.realpath(dir2))
+    for loc in locations:
+        thechl = os.path.join(loc,filename)
+        if os.path.exists(thechl):
+            return thechl
+    # if this does not work:
+    return None
+
+
+def GetLocationOfDocumentation(filename = "PyCorrFit_doc.pdf"):
+    """ Returns the location of the documentation if there is any."""
+    ## running from source
+    locations = list()
+    fname1 = os.path.realpath(__file__)
+    # Documentation is usually one directory up
+    dir1 = os.path.dirname(fname1)+"/../"
+    locations.append(os.path.realpath(dir1))
+    # In case of distribution with .egg files (pip, easy_install)
+    dir2 = os.path.dirname(fname1)+"/../pycorrfit_doc/"
+    locations.append(os.path.realpath(dir2))
+    ## frozen binaries:
+    if hasattr(sys, 'frozen'):
+        try:
+            dir2 = sys._MEIPASS + "/doc/"
+        except AttributeError:
+            dir2 = "./"
+        locations.append(os.path.realpath(dir2))
+    for loc in locations:
+        thedoc = os.path.join(loc,filename)
+        if os.path.exists(thedoc):
+            return thedoc
+    # if this does not work:
+    return None
+
+
+def info(version):
+    """ Returns a little info about our program and what it can do.
+    """
+    textwin = u"""
+    Copyright 2011-2012 Paul Müller, Biotec - TU Dresden
+
+    A versatile tool for fitting and analyzing correlation curves.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 1000 /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm^3 """
+    textlin = """
+    © 2011-2012 Paul Müller, Biotec - TU Dresden
+
+    A versatile tool for fitting and analyzing correlation curves.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 1000 /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³ """
+    if platform.system() != 'Linux':
+        texta = textwin
+    else:
+        texta = textlin
+    one = "    PyCorrFit version "+version+"\n\n"
+    two = "\n\n    Supported file types:"
+    for item in readfiles.Filetypes.keys():
+        if item.split("|")[0] != readfiles.Allsupfilesstring:
+            two = two + "\n     - "+item.split("|")[0]
+    lizenz = ""
+    for line in licence().splitlines():
+        lizenz += "    "+line+"\n"
+    return one + lizenz + texta + two
+
+    
+def licence():
+    return """PyCorrFit is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published 
+by the Free Software Foundation, either version 2 of the License, 
+or (at your option) any later version.
+
+PyCorrFit is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of 
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
+See the GNU General Public License for more details. 
+
+You should have received a copy of the GNU General Public License 
+along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+
+def saveCSVinfo(parent):
+    a = "# This file was created using PyCorrFit version "+\
+        parent.version+".\n#\n"
+    b = """# Lines starting with a '#' are treated as comments.
+# The data is stored as CSV below this comment section.
+# Data usually consists of lag times (channels) and
+# the corresponding correlation function - experimental
+# and fitted values plus resulting residuals.
+# If this file is opened by PyCorrFit, only the first two
+# columns will be imported as experimental data.
+#
+"""
+    return a+b
+
+
+def SessionReadme(parent):
+    a = "This file was created using PyCorrFit version "+parent.version+"\n"
+    b = """The .zip archive you are looking at is a stored session of PyCorrFit.
+If you are interested in how the data is stored, you will find
+out here. Most important are the dimensionalities:
+Dimensionless representation:
+ unit of time        : 1 ms
+ unit of inverse time: 10³ /s
+ unit of distance    : 100 nm
+ unit of Diff.coeff  : 10 µm²/s
+ unit of inverse area: 100 /µm²
+ unit of inv. volume : 1000 /µm³
+From there, the dimension of any parameter may be
+calculated.
+
+There are a number of files within this archive, 
+depending on what was done during the session.
+
+backgrounds.csv
+ - Contains the list of backgrounds used and
+ - Averaged intensities in [kHz]
+
+bg_trace*.csv (where * is an integer)
+ - The trace of the background corresponding
+   to the line number in backgrounds.csv
+ - Time in [ms], Trace in [kHz]
+
+comments.txt
+ - Contains page titles and session comment
+ - First n lines are titles, rest is session
+   comment (where n is total number of pages)
+
+data*.csv (where * is {Number of page})
+ - Contains lag times [ms]
+ - Contains experimental data, if available
+
+externalweights.txt
+ - Contains names (types) of external weights other than from
+   Model function or spline fit
+ - Linewise: 1st element is page number, 2nd is name
+ - According to this data, the following files are present in the archive
+
+externalweights_data_*PageID*_*Type*.csv
+ - Contains weighting information of Page *PageID* of type *Type*
+
+model_*ModelID*.txt
+ - An external (user-defined) model file with internal ID *ModelID*
+
+Parameters.yaml
+ - Contains all Parameters for each page
+   Block format:
+    - - '#{Number of page}: '       
+      - {Internal model ID}
+      - {List of parameters}
+      - {List of checked parameters (for fitting)}
+      - [{Min channel selected}, {Max channel selected}]
+      - [{Weighted fit method (0=None, 1=Spline, 2=Model function)}, 
+          {No. of bins from left and right}, {No. of knots (of e.g. spline)}]
+      - [{Background to use (line in backgrounds.csv)}]
+      - Data type is Cross-correlation?
+      - Parameter id (int) used for normalization in plotting.
+        This number first enumerates the model parameters and then
+        the supplemental parameters (e.g. "n1").
+      - - [min, max] fitting parameter range of 1st parameter
+        - [min, max] fitting parameter range of 2nd parameter
+        - etc.
+ - Order in Parameters.yaml defines order of pages in a session
+ - Order in Parameters.yaml defines order in comments.txt
+
+Readme.txt (this file)
+
+Supplements.yaml
+ - Contains errors of fitting
+   Format:
+   -- Page number
+    -- [parameter id, error value]
+     - [parameter id, error value]
+    - Chi squared
+    - [pages that share parameters] (from global fitting)
+     
+trace*.csv (where * is {Number of page} | appendix "A" or "B" point to
+            the respective channels (only in cross-correlation mode))
+ - Contains times [ms]
+ - Contains countrates [kHz]
+"""
+    return a+b
+
+
+def SoftwareUsed():
+    """ Return some Information about the software used for this program """
+    text = "Python "+sys.version+\
+           "\n\nModules:"+\
+           "\n - csv "+csv.__version__+\
+           "\n - matplotlib "+matplotlib.__version__+\
+           "\n - NumPy "+numpy.__version__+\
+           "\n - os "+\
+           "\n - platform "+platform.__version__+\
+           "\n - SciPy "+scipy.__version__+\
+           "\n - sympy "+sympy.__version__ +\
+           "\n - sys "+\
+           "\n - tempfile" +\
+           "\n - urllib2 "+ urllib2.__version__ +\
+           "\n - webbrowser"+\
+           "\n - wxPython "+wx.__version__+\
+           "\n - yaml "+yaml.__version__
+    if hasattr(sys, 'frozen'):
+        pyinst = "\n\nThis executable has been created using PyInstaller."
+        text = text+pyinst
+    return text
+
+
+    
+backgroundinit = """Correct the amplitude for non-correlated background.
+The background intensity <B> can be either imported
+from a blank measurement or set manually."""
+
+#backgroundinit = """Set background correction with the background signal <B>
+#to correct the amplitude of the correlation function by
+#a factor of [<S>/(<S>-<B>)]² where <S> is the average
+#trace signal of the measurement"""
+
+# For the selection of types to import when doing import Data
+chooseimport = """Several types of data were found in
+the chosen file. Please select
+what type(s) you would like to
+import. """
+
+
+# Standard homepage
+HomePage = "http://pycorrfit.craban.de/"
+# Changelog filename
+ChangeLog = "ChangeLog.txt"
+StaticChangeLog = GetLocationOfChangeLog(ChangeLog)
+
+# Check if we can extract the version
+try:
+    clfile = open(StaticChangeLog, 'r')
+    __version__ = clfile.readline().strip()
+    clfile.close()     
+except (IOError, TypeError):
+    __version__ = "0.0.0-unknown"
+    
+    
+# Github homepage
+GitChLog = "https://raw.github.com/paulmueller/PyCorrFit/master/ChangeLog.txt"
+GitHome = "https://github.com/paulmueller/PyCorrFit"
+GitWiki = "https://github.com/paulmueller/PyCorrFit/wiki"
diff --git a/src/edclasses.py b/src/edclasses.py
new file mode 100644
index 0000000..e21e255
--- /dev/null
+++ b/src/edclasses.py
@@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    EditedClasses
+    Contains edited versions of existing classes
+    that make them more useful.
+
+"""
+
+
+# Matplotlib plotting capabilities
+import matplotlib
+# We do catch warnings about performing this before matplotlib.backends stuff
+#matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets for dialogs
+# We will hack this toolbar here
+from matplotlib.backends.backend_wx import NavigationToolbar2Wx 
+import numpy as np
+import os
+import sys
+import traceback
+from wx.lib.agw import floatspin        # Float numbers in spin fields
+import wx 
+
+
+class FloatSpin(floatspin.FloatSpin):
+    def __init__(self, parent, digits=10, increment=.01):
+        floatspin.FloatSpin.__init__(self, parent, digits=digits,
+                                     increment = increment)
+        self.Bind(wx.EVT_SPINCTRL, self.increment)
+        #self.Bind(wx.EVT_SPIN, self.increment)
+        #self.increment()
+
+
+    def increment(self, event=None):
+        # Find significant digit
+        # and use it as the new increment
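+        # Example: x = 12.3 -> digit = 0  -> increment = 1.0
+        #          x = 0.05 -> digit = -3 -> increment = 0.001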
+        x = self.GetValue()
+        if x == 0:
+            incre = 0.1
+        else:
+            digit = int(np.ceil(np.log10(abs(x)))) - 2
+            incre = 10**digit
+        self.SetIncrement(incre)
+
+
+class ChoicesDialog(wx.Dialog):
+    def __init__(self, parent, dropdownlist, title, text):
+        # parent is main frame
+        self.parent = parent
+        #super(ChoicesDialog, self).__init__(parent=parent, 
+        #    title=title)
+        wx.Dialog.__init__(self, parent, -1, title)
+        ## Controls
+        panel = wx.Panel(self)
+        # text1
+        textopen = wx.StaticText(panel, label=text)
+        btnok = wx.Button(panel, wx.ID_OK)
+        btnabort = wx.Button(panel, wx.ID_CANCEL)
+        # Dropdown
+        self.dropdown = wx.ComboBox(panel, -1, "", (15, 30),
+              wx.DefaultSize, dropdownlist, wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.dropdown.SetSelection(0)
+        # Bindings
+        self.Bind(wx.EVT_BUTTON, self.OnOK, btnok)
+        self.Bind(wx.EVT_BUTTON, self.OnAbort, btnabort)
+        # Sizers
+        topSizer = wx.BoxSizer(wx.VERTICAL)
+        topSizer.Add(textopen)
+        topSizer.Add(self.dropdown)
+        btnSizer = wx.BoxSizer(wx.HORIZONTAL)
+        btnSizer.Add(btnok)
+        btnSizer.Add(btnabort)
+        topSizer.Add(btnSizer)
+        panel.SetSizer(topSizer)
+        topSizer.Fit(self)
+        #self.Show(True)
+        self.SetFocus()
+
+    def OnOK(self, event=None):
+        self.SelcetedID = self.dropdown.GetSelection()
+        self.EndModal(wx.ID_OK)
+
+
+    def OnAbort(self, event=None):
+        self.EndModal(wx.ID_CANCEL)
+
+
+
+def save_figure(self, evt=None):
+    """
+        A substitude function for save in:
+        matplotlib.backends.backend_wx.NavigationToolbar2Wx
+        We want to be able to give parameters such as dirname and filename.
+    """
+    try:
+        parent=self.canvas.HACK_parent
+        fig=self.canvas.HACK_fig
+        Page = self.canvas.HACK_Page
+        add = self.canvas.HACK_append
+        dirname = parent.dirname
+        filename = Page.tabtitle.GetValue().strip()+Page.counter[:2]+add
+        formats = fig.canvas.get_supported_filetypes()
+    except:
+        dirname = "."
+        filename = ""
+        formats = self.canvas.get_supported_filetypes()
+        parent = self
+    filetypestring = ""
+    keys = formats.keys()
+    keys.sort()
+    for key in keys:
+        filetypestring += formats[key]+" (*."+key+")|*."+key+"|"
+    # remove the trailing "|"
+    filetypestring = filetypestring[:-1]
+    dlg = wx.FileDialog(parent, "Save figure", dirname, filename,
+           filetypestring, wx.SAVE|wx.OVERWRITE_PROMPT)
+    # png is default
+    dlg.SetFilterIndex(keys.index("png"))
+    # user cannot do anything until he clicks "OK"
+    if dlg.ShowModal() == wx.ID_OK:
+        wildcard = keys[dlg.GetFilterIndex()]
+        filename = dlg.GetPath()
+        haswc = False
+        for key in keys:
+            if filename.lower().endswith("."+key):
+                haswc = True
+        if not haswc:
+            filename = filename+"."+wildcard
+        dirname = dlg.GetDirectory()
+        #savename = os.path.join(dirname, filename)
+        savename = filename
+        try:
+            self.canvas.figure.savefig(savename)
+        except: # e.g. RuntimeError
+            # Saving failed (a common cause is missing LaTeX support).
+            info = sys.exc_info()
+            errstr = "Could not save figure:\n"
+            errstr += str(filename)+"\n\n"
+            errstr += str(info[0])+"\n"
+            errstr += str(info[1])+"\n"
+            for tb_item in traceback.format_tb(info[2]):
+                errstr += tb_item
+            dlg3 = wx.MessageDialog(parent, errstr, "Error", 
+                style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+            dlg3.ShowModal()
+    else:
+        dirname = dlg.GetDirectory()
+    try:
+        parent.dirname = dirname
+    except:
+        pass
+
+
+class MyScrolledDialog(wx.Dialog):
+    def __init__(self, parent, overtext, readtext, title):
+        wx.Dialog.__init__(self, parent, title=title)
+        overtext = wx.StaticText(self, label=overtext)
+        text = wx.TextCtrl(self, -1, readtext, size=(500,400),
+                           style=wx.TE_MULTILINE | wx.TE_READONLY)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+        btnsizer = wx.BoxSizer()
+        btn = wx.Button(self, wx.ID_OK)#, "OK ")
+        btnsizer.Add(btn, 0, wx.ALL, 5)
+        btnsizer.Add((5,-1), 0, wx.ALL, 5)
+        btn = wx.Button(self, wx.ID_CANCEL)#, "Abort ")
+        btnsizer.Add(btn, 0, wx.ALL, 5)
+        sizer.Add(overtext, 0, wx.EXPAND|wx.ALL, 5)   
+        sizer.Add(text, 0, wx.EXPAND|wx.ALL, 5)   
+        sizer.Add(btnsizer, 0, wx.EXPAND|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)   
+        self.SetSizerAndFit(sizer)
+        
+        
+class MyOKAbortDialog(wx.Dialog):
+    def __init__(self, parent, text, title):
+        wx.Dialog.__init__(self, parent, title=title)
+        overtext = wx.StaticText(self, label=text)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+        btnsizer = wx.BoxSizer()
+        btn = wx.Button(self, wx.ID_OK)#, "OK ")
+        btnsizer.Add(btn, 0, wx.ALL, 5)
+        btnsizer.Add((5,-1), 0, wx.ALL, 5)
+        btn = wx.Button(self, wx.ID_CANCEL)#, "Abort ")
+        btnsizer.Add(btn, 0, wx.ALL, 5)
+        sizer.Add(overtext, 0, wx.EXPAND|wx.ALL, 5)   
+        sizer.Add(btnsizer, 0, wx.EXPAND|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)   
+        self.SetSizerAndFit(sizer)
+        
+        
+class MyYesNoAbortDialog(wx.Dialog):
+    def __init__(self, parent, text, title):
+        wx.Dialog.__init__(self, parent, title=title)
+        overtext = wx.StaticText(self, label=text)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+        btnsizer = wx.BoxSizer()
+        btn1 = wx.Button(self, wx.ID_YES)
+        #btn1.Bind(wx.EVT_BTN, self.YES)
+        btnsizer.Add(btn1, 0, wx.ALL, 5)
+        btnsizer.Add((1,-1), 0, wx.ALL, 5)
+        btn2 = wx.Button(self, wx.ID_NO)
+        btnsizer.Add(btn2, 0, wx.ALL, 5)
+        btnsizer.Add((1,-1), 0, wx.ALL, 5)
+        btn3 = wx.Button(self, wx.ID_CANCEL)
+        btnsizer.Add(btn3, 0, wx.ALL, 5)
+        sizer.Add(overtext, 0, wx.EXPAND|wx.ALL, 5)   
+        sizer.Add(btnsizer, 0, wx.EXPAND|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)   
+        self.SetSizerAndFit(sizer)
+        self.SetFocus()
+        self.Show()
+        
+    def YES(self, e):
+        self.EndModal(wx.ID_YES)
+
+
+
+# Monkey-patch the save function of matplotlib's wx toolbar so that our
+# own file dialog (with sensible directory and file name) is used.
+matplotlib.backends.backend_wx.NavigationToolbar2Wx.save = save_figure
diff --git a/src/frontend.py b/src/frontend.py
new file mode 100644
index 0000000..029373d
--- /dev/null
+++ b/src/frontend.py
@@ -0,0 +1,1519 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module frontend
+    The frontend displays the GUI (Graphical User Interface). All necessary
+    functions and modules are called from here.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import os
+import webbrowser
+import wx                               # GUI interface wxPython
+import wx.lib.agw.flatnotebook as fnb   # Flatnotebook (Tabs)
+import wx.lib.delayedresult as delayedresult
+import wx.py.shell
+import numpy as np                      # NumPy
+import platform
+import sys                              # System stuff
+import traceback                        # for Error handling
+
+try:
+    # contains e.g. update and icon, but no vital things.
+    import misc
+except ImportError:
+    print " Some modules are not available."
+    print " Update function will not work."
+
+# PyCorrFit modules
+import doc                          # Documentation/some texts
+import edclasses
+
+import models as mdls
+import openfile as opf              # How to treat an opened file
+import page
+import plotting
+import readfiles
+import tools                        # Some tools
+import usermodel
+
+
+## On Windows XP I had problems with the unicode characters.
+# I found this at 
+# http://stackoverflow.com/questions/5419/python-unicode-and-the-windows-console
+# and it helped:
+if platform.system() == 'Windows':
+    reload(sys)
+    sys.setdefaultencoding('utf-8')
+# ~paulmueller
+
+
+###########################################################
+class FlatNotebookDemo(fnb.FlatNotebook):
+    """
+    Flatnotebook class
+    """
+    def __init__(self, parent):
+        """Constructor"""
+        style = fnb.FNB_SMART_TABS|fnb.FNB_NO_NAV_BUTTONS|\
+              fnb.FNB_DROPDOWN_TABS_LIST|fnb.FNB_NODRAG|\
+              fnb.FNB_TABS_BORDER_SIMPLE|\
+              fnb.FNB_X_ON_TAB|fnb.FNB_NO_X_BUTTON
+        # Hiding the tab bar for a single tab is buggy on Mac,
+        # so we only enable it on Windows and Linux.
+        if platform.system().lower() in ["windows", "linux"]:
+            style = style|fnb.FNB_HIDE_ON_SINGLE_TAB
+        fnb.FlatNotebook.__init__(self, parent, wx.ID_ANY,
+                                  agwStyle=style)
+
+
+###########################################################
+class MyFrame(wx.Frame):
+    def __init__(self, parent, id, version):
+        ## Set initial variables that make sense
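+        # Default lag time axis: 1001 logarithmically spaced points
+        # from 1e-6 ms to 1e8 ms (internal time unit: 1 ms, see the
+        # module docstring).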
+        tau = 10**np.linspace(-6,8,1001)
+
+        self.version = version
+        wx.Frame.__init__(self, parent, id, "PyCorrFit " + self.version)
+        self.CreateStatusBar() # A Statusbar in the bottom of the window
+        self.StatusBar.SetStatusText("Find help and updates online:"+
+                                     " 'Help > Update'")
+        ## Properties of the Frame
+        initial_size = (768,700)
+        self.SetSize(initial_size)
+        self.SetMinSize(initial_size)
+
+        # Set this, so we know in which directory we are working in.
+        # This will change, when we load a session or data file.
+        self.dirname = os.curdir
+        self.filename = None
+
+        # Session Comment - may be edited and saved later
+        self.SessionComment = "This is a session comment. It will be saved" +\
+                              " as the session is saved."
+
+        ## Set variables
+        # The model module can be extended by importing user-defined
+        # functions.
+        # value_set and valuedict exist only for compatibility!
+        # mdls should be used for everything, since it is globally imported
+        # and modified by this program (e.g. when adding new functions).
+        self.value_set = mdls.values
+        self.valuedict = mdls.valuedict
+
+        # Some standard time scale
+        # We need this for the functions inside the "FittingPanel"s
+        self.tau = tau 
+
+        # Tab Counter
+        self.tabcounter = 1
+
+        # Background Correction List
+        # Here, each item is a list containing three elements:
+        # [0] average signal [kHz]
+        # [1] signal name (edited by user)
+        # [2] signal trace (tuple) ([ms], [kHz])
+        self.Background = list()
+
+        # A dictionary for all the opened tool windows
+        self.ToolsOpen = dict()
+        # A dictionary for all the tools
+        self.Tools = dict()
+
+        # Range selector (None if inactive)
+        # Fitting parameter range selection
+        # New as of 0.7.9
+        self.RangeSelector = None
+        
+        ## Setting up the menus.
+        # models, modeldict, modeltypes exist only for compatibility!
+        # mdls should be used for everything, since it is globally imported
+        # and modified by this program (e.g. when adding new functions).
+        self.models = mdls.models
+        self.modeldict = mdls.modeldict
+        self.modeltypes = mdls.modeltypes
+
+        self.modelmenudict = dict()
+        self.MakeMenu()
+
+        ## Create the Flatnotebook (Tabs Tabs Tabs!)
+        panel = wx.Panel(self)
+        self.panel = panel
+
+        self.notebook = FlatNotebookDemo(panel)
+        self.notebook.SetRightClickMenu(self.curmenu)
+
+        #self.notebook.SetAGWWindowStyleFlag(FNB_X_ON_TAB)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+        sizer.Add(self.notebook, 1, wx.ALL|wx.EXPAND, 5)
+        panel.SetSizer(sizer)
+        self.Layout()
+
+        self.Show()
+        
+        # Notebook Handler
+        self.notebook.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CLOSED, 
+                           self.OnFNBClosedPage)
+        self.notebook.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CHANGED, 
+                           self.OnFNBPageChanged)
+        # This is a hack since version 0.7.7:
+        # When the "X"-button on a page is pressed, ask the user
+        # if he really wants to close that page.
+        self.notebook._pages.Unbind(wx.EVT_LEFT_UP)
+        self.notebook._pages.Bind(wx.EVT_LEFT_UP, self.OnMyLeftUp)
+
+        # If user hits the "x", ask if he wants to save the session
+        self.Bind(wx.EVT_CLOSE, self.OnExit)
+
+        # Set window icon
+        try:
+            self.MainIcon = misc.getMainIcon()
+            wx.Frame.SetIcon(self, self.MainIcon)
+        except:
+            self.MainIcon = None
+
+
+    def add_fitting_tab(self, event=None, modelid=None, counter=None):
+        """ This function creates a new page inside the notebook.
+            If the function is called from a menu, the modelid is 
+            known by the event. If not, the modelid should be specified by 
+            *modelid*. 
+            *counter* specifies which page number we should use for our 
+            new page. If it is None, we will simply use *self.tabcounter*.
+            
+            *event*   - An event that has event.GetId() equal to a modelid
+            *modelid* - optional, directly set the modelid
+            *counter* - optional, set the "#" value of the page
+        """
+        if modelid is None:
+            # Get the model id from the menu
+            modelid = event.GetId()
+        if counter is not None:
+            # Set the tabcounter right, so the tabs are counted continuously.
+            counterint = int(counter.strip().strip(":").strip("#"))
+            self.tabcounter = max(counterint, self.tabcounter)
+        modelid = int(modelid)
+        counter = "#"+str(self.tabcounter)+": "
+        # Get the model for the page together
+        valuepack = mdls.valuedict[modelid]
+        active_labels = valuepack[0]
+        active_values = 1*valuepack[1]
+        active_fitting = 1*valuepack[2]
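+        # (multiplication by 1 creates copies, so the defaults stored
+        #  in mdls.valuedict stay untouched)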
+        active_parms = [active_labels, active_values, active_fitting]
+        model = mdls.modeldict[modelid][1]
+        # Create New Tab
+        Newtab = page.FittingPanel(self, counter, modelid, active_parms,
+                                   self.tau)
+        #self.Freeze()
+        self.notebook.AddPage(Newtab, counter+model, select=True)
+        #self.Thaw()
+        self.tabcounter = self.tabcounter + 1
+        # Enable the "Current" Menu
+        self.EnableToolCurrent(True)
+        #
+        #######
+        #
+        # This is a work-around to prevent a weird bug in version 0.7.8:
+        # The statistics OnPageChanged function is called but the parameters
+        # are displayed double if a new page is created and the statistics
+        # window is open.
+        # Find Tool Statistics
+        # Get open tools
+        toolkeys = self.ToolsOpen.keys()
+        for key in toolkeys:
+            tool = self.ToolsOpen[key]
+            try:
+                if tool.MyName=="STATISTICS":
+                    # Call the function properly.
+                    tool.OnPageChanged(Newtab)
+            except:
+                pass
+        #
+        #######
+        #
+        return Newtab
+
+
+    def EnableToolCurrent(self, enabled):
+        """ Independent on order of menus, enable or disable tools and
+            current menu.
+        """
+        # Tools menu is now always enabled
+        # tid = self.menuBar.FindMenu("&Tools")
+        # self.menuBar.EnableTop(tid, enabled)
+        cid = self.menuBar.FindMenu("Current &Page")
+        self.menuBar.EnableTop(cid, enabled)
+
+
+    def MakeMenu(self):
+        self.filemenu = wx.Menu()
+        # toolmenu and curmenu are public, because they need to be enabled/
+        # disabled when there are tabs/notabs.
+        self.toolmenu = wx.Menu()
+        # curmenu needs to be public, because we want to call it from the right
+        # click menu of a Page in fnb
+        self.curmenu = wx.Menu()
+        modelmenu = wx.Menu()
+        prefmenu = wx.Menu()
+        helpmenu = wx.Menu()
+        # wx.ID_ABOUT and wx.ID_EXIT are standard IDs provided by wxWidgets.
+        # self.filemenu
+        menuAddModel = self.filemenu.Append(wx.ID_ANY, 
+                          "&Import model", "Add a user defined model.")
+        menuLoadBatch = self.filemenu.Append(wx.ID_ANY, 
+                         "&Load data", "Loads one or multiple data files")
+        menuOpen = self.filemenu.Append(wx.ID_OPEN, "&Open session", 
+                                           "Restore a previously saved session")
+        self.filemenu.AppendSeparator()
+        self.menuComm = self.filemenu.Append(wx.ID_ANY, "Co&mment session", 
+                           "Add a comment to this session", kind=wx.ITEM_CHECK)
+        self.filemenu.Check(self.menuComm.GetId(), False)
+        menuClear = self.filemenu.Append(wx.ID_ANY, "&Clear session", 
+                          "Remove all pages but keep imported model functions.")
+        menuSave = self.filemenu.Append(wx.ID_SAVE, "&Save session", 
+                                   "Save entire Session")
+        self.filemenu.AppendSeparator()
+        menuExit = self.filemenu.Append(wx.ID_EXIT,"E&xit",
+                                                        "Terminate the program")
+        # prefmenu
+        self.MenuUseLatex = prefmenu.Append(wx.ID_ANY, "Use Latex",
+                            "Enables/Disables usage of Latex for image saving.",
+                            kind=wx.ITEM_CHECK)
+        self.MenuVerbose = prefmenu.Append(wx.ID_ANY, "Verbose mode",
+                           "Enables/Disables output of additional information.",
+                            kind=wx.ITEM_CHECK)
+        self.MenuShowWeights = prefmenu.Append(wx.ID_ANY, "Show weights",
+                           "Enables/Disables displaying weights of fit.",
+                            kind=wx.ITEM_CHECK)
+        self.MenuShowWeights.Check()
+        # toolmenu
+        toolkeys = tools.ToolDict.keys()
+        toolkeys.sort()
+        for ttype in toolkeys:
+            for tool in np.arange(len(tools.ToolDict[ttype])):
+                menu = self.toolmenu.Append(wx.ID_ANY, 
+                       tools.ToolName[ttype][tool][0],
+                       tools.ToolName[ttype][tool][1], kind=wx.ITEM_CHECK)
+                self.toolmenu.Check(menu.GetId(), False)
+                # Append tool to list of tools with menu ID
+                self.Tools[menu.GetId()] = tools.ToolDict[ttype][tool]
+                # Bindings
+                # On tool only needs the Id of the wx.EVT_MENU
+                self.Bind(wx.EVT_MENU, self.OnTool, menu)
+            if ttype != toolkeys[-1]:
+                self.toolmenu.AppendSeparator()
+        # curmenu
+        menuImportData = self.curmenu.Append(wx.ID_ANY, "&Import Data",
+                                             "Import experimental FCS curve")
+
+        menuSaveData = self.curmenu.Append(wx.ID_ANY, "&Save data (*.csv)",
+                                           "Save data (comma separated values)")
+
+        menuSavePlotCorr = self.curmenu.Append(wx.ID_ANY, 
+                                     "&Save correlation as image",
+                                     "Export current plot as image.")
+
+        menuSavePlotTrace = self.curmenu.Append(wx.ID_ANY, 
+                                     "&Save trace as image",
+                                     "Export current trace as image.")
+        self.curmenu.AppendSeparator()
+        menuClPa = self.curmenu.Append(wx.ID_ANY, "&Close Page",
+                                       "Close Current Page")
+        # model menu
+        # Integrate models into menu
+        keys = mdls.modeltypes.keys()
+        keys.sort()
+        for modeltype in keys:
+            # Now we have selected a type of model
+            # Create a submenu
+            submenu = wx.Menu()
+            modelmenu.AppendMenu(wx.ID_ANY, modeltype, submenu)
+            # Append to menulist
+            self.modelmenudict[modeltype] = submenu
+            for modelid in mdls.modeltypes[modeltype]:
+                # Now we add every model that belongs to that type
+                model = mdls.modeldict[modelid]
+                if platform.system().lower() == "darwin" and hasattr(sys, 'frozen'):
+                    ###
+                    ### Work-around for the frozen Mac version
+                    ###
+                    ### (strange UTF-8 decoding error,
+                    ###  would work with misc.removewrongUTF8)
+                    b = model[1].split("(")[0].strip()
+                    c = misc.removewrongUTF8(model[2])
+                    menuentry = submenu.Append(model[0],b,c)
+                else:
+                    menuentry = submenu.Append(model[0], model[1], model[2])
+                self.Bind(wx.EVT_MENU, self.add_fitting_tab, menuentry)
+        # help menu
+        menuDocu = helpmenu.Append(wx.ID_ANY, "&Documentation",
+                                    "PyCorrFit documentation")
+        menuWiki = helpmenu.Append(wx.ID_ANY, "&Wiki",
+                          "PyCorrFit wiki pages by users for users (online)")
+        menuUpdate = helpmenu.Append(wx.ID_ANY, "&Update",
+                                    "Check for new version"+
+                                     " (Web access required)")
+        helpmenu.AppendSeparator()
+        menuShell = helpmenu.Append(wx.ID_ANY, "S&hell",
+                                    "A Python shell")
+        helpmenu.AppendSeparator()
+        menuSoftw = helpmenu.Append(wx.ID_ANY, "&Software",
+                                    "Information about the software used")
+        menuAbout = helpmenu.Append(wx.ID_ABOUT, "&About",
+                                    "Information about this program")
+        # Create the menubar.
+        self.menuBar = wx.MenuBar()
+        # Adding all the menus to the MenuBar
+        self.menuBar.Append(self.filemenu,"&File") 
+        self.menuBar.Append(self.toolmenu,"&Tools") 
+        self.menuBar.Append(self.curmenu,"Current &Page") 
+        self.menuBar.Append(modelmenu,"&Model") 
+        self.menuBar.Append(prefmenu,"&Preferences") 
+        self.menuBar.Append(helpmenu,"&Help")
+        self.SetMenuBar(self.menuBar) # Adding the MenuBar to the Frame content.
+        self.EnableToolCurrent(False)
+        ## Set events
+        #File
+        #self.Bind(wx.EVT_MENU, self.OnLoadSingle, menuLoadSingle)
+        self.Bind(wx.EVT_MENU, self.OnLoadBatch, menuLoadBatch)
+        self.Bind(wx.EVT_MENU, self.OnAddModel, menuAddModel)
+        self.Bind(wx.EVT_MENU, self.OnCommSession, self.menuComm)
+        self.Bind(wx.EVT_MENU, self.OnClearSession, menuClear)
+        self.Bind(wx.EVT_MENU, self.OnOpenSession, menuOpen)
+        self.Bind(wx.EVT_MENU, self.OnSaveSession, menuSave)
+        self.Bind(wx.EVT_MENU, self.OnExit, menuExit)
+        # Current
+        self.Bind(wx.EVT_MENU, self.OnImportData, menuImportData)
+        self.Bind(wx.EVT_MENU, self.OnSaveData, menuSaveData)
+        self.Bind(wx.EVT_MENU, self.OnSavePlotCorr, menuSavePlotCorr)
+        self.Bind(wx.EVT_MENU, self.OnSavePlotTrace, menuSavePlotTrace)
+        self.Bind(wx.EVT_MENU, self.OnDeletePage, menuClPa)
+        # Help
+        self.Bind(wx.EVT_MENU, self.OnSoftware, menuSoftw)
+        self.Bind(wx.EVT_MENU, self.OnAbout, menuAbout)
+        self.Bind(wx.EVT_MENU, self.OnUpdate, menuUpdate)
+        self.Bind(wx.EVT_MENU, self.OnDocumentation, menuDocu)
+        self.Bind(wx.EVT_MENU, self.OnWiki, menuWiki)
+        self.Bind(wx.EVT_MENU, self.OnShell, menuShell)
+
+
+    def OnAbout(self, event=None):
+        # Show About Information
+        description = doc.description()
+        licence = doc.licence()
+        info = wx.AboutDialogInfo()
+        #info.SetIcon(wx.Icon('hunter.png', wx.BITMAP_TYPE_PNG))
+        info.SetName('PyCorrFit')
+        info.SetVersion(self.version)
+        info.SetDescription(description)
+        info.SetCopyright('(C) 2011 - 2012 Paul Müller')
+        info.SetWebSite(doc.HomePage)
+        info.SetLicence(licence)
+        info.SetIcon(misc.getMainIcon(pxlength=64))
+        info.AddDeveloper('Paul Müller')
+        info.AddDocWriter('Paul Müller')
+        wx.AboutBox(info)
+        
+
+    def OnAddModel(self, event=None):
+        """ Import a model from an external .txt file. See example model
+            functions available on the web.
+        """
+        # Add a model using the dialog.
+        filters = "text file (*.txt)|*.txt"
+        dlg = wx.FileDialog(self, "Open model file", 
+                            self.dirname, "", filters, wx.OPEN)
+        if dlg.ShowModal() == wx.ID_OK:
+            NewModel = usermodel.UserModel(self)
+            # Workaround since 0.7.5
+            (dirname, filename) = os.path.split(dlg.GetPath())
+            #filename = dlg.GetFilename()
+            #dirname = dlg.GetDirectory()
+            self.dirname = dirname
+            # Try to import a selected .txt file
+            try:
+                NewModel.GetCode( os.path.join(dirname, filename) )
+            except:
+                # The file does not seem to be what it claims to be.
+                info = sys.exc_info()
+                errstr = "Unknown file format:\n"
+                errstr += str(filename)+"\n\n"
+                errstr += str(info[0])+"\n"
+                errstr += str(info[1])+"\n"
+                for tb_item in traceback.format_tb(info[2]):
+                    errstr += tb_item
+                dlg3 = wx.MessageDialog(self, errstr, "Error", 
+                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg3.ShowModal()
+                del NewModel
+                return
+            # Test the code for sympy compatibility.
+            # If you write your own parser, this might be easier.
+            try:
+                NewModel.TestFunction()
+            except:
+                # This means that the imported model file could be
+                # contaminated. Ask the user how to proceed.
+                text = "The model parsing check raised an Error.\n"+\
+                       "This could be the result of a wrong Syntax\n"+\
+                       "or an error of the parser.\n"+\
+                       "This might be dangerous. Procceed\n"+\
+                       "only, if you trust the source of the file.\n"+\
+                       "Try and import offensive file: "+filename+"?"
+                dlg2 = wx.MessageDialog(self, text, "Unsafe Operation",
+                        style=wx.ICON_EXCLAMATION|wx.YES_NO|wx.STAY_ON_TOP)
+                if dlg2.ShowModal() == wx.ID_YES:
+                    NewModel.ImportModel()
+                else:
+                    del NewModel
+                    return
+            else:
+                # The model was loaded correctly
+                NewModel.ImportModel()
+               
+        else:
+            dirname = dlg.GetDirectory()
+            dlg.Destroy()
+        self.dirname = dirname
+            
+
+    def OnClearSession(self,e=None,clearmodels=False):
+        """Open a previously saved session. """
+        numtabs = self.notebook.GetPageCount()
+        # Ask, if user wants to save current session.
+        if numtabs > 0:
+            dial = wx.MessageDialog(self, 
+                'Do you wish to save this session first?', 
+                'Save current session?', 
+                 wx.ICON_QUESTION | wx.CANCEL | wx.YES_NO | wx.NO_DEFAULT )
+#            dial = edclasses.MyYesNoAbortDialog(self, 
+#                    'Do you wish to save this session first?',
+#                    'Save current session?')
+            result = dial.ShowModal()
+            dial.Destroy()
+            if result == wx.ID_CANCEL:
+                return "abort"      # stop this function - do nothing.
+            elif result == wx.ID_YES:
+                self.OnSaveSession()
+            elif result == wx.ID_NO:
+                pass
+        # Delete all the pages
+        self.notebook.DeleteAllPages()
+        # Disable all the dialogs and menus
+        self.EnableToolCurrent(False)
+        self.OnFNBPageChanged()
+        self.tabcounter = 1
+        self.filename = None
+        self.SetTitleFCS(None)
+        self.SessionComment = "You may enter some information here."
+        self.Background = list()
+        ## Do we want to keep user defined models after session clearing?
+        if clearmodels:
+            # Also reset user defined models
+            for modelid in mdls.modeltypes["User"]:    
+                mdls.values.remove(mdls.valuedict[modelid])
+                del mdls.valuedict[modelid]
+                mdls.models.remove(mdls.modeldict[modelid])
+                del mdls.modeldict[modelid]
+            mdls.modeltypes["User"] = list()
+            # Model Menu
+            menu=self.modelmenudict["User"]
+            for item in menu.GetMenuItems():
+                menu.RemoveItem(item)
+
+
+    def OnCommSession(self,e=None):
+        """ Dialog for commenting on session. """
+        try:
+            self.EditCommentDlg.IsEnabled()
+        except AttributeError:
+            # Dialog is not opened
+            self.EditCommentDlg = tools.EditComment(self)
+            self.EditCommentDlg.Bind(wx.EVT_CLOSE, self.EditCommentDlg.OnClose)
+            self.filemenu.Check(self.menuComm.GetId(), True)
+        else:
+            # Close Dialog
+            self.EditCommentDlg.OnClose()
+
+
+    def OnDeletePage(self, event=None):
+        """
+        This method is based on the flatnotebook demo
+ 
+        It removes a page from the notebook
+        """
+        # Ask the user if he really wants to delete the page.
+        title = self.notebook.GetCurrentPage().tabtitle.GetValue()
+        numb = self.notebook.GetCurrentPage().counter.strip().strip(":")
+        text = "This will close page "+numb+"?\n"+title
+        dlg = edclasses.MyOKAbortDialog(self, text, "Warning")
+        if dlg.ShowModal() == wx.ID_OK:
+            self.notebook.DeletePage(self.notebook.GetSelection())
+            self.OnFNBClosedPage()
+            if self.notebook.GetPageCount() == 0:
+                self.OnFNBPageChanged()
+
+
+    def OnDocumentation(self, e=None):
+        """ Get the documentation and view it with browser"""
+        filename = doc.GetLocationOfDocumentation()
+        if filename is None:
+            # Now we have to tell the user that there is no documentation
+            self.StatusBar.SetStatusText("...documentation not found.")
+        else:
+            self.StatusBar.SetStatusText("...documentation: "+filename)
+            if platform.system().lower() == 'windows':
+                os.system("start /b "+filename)
+            elif platform.system().lower() == 'linux':
+                os.system("xdg-open "+filename+" &")
+            elif platform.system().lower() == 'darwin':
+                os.system("open "+filename+" &")
+            else:
+                # defaults to linux style:
+                os.system("xdg-open "+filename+" &")
+        
+
+    def OnExit(self,e=None):
+        numtabs = self.notebook.GetPageCount()
+        # Ask, if user wants to save current session.
+        if numtabs > 0:
+            dial = wx.MessageDialog(self, 
+                'Do you wish to save this session first?', 
+                'Save current session?', 
+                 wx.ICON_QUESTION | wx.CANCEL | wx.YES_NO | wx.NO_DEFAULT )
+            result = dial.ShowModal()
+            dial.Destroy()
+            if result == wx.ID_CANCEL:
+                return # stop this function - do nothing.
+            elif result == wx.ID_YES:
+                self.OnSaveSession()
+        # Exit the Program
+        self.Destroy()
+
+
+    def OnFNBClosedPage(self,e=None):
+        """ Called, when a page has been closed """
+        if self.notebook.GetPageCount() == 0:
+            # Grey out tools
+            self.EnableToolCurrent(False)
+
+
+
+    def OnFNBPageChanged(self,e=None, Page=None):
+        """ Called, when 
+            - Page focus switches to another Page
+            - Page with focus changes significantly:
+                - experimental data is loaded
+                - weighted fit was done
+        """
+        # Get the Page
+        if Page is None:
+            Page = self.notebook.GetCurrentPage()
+        keys = self.ToolsOpen.keys()
+        for key in keys:
+            # Update the information
+            self.ToolsOpen[key].OnPageChanged(Page)
+        # parameter range selection tool for page.
+        if self.RangeSelector is not None:
+            try:
+                self.RangeSelector.OnPageChanged(Page)
+            except:
+                pass
+        # Bugfix-workaround for mac:
+        # non-existing tabs are still displayed upon clearing session
+        if platform.system().lower() == "darwin":
+            if self.notebook.GetPageCount() == 0:
+                self.notebook.Hide()
+            else:
+                self.notebook.Show()
+            
+
+
+    def OnImportData(self,e=None):
+        """Import experimental data from a all filetypes specified in 
+           *opf.Filetypes*.
+           Is called by the curmenu and applies to currently opened model.
+        """
+        # Open a data file
+        # Get Data
+        SupFiletypes = opf.Filetypes.keys()
+        SupFiletypes.sort()
+        filters = ""
+        for i in np.arange(len(SupFiletypes)):
+            # Add to the filetype filter
+            filters = filters+SupFiletypes[i]
+            if i+1 != len(SupFiletypes):
+                # Add a separator, but not behind the last entry
+                # This is wx widgets stuff.
+                filters = filters+"|"
+        dlg = wx.FileDialog(self, "Open data file", 
+            self.dirname, "", filters, wx.OPEN)
+        if dlg.ShowModal() == wx.ID_OK:
+            # The filename the page will get
+            path = dlg.GetPath()            # Workaround since 0.7.5
+            (self.dirname, self.filename) = os.path.split(path)
+            #self.filename = dlg.GetFilename()
+            #self.dirname = dlg.GetDirectory()
+            try:
+                Stuff = readfiles.openAny(self.dirname, self.filename)
+            except:
+                # The file format is not supported.
+                info = sys.exc_info()
+                errstr = "Unknown file format:\n"
+                errstr += str(self.filename)+"\n\n"
+                errstr += str(info[0])+"\n"
+                errstr += str(info[1])+"\n"
+                for tb_item in traceback.format_tb(info[2]):
+                    errstr += tb_item
+                dlg = wx.MessageDialog(self, errstr, "Error", 
+                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg.ShowModal()
+                return
+            else:
+                dataexp = Stuff["Correlation"]
+                trace = Stuff["Trace"]
+                curvelist = Stuff["Type"]
+                filename = Stuff["Filename"]
+                # If curvelist is a list with more than one item, we are
+                # importing more than one curve per file. Therefore, we
+                # need to create more pages for this file.
+                #
+                # We want to give the user the possibility to choose from
+                # several types of input functions. If curvelist contains
+                # more than one type of data, like "AC1", "AC2", "CC1", ...
+                # then the user may wish to only import "AC1" or "AC2"
+                # functions.
+                curvetypes = dict()
+                for i in np.arange(len(curvelist)):
+                    try:
+                        curvetypes[curvelist[i]].append(i)
+                    except KeyError:
+                        curvetypes[curvelist[i]] = [i]
+                # Now we have a dictionary curvetypes with keys that name
+                # items in curvelist and which point to indices in curvelist.
+                # We will display a dialog that lets the user choose what
+                # to import.
+                keys = curvetypes.keys()
+                if len(keys) > 1:
+                    Chosen = tools.ChooseImportTypes(self, curvetypes)
+                    newcurvelist = list()
+                    newfilename = list()
+                    newdataexp = list()
+                    newtrace = list()
+                    if Chosen.ShowModal() == wx.ID_OK:
+                        keys = Chosen.keys
+                        if len(keys) == 0:
+                            # do not do anything
+                            return
+                        for key in keys:
+                            # create a new curvelist with the selected curves
+                            for index in curvetypes[key]:
+                                newcurvelist.append(curvelist[index])
+                                newfilename.append(filename[index])
+                                newdataexp.append(dataexp[index])
+                                newtrace.append(trace[index])
+                        curvelist = newcurvelist
+                        filename = newfilename
+                        dataexp = newdataexp
+                        trace = newtrace
+                    else:
+                        return
+                    Chosen.Destroy()
+                # curvelist is a list of numbers or labels that correspond
+                # to each item in dataexp or trace. Each curvelist/filename
+                # item will be converted to a string and then added to the
+                # page's title.
+                num = len(curvelist) 
+                # Show a nice progress dialog:
+                style = wx.PD_REMAINING_TIME|wx.PD_SMOOTH|wx.PD_AUTO_HIDE|\
+                        wx.PD_CAN_ABORT
+                dlg = wx.ProgressDialog("Import", "Loading pages...",
+                                        maximum = num, parent=self, style=style)
+                # Get current page and populate
+                CurPage = self.notebook.GetCurrentPage()
+                for i in np.arange(num):
+                    # Fill Page with data
+                    self.ImportData(CurPage, dataexp[i], trace[i],
+                                   curvetype=curvelist[i], filename=filename[i],
+                                   curveid=i)
+                    # Let the user abort, if he wants to:
+                    # We want to do this here before an empty page is added
+                    # to the notebook.
+                    if not dlg.Update(i+1, "Loading pages...")[0]:
+                        dlg.Destroy()
+                        return
+                    if i+1 != num:
+                        # Create new page.
+                        # (Add n-1 pages while importing.)
+                        CurPage = self.add_fitting_tab(event=None, 
+                                             modelid=CurPage.modelid,
+                                             counter=None)
+                # We are finished here:
+                return
+        else:
+            # User pressed "Abort" - do nothing.
+            self.dirname = dlg.GetDirectory()
+            dlg.Destroy()
+            return
+
+
+    def OnMyLeftUp(self, event):
+        """
+        Wrapper for LeftUp:
+        We want to have a wrapper for the page closing event.
+        The code was copied from "flatnotebook.py"        
+        Handles the ``wx.EVT_LEFT_UP`` event for L{PageContainer}.
+        :param `event`: a `wx.MouseEvent` event to be processed.
+        """
+        # Get the page container
+        pc = self.notebook._pages
+        # forget the zone that was initially clicked
+        self._nLeftClickZone = fnb.FNB_NOWHERE
+        where, tabIdx = pc.HitTest(event.GetPosition())
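+        # Hit-test result codes as defined in flatnotebook.py
+        # (2: close button of the notebook, 3: "x" button on a tab):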
+        FNB_X = 2
+        FNB_TAB_X = 3
+        if not pc.HasAGWFlag(fnb.FNB_NO_TAB_FOCUS):
+            # Make sure selected tab has focus
+            self.SetFocus()
+        if where == FNB_X:
+            # Make sure that the button was pressed before
+            if pc._nXButtonStatus != fnb.FNB_BTN_PRESSED:
+                return
+            pc._nXButtonStatus = fnb.FNB_BTN_HOVER
+            self.OnDeletePage(self.notebook.GetCurrentPage())
+        elif where == FNB_TAB_X:
+            # Make sure that the button was pressed before
+            if pc._nTabXButtonStatus != fnb.FNB_BTN_PRESSED:
+                return 
+            pc._nTabXButtonStatus = fnb.FNB_BTN_HOVER
+            self.OnDeletePage(self.notebook.GetCurrentPage())
+        else:
+            # Call what should have been called.
+            pc.OnLeftUp(event)
+
+
+    def ImportData(self, Page, dataexp, trace, curvetype="",
+                   filename="", curveid="", run=""):
+        CurPage = Page
+        # Import traces. Traces are usually put into a list, even if there
+        # is only one trace. The reason is, that for cross correlation, we 
+        # have two traces and thus we have to import both.
+        # In case of cross correlation, save that list of (two) traces
+        # in the page.tracecc variable. Else, save the trace for auto-
+        # correlations directly into the page.trace variable. We are
+        # doing this in order to keep data types clean.
+        if curvetype[0:2] == "CC":
+            # For cross correlation, the trace has two components
+            CurPage.IsCrossCorrelation = True
+            CurPage.tracecc = trace
+            CurPage.trace = None
+        else:
+            CurPage.IsCrossCorrelation = False
+            CurPage.tracecc = None
+            if trace is not None:
+                CurPage.trace = trace
+                CurPage.traceavg = trace[:,1].mean()
+        # Import correlation function
+        CurPage.dataexpfull = dataexp
+        # We need this to be able to work with the data.
+        # It actually does nothing to the data right now.
+        CurPage.startcrop = None
+        CurPage.endcrop = None
+        # It might be possible, that we want the channels to be
+        # fixed to some interval. This is the case if the 
+        # checkbox on the "Channel selection" dialog is checked.
+        self.OnFNBPageChanged()
+        # Enable Fitting Button
+        CurPage.Fit_enable_fitting()
+        # Set new tabtitle value and strip leading or trailing
+        # white spaces.
+        if run != "":
+            title = "{}-r{:03d}   id{:03d} {}".format(curvetype,int(run),
+                                                   int(curveid), filename)
+        else:
+            title = "{} id{:03d}   {}".format(curvetype, int(curveid), filename)
+        CurPage.tabtitle.SetValue(title.strip())
+        # Plot everything
+        CurPage.PlotAll()
+        # Call this function to allow the "Channel Selection" window that
+        # might be open to update itself.
+        # We are aware of the fact, that we just did that
+        self.OnFNBPageChanged()
+
+
+    def OnLoadBatch(self, e):
+        """ Open multiple data files and apply a single model to them
+            We will create a new window where the user may decide which
+            model to use.
+        """
+        ## Browse the file system
+        SupFiletypes = opf.Filetypes.keys()
+        # Sort them so we have "All supported filetypes" up front
+        SupFiletypes.sort()
+        filters = ""
+        for i in np.arange(len(SupFiletypes)):
+            # Add to the filetype filter
+            filters = filters+SupFiletypes[i]
+            if i+1 != len(SupFiletypes):
+                # Add a separator if item is not last item
+                filters = filters+"|"
+        dlg = wx.FileDialog(self, "Open data files", 
+            self.dirname, "", filters, wx.OPEN|wx.FD_MULTIPLE)
+        if dlg.ShowModal() == wx.ID_OK:
+            Datafiles = dlg.GetFilenames()
+            # We rely on sorted filenames
+            Datafiles.sort()
+            # Workaround since 0.7.5
+            paths = dlg.GetPaths()
+            if len(paths) != 0:
+                self.dirname = os.path.split(paths[0])[0]
+            else:
+                self.dirname = dlg.GetDirectory()
+            dlg.Destroy()
+        else:
+            dlg.Destroy()
+            return
+        ## Get information from the data files and let the user choose
+        ## which type of curves to load and the corresponding model.
+        # List of filenames that could not be opened
+        BadFiles = list()
+        # Lists for correlation, trace, type and names
+        Correlation = list()
+        Trace = list()
+        Type = list()
+        Filename = list()   # there might be zipfiles with additional name info
+        #Run = list()        # Run number connecting AC1 AC2 CC12 CC21
+        Curveid = list()    # Curve ID of each curve in a file
+        for afile in Datafiles:
+            try:
+                Stuff = readfiles.openAny(self.dirname, afile)
+            except:
+                # The file does not seem to be what it claims to be.
+                BadFiles.append(afile)
+            else:
+                for i in np.arange(len(Stuff["Type"])):
+                    Correlation.append(Stuff["Correlation"][i])
+                    Trace.append(Stuff["Trace"][i])
+                    Type.append(Stuff["Type"][i])
+                    Filename.append(Stuff["Filename"][i])
+                    #Curveid.append(str(i+1))
+        # Add number of the curve within a file.
+        nameold = None
+        counter = 1
+        for name in Filename:
+            if name == nameold:
+                Curveid.append(counter)
+                counter += 1
+            else:
+                counter = 1
+                nameold = name
+                Curveid.append(counter)
+                counter += 1
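+        # Example: Filename = ["a.csv", "a.csv", "b.csv"] yields
+        # Curveid = [1, 2, 1].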
+        # If there are any BadFiles, we will let the user know.
+        if len(BadFiles) > 0:
+            # One or more files do not seem to be what they claim to be.
+            errstr = "The following files could not be processed:\n"
+            for item in BadFiles:
+                errstr += " "+item
+            dlg = wx.MessageDialog(self, errstr, "Error", 
+                style=wx.ICON_WARNING|wx.OK|wx.CANCEL|wx.STAY_ON_TOP)
+            if dlg.ShowModal() == wx.ID_CANCEL:
+                return
+        # Abort, if there are no curves left
+        if len(Type) == 0:
+            return
+        # We want to give the user the possibility to choose from
+        # several types of input functions. If curvelist contains
+        # more than one type of data, like "AC1", "AC2", "CC1", ...
+        # then the user may wish to only import "AC1" or "AC2"
+        # functions.
+        curvetypes = dict()
+        for i in np.arange(len(Type)):
+            try:
+                curvetypes[Type[i]].append(i)
+            except KeyError:
+                curvetypes[Type[i]] = [i]
+        # Fill in the Run information
+        keys = curvetypes.keys()
+        # This part is a little tricky. We assume at some point that different
+        # types of curves (AC1, AC2) belong to the same run. The only possible
+        # check/assumption that we can make is:
+        # If all curve types have the same number of curves, then the curves
+        # from different curve types belong together.
+        # Unfortunately, we do not know how the curves are ordered. It could
+        # be like this:
+        # AC1-r1, AC2-r1, CC12-r1, CC21-r1, AC1-r2, AC2-r2, ...
+        # or otherwise interlaced like this:
+        # AC1-r1, AC2-r1, AC1-r2, AC2-r2, ... ,  CC12-r1, CC21-r1, ...
+        # What we do know is that the first occurrence of AC1 matches up with
+        # the first occurrences of AC2, CC12, etc.
+        # We create the list/array *Run* whose elements are the run number
+        # at the position of the curves in *Type*.
+        # Check whether all curve types have the same number of curves:
+        lentypes = np.zeros(len(keys), dtype=int)
+        for i in range(len(keys)):
+            lentypes[i] = len(curvetypes[keys[i]])
+        if len(np.unique(np.array(lentypes))) == 1 and lentypes[0] != 0:
+            # Made sure that AC1 AC2 CC12 CC21 have same length
+            # Create Runs such that they are matched.
+            # We assume that the curves are somehow interlaced and that
+            # the Nth occurence of the keys in Types correspond to the
+            # matching curves.
+            # Also make sure that number starts at one for each selected file.
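+            # Example: a single file with Type = ["AC1", "AC2", "AC1", "AC2"]
+            # yields Run = [1, 1, 2, 2].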
+            coords = np.zeros(len(keys), dtype=np.int)
+            Run = np.zeros(len(Curveid), dtype=np.int)
+            WorkType = 1*Type
+            d = 0
+            for fname in np.unique(Filename):
+                # unique returns sorted file names.
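+                # (Python 2 integer division; assumes each file holds a
+                #  multiple of len(keys) curves)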
+                for i in range(Filename.count(fname)/len(keys)):
+                    for k in range(len(keys)):
+                        coords[k] = WorkType.index(keys[k])
+                        WorkType[coords[k]] = None
+                    Run[coords] = i + 1
+                #del WorkType
+        else:
+            Run = [""] * len(Curveid)
+        # Now we have a dictionary curvetypes with keys that name
+        # items in *Type* and which point to indices in *Type*.
+        # We will display a dialog that lets the user choose what
+        # to import.
+        keys = curvetypes.keys()
+        # Start the dialog for choosing types and model functions
+        labels=list()
+        for i in np.arange(len(Filename)):
+            if Run[i] != "":
+                labels.append("{}-r{:03d} {}".format(Type[i], Run[i],
+                                                     Filename[i]))
+            else:
+                labels.append("{}-id{:03d} {}".format(Type[i], Curveid[i],
+                                                      Filename[i]))
+        Chosen = tools.ChooseImportTypesModel(self, curvetypes, Correlation,
+                                              labels=labels)
+        newCorrelation = list()
+        newTrace = list()
+        newType = list()
+        newFilename = list()
+        modelList = list()
+        newCurveid = list()
+        newRun = list()
+        if Chosen.ShowModal() == wx.ID_OK:
+            keys = Chosen.typekeys
+            # keepcurvesindex is a list of indices pointing to Type or
+            # Correlation for curves that are supposed to be kept.
+            keepcurvesindex = Chosen.keepcurvesindex
+            # modelids is a dictionary with chosen modelids.
+            # The keys of modelids are indices in the *Type* etc. lists.
+            modelids = Chosen.modelids
+            if len(keys) == 0:
+                # do not do anything
+                return
+            for key in keys:
+                # create a new curvelist with the selected curves
+                for index in curvetypes[key]:
+                    if keepcurvesindex.count(index) == 1:
+                        newCorrelation.append(Correlation[index])
+                        newTrace.append(Trace[index])
+                        newType.append(Type[index])
+                        newFilename.append(Filename[index])
+                        modelList.append(modelids[index])
+                        newCurveid.append(Curveid[index])
+                        newRun.append(Run[index])
+            Correlation = newCorrelation
+            Trace = newTrace
+            Type = newType
+            Filename = newFilename
+            Curveid = newCurveid
+            Run = newRun
+        else:
+            return
+        Chosen.Destroy()
+        ## Import the data into new pages
+        # Type is a list of numbers or labels that correspond to each
+        # item in Correlation or Trace. Each Type/Filename item will be
+        # converted to a string and then added to the page's title.
+        num = len(Type)
+        # Show a nice progress dialog:
+        style = wx.PD_REMAINING_TIME|wx.PD_SMOOTH|wx.PD_AUTO_HIDE|\
+                wx.PD_CAN_ABORT
+        dlg = wx.ProgressDialog("Import", "Loading pages..."
+        , maximum = num, parent=self, style=style)
+        for i in np.arange(num):
+            # create a new page
+            CurPage = self.add_fitting_tab(event=None, 
+                                     modelid=modelList[i],
+                                     counter=None)
+            # Fill Page with data
+            self.ImportData(CurPage, Correlation[i], Trace[i],
+                            curvetype=Type[i], filename=Filename[i],
+                            curveid=str(Curveid[i]), run=str(Run[i]))
+            # Let the user abort, if he wants to:
+            # We want to do this here before an empty page is added
+            # to the notebook.
+            if not dlg.Update(i+1, "Loading pages...")[0]:
+                dlg.Destroy()
+                return
+        # If the user did not select curves but chose a model, destroy
+        # the dialog.
+        dlg.Destroy()
+
+
+    def OnOpenSession(self,e=None,sessionfile=None):
+        """Open a previously saved session. 
+           Optional parameter sessionfile defines the file that shall be
+           automatically loaded (without a dialog)
+        """
+        # We need to clear the session before opening one.
+        # This will also ask, if user wants to save the current session.
+        clear = self.OnClearSession(clearmodels=True)
+        if clear == "abort":
+            # User pressed abort when he was asked if he wants to save the
+            # session.
+            return "abort"
+        Infodict, self.dirname, filename = \
+         opf.OpenSession(self, self.dirname, sessionfile=sessionfile)
+        # Check, if a file has been opened
+        if filename is not None:
+            # Reset all Pages. We already gave the user the possibility to
+            # save his session.
+            # self.OnClearSession()
+            self.filename = filename
+            self.SetTitleFCS(self.filename)
+            ## Background traces
+            try:
+                self.Background = Infodict["Backgrounds"]
+            except:
+                pass
+            ## Preferences
+            ## if Preferences is Not None:
+            ## add them!
+            # External functions
+            for key in Infodict["External Functions"].keys():
+                NewModel = usermodel.UserModel(self)
+                # NewModel.AddModel(self, code)
+                # code is a list with strings
+                # each string is one line
+                NewModel.AddModel(
+                    Infodict["External Functions"][key].splitlines())
+                NewModel.ImportModel()
+            # Internal functions:
+            N = len(Infodict["Parameters"])
+            # Reset tabcounter
+            self.tabcounter = 1
+            # Show a nice progress dialog:
+            style = wx.PD_REMAINING_TIME|wx.PD_SMOOTH|wx.PD_AUTO_HIDE|\
+                    wx.PD_CAN_ABORT
+            dlg = wx.ProgressDialog("Import", "Loading pages..."
+            , maximum = N, parent=self, style=style)
+            for i in np.arange(N):
+                # Let the user abort, if he wants to:
+                if dlg.Update(i+1, "Loading pages...")[0] == False:
+                    dlg.Destroy()
+                    return
+                # Add a new page to the notebook. This page is created with
+                # variables from models.py. We will write our data to
+                # the page later.
+                counter = Infodict["Parameters"][i][0]
+                modelid = Infodict["Parameters"][i][1]
+                self.add_fitting_tab(modelid=modelid, counter=counter)
+                # Get New Page, so we can add our stuff.
+                Newtab = self.notebook.GetCurrentPage()
+                # Add experimental Data
+                # Import dataexp:
+                number = counter.strip().strip(":").strip("#")
+                pageid = int(number)
+                [tau, dataexp] = Infodict["Correlations"][pageid]
+                if dataexp is not None:
+                    # Write experimental data
+                    Newtab.dataexpfull = dataexp
+                    Newtab.dataexp = True # not None
+                # As of 0.7.3: Add external weights to page
+                try:
+                    Newtab.external_std_weights = \
+                                   Infodict["External Weights"][pageid]
+                except KeyError:
+                    # No data
+                    pass
+                else:
+                    # Add external weights to fitbox
+                    WeightKinds = Newtab.Fitbox[1].GetItems()
+                    wkeys = Newtab.external_std_weights.keys()
+                    wkeys.sort()
+                    for wkey in wkeys:
+                        WeightKinds += [wkey]
+                    Newtab.Fitbox[1].SetItems(WeightKinds)
+                self.UnpackParameters(Infodict["Parameters"][i], Newtab)
+                # Supplementary data
+                try:
+                    Sups = Infodict["Supplements"][pageid]
+                except KeyError:
+                    pass
+                else:
+                    errdict = dict()
+                    # Each entry of Sups["FitErr"] is a
+                    # [parameter position, error value] pair.
+                    for errInfo in Sups["FitErr"]:
+                        errkey = mdls.valuedict[modelid][0][int(errInfo[0])]
+                        errval = float(errInfo[1])
+                        errdict[errkey] = errval
+                    Newtab.parmoptim_error = errdict
+                    try:
+                        Newtab.GlobalParameterShare = Sups["Global Share"]
+                    except:
+                        pass
+                    try:
+                        Newtab.chi2 = Sups["Chi sq"]
+                    except:
+                        pass
+                # Set Title of the Page
+                try:
+                    Newtab.tabtitle.SetValue(Infodict["Comments"][pageid])
+                except:
+                    pass # no page title
+                # Import the intensity trace
+                try:
+                    trace = Infodict["Traces"][pageid]
+                except:
+                    trace = None
+                if trace is not None:
+                    if Newtab.IsCrossCorrelation is False:
+                        Newtab.trace = trace[0]
+                        Newtab.traceavg = trace[0][:,1].mean()
+                    else:
+                        Newtab.tracecc = trace
+                # Plot everything
+                Newtab.PlotAll()
+            # Set Session Comment
+            try:
+                self.SessionComment = Infodict["Comments"]["Session"]
+            except:
+                pass
+            try:
+                Infodict["Preferences"] # not used yet
+            except:
+                pass
+            if self.notebook.GetPageCount() > 0:
+                # Enable the "Current" Menu
+                self.EnableToolCurrent(True)
+                self.OnFNBPageChanged()
+            else:
+                # There are no pages in the session.
+                # Disable some menus and close some dialogs
+                self.EnableToolCurrent(False)
+
+
+    def OnSaveData(self,e=None):
+        """ Save the calculated data, including the fitted
+            experimental data if present. """
+        # What Data do we wish to save?
+        Page = self.notebook.GetCurrentPage()
+        # Export CSV
+        # If no file has been selected, self.filename will be set to None.
+        self.dirname, self.filename = opf.saveCSV(self, self.dirname, Page)
+
+
+    def OnSavePlotCorr(self, e=None):
+        """ make some output """
+        # Saving dialog box.
+        uselatex = self.MenuUseLatex.IsChecked()
+        verbose = self.MenuVerbose.IsChecked()
+        show_weights = self.MenuShowWeights.IsChecked()
+        Page = self.notebook.GetCurrentPage()
+        plotting.savePlotCorrelation(self, self.dirname, Page, uselatex,
+                                     verbose, show_weights)
+
+
+    def OnSavePlotTrace(self, e=None):
+        """ make some output """
+        # Saving dialog box.
+        uselatex = 1*self.MenuUseLatex.IsChecked()
+        verbose = 1*self.MenuVerbose.IsChecked()
+        Page = self.notebook.GetCurrentPage()
+        plotting.savePlotTrace(self, self.dirname, Page, uselatex, verbose)
+
+
+    def OnSaveSession(self,e=None):
+        """Save a session for later continuation."""
+        # Parameters are all in one dictionary:
+        Infodict = dict()
+        Infodict["Backgrounds"] = self.Background # Background list
+        Infodict["Comments"] = dict() # Session comment "Session" and Pages int
+        Infodict["Correlations"] = dict() # all correlation curves
+        Infodict["External Functions"] = dict() # external model functions
+        Infodict["External Weights"] = dict() # additional weights for the pages
+        Infodict["Parameters"] = dict() # all parameters of all pages
+        Infodict["Preferences"] = dict() # not used
+        Infodict["Supplements"] = dict() # error estimates for fitting
+        Infodict["Traces"] = dict() # all traces
+        # Save each Page
+        N = self.notebook.GetPageCount()
+        # External functions
+        for usermodelid in mdls.modeltypes["User"]:
+            # Those models belong to external user functions.
+            doc = mdls.modeldict[usermodelid][-1].func_doc
+            doc = doc.splitlines()
+            docnew=""
+            for line in doc:
+                docnew = docnew+line.strip()+"\r\n"
+            Infodict["External Functions"][usermodelid] = docnew
+        for i in np.arange(N):
+            # Set Page 
+            Page = self.notebook.GetPage(i)
+            counter = int(Page.counter.strip().strip(":").strip("#"))
+            # Apply currently set parameters
+            Page.apply_parameters()
+            # Set parameters
+            Infodict["Parameters"][counter] = self.PackParameters(Page)
+            # Set supplementary information, such as errors of fit
+            if Page.parmoptim_error is not None: # == if Page.chi2 is not None
+                Infodict["Supplements"][counter] = dict()
+                Infodict["Supplements"][counter]["Chi sq"] = float(Page.chi2)
+                PageList = list()
+                for pagei in Page.GlobalParameterShare:
+                    PageList.append(int(pagei))
+                Infodict["Supplements"][counter]["Global Share"] = PageList
+                                                
+                Alist = list()
+                for key in Page.parmoptim_error.keys():
+                    position = mdls.GetPositionOfParameter(Page.modelid, key)
+                    Alist.append([ int(position),
+                                   float(Page.parmoptim_error[key]) ])
+                    Infodict["Supplements"][counter]["FitErr"] = Alist
+            # Set exp data
+            Infodict["Correlations"][counter] = [Page.tau, Page.dataexpfull]
+            # Also save the trace
+            if Page.IsCrossCorrelation is False:
+                Infodict["Traces"][counter] = Page.trace
+            else:
+                Infodict["Traces"][counter] = Page.tracecc
+            # Append title to Comments
+            Infodict["Comments"][counter] = Page.tabtitle.GetValue()
+            # Add additional weights to Info["External Weights"]
+            if len(Page.external_std_weights) != 0:
+                Infodict["External Weights"][counter] = Page.external_std_weights
+        # Append Session Comment:
+        Infodict["Comments"]["Session"] = self.SessionComment
+        # Save everything
+        # If no file has been selected, self.filename will be set to None.
+        self.dirname, self.filename = opf.SaveSession(self, self.dirname,
+                                                      Infodict)
+        # Set title of our window
+        self.SetTitleFCS(self.filename)
+
+
+    def OnShell(self, e=None):
+        Shell = wx.py.shell.ShellFrame(self, title="PyCorrFit Shell",
+                 style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT,
+                 locals=locals())
+        # Set window icon
+        if self.MainIcon is not None:
+            wx.Frame.SetIcon(Shell, self.MainIcon)
+        Shell.Show(True)
+
+
+    def OnSoftware(self, event=None):
+        # Show About Information
+        text = doc.SoftwareUsed()
+        wx.MessageBox(text, 'Software',
+                      wx.OK | wx.ICON_INFORMATION)
+
+
+    def OnTool(self, event):
+        eid = event.GetId()
+        try:
+            # Check if a tool is open
+            self.ToolsOpen[eid]
+        except KeyError:
+            # eid is not in self.ToolsOpen,
+            # so we open the dialog and add it to the list
+            self.ToolsOpen[eid] = self.Tools[eid](self)
+            self.ToolsOpen[eid].MyID = eid
+            self.ToolsOpen[eid].Bind(wx.EVT_CLOSE, self.ToolsOpen[eid].OnClose)
+            self.toolmenu.Check(eid, True)
+        else:
+            # We close it then
+            self.ToolsOpen[eid].OnClose()
+
+
+    def OnUpdate(self, event):
+        misc.Update(self)
+
+
+    def OnWiki(self, e=None):
+        """ Go to the GitHub Wiki page"""
+        webbrowser.open(doc.GitWiki)
+        
+
+    def PackParameters(self, Page):
+        """ Gets all parameters from a page and returns a list object,
+            that can be used to save as e.g. a safe YAML file 
+        """
+        Page.apply_parameters()
+        # Get Model ID
+        modelid = Page.modelid
+        # Get Page number
+        counter = Page.counter
+        active_numbers = Page.active_parms[1]       # Array, Parameters
+        active_fitting = Page.active_parms[2]
+        crop = [Page.startcrop, Page.endcrop]
+        Parms = [counter, modelid, active_numbers, active_fitting, crop]
+        # Weighting:
+        # Additional parameters as of v.0.2.0
+        # Splines and model function:
+        # Additional parameters as of v.0.6.5
+        #self.Fitbox=[ fitbox, weightedfitdrop, fittext, fittext2, fittextvar,
+        #                fitspin, buttonfit ]
+        # Some fits like Spline have a number of knots of the spline
+        # that is important for fitting. If there is a number in the
+        # Dropdown, save it.
+        #
+        knots = str(Page.FitKnots)
+        knots = filter(lambda x: x.isdigit(), knots)
+        if len(knots) == 0:
+            knots = None
+        else:
+            knots = int(knots)
+        weighted = Page.weighted_fittype_id
+        weights = Page.weighted_nuvar
+        Parms.append([weighted, weights, knots])
+        # Additional parameters as of v.0.2.9
+        # Which Background signal is selected?
+        # The Background information is in the list *self.Background*.
+        Parms.append([Page.bgselected])
+        # Additional parameter as of v.0.5.8
+        # Is the Experimental data (if it exists) AC or CC?
+        Parms.append(Page.IsCrossCorrelation)
+        # Additional parameter as of v.0.7.8
+        # The selection of a normalization parameter (None or integer)
+        if Page.normparm is not None:
+            # We need to do this because yaml export would not work
+            # in safe mode.
+            Page.normparm=int(Page.normparm)
+        Parms.append(Page.normparm)
+        # Parameter ranges
+        Parms.append(Page.parameter_range)
+        return Parms
+
+
+    def UnpackParameters(self, Parms, Page):
+        """ Apply the given parameters to the Page in question.
+        """
+        modelid = Parms[1]
+        if Page.modelid != modelid:
+            print "Wrong model: "+str(Page.modelid)+" vs. "+str(modelid)
+            return
+        active_values = Parms[2]
+        active_fitting = Parms[3]
+        # As of version 0.7.0: square pinhole TIR-FCS models
+        # use sigma instead of lambda, NA and sigma_0. This
+        # is for backwards compatibility:
+        changeTIRF = False
+        if modelid in [6000, 6010]:
+            if len(Parms[2]) > len(mdls.valuedict[modelid][0]):
+                lindex = 1
+                changeTIRF = True
+        elif modelid in [6020, 6021, 6022, 6023]:
+            if len(Parms[2]) > len(mdls.valuedict[modelid][0]):
+                lindex = 2
+                changeTIRF = True
+        if changeTIRF:
+            lamb = active_values[lindex]
+            NA = active_values[lindex+1]
+            sigma = 0.21*lamb/NA
+            active_values[lindex] = sigma
+            active_values = np.delete(active_values,lindex+1)
+            active_fitting = np.delete(active_fitting, lindex+1)
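+            # Example (hypothetical values): lamb = 488 and NA = 1.45
+            # would be converted to sigma = 0.21*488/1.45 ~ 70.7.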
+        # Cropping: What part of dataexp should be displayed.
+        [cropstart, cropend] = Parms[4]
+        # Add parameters and fitting to the created page.
+        # We need to run Newtab.apply_parameters_reverse() in order
+        # for the data to be displayed in the user interface.
+        Page.active_parms[1] = active_values
+        Page.active_parms[2] = active_fitting
+        # Cropping
+        Page.startcrop = cropstart
+        Page.endcrop = cropend
+        Page.crop_data()
+        # Weighted fitting
+        if len(Parms) >= 6:
+            if len(Parms[5]) == 2:
+                [weighted, weights] = Parms[5]
+                knots = None
+            else:
+                # We have knots as of v. 0.6.5
+                [weighted, weights, knots] = Parms[5]
+            if knots is not None:
+                # The Fitbox text is updated via apply_parameters_reverse.
+                Page.FitKnots = int(knots)
+            if weighted is False:
+                weighted = 0
+            elif weighted is True:
+                weighted = 1
+            elif len(Page.Fitbox[1].GetItems())-1 < weighted:
+                # Is the case, e.g. when we have an average std,
+                # but this page is not an average.
+                weighted = 0
+            Page.weighted_fittype_id = weighted
+            Page.weighted_nuvar = weights
+        Page.apply_parameters_reverse()
+
+        if Page.dataexp is not None:
+            Page.Fit_enable_fitting()
+            Page.Fit_WeightedFitCheck()
+            Page.Fit_create_instance()
+        if Page.weighted_fit_was_performed:
+            # We need this to plot std-dev
+            Page.calculate_corr()
+            Page.data4weight = 1.*Page.datacorr
+        # Set which background correction the Page uses:
+        if len(Parms) >= 7:
+            # Sanity check: the selected background must exist.
+            if len(self.Background) > Parms[6][0]:
+                Page.bgselected = Parms[6][0]
+                # New feature since 0.7.8: BG selection on Page panel
+                Page.OnAmplitudeCheck("init")
+        # Set if Newtab is of type cross-correlation:
+        if len(Parms) >= 8:
+            Page.IsCrossCorrelation = Parms[7]
+        if len(Parms) >= 9:
+            # New feature in 0.7.8 includes normalization to a fitting
+            # parameter.
+            Page.normparm = Parms[8]
+            Page.OnAmplitudeCheck("init")
+        if len(Parms) >= 10:
+            Page.parameter_range = np.array(Parms[9])
+        ## If we want to add more stuff, we should do something like:
+        ##   if len(Parms) >= 11:
+        ##       nextvalue = Parms[10]
+        ## This way we stay compatible with earlier versions of
+        ## PyCorrFit sessions.
+        
+
+
+    def SetTitleFCS(self, title):
+        if title is not None and len(title) != 0:
+            title = " {"+title+"}"
+            self.SetTitle('PyCorrFit ' + self.version + title)
+        else:
+            self.SetTitle('PyCorrFit ' + self.version)
diff --git a/src/icon.py b/src/icon.py
new file mode 100644
index 0000000..79089bc
--- /dev/null
+++ b/src/icon.py
@@ -0,0 +1,246 @@
+#----------------------------------------------------------------------
+# This file was generated by /usr/bin/img2py
+#
+from wx.lib.embeddedimage import PyEmbeddedImage
+
+Main = PyEmbeddedImage(
+    "iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAYAAABccqhmAAAABHNCSVQICAgIfAhkiAAAAAlw"
+    "SFlzAAAMqAAADKgBt04g1gAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoA"
+    "ACAASURBVHic7Z13nFTV2ce/d9r2Xcru0quAFDE2sASUJogGS9TYYosxFozGkMTo6/uuEzXF"
+    "RoxYIomx94YGjBRpKioqGKQICNKWBRbYXdg27b5/nLne2WXuzOwyc++dmfP9fOYzs3PP7jwM"
+    "9/zOc855nucoqqoikUiyE4fVBkgkEuuQAiCRZDFSACSSLEYKgESSxUgBkEiyGCkAEkkWIwVA"
+    "IslipABIJFmMFACJJIuRAiCRZDFSACSSLEYKgESSxUgBkEiyGCkAEkkWIwVAIsliXFYbIJFk"
+    "K4pXOR64GmgGvlQr1BdMt0EWBJFIzEfxKicD7wElEW/fpFaoj5pqhxQAicRcFK/iAbYB5a0u"
+    "qUC5WqFWm2WLXAOQSMznfA7t/AAKJk/LpQBIJOZzg8H7KmI9wDSkAEgkJqJ4laOA0QaXP1Ar"
+    "1P1m2iMFQCIxl+tjXHvcNCvCSAGQSExC8SqFwOUGl3cCs0w0B5ACIJGYyaVAscG1f6gVasBM"
+    "Y0AKgERiJkaLf0FgppmGaEgBkEhMQPEqJwHHGFyerVao28y0R0MKgERiDkajP1iw+KchBUAi"
+    "STGKVykFfmJweRPwvonmtEAKgESSem4Fcg2uPalWWBePLwVAIkkhilfpANxkcNkHPGWiOYcg"
+    "BUAiSS03Y7z196Jaoe4x05jWSAGQSFKE4lWKgFsMLoeAP5loTlSkAEgkqeNGoJPBtVfUCnW9"
+    "mcZEQwqARJICFK+SD/za4LIK3GuiOYZIAZBIUsMviJ7zD/CmWqGuNtMYI2RNQAmIQhRuwBPx"
+    "rL0OIVar/eFn7bXpcevpguJVcoDfxmhyj1m2xEMKQOajIFahOwEdI56L0Dt6e+4DFV0QmoH9"
+    "wL7ws/ba1OIWNuJnQHeDa++qFepKM42JhRSAzKII6Ino5FpH7wg4U/BZCpATfhQBpVHaNNBS"
+    "GHYBlYjkl4xE8Spu4LYYTe42y5ZEkAKQ3uQDvYDe4ecOcdo7EP/nzvDDgejIkY/W74EY7UPh"
+    "ZzXKzyFEpw6En7XItvzwo0eEDQGECGwDtiJEIdTGf7eduRzoY3DtfbVCXW6mMfGQVYHTixxa"
+    "dvjOBu1c6B3dFfFQDNonG00IAq1eR+vofmA7uiDsNsnGpBOu9rsO6GfQZJRaoX5koklxkR6A"
+    "/XEgbqghwBFEd+ddiLl8Tvg5XkdXEe65P8oj0OpnB2IxMPLhavVzbvhzI+1xhe2JJIC+ZuAL"
+    "2+EO//u0TlMLrA0/TK2PlwRuwbjzL7Rb5wfpAdiZbsBQ4EgOTSRxoi/g5WC8nesD6sKP2vBD"
+    "+znZbncu4pCL4vCjJPwoxFiQtJ2F5vDr1jfjToQQrAOakmxvCxSvogC5aoXa2M7fLwM20PKg"
+    "j0jGqRXqwvbalyqkANiLEsRIP5RD5/MuIA/R0aJ5ASFgD1CFmFfXAu26mZOMA7FI2BnoihC2"
+    "AoO2PoTNTbQUgxCwGViDSJ9t9yKi4lWKgcnA2LAt3cJ2dUEIaj1CeKrCjy3AbGBprJJdild5"
+    "HOOCn0vUCvW09tqcSqQA2IMewIlA31bvO9A7vTvK7+1F3KQ7EXPndNmbL0IXg64c6uGoCBFo"
+    "4tCtxCZgJfAlCXoFilfpDZwdfowh+ncZjxpgDvAuMEetUOsi/v4w4CuiC7MKjFQr1M/b8Zkp"
+    "RwqAtfRFdPzIVXJtey2PQ+fQfsSItB3R8X2pN9EUOiDEoC9Q1upaCOEVNNJS4PyITvcFYtQ+"
+    "BMWr9EVsu11GchdADwD3AdPVCrVe8SrvAxMN2j6rVqhXJvGzk4oUAGsYgOj4XSLecyJc4zxa"
+    "3qwqsAPh+m4jg/fQwxQB/cOPolbXAojOHjlFCABfA8sRHRPFq3QG/geYSsvFyWRTBbyOcb5/"
+    "AzBIrVB3pNCGw0IKgHkowGBgJC2371zoHT+SvcC3wHekeAHMxpQjhKAvLTtyECEEjehCEALW"
+    "dHuw25Cqg1VejBfjzMSrVqh3WW1ELKQAmEMvYDwtU0NdiBXyyPlvAPgGsZpch0TDiYhwHELL"
+    "BJsQQggaGgONyhnPn3Huki1LfmiFgVHYgRj9G6w2JBZSAFJLAXAaYuTX8ITfj5zf+9D3vjNl"
+    "Xp8qugDDiYi131633X36c6dPWVe97ohE/4iiusivHYanqRvu5lLczaU4A0UEPPvx5+zBn7OH"
+    "5vytNBZubK+dV6oV6rPt/WWzkIFAqcGBqAF/Crrr6kTsj0d2/CbE1tY3iEUtSXx2hR+lwPCv"
+    "qr4aOun5ST/ZVb/LqPDG9ziCeRRXn0JJ9akUV5+CM1AY98N8uVXUli2mtmwJBzusBCWh8In9"
+    "wIuJNLQa6QEknx4Id19LjlEQI37k3dYArAbWk/mLeiljRdWK3DFPj3mxrrluYKx2SshN6fbz"
+    "6fLdlbj88dIljGkq/JbKI56grvTDRJr/Ta1QjcqB2QYpAMkjHzgVEcSjkYMY9bX9YR9iD3s9"
+    "mZUAYzq+oE/p8VCPB6obqo223wDouOt0um2ciqepS6xmbaK+ZBU7Bk2noXhtvKbXqhXqP5L2"
+    "wSlACkBy6A2ciRABiO7ub0LsWdshOi/tOXLGkdev37t+qtF1RXXQfcPNlG27KCWfH3L42Db0"
+    "HvZ3mRermQ+YoFaoS1NiRBKQAnB4KMDJiD19LX02n5bx7zXAp4h5qyQJnPPyOaPe+eadxzAI"
+    "7nEGCujz9d0U7z055bZU9XuKqv7/4NA0hu/ZAwxVK9TqlBvTDuQiYPspQIz6vcI/OxERbVqY"
+    "aQARqbYW6e4njZqmGsfcb+f+BoPO7wjmMuDLR8k7cKQp9nTd/DOcwXx2DHzYqEkZ8H+I8wFs"
+    "hywK2j56Iwo/aJ0/B7Hop3X+rcDbiIU+2fmTyBkvnHFOU6DJYLtPofea/zWt82uUbb2YTpU/"
+    "itXkesWrDDDLnrYgBaBtKIitvfMRrr6CCFftGH4dBJYBixAr/ZIksmr3qpwvd35pOO/vuvlq"
+    "OuweZ6ZJ39Prm99RUPMDo8tu4I8mmpMwUgASx4Po+CchOrsTEdmnpbbWItJGN1hiXRZwyeuX"
+    "XOYP+qMu5+cdHEjXzdeYbdL3KCE3fVZXoIQMEw0vVLzKCDNtSgQpAImRjzjeuXf451xauvzf"
+    "Ijp/jfmmZQ8b92883+ha9w1TQbX2dvY0daN0+wWxmlxhli2JIgUgPiXAxegx6EWIxT7N5f8o"
+    "/EiXXPy05HfzfjegOdDcO9q1on0jKdp3otkmRaXrd1fFijA810xbEkEKQGzKgUvQO3wJustf"
+    "A/wbMfpLUsw7698Zb3St66ZrzTQlJk5/cSwvoKfiVU4w0554SAEwpjfC7dcW+zqgp+xWIqrD"
+    "1FpjWvaxvXZ71NU9d3MZBbVHmW1OTDrsHhvrsq28ACkA0RkEnIdeYbcjelTfZuADpMtvGk98"
+    "/kR5vb9+aLRrJdWjzDYnLnkHBsUKPT7bTFviIQXgUI4GzkI/OKMzekbfOmApcm/fVOZvmm9U"
+    "apviPaPNNCVhSvacanRpcLgCsS2QAtCSQYhMPm2brzN6tOQK4DOL7Mpqqg5WGZ2yS35dVMfA"
+    "cvLrhhldchP9GDVLkAKg0xtRLjqy8zsRQd7LgFXWmZbd7G/a37pQKCD23l1+O1T+OhR3c8w+"
+    "bnRwqOlIARCUI+ZmmtvfKfwcQkT1yeAeCznoOxjVA3D7bDOQHoK7OapmaUgBsBEdgB+jL/h1"
+    "Qh/5lyAq8UospMHfELU3xRllLUV6AOlBAS3j+juiz/k/RST1SCQZSzYLQA5i5C9B3+fXVvu/"
+    "QlTtkdiAfHf+nmjv+3NsmWIPxLWt0iw74pGtAqAAP0I/hSayes83CAGQ2IRCT2HUI8P9HjsL"
+    "QFTN0pACYDEnAn3Cr4vQI/y2Irf6bEfH3I5Re5Pq8BNw2zMYU3oA9qUnoowXiKw+LbZ/FyLI"
+    "R9ZIsxldC7tG9QAAGorXmGlKwjQUrza65Ads47pkmwDkIcp4aXv92ibyAWAhskS3LZnQf8Jm"
+    "o2t1Zfast1lbtsTo0jq1wj6FOLNNACajF+zUMvxCwGLkiTy25foTrt9d4C6IOtTXJlaj31Qa"
+    "i9bjyzWsAfuOmbbEI5sEYCTikEkQIqAV81gO7LPCIEni9Czp+UG09/05e6gv+dpsc2JSU74w"
+    "1uW3zbIjEbJFAHoA2qGROejz/i2IVX+JzTl70NkLjK5V9Z9ppikxCbrrqO75utHl7WqF+rmZ"
+    "9sQjGwQgF33e76DlvP9jq4yStI37Tr9vY44rJ2pg1oFOn3Gg06dmmxSVqr5PE3QdNLpsq9Ef"
+    "skMARiG2+kDM+7UY/8XIAznTioGdBr5mdK1y4KOJHtyZMny5O2ON/irwLxPNSYhMF4CuiKOk"
+    "Qbj9WqTfZ8h5f9rx1kVvvehxeqqiXWss3EBVv3+abdL3qA4/W4Z5UR2GY8oraoX6pZk2JUIm"
+    "C4CCntvvQD+ddwcyzDctGdBpgO/4bsfPMLpe1e9f1JRHXStMOduOvI/6DoYBpD7gf0w0J2Ey"
+    "WQB+AGh1mYrQq/jaY7IoaRfzLp/3bp4rb2P0qypbh95NY5G567p7er/Mvu7/NrzucrhmqhXq"
+    "JhNNSphMFYB89FV/D3qo7yrAcIVGYn8KPAWhSQMm3Y9BxGbI2cTG46ZSV/qRCdaoVPWfyY6B"
+    "fzNske/OPzj38rkxjxC2kkwVgNMQ230KItEHoA6w14axpF28ddFbHw/qPOgxo+tBVz2bjv4d"
+    "u/u8kDIbQs5GNh99O1X9nsIoetypOIMPTHzgmbF9x/ZHTzyzFZkoAL2AIeHXBbTM75fFPDOE"
+    "VTes+ntpfulcwwZKiMoBM9g8/HZ8eTuS+tkHOn3O+hE/p7Zsccx2t55867wbTrjhO8RANCGp"
+    "RiQJRbVPWHKyuAJRdNEZflaA7xDVfSQZxIqqFbmjnhr1bIO/YUisdkrITemO8+iy+Wpc/g7t"
+    "/rzGwg1UDniUA53jLyNNHjj5izmXzvkAaEQ/P+I/gK2ylzJNAAYCU8KvtVr+AUQAhjytNwN5"
+    "bPljXabNnfaPpkBT33htHcE8iqtPoaT6VIqrT4l1hNf3+HJ3UVu2mNqyxRzssDKhWIPS/NK5"
+    "227d9mKuK1crDbwPsROwH3gaG2WcZpoA/BRR4NOFXnr5C8AwN1OS/szfNL/wwtcufKCmqeaH"
+    "8VsLFNVFfu0wPM3luJpLcfs64wwUEHDX4M/ZS8Czj+b8bTQWtqkerDqw88Anvr7h68c9To8D"
+    "cbhMPqLza3En/8ZG29CZJAD9EF846KN/E/AGMs0347nwtQtPfH3N609i0bqWQ3E0jek75n8W"
+    "XLEgcl1iMCIJDWAvIvJ0D/Cc2fYZkUmLgNrxsC708l5rkJ0/45m+bHr3t9e9fT8W3c95rryN"
+    "N5xwwxWtOj+IcvKN4dfafKMM6G+acXFwxW+SFvREL7WsZfr5kJl+Gc8rq1/pePsHtz8ZCAU6"
+    "mv3Zbqd793Hdjpux4PIFswo8BdEWB4KIQeh4xKDkQqxJnQjYIjAoUwRAG/2d6EE/a5HJPhnN"
+    "su3L8q+Zdc3jzYHmPkZtclw5W5sDzb0Qu0FJwaE46gd1HvTUqxe++szw8uHNcZp/AxyFnoZe"
+    "C3RDnERledn5TBCArugFPrXRP4AQAEmGsqVmi3vy85MfrvfXGx7CV5RT9OW6qeuueW3Na2VP"
+    "r3x6zOaazWPrmupGqKhtvu+divNA5/zOS4eUDll015i7lo7pOybRiFLtXjwGMTgdRHgGI7GB"
+    "AGTCIuA5wBGI0V+LtvoasF3mlSQ51PvqHX0f7nt/dUP1RKM2Hqdnxws/fuGSC4ZesD/y/UXf"
+    "LSr0LvaO2rR/0zEN/oaypkBTuS/oK/eH/GWqqrodiqPe4/Ts8Tg9u3NduXsKPYU7j+9+/LIZ"
+    "k2d8UV5Q3t71JA/iABo3LeMCXgJ2tvNvJoV0F4AC4BfoIb/5CHV9A7EDIMlAek7veeeOuh0X"
+    "GV13OVz7Kk6ruPzOU+9s0wi7aveqnARc+vZyLHpq+h7Effo1YBzNaALpvgswGNH5FfS5/wZk"
+    "589YBs0YdGOszu9UnAevO/6669va+QFS2Pmh5Y6Udq8OwuJpeLoLgHY4vJb4A2CQKipJd45/"
+    "8viLN+zdcIPRdUVRfD8e+uNbZpw5w47rP83oB83mhp89wABrzBGkswCUos/5NUWtQVb6yUjG"
+    "PTNu0pc7v7w9RpPQuL7jfv/qBa/a+WSnb8PPLvSq1EMN2ppCOguAlgDiQA/8+dagrSSNOefl"
+    "c0Yt2rLoT8S4X0/ofsI986+Yb9u8+zCV6NNTbdDqg757ZTrpKgAKugBo7pQKGJ4gI0lPprw0"
+    "ZfS76999WFVVt1GbI0uPnLH82uWGBUNtROQ9moe+fhUzmzGVpKsA9EIPrdSUdCcy4y+jOOvF"
+    "s06bvWH2X1VV9Ri16VXc68V1U9f93Uy7DhPNS1XQi9RaNg1IVwHQvjAn+lzKFqGVkuQw+YXJ"
+    "Y9/b+N70WJ2/rKDsvY03b/yzmXYlgX2ItSoQ29bQcj3LVNJRAFyIvH/QR/8ANoiqkiSHSc9P"
+    "Gvf+xvcfiuX2l+SWLFt53co7PE5POgayaF5ADnofHGyFIekoAD3QR31t/r8FIQKSNOf0506f"
+    "MO/beQ/GCtctyin68r3L3vtV96Lu6fp/HrlWpS1gG+YzpJJ0zAXoFX52oNu/3SJbMo511ety"
+    "/r3+351X71ldVnmgsnO+O7+pf8f+e0b2GFl93uDzalI54o5/dvzEhZsX3qeiOo3aFOcUfz7/"
+    "ivk3jug+otGoTRrQgJgKdEKsAzQipgC5mBzElo4C0Dv8nBPxnuFZzBJjfEGfctOcm4Yt2Lxg"
+    "fNXBqlHNgeaeQTVoWCdLQQm6nK69RZ6i1YM6D/pg2snTFreOtW8v454ZN2nRd4v+Eqfzf7bo"
+    "qkVTj+16bCZEeu5ECIB2HyuItHZTA9nSLRfAA0xFfFkliDWAGmx25rrdufLtK4/5YPMHZ+2q"
+    "3zXOH/SXt/fvKCihwpzCFX079P3goYkPvTmh/4R2nbkw5ukxk5dsWfJnFdVwSlqSU/LpwqsW"
+    "3pQhnR/EVHZ8+LWWG7ASMPVoo3QTgP7AueHXZYhdgLXAcsssSiN+N+93A/6x4h+/2t+4/7Rk"
+    "/22nw1k7tHTok29e9ObLAzoN8CX6e6P/Nfqsj7Z+9MeYnT+3ZNnSq5f+MsWx+mbjAi5BDGa1"
+    "iGnAXuAZM41INwE4DVFdJTL1dyF6jLUkCo8tf6zL3UvuvqnqYNXZpHjh1+P0VI7oPmLG+z99"
+    "f7ZBlZzvOfrxo69YtXvVb4hRrKNDboePl12z7ObBpYMzqfNrTEbcx03oW4NPYGI8S7oJwOWI"
+    "LywPMQUAeBlR/ksShXHPjpu4+LvF94bUUG781smjyFP05RM/euLWS4dfekhuhi/oUwY9Mug3"
+    "W2q3XBHrb3TI7fDhsmuW/SpDOz/oKcIhYHf4vdmYWMounbYBc9FHfS04ZC+y80cl3MmmLty8"
+    "8AGzOz/AAd+B466edfVL0+ZOGxj5/sZ9Gz09H+p5f7zO3zG349Ll1y6/JYM7P4B21HnkjlZv"
+    "g7YpIZ0EoEfEa23lNOpZ8dnO8srleT0e6vHghn0brieJtfDaii/o6/7XT/76/OQXJo8FeH/j"
+    "+0XHPXnck3sa9kyK9Xsd8zouXnHdilvaspaQpuxGP65Ou6d7mmlAOk0BRgCjEaKlrVzL+X8r"
+    "VlStyB39r9FP1fvqh8dvbRqhYWXDHt60f9OUxkBjzPz30vzSeZ9f+/ltfTr0yZaCrmcg7ufI"
+    "UmF/w6TAtnSKA+gUfo60uSZaw2zFF/QpE5+beK/NOj+AY/We1bfGa9SruNeLa6eu/Uu8xcMM"
+    "Yz/6aVYaHRFbgyknnQUghKiwKgkz/PHh18UqlGlj1GFlw6Z/fePX/7LaEAuoCz9bIgDptAag"
+    "HfygfVEHsNEhi1Yz4dkJp6/fu/5Gq+1oK4qi+Ef3Gf37LO38oAuAgt4fOxm0TTrpIgB56Ik/"
+    "WqhorUHbrOPplU+XLvxu4T1YuODXHpyK8+D5Q86/YclVS+ZYbYuF1EW81gY30045ShcBiPxC"
+    "tC+pLlrDbOTOD+68IaSG8uO3tA8uh2vvL0/85ZWvXfjap1bbYjEH0XcCtMFNegCt0L4QBekB"
+    "tOCuRXf1qTxQeb7VdrSVwpzCtdMnTbfNMdkWoiKmsyA9AEM0AYjMFJMeAPDo8kdvjpVBZ1dq"
+    "GmtGXfn2lcdYbYdNaL0Q6EEveZdS0kUAWi8AgvQAuGnOTUPSdNUfgFnrZt1ktQ02wWgnIOWk"
+    "iwBoaqiNdD5kCDBzNsyZbLUNh0Ndc92IV1a/Yvqx3jZEE4BITy4vWsNkky4CoMX+a6vc2RIl"
+    "FpPKg5XjrLbhcFBRHdM/mT7GajtsQORgpt3jhsVQk4kUgDTl9/N/f0RzoNmSOnLJZMPeDWkt"
+    "Ykki8n6WAhAFrQioFIAws76ZNT5+K/uzv2n/ycsrl5vi7toYKQBxaO0BZP38f0fdjh9abUMy"
+    "UFU1x7vIO8JqOyxGCkAMImvDa/amaznopNEcbO5utQ3JYufBnRnzb2knkQKg3eNSAMJEfhHS"
+    "A0Bk/fmD/lKr7UgWtc21lpyKYyOkBxCDaAKQ1WsAr695vWOsgzPSjXpffbsrE2cIUgBiEDkF"
+    "kAIAfLL9k4zqME2Bpmz3AFREWXCQAnAIkV+EZm9WC8CW2i0Z4/4D+IK+jPr3tBNtXUuuAbQi"
+    "GPFay/9Pu9j3ZFKSU1JvtQ3JxKk4M+rf0060vqjd40Gjhqn4UDsTueCnfTmGp8ZmA0eVH7U7"
+    "fqv0IdeVa0r1G5uj3dNaarApC91SANKQs488u9pqG5JJnjsvowStHUQu6Gr3uBSAMJHzfSkA"
+    "wODSwc1OhzNjsiGLPEUZJWjtIPJ+lgLQCukBRMHj8GSM29wxr2O2ewBSAGIQQl8Q0eZHWS8A"
+    "JbklX1ttQ7IY32/8KqttsBgpAHHQvgzpAYQ5pusxph4jnSpyXbmb7hpz1xar7bCYyPtZLgJG"
+    "obUAmLJHamemT5r+sUNxNFptx+HSo6jHAqttsAHRPABTzkRMFwHQFgKlBxBmcOng5o55HT+y"
+    "2o7DZVy/cRnhyRwmcgoQh6bws7YWkEP62J4yhpQOSevR0+1075px5ozVVtthAwrCz5EH3UgB"
+    "iGB/+DkyXrrIIltsw/PnPf++x+nZYbUd7eXIzkc+53F65OlOUBx+jkxzN2WbN10EYF/4OfIL"
+    "Ko7WMJvo06GPf2SPkY9YbUd78Dg9lW9d9NZLVtthE1oLgIpJB9+miwBoHkAI3U0qscgW2+AL"
+    "+pRAKOBRUEyJG08mI7qPmDGg04CsrusQQWsBqMWkXIB0ySnfH/E6gFg0yWoPYNrcaQNnfjHz"
+    "fw/4DhxrtS1tJd+dv+79n74/22o7bEJO+AF6p99v0DbppIsA1CBGfwe6AGSlB7B069KCy9+6"
+    "/MatNVsvS8cTgRRF8V807KK7CzwFofits4LIgUzzAPZFa5gK0kUAtDlRJ3SVzDoPYNyz4yZ+"
+    "uPXD2/xBf9oWBBnZfeQ9T53z1H+ttsNGRN7H0gOIwT6EAGgqqblOpgRMWMkfFv+hz/RPpt9R"
+    "01RzitW2HA69S3o//8nPP3nTajtshiYAQfT1LekBREFTxcidgBIgYxNJ1lWvy5ny0pRrv93/"
+    "7dWqqiYU/ViUU/TlQd/BoxJtbxYluSXLvrr+q/uttsOGaFPZyPtaegBRaB0LANAZAwH449I/"
+    "9pq/ef6Q/Y37S+ua68oa/A2lTYGm0qAazM915u7Oc+dVF3oK95TkluztU9Jnx4MTH1zZvai7"
+    "bcqNT3lpyuh53867oznY3DOR9h6np3J8v/F/mnPZnEVTXpoy+r0N790XVIOmnDAbjw65HT58"
+    "7cLXftsht4Oc9x+KVg5Nu/eaAdMqJCmqmjZxGKXAFeHXnRD5ANuAhVqDqXOmDpn77dzxO+p2"
+    "jG8MNA5oyx93Ks6DnfM7LxlWPmzBvePu/fDknic3JM3yNvDIZ4909S7y/n5v496ETv5RFMXf"
+    "v0P/p9+6+K2/Dy8f/v106I4Fd/R/6JOHHmkONPdOnbXx6dOhz7Mrr1v5oOz8USkGzg2/3o/o"
+    "/JuBt8wyIJ0EAOAGxKmpheGHb0vtljenvDTl4m+qv/mpL+jrlowPURTFV5ZfNv+XI3/56J2n"
+    "3rk1GX8zHpUHKl0Tn5t4xdrqtdeH1FBCR2UV5xR/esuJt9zzh7F/+C7a9dkbZhdf9uZlD9Q2"
+    "1Z6cVGMTQFEU/0k9TvJ+fM3Hs8z+7DRiEHBS+PUuxBrAYuALswxINwH4EeJL84TUUKe7Ft01"
+    "9IGPHzi+MdDYNRUfpqAEuxd3f/2esfc8cdUxV6Wsas0lb1xywtvr3r6zKdB0RCLtXQ5X9Sm9"
+    "Trl/8VWL58RrW9NU4zjpnyddu2HvhqtDaqggXvtkUOgp/Orioy7+y8wpM7M9zz8epwF9EMlu"
+    "e8PvPY+J61rpJgBHAxP+/vnf+9+z9J4Lt9dtN2U7zKE4Go/oeMQzi65a9PdkrhM899/nOv92"
+    "7m+n7arfNSWR9gpKqFdJr5eeOfeZGWP6jjnYls96cdWLnW6bf9sNO+p2XJiq+IEcV86WUb1G"
+    "PTz/ivnzUvH3M5CLEDtZB8OPJuBxWiYFpZR0E4CO17577cynVjx1XkgNmR7GXJxT/Nlz5z33"
+    "67OPPPuwEjXqffWOU58+9SdfVX11c1ANJpTUVOgp/O+VP7jynhlnzlh7OJ9916K7+jy2/LFf"
+    "VjdWj03WToHH6akcWjb06dmXzn7NTgupNqcjoAn/PkT230bgHTONSBsBULyKC3gYuLEtv+cA"
+    "OigKpQ6FMkUhF9irquxRVapDapuDCHJcOVt/c/Jvbrpn3D2b2/irAJz90tmj5m+aPy3RRUqn"
+    "w1l7bNdj//rRzz56I5mZc8srl+fdseCOH67avWpsdUP1qcFQsEMbfl0tcBes6Vncc+GkAZMW"
+    "PXzGw98ky64sYgignYqszf8XAivMNCItBEDxKh2AN4BxibQvUxRGuZyMdjk5+TyThgAAENxJ"
+    "REFU1uWMWT1ke0hlaSDA4kCQ1cFQQr6XU3EePGvQWb+ddfGsDxOxB8QOxfP/fX5aXXPdiQn+"
+    "itq1sOusRyY/8tAFQy9I6b5wTVONY9rcaUdt3Lex577GfWUHfAfKG/wNZc2B5jKn4mzMc+ft"
+    "LnAX7CnJLdldll+257wh56259rhrMzb+wiTGAr0QI78W+PMM+lqAKdheABSv4gbmAmPitT3a"
+    "6eAXOR5+4HR8f8BaW9irqrzqC/Cazx+3GoOiKP7Lj778Z8+c+8zKWO2mL5ve/c8f/fnm3fW7"
+    "z4TEzMpz5224YMgFdz973rOmjgYS01AQ838P+vy/AXjCdEPSQAAeB66P1aavw8H1OW5GuZKz"
+    "trVbVflns5/3/AFibV67HK69D5/x8EU3jrhxV+trszfMLr5pzk3Xbqndcmmic22H4mgYVjbs"
+    "0flXzH+hvKA87VJ8JQnTE92b3YvYBfgGMD1D0tYCoHiVqcCMWG3Odrv4da4nJSGNK4JB7mz0"
+    "URvjOypwF6xZ9vNlV2hBOBv3bfT8+JUfX7ymes11wVAw4YSl0vzSuXePvfsv159wvXStMx9t"
+    "+y8AaNvLc4B1ZhtiWwFQvMoYYB4G4coO4KYcDz/xpDaauTKkcltjM5tDxr5AeUH5nG23bvv9"
+    "hOcmTP50+6e3+IK+7on+/TxX3sZJAybd/9ZFb32cFIMldscD/ARxC2vuvw/h/pu+g2JLAVC8"
+    "ihNYhVgpPQQHcG9eDqOT5PLHo0FVubWxmdVBYxHIdeVuaQo09Un0b7qd7t3HdTtuxoLLF8yS"
+    "ufFZRWT03x5EbsvXiHUu07FrMtBVGHR+gJtzPKZ1foB8ReFPeTlcU9/EHgPBTLTzOxXnwYGd"
+    "Bz716oWvPhsZuy/JGrRoTx96Ytsai2yxX01AxavkAV6j6+e6XVyQYrc/Gp3CItDeyBkFJdCr"
+    "uNeLL1/w8plrp66dKTt/VlIElIVfa4e61AHbrTHHnh7ALUCPaBcGOBzcmmtdmvtgp4Obcj08"
+    "1NS2Wpal+aVzbznxlofNSiyS2BZt9FfRz7o4rMjOw8VWAqB4FQfwa6PrN+a4sboI3jluF2/4"
+    "AmyJsSioUZRT9OWFQy988J9n/1OWwJIA9A8/N6HH+1vm/oPNBAD4IbqL1ILjnU5GmjjvN8IJ"
+    "3JDj5veNxh58rit305i+Y/763mXvLTRsJMk2eiNS2EF3/3diYvWfaNhNAM41unB9jn2OAxzl"
+    "cjLc6WBVlF0Bh+JoXnHdigsHlw6WNe8lkQwPPwfQj/2y/Fg0uy0CRhWAng6FIU57mTrRHV07"
+    "Q2oo5/YFt59gsjkSe9MdUb4OxL4/iNBfS91/sJEAKF7laPQ5UgtOddnNUYHRLqdhYP/yyuUJ"
+    "JS1JsobI0V9b/PsCCwJ/WmMbAQBGGl0wc88/UUoVhaEGXklNY83wqBck2Ug50CX8Wiv22QR8"
+    "ZY05LbGTAESt55erwDCbuf8axzijC5Mv6Iu6kCnJSo4OPwfRR/8VmHT8dzzs1LOixs+XKYqt"
+    "jIykVIk+CQiEAp1rmmrsarbEPDqh39cHEVt/Pkwu+hELO92kUQWgVLGTiS0pdUQXABXV8cba"
+    "NzpHvSjJJrTRP4Q++n8V8dpy7NS7onsABp3MDhh5AACfV34upwHZTVfE3j+Iub+KWPQzreR3"
+    "IthJAKIWx8w324o2kBdDm/Y37jelBLfEljgArfRbALHlByLD1ZIDZ4ywkwBURXtzrw3TlTX2"
+    "hYxtG9BpQMrOEZDYnqHoZ/4dQJ/7L7fMIgPsJACV0d6strEAxLJtQv8Je0w0RWIfCoAfhF83"
+    "oZ9evQw9CMg22F4A9sQYZa3GyDtxKI7Gth7cIckYRiJSRlTE6A+i8MeXllkUA9sLwD5Vpc6m"
+    "XsB2A3FyO92yrl920hNR6hvEaK8V/JiPiaf9tAU7CcCWaG+GgI8C9iuQGwKWGdiV68zdaa41"
+    "EhvgRI9mjVz4+xqR9WdL7CQACxDlkQ9hiQ0FYHUwxD4Dz6RXSa+EDwyRZAzHoqf71qEX/Vhi"
+    "mUUJYBsBUCvUGmBRtGufBYI02cyBWhpDlC456pIPTDRFYj09ESv/IHL9tTDfJdgo6CcathGA"
+    "MG9He7MZmOW3PHHqew6qKrMN7Mlz5224Y/Qd20w2SWIdBYhCNiBc/7rw60qE+29rbCMAilcp"
+    "AwYaXX/W56feJouBz/oChoeF9CjqIUf/7EEBTkUc8Q1Qg77n/x+rjGoLlifaK17leOBm9LPS"
+    "o1KrqrzoC3CtxZWBdoZUXvNFXaoAUM8bfF5a/MdLksKx6CXs6tDz++cixMD2WCIA4QM/zwd+"
+    "CZyS6O+94vczwe2kn8MaxyUEPNjsi75SCZQXlL933+n3bTTTJoll9ACOCr9uRF/1/wpYb4lF"
+    "7cBUAVC8SjlwHeKwz4SPz9JoUuG2xmZm5udSEiMRJ1X8vdnPJwaLf4qi+KedPO1vJpsksYZ8"
+    "YFT4deS8fw+w2BKL2okpR4MpXuUExGgf081PlGOcDv6an2uqer3vD3B3jPMAepf0fn7Lr7b8"
+    "xUSTJNbgAiYCpeGfqxEi4Aeex+Iqv20lZX0ows2/GTg5mX97ZTDE/zY283+5OTEz8pLFPH+A"
+    "P8fo/E7FefDBiQ8+mXpLJBajIE721Tp/Lfq8fx5p1vkhBR6A4lW6AL+gnW4+Yj71AuK01KfR"
+    "51mH0N/h4C95OXRLUc2AEDCz2c9zxot+AKEzB5558+xLZ6eV6ydpF6PQC9fWo8f6r0IIQNqR"
+    "FAEIn+gzCfg5MAVoz1L9VuAx4B9qhbo3/Hf7AZ9icFgIQImicFuuh1OTXDh0j6pyf5OPj+NE"
+    "IQ4tGzp99Y2rn0rqh0vsyAm0DPapDb+uBF7HBhV+28NhCYDiVXoDPws/esVpbsRi4G/ALLVC"
+    "PaS3KV5lNCKZIuahgD9wOpia4zGs1JsojSo87/Pzit8fN/qwS0GXf1f9pur2w/pASTpwFHBc"
+    "+HUz+n7/XuAVbB7tF4s2C0B4bn82YrSfSPuCiTQ3/xG1Qo17bp7iVa4G/gmGpfi/51SXk9Pd"
+    "Lk5yOtu0PrAhFGJpIMjbvoBhjH8khZ7Cr5Zfu/yawaWD5Sm/mc0A9K1qP7APPdX3ZfRpQFqS"
+    "sAAoXuVI4BrgSkSt8/awBd3N39eWX1S8ynnAs+gJFzFxAyNcTo5zOihVHHRyiCO+c1HYp6rs"
+    "VVX2qSrfhTt+VRvqDpQXlM+Zf8X8/5NHfGc8vRGLfgrCxd+HXuDz5fDPaU1cAVC8ykjgfkTI"
+    "Y3tZhHDz34nm5ieK4lWGA7OAfodhy+GgDi4d/MjaqWtnWvT5EvMYCJyE6PwhhLsfRAjBa9g4"
+    "xbctxBQAxaucCvwbg4KdcTgAvATMUCvUVe0zL6pNnYE3EMpsGk7F2Tih/4Tb/vPT/8gTfzOf"
+    "o4Fjwq9DiJE+EH79DrDJIruSTjwB2EHbt/I+A2YCL6sVakrKYileRQEuBe4F+qTiMzQciiM0"
+    "vt/4/04/Y/qiYWXD3kaMBJLMRAFGAIPDPwcQe/ua1/o+NjjRN5nEE4Aa9OqmsahBREHNTGRR"
+    "L1koXiUHEWF4B9Ax2X//qPKjVj048cH5E4+YqG3xBBDTmajlyyRpjROxz68NKH5E5w+FH/8B"
+    "1lljWuqIJwCVGJzZF+ZDxGj/mlqhNibZtoRRvEpH4CrgHMR/4uEEBWxzKI537x5796Y7Rt+h"
+    "fTkehMBo88GPgM2H8RkSe+EGxiIO84CWW31+hNsftWRduhNPAE5ElOqKPORiL/AMYiV/bWrN"
+    "azuKV+kEnIXYqjwOMYXJNWgeQiRwbEYo/Dtqhaqd26YAYxApnyDCpjuhb3uuQESASdKbQkTn"
+    "1zzIRvSSXo3AWxicWZEJJLILcCIilj8f+BZ4W61Q02r7K+wh9ECIQR5iBbcSqFIr1HgRXCei"
+    "V3xxIm4ULYdiB8ILSqvvQ/I9vRD/t1qQ2UH02v11iMXmtIvvbwumZANmAEcBpyO8AgfQAf2m"
+    "aUBEM8qDQNIHB8I71EJ7VUSH16ax1cCb2PAgj2QjBSBx+gM/Qh/9C9GDklTEwQ8ZtUKcoRQg"
+    "Ylq0/JIAYr6veYI7ELUps8KrkwLQNroh1heKwz97EN6Ati6wHbFAmBU3TxrSA7FIrNWkiJzv"
+    "A/wXscuTlok97UEKQNvJQWQ+Dgj/7EBslWo3VT1iXWCX+aZJDHAiAnuGhX9u7fL7EHX80qaU"
+    "V7KQAtB+jkW4ktqWY+SUAMSC6RekcaZYhtATcWKP9n/T2uXfjYh2TYsinslGCsDh0QWxLqAF"
+    "S3nCrzVR8CG2C9dj07PhMphCRFRfZJp6a5d/BeLwDvsdPWUSUgAOnxzEDsGg8M8KYqEp0hvY"
+    "C3yCDCM2AyfC1R+OLsRa4U6trlszwuXfYLp1NkMKQPL4AWJKoFVDciK8gchCJusRuwXGBQYl"
+    "h0MPhLuvJa+piK28BvRRfycwG72Sb1YjBSC5FCGyFAdFvJeL2DXQdgqagbWIuHIpBMmhDJHB"
+    "1yPivSZEJw+Ff24ElpIGx3WZiRSA1NAHGIceXqogpgSRIdV+4BtgDXKhsL10R7j6XSLeCyBS"
+    "0bWtWBURsv0h8ns+BCkAqcOJKCR5InrwkAshBJG5CUHEXHQ1YgtREp/eiI7fOeK9EOL7i3T3"
+    "dyFyWTI2lv9wkQKQeooRySZHRLznRAhBXsR7IUShia+R89NoKIhKUMNpmaIeRMzzm9A7fhNi"
+    "xF+F3H2JiRQA8+iHSKrqGvGeEzEtyKNlwdMqhBhsAcOjCLOFjgjx7EdLwQygd/zI91Yhdlws"
+    "S09PJ6QAmE9vxLQgcn/agRCCfFoKQRBxXsK3iNXrbPnPykN0+CM4tNCLH9HxI8OtfcBKROCV"
+    "7PhtQAqAdXRDCEH/iPcUxPpAHoeeg9CI8Ao2kZkpqk6EKB6BWNyLFEItN7+JljsnjYht1ZXI"
+    "/It2IQXAekoRQjCIlje9EyEEeRxa4ageMU2oQngGDaQfCuLf3hUhhmUc+u9sRnTyZlp6PweB"
+    "zxHJO1mTuJMKpADYhw6IvezBHHr2gQfdM4h23MkBdDGowp7bXQrCne+G6PRdiH44rR9hfyP6"
+    "Hj4IAdiG2Db9hiwO300mUgDsh4JYJxiKqE3vanXNE37kYHy6cy1imlCL2FHQns0aLfMRK/XF"
+    "Ec+lRD/eTUW49T7ESN/axn2ITr+WND+Fx45IAbA3boQIDEXMj1uP/g50QfAQ/7j3BoQY1CLc"
+    "aB9ixNXOt498BNBHWXf44Yp4HfleHi07eyw7tEKbzRGf35pG9CApuYefQqQApA9FwJGIKMMe"
+    "RO9kkYLgQsypD6dCcoj2nf2ooSKERBMTrcNHu+lqEC7+JkSR1lCUNpIkIwUgPXEi5tG9ENOF"
+    "bhh3dCV8zYUuCtrz4R2lLFARnVU7NivyEasTH0B0+G2IrU7p3luAFIDMwIXYOuuNEIVS9KzE"
+    "eCitHo5WP4Po5FpHj3xuy81zALFIuRXR6TNxKzPtkAKQuRQhVt07Is4z6BR+XRzrlw4T7TSd"
+    "/YjFu8jnbI9otCVSALIPF0IIihBegrZmEO21GzHaa3N3n8HrJsQcXrrxaYYUAIkki0nGIpBE"
+    "IklTpABIJFmMFACJJIuRAiCRZDFSACSSLEYKgESSxUgBkEiyGCkAEkkWIwVAIslipABIJFmM"
+    "FACJJIuRAiCRZDFSACSSLEYKgESSxUgBkEiyGCkAEkkWIwVAIslipABIJFmMFACJJIuRAiCR"
+    "ZDFSACSSLEYKgESSxUgBkEiyGCkAEkkWIwVAIslipABIJFmMFACJJIuRAiCRZDH/D8PLUIX+"
+    "BVXhAAAAAElFTkSuQmCC")
+getMainData = Main.GetData
+getMainImage = Main.GetImage
+getMainBitmap = Main.GetBitmap
+getMainIcon = Main.GetIcon
+
diff --git a/src/leastsquaresfit.py b/src/leastsquaresfit.py
new file mode 100644
index 0000000..81e2c89
--- /dev/null
+++ b/src/leastsquaresfit.py
@@ -0,0 +1,356 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module leastsquaresfit
+    Here are the necessary functions for computing a fit with given parameters.
+    See included class "Fit" for more information.
+
+    scipy.optimize.leastsq
+    "leastsq" is a wrapper around MINPACK's lmdif and lmder algorithms.
+    Those use the Levenberg-Marquardt algorithm.
+      subroutine lmdif
+ 
+      the purpose of lmdif is to minimize the sum of the squares of
+      m nonlinear functions in n variables by a modification of
+      the levenberg-marquardt algorithm. the user must provide a
+      subroutine which calculates the functions. the jacobian is
+      then calculated by a forward-difference approximation.
+"""
+
+
+import matplotlib.pyplot as plt
+import numpy as np
+from scipy import interpolate as spintp
+from scipy import optimize as spopt
+import warnings
+
+# If we use this module with PyCorrFit, we can plot things with latex using
+# our own special thing.
+try:
+    import plotting
+except:
+    pass
+
+
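+# Minimal usage sketch (assuming a model function f(parms, x) and
+# experimental data *data* given as an array of (tau, G(tau)) rows;
+# the names below are illustrative, not part of this module):
+#
+#     fit = Fit()
+#     fit.function = f
+#     fit.dataexpfull = data
+#     fit.values = starting_parms
+#     fit.valuestofit = [True, False, True]
+#     fit.check_parms = lambda parms: parms  # no plausibility checks
+#     fit.ApplyParameters()
+#     fit.least_square()
+#     # Results: fit.parmoptim, fit.parmoptim_error, fit.chi
+
+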
+class Fit(object):
+    """
+        The class Fit needs the following parameters to perform a fit:
+        check_parms - A function checking the parameters for plausibility.
+        dataexpfull - Full experimental data *array of tuples*
+        function - function to be used for fitting f(parms, x) 
+        interval - interval of dataexpfull to fit in. [a, b]
+        values - starting parameters *parms* for fitting. *array*
+        valuestofit - which parameter to use for fitting. *bool array*
+        weights - no. of datapoints from left and right to use for weighting
+        fittype - type of fit. Can be one of the following
+                  - "None" (standard) - no weights. (*weights* is ignored)
+                  - "splineX" - fit a Xth order spline and calulate standard
+                               deviation from that difference
+                  - "model function" - calculate std. dev. from difference
+                                        of fit function and dataexpfull.
+                  - "other" - use an external std. dev.. The variable
+                              self.external_deviations has to be set before
+                              self.ApplyParameters is called. Cropping with
+                              *interval* is performed here.
+    """
+    def __init__(self):
+        """ Initial setting of needed variables via the given *fitset* """   
+        self.check_parms = None
+        self.dataexpfull = None
+        self.function = None
+        self.interval = None
+        self.uselatex = False # Optionally use LaTeX. This is passed to
+                              # each plotting command, but only if the
+                              # plotting module is available.
+        self.values = None
+        self.valuestofit = None
+
+        self.verbose = False # Verbose mode (shows e.g. spline fit)
+        # The weights (number of data points to the left and right of
+        # the data array) have to be chosen such that the interval
+        # +/- weights does not exceed self.dataexpfull!
+        self.weights = None
+        # Changing fittype will change calculation of variances=dataweights**2.
+        # None means dataweights is 1.
+        self.fittype = "None"
+        # Chi**2 Value
+        self.chi = None
+        # Messages from leastsq
+        self.mesg = None
+        # Optimal parameters found by leastsq
+        self.parmoptim = None
+        self.covar = None # covariance matrix 
+        self.parmoptim_error = None # Errors of fit
+        # Variances for fitting
+        self.dataweights = None
+        # External std defined by the user
+        self.external_deviations = None
+        # It is possible to edit tolerance for fitting
+        # ftol, xtol and gtol.
+        # Those parameters could be added to the fitting routine later.
+        # Should we do a weighted fit?
+        # Default is yes. If there are no weights
+        # (self.fittype not set), this value becomes False.
+        self.weightedfit = True
+        
+
+
+    def ApplyParameters(self):
+        if self.interval is None:
+            self.startcrop = self.endcrop = 0
+        else:
+            [self.startcrop, self.endcrop] = self.interval
+        # Get self.dataexp
+        if self.startcrop == self.endcrop:
+            self.dataexp = 1*self.dataexpfull
+            self.startcrop = 0
+            self.endcrop = len(self.dataexpfull)
+        else:
+            self.dataexp = 1*self.dataexpfull[self.startcrop:self.endcrop]
+            # If startcrop is larger than the length of dataexp,
+            # we will not get an array. Prevent that.
+            if len(self.dataexp) == 0:
+                self.dataexp = 1*self.dataexpfull
+        # Calculate x-values
+        # (Extract tau-values from dataexp)
+        self.x = self.dataexp[:, 0]
+        # Experimental data
+        self.data = self.dataexp[:,1]
+        # Set fit parameters
+        self.fitparms = np.zeros(sum(self.valuestofit))
+        index = 0
+        for i in np.arange(len(self.values)):
+            if self.valuestofit[i]:
+                self.fitparms[index] = np.float(self.values[i])
+                index = index + 1
+        # Assume we have a weighted fit. If this is not the case,
+        # the value is reset in the final else branch of the
+        # following if-statement:
+        self.weightedfit = True
+        if self.fittype[:6] == "spline":
+            # Number of knots to use for spline
+            try:
+                knotnumber = int(self.fittype[6:])
+            except:
+                print "Could not get knotnumber. Setting to 5."
+                knotnumber = 5
+            # Number of neighbouring (left and right) points to include
+            points = self.weights
+            # Calculated dataweights
+            datalen = len(self.dataexp[:,1])
+            dataweights = np.zeros(datalen)
+            if self.startcrop < points:
+                pmin = self.startcrop
+            else:
+                pmin = points
+            if len(self.dataexpfull) - self.endcrop <  points:
+                pmax = (len(self.dataexpfull) - self.endcrop)
+            else:
+                pmax = points
+            x = self.dataexpfull[self.startcrop-pmin:self.endcrop+pmax,0]
+            xs = np.log10(x)
+            y = self.dataexpfull[self.startcrop-pmin:self.endcrop+pmax,1]
+            knots = np.linspace(xs[1], xs[-1], knotnumber+2)[1:-1]
+            try:
+                tck = spintp.splrep(xs,y,s=0,k=3,t=knots,task=-1)
+                ys = spintp.splev(xs,tck,der=0)
+            except:
+                print "Could not find spline with "+str(knotnumber)+" knots."
+                return
+            if self.verbose == True:
+                try:
+                    # If plotting module is available:
+                    name = "Spline fit: "+str(knotnumber)+" knots"
+                    plotting.savePlotSingle(name, 1*x, 1*y, 1*ys, dirname = ".",
+                                            uselatex=self.uselatex)
+                except:
+                    plt.xscale("log")
+                    plt.plot(x,ys, x,y)
+                    plt.show()
+            ## Calculation of variance
+            # In some cases, the cropping interval from self.startcrop
+            # to self.endcrop is chosen such that the dataweights would
+            # have to be calculated from data points that do not exist
+            # (e.g. points + endcrop > len(dataexpfull)).
+            # We deal with this by multiplying the dataweights with a
+            # factor corresponding to the missing points.
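+            # Example (hypothetical numbers): with points = 3 and
+            # startcrop = 0, the window for i = 0 would extend 3 points
+            # to the left of the data, so only 4 of the 2*3+1 = 7 bins
+            # exist; the standard deviation is then scaled by 7/4.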
+            for i in np.arange(datalen):
+                # Define start and end positions of the sections from
+                # where we wish to calculate the dataweights.
+                # Offset at beginning:
+                if  i + self.startcrop <  points:
+                    # The offset that occurs
+                    offsetstart = points - i - self.startcrop
+                    offsetcrop = 0
+                elif self.startcrop > points:
+                    offsetstart = 0
+                    offsetcrop = self.startcrop - points
+                else:
+                    offsetstart = 0
+                    offsetcrop = 0
+                # i: counter on dataexp array
+                # start: counter on y array
+                start = i - points + offsetstart + self.startcrop - offsetcrop
+                end = start + 2*points + 1 - offsetstart
+                dataweights[i] = (y[start:end] - ys[start:end]).std()
+                # The standard deviation at the end and the start of the
+                # array are multiplied by a factor corresponding to the
+                # number of bins that were not used for calculation of the
+                # standard deviation.
+                if offsetstart != 0:
+                    reference = 2*points + 1
+                    divisor = reference - offsetstart
+                    # Note: float division; integer division would
+                    # truncate the correction factor to 1.
+                    dataweights[i] *= reference / float(divisor)
+                # Do not substitute len(y[start:end]) with end-start!
+                # It is not the same!
+                backset = 2*points + 1 - len(y[start:end]) - offsetstart
+                if backset != 0:
+                    reference = 2*points + 1
+                    divisor = reference - backset
+                    dataweights[i] *= reference / float(divisor)
+        elif self.fittype == "model function":
+            # Number of neighbouring (left and right) points to include
+            points = self.weights
+            if self.startcrop < points:
+                pmin = self.startcrop
+            else:
+                pmin = points
+            if len(self.dataexpfull) - self.endcrop <  points:
+                pmax = (len(self.dataexpfull) - self.endcrop)
+            else:
+                pmax = points
+            x = self.dataexpfull[self.startcrop-pmin:self.endcrop+pmax,0]
+            y = self.dataexpfull[self.startcrop-pmin:self.endcrop+pmax,1]
+            # Calculated dataweights
+            datalen = len(self.dataexp[:,1])
+            dataweights = np.zeros(datalen)
+            for i in np.arange(datalen):
+                # Define start and end positions of the sections from
+                # where we wish to calculate the dataweights.
+                # Offset at beginning:
+                if  i + self.startcrop <  points:
+                    # The offset that occurs
+                    offsetstart = points - i - self.startcrop
+                    offsetcrop = 0
+                elif self.startcrop > points:
+                    offsetstart = 0
+                    offsetcrop = self.startcrop - points
+                else:
+                    offsetstart = 0
+                    offsetcrop = 0
+                # i: counter on dataexp array
+                # start: counter on dataexpfull array
+                start = i - points + offsetstart + self.startcrop - offsetcrop
+                end = start + 2*points + 1 - offsetstart
+                diff = y - self.function(self.values, x)
+                dataweights[i] = diff[start:end].std()
+                # The standard deviation at the end and the start of the
+                # array are multiplied by a factor corresponding to the
+                # number of bins that were not used for calculation of the
+                # standard deviation.
+                if offsetstart != 0:
+                    reference = 2*points + 1
+                    divisor = reference - offsetstart
+                    dataweights[i] *= reference / float(divisor)
+                # Do not substitute len(diff[start:end]) with end-start!
+                # It is not the same!
+                backset = 2*points + 1 - len(diff[start:end]) - offsetstart
+                if backset != 0:
+                    reference = 2*points + 1
+                    divisor = reference - backset
+                    dataweights[i] *= reference / float(divisor)
+        elif self.fittype == "other":
+            # This means that the user knows the dataweights and already
+            # gave it to us.
+            if self.external_deviations is not None:
+                dataweights = \
+                           self.external_deviations[self.startcrop:self.endcrop]
+            else:
+                raise ValueError(
+                    "self.external_deviations not set for fit type 'other'.")
+        else:
+            # The fit.Fit() class will divide the function to minimize
+            # by the dataweights only if we have weights
+            self.weightedfit=False
+            dataweights=None
+        self.dataweights = dataweights
+
+
+    def fit_function(self, parms, x):
+        """ Create the function to be minimized via least squares.
+            The old function *function* has more parameters than we need for
+            the fitting. So we use this function to set only the necessary 
+            parameters. Returns what *function* would have done.
+        """
+        # Reorder the needed variables from *spopt.leastsq* for *function*.
+        index = 0
+        for i in np.arange(len(self.values)):
+            if self.valuestofit[i]:
+                self.values[i] = parms[index]
+                index = index + 1
+        # Only allow physically correct parameters
+        self.values = self.check_parms(self.values)
+        tominimize = (self.function(self.values, x) - self.data)
+        # Check if we have a weighted fit
+        if self.weightedfit is True:
+            # Check dataweights for zeros and don't use these
+            # values for the least squares method.
+            with np.errstate(divide='ignore'):
+                tominimize = np.where(self.dataweights!=0, 
+                                      tominimize/self.dataweights, 0)
+            ## There might be NaN values because of zero weights:
+            #tominimize = tominimize[~np.isinf(tominimize)]
+        return tominimize
+
+
+    def get_chi_squared(self):
+        # Calculate Chi**2
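+        # The reduced chi-squared is computed as
+        #     chi2 = sum_i r_i**2 / (N - n_fitted - 1),
+        # with the (possibly weighted) residuals r_i returned by
+        # fit_function() and N = len(self.x).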
+        degrees_of_freedom = len(self.x) - len(self.parmoptim) - 1
+        return np.sum( (self.fit_function(self.parmoptim, self.x))**2) / \
+                   degrees_of_freedom
+
+
+    def least_square(self):
+        """ This will minimize *self.fit_function()* using least squares.
+            *self.values*: The values with which the function is called.
+            *valuestofit*: A list with bool values that indicate which values
+            should be used for fitting.
+            Function *self.fit_function()* takes two parameters:
+            self.fit_function(parms, x) where *x* are x-values of *dataexp*.
+        """
+        if np.sum(self.valuestofit) == 0:
+            print "No fitting parameters selected."
+            self.valuesoptim = 1*self.values
+            return
+        # Begin fitting
+        res = spopt.leastsq(self.fit_function, self.fitparms[:],
+                            args=(self.x,), full_output=1)
+        (popt, pcov, infodict, errmsg, ier) = res
+        self.parmoptim = popt
+        if ier not in [1,2,3,4]:
+            print "Optimal parameters not found: " + errmsg
+        # Now write the optimal parameters to our values:
+        index = 0
+        for i in np.arange(len(self.values)):
+            if self.valuestofit[i]:
+                self.values[i] = self.parmoptim[index]
+                index = index + 1
+        # Only allow physically correct parameters
+        self.values = self.check_parms(self.values)
+        # Write optimal parameters back to this class.
+        self.valuesoptim = 1*self.values # redundant copy of self.values
+        self.chi = self.get_chi_squared()
+        try:
+            self.covar = pcov * self.chi # The covariance matrix
+        except:
+            print "PyCorrFit Warning: Error estimate not possible, because we"
+            print "          could not calculate covariance matrix. Please try"
+            print "          reducing the number of fitting parameters."
+            self.parmoptim_error = None
+        else:
+            # Error estimation of fitted parameters
+            if self.covar is not None:
+                self.parmoptim_error = np.diag(self.covar)
diff --git a/src/misc.py b/src/misc.py
new file mode 100644
index 0000000..0df44bd
--- /dev/null
+++ b/src/misc.py
@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module misc
+"""
+
+import codecs
+from distutils.version import LooseVersion # For version checking
+import numpy as np
+import os
+import platform
+import sys
+import tempfile
+import urllib2
+import webbrowser
+import wx                               # GUI interface wxPython
+import wx.html
+import wx.lib.delayedresult as delayedresult
+
+import doc                          # Documentation/some texts
+# The icon file was created with
+# img2py -i -n Main PyCorrFit_icon.png icon.py
+import icon                         # Contains the program icon
+
+
+class UpdateDlg(wx.Frame):
+    def __init__(self, parent, valuedict):
+        
+        description = valuedict["Description"]
+        homepage = valuedict["Homepage"]
+        githome = valuedict["Homepage_GIT"]
+        changelog = valuedict["Changelog"]
+        pos = parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent, wx.ID_ANY, title="Update", 
+                          size=(250,180), pos=pos)
+        self.changelog = changelog
+        # Fill html content
+        html = wxHTML(self)
+        string = "<b> PyCorrFit <br></b>" +\
+                 "Your version: " + description[0] + "<br>" +\
+                 "Latest version: " + description[1] + "<br>" +\
+                 "(" + description[2] + ")<br><p><b>"
+        if len(homepage) != 0:
+            string = string + '<a href="'+homepage+'">Homepage</a><br>'
+        if len(githome) != 0:
+            string = string + '<a href="'+githome+'">Repository</a><br>'
+
+        if len(changelog) != 0:
+            string = string + \
+                     '<a href="'+changelog+'">Change Log</a>'
+        string = string+'</b></p>'
+        html.SetPage(string)
+        self.Bind(wx.EVT_CLOSE, self.Close)
+        # Set window icon
+        ico = getMainIcon()
+        wx.Frame.SetIcon(self, ico)
+
+
+    def Close(self, event):
+        if len(self.changelog) != 0:
+            # Cleanup downloaded file, if it was downloaded
+            if self.changelog != doc.StaticChangeLog:
+                os.remove(self.changelog)
+        self.Destroy()
+
+
+class wxHTML(wx.html.HtmlWindow):
+    def OnLinkClicked(self, link):
+        webbrowser.open(link.GetHref())
+
+def parseString2Pagenum(parent, string, nodialog=False):
+    """ Parse a string with a list of pagenumbers to an integer list with
+        page numbers.
+        e.g. "1-3,5,7" --> [1,2,3,5,7]
+        *parent* is the parent window; it is needed for the error dialog.
+    """
+    listFull = string.split(",")
+    PageNumbers = list()
+    try:
+        for item in listFull:
+            pagerange = item.split("-")
+            start = pagerange[0].strip()
+            start = int(filter(type(start).isdigit, start))
+            end = pagerange[-1].strip()
+            end = int(filter(type(end).isdigit, end))
+            for i in np.arange(end-start+1)+start:
+                PageNumbers.append(i)
+        PageNumbers.sort()
+        return PageNumbers
+    except:
+        # Define the error message first; it is also needed when
+        # *nodialog* is True.
+        errstring = "Invalid syntax in page selection: "+string+\
+                    ". Please use a comma separated list with"+\
+                    " optional dashes, e.g. '1-3,6,8'."
+        if nodialog is False:
+            try:
+                dlg = wx.MessageDialog(parent, errstring, "Error",
+                                       style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg.ShowModal()
+            except:
+                raise ValueError(errstring)
+        else:
+            raise ValueError(errstring)
+        return None
+        
+
+def parsePagenum2String(pagenumlist):
+    """ Make a string with dashes and commas from a list of pagenumbers.
+        e.g. [1,2,3,5,7] --> "1-3,5,7"
+    """
+    if len(pagenumlist) == 0:
+        return ""
+    # Make sure we have integers
+    newlist = list()
+    for num in pagenumlist:
+        newlist.append(int(num))
+    newlist.sort()
+    # begin string
+    string = str(newlist[0])
+    # iteration through list:
+    dash = False
+    for i in np.arange(len(newlist)-1)+1:
+        if dash:
+            if newlist[i]-1 == newlist[i-1]:
+                pass
+            else:
+                string += "-"+str(newlist[i-1])+", "+str(newlist[i])
+                dash = False
+        else:
+            if newlist[i]-1 == newlist[i-1]:
+                if newlist[i]-2 == newlist[i-2]:
+                    dash = True
+                elif len(newlist) != i+1 and newlist[i]+1 == newlist[i+1]:
+                    dash = True
+                else:
+                    string += ", "+str(newlist[i])
+                    dash = False
+            else:
+                dash = False
+                string += ", "+str(newlist[i])
+        # Put final number
+        if newlist[i] == newlist[-1]:
+            if parseString2Pagenum(None, string)[-1] != newlist[i]:
+                if dash:
+                    string += "-"+str(newlist[i])
+                else:
+                    string += ", "+str(newlist[i])
+    return string
+
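+# Editor's sketch (not part of the upstream code): the two page-number
+# helpers are inverse operations, so a round trip reproduces the selection:
+#
+#     pages = parseString2Pagenum(None, "1-3,5,7", nodialog=True)
+#     # pages == [1, 2, 3, 5, 7]
+#     parsePagenum2String(pages)
+#     # returns "1-3, 5, 7"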
+
+def removewrongUTF8(name):
+    """ Remove characters from a string that are not valid UTF-8. """
+    newname = u""
+    for char in name:
+        try:
+            codecs.decode(char, "UTF-8")
+        except UnicodeDecodeError:
+            pass
+        else:
+            newname += char
+    return newname
+    
+
+def getMainIcon(pxlength=32):
+    """ *pxlength* is the side length in pixels of the icon """
+    # Set window icon
+    iconBMP = icon.getMainBitmap()
+    # scale
+    image = wx.ImageFromBitmap(iconBMP)
+    image = image.Scale(pxlength, pxlength, wx.IMAGE_QUALITY_HIGH)
+    iconBMP = wx.BitmapFromImage(image)
+    iconICO = wx.IconFromBitmap(iconBMP)
+    return iconICO
+
+
+def findprogram(program):
+    """ Uses the systems PATH variable find executables"""
+    path = os.environ['PATH']
+    paths = path.split(os.pathsep)
+    for d in paths:
+        if os.path.isdir(d):
+            fullpath = os.path.join(d, program)
+            if sys.platform[:3] == 'win':
+                for ext in '.exe', '.bat':
+                    program_path = fullpath + ext
+                    if os.path.isfile(fullpath + ext):
+                        return (1, program_path)
+            else:
+                if os.path.isfile(fullpath):
+                    return (1, fullpath)
+    return (0, None)
+
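+# Editor's sketch (not part of the upstream code): findprogram returns a
+# (found, path) tuple, where *found* is 1 on success and 0 otherwise;
+# "gnuplot" below is just a stand-in for any executable name:
+#
+#     found, path = findprogram("gnuplot")
+#     # on Windows, the extensions ".exe" and ".bat" are tried automatically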
+
+def Update(parent):
+    """ This is a thread for _Update """
+    parent.StatusBar.SetStatusText("Connecting to server...")
+    delayedresult.startWorker(_UpdateConsumer, _UpdateWorker,
+                              wargs=(parent,), cargs=(parent,))
+
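+# Editor's note (sketch): wx.lib.delayedresult runs _UpdateWorker(parent) in
+# a background thread and, once it returns, delivers the result to
+# _UpdateConsumer on the GUI thread, so the frontend stays responsive while
+# the version check runs.
+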
+def _UpdateConsumer(delayedresult, parent):
+    results = delayedresult.get()
+    dlg = UpdateDlg(parent, results)
+    dlg.Show()
+    parent.StatusBar.SetStatusText("...update status: "+results["Description"][2])
+
+
+def _UpdateWorker(parent):
+    changelog = ""
+    hpversion = None
+    # I created this TXT record to keep track of the current web presence.
+    try:
+        urlopener = urllib2.urlopen(doc.HomePage, timeout=2)
+        homepage = urlopener.geturl()
+    except:
+        homepage = doc.HomePage
+    try:
+        urlopener2 = urllib2.urlopen(doc.GitHome, timeout=2)
+        githome = urlopener2.geturl()
+    except:
+        githome = ""
+    # Find the changelog file
+    try:
+        responseCL = urllib2.urlopen(homepage+doc.ChangeLog, timeout=2)
+    except:
+        CLfile = doc.GitChLog
+    else:
+        fileresponse = responseCL.read()
+        CLlines = fileresponse.splitlines()
+        # We have a transition between ChangeLog.txt on the homepage
+        # containing the actual changelog or containing a link to
+        # the ChangeLog file.
+        if len(CLlines) == 1:
+            CLfile = CLlines[0]
+        else:
+            hpversion = CLlines[0]
+            CLfile = doc.GitChLog
+    # Continue version comparison if True
+    continuecomp = False
+    try:
+        responseVer = urllib2.urlopen(CLfile, timeout=2)
+    except:
+        if hpversion is None:
+            newversion = "unknown"
+            action = "cannot connect to server"
+        else:
+            newversion = hpversion
+            continuecomp = True
+    else:
+        continuecomp = True
+        changelog = responseVer.read()
+        newversion = changelog.splitlines()[0]
+    if continuecomp:
+        new = LooseVersion(newversion)
+        old = LooseVersion(parent.version)
+        if new > old:
+            action = "update available"
+        elif new < old:
+            action = "whoop you rock!"
+        else:
+            action = "state of the art"
+    description = [parent.version, newversion, action]
+    if len(changelog) != 0:
+        changelogfile = tempfile.mktemp() + "_PyCorrFit_ChangeLog.txt"
+        clfile = open(changelogfile, 'wb')
+        clfile.write(changelog)
+        clfile.close()
+    else:
+        changelogfile = doc.StaticChangeLog
+    results = dict()
+    results["Description"] = description
+    results["Homepage"] = homepage
+    results["Homepage_GIT"] = githome
+    results["Changelog"] = changelogfile
+    return results
diff --git a/src/models/MODEL_TIRF_1C.py b/src/models/MODEL_TIRF_1C.py
new file mode 100755
index 0000000..41e158a
--- /dev/null
+++ b/src/models/MODEL_TIRF_1C.py
@@ -0,0 +1,239 @@
+# -*- coding: utf-8 -*-
+""" This file contains TIR one component models
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)
+
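+# Editor's sketch (not part of the upstream code): for real x >= 0 the
+# definition above implies wixi(x) == exp(x**2)*erfc(x), which can be
+# checked numerically:
+#
+#     x = np.linspace(0, 3, 7)
+#     np.allclose(wixi(x), np.exp(x**2)*sps.erfc(x))    # -> True
+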
+def CF_Gxy_TIR_square(parms, tau):
+    # Model 6000
+    u""" Two-dimensional diffusion with a square shaped lateral detection
+        area taking into account the size of the point spread function.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D      Diffusion coefficient
+        [1] σ      Lateral size of the point spread function
+                   σ = σ₀ * λ / NA
+        [2] a      Side size of the square-shaped detection area
+        [3] C_2D   Particle concentration in detection area
+        *tau* - lag time
+
+        Please refer to the documentation of PyCorrFit
+        for further information on this model function.
+        
+        Returns: Normalized lateral correlation function w/square pinhole.
+    """
+    D = parms[0]
+    sigma = parms[1]
+    a = parms[2]
+    Conc = parms[3]
+
+    var1 = sigma**2+D*tau
+    AA = 2*np.sqrt(var1)/(a**2*np.sqrt(np.pi))
+    BB = np.exp(-a**2/(4*(var1))) - 1
+    CC = sps.erf(a/(2*np.sqrt(var1)))/a
+    # gx = AA*BB+CC
+    # gxy = gx**2
+    # g2D = gxy * eta**2 * Conc
+    # F = 1/(eta*Conc)
+    # G = g2D / F**2
+    G = 1/Conc * (AA*BB+CC)**2
+    return G
+
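+# Editor's sketch (not part of the upstream code): evaluating the model over
+# a range of lag times with the default parameter set values_6000 defined
+# further below (units as given in labels_6000):
+#
+#     taus = np.logspace(-6, 1, 100)
+#     G = CF_Gxy_TIR_square([0.054, 2.3, 7.5, 0.6], taus)
+#     # G decays monotonically from its amplitude G(0)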
+
+# 3D free tir
+def CF_Gxyz_TIR_square(parms, tau, wixi=wixi):
+    # Model 6010
+    u""" Three-dimensional diffusion with a square-shaped lateral
+        detection area taking into account the size of the
+        point spread function; and an exponential decaying profile
+        in axial direction.
+        
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D      Diffusion coefficient
+        [1] σ      Lateral size of the point spread function
+                   σ = σ₀ * λ / NA
+        [2] a      Side size of the square-shaped detection area
+        [3] d_eva  Evanescent penetration depth
+        [4] C_3D   Particle concentration in detection volume
+        *tau* - lag time
+
+        Please refer to the documentation of PyCorrFit
+        for further information on this model function.
+        
+        Returns: 3D correlation function for TIR-FCS w/square pinhole
+    """
+    D = parms[0]
+    sigma = parms[1]
+    a = parms[2]
+    kappa = 1/parms[3]
+    Conc = parms[4]
+    ### Calculate gxy
+
+    # Axial correlation    
+    x = np.sqrt(D*tau)*kappa
+    w_ix = wixi(x)
+    gz = np.sqrt(D*tau/np.pi) - (2*D*tau*kappa**2 - 1)/(2*kappa) * w_ix
+
+    # Lateral correlation
+    gx1 = 2/(a**2*np.sqrt(np.pi)) * np.sqrt(sigma**2+D*tau) * \
+          ( np.exp(-a**2/(4*(sigma**2+D*tau))) -1 )
+    gx2 = 1/a * sps.erf( a / (2*np.sqrt(sigma**2 + D*tau))) 
+    gx =  gx1 + gx2
+    gxy = gx**2
+
+    # Non normalized correlation function
+    # We do not need eta after normalization
+    # g = eta**2 * Conc * gxy * gz
+    g = Conc * gxy * gz
+    # Normalization:
+    # F = eta * Conc / kappa
+    F = Conc / kappa
+    G = g / F**2
+    return G
+
+
+def MoreInfo_6000(parms, countrate):
+    u"""Supplementary parameters:
+        For a>>sigma, the correlation function at tau=0 corresponds to:
+        [4] G(τ=0) = 1/(N_eff) * ( 1-2*σ/(sqrt(π)*a) )²
+        Effective detection area:
+        [5] A_eff [µm²] = a²
+        Effective particle number in detection area:
+        [6] N_eff = A_eff * C_2D
+    """
+    D = parms[0]
+    sigma = parms[1]
+    a = parms[2]
+    Conc = parms[3]
+    Info=list()
+
+    # Detection area:
+    Aeff = a**2 
+    # Particle number
+    Neff = Aeff * Conc
+    # Correlation function at tau = 0
+    G_0 = CF_Gxy_TIR_square(parms, 0)
+
+    Info.append(["G(0)", G_0])
+
+    # 10 000 nm² = 0.01 µm²
+    # Aeff * 10 000 nm² * 10**(-6)µm²/nm² = Aeff * 0.01 * µm²
+    # Have to divide Aeff by 100
+    Info.append([u"A_eff [µm²]", Aeff / 100])
+    Info.append(["N_eff", Neff])
+    if countrate is not None:
+        # CPP
+        cpp = countrate/Neff
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+def MoreInfo_6010(parms, countrate):
+    u"""Supplementary parameters:
+        Molarity:
+        [5] C_3D [nM] = C_3D [1000/µm³] * 10000/6.0221415
+        Effective detection volume:
+        [6] V_eff = a² * d_eva
+        Effective particle number:
+        [7] N_eff = V_eff * C_3D
+        For a>>σ, the correlation function at τ=0 corresponds to:
+        [8] G(τ=0) = 1/(2*N_eff) * ( 1-2*σ/(sqrt(π)*a) )²
+    """
+    # 3D Model TIR square
+    # 3D TIR (□xσ/exp),Simple 3D diffusion w/ TIR, fct.CF_Gxyz_square_tir
+    # D [10 µm²/s],σ [100 nm],a [100 nm],d_eva [100 nm],[conc.] [1000 /µm³]
+    sigma = parms[1]
+    a = parms[2]
+    d_eva = parms[3]
+    conc = parms[4]
+
+    Info = list()
+    # Molarity [nM]:
+    # 1000/µm³ * 10**15 µm³/l / (6.0221415*10**23 /mol) * 10**9 nmol/mol = nM
+    cmol = conc * 10000/6.0221415
+    # Effective volume [al]:
+    Veff = a**2 * d_eva
+    # Effective particle number
+    Neff = a**2 * d_eva * conc
+    # Correlation function at tau = 0
+    G_0 = CF_Gxyz_TIR_square(parms, 0)
+
+    Info.append(["G(0)", G_0])
+
+    Info.append(["C_3D [nM]", cmol])
+    # atto liters
+    # 1 000 000 nm³ = 1 al
+    Info.append(["V_eff [al]", Veff])
+    Info.append(["N_eff", Neff])
+    if countrate is not None:
+        # CPP
+        cpp = countrate/Neff
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+
+# 2D Model Square
+m_twodsq6000 = [6000, u"2D", u"2D diffusion w/ square pinhole",
+                CF_Gxy_TIR_square]
+labels_6000 = [u"D [10 µm²/s]", u"σ [100 nm]", "a [100 nm]", u"C_2D [100 /µm²]"]
+values_6000 = [0.054, 2.3, 7.5, .6] # [D, σ, a, C_2D]
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6000 = [u"D [µm²/s]", u"σ [nm]", "a [nm]",
+                              u"C_2D [1/µm²]"]
+values_factor_human_readable_6000 = [10, 100, 100, 100]
+valuestofit_6000 = [True, False, False, True]      # Use as fit parameter?
+parms_6000 = [labels_6000, values_6000, valuestofit_6000,
+              labels_human_readable_6000, values_factor_human_readable_6000]
+
+# 3D Model TIR square
+m_3dtirsq6010 = [6010, u"3D", "Simple 3D diffusion w/ TIR",
+                 CF_Gxyz_TIR_square]
+labels_6010 = [u"D [10 µm²/s]", u"σ [100 nm]","a [100 nm]", "d_eva [100 nm]",
+               u"C_3D [1000 /µm³]"]
+values_6010 = [0.520, 2.3, 7.5, 1.0, .0216]
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6010 = [u"D [µm²/s]", u"σ [nm]", "a [nm]", "d_eva [nm]",
+                              u"C_3D [1/µm³]"]
+values_factor_human_readable_6010 = [10, 100, 100, 100, 1000]
+valuestofit_6010 = [True, False, False, False, True]
+parms_6010 = [labels_6010, values_6010, valuestofit_6010,
+              labels_human_readable_6010, values_factor_human_readable_6010]
+
+
+
+# Pack the models
+model1 = dict()
+model1["Parameters"] = parms_6000
+model1["Definitions"] = m_twodsq6000
+model1["Supplements"] = MoreInfo_6000
+model1["Verification"] = lambda parms: np.abs(parms)
+
+model2 = dict()
+model2["Parameters"] = parms_6010
+model2["Definitions"] = m_3dtirsq6010
+model2["Supplements"] = MoreInfo_6010
+model2["Verification"] = lambda parms: np.abs(parms)
+
+
+Modelarray = [model1, model2]
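+
+# Editor's note (sketch, not upstream code): each model dictionary bundles
+# the metadata the fitting frontend consumes; the "Verification" entry maps
+# a parameter guess to a valid one, here by enforcing non-negative values:
+#
+#     model1["Verification"]([-0.054, 2.3, -7.5, 0.6])
+#     # -> array([ 0.054,  2.3  ,  7.5  ,  0.6  ])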
diff --git a/src/models/MODEL_TIRF_2D2D.py b/src/models/MODEL_TIRF_2D2D.py
new file mode 100755
index 0000000..19d2bc4
--- /dev/null
+++ b/src/models/MODEL_TIRF_2D2D.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+""" This file contains 2D+2D TIR-FCS models.
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)
+
+
+# 2D + 2D no binding TIRF
+def CF_Gxy_TIR_square_2d2d(parms, tau, wixi=wixi):
+    u""" Two-component two-dimensional diffusion with a square-shaped
+        lateral detection area taking into account the size of the
+        point spread function.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_2D1  Diffusion coefficient of species 1
+        [1] D_2D2  Diffusion coefficient of species 2
+        [2] σ      Lateral size of the point spread function
+                   σ = σ₀ * λ / NA
+        [3] a      Side size of the square-shaped detection area
+        [4] d_eva  Evanescent penetration depth
+        [5] C_2D1  Two-dimensional concentration of species 1
+        [6] C_2D2  Two-dimensional concentration of species 2
+        [7] α      Relative molecular brightness of particle
+                   2 compared to particle 1 (α = q₂/q₁)
+        *tau* - lag time
+    """
+    D_2D1 = parms[0]
+    D_2D2 = parms[1]
+    sigma = parms[2]
+    a = parms[3]
+    kappa = 1/parms[4]
+    Conc_2D1 = parms[5]
+    Conc_2D2 = parms[6]
+    alpha = parms[7]
+
+    ## First the 2D-diffusion of species 1
+    var1 = sigma**2+D_2D1*tau
+    AA1 = 2*np.sqrt(var1)/(a**2*np.sqrt(np.pi))
+    BB1 = np.exp(-a**2/(4*(var1))) - 1
+    CC1 = sps.erf(a/(2*np.sqrt(var1)))/a
+    # gx = AA*BB+CC
+    # gxy = gx**2
+    # g2D = Conc_2D * gxy
+    g2D1 =  Conc_2D1 * (AA1*BB1+CC1)**2
+
+    ## Second the 2D-diffusion of species 2
+    var2 = sigma**2+D_2D2*tau
+    AA2 = 2*np.sqrt(var2)/(a**2*np.sqrt(np.pi))
+    BB2 = np.exp(-a**2/(4*(var2))) - 1
+    CC2 = sps.erf(a/(2*np.sqrt(var2)))/a
+    # gx = AA*BB+CC
+    # gxy = gx**2
+    # g2D = Conc_2D * gxy
+    g2D2 =  alpha**2 * Conc_2D2 * (AA2*BB2+CC2)**2
+
+    ## Finally the Prefactor
+    F = Conc_2D1 + alpha * Conc_2D2
+    G = (g2D1 + g2D2) / F**2
+    return G
+
+
+# 2D-2D Model TIR
+m_tir_2d_2d_mix_6022 = [6022, u"2D+2D","Separate 2D diffusion, TIR", 
+                        CF_Gxy_TIR_square_2d2d]
+labels_6022 = [ "D"+u"\u2081"+u" [10 µm²/s]",
+                "D"+u"\u2082"+u" [10 µm²/s]",
+                u"σ [100 nm]",
+                "a [100 nm]", 
+                "d_eva [100 nm]", 
+                "C"+u"\u2081"+u" [100 /µm²]", 
+                "C"+u"\u2082"+u" [100 /µm²]", 
+                u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")"
+                ]
+values_6022 = [
+                0.90,     # D_2D₁ [10 µm²/s]
+                0.01,    # D_2D₂ [10 µm²/s]
+                2.3,     # σ [100 nm]
+                7.50,    # a [100 nm]
+                1.0,     # d_eva [100 nm]
+                0.01,    # conc.2D₁ [100 /µm²]
+                0.03,    # conc.2D₂ [100 /µm²]
+                1        # alpha
+                ]        
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6022 = [
+                "D"+u"\u2081"+u" [µm²/s]",
+                "D"+u"\u2082"+u" [µm²/s]",
+                u"σ [nm]",
+                "a [nm]", 
+                "d_eva [nm]", 
+                "C"+u"\u2081"+u" [1/µm²]", 
+                "C"+u"\u2082"+u" [1/µm²]", 
+                u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")"
+                ]
+values_factor_human_readable_6022 = [
+                10,     # D_2D₁ [10 µm²/s],
+                10,     # D_2D₂ [10 µm²/s]
+                100,    # σ [100 nm]
+                100,    # a [100 nm]
+                100,    # d_eva [100 nm]
+                100,    # conc.2D₁ [100 /µm²]
+                100,    # conc.2D₂ [100 /µm²]
+                1
+                ]
+valuestofit_6022 = [False, True, False, False, False, False, True, False]
+parms_6022 = [labels_6022, values_6022, valuestofit_6022, 
+              labels_human_readable_6022, values_factor_human_readable_6022]
+
+
+model1 = dict()
+model1["Parameters"] = parms_6022
+model1["Definitions"] = m_tir_2d_2d_mix_6022
+model1["Verification"] = lambda parms: np.abs(parms)
+
+Modelarray = [model1]
diff --git a/src/models/MODEL_TIRF_3D2D.py b/src/models/MODEL_TIRF_3D2D.py
new file mode 100755
index 0000000..1ee6149
--- /dev/null
+++ b/src/models/MODEL_TIRF_3D2D.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+"""  This file contains 3D+2D TIR-FCS models.
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)
+
+
+# 3D + 2D no binding TIRF
+def CF_Gxyz_TIR_square_3d2d(parms, tau, wixi=wixi):
+    u""" Two-component two- and three-dimensional diffusion
+        with a square-shaped lateral detection area taking into account
+        the size of the point spread function; and an exponential
+        decaying profile in axial direction.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_3D    Diffusion coefficient of freely diffusing species
+        [1] D_2D    Diffusion coefficient of surface bound species
+        [2] σ       Lateral size of the point spread function
+                    σ = σ₀ * λ / NA
+        [3] a       Side size of the square-shaped detection area
+        [4] d_eva   Evanescent penetration depth
+        [5] C_3D    Concentration of freely diffusing species
+        [6] C_2D    Concentration of surface bound species
+        [7] α       Relative molecular brightness of 3D particle
+                    compared to 2D particle (α = q3D/q2D)
+        *tau* - lag time
+    """
+    D_3D = parms[0]
+    D_2D = parms[1]
+    sigma = parms[2]
+    a = parms[3]
+    kappa = 1/parms[4]
+    Conc_3D = parms[5]
+    Conc_2D = parms[6]
+    alpha = parms[7]
+
+    ## First the 2D-diffusion at z=0
+    var1 = sigma**2+D_2D*tau
+    AA = 2*np.sqrt(var1)/(a**2*np.sqrt(np.pi))
+    BB = np.exp(-a**2/(4*(var1))) - 1
+    CC = sps.erf(a/(2*np.sqrt(var1)))/a
+    # gx = AA*BB+CC
+    # gxy = gx**2
+    # g2D = Conc_2D * gxy
+    g2D =  Conc_2D * (AA*BB+CC)**2
+
+    ## Second the 3D diffusion for z>0
+    # Axial correlation    
+    x = np.sqrt(D_3D*tau)*kappa
+    w_ix = wixi(x)
+    gz = np.sqrt(D_3D*tau/np.pi) - (2*D_3D*tau*kappa**2 - 1)/(2*kappa) * w_ix
+    # Lateral correlation
+    gx1 = 2/(a**2*np.sqrt(np.pi)) * np.sqrt(sigma**2+D_3D*tau) * \
+          ( np.exp(-a**2/(4*(sigma**2+D_3D*tau))) -1 )
+    gx2 = 1/a * sps.erf( a / (2*np.sqrt(sigma**2 + D_3D*tau))) 
+    gx =  gx1 + gx2
+    gxy = gx**2
+    # Non normalized correlation function
+    g3D = alpha**2 * Conc_3D * gxy * gz
+
+    ## Finally the Prefactor
+    F = alpha * Conc_3D / kappa + Conc_2D
+    G = (g3D + g2D) / F**2
+    return G
+
+
+# 3D-2D Model TIR
+m_tir_3d_2d_mix_6020 = [6020, u"3D+2D",
+                        "Separate 3D and 2D diffusion, 3D TIR",
+                        CF_Gxyz_TIR_square_3d2d]
+labels_6020 = [u"D_3D [10 µm²/s]",
+                u"D_2D [10 µm²/s]",
+                u"σ [100 nm]",
+                "a [100 nm]", 
+                "d_eva [100 nm]", 
+                u"C_3D [1000 /µm³]", 
+                u"C_2D [100 /µm²]", 
+                u"\u03b1"+" (q3D/q2D)"
+                ]
+values_6020 = [
+                50.0,     # D_3D [10 µm²/s]
+                0.81,    # D_2D [10 µm²/s]
+                2.3,     # σ [100 nm]
+                7.50,    # a [100 nm]
+                1.0,     # d_eva [100 nm]
+                0.01,    # conc.3D [1000 /µm³]
+                0.03,    # conc.2D [100 /µm²]
+                1        # alpha
+                ]        
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6020 = ["D_3D [µm²/s]",
+                u"D_2D [µm²/s]",
+                u"σ [nm]",
+                "a [nm]", 
+                "d_eva [nm]", 
+                u"C_3D [1/µm³]", 
+                u"C_2D [1/µm²]", 
+                u"\u03b1"+" (q3D/q2D)"
+                ]
+values_factor_human_readable_6020 = [
+                10,     # D_3D [µm²/s]
+                10,     # D_2D [10 µm²/s]
+                100,    # σ [100 nm]
+                100,    # a [100 nm]
+                100,    # d_eva [100 nm]
+                1000,   # conc.3D [1000 /µm³]
+                100,    # conc.2D [100 /µm²]
+                1       # alpha
+                ]
+valuestofit_6020 = [False, True, False, False, False, False, True, False]
+parms_6020 = [labels_6020, values_6020, valuestofit_6020, 
+              labels_human_readable_6020, values_factor_human_readable_6020]
+
+
+model1 = dict()
+model1["Parameters"] = parms_6020
+model1["Definitions"] = m_tir_3d_2d_mix_6020
+model1["Verification"] = lambda parms: np.abs(parms)
+
+Modelarray = [model1]
diff --git a/src/models/MODEL_TIRF_3D2Dkin_Ries.py b/src/models/MODEL_TIRF_3D2Dkin_Ries.py
new file mode 100755
index 0000000..1d32661
--- /dev/null
+++ b/src/models/MODEL_TIRF_3D2Dkin_Ries.py
@@ -0,0 +1,401 @@
+# -*- coding: utf-8 -*-
+""" 
+    "Total Internal Reflection Fluorescence Correlation Spectroscopy: Effects
+    of Lateral Diffusion and Surface-Generated Fluorescence"
+    Jonas Ries, Eugene P. Petrov, and Petra Schwille
+    Biophysical Journal, Volume 95, July 2008, 390–399
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+import numpy.lib.scimath as nps
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    return np.real_if_close(wixi)
+
+
+# Lateral correlation function
+def CF_gxy_square(parms, tau):
+    """ 2D free diffusion measured with a square pinhole.
+        For the square pinhole, the correlation function can readily be
+        calculated for a TIR-FCS setup.
+        This function is called by other functions within this module.
+        Attention:
+        This is NOT g2D (or gCC), the non-normalized correlation function.
+        g2D = gxy * eta**2 * Conc,
+        where eta is the molecular brightness, Conc the concentration and
+        gxy is this function.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D      Diffusion coefficient
+        [1] sigma  lateral size of the point spread function
+                   sigma = sigma_0 * lambda / NA
+        [2] a      side size of the square pinhole
+        *tau* - lag time
+
+        Returns: Non-normalized lateral correlation function w/square pinhole.
+    """
+    D = parms[0]
+    sigma = parms[1]
+    a = parms[2]
+
+    var1 = sigma**2+D*tau
+    AA = 2*np.sqrt(var1)/(a**2*np.sqrt(np.pi))
+    BB = np.exp(-a**2/(4*(var1))) - 1
+    CC = sps.erf(a/(2*np.sqrt(var1)))/a
+    # gx = AA*BB+CC
+    # gxy = gx**2
+    return (AA*BB+CC)**2
+
+
+def CF_gz_CC(parms, tau, wixi=wixi):
+    """ Axial (1D) diffusion in a TIR-FCS setup.
+        Of the two species (bound/unbound), this is the bound part.
+        This function is called by other functions within this module.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_3D     3D Diffusion coefficient (species A)
+        [1] D_2D     2D Diffusion coefficient of bound species C
+        [2] sigma    lateral size of the point spread function
+                     sigma = sigma_0 * lambda / NA
+        [3] a        side size of the square pinhole
+        [4] d_eva    evanescent decay length (decay to 1/e)
+        [5] Conc_3D  3-dimensional concentration of species A
+        [6] Conc_2D  2-dimensional concentration of species C
+        [7] eta_3D   molecular brightness of species A
+        [8] eta_2D   molecular brightness of species C
+        [9] k_a      Surface association rate constant
+        [10] k_d     Surface dissociation rate constant
+        *tau* - lag time
+    """
+    D = parms[0]
+    # D_2D = parms[1]
+    sigma = parms[2]
+    # a = parms[3]
+    d_eva = parms[4]
+    Conc_3D = parms[5]      # ligand concentration in solution
+    Conc_2D = parms[6]
+    eta_3D = parms[7]
+    eta_2D = parms[8]
+    k_a = parms[9]
+    k_d = parms[10]
+    # Define some other constants:
+    K = k_a/k_d              # equilibrium constant
+    Beta = 1/(1 + K*Conc_3D) # This is wrong in the Ries paper
+    Re = D / d_eva**2
+    Rt = D * (Conc_3D / (Beta * Conc_2D))**2
+    Rr = k_a * Conc_3D + k_d
+    # Define even more constants:
+    sqrtR1 = -Rr/(2*nps.sqrt(Rt)) + nps.sqrt( Rr**2/(4*Rt) - Rr )
+    sqrtR2 = -Rr/(2*nps.sqrt(Rt)) - nps.sqrt( Rr**2/(4*Rt) - Rr )
+    R1 = sqrtR1 **2
+    R2 = sqrtR2 **2
+    # Calculate return function
+    A1 = eta_2D * Conc_2D / (eta_3D * Conc_3D) * Beta
+    A2 = sqrtR1 * wixi(-nps.sqrt(tau*R2)) - sqrtR2 * wixi(-nps.sqrt(tau*R1))
+    A3 = sqrtR1 - sqrtR2
+    Sol = A1 * A2 / A3
+    # Numerical errors may introduce tiny imaginary parts.
+    # We do not want them.
+    return np.real_if_close(Sol)
+
+
+def CF_gz_AC(parms, tau, wixi=wixi):
+    """ Axial (1D) diffusion in a TIR-FCS setup.
+        Of the two species (bound/unbound), this is the cross-correlation part.
+        This function is called by other functions within this module.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_3D     3D Diffusion coefficient (species A)
+        [1] D_2D     2D Diffusion coefficient of bound species C
+        [2] sigma    lateral size of the point spread function
+                     sigma = sigma_0 * lambda / NA
+        [3] a        side size of the square pinhole
+        [4] d_eva    evanescent decay length (decay to 1/e)
+        [5] Conc_3D  3-dimensional concentration of species A
+        [6] Conc_2D  2-dimensional concentration of species C
+        [7] eta_3D   molecular brightness of species A
+        [8] eta_2D   molecular brightness of species C
+        [9] k_a      Surface association rate constant
+        [10] k_d     Surface dissociation rate constant
+        *tau* - lag time
+    """
+    D = parms[0]
+    # D_2D = parms[1]
+    sigma = parms[2]
+    # a = parms[3]
+    d_eva = parms[4]
+    Conc_3D = parms[5]      # ligand concentration in solution
+    Conc_2D = parms[6]
+    eta_3D = parms[7]
+    eta_2D = parms[8]
+    k_a = parms[9]
+    k_d = parms[10]
+    # Define some other constants:
+    K = k_a/k_d             # equilibrium constant
+    Beta = 1/(1 + K*Conc_3D)
+    Re = D / d_eva**2
+    Rt = D * (Conc_3D / (Beta * Conc_2D))**2
+    Rr = k_a * Conc_3D + k_d
+    # Define even more constants:
+    sqrtR1 = -Rr/(2*nps.sqrt(Rt)) + nps.sqrt( Rr**2/(4*Rt) - Rr )
+    sqrtR2 = -Rr/(2*nps.sqrt(Rt)) - nps.sqrt( Rr**2/(4*Rt) - Rr )
+    R1 = sqrtR1 **2
+    R2 = sqrtR2 **2
+    # And even more more:
+    sqrtR3 = sqrtR1 + nps.sqrt(Re)
+    sqrtR4 = sqrtR2 + nps.sqrt(Re)
+    R3 = sqrtR3 **2
+    R4 = sqrtR4 **2
+    # Calculate return function
+    A1 = eta_2D * Conc_2D * k_d / (eta_3D * Conc_3D)
+    A2 = sqrtR4*wixi(-nps.sqrt(tau*R1)) - sqrtR3*wixi(-nps.sqrt(tau*R2))
+    A3 = ( sqrtR1 - sqrtR2 ) * wixi(nps.sqrt(tau*Re))
+    A4 = (sqrtR1 - sqrtR2) * sqrtR3*sqrtR4
+    Solution = A1 * ( A2 + A3 ) / A4
+    # Numerical errors may introduce tiny imaginary parts.
+    # We do not want them.
+    return np.real_if_close(Solution)
+
+
+def CF_gz_AA(parms, tau, wixi=wixi):
+    """ Axial (1D) diffusion in a TIR-FCS setup.
+        Of the two species (bound/unbound), this is the unbound part.
+        This function is called by other functions within this module.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_3D     3D Diffusion coefficient (species A)
+        [1] D_2D     2D Diffusion coefficient of bound species C
+        [2] sigma    lateral size of the point spread function
+                     sigma = sigma_0 * lambda / NA
+        [3] a        side size of the square pinhole
+        [4] d_eva    evanescent decay length (decay to 1/e)
+        [5] Conc_3D  3-dimensional concentration of species A
+        [6] Conc_2D  2-dimensional concentration of species C
+        [7] eta_3D   molecular brightness of species A
+        [8] eta_2D   molecular brightness of species C
+        [9] k_a      Surface association rate constant
+        [10] k_d     Surface dissociation rate constant
+        *tau* - lag time
+    """
+    D = parms[0]
+    # D_2D = parms[1]
+    sigma = parms[2]
+    # a = parms[3]
+    d_eva = parms[4]
+    Conc_3D = parms[5]      # ligand concentration in solution
+    Conc_2D = parms[6]
+    eta_3D = parms[7]
+    eta_2D = parms[8]
+    k_a = parms[9]
+    k_d = parms[10]
+    # Define some other constants:
+    d = d_eva
+    K = k_a/k_d             # equilibrium constant
+    Beta = 1/(1 + K*Conc_3D)
+    Re = D / d_eva**2
+    Rt = D * (Conc_3D / (Beta * Conc_2D))**2
+    Rr = k_a * Conc_3D + k_d
+    # Define even more constants:
+    sqrtR1 = -Rr/(2*nps.sqrt(Rt)) + nps.sqrt( Rr**2/(4*Rt) - Rr )
+    sqrtR2 = -Rr/(2*nps.sqrt(Rt)) - nps.sqrt( Rr**2/(4*Rt) - Rr )
+    R1 = sqrtR1 **2
+    R2 = sqrtR2 **2
+    # And even more more:
+    sqrtR3 = sqrtR1 + nps.sqrt(Re)
+    sqrtR4 = sqrtR2 + nps.sqrt(Re)
+    R3 = sqrtR3 **2
+    R4 = sqrtR4 **2
+    # Calculate return function
+    Sum1 = d * nps.sqrt( Re*tau/np.pi )
+    Sum2 = -d/2*(2*tau*Re -1) * wixi(np.sqrt(tau*Re))
+    Sum3Mult1 = - eta_2D * Conc_2D * k_d / ( eta_3D * Conc_3D * 
+                                            (sqrtR1 - sqrtR2) )
+    S3M2S1M1 = sqrtR1/R3
+    S3M2S1M2S1 = wixi(-nps.sqrt(tau*R1)) - 2*nps.sqrt(tau*R3/np.pi)
+    S3M2S1M2S2 = ( 2*tau*sqrtR1*nps.sqrt(Re) + 2*tau*Re -1 ) * \
+                 wixi(nps.sqrt(tau*Re))
+    S3M2S2M1 = -sqrtR2/R4
+    S3M2S2M2S1 = wixi(-nps.sqrt(tau*R2)) - 2*nps.sqrt(tau*R4/np.pi)
+    S3M2S2M2S2 = ( 2*tau*sqrtR2*nps.sqrt(Re) + 2*tau*Re -1 ) * \
+                 wixi(nps.sqrt(tau*Re))
+    Sum3 = Sum3Mult1 * ( S3M2S1M1 * (S3M2S1M2S1 + S3M2S1M2S2) + 
+                         S3M2S2M1 * (S3M2S2M2S1 + S3M2S2M2S2) )
+    Sum = Sum1 + Sum2 + Sum3
+    # Numerical errors may introduce tiny imaginary parts.
+    # We do not want them.
+    return np.real_if_close(Sum)
+
+
+# 3D-2D binding/unbinding TIRF
+def CF_Gxyz_TIR_square_ubibi(parms, tau, 
+                         gAAz=CF_gz_AA, gACz=CF_gz_AC, gCCz=CF_gz_CC,
+                         gxy=CF_gxy_square):
+    u""" Two-component two- and three-dimensional diffusion
+        with a square-shaped lateral detection area taking into account
+        the size of the point spread function; and an exponential
+        decaying profile in axial direction. This model covers binding
+        and unbinding kinetics.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_3D    Diffusion coefficient of freely diffusing species A
+        [1] D_2D    Diffusion coefficient of bound species C
+        [2] σ       Lateral size of the point spread function
+                    σ = σ₀ * λ / NA
+        [3] a       Side size of the square-shaped detection area 
+        [4] d_eva   Evanescent decay constant
+        [5] C_3D    Concentration of species A in observation volume
+        [6] C_2D    Concentration of species C in observation area
+        [7] η_3D    Molecular brightness of species A
+        [8] η_2D    Molecular brightness of species C
+        [9] k_a     Surface association rate constant
+        [10] k_d    Surface dissociation rate constant
+        *tau* - lag time
+
+        Returns: 3D correlation function for TIR-FCS w/square pinhole and
+                 surface binding/unbinding.
+
+        Model introduced in:
+         Jonas Ries, Eugene P. Petrov, and Petra Schwille
+         Total Internal Reflection Fluorescence Correlation Spectroscopy: 
+         Effects of Lateral Diffusion and Surface-Generated Fluorescence
+         Biophysical Journal, Vol.95, July 2008, 390–399
+    """
+    D_3D = parms[0]
+    D_2D = parms[1]
+    sigma = parms[2]
+    a = parms[3]
+    kappa = 1/parms[4]
+    Conc_3D = parms[5]
+    Conc_2D = parms[6]
+    eta_3D = parms[7]
+    eta_2D = parms[8]
+    k_a = parms[9]
+    k_d = parms[10]
+    ## We now need to compute a real beast:
+    # Inter species non-normalized correlation functions
+    # gAA = gAAz * gxy(D_3D)
+    # gAC = gACz * np.sqrt ( gxy(D_3D) * gxy(D_2D) )
+    # gCC = gCCz * gxy(D_2D)
+    # Nonnormalized correlation function
+    # g = eta_3D * Conc_3D * ( gAA + 2*gAC + gCC )
+    # Expectation value of fluorescence signal
+    # F = eta_3D * Conc_3D / kappa + eta_2D * Conc_2D
+    # Normalized correlation function
+    # G = g / F**2
+    ##
+    # Inter species non-normalized correlation functions
+    # The gijz functions take the same parameters as this function
+    # The gxy function needs two different sets of parameters, depending
+    # on the diffusion constant used.
+    #    [0] D: Diffusion coefficient
+    #    [1] sigma: lateral size of the point spread function
+    #    [3] a: side size of the square pinhole
+    parms_xy_2D = [D_2D, sigma, a]
+    parms_xy_3D = [D_3D, sigma, a]
+    # Here we go.
+    gAA = gAAz(parms, tau) * gxy(parms_xy_3D, tau)
+    gAC = gACz(parms, tau) * nps.sqrt( gxy(parms_xy_3D, tau) *
+                                       gxy(parms_xy_2D, tau) )
+    gCC = gCCz(parms, tau) * gxy(parms_xy_2D, tau)
+    # Nonnormalized correlation function
+    g = eta_3D * Conc_3D * ( gAA + 2*gAC + gCC )
+    # Expectation value of fluorescence signal
+    F = eta_3D * Conc_3D / kappa + eta_2D * Conc_2D
+    # Normalized correlation function
+    G = g / F**2
+    # Numerical errors may introduce tiny imaginary parts.
+    # We do not want them.
+    return G.real
+    #FNEW = eta_3D * Conc_3D / kappa
+    #GNEW = eta_3D * Conc_3D * gCCz(parms, tau) / FNEW**2
+    #return GNEW.real
+
+
+
+
+
+# 3D-2D binding Model TIR
+m_tir_3d_2d_ubib6021 = [6021, u"3D+2D+kin",
+                        "Surface binding and unbinding, 3D TIR",
+                        CF_Gxyz_TIR_square_ubibi]
+labels_6021 = [u"D_3D [10 µm²/s]",
+                u"D_2D [10 µm²/s]",
+                u"σ [100 nm]",
+                "a [100 nm]", 
+                "d_eva [100 nm]", 
+                u"C_3D [1000 /µm³]", 
+                u"C_2D[100 /µm²]", 
+                u"η_3D", 
+                u"η_2D", 
+                u"k_a [µm³/s]", 
+                u"k_d [10³ /s]"
+                ]
+values_6021 = [
+                9.0,      # D_3D [10 µm²/s]
+                0.0,      # D_2D [10 µm²/s]
+                2.3,      # σ [100 nm]
+                7.50,     # a [100 nm]
+                1.0,      # d_eva [100 nm]
+                0.01,     # conc.3D [1000 /µm³]
+                0.03,     # conc.2D [100 /µm²]
+                1,        # η_3D
+                1,        # η_2D
+                0.00001,  # k_a [µm³/s]
+                0.000064  # k_d [10³ /s]
+                ]        
+valuestofit_6021 = [False, True, False, False, False, False, True, False,
+                    False, False, False]
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6021 = [
+                u"D_3D [µm²/s]",
+                u"D_2D [µm²/s]",
+                u"σ [nm]",
+                "a [nm]", 
+                "d_eva [nm]", 
+                u"C_3D [1/µm³]", 
+                u"C_2D [1/µm²]", 
+                u"η_3D", 
+                u"η_2D", 
+                u"k_a [µm³/s]", 
+                "k_d [1/s]"
+                ]
+values_factor_human_readable_6021 = [10, # "D_3D [µm²/s]",
+                10,     # D_2D [10 µm²/s]
+                100,    # σ [100 nm]
+                100,    # a [100 nm]
+                100,    # d_eva [100 nm]
+                1000,   # conc.3D [1000 /µm³]
+                100,    # conc.2D [100 /µm²]
+                1,      # η_3D
+                1,      # η_2D
+                1,      # k_a [µm³/s]
+                1000   # k_d [10³ /s]
+                ]
+parms_6021 = [labels_6021, values_6021, valuestofit_6021,
+              labels_human_readable_6021, values_factor_human_readable_6021]
+
+
+model1 = dict()
+model1["Parameters"] = parms_6021
+model1["Definitions"] = m_tir_3d_2d_ubib6021
+model1["Verification"] = lambda parms: np.abs(parms)
+
+
+Modelarray = [model1]
diff --git a/src/models/MODEL_TIRF_3D3D.py b/src/models/MODEL_TIRF_3D3D.py
new file mode 100755
index 0000000..431bcd0
--- /dev/null
+++ b/src/models/MODEL_TIRF_3D3D.py
@@ -0,0 +1,144 @@
+# -*- coding: utf-8 -*-
+"""  This file contains 3D+3D TIR-FCS models.
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)
+
+
+# 3D + 3D no binding TIRF
+def CF_Gxyz_TIR_square_3d3d(parms, tau, wixi=wixi):
+    u""" Two-component three-dimensional free diffusion
+        with a square-shaped lateral detection area taking into account
+        the size of the point spread function; and an exponential
+        decaying profile in axial direction.
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D_3D1  3D Diffusion coefficient (species 1)
+        [1] D_3D2  3D Diffusion coefficient (species 2)
+        [2] σ      Lateral size of the point spread function
+                   σ = σ₀ * λ / NA
+        [3] a      Side size of the square-shaped detection area
+        [4] d_eva  Evanescent penetration depth
+        [5] C_3D1  Concentration of species 1
+        [6] C_3D2  Concentration of species 2
+        [7] α      Relative molecular brightness of particle
+                   2 compared to particle 1 (α = q₂/q₁)
+        *tau* - lag time
+    """
+    D_3D1 = parms[0]
+    D_3D2 = parms[1]
+    sigma = parms[2]
+    a = parms[3]
+    kappa = 1/parms[4]
+    Conc_3D1 = parms[5]
+    Conc_3D2 = parms[6]
+    alpha = parms[7]
+
+    ## First, the 3D diffusion of species 1
+    # Axial correlation    
+    x1 = np.sqrt(D_3D1*tau)*kappa
+    w_ix1 = wixi(x1)
+    gz1 = np.sqrt(D_3D1*tau/np.pi) - (2*D_3D1*tau*kappa**2 - 1)/(2*kappa) * \
+          w_ix1
+    # Lateral correlation
+    gx1_1 = 2/(a**2*np.sqrt(np.pi)) * np.sqrt(sigma**2+D_3D1*tau) * \
+            ( np.exp(-a**2/(4*(sigma**2+D_3D1*tau))) -1 )
+    gx2_1 = 1/a * sps.erf( a / (2*np.sqrt(sigma**2 + D_3D1*tau))) 
+    gx1 =  gx1_1 + gx2_1
+    gxy1 = gx1**2
+    # Non normalized correlation function
+    g3D1 = Conc_3D1 * gxy1 * gz1
+
+    ## Second, the 3D diffusion of species 2
+    # Axial correlation    
+    x2 = np.sqrt(D_3D2*tau)*kappa
+    w_ix2 = wixi(x2)
+    gz2 = np.sqrt(D_3D2*tau/np.pi) - (2*D_3D2*tau*kappa**2 - 1)/(2*kappa) * \
+          w_ix2
+    # Lateral correlation
+    gx1_2 = 2/(a**2*np.sqrt(np.pi)) * np.sqrt(sigma**2+D_3D2*tau) * \
+            ( np.exp(-a**2/(4*(sigma**2+D_3D2*tau))) -1 )
+    gx2_2 = 1/a * sps.erf( a / (2*np.sqrt(sigma**2 + D_3D2*tau))) 
+    gx2 =  gx1_2 + gx2_2
+    gxy2 = gx2**2
+    # Non normalized correlation function
+    g3D2 = alpha**2 * Conc_3D2 * gxy2 * gz2
+
+    ## Finally the Prefactor
+    F = (Conc_3D1 + alpha * Conc_3D2) / kappa
+    G = (g3D1 + g3D2) / F**2
+    return G
+
+
+# 3D-3D Model TIR
+m_tir_3d_3d_mix_6023 = [6023, u"3D+3D",
+                        "Separate 3D diffusion, 3D TIR",
+                        CF_Gxyz_TIR_square_3d3d]
+labels_6023 = ["D"+u"\u2081"+u" [10 µm²/s]",
+               "D"+u"\u2082"+u" [10 µm²/s]",
+               u"σ [100 nm]",
+               "a [100 nm]", 
+               "d_eva [100 nm]", 
+               "C"+u"\u2081"+u" [1000 /µm³]", 
+               "C"+u"\u2082"+u" [1000 /µm³]", 
+               u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")"
+                ]
+values_6023 = [
+                9.0,     # D_3D₁ [10 µm²/s]
+                0.5,    # D_3D₂ [10 µm²/s]
+                2.3,     # σ [100 nm]
+                7.50,    # a [100 nm]
+                1.0,     # d_eva [100 nm]
+                0.01,    # conc.3D₁ [1000 /µm³]
+                0.03,    # conc.3D₂ [1000 /µm³]
+                1       # alpha
+                ]        
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6023 = ["D"+u"\u2081"+u" [µm²/s]",
+                "D"+u"\u2082"+u" [µm²/s]",
+                u"σ [nm]",
+                "a [nm]", 
+                "d_eva [nm]", 
+                "C"+u"\u2081"+u" [1/µm³]", 
+                "C"+u"\u2082"+u" [1/µm³]", 
+                u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")"
+                ]
+values_factor_human_readable_6023 = [10, # "D_3D₁ [µm²/s]",
+                10,     # D_3D₂ [10 µm²/s]
+                100,    # σ [100 nm]
+                100,    # a [100 nm]
+                100,    # d_eva [100 nm]
+                1000,   # conc.3D₁ [1000 /µm³]
+                1000,   # conc.3D₂ [1000 /µm³]
+                1       # alpha
+                ]
+valuestofit_6023 = [False, True, False, False, False, False, True, False]
+parms_6023 = [labels_6023, values_6023, valuestofit_6023, 
+              labels_human_readable_6023, values_factor_human_readable_6023]
+
+
+model1 = dict()
+model1["Parameters"] = parms_6023
+model1["Definitions"] = m_tir_3d_3d_mix_6023
+model1["Verification"] = lambda parms: np.abs(parms)
+
+
+Modelarray = [model1]
diff --git a/src/models/MODEL_TIRF_gaussian_1C.py b/src/models/MODEL_TIRF_gaussian_1C.py
new file mode 100755
index 0000000..e9b2404
--- /dev/null
+++ b/src/models/MODEL_TIRF_gaussian_1C.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+""" This file contains TIR one component models + Triplet
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)
+
+def CF_Gxyz_TIR_gauss(parms, tau):
+    u""" Three-dimensional free diffusion with a Gaussian lateral 
+        detection profile and an exponentially decaying profile
+        in axial direction.
+
+        x = sqrt(D*τ)*κ
+        κ = 1/d_eva
+        w(i*x) = exp(x²)*erfc(x)
+        gz = κ² * [ sqrt(D*τ/π) + (1 - 2*D*τ*κ²)/(2*κ) * w(i*x) ]
+        g2D = 1 / [ π (r₀² + 4*D*τ) ]
+        G = 1/C_3D * g2D * gz
+
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] D      Diffusion coefficient
+        [1] r₀     Lateral extent of the detection volume
+        [2] d_eva  Evanescent field depth
+        [3] C_3D   Particle concentration in the confocal volume
+        *tau* - lag time
+
+        Returns: Normalized 3D correlation function for TIRF.
+    """
+    D = parms[0]
+    r0 = parms[1]
+    deva = parms[2]
+    Conc = parms[3]
+
+    # Calculate sigma: width of the gaussian approximation of the PSF
+    Veff = np.pi * r0**2 * deva
+    Neff = Conc * Veff
+
+    taudiff = r0**2/(4*D)
+    # 2D gauss component
+    # G2D = 1/N2D * g2D = 1/(Aeff*Conc.2D) * g2D
+    g2D = 1 / ( (1.+tau/taudiff) )
+
+    # 1d TIR component
+    # Axial correlation    
+    kappa = 1/deva
+    x = np.sqrt(D*tau)*kappa
+    w_ix = wixi(x)
+
+    # Gz = 1/N1D * gz = kappa / Conc.1D * gz
+    gz = kappa * (np.sqrt(D*tau/np.pi) - (2*D*tau*kappa**2 - 1)/(2*kappa) * w_ix)
+
+    # gz * g2D * 1/( deva *A2D) * 1 / Conc3D
+
+    # Neff is not the actual particle number. This formula just looks nicer
+    # this way.
+    # What would be easier to get is:
+    # 1 / (Conc * deva * np.pi * r0) * gz * g2D
+
+    return 1 / (Neff) * g2D * gz
+
+
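+# Editor's sketch (not part of the upstream code): at tau = 0 the axial part
+# gz reduces to 1/2 (because wixi(0) == 1) and g2D to 1, so the amplitude of
+# this model is G(0) = 1/(2*N_eff):
+#
+#     G0 = CF_Gxyz_TIR_gauss([2.5420, 9.44, 1.0, 0.03011], 0)
+#     # G0 == 1.0 / (2 * np.pi * 9.44**2 * 1.0 * 0.03011)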
+
+def MoreInfo_6013(parms, countrate):
+    u"""Supplementary variables:
+        Beware that the effective volume is chosen arbitrarily.
+        Correlation function at lag time τ=0:
+        [4] G(τ=0)
+        Effective detection volume:         
+        [5] V_eff  = π * r₀² * d_eva
+        Effective particle concentration:
+        [6] C_3D [nM] = C_3D [1000/µm³] * 10000/6.0221415
+    """
+    D = parms[0]
+    r0 = parms[1]
+    deva = parms[2]
+    Conc = parms[3]
+    Info=list()
+    # Detection area:
+    Veff = np.pi * r0**2 * deva
+    Neff = Conc * Veff
+    # Correlation function at tau = 0
+    G_0 = CF_Gxyz_TIR_gauss(parms, 0)
+    Info.append(["G(0)", G_0])
+    Info.append(["V_eff [al]", Veff])
+    Info.append(["C_3D [nM]", Conc * 10000/6.0221415])
+    if countrate is not None:
+        # CPP
+        cpp = countrate/Neff
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+# 3D Model TIR gaussian
+m_3dtirsq6013 = [6013, "3D", "Simple 3D diffusion w/ TIR", CF_Gxyz_TIR_gauss]
+labels_6013 = ["D [10 µm²/s]", u"r₀ [100 nm]", "d_eva [100 nm]", "C_3D [1000/µm³]"]
+values_6013 = [2.5420, 9.44, 1.0, .03011]
+# For user comfort we add values that are human readable.
+# These will be used for human-readable output.
+labels_human_readable_6013 = ["D [µm²/s]", u"r₀ [nm]", "d_eva [nm]", "C_3D [1/µm³]"]
+values_factor_human_readable_6013 = [10, 100, 100, 1000]
+valuestofit_6013 = [True, False, False, True]
+parms_6013 = [labels_6013, values_6013, valuestofit_6013,
+              labels_human_readable_6013, values_factor_human_readable_6013]
+
+
+
+# Pack the models
+model1 = dict()
+model1["Parameters"] = parms_6013
+model1["Definitions"] = m_3dtirsq6013
+model1["Supplements"] = MoreInfo_6013
+model1["Verification"] = lambda parms: np.abs(parms)
+
+
+Modelarray = [model1]
diff --git a/src/models/MODEL_TIRF_gaussian_3D2D.py b/src/models/MODEL_TIRF_gaussian_3D2D.py
new file mode 100755
index 0000000..2a002d0
--- /dev/null
+++ b/src/models/MODEL_TIRF_gaussian_3D2D.py
@@ -0,0 +1,242 @@
+# -*- coding: utf-8 -*-
+"""  This file contains a 3D+2D+T TIR FCS model.
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We use the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        z = i*x
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)
+
+# 3D + 2D + T
+def CF_Gxyz_3d2dT_gauss(parms, tau):
+    """ Two-component, two- and three-dimensional diffusion
+        with a Gaussian lateral detection profile and
+        an exponentially decaying profile in axial direction,
+        including a triplet component.
+        The triplet factor takes into account blinking according to triplet
+        states of excited molecules.
+        Set *T* or *τ_trip* to 0 if no triplet component is wanted.
+
+        kappa = 1/d_eva
+        x = sqrt(D_3D*τ)*kappa
+        w(i*x) = exp(x²)*erfc(x)
+        gz = kappa * 
+             [ sqrt(D_3D*τ/pi) + (1 - 2*D_3D*τ*kappa²)/(2*kappa) * w(i*x) ]
+        g2D3D = 1 / [ 1+4*D_3D*τ/r₀² ]
+        particle3D = α*F * g2D3D * gz
+        particle2D = (1-F)/ (1+4*D_2D*τ/r₀²) 
+        triplet = 1 + T/(1-T)*exp(-τ/τ_trip)
+        norm = (1-F + α*F)²
+        G = 1/n*(particle2D + particle3D)*triplet/norm + offset
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal volume
+                    (n = n2D+n3D)
+        [1] D_2D    Diffusion coefficient  of surface bound particles
+        [2] D_3D    Diffusion coefficient of freely diffusing particles
+        [3] F       Fraction of molecules of the freely diffusing species
+                    (n3D = n*F), 0 <= F <= 1
+        [4] r₀      Lateral extent of the detection volume
+        [5] d_eva   Evanescent field depth
+        [6] α       Relative molecular brightness of freely diffusing
+                    compared to surface bound particles (α = q3D/q2D)
+        [7] τ_trip  Characteristic residence time in triplet state
+        [8] T       Fraction of particles in triplet (non-fluorescent) state
+                    0 <= T < 1
+        [9] offset
+        *tau* - lag time
+    """
+    n=parms[0]
+    D2D=parms[1]
+    D3D=parms[2]
+    F=parms[3]
+    r0=parms[4]
+    deva=parms[5]
+    alpha=parms[6]
+    tautrip=parms[7]
+    T=parms[8]
+    off=parms[9]
+
+    ### 2D species
+    taud2D = r0**2/(4*D2D)
+    particle2D = (1-F)/ (1+tau/taud2D) 
+
+    ### 3D species
+    taud3D = r0**2/(4*D3D)
+    # 2D gauss component
+    g2D3D = 1 / ( (1.+tau/taud3D) )
+    # 1d TIR component
+    # Axial correlation    
+    kappa = 1/deva
+    x = np.sqrt(D3D*tau)*kappa
+    w_ix = wixi(x)
+    # Gz = 1/N1D * gz = kappa / Conc.1D * gz
+    gz = kappa * (np.sqrt(D3D*tau/np.pi) - 
+         (2*D3D*tau*kappa**2 - 1)/(2*kappa) * w_ix)
+    particle3D = alpha**2*F * g2D3D * gz
+
+    ### triplet
+    triplet = 1 + T/(1-T)*np.exp(-tau/tautrip)
+
+    ### Norm
+    norm = (1-F + alpha*F)**2
+
+    ### Correlation function
+    G = 1/n*(particle2D + particle3D)*triplet/norm
+    return G + off
+
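+# Editor's sketch (not part of the upstream code): the triplet factor
+# 1 + T/(1-T)*exp(-tau/tau_trip) starts at 1/(1-T) for tau = 0 and decays
+# to 1 for tau >> tau_trip, so T = 0 switches the triplet term off:
+#
+#     T, tautrip = 0.01, 0.001
+#     1 + T/(1-T)*np.exp(-0.0/tautrip)    # == 1/(1-T), about 1.0101
+#     1 + T/(1-T)*np.exp(-1.0/tautrip)    # about 1.0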
+
+def Checkme(parms):
+    """ Coerce the parameter guess into the valid domain of the model. """
+    parms[0] = np.abs(parms[0])
+    parms[1] = D2D = np.abs(parms[1])
+    parms[2] = D3D = np.abs(parms[2])
+    F=parms[3]
+    parms[4] = r0 = np.abs(parms[4])
+    parms[5]=np.abs(parms[5])
+    parms[6]=np.abs(parms[6])
+    tautrip=np.abs(parms[7])
+    T=parms[8]
+    off=parms[9]
+
+    taud2D = r0**2/(4*D2D)
+    taud3D = r0**2/(4*D3D)
+    # We are not doing this anymore (Issue #2).
+    ## Force triplet component to be smaller than diffusion times
+    ## tautrip = min(tautrip,taud2D*0.9, taud3D*0.9)
+    
+    # Triplet fraction is between 0 and one. T may not be one!
+    T = (0.<=T<1.)*T + .99999999999999*(T>=1)
+    # Fraction of molecules may also be one
+    F = (0.<=F<=1.)*F + 1.*(F>1)
+
+    parms[3] = F
+    parms[7] = tautrip
+    parms[8] = T
+
+    return parms
+
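+# Editor's sketch (not part of the upstream code): Checkme coerces
+# out-of-range guesses back into the model's domain; negative coefficients
+# are made positive and a triplet fraction T >= 1 is clamped just below 1:
+#
+#     p = Checkme([25, -0.51, 25.1, 1.3, 9.44, 1.0, 1.0, -0.001, 1.2, 0.0])
+#     # p[1] == 0.51, p[3] == 1.0, p[7] == 0.001, p[8] == 0.99999999999999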
+
+def MoreInfo(parms, countrate):
+    u"""Supplementary parameters:
+        Effective number of freely diffusing particles in 3D:
+        [10] n3D = n*F
+        Effective number particles diffusing on 2D surface:
+        [11] n2D = n*(1-F)
+        Value of the correlation function at lag time zero:
+        [12] G(0)
+        Effective measurement volume:
+        [13] V_eff [al] = π * r₀² * d_eva
+        Concentration of the 2D species:
+        [14] C_2D [1/µm²] = n2D / ( π * r₀² )
+        Concentration of the 3D species:
+        [15] C_3D [nM] = n3D/V_eff
+    """
+    # We can only give you the effective particle number
+    n=parms[0]
+    D2D=parms[1]
+    D3D=parms[2]
+    F=parms[3]
+    r0=parms[4]
+    deva=parms[5]
+    alpha=parms[6]
+
+    Info=list()
+    # The enumeration of these parameters is very important for
+    # plotting the normalized curve. Countrate must come out last!
+    Info.append([u"n3D", n*F])
+    Info.append([u"n2D", n*(1.-F)])
+    # Detection area:
+    Veff = np.pi * r0**2 * deva
+    C3D = n*F / Veff
+    C2D = n*(1-F) / ( np.pi*r0**2 )
+    # Correlation function at tau = 0
+    G_0 = CF_Gxyz_3d2dT_gauss(parms, 0)
+    Info.append(["G(0)", G_0])
+    Info.append(["V_eff [al]", Veff])
+    Info.append(["C_2D [1/µm²]", C2D * 100])
+    Info.append(["C_3D [nM]", C3D * 10000/6.0221415])
+
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+# 3D + 2D + T model gauss
+m_gauss_3d_2d_t = [6033, "T+3D+2D",
+                            "Separate 3D and 2D diffusion + triplet w/ TIR",
+                            CF_Gxyz_3d2dT_gauss]
+labels  = ["n",
+                u"D_2D [10 µm²/s]",
+                u"D_3D [10 µm²/s]",
+                "F_3D", 
+                u"r₀ [100 nm]",
+                "d_eva [100 nm]",
+                u"\u03b1"+" (q_3D/q_2D)", 
+                u"τ_trip [ms]",
+                "T",
+                "offset"
+                ]
+values = [ 
+                25,      # n
+                0.51,       # D2D
+                25.1,    # D3D
+                0.45,     # F3D
+                9.44,       # r0
+                1.0,       # deva
+                1.0,     # alpha
+                0.001,       # tautrip
+                0.01,       # T
+                0.0      # offset
+                ]  
+# Human readable stuff
+labelshr  = ["n",
+                u"D_2D [µm²/s]",
+                u"D_3D [µm²/s]",
+                "F_3D", 
+                u"r₀ [nm]",
+                "d_eva [nm]",
+                u"\u03b1"+" (q_3D/q_2D)", 
+                u"τ_trip [µs]",
+                "T",
+                "offset"
+                ] 
+valueshr = [ 
+                1.,      # n
+                10.,       # D2D
+                10.,    # D3D
+                1.,     # F3D
+                100.,       # r0
+                100.,       # deva
+                1.,     # alpha
+                1000.,       # tautrip
+                1.,       # T
+                1.      # offset
+                ]   
+    
+valuestofit = [True, True, True, True, False, False, False, False, False, False]
+parms = [labels, values, valuestofit, labelshr, valueshr]
+
+
+model1 = dict()
+model1["Parameters"] = parms
+model1["Definitions"] = m_gauss_3d_2d_t
+model1["Verification"] = Checkme
+model1["Supplements"] = MoreInfo
+
+Modelarray = [model1]
diff --git a/src/models/MODEL_TIRF_gaussian_3D3D.py b/src/models/MODEL_TIRF_gaussian_3D3D.py
new file mode 100755
index 0000000..89ec550
--- /dev/null
+++ b/src/models/MODEL_TIRF_gaussian_3D3D.py
@@ -0,0 +1,259 @@
+# -*- coding: utf-8 -*-
+"""  This file contains a 3D+3D+T TIR FCS model.
+"""
+import numpy as np                  # NumPy
+import scipy.special as sps
+
+
+def wixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We are using the function scipy.special.wofz, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        with z = i*x.
+    """
+    z = x*1j
+    wixi = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    
+    return np.real_if_close(wixi)  
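+
+# Quick self-check sketch (illustrative, not used by the fit routines):
+# for real x, wixi(x) should match the direct form exp(x**2)*erfc(x).
+def _wixi_selfcheck(x=0.5):
+    direct = np.exp(x**2) * sps.erfc(x)
+    assert np.allclose(wixi(x), direct)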
+
+# 3D + 3D + T
+def CF_Gxyz_3D3DT_gauss(parms, tau):
+    u""" Two-component three-dimensional diffusion with a Gaussian
+        lateral detection profile and an exponentially decaying profile
+        in axial direction, including a triplet component.
+        The triplet factor takes into account blinking according to triplet
+        states of excited molecules.
+        Set *T* or *τ_trip* to 0, if no triplet component is wanted.
+
+        w(i*x) = exp(x²)*erfc(x)
+        taud1 = r₀²/(4*D₁)
+        κ = 1/d_eva
+        x1 = sqrt(D₁*τ)*κ
+        gz1 = κ * [ sqrt(D₁*τ/π) + (1 - 2*D₁*τ*κ²)/(2*κ) * w(i*x1) ]
+        g2D1 = 1 / [ 1+τ/taud1 ]
+        particle1 = F₁ * g2D1 * gz1
+
+        taud2 = r₀²/(4*D₂)
+        x2 = sqrt(D₂*τ)*κ
+        gz2 = κ * [ sqrt(D₂*τ/π) + (1 - 2*D₂*τ*κ²)/(2*κ) * w(i*x2) ]
+        g2D2 = 1 / [ 1+τ/taud2 ]
+        particle2 = α²*(1-F₁) * g2D2 * gz2
+
+
+        triplet = 1 + T/(1-T)*exp(-τ/τ_trip)
+        norm = (F₁ + α*(1-F₁))²
+        G = 1/n*(particle1 + particle2)*triplet/norm + offset
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal volume
+                    (n = n₁+n₂)
+        [1] D₁      Diffusion coefficient of species 1
+        [2] D₂      Diffusion coefficient of species 2
+        [3] F₁      Fraction of molecules of species 1 (n₁ = n*F₁)
+                    0 <= F₁ <= 1
+        [4] r₀      Lateral extent of the detection volume
+        [5] d_eva   Evanescent field depth
+        [6] α       Relative molecular brightness of particle
+                    2 compared to particle 1 (α = q₂/q₁)
+        [7] τ_trip  Characteristic residence time in triplet state
+        [8] T       Fraction of particles in triplet (non-fluorescent) state
+                    0 <= T < 1
+        [9] offset
+        *tau* - lag time
+    """
+    n=parms[0]
+    D1=parms[1]
+    D2=parms[2]
+    F=parms[3]
+    r0=parms[4]
+    deva=parms[5]
+    alpha=parms[6]
+    tautrip=parms[7]
+    T=parms[8]
+    off=parms[9]
+
+    kappa = 1/deva
+
+
+    ### 1st species
+    tauD1 = r0**2/(4*D1)
+    # 2D gauss component
+    g2D1 = 1 / ( (1.+tau/tauD1) )
+    # 1d TIR component
+    # Axial correlation    
+    x1 = np.sqrt(D1*tau)*kappa
+    w_ix1 = wixi(x1)
+    # Gz = 1/N1D * gz = kappa / Conc.1D * gz
+    gz1 = kappa * (np.sqrt(D1*tau/np.pi) -
+                   (2*D1*tau*kappa**2 - 1)/(2*kappa) * w_ix1)
+    particle1 = F * g2D1 * gz1
+
+    ### 2nd species
+    tauD2 = r0**2/(4*D2)
+    # 2D gauss component
+    g2D2 = 1 / ( (1.+tau/tauD2) )
+    # 1d TIR component
+    # Axial correlation    
+    x2 = np.sqrt(D2*tau)*kappa
+    w_ix2 = wixi(x2)
+    # Gz = 1/N1D * gz = kappa / Conc.1D * gz
+    gz2 = kappa * (np.sqrt(D2*tau/np.pi) -
+                   (2*D2*tau*kappa**2 - 1)/(2*kappa) * w_ix2)
+    particle2 = alpha**2*(1-F) * g2D2 * gz2
+
+    ### triplet
+    triplet = 1 + T/(1-T)*np.exp(-tau/tautrip)
+
+    ### Norm
+    norm = (F + alpha*(1-F))**2
+
+    ### Correlation function
+    G = 1/n*(particle1 + particle2)*triplet/norm
+    return G + off
+
+
+def Checkme(parms):
+    parms[0] = np.abs(parms[0])
+    parms[1] = D1 = np.abs(parms[1])
+    parms[2] = D2 = np.abs(parms[2])
+    F=parms[3]
+    parms[4] = r0 = np.abs(parms[4])
+    parms[5]=np.abs(parms[5])
+    parms[6]=np.abs(parms[6])
+    tautrip=np.abs(parms[7])
+    T=parms[8]
+    off=parms[9]
+
+    # REMOVED (issue #2)
+    ## Force triplet component to be smaller than diffusion times
+    #tauD2 = r0**2/(4*D2)
+    #tauD1 = r0**2/(4*D1)
+    #tautrip = min(tautrip,tauD2*0.9, tauD1*0.9)
+
+    # Triplet fraction is between 0 and one. T may not be one!
+    T = (0.<=T<1.)*T + .99999999999999*(T>=1)
+    # Fraction of molecules may also be one
+    F = (0.<=F<=1.)*F + 1.*(F>1)
+
+    parms[3] = F
+    parms[7] = tautrip
+    parms[8] = T
+
+    return parms
+
+
+def MoreInfo(parms, countrate):
+    u"""Supplementary parameters:
+        Effective number of particle species 1:
+        [10] n₁ = n*F₁
+        Effective number of particle species 2:
+        [11] n₂ = n*(1-F₁)
+        Value of the correlation function at lag time zero:
+        [12] G(0)
+        Effective measurement volume:
+        [13] V_eff [al] = π * r₀² * d_eva
+        Concentration of particle species 1:
+        [14] C₁ [nM] = n₁/V_eff
+        Concentration of particle species 2:
+        [15] C₂ [nM] = n₂/V_eff
+    """
+    # We can only give you the effective particle number
+    n=parms[0]
+    D1=parms[1]
+    D2=parms[2]
+    F=parms[3]
+    r0=parms[4]
+    deva=parms[5]
+    alpha=parms[6]
+
+    Info=list()
+    # The enumeration of these parameters is very important for
+    # plotting the normalized curve. Countrate must come out last!
+    Info.append([u"n\u2081", n*F])
+    Info.append([u"n\u2082", n*(1.-F)])
+    # Effective detection volume:
+    Veff = np.pi * r0**2 * deva
+    C1 = n*F / Veff
+    C2 = n*(1-F) / Veff
+    # Correlation function at tau = 0
+    G_0 = CF_Gxyz_3D3DT_gauss(parms, 0)
+    Info.append(["G(0)", G_0])
+    Info.append(["V_eff [al]", Veff])
+    Info.append(["C"+u"\u2081"+" [nM]", C1 * 10000/6.0221415])
+    Info.append(["C"+u"\u2082"+" [nM]", C2 * 10000/6.0221415])
+
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+# 3D + 3D + T model gauss
+m_gauss_3d_3d_t = [6034, "T+3D+3D",
+                            "Combined 3D diffusion + triplet w/ TIR",
+                            CF_Gxyz_3D3DT_gauss]
+labels  = ["n",
+                "D"+u"\u2081"+" [10 µm²/s]",
+                "D"+u"\u2082"+" [10 µm²/s]",
+                "F"+u"\u2081", 
+                "r₀ [100 nm]",
+                "d_eva [100 nm]",
+                u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                u"τ_trip [ms]",
+                "T",
+                "offset"
+                ]
+values = [ 
+                25,      # n
+                25.,       # D1
+                0.9,    # D2
+                0.45,     # F1
+                9.44,       # r0
+                1.0,       # deva
+                1.0,     # alpha
+                0.001,       # tautrip
+                0.01,       # T
+                0.0      # offset
+                ]    
+# Human readable stuff
+labelshr  = ["n",
+                "D"+u"\u2081"+u" [µm²/s]",
+                "D"+u"\u2082"+u" [µm²/s]",
+                "F"+u"\u2081", 
+                "r₀ [nm]",
+                "d_eva [nm]",
+                u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                u"τ_trip [µs]",
+                "T",
+                "offset"
+                ]
+valueshr = [ 
+                1.,      # n
+                10.,       # D1
+                10.,    # D2
+                1.,     # F1
+                100.,       # r0
+                100.,       # deva
+                1.,     # alpha
+                1000.,       # tautrip
+                1.,       # T
+                1.      # offset
+                ]   
+    
+valuestofit = [True, True, True, True, False, False, False, False, False, False]
+parms = [labels, values, valuestofit, labelshr, valueshr]
+
+
+model1 = dict()
+model1["Parameters"] = parms
+model1["Definitions"] = m_gauss_3d_3d_t
+model1["Verification"] = Checkme
+model1["Supplements"] = MoreInfo
+
+Modelarray = [model1]
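+
+# Minimal evaluation sketch (illustrative): compute the model curve for
+# the default parameter set over a log-spaced range of lag times [ms].
+if __name__ == "__main__":
+    taus = np.logspace(-6, 2, 100)
+    curve = CF_Gxyz_3D3DT_gauss(values, taus)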
diff --git a/src/models/MODEL_classic_gaussian_2D.py b/src/models/MODEL_classic_gaussian_2D.py
new file mode 100755
index 0000000..bbe1e9a
--- /dev/null
+++ b/src/models/MODEL_classic_gaussian_2D.py
@@ -0,0 +1,297 @@
+# -*- coding: utf-8 -*-
+""" This file contains some simple 2D models for confocal microscopy
+"""
+
+import numpy as np                  # NumPy
+
+
+# 2D simple gauss
+def CF_Gxy_gauss(parms, tau):
+    u""" Two-dimensional diffusion with a Gaussian laser profile.
+
+        G(τ) = offset + 1/( n * (1+τ/τ_diff) )
+    
+        Calculation of diffusion coefficient and concentration
+        from the effective radius of the detection profile (r₀ = 2*σ):
+        D = r₀²/(4*τ_diff)
+        Conc = n/(π*r₀²)
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal area
+        [1] τ_diff  Characteristic residence time in confocal area
+        [2] offset
+        *tau* - lag time
+    """
+    n = parms[0]
+    taudiff = parms[1]
+    dc = parms[2]
+
+    BB = 1 / ( (1.+tau/taudiff) )
+    G = dc + 1/n * BB
+    return G
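+
+# Worked example (sketch), using the internal units of this module
+# (lengths in 100 nm, times in ms): r₀ = 2 (i.e. 200 nm) and
+# τ_diff = 0.4 ms give
+#   D    = r₀²/(4*τ_diff) = 2.5  (i.e. 25 µm²/s)
+#   Conc = n/(π*r₀²)             (particles per (100 nm)²)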
+
+def Check_xy_gauss(parms):
+    parms[0] = np.abs(parms[0])
+    parms[1] = np.abs(parms[1])
+
+    return parms
+
+
+# 2D simple gauss
+def CF_Gxy_T_gauss(parms, tau):
+    u""" Two-dimensional diffusion with a Gaussian laser profile,
+        including a triplet component.
+        The triplet factor takes into account a blinking term.
+        Set *T* or *τ_trip* to 0, if no triplet component is wanted.
+
+        triplet = 1 + T/(1-T)*exp(-τ/τ_trip)
+
+        G(τ) = offset + 1/( n * (1+τ/τ_diff) )*triplet
+    
+        Calculation of diffusion coefficient and concentration
+        from the effective radius of the detection profile (r₀ = 2*σ):
+        D = r₀²/(4*τ_diff)
+        Conc = n/(π*r₀²)
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal area
+        [1] τ_diff  Characteristic residence time in confocal area
+        [2] τ_trip  Characteristic residence time in triplet state
+        [3] T       Fraction of particles in triplet (non-fluorescent) state
+                    0 <= T < 1
+        [4] offset
+        *tau* - lag time
+    """
+    n = parms[0]
+    taudiff = parms[1]
+    tautrip = parms[2]
+    T = parms[3]
+    dc = parms[4]
+
+    triplet = 1 + T/(1-T)*np.exp(-tau/tautrip)
+
+    BB = 1 / ( (1.+tau/taudiff) )
+    G = dc + 1/n * BB * triplet
+    return G
+
+
+
+def Check_xy_T_gauss(parms):
+    parms[0] = np.abs(parms[0])
+    taudiff = parms[1] = np.abs(parms[1])
+    tautrip = np.abs(parms[2])
+    T=parms[3]
+    
+    # REMOVED (Issue #2)
+    ## Force triplet component to be smaller than diffusion times
+    #tautrip = min(tautrip, taudiff*0.9)
+
+    # Triplet fraction is between 0 and one. T may not be one!
+    T = (0.<=T<1.)*T + .99999999999999*(T>=1)
+
+    parms[2] = tautrip
+    parms[3] = T
+    return parms
+
+
+
+# 2D + 2D + Triplet Gauß
+# Model 6031
+def CF_Gxyz_gauss_2D2DT(parms, tau):
+    u""" Two-component, two-dimensional diffusion with a Gaussian laser
+        profile, including a triplet component.
+        The triplet factor takes into account blinking according to triplet
+        states of excited molecules.
+        Set *T* or *τ_trip* to 0, if no triplet component is wanted.
+
+        particle1 = F₁/(1+τ/τ₁)
+        particle2 = α²*(1-F₁)/(1+τ/τ₂)
+        triplet = 1 + T/(1-T)*exp(-τ/τ_trip)
+        norm = (F₁ + α*(1-F₁))²
+        G = 1/n*(particle1 + particle2)*triplet/norm + offset
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal area
+                    (n = n₁+n₂)
+        [1] τ₁      Diffusion time of particle species 1
+        [2] τ₂      Diffusion time of particle species 2
+        [3] F₁      Fraction of molecules of species 1 (n₁ = n*F₁)
+                    0 <= F₁ <= 1
+        [4] α       Relative molecular brightness of particle 2
+                    compared to particle 1 (α = q₂/q₁)
+        [5] τ_trip  Characteristic residence time in triplet state
+        [6] T       Fraction of particles in triplet (non-fluorescent)
+                    state 0 <= T < 1
+        [7] offset
+        *tau* - lag time
+    """
+    n=parms[0]
+    taud1=parms[1]
+    taud2=parms[2]
+    F=parms[3]
+    alpha=parms[4]
+    tautrip=parms[5]
+    T=parms[6]
+    off=parms[7]
+
+    particle1 = F/( 1+tau/taud1 )
+    particle2 = alpha**2*(1-F)/( 1+tau/taud2 )
+    # If the triplet fraction T is zero, the triplet factor
+    # reduces to one.
+    triplet = 1 + T/(1-T)*np.exp(-tau/tautrip)
+    # For alpha == 1, *norm* becomes one
+    norm = (F + alpha*(1-F))**2
+
+    G = 1/n*(particle1 + particle2)*triplet/norm + off
+    return G
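+
+# Consistency sketch (illustrative): for equal brightness (α = 1), no
+# triplet (T = 0) and zero offset, the amplitude reduces to G(0) = 1/n.
+def _amplitude_check_6031(n=25.0):
+    parms = [n, 5.0, 1000.0, 0.75, 1.0, 1.0, 0.0, 0.0]
+    assert np.allclose(CF_Gxyz_gauss_2D2DT(parms, 0.0), 1.0/n)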
+
+def Check_6031(parms):
+    parms[0] = np.abs(parms[0])
+    parms[1] = taud1 = np.abs(parms[1])
+    parms[2] = taud2 = np.abs(parms[2])
+    F=parms[3]
+    parms[4] = np.abs(parms[4])
+    tautrip = np.abs(parms[5])
+    T=parms[6]
+    off=parms[7]
+    
+    ## REMOVED (Issue #2)
+    ## Force triplet component to be smaller than diffusion times
+    #tautrip = min(tautrip, taud1*0.9, taud2*0.9)
+
+    # Triplet fraction is between 0 and one. T may not be one!
+    T = (0.<=T<1.)*T + .99999999999999*(T>=1)
+    # Fraction of molecules may also be one
+    F = (0.<=F<=1.)*F + 1.*(F>1)
+
+    parms[3] = F
+    parms[5] = tautrip
+    parms[6] = T
+
+    return parms
+
+
+def MoreInfo_6001(parms, countrate):
+    # We can only give you the effective particle number
+    n = parms[0]
+    Info = list()
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+    
+    
+def MoreInfo_6031(parms, countrate):
+    u"""Supplementary parameters:
+        [8] n₁ = n*F₁     Particle number of species 1
+        [9] n₂ = n*(1-F₁) Particle number of species 2
+    """
+    # We can only give you the effective particle number
+    n = parms[0]
+    F1 = parms[3]
+    Info = list()
+    # The enumeration of these parameters is very important for
+    # plotting the normalized curve. Countrate must come out last!
+    Info.append([u"n\u2081", n*F1])
+    Info.append([u"n\u2082", n*(1.-F1)])
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+# 2D Model Gauss
+m_twodga6001 = [6001, u"2D", u"2D confocal diffusion", CF_Gxy_gauss]
+labels_6001 = ["n", u"τ_diff [ms]", "offset"]
+values_6001 = [4.0, 0.4, 0.0]
+valuestofit_6001 = [True, True, False]
+parms_6001 = [labels_6001, values_6001, valuestofit_6001]
+
+
+# 2D Model Gauss with Triplet
+m_twodga6002 = [6002, u"T+2D", u"2D confocal diffusion with triplet", 
+                CF_Gxy_T_gauss]
+labels_6002 = ["n", u"τ_diff [ms]",  u"τ_trip [ms]", u"T", u"offset"]
+values_6002 = [4.0, 0.4, 0.001, 0.01, 0.0]
+labels_hr_6002 = ["n", u"τ_diff [ms]",  u"τ_trip [µs]", u"T", u"offset"]
+factors_hr_6002 = [1., 1., 1000., 1., 1.]
+valuestofit_6002 = [True, True, True, True, False]
+parms_6002 = [labels_6002, values_6002, valuestofit_6002,
+              labels_hr_6002, factors_hr_6002]
+
+
+# 2D + 2D + T model gauss
+m_gauss_2d_2d_t_mix_6031 = [6031, u"T+2D+2D",
+                            u"Separate 2D diffusion + triplet, Gauß",
+                            CF_Gxyz_gauss_2D2DT]
+labels_6031  = ["n",
+                u"τ"+u"\u2081"+u" [ms]",
+                u"τ"+u"\u2082"+u" [ms]",
+                u"F"+u"\u2081", 
+                u"\u03b1"+u" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                u"τ_trip [ms]",
+                u"T",
+                u"offset"
+                ]
+values_6031 = [ 
+                25,      # n
+                5,       # taud1
+                1000,    # taud2
+                0.75,     # F
+                1.0,     # alpha
+                0.001,       # tautrip
+                0.01,       # T
+                0.0      # offset
+                ]        
+# For user comfort we add values that are human readable.
+# These will be used for output that only humans can read.
+labels_human_readable_6031  = [
+                        u"n",
+                        u"τ"+u"\u2081"+u" [ms]",
+                        u"τ"+u"\u2082"+u" [ms]",
+                        u"F"+u"\u2081", 
+                        u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                        u"τ_trip [µs]",
+                        u"T",
+                        u"offset"
+                            ]
+values_factor_human_readable_6031 = [
+                1.,     # "n",
+                1.,     # "τ"+u"\u2081"+" [ms]",
+                1.,     # "τ"+u"\u2082"+" [ms]",
+                1.,     # "F"+u"\u2081", 
+                1.,     # u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                1000.,  # "τ_trip [µs]",
+                1.,     # "T",
+                1.      # "offset"
+                ]
+valuestofit_6031 = [True, True, True, True, False, False, False, False]
+parms_6031 = [labels_6031, values_6031, valuestofit_6031,
+              labels_human_readable_6031, values_factor_human_readable_6031]
+
+
+model1 = dict()
+model1["Parameters"] = parms_6001
+model1["Definitions"] = m_twodga6001
+model1["Supplements"] = MoreInfo_6001
+model1["Verification"] = Check_xy_gauss
+
+model2 = dict()
+model2["Parameters"] = parms_6002
+model2["Definitions"] = m_twodga6002
+model2["Supplements"] = MoreInfo_6001
+model2["Verification"] = Check_xy_T_gauss
+
+model3 = dict()
+model3["Parameters"] = parms_6031
+model3["Definitions"] = m_gauss_2d_2d_t_mix_6031
+model3["Supplements"] = MoreInfo_6031
+model3["Verification"] = Check_6031
+
+
+Modelarray = [model1, model2, model3]
diff --git a/src/models/MODEL_classic_gaussian_3D.py b/src/models/MODEL_classic_gaussian_3D.py
new file mode 100755
index 0000000..96fc8b6
--- /dev/null
+++ b/src/models/MODEL_classic_gaussian_3D.py
@@ -0,0 +1,309 @@
+# -*- coding: utf-8 -*-
+""" This file contains TIR one component models + Triplet
+"""
+import numpy as np                  # NumPy
+
+# 3D simple gauss
+def CF_Gxyz_gauss(parms, tau):
+    # Model 6012
+    u""" Three-dimanesional free diffusion with a Gaussian laser profile
+        (eliptical).
+
+        G(τ) = offset + 1/( n*(1+τ/τ_diff) * sqrt(1 + τ/(SP²*τ_diff)) )
+
+        Calculation of diffusion coefficient and concentration
+        from the effective radius of the detection profile (r₀ = 2*σ):
+        D = r₀²/(4*τ_diff)
+        Conc = n/( sqrt(π³)*r₀²*z₀ )
+
+            r₀   lateral detection radius (waist of lateral gaussian)
+            z₀   axial detection length (waist of axial gaussian)
+            D    Diffusion coefficient
+            Conc Concentration of dye
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal volume
+        [1] τ_diff  Characteristic residence time in confocal volume
+        [2] SP      SP=z₀/r₀ Structural parameter,
+                    describes the axis ratio of the confocal volume
+        [3] offset
+        *tau* - lag time
+    """
+    n = parms[0]
+    taudiff = parms[1]
+    SP = parms[2]
+    off = parms[3]
+
+    BB = 1 / ( (1.+tau/taudiff) * np.sqrt(1.+tau/(SP**2*taudiff)) )
+    G = off + 1/n * BB
+    return G
+
+
+# 3D blinking gauss
+# Model 6011
+def CF_Gxyz_blink(parms, tau):
+    u""" Three-dimanesional free diffusion with a Gaussian laser profile
+        (eliptical), including a triplet component.
+        The triplet factor takes into account a blinking term.
+        Set *T* or *τ_trip* to 0, if no triplet component is wanted.
+
+        G(τ) = offset + 1/( n*(1+τ/τ_diff) * sqrt(1 + τ/(SP²*τ_diff)) )
+                    * ( 1+T/(1.-T)*exp(-τ/τ_trip) )
+
+        Calculation of diffusion coefficient and concentration
+        from the effective radius of the detection profile (r₀ = 2*σ):
+        D = r₀²/(4*τ_diff)
+        Conc = n/( sqrt(π³)*r₀²*z₀ )
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal volume
+        [1] T       Fraction of particles in triplet (non-fluorescent) state
+                    0 <= T < 1
+        [2] τ_trip  Characteristic residence time in triplet state
+        [3] τ_diff  Characteristic residence time in confocal volume
+        [4] SP      SP=z₀/r₀ Structural parameter,
+                    describes the axis ratio of the confocal volume
+        [5] offset
+        *tau* - lag time
+    """
+    n = parms[0]
+    T = parms[1]
+    tautrip = parms[2]
+    taudiff = parms[3]
+    SP = parms[4]
+    off = parms[5]
+
+
+    AA = 1. + T/(1.-T) * np.exp(-tau/tautrip)
+
+    BB = 1 / ( (1.+tau/taudiff) * np.sqrt(1.+tau/(SP**2*taudiff)) )
+    G = off + 1/n * AA * BB
+    return G
+
+
+def Check_6011(parms):
+    parms[0] = np.abs(parms[0])
+    T = parms[1]
+    tautrip = np.abs(parms[2])
+    parms[3] = taudiff = np.abs(parms[3])
+    parms[4] = np.abs(parms[4])
+    off = parms[5]
+    
+    # REMOVED (issue #2)
+    ## Force triplet component to be smaller than diffusion
+    #tautrip = min(tautrip, 0.9*taudiff)
+
+    # Triplet fraction is between 0 and one.
+    T = (0.<=T<1.)*T + .999999999*(T>=1)
+
+    parms[1] = T
+    parms[2] = tautrip
+
+    return parms
+
+
+# 3D + 3D + Triplet Gauß
+# Model 6030
+def CF_Gxyz_gauss_3D3DT(parms, tau):
+    u""" Two-component three-dimensional free diffusion
+        with a Gaussian laser profile, including a triplet component.
+        The triplet factor takes into account a blinking term.
+        Set *T* or *τ_trip* to 0, if no triplet component is wanted.
+
+        particle1 = F₁/( (1+τ/τ₁) * sqrt(1+τ/(τ₁*SP²)))
+        particle2 = α²*(1-F₁)/( (1+τ/τ₂) * sqrt(1+τ/(τ₂*SP²)))
+        triplet = 1 + T/(1-T)*exp(-τ/τ_trip)
+        norm = (F₁ + α*(1-F₁))²
+        G = 1/n*(particle1 + particle2)*triplet/norm + offset
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal volume
+                    (n = n₁+n₂)
+        [1] τ₁      Diffusion time of particle species 1
+        [2] τ₂      Diffusion time of particle species 2
+        [3] F₁      Fraction of molecules of species 1 (n₁ = n*F₁)
+                    0 <= F₁ <= 1
+        [4] SP      SP=z₀/r₀, Structural parameter,
+                    describes elongation of the confocal volume
+        [5] α       Relative molecular brightness of particle
+                    2 compared to particle 1 (α = q₂/q₁)
+        [6] τ_trip  Characteristic residence time in triplet state
+        [7] T       Fraction of particles in triplet (non-fluorescent) state
+                    0 <= T < 1
+        [8] offset
+        *tau* - lag time
+    """
+    n=parms[0]
+    taud1=parms[1]
+    taud2=parms[2]
+    F=parms[3]
+    SP=parms[4]
+    alpha=parms[5]
+    tautrip=parms[6]
+    T=parms[7]
+    off=parms[8]
+
+    particle1 = F/( (1+tau/taud1) * np.sqrt(1+tau/(taud1*SP**2)))
+    particle2 = alpha**2*(1-F)/( (1+tau/taud2) * np.sqrt(1+tau/(taud2*SP**2)))
+    # A triplet fraction of zero is fine: the factor reduces to one.
+    # NumPy also handles the limiting exponentials, e.g. exp(-1/inf).
+    triplet = 1 + T/(1-T)*np.exp(-tau/tautrip)
+    # For alpha == 1, *norm* becomes one
+    norm = (F + alpha*(1-F))**2
+
+    G = 1/n*(particle1 + particle2)*triplet/norm + off
+    return G
+
+def Check_3D3DT(parms):
+    parms[0] = np.abs(parms[0])
+    parms[1] = taud1 = np.abs(parms[1])
+    parms[2] = taud2 = np.abs(parms[2])
+    F=parms[3]
+    parms[4]=np.abs(parms[4])
+    parms[5]=np.abs(parms[5])
+    tautrip=np.abs(parms[6])
+    T=parms[7]
+    off=parms[8]
+    
+    # REMOVED (issue #2)
+    ## Force triplet component to be smaller than diffusion times
+    #tautrip = min(tautrip, taud1*0.9, taud2*0.9)
+
+    # Triplet fraction is between 0 and one. T may not be one!
+    T = (0.<=T<1.)*T + .99999999999999*(T>=1)
+    # Fraction of molecules may also be one
+    F = (0.<=F<=1.)*F + 1.*(F>1)
+
+    parms[3] = F
+    parms[6] = tautrip
+    parms[7] = T
+
+    return parms
+
+
+def MoreInfo_1C(parms, countrate):
+    # We can only give you the effective particle number
+    n = parms[0]
+    Info = list()
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+def MoreInfo_6030(parms, countrate):
+    u"""Supplementary parameters:
+        [9]  n₁ = n*F₁     Particle number of species 1
+        [10] n₂ = n*(1-F₁) Particle number of species 2
+    """
+    # We can only give you the effective particle number
+    n = parms[0]
+    F1 = parms[3]
+    Info = list()
+    # The enumeration of these parameters is very important for
+    # plotting the normalized curve. Countrate must come out last!
+    Info.append([u"n\u2081", n*F1])
+    Info.append([u"n\u2082", n*(1.-F1)])
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+# 3D Model blink gauss
+m_3dblink6011 = [6011, "T+3D","3D confocal diffusion with triplet", 
+                 CF_Gxyz_blink]
+labels_6011 = ["n","T","τ_trip [ms]", "τ_diff [ms]", "SP", "offset"]
+values_6011 = [4.0, 0.2, 0.001, 0.4, 5.0, 0.0]
+labels_hr_6011 = ["n","T","τ_trip [µs]", "τ_diff [ms]", "SP", "offset"]
+factors_hr_6011 = [1., 1., 1000., 1., 1., 1.]
+valuestofit_6011 = [True, True, True, True, False, False]
+parms_6011 = [labels_6011, values_6011, valuestofit_6011,
+              labels_hr_6011, factors_hr_6011]
+
+# 3D Model gauss
+m_3dgauss6012 = [6012, "3D","3D confocal diffusion", CF_Gxyz_gauss]
+labels_6012 = ["n", "τ_diff [ms]", "SP", "offset"]
+values_6012 = [4.0, 0.4, 5.0, 0.0]
+valuestofit_6012 = [True, True, False, False]
+parms_6012 = [labels_6012, values_6012, valuestofit_6012]
+
+# 3D + 3D + T model gauss
+m_gauss_3d_3d_t_mix_6030 = [6030, "T+3D+3D",
+                            "Separate 3D diffusion + triplet, Gauß",
+                            CF_Gxyz_gauss_3D3DT]
+labels_6030  = ["n",
+                "τ"+u"\u2081"+" [ms]",
+                "τ"+u"\u2082"+" [ms]",
+                "F"+u"\u2081", 
+                "SP",
+                u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                "τ_trip [ms]",
+                "T",
+                "offset"
+                ]
+values_6030 = [ 
+                25,      # n
+                5,       # taud1
+                1000,    # taud2
+                0.75,    # F
+                5,       # SP
+                1.0,     # alpha
+                0.001,   # tautrip
+                0.01,    # T
+                0.0      # offset
+                ]        
+# For user comfort we add values that are human readable.
+# These will be used for output that only humans can read.
+labels_human_readable_6030  = [
+                        "n",
+                        "τ"+u"\u2081"+" [ms]",
+                        "τ"+u"\u2082"+" [ms]",
+                        "F"+u"\u2081", 
+                        "SP",
+                        u"\u03b1"+" (q"+u"\u2082"+"/q"+u"\u2081"+")", 
+                        "τ_trip [µs]",
+                        "T",
+                        "offset"
+                            ]
+values_factor_human_readable_6030 = [
+                        1.,     # n
+                        1.,     # taud1
+                        1.,     # taud2
+                        1.,     # F
+                        1.,     # SP
+                        1.,     # alpha
+                        1000.,  # tautrip [µs]
+                        1.,     # T
+                        1.      # offset
+                ]
+valuestofit_6030 = [True, True, True, True, False, False, False, False, False]
+parms_6030 = [labels_6030, values_6030, valuestofit_6030,
+              labels_human_readable_6030, values_factor_human_readable_6030]
+
+
+# Pack the models
+model1 = dict()
+model1["Parameters"] = parms_6011
+model1["Definitions"] = m_3dblink6011
+model1["Supplements"] = MoreInfo_1C
+model1["Verification"] = Check_6011
+
+model2 = dict()
+model2["Parameters"] = parms_6012
+model2["Definitions"] = m_3dgauss6012
+model2["Supplements"] = MoreInfo_1C
+
+model3 = dict()
+model3["Parameters"] = parms_6030
+model3["Definitions"] = m_gauss_3d_3d_t_mix_6030
+model3["Supplements"] = MoreInfo_6030
+model3["Verification"] = Check_3D3DT
+
+
+Modelarray = [model1, model2, model3]
diff --git a/src/models/MODEL_classic_gaussian_3D2D.py b/src/models/MODEL_classic_gaussian_3D2D.py
new file mode 100755
index 0000000..56532d4
--- /dev/null
+++ b/src/models/MODEL_classic_gaussian_3D2D.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+"""  This file contains a 3D+2D+T confocal FCS model.
+"""
+import numpy as np                  # NumPy
+
+# 3D + 2D + T
+def CF_Gxyz_3d2dT_gauss(parms, tau):
+    u""" Two-component, two- and three-dimensional diffusion
+        with a Gaussian laser profile, including a triplet component.
+        The triplet factor takes into account blinking according to triplet
+        states of excited molecules.
+        Set *T* or *τ_trip* to 0, if no triplet component is wanted.
+
+        particle2D = (1-F)/ (1+τ/τ_2D) 
+        particle3D = α²*F/( (1+τ/τ_3D) * sqrt(1+τ/(τ_3D*SP²)))
+        triplet = 1 + T/(1-T)*exp(-τ/τ_trip)
+        norm = (1-F + α*F)²
+        G = 1/n*(particle2D + particle3D)*triplet/norm + offset
+
+        *parms* - a list of parameters.
+        Parameters (parms[i]):
+        [0] n       Effective number of particles in confocal volume
+                    (n = n2D+n3D)
+        [1] τ_2D    Diffusion time of surface-bound particles
+        [2] τ_3D    Diffusion time of freely diffusing particles
+        [3] F       Fraction of molecules of the freely diffusing species
+                    (n3D = n*F), 0 <= F <= 1
+        [4] SP      SP=z₀/r₀ Structural parameter,
+                         describes elongation of the confocal volume
+        [5] α       Relative molecular brightness of particle
+                    3D compared to particle 2D (α = q3D/q2D)
+        [6] τ_trip  Characteristic residence time in triplet state
+        [7] T       Fraction of particles in triplet (non-fluorescent) state
+                    0 <= T < 1
+        [8] offset
+        *tau* - lag time
+    """
+    n=parms[0]
+    taud2D=parms[1]
+    taud3D=parms[2]
+    F=parms[3]
+    SP=parms[4]
+    alpha=parms[5]
+    tautrip=parms[6]
+    T=parms[7]
+    off=parms[8]
+
+
+    particle2D = (1-F)/ (1+tau/taud2D) 
+    particle3D = alpha**2*F/( (1+tau/taud3D) * np.sqrt(1+tau/(taud3D*SP**2)))
+    triplet = 1 + T/(1-T)*np.exp(-tau/tautrip)
+    norm = (1-F + alpha*F)**2
+    G = 1/n*(particle2D + particle3D)*triplet/norm
+
+    return G + off
+
+def Checkme(parms):
+    parms[0] = np.abs(parms[0])
+    parms[1] = taud2D = np.abs(parms[1])
+    parms[2] = taud3D = np.abs(parms[2])
+    F=parms[3]
+    parms[4]=np.abs(parms[4])
+    parms[5]=np.abs(parms[5])
+    tautrip=np.abs(parms[6])
+    T=parms[7]
+    off=parms[8]
+    # Triplet fraction is between 0 and one. T may not be one!
+    T = (0.<=T<1.)*T + .99999999999999*(T>=1)
+    # Fraction of molecules may also be one
+    F = (0.<=F<=1.)*F + 1.*(F>1)
+
+    parms[3] = F
+    parms[6] = tautrip
+    parms[7] = T
+
+    return parms
+
+
+def MoreInfo(parms, countrate):
+    u"""Supplementary parameters:
+        Effective number of freely diffusing particles in 3D solution:
+        [9]  n3D = n*F
+        Effective number of particles diffusing on the 2D surface:
+        [10] n2D = n*(1-F)
+    """
+    # We can only give you the effective particle number
+    n = parms[0]
+    F3d = parms[3]
+    Info = list()
+    # The enumeration of these parameters is very important for
+    # plotting the normalized curve. Countrate must come out last!
+    Info.append([u"n3D", n*F3d])
+    Info.append([u"n2D", n*(1.-F3d)])
+    if countrate is not None:
+        # CPP
+        cpp = countrate/n
+        Info.append(["cpp [kHz]", cpp])
+    return Info
+
+
+# 3D + 2D + T model gauss
+m_gauss_3d_2d_t = [6032, "T+3D+2D",
+                            "Separate 3D and 2D diffusion + triplet, Gauß",
+                            CF_Gxyz_3d2dT_gauss]
+labels  = ["n",
+                "τ_2D [ms]",
+                "τ_3D [ms]",
+                "F_3D", 
+                "SP",
+                u"\u03b1"+" (q_3D/q_2D)", 
+                "τ_trip [ms]",
+                "T",
+                "offset"
+                ]
+values = [ 
+                25,      # n
+                100,       # taud2D
+                0.1,    # taud3D
+                0.45,     # F3D
+                7,       # SP
+                1.0,     # alpha
+                0.001,       # tautrip
+                0.01,       # T
+                0.0      # offset
+                ]
+# For user comfort we add values that are human readable.
+# These will be used for output that only humans can read.
+labels_human_readable  = [  "n",
+                            "τ_2D [ms]",
+                            "τ_3D [ms]",
+                            "F_3D", 
+                            "SP",
+                            u"\u03b1"+" (q_3D/q_2D)", 
+                            "τ_trip [µs]",
+                            "T",
+                            "offset"
+                            ]
+values_factor_human_readable = [
+                          1.,     # "n",
+                          1.,     # "τ_2D [ms]",
+                          1.,     # "τ_3D [ms]",
+                          1.,     # "F_3D", 
+                          1.,     # "SP",
+                          1.,     # u"\u03b1"+" (q_3D/q_2D)", 
+                          1000.,  # "τ_trip [µs]",
+                          1.,     # "T",
+                          1.      # "offset"
+                ]
+valuestofit = [True, True, True, True, False, False, False, False, False]
+parms = [labels, values, valuestofit,
+         labels_human_readable, values_factor_human_readable]
+
+
+model1 = dict()
+model1["Parameters"] = parms
+model1["Definitions"] = m_gauss_3d_2d_t
+model1["Verification"] = Checkme
+model1["Supplements"] = MoreInfo
+
+Modelarray = [model1]
diff --git a/src/models/__init__.py b/src/models/__init__.py
new file mode 100644
index 0000000..83f7dd6
--- /dev/null
+++ b/src/models/__init__.py
@@ -0,0 +1,357 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module models:
+    Define all models and set initial parameters.
+
+    Each model has a unique ID. This ID is very important:
+        1. It is a wxWidgets ID.
+        2. It is used in the saving of sessions to identify a model.
+    It is very important that model IDs do NOT change in newer versions
+    of this program, because otherwise older PyCorrFit sessions could not
+    be restored (unless a mapping from old to new model IDs is added).
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+# This file is necessary for this folder to become a module that can be 
+# imported by PyCorrFit or other people.
+
+import numpy as np                  # NumPy
+import platform
+import sys
+
+
+## On Windows XP I had problems with the unicode Characters.
+# I found this at 
+# http://stackoverflow.com/questions/5419/python-unicode-and-the-windows-console
+# and it helped:
+reload(sys)
+sys.setdefaultencoding('utf-8')
+
+
+## Models
+import MODEL_classic_gaussian_2D
+import MODEL_classic_gaussian_3D
+import MODEL_classic_gaussian_3D2D
+import MODEL_TIRF_gaussian_1C
+import MODEL_TIRF_gaussian_3D2D
+import MODEL_TIRF_gaussian_3D3D
+import MODEL_TIRF_1C
+import MODEL_TIRF_2D2D
+import MODEL_TIRF_3D2D
+import MODEL_TIRF_3D3D
+import MODEL_TIRF_3D2Dkin_Ries
+
+def AppendNewModel(Modelarray):
+    """ Append a new model from a modelarray. *Modelarray* has to be a list
+        whose elements have two items:
+        [0] parameters
+        [1] some info about the model
+        See separate models for more information
+    """
+    global values
+    global valuedict
+    global models
+    global modeldict
+    global verification
+
+    for Model in Modelarray:
+        # We can have many models in one model array
+        parms = Model["Parameters"]
+        texts = Model["Definitions"]
+        values.append(parms)
+        # model ID is texts[0]
+        valuedict[texts[0]] = parms
+        models.append(texts)
+        modeldict[texts[0]] = texts
+        # Supplementary data might be present
+        try:
+            supper = Model["Supplements"]
+        except KeyError:
+            # Nothing to do
+            pass
+        else:
+            supplement[texts[0]] = supper
+        # Check functions - check for correct values
+        try:
+            verify = Model["Verification"]
+        except KeyError:
+            # No verification function given; register an identity
+            # function so we need not special-case this later.
+            verification[texts[0]] = lambda parms: parms
+        else:
+            verification[texts[0]] = verify
+
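+# Registration sketch (illustrative): a minimal, hypothetical model entry
+# with the same structure the bundled model files use. The id 6999, the
+# labels and *myfunc* below are made up for demonstration only.
+#
+#   mymodel = dict()
+#   mymodel["Parameters"] = [["n", u"τ_diff [ms]"], [4.0, 0.4], [True, True]]
+#   mymodel["Definitions"] = [6999, u"2D", u"My 2D model", myfunc]
+#   AppendNewModel([mymodel])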
+
+def GetHumanReadableParms(model, parameters):
+    """ From a set of parameters that have internal units e.g. [100 nm],
+        Calculate the parameters in human readable units e.g. [nm].
+        Uses modeldict from this module.
+        *model* - an integer ID of a model
+        *parameters* - a list/array of parameters 
+                       (all parameters of that model)
+        Returns:
+        New Units, New Parameters
+    """
+    stdparms = valuedict[model]
+    if len(stdparms) == 5:
+        # This means we have extra information on the model
+        # Return some human readable stuff
+        OldParameters = 1.*np.array(parameters)
+        Factors = 1.*np.array(stdparms[4])
+        NewParameters = 1.*OldParameters*Factors
+        NewUnits = stdparms[3]
+        return NewUnits, NewParameters
+    else:
+        # There is no info about human readable stuff, or it is already human
+        # readable.
+        return stdparms[0], parameters
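+
+# Usage sketch (illustrative): convert the default parameters of a
+# registered model, e.g. id 6030, to display units and back again.
+#
+#   units, hr = GetHumanReadableParms(6030, valuedict[6030][1])
+#   _, internal = GetInternalFromHumanReadableParm(6030, hr)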
+
+
+def GetHumanReadableParameterDict(model, names, parameters):
+    """ From a set of parameters that have internal units e.g. [100 nm],
+        Calculate the parameters in human readable units e.g. [nm].
+        Uses modeldict from this module.
+        In contrast to *GetHumanReadableParms* this function accepts
+        single parameter names and does not need the full array of
+        parameters.
+        *model* - an integer ID of a model
+        *names* - the names of the parameters to be translated;
+                  the order should be the same as in *parameters*
+        *parameters* - a list of parameters
+        Returns:
+        New Units, New Parameters
+    """
+    stdparms = valuedict[model]
+    if len(stdparms) == 5:
+        # This means we have extra information on the model
+        # Return some human readable stuff
+        # Check for list:
+        if isinstance(names, basestring):
+            names = [names]
+            parameters = [parameters]
+            retstring = True
+        else:
+            retstring = False
+        # Create new lists
+        NewUnits = list()
+        NewParameters = list()
+        for i in np.arange(len(stdparms[0])):
+            for j in np.arange(len(names)):
+                if names[j] == stdparms[0][i]:
+                    NewUnits.append(stdparms[3][i])
+                    NewParameters.append(stdparms[4][i]*parameters[j])
+        if retstring:
+            NewUnits = NewUnits[0]
+            NewParameters = NewParameters[0]
+        return NewUnits, NewParameters
+    else:
+        # There is no info about human readable stuff, or it is already human
+        # readable.
+        return names, parameters
+
+
+def GetInternalFromHumanReadableParm(model, parameters):
+    """ This is the inverse of *GetHumanReadableParms*
+        *model* - an integer ID of a model
+        *parameters* - a list/array of parameters 
+        Returns:
+        New Units, New Parameters
+    """
+    stdparms = valuedict[model]
+    if len(stdparms) == 5:
+        # This means we have extra information on the model
+        # and can convert to internal values
+        OldParameters = 1.*np.array(parameters)
+        Factors = 1./np.array(stdparms[4])        # inverse
+        NewParameters = 1.*OldParameters*Factors
+        NewUnits = stdparms[0]
+        return NewUnits, NewParameters
+    else:
+        # There is no info about human readable stuff. The given
+        # parameters have not been converted before by
+        # *GetHumanReadableParms*.
+        return stdparms[0], parameters
+        
+
+def GetModelType(modelid):
+    """ Given a modelid, get the type of model function
+        (Confocal, TIR-Conf., TIR-□, User)
+    """
+    if modelid >= 7000:
+        return u"User"
+    else:
+        shorttype = dict()
+        shorttype[u"Confocal (Gaussian)"] = u"Confocal"
+        shorttype[u"TIR (Gaussian/Exp.)"] = u"TIR Conf."
+        shorttype[u"TIR (□xσ/Exp.)"] = u"TIR □xσ"
+        for key in modeltypes.keys():
+            mlist = modeltypes[key]
+            if mlist.count(modelid) == 1:
+                try:
+                    return shorttype[key]
+                except KeyError:
+                    return ""
+
+
+def GetMoreInfo(modelid, Page):
+    """ This functino is called by someone who has already calculated
+        some stuff or wants to know more about the model he is looking at.
+        *modelid* is an ID of a model.
+        *Page* is a wx.flatnotebook page.
+        Returns:
+         More information about a model in form of a list.
+    """
+    # Background signal average
+    bgaverage = None
+    # Signal countrate/average:
+    # might become countrate - bgaverage
+    countrate = Page.traceavg
+    # Get the parameters from the current page.
+    parms = Page.active_parms[1]
+    Info = list()
+    if Page.IsCrossCorrelation is False:
+        ## First import the supplementary parameters of the model
+        ## The order is important for plot normalization and session
+        ## saving as of version 0.7.8
+        # Try to get the dictionary entry of a model
+        # Background information
+        if Page.bgselected is not None:
+            # Background list consists of items with
+            #  [0] average
+            #  [1] name
+            #  [2] trace
+            bgaverage = Page.parent.Background[Page.bgselected][0]
+            # Now set the correct countrate
+            # We already printed the countrate, so there's no harm done.
+            if countrate is not None:
+                # might be that there is no countrate.
+                countrate = countrate - bgaverage
+        try:
+            # This function should return all important information
+            # that can be calculated from the given parameters.
+            func_info = supplement[modelid]
+            data = func_info(parms, countrate)
+            for item in data:
+                Info.append([item[0], item[1]])
+        except KeyError:
+            # No information available
+            pass
+        # In case of cross correlation, we don't show this kind of
+        # information.
+        if Page.traceavg is not None:
+            # Measurement time
+            duration = Page.trace[-1,0]/1000
+            Info.append(["duration [s]", duration])
+            # countrate has to be printed before background.
+            # Background might overwrite countrate.
+            Info.append(["avg. signal [kHz]", Page.traceavg])
+    else:
+        ## Cross correlation curves usually have two traces. Since we
+        ## do not know how to compute the cpp, we will pass the argument
+        ## "None" as the countrate.
+        ## First import the supplementary parameters of the model
+        ## The order is important for plot normalization and session
+        ## saving as of version 0.7.8
+        # Try to get the dictionary entry of a model
+        try:
+            # This function should return all important information
+            # that can be calculated from the given parameters.
+            func_info = supplement[modelid]
+            data = func_info(parms, None)
+            for item in data:
+                Info.append([item[0], item[1]])
+        except KeyError:
+            # No information available
+            pass
+        if Page.tracecc is not None:
+            # Measurement time
+            duration = Page.tracecc[0][-1,0]/1000
+            Info.append(["duration [s]", duration])
+            # countrate has to be printed before background.
+            # Background might overwrite countrate.
+            avg0 = Page.tracecc[0][:,1].mean()
+            avg1 = Page.tracecc[1][:,1].mean()
+            Info.append(["avg. signal A [kHz]", avg0])
+            Info.append(["avg. signal B [kHz]", avg1])
+
+
+    if len(Info) == 0:
+        # If nothing matched until now:
+        return None
+    else:
+        return Info
+
+
+def GetPositionOfParameter(model, name):
+    """ Returns an integer corresponding to the position of the label
+        of a parameter in the model function
+    """
+    stdparms = valuedict[model]
+    for i in np.arange(len(stdparms[0])):
+        if name == stdparms[0][i]:
+            return int(i)
+    
+
+# Pack all variables
+values = list()
+# Also create a dictionary, key is modelid
+valuedict = dict()
+# Pack all models
+models = list()
+# Also create a dictionary
+modeldict = dict()
+# A dictionary for supplementary data:
+supplement = dict()
+# A dictionary for checking for correct variables
+verification = dict()
+
+
+# 6001 6002 6031
+AppendNewModel(MODEL_classic_gaussian_2D.Modelarray) 
+# 6011 6012 6030
+AppendNewModel(MODEL_classic_gaussian_3D.Modelarray) 
+# 6032
+AppendNewModel(MODEL_classic_gaussian_3D2D.Modelarray) 
+# 6013
+AppendNewModel(MODEL_TIRF_gaussian_1C.Modelarray)
+# 6033
+AppendNewModel(MODEL_TIRF_gaussian_3D2D.Modelarray) 
+# 6034
+AppendNewModel(MODEL_TIRF_gaussian_3D3D.Modelarray) 
+# 6000 6010
+AppendNewModel(MODEL_TIRF_1C.Modelarray) 
+# 6022
+AppendNewModel(MODEL_TIRF_2D2D.Modelarray) 
+# 6020
+AppendNewModel(MODEL_TIRF_3D2D.Modelarray) 
+# 6023
+AppendNewModel(MODEL_TIRF_3D3D.Modelarray) 
+# 6021
+AppendNewModel(MODEL_TIRF_3D2Dkin_Ries.Modelarray) 
+
+
+
+# Create a list for the differentiation between the models
+# This should make everything look a little cleaner
+modeltypes = dict()
+#modeltypes[u"Confocal (Gaussian)"] = [6001, 6002, 6012, 6011, 6031, 6032, 6030]
+#modeltypes[u"TIR (Gaussian/Exp.)"] = [6013, 6033, 6034]
+#modeltypes[u"TIR (□xσ/Exp.)"] = [6000, 6010, 6022, 6020, 6023, 6021]
+
+modeltypes[u"Confocal (Gaussian)"] = [6011, 6030, 6002, 6031, 6032]
+modeltypes[u"TIR (Gaussian/Exp.)"] = [6013, 6034, 6033]
+modeltypes[u"TIR (□xσ/Exp.)"] = [6010, 6023, 6000, 6022, 6020, 6021]
+modeltypes[u"User"] = list()
+
diff --git a/src/openfile.py b/src/openfile.py
new file mode 100644
index 0000000..2994bad
--- /dev/null
+++ b/src/openfile.py
@@ -0,0 +1,721 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module openfile
+    This file is used to define operations on how to open some files.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import csv
+from distutils.version import LooseVersion # For version checking
+import numpy as np
+import os
+import platform
+import shutil
+import sys
+import tempfile
+import traceback
+import wx
+import yaml
+import zipfile
+
+import doc
+import edclasses
+import models as mdls
+from tools import info
+# This contains all the information necessary to import data files:
+from readfiles import Filetypes
+from readfiles import BGFiletypes
+
+
+def ImportParametersYaml(parent, dirname):
+    """ Import the parameters from a parameters.yaml file
+        from a PyCorrFit session.
+    """
+    dlg = wx.FileDialog(parent, "Open session file", dirname, "", 
+                                "*.fcsfit-session.zip", wx.OPEN)
+    # user cannot do anything until he clicks "OK"
+    if dlg.ShowModal() == wx.ID_OK:
+        path = dlg.GetPath()            # Workaround since 0.7.5
+        (dirname, filename) = os.path.split(path)
+        #filename = dlg.GetFilename()
+        #dirname = dlg.GetDirectory()
+        dlg.Destroy()
+        Arc = zipfile.ZipFile(os.path.join(dirname, filename), mode='r')
+        # Get the yaml parms dump:
+        yamlfile = Arc.open("Parameters.yaml")
+        # Parms: Fitting and drawing parameters of correlation curve
+        # The *yamlfile* is responsible for the order of the Pages #i.
+        Parms = yaml.safe_load(yamlfile)
+        yamlfile.close()
+        Arc.close()
+        return Parms, dirname, filename
+    else:
+        dirname=dlg.GetDirectory()
+        return None, dirname, None
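+
+def _load_parameters_sketch(path):
+    """ Illustrative helper (not used by the GUI): read only the
+        parameter dump from a saved session archive. Assumes *path*
+        points to an existing *.fcsfit-session.zip file.
+    """
+    Arc = zipfile.ZipFile(path, mode='r')
+    try:
+        return yaml.safe_load(Arc.open("Parameters.yaml"))
+    finally:
+        Arc.close()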
+
+
+def OpenSession(parent, dirname, sessionfile=None):
+    """ Load a whole session that has previously been saved
+        by PyCorrFit.
+        Infodict may contain the following keys:
+        "Backgrounds", list: contains the backgrounds
+        "Comments", dict: "Session" comment and int keys to Page titles
+        "Correlations", dict: page numbers, all correlation curves
+        "External Functions", dict: modelids to external model functions
+        "External Weights", dict: page numbers, external weights for fitting
+        "Parameters", dict: page numbers, all parameters of the pages
+        "Preferences", dict: not used yet
+        "Traces", dict: page numbers, all traces of the pages
+    """
+    Infodict = dict()
+    fcsfitwildcard = ".fcsfit-session.zip"
+    if sessionfile is None:
+        dlg = wx.FileDialog(parent, "Open session file", dirname, "", 
+                        "*"+fcsfitwildcard, wx.OPEN)
+        # user cannot do anything until he clicks "OK"
+        if dlg.ShowModal() == wx.ID_OK:
+            path = dlg.GetPath()            # Workaround since 0.7.5
+            (dirname, filename) = os.path.split(path)
+            #filename = dlg.GetFilename()
+            #dirname = dlg.GetDirectory()
+            dlg.Destroy()
+        else:
+            # User did not press OK
+            # stop this function
+            dirname = dlg.GetDirectory()
+            dlg.Destroy()
+            return None, dirname, None
+    else:
+        (dirname, filename) = os.path.split(sessionfile)
+        path = sessionfile                  # Workaround since 0.7.5
+        if filename[-19:] != fcsfitwildcard:
+            # User specified wrong file
+            print "Unknown file extension: "+filename
+            # stop this function (no dialog exists in this branch)
+            return None, dirname, None
+    Arc = zipfile.ZipFile(path, mode='r')
+    try:
+        ## Check PyCorrFit version:
+        readmefile = Arc.open("Readme.txt")
+        # e.g. "This file was created using PyCorrFit version 0.7.6"
+        identifier = readmefile.readline()
+        arcv = LooseVersion(identifier[46:].strip())
+        thisv = LooseVersion(parent.version.strip())
+        if arcv > thisv:
+            errstring = "Your version of Pycorrfit ("+str(thisv)+")"+\
+                   " is too old to open this session ("+\
+                   str(arcv).strip()+").\n"+\
+                   "Please download the lates version of "+\
+                   " PyCorrFit from \n"+doc.HomePage+".\n"+\
+                   "Continue opening this session?"
+            dlg = edclasses.MyOKAbortDialog(parent, errstring, "Warning")
+            returns = dlg.ShowModal()
+            if returns == wx.ID_OK:
+                dlg.Destroy()
+            else:
+                dlg.Destroy()
+                return None, dirname, None
+    except:
+        pass
+    # Get the yaml parms dump:
+    yamlfile = Arc.open("Parameters.yaml")
+    # Parameters: Fitting and drawing parameters of correlation curve
+    # The *yamlfile* is responsible for the order of the Pages #i.
+    Infodict["Parameters"] = yaml.safe_load(yamlfile)
+    yamlfile.close()
+    # Supplementary data (errors of fit)
+    supname = "Supplements.yaml"
+    try:
+        Arc.getinfo(supname)
+    except:
+        pass
+    else:
+        supfile = Arc.open(supname)
+        supdata = yaml.safe_load(supfile)
+        Infodict["Supplements"] = dict()
+        for idp in supdata:
+            Infodict["Supplements"][idp[0]] = dict() 
+            Infodict["Supplements"][idp[0]]["FitErr"] = idp[1]
+            if len(idp) > 2:
+                # As of version 0.7.4 we save chi2 and shared pages -global fit
+                Infodict["Supplements"][idp[0]]["Chi sq"] = idp[2]
+                Infodict["Supplements"][idp[0]]["Global Share"] = idp[3]
+    ## Preferences: Reserved for a future version of PyCorrFit :)
+    prefname = "Preferences.yaml"
+    try:
+        Arc.getinfo(prefname)
+    except KeyError:
+        pass
+    else:
+        yamlpref = Arc.open(prefname)
+        Infodict["Preferences"] = yaml.safe_load(yamlpref)
+        yamlpref.close()
+    # Get external functions
+    Infodict["External Functions"] = dict()
+    key = 7001
+    while key <= 7999:
+        # (There should not be more than 1000 functions)
+        funcfilename = "model_"+str(key)+".txt"
+        try:
+            Arc.getinfo(funcfilename)
+        except KeyError:
+            # No more functions to import
+            key = 8000
+        else:
+            funcfile =  Arc.open(funcfilename)
+            Infodict["External Functions"][key] = funcfile.read()
+            funcfile.close()
+            key=key+1
+    # Get the correlation arrays
+    Infodict["Correlations"] = dict()
+    for i in np.arange(len(Infodict["Parameters"])):
+        # The *number* is used to identify the correct file
+        number = str(Infodict["Parameters"][i][0]).strip().strip(":").strip("#")
+        pageid = int(number)
+        expfilename = "data"+number+".csv"
+        expfile = Arc.open(expfilename, 'r')
+        readdata = csv.reader(expfile, delimiter=',')
+        dataexp = list()
+        tau = list()
+        if str(readdata.next()[0]) == "# tau only":
+            for row in readdata:
+                # Exclude commentaries
+                if (str(row[0])[0:1] != '#'):
+                    tau.append(float(row[0]))
+            tau = np.array(tau)
+            dataexp = None
+        else:
+            for row in readdata:
+                # Exclude commentaries
+                if (str(row[0])[0:1] != '#'):
+                    dataexp.append((float(row[0]), float(row[1])))
+            dataexp = np.array(dataexp)
+            tau = dataexp[:,0]
+        Infodict["Correlations"][pageid] = [tau, dataexp]
+        del readdata
+        expfile.close()
+    # Get the Traces
+    Infodict["Traces"] = dict()
+    for i in np.arange(len(Infodict["Parameters"])):
+        # The *number* is used to identify the correct file
+        number = str(Infodict["Parameters"][i][0]).strip().strip(":").strip("#")
+        pageid = int(number)
+        # Find out, if we have a cross correlation data type
+        IsCross = False
+        try:
+            IsCross = Infodict["Parameters"][i][7]
+        except IndexError:
+            # No Cross correlation
+            pass
+        if IsCross is False:
+            tracefilenames = ["trace"+number+".csv"]
+        else:
+            # Cross correlation uses two traces
+            tracefilenames = ["trace"+number+"A.csv",
+                              "trace"+number+"B.csv" ]
+        thistrace = list()
+        for tracefilename in tracefilenames:
+            try:
+                Arc.getinfo(tracefilename)
+            except KeyError:
+                pass
+            else:
+                tracefile = Arc.open(tracefilename, 'r')
+                traceread = csv.reader(tracefile, delimiter=',')
+                singletrace = list()
+                for row in traceread:
+                    # Exclude commentaries
+                    if (str(row[0])[0:1] != '#'):
+                        singletrace.append((float(row[0]), float(row[1])))
+                singletrace = np.array(singletrace)
+                thistrace.append(singletrace)
+                del traceread
+                del singletrace
+                tracefile.close()
+        if len(thistrace) != 0:
+            Infodict["Traces"][pageid] = thistrace
+        else:
+            Infodict["Traces"][pageid] = None
+    # Get the comments, if they exist
+    commentfilename = "comments.txt"
+    try:
+        # Raises KeyError, if file is not present:
+        Arc.getinfo(commentfilename)
+    except KeyError:
+        pass   
+    else:
+        # Open the file
+        commentfile = Arc.open(commentfilename, 'r')
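+        # comments.txt layout (cf. SaveSession below): one line per
+        # page, in page order, followed by the session comment on the
+        # remaining lines.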
+        Infodict["Comments"] = dict()
+        for i in np.arange(len(Infodict["Parameters"])):
+            number = str(Infodict["Parameters"][i][0]).strip().strip(":").strip("#")
+            pageid = int(number)
+            # Strip line ending characters for all the Pages.
+            Infodict["Comments"][pageid] = commentfile.readline().strip()
+        # Now Add the Session Comment (the rest of the file). 
+        ComList = commentfile.readlines()
+        Infodict["Comments"]["Session"] = ''
+        for line in ComList:
+            Infodict["Comments"]["Session"] += line
+        commentfile.close()
+    # Get the Backgroundtraces and data if they exist
+    bgfilename = "backgrounds.csv"
+    try:
+        # Raises KeyError, if file is not present:
+        Arc.getinfo(bgfilename)
+    except KeyError:
+        pass
+    else:
+        # Open the file
+        Infodict["Backgrounds"] = list()
+        bgfile = Arc.open(bgfilename, 'r')
+        bgread = csv.reader(bgfile, delimiter='\t')
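+        # backgrounds.csv holds one tab separated row per background
+        # ("average signal [kHz]", "name"); the trace of row i is
+        # stored separately in bg_trace<i>.csv.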
+        i = 0
+        for bgrow in bgread:
+            bgtracefilename = "bg_trace"+str(i)+".csv"
+            bgtracefile = Arc.open(bgtracefilename, 'r')
+            bgtraceread = csv.reader(bgtracefile, delimiter=',')
+            bgtrace = list()
+            for row in bgtraceread:
+                # Exclude commentaries
+                if (str(row[0])[0:1] != '#'):
+                    bgtrace.append((np.float(row[0]), np.float(row[1])))
+            bgtrace = np.array(bgtrace)
+            Infodict["Backgrounds"].append([np.float(bgrow[0]), str(bgrow[1]), bgtrace])
+            i = i + 1
+        bgfile.close()
+    # Get external weights if they exist
+    Info = dict()
+    WeightsFilename = "externalweights.txt"
+    try:
+        # Raises KeyError, if file is not present:
+        Arc.getinfo(WeightsFilename)
+    except KeyError:
+        pass
+    else:
+        Wfile = Arc.open(WeightsFilename, 'r')
+        Wread = csv.reader(Wfile, delimiter='\t')
+        Weightsdict = dict()
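+        # externalweights.txt holds one tab separated row per weight
+        # set ("page id", "weight name"); the data itself is stored in
+        # externalweights_data<page>_<name>.csv.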
+        for wrow in Wread:
+            Pkey = wrow[0]  # Page of weights
+            pageid = int(Pkey)
+            # Do not overwrite anything
+            try:
+                Weightsdict[pageid]
+            except KeyError:
+                Weightsdict[pageid] = dict()
+            Nkey = wrow[1]  # Name of weights
+            Wdatafilename = "externalweights_data"+Pkey+"_"+Nkey+".csv"
+            Wdatafile = Arc.open(Wdatafilename, 'r')
+            Wdatareader = csv.reader(Wdatafile)
+            Wdata = list()
+            for row in Wdatareader:
+                # Exclude commentaries
+                if (str(row[0])[0:1] != '#'):
+                    Wdata.append(np.float(row[0]))
+            Weightsdict[pageid][Nkey] = np.array(Wdata)
+        Infodict["External Weights"] = Weightsdict
+    Arc.close()
+    return Infodict, dirname, filename
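+
+# A minimal usage sketch for the session loader above (illustrative
+# only -- the call below assumes the enclosing function is named
+# OpenSession, which is not guaranteed by this excerpt):
+#
+#     Infodict, dirname, filename = OpenSession(parent, dirname)
+#     for pageid, (tau, dataexp) in Infodict["Correlations"].items():
+#         print pageid, len(tau)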
+
+
+def saveCSV(parent, dirname, Page):
+    """ Write relevant data into a comma separated list.
+        
+        Parameters:
+        *parent*   the parent window
+        *dirname*  directory to set on saving
+        *Page*     Page containing all necessary variables
+    """
+    filename = Page.tabtitle.GetValue().strip()+Page.counter[:2]
+    dlg = wx.FileDialog(parent, "Save curve", dirname, filename, 
+          "Correlation with trace (*.csv)|*.csv;*.CSV"+\
+          "|Correlation only (*.csv)|*.csv;*.CSV",
+           wx.SAVE|wx.FD_OVERWRITE_PROMPT)
+    # The user cannot do anything until they click "OK".
+    if dlg.ShowModal() == wx.ID_OK:
+        path = dlg.GetPath()            # Workaround since 0.7.5
+        (dirname, filename) = os.path.split(path)
+        #filename = dlg.GetFilename()
+        #dirname = dlg.GetDirectory()
+        if not filename.lower().endswith(".csv"):
+            filename = filename+".csv"
+        openedfile = open(os.path.join(dirname, filename), 'wb')
+        ## First, some doc text
+        openedfile.write(doc.saveCSVinfo(parent).replace('\n', '\r\n'))
+        # The infos
+        InfoMan = info.InfoClass(CurPage=Page)
+        PageInfo = InfoMan.GetCurFancyInfo()
+        for line in PageInfo.splitlines():
+            openedfile.write("# "+line+"\r\n")
+        openedfile.write("#\r\n#\r\n")
+        # Get all the data we need from the Page
+        # Modeled data
+        # Since 0.7.8 the user may normalize the curves. The normalization
+        # factor is set in *Page.normfactor*.
+        corr = Page.datacorr[:,1]*Page.normfactor
+        if Page.dataexp is not None:
+            # Experimental data
+            tau = Page.dataexp[:,0]
+            exp = Page.dataexp[:,1]*Page.normfactor
+            res = Page.resid[:,1]*Page.normfactor
+            # Use the weights of the plotted area, because only the plotted area is exported.
+            weight = Page.weights_used_for_plotting
+            if weight is None:
+                pass
+            elif len(weight) != len(exp):
+                text = "Weights have not been calculated for the "+\
+                       "area you want to export. Pressing 'Fit' "+\
+                       "again should solve this issue. Data will "+\
+                       "not be saved."
+                dlg = wx.MessageDialog(parent, text, "Error", 
+                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg.ShowModal()
+                return dirname, None
+        else:
+            tau = Page.datacorr[:,0]
+            exp = None
+            res = None
+        # Include weights in data saving:
+        # PyCorrFit thinks in [ms], but we will save as [s]
+        timefactor = 0.001
+        tau = timefactor * tau
+        ## Now we want to write all that data into the file
+        # This is for csv writing:
+        ## Correlation curve
+        dataWriter = csv.writer(openedfile, delimiter='\t')
+        if exp is not None:
+            header = '# Channel (tau [s])'+"\t"+ \
+                     'Experimental correlation'+"\t"+ \
+                     'Fitted correlation'+ "\t"+ \
+                     'Residuals'+"\r\n"
+            data = [tau, exp, corr, res]
+            if Page.weighted_fit_was_performed is True \
+            and weight is not None:
+                header = header.strip() + "\t"+'Weights (fit)'+"\r\n"
+                data.append(weight)
+        else:
+            header = '# Channel (tau [s])'+"\t"+ \
+                     'Correlation function'+"\r\n"
+            data = [tau, corr]
+        # Write header
+        openedfile.write(header)
+        # Write data
+        for i in np.arange(len(data[0])):
+            # row-wise, data may have more than two elements per row
+            datarow = list()
+            for j in np.arange(len(data)):
+                rowcoli = "%.10e" % data[j][i]
+                datarow.append(rowcoli)
+            dataWriter.writerow(datarow)
+        ## Trace
+        # Only save the trace if user wants us to:
+        if dlg.GetFilterIndex() == 0:
+            # We will also save the trace in [s]
+            # Intensity trace in kHz may stay the same
+            if Page.trace is not None:
+                # Mark beginning of Trace
+                openedfile.write('#\r\n#\r\n# BEGIN TRACE\r\n#\r\n')
+                # Columns
+                time = Page.trace[:,0]*timefactor
+                intensity = Page.trace[:,1]
+                # Write
+                openedfile.write('# Time [s]'+"\t" 
+                                     'Intensity trace [kHz]'+" \r\n")
+                for i in np.arange(len(time)):
+                    dataWriter.writerow(["%.10e" % time[i],
+                                         "%.10e" % intensity[i]])
+            elif Page.tracecc is not None:
+                # We have some cross-correlation here:
+                # Mark beginning of Trace A
+                openedfile.write('#\r\n#\r\n# BEGIN TRACE\r\n#\r\n')
+                # Columns
+                time = Page.tracecc[0][:,0]*timefactor
+                intensity = Page.tracecc[0][:,1]
+                # Write
+                openedfile.write('# Time [s]'+"\t" 
+                                     'Intensity trace [kHz]'+" \r\n")
+                for i in np.arange(len(time)):
+                    dataWriter.writerow(["%.10e" % time[i],
+                                         "%.10e" % intensity[i]])
+                # Mark beginning of Trace B
+                openedfile.write('#\r\n#\r\n# BEGIN SECOND TRACE\r\n#\r\n')
+                # Columns
+                time = Page.tracecc[1][:,0]*timefactor
+                intensity = Page.tracecc[1][:,1]
+                # Write
+                openedfile.write('# Time [s]'+"\t" 
+                                     'Intensity trace [kHz]'+" \r\n")
+                for i in np.arange(len(time)):
+                    dataWriter.writerow(["%.10e" % time[i],
+                                         "%.10e" % intensity[i]])
+        dlg.Destroy()
+        openedfile.close()
+        return dirname, filename
+    else:
+        dirname = dlg.GetDirectory()
+        dlg.Destroy()
+        return dirname, None
+
+
+def SaveSession(parent, dirname, Infodict):
+    """ Write whole Session into a zip file.
+        Infodict may contain the following keys:
+        "Backgrounds", list: contains the backgrounds
+        "Comments", dict: "Session" comment and int keys to Page titles
+        "Correlations", dict: page numbers, all correlation curves
+        "External Functions, dict": modelids to external model functions
+        "External Weights", dict: page numbers, external weights for fitting
+        "Parameters", dict: page numbers, all parameters of the pages
+        "Preferences", dict: not used yet
+        "Traces", dict: page numbers, all traces of the pages
+        We will also write a Readme.txt
+    """
+    dlg = wx.FileDialog(parent, "Save session file", dirname, "",
+                     "*.fcsfit-session.zip", wx.SAVE|wx.FD_OVERWRITE_PROMPT)
+    if dlg.ShowModal() == wx.ID_OK:
+        path = dlg.GetPath()            # Workaround since 0.7.5
+        (dirname, filename) = os.path.split(path)
+        #filename = dlg.GetFilename()
+        #dirname = dlg.GetDirectory()
+        # Sometimes you have multiple endings...
+        if not filename.endswith(".fcsfit-session.zip"):
+            filename = filename+".fcsfit-session.zip"
+        dlg.Destroy()
+        # Change working directory
+        returnWD = os.getcwd()
+        tempdir = tempfile.mkdtemp()
+        os.chdir(tempdir)
+        # Create zip file
+        Arc = zipfile.ZipFile(filename, mode='w')
+        # Only do the Yaml thing for safe operations.
+        # Make the yaml dump
+        parmsfilename = "Parameters.yaml"
+        # Parameters have to be floats in lists
+        # in order for yaml.safe_load to work.
+        Parms =  Infodict["Parameters"]
+        ParmsKeys = Parms.keys()
+        ParmsKeys.sort()
+        Parmlist = list()
+        for idparm in ParmsKeys:
+            # Make sure we do not accidently save arrays.
+            # This would not work correctly with yaml.
+            Parms[idparm][2] = np.array(Parms[idparm][2],dtype="float").tolist()
+            Parms[idparm][3] = np.array(Parms[idparm][3],dtype="bool").tolist()
+            # Range of fitting parameters
+            Parms[idparm][9] = np.array(Parms[idparm][9],dtype="float").tolist()
+            Parmlist.append(Parms[idparm])
+        yaml.dump(Parmlist, open(parmsfilename, "wb"))
+        Arc.write(parmsfilename)
+        os.remove(os.path.join(tempdir, parmsfilename))
+        # Supplementary data (errors of fit)
+        errsfilename = "Supplements.yaml"
+        Sups =  Infodict["Supplements"]
+        SupKeys = Sups.keys()
+        SupKeys.sort()
+        Suplist = list()
+        for idsup in SupKeys:
+            error = Sups[idsup]["FitErr"]
+            chi2 = Sups[idsup]["Chi sq"]
+            globalshare = Sups[idsup]["Global Share"]
+            Suplist.append([idsup, error, chi2, globalshare])
+        yaml.dump(Suplist, open(errsfilename, "wb"))
+        Arc.write(errsfilename)
+        os.remove(os.path.join(tempdir, errsfilename))
+        # Save external functions
+        for key in Infodict["External Functions"].keys():
+            funcfilename = "model_"+str(key)+".txt"
+            funcfile =  open(funcfilename, 'wb')
+            funcfile.write(Infodict["External Functions"][key])
+            funcfile.close()
+            Arc.write(funcfilename)
+            os.remove(os.path.join(tempdir, funcfilename))
+        # Save (dataexp and tau)s into separate csv files.
+        for pageid in Infodict["Correlations"].keys():
+            # Since the correlation data and the parameters are in the
+            # same order (the page order), we identify the file by the
+            # page number.
+            number = str(pageid)
+            expfilename = "data"+number+".csv"
+            expfile = open(expfilename, 'wb')
+            tau = Infodict["Correlations"][pageid][0]
+            exp = Infodict["Correlations"][pageid][1]
+            dataWriter = csv.writer(expfile, delimiter=',')
+            if exp is not None:
+                # Names of Columns
+                dataWriter.writerow(['# tau', 'experimental data'])
+                # Actual Data
+                # Do not use len(tau) instead of len(exp[:,0])!
+                # Otherwise, the experimental data will not be saved
+                # entirely if it has been cropped, because tau might be
+                # smaller than exp[:,0] --> tau = exp[startcrop:endcrop,0]
+                for j in np.arange(len(exp[:,0])):
+                    dataWriter.writerow(["%.20e" % exp[j,0],
+                                         "%.20e" % exp[j,1]])
+            else:
+                # Only write tau
+                dataWriter.writerow(['# tau only'])
+                for j in np.arange(len(tau)):
+                    dataWriter.writerow(["%.20e" % tau[j]])
+            expfile.close()
+            # Add to archive
+            Arc.write(expfilename)
+            os.remove(os.path.join(tempdir, expfilename))
+        # Save traces into separate csv files.
+        for pageid in Infodict["Traces"].keys():
+            number = str(pageid)
+            # Since *Trace* and *Parms* are in the same order, which is the
+            # Page order, we will identify the filename by the Page title 
+            # number.
+            if Infodict["Traces"][pageid] is not None:
+                if Parms[pageid][7] is True:
+                    # We have cross correlation: save two traces
+                    ## A
+                    tracefilenamea = "trace"+number+"A.csv"
+                    tracefile = open(tracefilenamea, 'wb')
+                    traceWriter = csv.writer(tracefile, delimiter=',')
+                    time = Infodict["Traces"][pageid][0][:,0]
+                    rate = Infodict["Traces"][pageid][0][:,1]
+                    # Names of Columns
+                    traceWriter.writerow(['# time', 'count rate'])
+                    # Actual Data
+                    for j in np.arange(len(time)):
+                        traceWriter.writerow(["%.20e" % time[j],
+                                              "%.20e" % rate[j]])
+                    tracefile.close()
+                    # Add to archive
+                    Arc.write(tracefilenamea)
+                    os.remove(os.path.join(tempdir, tracefilenamea))
+                    ## B
+                    tracefilenameb = "trace"+number+"B.csv"
+                    tracefile = open(tracefilenameb, 'wb')
+                    traceWriter = csv.writer(tracefile, delimiter=',')
+                    time = Infodict["Traces"][pageid][1][:,0]
+                    rate = Infodict["Traces"][pageid][1][:,1]
+                    # Names of Columns
+                    traceWriter.writerow(['# time', 'count rate'])
+                    # Actual Data
+                    for j in np.arange(len(time)):
+                        traceWriter.writerow(["%.20e" % time[j],
+                                              "%.20e" % rate[j]])
+                    tracefile.close()
+                    # Add to archive
+                    Arc.write(tracefilenameb)
+                    os.remove(os.path.join(tempdir, tracefilenameb))
+                else:
+                    # Save one single trace
+                    tracefilename = "trace"+number+".csv"
+                    tracefile = open(tracefilename, 'wb')
+                    traceWriter = csv.writer(tracefile, delimiter=',')
+                    time = Infodict["Traces"][pageid][:,0]
+                    rate = Infodict["Traces"][pageid][:,1]
+                    # Names of Columns
+                    traceWriter.writerow(['# time', 'count rate'])
+                    # Actual Data
+                    for j in np.arange(len(time)):
+                        traceWriter.writerow(["%.20e" % time[j],
+                                              "%.20e" % rate[j]])
+                    tracefile.close()
+                    # Add to archive
+                    Arc.write(tracefilename)
+                    os.remove(os.path.join(tempdir, tracefilename))
+        # Save comments into txt file
+        commentfilename = "comments.txt"
+        commentfile = open(commentfilename, 'wb')
+        # Comments["Session"] is the comment on the whole session
+        Ckeys = Infodict["Comments"].keys()
+        Ckeys.sort()
+        for key in Ckeys:
+            if key != "Session":
+                commentfile.write(Infodict["Comments"][key]+"\r\n")
+        commentfile.write(Infodict["Comments"]["Session"])
+        commentfile.close()
+        Arc.write(commentfilename)
+        os.remove(os.path.join(tempdir, commentfilename))
+        ## Save Background information:
+        Background = Infodict["Backgrounds"]
+        if len(Background) > 0:
+            # We do not use a comma separated, but a tab separated file,
+            # because a comma might be in the name of a bg.
+            bgfilename = "backgrounds.csv"
+            bgfile = open(bgfilename, 'wb')
+            bgwriter = csv.writer(bgfile, delimiter='\t')
+            for i in np.arange(len(Background)):
+                bgwriter.writerow([str(Background[i][0]), Background[i][1]])
+                # Traces
+                bgtracefilename = "bg_trace"+str(i)+".csv"
+                bgtracefile = open(bgtracefilename, 'wb')
+                bgtraceWriter = csv.writer(bgtracefile, delimiter=',')
+                bgtraceWriter.writerow(['# time', 'count rate'])
+                # Actual Data
+                time = Background[i][2][:,0]
+                rate = Background[i][2][:,1]
+                for j in np.arange(len(time)):
+                    bgtraceWriter.writerow(["%.20e" % time[j],
+                                            "%.20e" % rate[j]])
+                bgtracefile.close()
+                # Add to archive
+                Arc.write(bgtracefilename)
+                os.remove(os.path.join(tempdir, bgtracefilename))
+            bgfile.close()
+            Arc.write(bgfilename)
+            os.remove(os.path.join(tempdir, bgfilename))
+        ## Save External Weights information
+        WeightedPageID = Infodict["External Weights"].keys()
+        WeightedPageID.sort()
+        WeightFilename = "externalweights.txt"
+        WeightFile = open(WeightFilename, 'wb')
+        WeightWriter = csv.writer(WeightFile, delimiter='\t')
+        for pageid in WeightedPageID:
+            number = str(pageid)
+            NestWeights = Infodict["External Weights"][pageid].keys()
+            # The order of the types does not matter, since they are
+            # sorted in the frontend and upon import. We sort them here, anyhow.
+            NestWeights.sort()
+            for Nkey in NestWeights:
+                WeightWriter.writerow([number, str(Nkey).strip()])
+                # Add data to a File
+                WeightDataFilename = "externalweights_data"+number+\
+                                     "_"+str(Nkey).strip()+".csv"
+                WeightDataFile = open(WeightDataFilename, 'wb')
+                WeightDataWriter = csv.writer(WeightDataFile)
+                wdata = Infodict["External Weights"][pageid][Nkey]
+                for jw in np.arange(len(wdata)):
+                    WeightDataWriter.writerow([str(wdata[jw])])
+                WeightDataFile.close()
+                Arc.write(WeightDataFilename)
+                os.remove(os.path.join(tempdir, WeightDataFilename))
+        WeightFile.close()
+        Arc.write(WeightFilename)
+        os.remove(os.path.join(tempdir, WeightFilename))
+        ## Readme
+        rmfilename = "Readme.txt"
+        rmfile = open(rmfilename, 'wb')
+        rmfile.write(doc.SessionReadme(parent))
+        rmfile.close()
+        Arc.write(rmfilename)
+        os.remove(os.path.join(tempdir, rmfilename))
+        # Close the archive
+        Arc.close()
+        # Move archive to destination directory
+        shutil.move(os.path.join(tempdir, filename), 
+                    os.path.join(dirname, filename) )
+        # Go to destination directory
+        os.chdir(returnWD)
+        os.rmdir(tempdir)
+        return dirname, filename
+    else:
+        dirname = dlg.GetDirectory()
+        dlg.Destroy()
+        return dirname, None
diff --git a/src/page.py b/src/page.py
new file mode 100644
index 0000000..840d16d
--- /dev/null
+++ b/src/page.py
@@ -0,0 +1,999 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module page
+    This module provides the fitting panels (Pages) of the GUI (Graphical
+    User Interface). All necessary functions and modules are called from here.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+# Use DEMO for contrast-rich screenshots.
+# This enlarges axis text and draws black lines instead of grey ones.
+DEMO = False
+
+
+import os
+import wx                               # GUI interface wxPython
+import wx.lib.plot as plot              # Plotting in wxPython
+import wx.lib.scrolledpanel as scrolled
+import numpy as np                      # NumPy
+import sys                              # System stuff
+
+import doc
+import edclasses                    # Cool stuff like a better FloatSpin
+import leastsquaresfit as fit       # For fitting
+import models as mdls
+import tools
+
+
+## On Windows XP I had problems with unicode characters.
+# I found this at 
+# http://stackoverflow.com/questions/5419/python-unicode-and-the-windows-console
+# and it helped:
+reload(sys)
+sys.setdefaultencoding('utf-8')
+
+
+class FittingPanel(wx.Panel):
+    """
+    These are the panels that show the fitting dialogs with the plots.
+    """
+    def __init__(self, parent, counter, modelid, active_parms, tau):
+        """ Initialize with given parameters. """
+        wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY)
+        self.parent = parent
+        self.filename = "None"
+        ## If this value is set to True, the trace and traceavg variables
+    ## will not be used. Instead, tracecc, a list of traces, will be used.
+        self.IsCrossCorrelation = False
+        ## Setting up variables for plotting
+        self.trace = None        # The intensity trace, tuple
+        self.traceavg = None     # Average trace intensity
+        self.tracecc = None      # List of traces (in CC mode only)
+        self.bgselected = None   # integer, index for parent.Background
+        self.bgcorrect = 1.      # Background correction factor for dataexp
+        self.normparm = None     # Parameter number used for graph normalization
+                                 # if greater than number of fitting parms,
+                                 # then supplementary parm is used.
+        self.normfactor = 1.     # Graph normalization factor (e.g. value of n)
+        self.startcrop = None    # Where cropping of dataexp starts
+        self.endcrop = None      # Where cropping of dataexp ends
+        self.dataexp = None      # Experimental data (cropped)
+        self.dataexpfull = None  # Experimental data (not cropped)
+        self.datacorr = None     # Calculated data
+        self.resid = None        # Residuals
+        self.data4weight = None  # Data used for weight calculation 
+        # Fitting:
+        #self.Fitbox=[ fitbox, weightedfitdrop, fittext, fittext2, fittextvar,
+        #                fitspin, buttonfit ]
+        # chi squared - is also an indicator, if something had been fitted
+        self.FitKnots = 5 # number of knots for the spline fit (or similar)
+        self.chi2 = None
+        self.weighted_fit_was_performed = False # default is no weighting
+        self.weights_used_for_fitting = None # weights used for fitting
+        self.weights_used_for_plotting = None # weights used for plotting
+        self.weights_plot_fill_area = None # weight area in plot
+        self.weighted_fittype_id = None # integer (drop down item)
+        self.weighted_fittype = "Unknown" # type of fit used
+        self.weighted_nuvar = None # bins for std-dev. (left and right)
+        # dictionary for alternative variances from e.g. averaging
+        self.external_std_weights = dict()
+        # Errors of fit dictionary
+        self.parmoptim_error = None
+        # A list containing page numbers that share parameters with this page.
+        # This parameter is defined by the global fitting tool and is saved in
+        # sessions.
+        self.GlobalParameterShare = list()
+        # Counts number of Pages already created:
+        self.counter = counter
+        # Has the initial plot been performed?
+        # Call PlotAll("init") to set this to true. If it is true, then
+        # nothing will be plotted if called with "init"
+        self.InitialPlot = False
+        # Model we are using
+        self.modelid = modelid
+        # modelpack:
+        # [0] labels
+        # [1] values
+        # [2] bool values to fit
+        # [3] labels human readable (optional)
+        # [4] factors human readable (optional)
+        modelpack = mdls.modeldict[modelid]
+        # The string of the model in the menu
+        self.model = modelpack[1]
+        # Some more useless text about the model
+        self.modelname = modelpack[2]
+        # Function for fitting
+        self.active_fct = modelpack[3]
+        # Parameter verification function.
+        # This checks parameters concerning their physical meaningfulness :)
+        self.check_parms_model = mdls.verification[modelid]
+        # active_parameters:
+        # [0] labels
+        # [1] values
+        # [2] bool values to fit
+        self.active_parms = active_parms
+        # Parameter range for fitting (defaults to zero)
+        self.parameter_range = np.zeros((len(active_parms[0]),2))
+        # Some timescale
+        self.taufull = tau
+        self.tau = 1*self.taufull
+        # Tool statistics uses this list:
+        self.StatisticsCheckboxes = None
+        ### Splitter window
+        # Sizes
+        size = parent.notebook.GetSize()
+        tabsize = 33
+        size[1] = size[1] - tabsize
+        self.sizepanelx = 270
+        canvasx = size[0]-self.sizepanelx+5
+        sizepanel = (self.sizepanelx, size[1])
+        sizecanvas = (canvasx, size[1])
+        self.sp = wx.SplitterWindow(self, size=size, style=wx.SP_3DSASH)
+        # This is necessary to prevent "Unsplit" of the SplitterWindow:
+        self.sp.SetMinimumPaneSize(1)
+        ## Settings Section (left side)
+        #self.panelsettings = wx.Panel(self.sp, size=sizepanel)
+        self.panelsettings = scrolled.ScrolledPanel(self.sp, size=sizepanel)
+        self.panelsettings.SetupScrolling(scroll_x=False)
+        ## Setting up Plot (correlation + chi**2)
+        self.spcanvas = wx.SplitterWindow(self.sp, size=sizecanvas,
+                                          style=wx.SP_3DSASH)
+        # This is necessary to prevent "Unsplit" of the SplitterWindow:
+        self.spcanvas.SetMinimumPaneSize(1)
+        # y difference in pixels between Autocorrelation and Residuals
+        cupsizey = size[1]*4/5
+        # Calculate initial data
+        self.calculate_corr()
+        # Draw the settings section
+        self.settings()
+        # Upper Plot for plotting of Correlation Function
+        self.canvascorr = plot.PlotCanvas(self.spcanvas)
+        self.canvascorr.setLogScale((True, False))  
+        self.canvascorr.SetEnableZoom(True)
+        self.PlotAll()
+        self.canvascorr.SetSize((canvasx, cupsizey))
+        # Lower Plot for plotting of the residuals
+        self.canvaserr = plot.PlotCanvas(self.spcanvas)
+        self.canvaserr.setLogScale((True, False))
+        self.canvaserr.SetEnableZoom(True)
+        self.canvaserr.SetSize((canvasx, size[1]-cupsizey))
+        self.spcanvas.SplitHorizontally(self.canvascorr, self.canvaserr,
+                                        cupsizey)
+        self.sp.SplitVertically(self.panelsettings, self.spcanvas,
+                                self.sizepanelx)
+        ## Check the DEMO option and adjust the plot accordingly:
+        try:
+            if DEMO == True:
+                self.canvascorr.SetFontSizeAxis(16)
+                self.canvaserr.SetFontSizeAxis(16)
+        except:
+            # Don't raise any unnecessary errors
+            pass
+        # Bind resizing to resizing function.
+        wx.EVT_SIZE(self, self.OnSize)
+
+
+    def apply_parameters(self, event=None):
+        """ Read the values from the form and write it to the
+            pages parameters.
+            This function is called when the "Apply" button is hit.
+        """
+        parameters = list()
+        # Read parameters from form and update self.active_parms[1]
+        for i in np.arange(len(self.active_parms[1])):
+            parameters.append(1*self.spincontrol[i].GetValue())
+            self.active_parms[2][i] = self.checkboxes[i].GetValue()
+        # As of version 0.7.5: we want the units to be displayed
+        # human readable - the way they are displayed 
+        # in the Page info tool.
+        # Here: Convert human readable units to program internal
+        # units
+        e, self.active_parms[1] = mdls.GetInternalFromHumanReadableParm(
+                                  self.modelid, np.array(parameters))
+        self.active_parms[1] = self.check_parms(1*self.active_parms[1])
+        # Fitting parameters
+        self.weighted_nuvar = self.Fitbox[5].GetValue()
+        self.weighted_fittype_id = self.Fitbox[1].GetSelection()
+        if self.Fitbox[1].GetSelection() == -1:
+            # User edited knot number
+            Knots = self.Fitbox[1].GetValue()
+            Knots = filter(lambda x: x.isdigit(), Knots)
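+            # (In Python 2, filtering a string returns a string,
+            #  e.g. "12 knots" -> "12".)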
+            if Knots == "":
+                Knots = "5"
+            self.weighted_fittype_id = 1
+            self.FitKnots = str(Knots)
+        elif self.Fitbox[1].GetSelection() == 1:
+            Knots = self.Fitbox[1].GetValue()
+            Knots = filter(lambda x: x.isdigit(), Knots)
+            self.FitKnots = int(Knots)
+        # If parameters have been changed because of the check_parms
+        # function, write them back.
+        self.apply_parameters_reverse()
+
+
+    def apply_parameters_reverse(self, event=None):
+        """ Read the values from the pages parameters and write
+            it to the form.
+        """
+        # check parameters
+        self.active_parms[1] = self.check_parms(self.active_parms[1])
+        #
+        # As of version 0.7.5: we want the units to be displayed
+        # human readable - the way they are displayed 
+        # in the Page info tool.
+        # 
+        # Here: Convert program internal units to
+        # human readable units
+        labels, parameters = \
+                     mdls.GetHumanReadableParms(self.modelid,
+                                        self.active_parms[1])
+        # Write parameters to the form on the Page
+        for i in np.arange(len(self.active_parms[1])):
+            self.spincontrol[i].SetValue(parameters[i])
+            self.checkboxes[i].SetValue(self.active_parms[2][i])
+        # Fitting parameters
+        self.Fitbox[5].SetValue(self.weighted_nuvar)
+        idf = self.weighted_fittype_id
+        List = self.Fitbox[1].GetItems()
+        List[1] = "Spline ("+str(self.FitKnots)+" knots)"
+        self.Fitbox[1].SetItems(List)
+        self.Fitbox[1].SetSelection(idf)
+
+
+    def calculate_corr(self):
+        """ Calculate correlation function
+            Returns an array of tuples (tau, correlation)
+            *self.active_f*: A function that is being calculated using
+            *self.active_parms*: A list of parameters
+    
+            Uses variables:
+            *self.datacorr*: Plotting data (tuples) of the correlation curve
+            *self.dataexp*: Plotting data (tuples) of the experimental curve
+            *self.tau*: "tau" values for plotting (included in dataexp).
+    
+            Returns:
+            Nothing. Recalculation of the mentioned global variables is done.
+        """
+        parameters = self.active_parms[1]
+        # calculate correlation values
+        y = self.active_fct(parameters, self.tau)
+        # Create new plotting data
+        self.datacorr = np.zeros((len(self.tau), 2))
+        self.datacorr[:, 0] = self.tau
+        self.datacorr[:, 1] = y
+
+
+    def check_parms(self, parms):
+        """ Check parameters using self.check_parms_model and the user defined
+            borders for each parameter.
+        """
+        p = 1.*np.array(parms)
+        p = self.check_parms_model(p)
+        r = self.parameter_range
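+        # A row with lower == upper (e.g. the default (0, 0)) means
+        # "do not restrict this parameter".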
+        for i in range(len(p)):
+            if r[i][0] == r[i][1]:
+                pass
+            elif p[i] < r[i][0]:
+                p[i] = r[i][0]
+            elif p[i] > r[i][1]:
+                p[i] = r[i][1]
+        return p
+            
+        
+    def crop_data(self):
+        """ Crop the pages data for plotting
+            This will create slices from
+            *self.taufull* and *self.dataexpfull* using the values from
+            *self.startcrop* and *self.endcrop*, creating
+            *self.tau* and *self.dataexp*.
+        """
+        if self.dataexpfull is not None:
+            if self.startcrop == self.endcrop:
+                # self.bgcorrect is background correction
+                self.dataexp = 1*self.dataexpfull
+                self.taufull = self.dataexpfull[:,0]
+                self.tau = 1*self.taufull
+                self.startcrop = 0
+                self.endcrop = len(self.taufull)
+            else:
+                self.dataexp = 1*self.dataexpfull[self.startcrop:self.endcrop]
+                self.taufull = self.dataexpfull[:,0]
+                self.tau = 1*self.dataexp[:,0]
+                # If startcrop is larger than the length of dataexp,
+                # we will not have an array. Prevent that.
+                if len(self.tau) == 0:
+                    self.tau = 1*self.taufull
+                    self.dataexp = 1*self.dataexpfull
+            try:
+                self.taufull[self.startcrop]
+                self.taufull[self.endcrop-1]
+            except (IndexError, TypeError):
+                self.startcrop = 0
+                self.endcrop = len(self.taufull)
+                self.tau = 1*self.taufull
+                self.dataexp = 1*self.dataexpfull
+        else:
+            # We have to check if the startcrop and endcrop parameters are
+            # inside the taufull array.
+            try:
+                # Raises IndexError if index out of bounds
+                self.taufull[self.startcrop]
+                # Raises TypeError if self.endcrop is not an int.
+                self.taufull[self.endcrop-1]
+            except (IndexError, TypeError):
+                self.tau = 1*self.taufull
+                self.endcrop = len(self.taufull)
+                self.startcrop = 0
+            else:
+                self.tau = 1*self.taufull[self.startcrop:self.endcrop]
+
+
+
+    def CorrectDataexp(self, dataexp):
+        """ Background correction
+            Background correction with *self.bgcorrect*.
+            Overwrites *self.dataexp*.
+            For details see:
+            Incollection (Thomps:bookFCS2002)
+            Thompson, N. Lakowicz, J.; Geddes, C. D. & Lakowicz, J. R. (ed.)
+            Fluorescence Correlation Spectroscopy
+            Topics in Fluorescence Spectroscopy, Springer US, 2002, 1, 337-378
+        """
+        # Make a copy. Do not overwrite the original.
+        if dataexp is not None:
+            modified = 1 * dataexp
+            if self.bgselected is not None:
+                # self.bgselected - background, needs to be imported via Tools
+                if self.traceavg is not None:
+                    S = self.traceavg
+                    B = self.parent.Background[self.bgselected][0]
+                    # Calculate correction factor
+                    self.bgcorrect = (S/(S-B))**2
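+                    # e.g. S = 10 kHz signal with B = 2 kHz background
+                    # gives a correction factor of (10/8)**2 = 1.5625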
+                    # self.dataexp should be set, since we have self.trace
+                    modified[:,1] *= self.bgcorrect
+            return modified
+        else:
+            return None
+
+    def Fit_enable_fitting(self):
+        """ Enable the fitting button and the weighted fit control"""
+        #self.Fitbox=[ fitbox, weightedfitdrop, fittext, fittext2, fittextvar,
+        #                fitspin, buttonfit ]
+        self.Fitbox[0].Enable()
+        self.Fitbox[1].Enable()
+        self.Fitbox[-1].Enable()
+
+
+    def Fit_create_instance(self, noplots=False):
+        """ *noplots* prohibits plotting (e.g. splines) """
+        ### If you change anything here, make sure you
+        ### take a look at the global fit tool!
+        ## Start fitting class and fill with information.
+        self.apply_parameters()
+        Fitting = fit.Fit()
+        # Verbose mode?
+        if noplots is False:
+            Fitting.verbose = self.parent.MenuVerbose.IsChecked()
+        Fitting.uselatex = self.parent.MenuUseLatex.IsChecked()
+        Fitting.check_parms = self.check_parms
+        Fitting.dataexpfull = self.CorrectDataexp(self.dataexpfull)
+        ## The user-edited knot number is now parsed in apply_parameters.
+        if self.Fitbox[1].GetSelection() == 1:
+            Fitting.fittype = "spline"+str(self.FitKnots)
+            self.parent.StatusBar.SetStatusText("You can change the number"+
+               " of knots. Check 'Preference>Verbose Mode' to view the spline.")
+        elif self.Fitbox[1].GetSelection() == 2:
+            Fitting.fittype = "model function"
+            if self is self.parent.notebook.GetCurrentPage():
+                self.parent.StatusBar.SetStatusText("This is iterative. Press"+
+                 " 'Fit' multiple times. If it does not converge, use splines.")
+        elif self.Fitbox[1].GetSelection() > 2:
+            # This means we have some user defined std, for example from
+            # averaging. This std is stored in self.external_std_weights
+            # list, which looks like this:
+            # self.external_std_weights["from average"] = 1D np.array std
+            Fitting.fittype = "other"
+            Fitlist = self.Fitbox[1].GetItems()
+            FitValue = Fitlist[self.Fitbox[1].GetSelection()]
+            Fitting.external_deviations = self.external_std_weights[FitValue]
+            # Fitting will crop the variances according to
+            # the Fitting.interval that we set below.
+            if self is self.parent.notebook.GetCurrentPage():
+                self.parent.StatusBar.SetStatusText("")
+        else:
+            self.parent.StatusBar.SetStatusText("")
+        Fitting.function = self.active_fct
+        Fitting.interval = [self.startcrop, self.endcrop]
+        Fitting.values = 1*self.active_parms[1]
+        Fitting.valuestofit = 1*self.active_parms[2]
+        Fitting.weights = self.Fitbox[5].GetValue()
+        Fitting.ApplyParameters()
+        # Set weighted_fit_was_performed variables
+        if self.Fitbox[1].GetSelection() == 0:
+            self.weighted_fit_was_performed = False
+            self.weights_used_for_fitting = None
+            self.tauweight = None
+        else:
+            self.weighted_fit_was_performed = True
+            self.weights_used_for_fitting = Fitting.dataweights
+        self.weighted_fittype_id = idf = self.Fitbox[1].GetSelection()
+        self.weighted_fittype = self.Fitbox[1].GetItems()[idf]
+        return Fitting
+
+        
+    def Fit_function(self, event=None, noplots=False):
+        """ Call the fit function. """
+        # Make a busy cursor
+        wx.BeginBusyCursor()
+        # Apply parameters
+        # This also applies the background correction, if present
+        self.apply_parameters()
+        # Create instance of fitting class
+        Fitting = self.Fit_create_instance(noplots)
+        # Reset page counter
+        self.GlobalParameterShare = list()
+        try:
+            Fitting.least_square()
+        except ValueError:
+            # I sometimes had this on Windows. It is caused by fitting to
+            # a .SIN file without selecting proper channels first.
+            print "There was an error fitting. Please make sure that you\n"+\
+                  "are fitting in a proper channel domain."
+            wx.EndBusyCursor()
+            return
+        parms = Fitting.valuesoptim
+        # create an error dictionary
+        p_error = Fitting.parmoptim_error
+        if p_error is None:
+            self.parmoptim_error = None
+        else:
+            self.parmoptim_error = dict()
+            errcount = 0
+            for i in np.arange(len(parms)):
+                if self.active_parms[2][i]:
+                    self.parmoptim_error[self.active_parms[0][i]] = p_error[errcount]
+                    errcount += 1
+        self.chi2 = Fitting.chi
+        for i in np.arange(len(parms)):
+            self.active_parms[1][i] = parms[i]
+        # We need this for plotting
+        self.calculate_corr()
+        self.data4weight = 1.*self.datacorr
+        # Update spin-control values
+        self.apply_parameters_reverse()
+        # Plot everything
+        self.PlotAll()
+        # Return cursor to normal
+        wx.EndBusyCursor()
+
+
+    def Fit_WeightedFitCheck(self, event=None):
+        """ Enable Or disable variance calculation, dependent on 
+            "Weighted Fit" checkbox
+        """
+        #self.Fitbox=[ fitbox, weightedfitdrop, fittext, fittext2, fittextvar,
+        #                fitspin, buttonfit ]
+        weighted = (self.Fitbox[1].GetSelection() != 0)
+        # In the case of "Average" we do not enable the
+        # "Calculation of variance" part.
+        if weighted is True and self.Fitbox[1].GetValue() != "Average":
+            self.Fitbox[2].Enable()
+            self.Fitbox[3].Enable()
+            self.Fitbox[4].Enable()
+            self.Fitbox[5].Enable()
+        else:
+            self.Fitbox[2].Disable()
+            self.Fitbox[3].Disable()
+            self.Fitbox[4].Disable()
+            self.Fitbox[5].Disable()
+
+
+    def MakeStaticBoxSizer(self, boxlabel):
+        """ Create a Box with check boxes (fit yes/no) and possibilities to 
+            change initial values for fitting.
+
+            Parameters:
+            *boxlabel*: The name of the box (is being displayed)
+            *self.active_parms[0]*: A list of things to put into the box
+
+            Returns:
+            *sizer*: The static Box
+            *check*: The (un)set checkboxes
+            *spin*: The spin text fields
+        """
+        box = wx.StaticBox(self.panelsettings, label=boxlabel)
+        sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
+        check = list()
+        spin = list()
+        #
+        # As of version 0.7.5: we want the units to be displayed
+        # human readable - the way they are displayed 
+        # in the Page info tool.
+        # 
+        labels, parameters = mdls.GetHumanReadableParms(self.modelid,
+                                                self.active_parms[1])
+        for label in labels:
+            sizerh = wx.BoxSizer(wx.HORIZONTAL)
+            checkbox = wx.CheckBox(self.panelsettings, label=label)
+            # We needed to "from wx.lib.agw import floatspin" to get this:
+            spinctrl = edclasses.FloatSpin(self.panelsettings, digits=10,
+                                           increment=.01)
+            sizerh.Add(spinctrl)
+            sizerh.Add(checkbox)
+            sizer.Add(sizerh)
+            # Put everything into lists to be able to refer to it later
+            check.append(checkbox)
+            spin.append(spinctrl)
+        return sizer, check, spin
+
+
+    def OnAmplitudeCheck(self, event=None):
+        """ Enable/Disable BG rate text line.
+            New feature introduced in 0.7.8
+        """
+        #self.AmplitudeInfo = [ bgnorm, bgtex, normtoNDropdown, textnor]
+        ## Normalization to a certain parameter in plots
+        # Find all parameters that start with an "N"
+        # ? and "C" ?
+        # Create List
+        normlist = list()
+        normlist.append("None")
+        ## Add parameters
+        parameterlist = list()
+        for i in np.arange(len(self.active_parms[0])):
+            label = self.active_parms[0][i]
+            if label[0] == "n" or label[0] == "N":
+                normlist.append("*"+label)
+                parameterlist.append(i)
+        ## Add supplementary parameters
+        # Get them from models
+        supplement = mdls.GetMoreInfo(self.modelid, self)
+        if supplement is not None:
+            for i in np.arange(len(supplement)):
+                label = supplement[i][0]
+                if label[0] == "n" or label[0] == "N":
+                    normlist.append("*"+label)
+                    # Add the id of the supplement starting at the
+                    # number of fitting parameters of current page.
+                    parameterlist.append(i+len(self.active_parms[0]))
+        normsel = self.AmplitudeInfo[2].GetSelection()
+        if event == "init":
+            # Read everything from the page not from the panel
+            # self.normparm was set and we need to set
+            #  self.normfactor
+            #  self.AmplitudeInfo[2]
+            if self.normparm is not None:
+                if self.normparm < len(self.active_parms[1]):
+                    # use fitting parameter from page
+                    self.normfactor =  self.active_parms[1][self.normparm]
+                else:
+                    # use supplementary parameter
+                    supnum = self.normparm - len(self.active_parms[1])
+                    self.normfactor =  supplement[supnum][1]
+                # Set initial selection
+                for j in np.arange(len(parameterlist)):
+                    if parameterlist[j] == self.normparm:
+                        normsel = j+1
+            else:
+                self.normfactor = 1.
+                normsel = 0
+        else:
+            if normsel > 0:
+                # Make sure we are not normalizing with a background
+                # Use the parameter id from the internal parameterlist
+                parameterid = parameterlist[normsel-1]
+                if parameterid < len(self.active_parms[1]):
+                    # fitting parameter
+                    self.normfactor = self.active_parms[1][parameterid]
+                else:
+                    # supplementary parameter
+                    supnum = parameterid - len(self.active_parms[1])
+                    self.normfactor =  supplement[supnum][1]
+                # For parameter export:
+                self.normparm = parameterid
+                # No internal parameters will be changed
+                # Only the plotting
+            else:
+                self.normfactor = 1.
+                normsel = 0
+                # For parameter export
+                self.normparm = None
+        if len(parameterlist) > 0:
+            self.AmplitudeInfo[2].Enable()
+            self.AmplitudeInfo[3].Enable()
+        else:
+            self.AmplitudeInfo[2].Disable()
+            self.AmplitudeInfo[3].Disable()
+        # Set dropdown values
+        self.AmplitudeInfo[2].SetItems(normlist)
+        self.AmplitudeInfo[2].SetSelection(normsel)
+        ## Background correction
+        bgsel = self.AmplitudeInfo[0].GetSelection()
+        # Standard is the background of the page
+        # Read bg selection
+        if event == "init":
+            # Read everything from the page not from the panel
+            if self.bgselected is not None:
+                bgsel = self.bgselected + 1
+            else:
+                bgsel = 0
+        else:
+            if bgsel <= 0:
+                self.bgselected = None
+                bgsel = 0 #None
+            else:
+                self.bgselected = bgsel - 1
+        # Rebuild itemlist
+        # self.parent.Background[self.bgselected][i]
+        # [0] average signal [kHz]
+        # [1] signal name (edited by user)
+        # [2] signal trace (tuple) ([ms], [kHz])
+        bglist = list()
+        bglist.append("None")
+        for item in self.parent.Background:
+            if len(item[1]) > 10:
+                item[1] = item[1][:7]+"..."
+            bgname = item[1]+" (%.2f kHz)" %item[0]
+            bglist.append(bgname)
+        self.AmplitudeInfo[0].SetItems(bglist)
+        self.AmplitudeInfo[0].SetSelection(bgsel)
+        #self.AmplitudeInfo = [ bgnorm, bgtex, normtoNDropdown, textnor]
+        if len(bglist) <= 1:
+            self.AmplitudeInfo[0].Disable()
+            self.AmplitudeInfo[1].Disable()
+        else:
+            self.AmplitudeInfo[0].Enable()
+            self.AmplitudeInfo[1].Enable()
+
+
+    def OnTitleChanged(self, e):
+        pid = self.parent.notebook.GetPageIndex(self)
+        if self.tabtitle.GetValue() == "":
+            text = self.counter + mdls.modeldict[self.modelid][1]
+        else:
+            # How many characters of the page title should be displayed
+            # in the tab? We choose 9: AC1-012 plus 2 whitespaces
+            text = self.counter + self.tabtitle.GetValue()[:9]
+        self.parent.notebook.SetPageText(pid,text)        
+
+        
+    def OnSetRange(self, e):
+        """ Open a new Frame where the parameter range can be set.
+            Rewrites self.parameter_range
+            Parameter ranges are treated like parameters: They are saved in
+            sessions and applied in batch mode.
+        """
+        # We write a separate tool for that.
+        # This tool does not show up in the Tools menu.
+        if self.parent.RangeSelector is None:
+            self.parent.RangeSelector = tools.RangeSelector(self)
+            self.parent.RangeSelector.Bind(wx.EVT_CLOSE,
+                                           self.parent.RangeSelector.OnClose)
+        else:
+            try:
+                self.parent.RangeSelector.OnClose()
+            except:
+                pass
+            self.parent.RangeSelector = None
+        
+
+    def OnSize(self, event):
+        """ Resize the fitting Panel, when Window is resized. """
+        size = self.parent.notebook.GetSize()
+        tabsize = 33
+        size[1] = size[1] - tabsize
+        self.sp.SetSize(size)
+
+
+    def PlotAll(self, event=None):
+        """
+        This function plots the whole correlation and residuals canvas.
+        We do:
+        - Channel selection
+        - Background correction
+        - Apply Parameters (separate function)
+        - Drawing of plots
+        """
+        if event == "init":
+            # We use this to have the page plotted at least once before
+            # readout of parameters (e.g. startcrop, endcrop)
+            # This is a performance tweak.
+            self.crop_data()
+            if self.InitialPlot is True:
+                return
+            else:
+                self.InitialPlot = True
+        ## Enable/Disable, set values frontend normalization
+        self.OnAmplitudeCheck()
+        self.crop_data()
+        ## Calculate trace average
+        if self.trace is not None:
+            # Average of the current pages trace
+            self.traceavg = self.trace[:,1].mean()
+        # Perform Background correction
+        self.dataexp = self.CorrectDataexp(self.dataexp)
+        ## Apply parameters
+        self.apply_parameters()
+        # Calculate correlation function from parameters
+        self.calculate_corr()
+        ## Drawing of correlation plot
+        # Plots self.dataexp and the calculated correlation function 
+        # self.datacorr into the upper canvas.
+        # Create a line @ y=zero:
+        zerostart = self.tau[0]
+        zeroend = self.tau[-1]
+        datazero = [[zerostart, 0], [zeroend,0]]
+        ## Check the DEMO option and adjust the plot accordingly:
+        try:
+            if DEMO == True:
+                width = 4
+                colexp = "black"
+                colfit = "red"
+            else:
+                width = 1
+                colexp = "grey"
+                colfit = "blue"
+        except:
+            # Don't raise any unnecessary errors
+            width = 1   
+            colexp = "grey"  
+            colfit = "blue"
+        colweight = "cyan"
+        lines = list()
+        linezero = plot.PolyLine(datazero, colour='orange', width=width)
+        lines.append(linezero)
+                
+        if self.dataexp is not None:
+            if self.weighted_fit_was_performed == True and \
+               self.weights_used_for_fitting is not None and \
+               self.parent.MenuShowWeights.IsChecked() and \
+               self.data4weight is not None:
+                # Add the weights to the graph.
+                # This is done by drawing two lines.
+                w = 1*self.data4weight
+                w1 = 1*w
+                w2 = 1*w
+                w1[:, 1] = w[:, 1] + self.weights_used_for_fitting 
+                w2[:, 1] = w[:, 1] - self.weights_used_for_fitting 
+                wend = 1*self.weights_used_for_fitting 
+                # crop w1 and w2 if self.dataexp does not include all
+                # data points.
+                if not np.all(w[:,0] == self.dataexp[:,0]):
+                    start = np.min(self.dataexp[:,0])
+                    end = np.max(self.dataexp[:,0])
+                    idstart = np.argwhere(w[:,0]==start)
+                    idend = np.argwhere(w[:,0]==end)
+                    if len(idend) == 0:
+                        # dataexp is longer, do not change anything
+                        pass
+                    else:
+                        w1 = w1[:idend[0][0]+1]
+                        w2 = w2[:idend[0][0]+1]
+                        wend = wend[:idend[0][0]+1]
+                    if len(idstart) == 0:
+                        # dataexp starts earlier, do not change anything
+                        pass
+                    else:
+                        w1 = w1[idstart[0][0]:]
+                        w2 = w2[idstart[0][0]:]
+                        wend = wend[idstart[0][0]:]
+                ## Normalization with self.normfactor
+                w1[:,1] *= self.normfactor
+                w2[:,1] *= self.normfactor
+                self.weights_used_for_plotting = wend
+                self.weights_plot_fill_area = [w1,w2]
+                lineweight1 = plot.PolyLine(w1, legend='',
+                                          colour=colweight, width=width)
+                lines.append(lineweight1)
+                lineweight2 = plot.PolyLine(w2, legend='',
+                                          colour=colweight, width=width)
+                lines.append(lineweight2)
+                
+            ## Plot Correlation curves
+            # Plot both, experimental and calculated data
+            # Normalization with self.normfactor, new feature in 0.7.8
+            datacorr_norm = 1*self.datacorr
+            datacorr_norm[:,1] *= self.normfactor
+            dataexp_norm = 1*self.dataexp
+            dataexp_norm[:,1] *= self.normfactor
+            linecorr = plot.PolyLine(datacorr_norm, legend='', colour=colfit,
+                                     width=width)
+            lineexp = plot.PolyLine(dataexp_norm, legend='', colour=colexp,
+                                    width=width)
+            # Draw linezero first, so it is in the background
+            lines.append(lineexp)
+            lines.append(linecorr)
+            PlotCorr = plot.PlotGraphics(lines, 
+                                xLabel=u'lag time τ [ms]', yLabel=u'G(τ)')
+            self.canvascorr.Draw(PlotCorr)
+            ## Calculate residuals
+            self.resid = np.zeros((len(self.tau), 2))
+            self.resid[:, 0] = self.tau
+            self.resid[:, 1] = self.dataexp[:, 1] - self.datacorr[:, 1]
+            # Plot residuals
+            # Normalization with self.normfactor, new feature in 0.7.8
+            resid_norm = np.zeros((len(self.tau), 2))
+            resid_norm[:, 0] = self.tau
+            resid_norm[:, 1] = dataexp_norm[:, 1] - datacorr_norm[:, 1]
+            lineres = plot.PolyLine(resid_norm, legend='', colour=colfit,
+                                    width=width)
+            # residuals or weighted residuals?
+            if self.weighted_fit_was_performed:
+                yLabelRes = "weighted \nresiduals"
+            else:
+                yLabelRes = "residuals"
+            PlotRes = plot.PlotGraphics([linezero, lineres], 
+                                   xLabel=u'lag time τ [ms]',
+                                   yLabel=yLabelRes)
+            self.canvaserr.Draw(PlotRes)
+        else:
+            # Amplitude normalization, new feature in 0.7.8
+            datacorr_norm = 1*self.datacorr
+            datacorr_norm[:,1] *= self.normfactor
+            linecorr = plot.PolyLine(datacorr_norm, legend='', colour='blue',
+                                     width=1)
+            PlotCorr = plot.PlotGraphics([linezero, linecorr],
+                       xLabel=u'Lag time τ [ms]', yLabel=u'G(τ)')
+            self.canvascorr.Draw(PlotCorr)
+        self.parent.OnFNBPageChanged()
+
+
+    def settings(self):
+        """ Define what is displayed on the left side of the window:
+            tab title, model parameters, amplitude corrections, and
+            fitting options.
+        """
+        horizontalsize = self.sizepanelx-10
+        # Title
+        # Create empty tab title
+        mddat = mdls.modeldict[self.modelid]
+        modelshort = mdls.GetModelType(self.modelid)
+        titlelabel = "Data set ({} {})".format(modelshort, mddat[1])
+        boxti = wx.StaticBox(self.panelsettings, label=titlelabel)
+        sizerti = wx.StaticBoxSizer(boxti, wx.VERTICAL)
+        sizerti.SetMinSize((horizontalsize, -1))
+        self.tabtitle = wx.TextCtrl(self.panelsettings, value="", 
+                                    size=(horizontalsize-20, -1))
+        self.Bind(wx.EVT_TEXT, self.OnTitleChanged, self.tabtitle)
+        sizerti.Add(self.tabtitle)                       
+        # Create StaticBoxSizer
+        box1, check, spin = self.MakeStaticBoxSizer("Model parameters")
+        # Make the check boxes and spin-controls available everywhere
+        self.checkboxes = check
+        self.spincontrol = spin
+        #
+        # As of version 0.7.5 the units are displayed in human-readable
+        # form - the same way they appear in the Page info tool.
+        # 
+        labels, parameters = mdls.GetHumanReadableParms(self.modelid,
+                                                self.active_parms[1])
+        parameterstofit = self.active_parms[2]
+        # Set initial values given by user/programmer for Diffusion Model
+        for i in np.arange(len(labels)):
+            self.checkboxes[i].SetValue(parameterstofit[i]) 
+            self.spincontrol[i].SetValue(parameters[i])
+            self.spincontrol[i].increment()
+        # Put everything together
+        self.panelsettings.sizer = wx.BoxSizer(wx.VERTICAL)
+        self.panelsettings.sizer.Add(sizerti)
+        self.panelsettings.sizer.Add(box1)
+        # Add button "Apply" and "Set range"
+        horzs = wx.BoxSizer(wx.HORIZONTAL)
+        buttonapply = wx.Button(self.panelsettings, label="Apply")
+        self.Bind(wx.EVT_BUTTON, self.PlotAll, buttonapply)
+        horzs.Add(buttonapply)
+        buttonrange = wx.Button(self.panelsettings, label="Set range")
+        self.Bind(wx.EVT_BUTTON, self.OnSetRange, buttonrange)
+        horzs.Add(buttonrange)
+        box1.Add(horzs)
+        # Set horizontal size
+        box1.SetMinSize((horizontalsize, -1))
+        ## More info
+        normbox = wx.StaticBox(self.panelsettings, label="Amplitude corrections")
+        miscsizer = wx.StaticBoxSizer(normbox, wx.VERTICAL)
+        miscsizer.SetMinSize((horizontalsize, -1))
+        # Type of normalization
+        bgtex = wx.StaticText(self.panelsettings, label="Background correction")
+        miscsizer.Add(bgtex)
+        bgnorm = wx.ComboBox(self.panelsettings)
+        self.Bind(wx.EVT_COMBOBOX, self.PlotAll, bgnorm)
+        miscsizer.Add(bgnorm)
+        ## Normalize to n?
+        textnor = wx.StaticText(self.panelsettings, label="Plot normalization")
+        miscsizer.Add(textnor)
+        normtoNDropdown = wx.ComboBox(self.panelsettings)
+        self.Bind(wx.EVT_COMBOBOX, self.PlotAll, normtoNDropdown)
+        miscsizer.Add(normtoNDropdown)
+        self.AmplitudeInfo = [ bgnorm, bgtex, normtoNDropdown, textnor]
+        self.panelsettings.sizer.Add(miscsizer)
+        ## Add fitting Box
+        fitbox = wx.StaticBox(self.panelsettings, label="Fitting options")
+        fitsizer = wx.StaticBoxSizer(fitbox, wx.VERTICAL)
+        fitsizer.SetMinSize((horizontalsize, -1))
+        # Add a checkbox for weighted fitting
+        weightedfitdrop = wx.ComboBox(self.panelsettings)
+        self.weightlist = ["No weights", "Spline (5 knots)", "Model function"]
+        weightedfitdrop.SetItems(self.weightlist)
+        weightedfitdrop.SetSelection(0)
+        fitsizer.Add(weightedfitdrop)
+        # Fit_WeightedFitCheck() enables or disables the variance part
+        weightedfitdrop.Bind(wx.EVT_COMBOBOX, self.Fit_WeightedFitCheck)
+        # Add the variance part.
+        # In order to do a weighted fit, we need to calculate the variance
+        # at each point of the experimental data array.
+        # In order to do that, we need to know how many data points from left
+        # and right of the interesting data point we want to include in that
+        # calculation.
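+        # Illustrative sketch (not the actual fitting code, which
+        # presumably lives in leastsquaresfit.py): with j = 3, the
+        # variance at index i would be computed from the 2j+1 = 7
+        # surrounding residuals, e.g.
+        #   vari = np.var(dataexp[i-3:i+4, 1] - datacorr[i-3:i+4, 1])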
+        fittext = wx.StaticText(self.panelsettings, 
+                                label="Calculation of the variance")
+        fitsizer.Add(fittext)
+        fittext2 = wx.StaticText(self.panelsettings, 
+                                 label="from 2j+1 data points")
+        fitsizer.Add(fittext2)
+        fitsizerspin = wx.BoxSizer(wx.HORIZONTAL)
+        fittextvar = wx.StaticText(self.panelsettings, label="j = ")
+        fitspin = wx.SpinCtrl(self.panelsettings, -1, initial=3, min=1, max=100)
+        fitsizerspin.Add(fittextvar)
+        fitsizerspin.Add(fitspin)
+        fitsizer.Add(fitsizerspin)
+        # Add button "Fit"
+        buttonfit = wx.Button(self.panelsettings, label="Fit")
+        self.Bind(wx.EVT_BUTTON, self.Fit_function, buttonfit)
+        fitsizer.Add(buttonfit)
+        
+        self.panelsettings.sizer.Add(fitsizer)
+        # Squeeze everything into the sizer
+        self.panelsettings.SetSizer(self.panelsettings.sizer)
+        # This is also necessary in Windows
+        self.panelsettings.Layout()
+        self.panelsettings.Show()
+        # Make all the stuff available for everyone
+        self.Fitbox = [ fitbox, weightedfitdrop, fittext, fittext2, fittextvar,
+                        fitspin, buttonfit ]
+        # Disable Fitting since no data has been loaded yet
+        for element in self.Fitbox:
+            element.Disable()
+        x = self.panelsettings.GetSize()[0]
+        y = self.parent.GetSize()[1] - 33
+        self.parent.SetSize((x,y))
+        self.parent.Layout()
+
diff --git a/src/plotting.py b/src/plotting.py
new file mode 100644
index 0000000..fdfd742
--- /dev/null
+++ b/src/plotting.py
@@ -0,0 +1,457 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module plotting
+    Everything about plotting with matplotlib is located here.
+    Be sure to install texlive-science and texlive-math-extra
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import codecs
+import numpy as np
+import matplotlib
+# Catch warnings that matplotlib may emit when the backend is set below
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    matplotlib.use('WXAgg') # Tells matplotlib to use WxWidgets for dialogs
+import matplotlib.gridspec as gridspec
+import matplotlib.pyplot as plt
+# Text rendering with matplotlib
+from matplotlib import rcParams
+from matplotlib.backends.backend_wx import NavigationToolbar2Wx #We hack this
+## In edclasses, we edited the wxWidgets version of the NavigationToolbar2Wx.
+## This hack enables us to remember directories.
+# import edclasses
+# NavigationToolbar2Wx = edclasses.NavigationToolbar2Wx
+import os
+import sys
+import unicodedata
+
+# For finding latex tools
+from misc import findprogram
+import models as mdls
+
+
+def greek2tex(char):
+    """ Converts greek UTF-8 letters to latex """
+    decchar = codecs.decode(char, "UTF-8")
+    repres = unicodedata.name(decchar).split(" ")
+    # e.g. "GREEK SMALL LETTER ALPHA"
+    if repres[0] == "GREEK" and len(repres) == 4:
+        letter = repres[3].lower()
+        if repres[1] != "SMALL":
+            letter = letter[0].capitalize() + letter[1:]
+        return "\\"+letter
+    else:
+        return char
+
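+# Usage sketch (byte strings assumed UTF-8, Python 2):
+#   greek2tex("α") -> "\alpha"
+#   greek2tex("Δ") -> "\Delta"   (capitalized, since it is not SMALL)
+#   greek2tex("x") -> "x"        (non-greek characters pass through)
+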
+
+def escapechars(string):
+    """ For latex output, some characters have to be escaped with a "\\" """
+    string = codecs.decode(string, "UTF-8")
+    escapechars = ["#", "$", "%", "&", "~", "_", "\\", "{", "}"] 
+    retstr = ur""
+    for char in string:
+        if char in escapechars:
+            retstr += "\\"
+            retstr += char
+        elif char == "^":
+            # Make a hat in latex without $$?
+            retstr += "$\widehat{~}$"
+        else:
+            retstr += char
+    return retstr
+
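+# For example, escapechars("100%_test") yields "100\%\_test", while a
+# caret "^" is rendered via the $\widehat{~}$ construct.
+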
+
+def latexmath(string):
+    """ Format given parameters to nice latex. """
+    string = codecs.decode(string, "UTF-8")
+    unicodechars = dict()
+    #unicodechars[codecs.decode("τ", "UTF-8")] = r"\tau"
+    #unicodechars[codecs.decode("µ", "UTF-8")] = r"\mu"
+    unicodechars[codecs.decode("²", "UTF-8")] = r"^2"
+    unicodechars[codecs.decode("³", "UTF-8")] = r"^3"
+    unicodechars[codecs.decode("₁", "UTF-8")] = r"_1"
+    unicodechars[codecs.decode("₂", "UTF-8")] = r"_2"
+    unicodechars[codecs.decode("₀", "UTF-8")] = r"_0"
+    #unicodechars[codecs.decode("α", "UTF-8")] = r"\alpha"
+    # We need lambda in here, because the Unicode standard names it "LAMDA".
+    unicodechars[codecs.decode("λ", "UTF-8")] = r"\lambda"
+    #unicodechars[codecs.decode("η", "UTF-8")] = r'\eta'
+    items = string.split(" ", 1)
+    a = items[0]
+    if len(items) > 1:
+        b = items[1]
+    else:
+        b = ""
+    anew = r""
+    for char in a:
+        if char in unicodechars.keys():
+            anew += unicodechars[char]
+        elif char != greek2tex(char):
+            anew += greek2tex(char)
+        else:
+            anew += char
+    # lower case
+    lcitems = anew.split("_",1)
+    if len(lcitems) > 1:
+        anew = lcitems[0]+"_{\\text{"+lcitems[1]+"}}"
+    return anew + r" \hspace{0.3em} \mathrm{"+b+r"}"
+
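+# Example with plain ASCII input: latexmath("n [1]") gives
+# "n \hspace{0.3em} \mathrm{[1]}"; with an underscore, "n_1 [1]" becomes
+# "n_{\text{1}} \hspace{0.3em} \mathrm{[1]}".
+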
+
+def savePlotCorrelation(parent, dirname, Page, uselatex=False,
+                        verbose=False, show_weights=True):
+    """ Save plot from Page into file        
+        Parameters:
+        *parent*    the parent window
+        *dirname*   directory to set on saving
+        *Page*      Page containing all variables
+        *uselatex*  Whether to use latex for plotting or not.
+        This function uses a hack in misc.py to change the function
+        for saving the final figure. We want to save in the directory
+        PyCorrFit is working in, with the tab title as the file name.
+    """
+    # Close all other plots before commencing
+    try:
+        plt.close()
+    except:
+        pass
+    # As of version 0.7.8 the user may export data normalized to a certain
+    # parameter.
+    if Page.dataexp is not None:
+        dataexp = 1*Page.dataexp
+        resid = 1*Page.resid
+        dataexp[:,1] *= Page.normfactor
+        resid[:,1] *= Page.normfactor
+    else:
+        dataexp = Page.dataexp
+        resid = Page.resid
+    fit = 1*Page.datacorr
+    fit[:,1] *= Page.normfactor
+    weights = Page.weights_plot_fill_area
+    tabtitle = Page.tabtitle.GetValue()
+    #fitlabel = ur"Fit model: "+str(mdls.modeldict[Page.modelid][0])
+    fitlabel = Page.modelname
+    labelweights = ur"Weights of fit"
+    labels, parms = mdls.GetHumanReadableParms(Page.modelid,
+                                               Page.active_parms[1])
+    # Error parameters with nice look
+    errparmsblank = Page.parmoptim_error
+    if errparmsblank is None:
+        errparms = None
+    else:
+        errparms = dict()
+        for key in errparmsblank.keys():
+            newkey, newparm = mdls.GetHumanReadableParameterDict(Page.modelid,
+                                                        key, errparmsblank[key])
+            errparms[newkey] = newparm
+    parmids = np.where(Page.active_parms[2])[0]
+    labels = np.array(labels)[parmids]
+    parms = np.array(parms)[parmids]
+    if dataexp is None:
+        if tabtitle.strip() == "":
+            fitlabel = Page.modelname
+        else:
+            fitlabel = tabtitle
+    else:
+        if tabtitle.strip() == "":
+            tabtitle = "page"+str(Page.counter).strip().strip(":")
+    if Page.normparm is not None:
+        fitlabel += ur", normalized to "+Page.active_parms[0][Page.normparm]
+
+    ## Check if we can use latex for plotting:
+    (r1, path) = findprogram("latex")
+    (r2, path) = findprogram("dvipng")
+    # Ghostscript
+    (r31, path) = findprogram("gs")
+    (r32, path) = findprogram("mgs") # from miktex
+    r3 = max(r31,r32)
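+    # All three tools are required for latex rendering; findprogram
+    # presumably returns a nonzero first value when the program is found.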
+    if r1+r2+r3 < 3:
+        uselatex = False
+    if uselatex == True:
+        rcParams['text.usetex']=True
+        rcParams['text.latex.unicode']=True
+        rcParams['font.family']='serif'
+        rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"] 
+        fitlabel = ur"{\normalsize "+escapechars(fitlabel)+r"}"
+        tabtitle = ur"{\normalsize "+escapechars(tabtitle)+r"}"
+        labelweights = ur"{\normalsize "+escapechars(labelweights)+r"}"
+    else:
+        rcParams['text.usetex']=False
+    # create plot
+    # plt.plot(x, y, '.', label = 'original data', markersize=5)
+    fig=plt.figure()
+    if resid is not None:
+        gs = gridspec.GridSpec(2, 1, height_ratios=[5,1])
+        ax = plt.subplot(gs[0])
+    else:
+        ax = plt.subplot(111)
+        #    ax = plt.axes()
+    ax.semilogx()
+    if dataexp is not None:
+        plt.plot(dataexp[:,0], dataexp[:,1], '-', color="darkgrey",
+                 label=tabtitle)
+    else:
+        plt.xlabel(r'lag time $\tau$ [ms]')
+    # Plotting with error bars is very ugly if you have a lot of
+    # data points.
+    # We will use fill_between instead.
+    #plt.errorbar(fit[:,0], fit[:,1], yerr=weights, fmt='-',
+    #             label = fitlabel, lw=2.5, color="blue")
+    plt.plot(fit[:,0], fit[:,1], '-', label = fitlabel, lw=2.5,
+             color="blue")    
+    if weights is not None and show_weights is True:
+        plt.fill_between(weights[0][:,0],weights[0][:,1],weights[1][:,1],
+                         color='cyan')
+        # fake legend:
+        p = plt.Rectangle((0, 0), 0, 0, color='cyan',
+                          label=labelweights)
+        ax.add_patch(p)
+    plt.ylabel('correlation')
+    if dataexp is not None:
+        mind = np.min([ dataexp[:,1], fit[:,1]])
+        maxd = np.max([ dataexp[:,1], fit[:,1]])
+    else:
+        mind = np.min(fit[:,1])
+        maxd = np.max(fit[:,1])
+    ymin = mind - (maxd - mind)/20.
+    ymax = maxd + (maxd - mind)/20.
+    ax.set_ylim(bottom=ymin, top=ymax)
+    xmin = np.min(fit[:,0])
+    xmax = np.max(fit[:,0])
+    ax.set_xlim(xmin, xmax)
+    # Add some nice text:
+    if uselatex == True and len(parms) != 0:
+        text = r""
+        text += r'\['            #every line is a separate raw string...
+        text += r'\begin{split}' # ...but they are all concatenated
+                                 # by the interpreter :-)
+        for i in np.arange(len(parms)):
+            text += r' '+latexmath(labels[i])+r" &= " + str(parms[i]) +r' \\ '
+        if errparms is not None:
+            keys = errparms.keys()
+            keys.sort()
+            for key in keys:
+                text += r' \Delta '+latexmath(key)+r" &= " + str(errparms[key]) +r' \\ '
+        text += r' \end{split} '
+        text += r' \] '
+    else:
+        text = ur""
+        for i in np.arange(len(parms)):
+            text += labels[i]+" = "+str(parms[i])+"\n"
+        if errparms is not None:
+            keys = errparms.keys()
+            keys.sort()
+            for key in keys:
+                text += "Err "+key+" = " + str(errparms[key]) +"\n"
+    # Add some more stuff to the text and append data to a .txt file
+    #text = Auswert(parmname, parmoptim, text, savename)
+    plt.legend()
+    logmax = np.log10(xmax)
+    logmin = np.log10(xmin)
+    logtext = 0.6*(logmax-logmin)+logmin
+    xt = 10**(logtext)
+    yt = 0.3*ymax
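+    # i.e. the annotation sits at 60% of the logarithmic x-range and at
+    # 30% of the upper y-limit.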
+    plt.text(xt,yt,text, size=12)
+    if resid is not None:
+        ax2 = plt.subplot(gs[1])
+        #ax2 = plt.axes()
+        ax2.semilogx()
+        if Page.weighted_fit_was_performed:
+            if uselatex == True:
+                lb = r"\newline \indent "
+            else:
+                lb = "\n"
+            yLabelRes = "weighted "+ lb +"residuals"
+        else:
+            yLabelRes = "residuals"
+        plt.plot(resid[:,0], resid[:,1], '-', color="darkgrey", label=yLabelRes)
+        plt.xlabel(r'lag time $\tau$ [ms]')
+        plt.ylabel(yLabelRes, multialignment='center')
+        minx = np.min(resid[:,0])
+        maxx = np.max(resid[:,0])
+        miny = np.min(resid[:,1])
+        maxy = np.max(resid[:,1])
+        ax2.set_xlim(minx, maxx)
+        maxy = max(abs(maxy), abs(miny))
+        ax2.set_ylim(-maxy, maxy)
+        ticks = ax2.get_yticks()
+        ax2.set_yticks([ticks[0], ticks[-1], 0])
+    ## Hack
+    # We need this for hacking. See edclasses.
+    fig.canvas.HACK_parent = parent
+    fig.canvas.HACK_fig = fig
+    fig.canvas.HACK_Page = Page
+    fig.canvas.HACK_append = ""
+    if verbose == True:
+        plt.show()
+    else:
+        # If WXAgg is not used for some reason, then our hack does not work
+        # and we must use e.g. TkAgg
+        try:
+            fig.canvas.toolbar.save()
+        except AttributeError:
+            fig.canvas.toolbar.save_figure()
+        # Close all other plots before commencing
+        try:
+            plt.close()
+        except:
+            pass
+
+
+def savePlotTrace(parent, dirname, Page, uselatex=False, verbose=False):
+    """ Save trace plot from Page into file        
+        Parameters:
+        *parent*    the parent window
+        *dirname*   directory to set on saving
+        *Page*      Page containing all variables
+        *uselatex*  Whether to use latex for plotting or not.
+        This function uses a hack in misc.py to change the function
+        for saving the final figure. We want to save in the directory
+        PyCorrFit is working in, with the tab title as the file name.
+    """
+    # Close all other plots before commencing
+    try:
+        plt.close()
+    except:
+        pass
+    # Trace must be displayed in s
+    timefactor = 1e-3
+    tabtitle = Page.tabtitle.GetValue()
+    if tabtitle.strip() == "":
+        tabtitle = "page"+str(Page.counter).strip().strip(":")
+    # Intensity trace in kHz may stay the same
+    if Page.trace is not None:
+        # Set trace
+        traces = [Page.trace]
+        labels = [tabtitle]
+    elif Page.tracecc is not None:
+        # We have some cross-correlation here. Two traces.
+        traces = Page.tracecc
+        labels = [tabtitle+" A", tabtitle+" B"]
+    else:
+        return
+    ## Check if we can use latex for plotting:
+    (r1, path) = findprogram("latex")
+    (r2, path) = findprogram("dvipng")
+    # Ghostscript
+    (r31, path) = findprogram("gs")
+    (r32, path) = findprogram("mgs") # from miktex
+    r3 = max(r31,r32)
+    if r1+r2+r3 < 3:
+        uselatex = False
+    if uselatex == True:
+        rcParams['text.usetex']=True
+        rcParams['text.latex.unicode']=True
+        rcParams['font.family']='serif'
+        rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"] 
+        for i in np.arange(len(labels)):
+            labels[i] = ur"{\normalsize "+escapechars(labels[i])+r"}"
+    else:
+        rcParams['text.usetex']=False
+    # create plot
+    # plt.plot(x, y, '.', label = 'original data', markersize=5)
+    fig=plt.figure()
+    ax = plt.subplot(111)
+    for i in np.arange(len(traces)):
+        # Columns
+        time = traces[i][:,0]*timefactor
+        intensity = traces[i][:,1]
+        plt.plot(time, intensity, '-', 
+                 label = labels[i],
+                 lw=1)
+    plt.ylabel('count rate [kHz]')
+    plt.xlabel('time [s]')
+    # Add some more stuff to the text and append data to a .txt file
+    plt.legend()
+    ## Hack
+    # We need this for hacking. See edclasses.
+    fig.canvas.HACK_parent = parent
+    fig.canvas.HACK_fig = fig
+    fig.canvas.HACK_Page = Page
+    fig.canvas.HACK_append = "_trace"
+    if verbose == True:
+        plt.show()
+    else:
+        # If WXAgg is not used for some reason, then our hack does not work
+        # and we must use e.g. TkAgg
+        try:
+            fig.canvas.toolbar.save()
+        except AttributeError:
+            fig.canvas.toolbar.save_figure()
+        # Close all other plots before commencing
+        try:
+            plt.close()
+        except:
+            pass
+
+
+def savePlotSingle(name, x, dataexp, datafit, dirname = ".", uselatex=False):
+    """ CURRENTLY THIS FUNCTION IS NOT USED BY PYCORRFIT
+        Show log plot of correlation function without residuals. 
+        Parameters:
+        *name*      name of curve in legend
+        *x*         tau-values to plot
+        *dataexp*   correlation data to plot
+        *datafit*   fitted curve to correlation data
+        *dirname*   initial directory for dialog (not used here)
+        *uselatex*  use latex for plotting
+        This function uses a hack in misc.py to change the function
+        for saving the final figure. We want to save in the directory
+        PyCorrFit is working in, with the tab title as the file name.
+    """
+    # This is a dirty hack to make sure no plots are opened
+    try:
+        plt.close()
+    except:
+        pass
+    ## Check if we can use latex for plotting:
+    (r1, path) = findprogram("latex")
+    (r2, path) = findprogram("dvipng")
+    # Ghostscript
+    (r31, path) = findprogram("gs")
+    (r32, path) = findprogram("mgs") # from miktex
+    r3 = max(r31,r32)
+    if r1+r2+r3 < 3:
+        uselatex = False
+    if uselatex == True:
+        rcParams['text.usetex']=True
+        rcParams['text.latex.unicode']=True
+        rcParams['font.family']='serif'
+        rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"] 
+        name = ur"{\normalsize "+escapechars(name)+r"}"
+    else:
+        rcParams['text.usetex']=False
+    # create plot
+    # plt.plot(x, y, '.', label = 'original data', markersize=5)
+    fig=plt.figure()
+    ax = plt.subplot(111)
+    #    ax = plt.axes()
+    ax.semilogx()
+    plt.plot(x, dataexp,'-', color="darkgrey")
+    plt.xlabel(r'lag time $\tau$ [ms]')
+    plt.plot(x, datafit, '-', label = name,
+             lw=2.5, color="blue")
+    plt.ylabel('correlation')
+    mind = np.min([ dataexp, datafit])
+    maxd = np.max([ dataexp, datafit])
+    ymin = mind - (maxd - mind)/20.
+    ymax = maxd + (maxd - mind)/20.
+    ax.set_ylim(bottom=ymin, top=ymax)
+    xmin = np.min(x)
+    xmax = np.max(x)
+    ax.set_xlim(xmin, xmax)
+    # Add some more stuff to the text and append data to a .txt file
+    #text = Auswert(parmname, parmoptim, text, savename)
+    plt.legend()
+    plt.show()
diff --git a/src/readfiles/__init__.py b/src/readfiles/__init__.py
new file mode 100644
index 0000000..6dea88b
--- /dev/null
+++ b/src/readfiles/__init__.py
@@ -0,0 +1,226 @@
+# -*- coding: utf-8 -*-
+
+# This file is necessary for this folder to become a package that can be
+# imported by PyCorrFit.
+
+import csv
+import numpy as np
+import os
+import tempfile
+import yaml
+import zipfile
+
+# To add a filetype add it here and in the
+# dictionaries at the end of this file.
+from read_ASC_ALV_6000 import openASC
+from read_CSV_PyCorrFit import openCSV
+from read_SIN_correlator_com import openSIN
+from read_FCS_Confocor3 import openFCS
+from read_mat_ries import openMAT
+
+
+def AddAllWildcard(Dictionary):
+    wildcard = ""
+    keys = Dictionary.keys()
+    N = len(keys)
+    i = 0
+    for key in keys:
+        newwc = key.split("|")[1]
+        wildcard = wildcard + newwc
+        i = i + 1
+        if i != N:
+            wildcard = wildcard + ";"
+
+    Dictionary[Allsupfilesstring+"|"+wildcard] = openAny
+    return Dictionary
+
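+# For instance, a dictionary with the single entry
+#   "PyCorrFit (*.csv)|*.csv" : openCSV
+# gains the additional key "All supported files|*.csv", which maps to
+# the generic opener openAny defined below.
+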
+
+# To increase user comfort, we now define a generic file opener that
+# knows how to open all supported file types.
+def openAny(dirname, filename):
+    """ Using the defined Filetypes and BGFiletypes, open the given file """
+    wildcard = filename.split(".")[-1]
+    for key in Filetypes.keys():
+        # Split off the wildcards of this entry
+        wildcardstring = key.split("|")
+        # Skip the "All supported files" entry to avoid infinite recursion
+        if wildcardstring[0] != Allsupfilesstring:
+            otherwcs = wildcardstring[1].split(";")
+            for string in otherwcs:
+                if string[-3:] == wildcard:
+                    return Filetypes[key](dirname, filename)
+    # If we could not find the correct function in Filetypes, try again
+    # in BGFiletypes:
+    return openAnyBG(dirname, filename)
+    
+    ## For convenience in openZIP
+    #return None # already in openAnyBG
+
+
+def openAnyBG(dirname, filename):
+    wildcard = filename.split(".")[-1]
+    for key in BGFiletypes.keys():
+        wildcardstring = key.split("|")
+        # We do not want to recurse
+        if wildcardstring[0] != Allsupfilesstring:
+            otherwcs = wildcardstring[1].split(";")
+            for string in otherwcs:
+                if string[-3:] == wildcard:
+                    return BGFiletypes[key](dirname, filename)
+    # For convenience in openZIP
+    return None
+    
+
+def openZIP(dirname, filename):
+    """ 
+        Get everything inside a .zip file that could be an FCS curve.
+        Will use any wildcard in Filetypes dictionary.
+    """
+    #    It's a rather lengthy import of the session file. The code is copied
+    #    from openfile.OpenSession. The usual zip file packed curves are
+    #    imported on the few code lines after the else statement.
+
+    ## Open the archive:
+    Arc = zipfile.ZipFile(os.path.join(dirname, filename), mode='r')
+    Correlations = list() # Correlation information
+    Curvelist = list()    # Type information
+    Filelist = list()     # List of filenames corresponding to *Curvelist*
+    Trace = list()        # Corresponding traces
+    ## First test, if we are opening a session file
+    fcsfitwildcard = ".fcsfit-session.zip"
+    if len(filename)>19 and filename[-19:] == fcsfitwildcard:
+        # Get the yaml parms dump:
+        yamlfile = Arc.open("Parameters.yaml")
+        # Parms: Fitting and drawing parameters of the correlation curve
+        # The *yamlfile* is responsible for the order of the Pages #i.
+        # The parameters are actually useless to us right now.
+        Parms = yaml.safe_load(yamlfile)
+        yamlfile.close()
+        # Get the correlation arrays
+        ImportedNum = list()
+        for i in np.arange(len(Parms)):
+            # The *number* is used to identify the correct file
+            number = str(Parms[i][0])
+            expfilename = "data"+number[1:len(number)-2]+".csv"
+            expfile = Arc.open(expfilename, 'r')
+            readdata = csv.reader(expfile, delimiter=',')
+            dataexp = list()
+            tau = list()
+            if str(readdata.next()[0]) == "# tau only":
+                # We do not have a curve here
+                pass
+            else:
+                Filelist.append(filename+"/#"+number[1:len(number)-2])
+                for row in readdata:
+                    # Exclude commentaries
+                    if (str(row[0])[0:1] != '#'):
+                        dataexp.append((float(row[0]), float(row[1])))
+                dataexp = np.array(dataexp)
+                Correlations.append(dataexp)
+                ImportedNum.append(i)
+            del readdata
+            expfile.close()
+        # Get the Traces
+        for i in ImportedNum:   
+            # Make sure we only import those traces that had a corresponding
+            # correlation curve. (ImportedNum)
+            #
+            # The *number* is used to identify the correct file
+            number = str(Parms[i][0])
+            # Find out, if we have a cross correlation data type
+            IsCross = False
+            try:
+                IsCross = Parms[i][7]
+            except IndexError:
+                # No Cross correlation
+                pass
+            if IsCross is False:
+                tracefilenames = ["trace"+number[1:len(number)-2]+".csv"]
+                Curvelist.append("AC")
+            else:
+                # Cross correlation uses two traces
+                tracefilenames = ["trace"+number[1:len(number)-2]+"A.csv",
+                                  "trace"+number[1:len(number)-2]+"B.csv" ]
+                Curvelist.append("CC")
+            thistrace = list()
+            for tracefilename in tracefilenames:
+                try:
+                    Arc.getinfo(tracefilename)
+                except KeyError:
+                    # No correlation curve, but add a None
+                    pass
+                else:
+                    tracefile = Arc.open(tracefilename, 'r')
+                    traceread = csv.reader(tracefile, delimiter=',')
+                    singletrace = list()
+                    for row in traceread:
+                        # Exclude comment lines
+                        if (str(row[0])[0:1] != '#'):
+                            singletrace.append((float(row[0]), float(row[1])))
+                    singletrace = np.array(singletrace)
+                    thistrace.append(singletrace)
+                    del traceread
+                    del singletrace
+                    tracefile.close()
+            if len(thistrace) == 1:
+                Trace.append(thistrace[0])
+            elif len(thistrace) == 2:
+                Trace.append(thistrace)
+            else:
+                Trace.append(None)
+    else:
+        # We are not importing from a session but from a zip file that
+        # probably contains a mix of all filetypes we know. This works
+        # recursively (e.g. a zip file inside a zip file).
+        allfiles = Arc.namelist()
+        # Extract data to temporary folder
+        tempdir = tempfile.mkdtemp()
+        for afile in allfiles:
+            Arc.extract(afile, path=tempdir)
+            ReturnValue = openAny(tempdir, afile)
+            if ReturnValue is not None:
+                cs = ReturnValue["Correlation"]
+                ts = ReturnValue["Trace"]
+                ls = ReturnValue["Type"]
+                fs = ReturnValue["Filename"]
+                for i in np.arange(len(cs)):
+                    Correlations.append(cs[i])
+                    Trace.append(ts[i])
+                    Curvelist.append(ls[i])
+                    Filelist.append(filename+"/"+fs[i])
+            # Delete the extracted file
+            os.remove(os.path.join(tempdir,afile))
+        os.removedirs(tempdir)
+    Arc.close()
+    dictionary = dict()
+    dictionary["Correlation"] = Correlations
+    dictionary["Trace"] = Trace
+    dictionary["Type"] = Curvelist
+    dictionary["Filename"] = Filelist
+    return dictionary
+
+
+# The string that is shown when opening all supported files
+Allsupfilesstring = "All supported files"
+
+# Dictionary with filetypes that we can open
+# The wildcards point to the appropriate functions.
+Filetypes = { "Correlator.com (*.SIN)|*.SIN;*.sin" : openSIN,
+              "Correlator ALV-6000 (*.ASC)|*.ASC" : openASC,
+              "PyCorrFit (*.csv)|*.csv" : openCSV,
+              "Matlab 'Ries (*.mat)|*.mat" : openMAT,
+              "Confocor3 (*.fcs)|*.fcs" : openFCS,
+              "zip files (*.zip)|*.zip" : openZIP
+            }
+# For user comfort, add "All supported files" wildcard:
+Filetypes = AddAllWildcard(Filetypes)
+
+
+# Dictionary with filetypes we can open that have intensity traces in them.
+BGFiletypes = { "Correlator.com (*.SIN)|*.SIN;*.sin" : openSIN,
+                "Correlator ALV-6000 (*.ASC)|*.ASC" : openASC,
+                "PyCorrFit (*.csv)|*.csv" : openCSV,
+                "Confocor3 (*.fcs)|*.fcs" : openFCS,
+                "zip files (*.zip)|*.zip" : openZIP
+              }
+BGFiletypes = AddAllWildcard(BGFiletypes)
diff --git a/src/readfiles/read_ASC_ALV_6000.py b/src/readfiles/read_ASC_ALV_6000.py
new file mode 100755
index 0000000..67fb0c5
--- /dev/null
+++ b/src/readfiles/read_ASC_ALV_6000.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+import os
+import csv
+import numpy as np
+
+
+def openASC(dirname, filename):
+    """ Read data from a .ASC file, created by
+        some ALV-6000 correlator.
+
+            ALV-6000/E-WIN Data
+            Date :	"2/20/2012"
+            ...
+            "Correlation"
+              1.25000E-004	  3.00195E-001
+              2.50000E-004	  1.13065E-001
+              3.75000E-004	  7.60367E-002
+              5.00000E-004	  6.29926E-002
+              6.25000E-004	  5.34678E-002
+              7.50000E-004	  4.11506E-002
+              8.75000E-004	  4.36752E-002
+              1.00000E-003	  4.63146E-002
+              1.12500E-003	  3.78226E-002
+            ...
+              3.35544E+004	 -2.05799E-006
+              3.77487E+004	  4.09032E-006
+              4.19430E+004	  4.26295E-006
+              4.61373E+004	  1.40265E-005
+              5.03316E+004	  1.61766E-005
+              5.45259E+004	  2.19541E-005
+              5.87202E+004	  3.26527E-005
+              6.29145E+004	  2.72920E-005
+
+            "Count Rate"
+               1.17188	      26.77194
+               2.34375	      26.85045
+               3.51563	      27.06382
+               4.68750	      26.97932
+               5.85938	      26.73694
+               7.03125	      27.11332
+               8.20313	      26.81376
+               9.37500	      26.82741
+              10.54688	      26.88801
+              11.71875	      27.09710
+              12.89063	      27.13209
+              14.06250	      27.02200
+              15.23438	      26.95287
+              16.40625	      26.75657
+              17.57813	      26.43056
+            ...
+             294.14063	      27.22597
+             295.31250	      26.40581
+             296.48438	      26.33497
+             297.65625	      25.96457
+             298.82813	      26.71902
+
+        1. We are interested in the "Correlation" section,
+        where the first column denotes tau in ms and the second column
+        the correlation signal. Values are separated by a tabulator "\t"
+        (sometimes also by spaces).
+
+        2. We are also interested in the "Count Rate" section. Here the times
+        are saved as seconds and not ms like above.
+
+        3. There is some kind of mode where the ALV exports five runs at a
+        time and averages them. The sole correlation data is stored in the
+        file, but the trace is only stored as average or something.
+        So I would not recommend this. However, I added support for this.
+        PyCorrFit then only imports the average data.
+         ~ Paul, 2012-02-20
+        Correlation data starts at "Correlation (Multi, Averaged)".
+
+        Returns:
+        [0]:
+         An array with tuples containing two elements:
+         1st: tau in ms
+         2nd: corresponding correlation signal
+        [1]:
+         Intensity trace:
+         1st: time in ms
+         2nd: Trace in kHz
+        [2]:
+         An array with N elements, indicating how many curves we are opening
+         from the file. Elements can be names and must be convertible to
+         strings.
+    """
+    openfile = open(os.path.join(dirname, filename), 'r')
+    Alldata = openfile.readlines()
+    ## Correlation function
+    # Find out where the correlation function is
+    for i in np.arange(len(Alldata)):
+        if Alldata[i][0:13] == '"Correlation"':
+            # Start of correlation function
+            StartC = i+1
+        if Alldata[i][0:31] == '"Correlation (Multi, Averaged)"':
+            # Start of AVERAGED correlation function !!!
+            # There are several curves now.
+            StartC = i+2
+        if Alldata[i][0:12] == '"Count Rate"':
+            # End of correlation function
+            EndC = i-2
+            # Start of trace (goes until end of file)
+            StartT = i+1
+    EndT = len(Alldata)
+    # Get the header
+    Namedata = Alldata[StartC-1:StartC]
+    ## Define *curvelist*
+    curvelist = csv.reader(Namedata, delimiter='\t').next()
+    if len(curvelist) <= 2:
+        # Then we have just one single correlation curve
+        curvelist = [""]
+    else:
+        # We have a number of correlation curves. We need to specify
+        # names for them. We take these names from the headings.
+        # The first column (lag times) does not belong in the list
+        curvelist.remove(curvelist[0])
+        # Last column is empty
+        curvelist.remove(curvelist[-1])
+    ## Correlation function
+    Truedata = Alldata[StartC:EndC]
+    readdata = csv.reader(Truedata, delimiter='\t')
+    data = list()
+    # Add lists to *data* according to the length of *curvelist*
+    for item in curvelist:
+        data.append(list())
+    # Work through the rows in the read data
+    for row in readdata:
+        for i in np.arange(len(curvelist)):
+            data[i].append( (np.float(row[0]), np.float(row[i+1])) )
+    ## Trace
+    # Trace is stored in two columns
+    # 1st column: time [s]
+    # 2nd column: trace [kHz] 
+    # Get the trace
+    Tracedata = Alldata[StartT:EndT]
+    timefactor = 1000 # because we want ms instead of s
+    readtrace = csv.reader(Tracedata, delimiter='\t')
+    trace = list()
+    # Add lists to *trace* according to the length of *curvelist*
+    for item in curvelist:
+        trace.append(list())
+    # Work through the rows
+    for row in readtrace:
+        # tau in ms, corr-function
+        trace[0].append((np.float(row[0])*timefactor, np.float(row[1])))
+        for i in np.arange(len(curvelist)-1):
+            trace[i+1].append((np.float(row[0])*timefactor, 0))
+    # return as an array
+    openfile.close()
+    dictionary = dict()
+    dictionary["Correlation"] = np.array(data)
+    dictionary["Trace"] = np.array(trace)
+    dictionary["Type"] = curvelist
+    filelist = list()
+    for i in curvelist:
+        filelist.append(filename)
+    dictionary["Filename"] = filelist
+    return dictionary
diff --git a/src/readfiles/read_CSV_PyCorrFit.py b/src/readfiles/read_CSV_PyCorrFit.py
new file mode 100644
index 0000000..9305a94
--- /dev/null
+++ b/src/readfiles/read_CSV_PyCorrFit.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+import os
+import csv
+import numpy as np
+
+
+def openCSV(dirname, filename):
+    """ Read relevant data from a file looking like this:
+        [...]
+        # Comment
+        # Data type: Autocorrelation
+        [...]
+        1.000000e-006   3.052373e-001
+        1.020961e-006   3.052288e-001
+        1.042361e-006   3.052201e-001
+        1.064209e-006   3.052113e-001
+        1.086516e-006   3.052023e-001
+        1.109290e-006   3.051931e-001
+        [...]
+        # BEGIN TRACE
+        [...]
+        10.852761   31.41818
+        12.058624   31.1271
+        13.264486   31.27305
+        14.470348   31.33442
+        15.676211   31.15861
+        16.882074   31.08564
+        18.087936   31.21335
+        [...]
+
+        Data type:
+        If Data type is "Cross-correlation", we will try to import
+        two traces after "# BEGIN SECOND TRACE"
+
+        1st section:
+         First column denotes tau in seconds and the second column the
+         correlation signal.
+        2nd section:
+         First column denotes tau in seconds and the second column the
+         intensity trace in kHz.
+
+
+        Returns:
+        1. A list with tuples containing two elements:
+           1st: tau in ms
+           2nd: corresponding correlation signal
+        2. The trace (or two traces for cross-correlation), if the file
+           contains any; None otherwise.
+        3. A list with one element, indicating that we are opening only
+           one correlation curve.
+    """
+    # Define what will happen to the file
+    timefactor = 1000 # because we want ms instead of s
+    csvfile = open(os.path.join(dirname, filename), 'r')
+    readdata = csv.reader(csvfile, delimiter=',')
+    data = list()
+    trace = None
+    traceA = None
+    DataType="AC" # May be changed
+    numtraces = 0
+    for row in readdata:
+        if len(row) == 0 or len(str(row[0]).strip()) == 0:
+            # Do nothing with empty/whitespace lines
+            pass
+            # Beware that the len(row) check has to be performed first
+            # (before len(str(row[0]).strip())). Otherwise an IndexError
+            # would be raised on empty rows.
+        elif str(row[0])[:12] == "# Type AC/CC":
+            corrtype = str(row[0])[12:].strip().strip(":").strip()
+            if corrtype[:17].lower() == "cross-correlation":
+                # We will later try to import a second trace
+                DataType="CC"
+                DataType += corrtype[17:].strip()
+            elif corrtype[0:15].lower() == "autocorrelation":
+                DataType="AC"
+                DataType += corrtype[15:].strip()         
+        elif str(row[0])[0:13].upper() == '# BEGIN TRACE':
+            # Correlation is over. We have a trace
+            corr = np.array(data)
+            data=list()
+            numtraces = 1
+        elif str(row[0])[0:20].upper() == '# BEGIN SECOND TRACE':
+            # First trace is over. We have a second trace
+            traceA = np.array(data)
+            data = list()
+            numtraces = 2
+        # Exclude comment lines
+        elif str(row[0])[0:1] != '#':
+            # Read the 1st section
+            # On Windows we had problems importing nan values that
+            # had some white-spaces around them. Therefore: strip()
+            ## As of version 0.7.8 we are supporting white space
+            ## separated values as well
+            if len(row) == 1:
+                row = row[0].split()
+            data.append((np.float(row[0].strip())*timefactor, 
+                         np.float(row[1].strip())))
+    # Collect the rest of the trace, if there is any:
+    rest = np.array(data)
+    if numtraces == 0:
+        corr = rest
+    elif numtraces >= 1:
+        trace = rest
+    del data
+    ## Remove any NaN numbers from the array
+    # Explanation:
+    # np.isnan(data)
+    #  finds the position of NaNs in the array (True positions); 2D array, bool
+    # any(1)
+    #  finds the rows that have True in them; 1D array, bool
+    # ~
+    #  negates them and is given as an argument (array type bool) to
+    #  select which items we want.
+    corr = corr[~np.isnan(corr).any(1)]
+    # Also check for infinities.
+    corr = corr[~np.isinf(corr).any(1)]
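+    # Example: np.array([[1., np.nan], [2., 3.]]) reduces to [[2., 3.]].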
+    csvfile.close()
+    Traces=list()
+    # Set correct trace data for import
+    if numtraces == 1 and DataType[:2] == "AC":
+        Traces.append(trace)
+    elif numtraces == 2 and DataType[:2] == "CC":
+        Traces.append([traceA, trace])
+    elif numtraces == 1 and DataType[:2] == "CC":
+        # Should not happen, but for convenience:
+        Traces.append([trace, trace])
+    else:
+        Traces.append(None)
+    dictionary = dict()
+    dictionary["Correlation"] = [corr]
+    dictionary["Trace"] = Traces
+    dictionary["Type"] = [DataType]
+    dictionary["Filename"] = [filename]
+    return dictionary
diff --git a/src/readfiles/read_FCS_Confocor3.py b/src/readfiles/read_FCS_Confocor3.py
new file mode 100644
index 0000000..36208df
--- /dev/null
+++ b/src/readfiles/read_FCS_Confocor3.py
@@ -0,0 +1,363 @@
+# -*- coding: utf-8 -*-
+""" 
+    This works with files from the Confocor2, Confocor3 (AIM) and files
+    created from the newer ZEN Software.
+"""
+import os
+import csv
+import numpy as np
+import warnings
+
+
+def openFCS(dirname, filename):
+    """ The AIM software can save data as multiple or single data files.
+        The type is identified by the first line of the .fcs file. """
+    openfile = open(os.path.join(dirname, filename), 'r')
+    identitystring = openfile.readline().strip()[:20]
+    openfile.close()
+    if identitystring == "Carl Zeiss ConfoCor3":
+        return openFCS_Multiple(dirname, filename)
+    else:
+        return openFCS_Single(dirname, filename)
+
+
+def openFCS_Multiple(dirname, filename):
+    """ Load data from Zeiss Confocor3
+        Data is imported sequentially from the file.
+        PyCorrFit will give each curve an id which corresponds to the position
+        of the curve in the .fcs file.
+    """
+    openfile = open(os.path.join(dirname, filename), 'r')
+    Alldata = openfile.readlines()
+    # Start progressing through the file. i is the line index.
+    # We are searching for "FcsDataSet" sections that contain
+    # all the information we want.
+    # index i for linenumber
+    i = 0
+    # A parameter to check whether we are in a "FcsDataSet" section
+    # and should import something
+    fcsset = False
+    # The names of the traces
+    aclist = list()     # All autocorrelation functions
+    cclist = list()     # All cross-correlation functions
+    # The intensity traces
+    traces = list()
+    # The correlation curves
+    ac_correlations = list()
+    cc_correlations = list()
+    while i <= len(Alldata)-1:
+        if Alldata[i].count("FcsDataSet") == 1:
+            # We are in a "FcsDataSet" section
+            fcsset = True
+            gottrace = False
+        if fcsset == True:
+            if Alldata[i].partition("=")[0].strip() == "Channel":
+                # Find out what type of correlation curve we have.
+                # Might be interesting to the user.
+                FCStype = Alldata[i].partition("=")[2].strip()
+                FoundType = False
+                for chnum in np.arange(4)+1:
+                    if FCStype == "Auto-correlation detector "+str(chnum):
+                        FoundType = "AC"+str(chnum)
+                        aclist.append(FoundType)
+                    elif FCStype == "Auto-correlation detector Meta"+str(chnum):
+                        FoundType = "AC"+str(chnum)
+                        aclist.append(FoundType)
+                    else:
+                        for ch2num in np.arange(4)+1:
+                            if FCStype == "Cross-correlation detector "+\
+                                          str(chnum)+" versus detector "+\
+                                          str(ch2num):
+                                FoundType = "CC"+str(chnum)+str(ch2num)
+                                cclist.append(FoundType)
+                            elif FCStype == "Cross-correlation detector Meta"+\
+                                          str(chnum)+" versus detector Meta"+\
+                                          str(ch2num):
+                                FoundType = "CC"+str(chnum)+str(ch2num)
+                                cclist.append(FoundType)
+                if FoundType is False:
+                    # Jump out of this set. We will continue at 
+                    # the next "FcsDataSet"-section.
+                    print "Unknown channel configuration in .fcs file: "+FCStype
+                    fcsset = False
+            if Alldata[i].partition("=")[0].strip() == "CountRateArray":
+                # Start importing the trace. This is a little difficult, since
+                # traces in those files are usually very large. We will bin
+                # the trace and import a lighter version of it.
+                tracelength = \
+                     int(Alldata[i].partition("=")[2].strip().partition(" ")[0])
+                if tracelength != 0:
+                    tracedata = Alldata[i+1 : i+tracelength+1]
+                    # Jump forward in the index
+                    i = i + tracelength
+                    readtrace = csv.reader(tracedata, delimiter='\t')
+                    trace = list()
+                    for row in readtrace:
+                        # tau in ms, trace in kHz
+                        # So we need to put some factors here
+                        trace.append( (np.float(row[3])*1000,
+                                       np.float(row[4])/1000) )
+                    trace = np.array(trace)
+                    # The trace is too big. We need to bin it.
+                    if len(trace) >= 500:
+                        # We want about 500 bins
+                        # We need to sum over intervals of length *teiler*
+                        teiler = int(len(trace)/500)
+                        newlength = len(trace)/teiler
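+                        # Worked example (Python 2 integer division):
+                        # len(trace) = 1700 -> teiler = 3, newlength = 566;
+                        # the remaining 1700 - 3*566 = 2 points are averaged
+                        # into one extra bin below.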
+                        newsignal = np.zeros(newlength)
+                        # Simultaneously sum over all intervals
+                        for j in np.arange(teiler):
+                            newsignal = \
+                                 newsignal+trace[j:newlength*teiler:teiler][:,1]
+                        newsignal = 1.* newsignal / teiler
+                        newtimes = trace[teiler-1:newlength*teiler:teiler][:,0]
+                        if len(trace)%teiler != 0:
+                            # We have a rest signal
+                            # We average it and add it to the trace
+                            rest = trace[newlength*teiler:][:,1]
+                            lrest = len(rest)
+                            rest = np.array([sum(rest)/lrest])
+                            newsignal = np.concatenate((newsignal, rest),
+                                                       axis=0)
+                            timerest = np.array([trace[-1][0]])
+                            newtimes = np.concatenate((newtimes, timerest),
+                                                      axis=0)
+                        newtrace=np.zeros((len(newtimes),2))
+                        newtrace[:,0] = newtimes
+                        newtrace[:,1] = newsignal
+                    else:
+                        # Declare newtrace -
+                        # otherwise we have a problem down three lines ;)
+                        newtrace = trace
+                    # Finally add the trace to the list
+                    traces.append(newtrace)
+                    if FoundType[:2] != "AC":
+                        # For every trace there is an entry in aclist
+                        print "Trace data saved in CC section. "+ \
+                              "I cannot handle that."
+                    gottrace = True
+            if Alldata[i].partition("=")[0].strip() == "CorrelationArraySize":
+                # Get the correlation information
+                corrlength = int(Alldata[i].partition("=")[2].strip())
+                if corrlength != 0:
+                    # For cross-correlation curves there sometimes is
+                    # no trace information.
+                    if gottrace == False and FoundType[:2] == "AC":
+                        # We think we know that there is no trace in CC curves
+                        traces.append(None)
+                    corrdata = Alldata[i+2 : i+corrlength+2]
+                    # Jump forward
+                    i = i + corrlength
+                    readcorr = csv.reader(corrdata, delimiter='\t')
+                    corr = list()
+                    for row in readcorr:
+                        # tau in ms, corr-function
+                        corr.append( (np.float(row[3])*1000,
+                                      np.float(row[4])-1)    )
+                    if FoundType[:2] == "AC":
+                        ac_correlations.append(np.array(corr))
+                    elif FoundType[:2] == "CC":
+                        cc_correlations.append(np.array(corr))
+                else:
+                    # There is no correlation data in the file
+                    # Fill in some dummy data. These will be removed.
+                    if FoundType[:2] == "AC":
+                        # append a dummy correlation curve
+                        ac_correlations.append(None)
+                        if gottrace == False:
+                            # append a dummy trace
+                            traces.append(None)
+                    elif FoundType[:2] == "CC":
+                        # append a dummy correlation curve
+                        # cc_correlations do not have traces
+                        cc_correlations.append(None)
+                # We reached the end of this "FcsDataSet" section.
+                fcsset = False
+        i = i + 1
+    # finished.
+    openfile.close()
+    # We now have:
+    #  aclist: a list of AC curve names mentioned in the file.
+    #  cclist: a list of CC curve names mentioned in the file.
+    #  traces: All traces corresponding to non-"None"-type entries in
+    #          ac_correlations. Not in cc_correlations,
+    #          because cross-correlations are not saved with traces.
+    #
+    #  ac_correlations: AC-correlation data in list.
+    #  cc_correlations: CC-correlation data in list.
+    # 
+    # ac_correlations or cc_correlations can have items that are "None".
+    # These item come from averaging inside the Confocor software and
+    # do not contain any data.
+    # These "None" type items should be at the end of these lists.
+    # If the user created .fcs files with averages between the curves,
+    # the *traces* contains *None* values at those positions.
+    ## We now create:
+    #  curvelist: All actually used data
+    #  tracelist: Traces brought into right form (also for CCs)
+    #  corrlist: Correlation curves
+    #  Index in curvelist defines index in trace and correlation.
+    curvelist = list()
+    tracelist = list()
+    corrlist = list()
+    for i in np.arange(len(ac_correlations)):
+        if ac_correlations[i] is not None:
+            curvelist.append(aclist[i])
+            tracelist.append(1*traces[i])
+            corrlist.append(ac_correlations[i])
+        else:
+            if traces[i] is not None:
+                warnings.warn("File {} curve {} does not contain AC data.".format(filename, i))
+    ## The CC traces are more tricky:
+    # Add traces to CC-correlation functions.
+    # It seems reasonable, that if number of AC1,AC2 and CC are equal,
+    # CC gets the traces accordingly.
+    cctracelist = list()
+    n_ac1 = aclist.count("AC1")
+    n_ac2 = aclist.count("AC2")
+    n_cc12 = cclist.count("CC12")
+    n_cc21 = cclist.count("CC21")
+    if n_ac1==n_ac2==n_cc12==n_cc21>0:
+        CCTraces = True
+    else:
+        CCTraces = False
+    # Commence swapping, if necessary
+    # We want to have CC12 first and the corresponding trace to AC1 as well.
+    if len(cc_correlations) != 0:
+        if cclist[0] == "CC12":
+            if aclist[0] == "AC2":
+                for i in np.arange(len(traces)/2):
+                    traces[2*i], traces[2*i+1] = traces[2*i+1], traces[2*i] 
+            # Everything is OK
+        elif cclist[0] == "CC21":
+            # Switch the order of CC correlations
+            a = cc_correlations
+            for i in np.arange(len(a)/2):
+                a[2*i], a[2*i+1] = a[2*i+1], a[2*i]
+                cclist[2*i], cclist[2*i+1] = cclist[2*i+1], cclist[2*i]
+                if aclist[2*i] == "AC2":
+                    traces[2*i], traces[2*i+1] = traces[2*i+1], traces[2*i] 
+    # Add cc-curves with (if CCTraces) trace.
+    for i in np.arange(len(cc_correlations)):
+        if cc_correlations[i] is not None:
+            curvelist.append(cclist[i])
+            corrlist.append(cc_correlations[i])
+            if CCTraces == True:
+                if cclist[i] == "CC12":
+                    tracelist.append([traces[i], traces[i+1]])
+                elif cclist[i] == "CC21":
+                    tracelist.append([traces[i-1], traces[i]])
+            else:
+                tracelist.append(None)
+    dictionary = dict()
+    dictionary["Correlation"] = corrlist
+    dictionary["Trace"] = tracelist
+    dictionary["Type"] = curvelist
+    filelist = list()
+    for i in curvelist:
+        filelist.append(filename)
+    dictionary["Filename"] = filelist
+    return dictionary
+
+
+def openFCS_Single(dirname, filename):
+    """ Load data from Zeiss Confocor3
+    """
+    openfile = open(os.path.join(dirname, filename), 'r')
+    Alldata = openfile.readlines()
+    # Start progressing through the file. i is the line index.
+    # We are searching for "FcsDataSet" sections that contain
+    # all the information we want.
+    # index i for linenumber
+    i = 0
+    # Indicates if trace or FCS curve should be imported in loop
+    fcscurve = False
+    tracecurve = False
+    while i <= len(Alldata)-1:
+        if Alldata[i].partition("=")[0].strip() == "##DATA TYPE":
+            # Find out what type of correlation curve we have.
+            # Might be interesting to the user.
+            Type = Alldata[i].partition("=")[2].strip()
+            if Type == "FCS Correlogram":
+                fcscurve = True
+                tracecurve = False
+            elif Type == "FCS Count Rates":
+                tracecurve = True
+                fcscurve = False
+            else:
+                raise SyntaxError("Unknown file syntax: "+Type)
+        i = i + 1
+        if tracecurve == True:
+            if Alldata[i].partition("=")[0].strip() == "##NPOINTS":
+                # Start importing the trace. This is a little difficult, since
+                # traces in those files are usually very large. We will bin
+                # the trace and import a lighter version of it.
+                tracelength = int(Alldata[i].partition("=")[2].strip())
+                # Trace starts 3 lines after this.
+                i = i + 3
+                if tracelength != 0:
+                    tracedata = Alldata[i:i+tracelength]
+                    # Jump forward in the index
+                    i = i + tracelength
+                    readtrace = csv.reader(tracedata, delimiter=',')
+                    trace = list()
+                    for row in readtrace:
+                        # tau in ms, trace in kHz
+                        # So we need to put some factors here
+                        trace.append( (np.float(row[0])*1000, np.float(row[1])) )
+                    trace = np.array(trace)
+                    # The trace is too big. We need to bin it.
+                    if len(trace) >= 500:
+                        # We want about 500 bins
+                        # We need to sum over intervals of length *teiler*
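+                        # Worked example (hypothetical numbers): for a
+                        # trace of 1301 points, teiler = 2 and
+                        # newlength = 650; the one leftover point is
+                        # averaged into an extra final bin below.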
+                        teiler = int(len(trace)/500)
+                        newlength = len(trace)/teiler
+                        newsignal = np.zeros(newlength)
+                        # Simultaneously sum over all intervals
+                        for j in np.arange(teiler):
+                            newsignal = \
+                                 newsignal+trace[j:newlength*teiler:teiler][:,1]
+                        newsignal = 1.* newsignal / teiler
+                        newtimes = trace[teiler-1:newlength*teiler:teiler][:,0]
+                        if len(trace)%teiler != 0:
+                            # We have a rest signal
+                            # We average it and add it to the trace
+                            rest = trace[newlength*teiler:][:,1]
+                            lrest = len(rest)
+                            rest = np.array([sum(rest)/lrest])
+                            newsignal = np.concatenate((newsignal, rest),
+                                                       axis=0)
+                            timerest = np.array([trace[-1][0]])
+                            newtimes = np.concatenate((newtimes, timerest),
+                                                      axis=0)
+                        newtrace = np.zeros((len(newtimes), 2))
+                        newtrace[:,0] = newtimes
+                        newtrace[:,1] = newsignal
+                    else:
+                        # Trace is already short enough; keep it as
+                        # "newtrace", which is referenced below.
+                        newtrace = trace
+                tracecurve = False
+        if fcscurve == True:
+            if Alldata[i].partition("=")[0].strip() == "##NPOINTS":
+                # Get the correlation information
+                corrlength = int(Alldata[i].partition("=")[2].strip())
+                i = i + 2
+                if corrlength != 0:
+                    corrdata = Alldata[i:i+corrlength]
+                    # Jump forward
+                    i = i + corrlength
+                    readcorr = csv.reader(corrdata, delimiter=',')
+                    corr = list()
+                    for row in readcorr:
+                        # tau in ms, corr-function
+                        corr.append( (np.float(row[0]), np.float(row[1])-1) )
+                    corr = np.array(corr)
+                fcscurve = False
+    openfile.close()
+    dictionary = dict()
+    dictionary["Correlation"] = [corr]
+    dictionary["Trace"] = [newtrace]
+    dictionary["Type"] = [""]
+    dictionary["Filename"] = [filename]
+    return dictionary
diff --git a/src/readfiles/read_SIN_correlator_com.py b/src/readfiles/read_SIN_correlator_com.py
new file mode 100644
index 0000000..7fe23ca
--- /dev/null
+++ b/src/readfiles/read_SIN_correlator_com.py
@@ -0,0 +1,240 @@
+# -*- coding: utf-8 -*-
+import os
+import csv
+import numpy as np
+
+
+def openSIN(dirname, filename):
+    """ Read data from a .SIN file, usually created by
+        the software using correlators from correlator.com.
+
+            FLXA
+            Version= 1d
+
+            [Parameters]
+            ...
+            Mode= Single Auto
+            ...
+
+            [CorrelationFunction]
+            1.562500e-09	0.000000e+00
+            3.125000e-09	0.000000e+00
+            4.687500e-09	0.000000e+00
+            ...
+            1.887435e+01	1.000030e+00
+            1.929378e+01	1.000141e+00
+            1.971321e+01	9.999908e-01
+            2.013264e+01	9.996810e-01
+            2.055207e+01	1.000047e+00
+            2.097150e+01	9.999675e-01
+            2.139093e+01	9.999591e-01
+            2.181036e+01	1.000414e+00
+            2.222979e+01	1.000129e+00
+            2.264922e+01	9.999285e-01
+            2.306865e+01	1.000077e+00
+            ...
+            3.959419e+02	0.000000e+00
+            4.026528e+02	0.000000e+00
+            4.093637e+02	0.000000e+00
+            4.160746e+02	0.000000e+00
+            4.227854e+02	0.000000e+00
+            4.294963e+02	0.000000e+00
+
+            [RawCorrelationFunction]
+            ...
+
+            [IntensityHistory]
+            TraceNumber= 458
+            0.000000	9.628296e+03	9.670258e+03
+            0.262144	1.001358e+04	9.971619e+03
+            0.524288	9.540558e+03	9.548188e+03
+            0.786432	9.048462e+03	9.010315e+03
+            1.048576	8.815766e+03	8.819580e+03
+            1.310720	8.827210e+03	8.861542e+03
+            1.572864	9.201050e+03	9.185791e+03
+            1.835008	9.124756e+03	9.124756e+03
+            2.097152	9.059906e+03	9.029389e+03
+            ...
+
+        1. We are interested in the "[CorrelationFunction]" section,
+        where the first column denotes tau in seconds and the second row the
+        correlation signal. Values are separated by a tabulator "\t".
+        We do not import anything from the "[Parameters]" section.
+        We have to subtract "1" from the correlation function, since it
+        is a correlation function that converges to "1" and not to "0".
+
+        2. We are also interested in the "[IntensityHistory]" section.
+        If we are only interested in autocorrelation functions: An email
+        from Jixiang Zhu - Correlator.com (2012-01-22) said, that
+        "For autocorrelation mode, the 2nd and 3 column represent the same
+        intensity series with slight delay.  Therefore, they are statistically
+        the same but numerically different."
+        It is therefore perfectly fine to just use the 2nd column.
+
+        Different acquisition modes:
+        Mode            [CorrelationFunction]               [IntensityHistory]
+        Single Auto     2 Colums (tau,AC)                   1 significant
+        Single Cross    2 Colums (tau,CC)                   2
+        Dual Auto       3 Colums (tau,AC1,AC2)              2
+        Dual Cross      3 Colums (tau,CC12,CC21)            2
+        Quad            5 Colums (tau,AC1,AC2,CC12,CC21)    2
+
+        Returns:
+        [0]:
+         N arrays with tuples containing two elements:
+         1st: tau in ms
+         2nd: corresponding correlation signal
+        [1]:
+         N Intensity traces:
+         1st: time in ms
+         2nd: Trace in kHz
+        [2]: 
+         A list with N elements, indicating, how many correlation
+         curves we are importing.
+    """
+    openfile = open(os.path.join(dirname, filename), 'r')
+    Alldata = openfile.readlines()
+    # Find out where the correlation function and trace are
+    for i in np.arange(len(Alldata)):
+        if Alldata[i][0:4] == "Mode":
+         
+            Mode = Alldata[i].split("=")[1].strip()
+        if Alldata[i][0:21] == "[CorrelationFunction]":
+            StartC = i+1
+        if Alldata[i][0:24] == "[RawCorrelationFunction]":
+            EndC = i-2
+        if Alldata[i][0:18] == "[IntensityHistory]":
+            # plus 2, because theres a line with the trace length
+            StartT = i+2 
+        if Alldata[i][0:11] == "[Histogram]":
+            EndT = i-2
+    curvelist = list()
+    correlations = list()
+    traces = list()
+    # Get the correlation function
+    Truedata = Alldata.__getslice__(StartC, EndC)
+    timefactor = 1000 # because we want ms instead of s
+    readcorr = csv.reader(Truedata, delimiter='\t')
+    # Trace
+    # Trace is stored in three columns
+    # 1st column: time [s]
+    # 2nd column: trace [Hz] 
+    # 3rd column: trace [Hz] - Single Auto: equivalent to 2nd
+    # Get the trace
+    Tracedata = Alldata.__getslice__(StartT, EndT)
+    # timefactor = 1000 # because we want ms instead of s
+    timedivfac = 1000 # because we want kHz instead of Hz
+    readtrace = csv.reader(Tracedata, delimiter='\t')
+    openfile.close()
+    # Process all Data:
+    if Mode == "Single Auto":
+        curvelist.append("AC")
+        corrdata = list()
+        for row in readcorr:
+            # tau in ms, corr-function minus "1"
+            corrdata.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+        correlations.append(np.array(corrdata))
+        trace = list()
+        for row in readtrace:
+            # tau in ms, corr-function minus "1"
+            trace.append((np.float(row[0])*timefactor,
+                         np.float(row[1])/timedivfac))
+        traces.append(np.array(trace))
+    elif Mode == "Single Cross":
+        curvelist.append("CC")
+        corrdata = list()
+        for row in readcorr:
+            # tau in ms, corr-function minus "1"
+            corrdata.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+        correlations.append(np.array(corrdata))
+        trace1 = list()
+        trace2 = list()
+        for row in readtrace:
+            # tau in ms, corr-function minus "1"
+            trace1.append((np.float(row[0])*timefactor,
+                           np.float(row[1])/timedivfac))
+            trace2.append((np.float(row[0])*timefactor,
+                           np.float(row[2])/timedivfac))
+        traces.append([np.array(trace1), np.array(trace2)])
+    elif Mode == "Dual Auto":
+        curvelist.append("AC1")
+        curvelist.append("AC2")
+        corrdata1 = list()
+        corrdata2 = list()
+        for row in readcorr:
+            # tau in ms, corr-function minus "1"
+            corrdata1.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+            corrdata2.append((np.float(row[0])*timefactor, np.float(row[2])-1))
+        correlations.append(np.array(corrdata1))
+        correlations.append(np.array(corrdata2))
+        trace1 = list()
+        trace2 = list()
+        for row in readtrace:
+            # tau in ms, corr-function minus "1"
+            trace1.append((np.float(row[0])*timefactor,
+                           np.float(row[1])/timedivfac))
+            trace2.append((np.float(row[0])*timefactor,
+                           np.float(row[2])/timedivfac))
+        traces.append(np.array(trace1))
+        traces.append(np.array(trace2))
+    elif Mode == "Dual Cross":
+        curvelist.append("CC12")
+        curvelist.append("CC21")
+        corrdata1 = list()
+        corrdata2 = list()
+        for row in readcorr:
+            # tau in ms, corr-function minus "1"
+            corrdata1.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+            corrdata2.append((np.float(row[0])*timefactor, np.float(row[2])-1))
+        correlations.append(np.array(corrdata1))
+        correlations.append(np.array(corrdata2))
+        trace1 = list()
+        trace2 = list()
+        for row in readtrace:
+            # tau in ms, corr-function minus "1"
+            trace1.append((np.float(row[0])*timefactor,
+                           np.float(row[1])/timedivfac))
+            trace2.append((np.float(row[0])*timefactor,
+                           np.float(row[2])/timedivfac))
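+        # Both CC curves (CC12 and CC21) use the same pair of traces: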
+        traces.append([np.array(trace1), np.array(trace2)])
+        traces.append([np.array(trace1), np.array(trace2)])
+    elif Mode == "Quad":
+        curvelist.append("AC1")
+        curvelist.append("AC2")
+        curvelist.append("CC12")
+        curvelist.append("CC21")
+        corrdata1 = list()
+        corrdata2 = list()
+        corrdata12 = list()
+        corrdata21 = list()
+        for row in readcorr:
+            # tau in ms, corr-function minus "1"
+            corrdata1.append((np.float(row[0])*timefactor, np.float(row[1])-1))
+            corrdata2.append((np.float(row[0])*timefactor, np.float(row[2])-1))
+            corrdata12.append((np.float(row[0])*timefactor, np.float(row[3])-1))
+            corrdata21.append((np.float(row[0])*timefactor, np.float(row[4])-1))
+        correlations.append(np.array(corrdata1))
+        correlations.append(np.array(corrdata2))
+        correlations.append(np.array(corrdata12))
+        correlations.append(np.array(corrdata21))
+        trace1 = list()
+        trace2 = list()
+        for row in readtrace:
+            # tau in ms, corr-function minus "1"
+            trace1.append((np.float(row[0])*timefactor,
+                           np.float(row[1])/timedivfac))
+            trace2.append((np.float(row[0])*timefactor,
+                           np.float(row[2])/timedivfac))
+        traces.append(np.array(trace1))
+        traces.append(np.array(trace2))
+        traces.append([np.array(trace1), np.array(trace2)])
+        traces.append([np.array(trace1), np.array(trace2)])
+    dictionary = dict()
+    dictionary["Correlation"] = correlations
+    dictionary["Trace"] = traces
+    dictionary["Type"] = curvelist
+    filelist = list()
+    for i in curvelist:
+        filelist.append(filename)
+    dictionary["Filename"] = filelist
+    return dictionary
diff --git a/src/readfiles/read_mat_ries.py b/src/readfiles/read_mat_ries.py
new file mode 100644
index 0000000..187b001
--- /dev/null
+++ b/src/readfiles/read_mat_ries.py
@@ -0,0 +1,220 @@
+# -*- coding: utf-8 -*-
+"""
+Read mat files that Jonas Ries used in his programs.
+For opening .mat files, this helped a lot:
+http://stackoverflow.com/questions/7008608/
+scipy-io-loadmat-nested-structures-i-e-dictionaries
+
+The structure has been derived from "corrSFCS.m" from the SFCS.m program
+from Jonas Ries.
+"""
+
+import csv
+import numpy as np
+
+# On the windows machine the matlab binary import raised a warning.
+# We want to catch that warning, since importing ries's files works.
+import warnings
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    try:
+        # scipy.io might not work on OSX (wrong architecture)
+        import scipy.io as spio
+        import scipy.io.matlab
+        # streams is not available in older versions
+        # of scipy. We catch this, so PyCorrFit will start
+        # without problems.
+        import scipy.io.matlab.streams
+    except:
+        print " Error: import error in scipys 'matlab' submodule."
+        print "        Try upgrading python-scipy or ignore this"
+        print "        error if you are not using .mat files that"
+        print "        were generated by programs by Jonas Ries."
+import os
+
+
+def openMAT(dirname, filename):
+    # initiate lists
+    correlations = list()
+    traces = list()
+    curvelist = list()
+    # Import everything inside the mat file as big iterated dictionary
+    f = os.path.join(dirname, filename)
+    alldata = loadmat(f)
+    # Correlation functions are stored in "g"
+    g = alldata["g"]
+    # Get all Autocorrelation functions
+    try:
+        # ac for autocorrelation
+        ac = g["ac"]
+    except KeyError:
+        pass
+    else:
+        N = len(ac)
+        # Workaround for single ACs, they are not stored in a separate list,
+        # but directly inserted into g["ac"]. We put it in a list.
+        # This is not the case for the trace averages.
+        # There are a maximum of 4 autocorrelation functions in one file,
+        # as far as I know.
+        if len(ac) > 4:
+            N=1
+            ac = [ac]
+            g["act"] = [g["act"]]
+        for i in np.arange(len(ac)):
+            corr = ac[i]
+            try:
+                times = g["act"][i]
+            except KeyError:
+                pass
+            else:
+                # Another workaround
+                # Sometimes, there's just one curve, which
+                # means that corr[0] has no length.
+                if len( np.atleast_1d(corr[0]) ) == 1:
+                    final = np.zeros((len(corr), 2))
+                    final[:,0] = times
+                    final[:,1] = corr
+                    correlations.append(final)
+                    curvelist.append("AC"+str(i+1))
+                    try:
+                        # Only trace averages are saved. There is a
+                        # single curve here, so no inner index applies.
+                        traceavg = g["trace"][i]
+                    except:
+                        # No trace
+                        traces.append(None)
+                    else:
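+                        # Build a minimal two-point trace that carries
+                        # only the average count rate.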
+                        trace = np.zeros((2,2))
+                        trace[1,0] = 1.0
+                        trace[:,1] = traceavg
+                        traces.append(trace)
+
+                elif len(corr) == len(times):
+                    for j in np.arange(len(corr[0])):
+                        final = np.zeros((len(corr), 2))
+                        final[:,0] = times
+                        final[:,1] = corr[:,j]
+                        correlations.append(final)
+                        curvelist.append("AC"+str(i+1))
+                        try:
+                            # only trace averages are saved
+                            traceavg = g["trace"][i][j]
+                        except:
+                            # No trace
+                            traces.append(None)
+                        else:
+                            trace = np.zeros((2,2))
+                            trace[1,0] = 1.0
+                            trace[:,1] = traceavg
+                            traces.append(trace)
+    # Get dc "dual color" functions
+    try:
+        dc = g["dc"]
+    except KeyError:
+        pass
+    else:
+        for i in np.arange(len(dc)):
+            corr = dc[i]
+            try:
+                times = g["dct"][i]
+            except KeyError:
+                pass
+            else:
+                if len(corr) == len(times):
+                    for j in np.arange(len(corr[0])):
+                        final = np.zeros((len(corr), 2))
+                        final[:,0] = times
+                        final[:,1] = corr[:,j]
+                        correlations.append(final)
+                        curvelist.append("CC dual color "+str(i+1))
+                        traces.append(None)
+    # Get twof "two focus" functions
+    try:
+        twof = g["twof"]
+    except KeyError:
+        pass
+    else:
+        for i in np.arange(len(dc)):
+            corr = twof[i]
+            try:
+                times = g["twoft"][i]
+            except KeyError:
+                pass
+            else:
+                if len(corr) == len(times):
+                    for j in np.arange(len(corr[0])):
+                        final = np.zeros((len(corr), 2))
+                        final[:,0] = times
+                        final[:,1] = corr[:,j]
+                        correlations.append(final)
+                        curvelist.append("CC two foci "+str(i+1))
+                        traces.append(None)
+    # Get dc2f "dual color two focus" functions
+    try:
+        dc2f = g["dc2f"]
+    except KeyError:
+        pass
+    else:
+        for i in np.arange(len(dc)):
+            corr = twof[i]
+            try:
+                times = g["dc2ft"][i]
+            except KeyError:
+                pass
+            else:
+                if len(corr) == len(times):
+                    for j in np.arange(len(corr[0])):
+                        final = np.zeros((len(corr), 2))
+                        final[:,0] = times
+                        final[:,1] = corr[:,j]
+                        correlations.append(final)
+                        curvelist.append("CC dual color two foci "+str(i+1))
+                        traces.append(None)
+    dictionary = dict()
+    dictionary["Correlation"] = correlations
+    dictionary["Trace"] = traces
+    dictionary["Type"] = curvelist
+    filelist = list()
+    for i in curvelist:
+        filelist.append(filename)
+    dictionary["Filename"] = filelist
+    return dictionary
+
+
+def loadmat(filename):
+    '''
+    this function should be called instead of direct spio.loadmat
+    as it cures the problem of not properly recovering python dictionaries
+    from mat files. It calls the function check keys to cure all entries
+    which are still mat-objects
+    '''
+    data = spio.loadmat(filename, struct_as_record=False, squeeze_me=True)
+    return _check_keys(data)
+
+
+def _check_keys(dict):
+    '''
+    checks if entries in dictionary are mat-objects. If yes
+    todict is called to change them to nested dictionaries
+    '''
+    for key in dict:
+        if isinstance(dict[key], spio.matlab.mio5_params.mat_struct):
+            dict[key] = _todict(dict[key])
+    return dict        
+
+def _todict(matobj):
+    '''
+    A recursive function which constructs from matobjects nested dictionaries
+    '''
+    dict = {}
+    for strg in matobj._fieldnames:
+        elem = matobj.__dict__[strg]
+        if isinstance(elem, spio.matlab.mio5_params.mat_struct):
+            dict[strg] = _todict(elem)
+        else:
+            dict[strg] = elem
+    return dict
diff --git a/src/tools/__init__.py b/src/tools/__init__.py
new file mode 100644
index 0000000..73e7d8c
--- /dev/null
+++ b/src/tools/__init__.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools
+    This file contains useful tools, such as dialog boxes and other stuff,
+    that we need in PyCorrFit.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+# This file is necessary for this folder to become a module that can be 
+# imported by PyCorrFit or other people.
+
+import importlib
+import numpy as np                  # NumPy
+import sys
+
+## On Windows XP I had problems with the unicode Characters.
+# I found this at 
+# http://stackoverflow.com/questions/5419/python-unicode-and-the-windows-console
+# and it helped:
+reload(sys)
+sys.setdefaultencoding('utf-8')
+
+import datarange
+import background
+import overlaycurves
+import batchcontrol
+import globalfit
+import average
+import simulation
+
+import info
+import statistics
+import trace
+# Load all of the classes
+# This also defines the order of the tools in the menu
+ImpA = [ 
+        ["datarange", "SelectChannels"],
+        ["overlaycurves", "Wrapper_Tools"],
+        ["batchcontrol", "BatchCtrl"],
+        ["globalfit", "GlobalFit"],        
+        ["average", "Average"],
+        ["background", "BackgroundCorrection"]
+       ]
+
+ImpB = [
+        ["trace", "ShowTrace"],
+        ["statistics", "Stat"],
+        ["info", "ShowInfo"],
+        ["simulation", "Slide"]
+       ]
+
+ModuleActive = list()
+ToolsActive = list()
+for i in np.arange(len(ImpA)):
+    # We have to add "tools." because this is a relative import
+    ModuleActive.append(__import__(ImpA[i][0], globals(), locals(), [ImpA[i][1]], -1))
+    ToolsActive.append(getattr(ModuleActive[i], ImpA[i][1]))
+
+ModulePassive = list()
+ToolsPassive = list()
+for i in np.arange(len(ImpB)):
+    ModulePassive.append(__import__(ImpB[i][0], globals(), locals(), [ImpB[i][1]], -1))
+    ToolsPassive.append(getattr(ModulePassive[i], ImpB[i][1]))
+    #ModulePassive.append(importlib.import_module("tools."+ImpB[i][0]))
+    #ToolsPassive.append(getattr(ModulePassive[i], ImpB[i][1]))
+
+# This is in the file menu and not needed in the dictionaries below.
+from chooseimport import ChooseImportTypes
+from chooseimport import ChooseImportTypesModel
+from comment import EditComment
+# the "special" tool RangeSelector
+from parmrange import RangeSelector
+
+ToolDict = dict()
+ToolDict["A"] = ToolsActive
+ToolDict["P"] = ToolsPassive
+
+
+# Make the same for Menu Names in Tools
+NameActive = list()
+for i in np.arange(len(ImpA)):
+    NameActive.append(ModuleActive[i].MENUINFO)
+    
+NamePassive = list()
+for i in np.arange(len(ImpB)):
+    NamePassive.append(ModulePassive[i].MENUINFO)
+
+
+ToolName = dict()
+ToolName["A"] = NameActive
+ToolName["P"] = NamePassive
+
diff --git a/src/tools/average.py b/src/tools/average.py
new file mode 100644
index 0000000..1d9a0d0
--- /dev/null
+++ b/src/tools/average.py
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - average
+    Creates an average of curves.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import numpy as np
+import wx
+
+import misc
+import models as mdls
+import doc
+
+# Menu entry name
+MENUINFO = ["&Average data", "Create an average curve from whole session."]
+
+class Average(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent):
+        # Define a unique name that identifies this tool
+        # Do not change this value. It is important for the Overlay tool
+        # (selectcurves.py, *Wrapper_Tools*).
+        self.MyName="AVERAGE"
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Average curves",
+            pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Content
+        self.panel = wx.Panel(self)
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        textinit = wx.StaticText(self.panel,
+                    label="Create an average from the following pages:")
+        self.topSizer.Add(textinit)
+        ## Page selection
+        self.WXTextPages = wx.TextCtrl(self.panel, value="",
+                                       size=(textinit.GetSize()[0],-1))
+        self.topSizer.Add(self.WXTextPages)
+        ## Chechbox asking for Mono-Model
+        self.WXCheckMono = wx.CheckBox(self.panel,
+         label="Only use pages with the same model as the current page.")
+        self.WXCheckMono.SetValue(True)
+        self.topSizer.Add(self.WXCheckMono)
+        ## Model selection Dropdown
+        textinit2 = wx.StaticText(self.panel,
+                                label="Select a model for the average:")
+        self.topSizer.Add(textinit2)
+        self.WXDropSelMod = wx.ComboBox(self.panel, -1, "", (15,30),
+               wx.DefaultSize, [], wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.topSizer.Add(self.WXDropSelMod)
+        textinit3 = wx.StaticText(self.panel,
+         label="This tool averages only over pages with the same type"+\
+               "\n(auto- or cross-correlation). Intensity data are"+\
+               "\nappended sequentially.")
+        self.topSizer.Add(textinit3)
+        # Set all values of Text and Strin
+        self.SetValues()
+        btnavg = wx.Button(self.panel, wx.ID_CLOSE, 'Create average')
+        # Binds the button to the function - close the tool
+        self.Bind(wx.EVT_BUTTON, self.OnAverage, btnavg)
+        self.topSizer.Add(btnavg)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        self.SetMinSize(self.topSizer.GetMinSizeTuple())
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+        self.OnPageChanged(self.Page)
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnPageChanged(self, page):
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        idsel = self.WXDropSelMod.GetSelection()
+        self.SetValues()
+        # Set back user selection:
+        self.WXDropSelMod.SetSelection(idsel)
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+            return
+        self.panel.Enable()
+        self.Page = page
+
+
+    def OnAverage(self, evt=None):
+        strFull = self.WXTextPages.GetValue()
+        PageNumbers = misc.parseString2Pagenum(self, strFull)
+        if PageNumbers is None:
+            # Something went wrong and parseString2Pagenum already displayed
+            # an error message.
+            return
+        pages = list()
+        UsedPagenumbers = list()
+        referencePage = self.parent.notebook.GetCurrentPage()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            j = filter(lambda x: x.isdigit(), Page.counter)
+            if int(j) in PageNumbers:
+                # Get all pages with the same model?
+                if self.WXCheckMono.GetValue() == True:
+                    if (Page.modelid == referencePage.modelid and
+                       Page.IsCrossCorrelation == referencePage.IsCrossCorrelation):
+                        ## Check if current page has experimental data:
+                        # If there is an empty page somewhere, don't bother
+                        if Page.dataexpfull is not None:
+                            pages.append(Page)
+                            UsedPagenumbers.append(int(j))
+                else:
+                    if Page.IsCrossCorrelation == referencePage.IsCrossCorrelation:
+                        # If there is an empty page somewhere, don't bother
+                        if Page.dataexpfull is not None:
+                            pages.append(Page)
+                            UsedPagenumbers.append(int(j))
+        # If there are no pages in the list, exit gracefully
+        if len(pages) <= 0:
+            texterr_a = "At least one page with experimental data is\n"+\
+                        "required for averaging. Please check the pages\n"+\
+                        "that you selected for averaging."
+            if self.WXCheckMono.GetValue() == True:
+                texterr_a += " Note: You selected\n"+\
+                 "to only use pages with the same model as the current page."
+            dlg = wx.MessageDialog(self, texterr_a, "Error", 
+                              style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+            dlg.ShowModal() == wx.ID_OK
+            return
+        # Now get all the experimental data
+        explist = list()
+        # Two components in case of Cross correlation
+        tracetime = [np.array([]), np.array([])]
+        tracerate = [np.array([]), np.array([])]
+        TraceNumber = 0
+        TraceAvailable = False # turns True, if pages contain traces
+        for page in pages:
+            # experimental correlation curve
+            # (at least 1d, because it might be None)
+            explist.append(np.atleast_1d(1*page.dataexpfull))
+            # trace
+            # We will put together a trace from all possible traces
+            # Stitch together all the traces.
+            if page.IsCrossCorrelation is False:
+                trace = [page.trace]
+                # trace has one element
+                TraceNumber = 1
+            else:
+                trace = page.tracecc
+                # trace has two elements
+                TraceNumber = 2
+            if trace is not None and trace[0] is not None:
+                TraceAvailable = True
+                # Works with one or two traces. j = 0 or 1.
+                for j in np.arange(TraceNumber):
+                    if len(tracetime[j]) != 0:
+                        # append to the trace
+                        oldend = tracetime[j][-1]
+                        newtracetime = 1.*trace[j][:,0]
+                        newtracetime = newtracetime + oldend
+                        tracetime[j] = np.append(tracetime[j], newtracetime)
+                        del newtracetime
+                        tracerate[j] = np.append(tracerate[j], trace[j][:,1])
+                    else:
+                        # Initiate the trace
+                        tracetime[j] = 1.*trace[j][:,0]
+                        tracerate[j] = 1.*trace[j][:,1]
+        # Now check if the length of the correlation arrays are the same:
+        len0 = len(explist[0])
+        for item in explist[1:]:
+            if len(item) != len0:
+                # Print an error message
+                dlg = wx.MessageDialog(self,
+                "Averaging over curves with different lengths is not"+\
+                "\nsupported. When measuring, please make sure that"+\
+                "\nthe measurement time for all curves is the same.",
+                "Error", style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg.ShowModal()
+                return
+        # Now shorten the trace, because we want as little memory usage as
+        # possible. I used this algorithm in read_FCS_Confocor3.py as well.
+        newtraces = list()
+        if TraceAvailable is True:
+            for j in np.arange(TraceNumber):
+                tracej = np.zeros((len(tracetime[j]),2))
+                tracej[:,0] = tracetime[j]
+                tracej[:,1] = tracerate[j]
+                if len(tracej) >= 500:
+                    # We want about 500 bins
+                    # We need to sum over intervals of length *teiler*
+                    teiler = int(len(tracej)/500)
+                    newlength = len(tracej)/teiler
+                    newsignal = np.zeros(newlength)
+                    # Simultaneously sum over all intervals
+                    for k in np.arange(teiler):
+                        newsignal = \
+                                newsignal+tracej[k:newlength*teiler:teiler][:,1]
+                    newsignal = 1.* newsignal / teiler
+                    newtimes = tracej[teiler-1:newlength*teiler:teiler][:,0]
+                    if len(tracej)%teiler != 0:
+                        # We have a rest signal
+                        # We average it and add it to the trace
+                        rest = tracej[newlength*teiler:][:,1]
+                        lrest = len(rest)
+                        rest = np.array([sum(rest)/lrest])
+                        newsignal = np.concatenate((newsignal, rest), axis=0)
+                        timerest = np.array([tracej[-1][0]])
+                        newtimes = np.concatenate((newtimes, timerest), axis=0)
+                    newtrace = np.zeros((len(newtimes), 2))
+                    newtrace[:,0] = newtimes
+                    newtrace[:,1] = newsignal
+                else:
+                    # Trace is already short enough; keep it as
+                    # "newtrace", which is referenced below.
+                    newtrace = tracej
+                newtraces.append(newtrace)
+        else:
+            newtraces=[None,None]
+        # Everything is cleared for averaging
+        exparray = np.array(explist)
+        averagedata = exparray.sum(axis=0)[:,1]/len(exparray)
+        # Create a copy from the first page
+        average = 1*exparray[0]
+        # Set average data
+        average[:,1] = averagedata
+        # create new page
+        self.IsCrossCorrelation = self.Page.IsCrossCorrelation
+        interval = (self.Page.startcrop, self.Page.endcrop)
+        # Obtain the model ID from the dropdown selection.
+        idsel = self.WXDropSelMod.GetSelection()
+        modelid = self.DropdownIndex[idsel]
+        self.parent.add_fitting_tab(modelid = modelid)
+        self.AvgPage = self.parent.notebook.GetCurrentPage()
+        (self.AvgPage.startcrop, self.AvgPage.endcrop) = interval
+        self.AvgPage.dataexpfull = average
+        self.AvgPage.IsCrossCorrelation = self.IsCrossCorrelation
+        if self.IsCrossCorrelation is False:
+            newtrace = newtraces[0]
+            if newtrace is not None and len(newtrace) != 0:
+                self.AvgPage.trace = newtrace
+                self.AvgPage.traceavg = newtrace.mean()
+            else:
+                self.AvgPage.trace = None
+                self.AvgPage.traceavg = None
+        else:
+            if newtraces[0] is not None and len(newtraces[0][0]) != 0:
+                self.AvgPage.tracecc = newtraces
+            else:
+                self.AvgPage.tracecc = None
+        self.AvgPage.PlotAll()
+        self.AvgPage.Fit_enable_fitting()
+        if len(pages) == 1:
+            # Use the same title as the current page
+            newtabti = referencePage.tabtitle.GetValue()
+        else:
+            # Create a new tab title
+            newtabti = "Average ["+misc.parsePagenum2String(UsedPagenumbers)+"]"
+        self.AvgPage.tabtitle.SetValue(newtabti)
+        # Set the addition information about the variance from averaging
+        Listname = "Average"
+        standarddev = exparray.std(axis=0)[:,1]
+        if np.sum(np.abs(standarddev)) == 0:
+            # The average sd is zero. We probably made an average
+            # from only one page. In this case we do not enable
+            # average weighted fitting
+            pass
+        else:
+            self.AvgPage.external_std_weights[Listname] = standarddev
+            WeightKinds = self.AvgPage.Fitbox[1].GetItems()
+            # Attention! Average weights and other external weights should
+            # be sorted (for session saving).
+            extTypes = self.AvgPage.external_std_weights.keys()
+            extTypes.sort() # sorting
+            for key in extTypes:
+                try:
+                    WeightKinds.remove(key)
+                except:
+                    pass
+            LenInternal = len(WeightKinds)
+            IndexAverag = extTypes.index(Listname)
+            IndexInList = LenInternal + IndexAverag
+            for key in extTypes:
+                WeightKinds += [key]
+            self.AvgPage.Fitbox[1].SetItems(WeightKinds)
+            self.AvgPage.Fitbox[1].SetSelection(IndexInList)
+        # Keep the average tool open.
+        # self.OnClose()
+
+    def SetPageNumbers(self, pagestring):
+        self.WXTextPages.SetValue(pagestring)
+        
+    def SetValues(self, e=None):
+        # Text input
+        pagenumlist = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
+        valstring=misc.parsePagenum2String(pagenumlist)
+        self.WXTextPages.SetValue(valstring)
+        # Dropdown
+        modelkeys = mdls.modeltypes.keys()
+        modelkeys.sort()
+        try:
+            current_model = self.parent.notebook.GetCurrentPage().modelid
+        except:
+            current_model = -1
+        i = 0
+        DropdownList = list()
+        self.DropdownIndex = list() # Contains model ids with same index
+        current_index = 0
+        for modeltype in modelkeys:
+            for modelid in mdls.modeltypes[modeltype]:
+                DropdownList.append(modeltype+": "+mdls.modeldict[modelid][1])
+                self.DropdownIndex.append(str(modelid))
+                if str(current_model) == str(modelid):
+                    current_index = i
+                i+=1
+        self.WXDropSelMod.SetItems(DropdownList)
+        self.WXDropSelMod.SetSelection(current_index)
diff --git a/src/tools/background.py b/src/tools/background.py
new file mode 100644
index 0000000..e1a6ca3
--- /dev/null
+++ b/src/tools/background.py
@@ -0,0 +1,498 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - background
+    We make some background corection here.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import numpy as np
+import os
+import sys
+import traceback                        # for Error handling
+import wx
+from wx.lib.agw import floatspin        # Float numbers in spin fields
+import wx.lib.plot as plot    
+
+import doc
+import misc
+import openfile as opf                  # How to treat an opened file
+import readfiles
+
+# Menu entry name
+MENUINFO = ["&Background correction", "Open a file for background correction."]
+
+class BackgroundCorrection(wx.Frame):
+    def __init__(self, parent):
+        self.MyName="BACKGROUND"
+        # Parent is main frame
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=parent, title="Background correction",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Current trace we are looking at
+        self.activetrace = None 
+        # Importet trace
+        self.trace = None
+        # Importet trace after user decides to cange radio buttons
+        self.oldtrace = None
+        self.oldfilename = None
+        self.average = None
+        ## Start drawing
+        # Splitter Window
+        self.sp = wx.SplitterWindow(self, style=wx.SP_NOBORDER)
+        ## Controls
+        panel = wx.Panel(self.sp)
+        # text1
+        textinit = wx.StaticText(panel, label=doc.backgroundinit)
+        # Radio buttons
+        self.rbtnfile = wx.RadioButton (panel, -1, 'Blank measurement: ', 
+                                        style = wx.RB_GROUP)
+        self.rbtnfile.SetValue(True)
+        self.btnbrowse = wx.Button(panel, wx.ID_ANY, 'Browse ...')
+        self.rbtnhand = wx.RadioButton (panel, -1, 'Manual, <B> [kHz]: ')
+        # Spincontrol
+        self.spinctrl = floatspin.FloatSpin(panel, digits=7,
+                                            increment=.1)
+        self.spinctrl.Enable(False)
+        # Verbose text
+        self.textfile = wx.StaticText(panel,
+                                    label="No blank measurement file selected.")
+        textmeanavg = wx.StaticText(panel,
+                                    label="Average background signal [kHz]: ")
+        self.textmean = wx.StaticText(panel, label="")
+        # name
+        textname = wx.StaticText(panel, label="User defined background name: ")
+        sizeTextn = textname.GetSize()[0]
+        self.bgname = wx.TextCtrl(panel, value="", size=(sizeTextn,-1))
+        self.bgname.Enable(False)
+        self.btnimport = wx.Button(panel, wx.ID_ANY, 'Import into session')
+        self.btnimport.Enable(False)
+        # Dropdown
+        textdropdown = wx.StaticText(panel, label="Show background: ")
+        self.BGlist = list()
+        #self.BGlist.append("File/User")
+        for item in self.parent.Background:
+            bgname = "{} ({:.2f} kHz)".format(item[1],item[0])
+            self.BGlist.append(bgname)
+        if len(self.BGlist) == 0:
+            ddlist = ["File/User"]
+        else:
+            ddlist = 1*self.BGlist
+        self.dropdown = wx.ComboBox(panel, -1, "File/User", (15, -1),
+                     wx.DefaultSize, ddlist, wx.CB_DROPDOWN|wx.CB_READONLY)
+        #self.textafterdropdown = wx.StaticText(panel, label="")
+        # Apply buttons
+        self.btnapply = wx.Button(panel, wx.ID_ANY, 'Apply')
+        textor = wx.StaticText(panel, label=" or ")
+        self.btnrem = wx.Button(panel, wx.ID_ANY, 'Dismiss')
+        textpages   = wx.StaticText(panel, label=" correction for pages: ")
+        self.WXTextPages = wx.TextCtrl(panel, value="")
+        # Initial value for WXTextPages
+        pagenumlist = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
+        valstring=misc.parsePagenum2String(pagenumlist)
+        self.WXTextPages.SetValue(valstring)
+        
+        textyma   = wx.StaticText(panel, label="You may also: ")
+        self.btnapplyall = wx.Button(panel, wx.ID_ANY, 'Apply to all pages')
+        self.btnapply.Enable(False)
+        self.btnapplyall.Enable(False)
+        textor2 = wx.StaticText(panel, label=" or ")
+        self.btnremyall = wx.Button(panel, wx.ID_ANY, 'Dismiss from all pages')
+        if len(self.BGlist) <= 1:
+            self.btnrem.Enable(False)
+            self.btnremyall.Enable(False)
+        # Bindings
+        self.Bind(wx.EVT_BUTTON, self.OnBrowse, self.btnbrowse)
+        self.Bind(wx.EVT_RADIOBUTTON, self.OnRadioFile, self.rbtnfile)
+        self.Bind(wx.EVT_RADIOBUTTON, self.OnRadioHand, self.rbtnhand)
+        self.Bind(wx.EVT_SPINCTRL, self.SpinCtrlChange, self.spinctrl)
+        self.Bind(wx.EVT_BUTTON, self.OnImport, self.btnimport)
+        self.Bind(wx.EVT_COMBOBOX, self.OnDraw, self.dropdown)
+        self.Bind(wx.EVT_BUTTON, self.OnApply, self.btnapply)
+        self.Bind(wx.EVT_BUTTON, self.OnApplyAll, self.btnapplyall)
+        self.Bind(wx.EVT_BUTTON, self.OnRemove, self.btnrem)
+        self.Bind(wx.EVT_BUTTON, self.OnRemoveAll, self.btnremyall)
+        # Sizers
+        topSizer = wx.BoxSizer(wx.VERTICAL)
+        text1sizer = wx.BoxSizer(wx.HORIZONTAL)
+        text1sizer.Add(self.rbtnfile)
+        text1sizer.Add(self.btnbrowse)
+        text2sizer = wx.BoxSizer(wx.HORIZONTAL)
+        text2sizer.Add(self.rbtnhand)
+        text2sizer.Add(self.spinctrl)
+        textmeansizer = wx.BoxSizer(wx.HORIZONTAL)
+        textmeansizer.Add(textmeanavg)
+        textmeansizer.Add(self.textmean)
+        dropsizer = wx.BoxSizer(wx.HORIZONTAL)
+        dropsizer.Add(textdropdown)
+        droprightsizer = wx.BoxSizer(wx.VERTICAL)
+        dropsizer.Add(droprightsizer)
+        droprightsizer.Add(self.dropdown)
+        #droprightsizer.Add(self.textafterdropdown)
+        applysizer = wx.BoxSizer(wx.HORIZONTAL)
+        applysizer.Add(self.btnapply)
+        applysizer.Add(textor)
+        applysizer.Add(self.btnrem)
+        applysizer.Add(textpages)
+        applysizer.Add(self.WXTextPages)
+        applysizer.Add(self.btnapplyall)
+        allsizer = wx.BoxSizer(wx.HORIZONTAL)
+        allsizer.Add(textyma)
+        allsizer.Add(self.btnapplyall)
+        allsizer.Add(textor2)
+        allsizer.Add(self.btnremyall)
+        
+        topSizer.Add(textinit)
+        topSizer.Add(text1sizer)
+        topSizer.Add(text2sizer)
+        topSizer.Add(self.textfile)
+        topSizer.Add(textmeansizer)
+        topSizer.Add(textname)
+        topSizer.Add(self.bgname)
+        topSizer.Add(self.btnimport)
+        topSizer.Add(dropsizer)
+        topSizer.Add(applysizer)
+        topSizer.Add(allsizer)
+        panel.SetSizer(topSizer)
+        topSizer.Fit(self)
+        self.SetMinSize(topSizer.GetMinSizeTuple())
+        self.Show(True)
+        ## Canvas
+        self.canvas = plot.PlotCanvas(self.sp)
+        # Sizes
+        psize = panel.GetBestSize()
+        initial_size = (psize[0],psize[1]+200)
+        self.SetSize(initial_size)
+        sashsize = psize[1]+3
+        # This is also necessary to prevent unsplitting
+        self.sp.SetMinimumPaneSize(sashsize)
+        self.sp.SplitHorizontally(panel, self.canvas, sashsize)
+        # If there is no page, disable ourselves:
+        self.OnPageChanged(self.parent.notebook.GetCurrentPage())
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+
+
+    def OnApply(self, event):
+        strFull = self.WXTextPages.GetValue()
+        PageNumbers = misc.parseString2Pagenum(self, strFull)
+        if PageNumbers is None:
+            # Something went wrong and parseString2Pagenum already displayed
+            # an error message.
+            return
+        # BG number
+        item = self.dropdown.GetSelection()
+        # Apply to corresponding pages
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            j = filter(lambda x: x.isdigit(), Page.counter)
+            if int(j) in PageNumbers:
+                Page.bgselected = item
+                Page.OnAmplitudeCheck("init")
+                Page.PlotAll()
+
+
+    def OnApplyAll(self, event):
+        self.btnrem.Enable(True)
+        self.btnremyall.Enable(True)
+        N = self.parent.notebook.GetPageCount()
+        item = self.dropdown.GetSelection()
+        for i in np.arange(N):
+            # Set Page 
+            Page = self.parent.notebook.GetPage(i)
+            Page.bgselected = item
+            try:
+                Page.OnAmplitudeCheck("init")
+                Page.PlotAll()
+            except OverflowError:
+                errstr = "Could not apply background to Page "+Page.counter+\
+                 ". \n Check the value of the trace average and the background."
+                dlg = wx.MessageDialog(self, errstr, "Error", 
+                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg.ShowModal()
+                Page.bgselected = None
+
+
+    def OnClose(self, event=None):
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+            
+
+    def OnBrowse(self, event):
+        # opf.BGFiletypes is a dictionary with filetypes that have some
+        # trace signal information.
+        SupFiletypes = opf.BGFiletypes.keys()
+        SupFiletypes.sort()
+        filters = ""
+        for i in np.arange(len(SupFiletypes)):
+            # Add to the filetype filter
+            filters = filters+SupFiletypes[i]
+            if i+1 != len(SupFiletypes):
+                # Add a separator
+                filters = filters+"|"
+        dlg = wx.FileDialog(self, "Choose a data file", 
+            self.parent.dirname, "", filters, wx.OPEN)
+        if dlg.ShowModal() == wx.ID_OK:
+            # Workaround since 0.7.5
+            (dirname, filename) = os.path.split(dlg.GetPath())
+            #filename = dlg.GetFilename()
+            #dirname = dlg.GetDirectory()
+            # Set parent dirname for user comfort
+            self.parent.dirname = dirname
+            try:
+                # [data, trace, curvelist]
+                stuff = readfiles.openAnyBG(dirname, filename)
+            except:
+                # The file does not seem to be what it seems to be.
+                info = sys.exc_info()
+                errstr = "Unknown file format:\n"
+                errstr += str(filename)+"\n\n"
+                errstr += str(info[0])+"\n"
+                errstr += str(info[1])+"\n"
+                for tb_item in traceback.format_tb(info[2]):
+                    errstr += tb_item
+                dlg = wx.MessageDialog(self, errstr, "Error", 
+                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                dlg.ShowModal()
+                return
+            # Usually we will get a bunch of traces. Let the user select which
+            # one to take.
+            if len(stuff["Filename"]) > 1:
+                choices = list()
+                for i2 in np.arange(len(stuff["Filename"])):
+                    choices.append(str(i2)+". " + stuff["Filename"][i2] + " " +
+                                   stuff["Type"][i2])
+                dlg = wx.SingleChoiceDialog(self, "Choose a curve",
+                                            "Curve selection", choices=choices)
+                if dlg.ShowModal() == wx.ID_OK:
+                    selindex = dlg.GetSelection()
+                else:
+                    return
+            else:
+                selindex = 0
+            # If a cross-correlation curve was accidentally recorded as the
+            # background, let the user choose which trace to use:
+            channelindex = None
+            if ( len(stuff["Type"][selindex]) >= 2 and 
+                 stuff["Type"][selindex][0:2] == "CC"       ):
+                choices = ["Channel 1", "Channel 2"]
+                label = "From which channel do you want to use the trace?"
+                dlg = wx.SingleChoiceDialog(self, label,
+                                "Curve selection", choices=choices)
+                if dlg.ShowModal() == wx.ID_OK:
+                    channelindex = dlg.GetSelection()
+                    trace = stuff["Trace"][selindex][channelindex]
+                else:
+                    return
+            else:
+                trace = stuff["Trace"][selindex]
+            if trace is None:
+                print "WARNING: I did not find any trace data."
+                return
+            # Display filename and some of the directory
+            self.textfile.SetLabel("File: ..."+dirname[-10:]+"/"+filename)
+            name = str(selindex)+". "+stuff["Filename"][selindex]+" "+\
+                   stuff["Type"][selindex]
+            if channelindex is not None:
+                name += " "+str(channelindex+1)
+            self.bgname.SetValue(name)
+            
+            self.trace = trace
+            # Calculate average
+            self.average = self.trace[:,1].mean()
+            # Display average
+            self.textmean.SetLabel(str(self.average)+" kHz")
+            self.spinctrl.SetValue(self.average)
+            # Let the user see the opened file
+            self.dropdown.SetSelection(0)
+            # show trace
+            self.OnDraw()
+            # Enable button and editable name
+            self.bgname.Enable(True)
+            self.btnimport.Enable(True)
+        else:
+            # User pressed "Cancel" - only remember the directory.
+            self.parent.dirname = dlg.GetDirectory()
+            dlg.Destroy()
+            return
+
+
+    def OnDraw(self, event=None):
+        item = self.dropdown.GetSelection()
+        if item < 0:
+            # Disable Apply Buttons
+            self.btnapply.Enable(False)
+            self.btnapplyall.Enable(False)
+            # Draw the trace that was just imported
+            if self.trace is not None:
+                # Calculate average
+                self.average = self.trace[:,1].mean()
+                self.activetrace = self.trace
+                #self.textafterdropdown.SetLabel(" Avg:  "+str(self.average)+
+                #                                " kHz")
+                self.textmean.SetLabel(str(self.average))
+                self.spinctrl.SetValue(self.average)
+            else:
+                # Clear the canvas. Looks better.
+                self.canvas.Clear()
+                # Don't show the average
+                #self.textafterdropdown.SetLabel("")
+                self.textmean.SetLabel("")
+                return
+        else:
+            # Enable Apply Buttons
+            self.btnapply.Enable(True)
+            self.btnapplyall.Enable(True)
+            # Draw a trace from the list
+            self.activetrace = self.parent.Background[item-1][2]
+            #self.textafterdropdown.SetLabel(" Avg:  "+
+            #                    str(self.parent.Background[item-1][0]))
+        # We want to have the trace in [s] here.
+        trace = 1.*self.activetrace
+        trace[:,0] = trace[:,0]/1000
+        linesig = plot.PolyLine(trace, legend='', colour='blue', width=1)
+        self.canvas.Draw(plot.PlotGraphics([linesig], 
+                         xLabel='time [s]', 
+                         yLabel='background signal [kHz]'))
+
+
+    def OnImport(self, event):
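+        # Each entry in parent.Background is a list: [average (kHz), name, trace].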
+        self.parent.Background.append([self.average, self.bgname.GetValue(), 
+                                      self.trace])
+        name = "{} ({:.2f} kHz)".format(self.bgname.GetValue(), self.average)
+        self.BGlist.append(name)
+        self.UpdateDropdown()
+        # Let the user see the imported file
+        self.dropdown.SetSelection(len(self.BGlist)-1)
+        self.btnremyall.Enable(True)
+        self.btnrem.Enable(True)
+        self.btnapplyall.Enable(True)
+        self.btnapply.Enable(True)
+        self.OnDraw()
+        # Update BG dropdown of each page
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            self.parent.notebook.GetPage(i).OnAmplitudeCheck()
+
+
+    def OnPageChanged(self, page):
+        # We do not need the *Range* Commands here yet.
+        # We open and close the SelectChannelsFrame every time we
+        # import some data.
+        if len(self.parent.Background) == 0:
+            self.BGlist = list()
+            self.UpdateDropdown()
+            self.dropdown.SetValue("File/User")
+        if self.parent.notebook.GetPageCount() == 0:
+            self.sp.Disable()
+            return
+        self.sp.Enable()
+        if (self.WXTextPages.GetValue() == ""
+            and self.parent.notebook.GetPageCount() != 0):
+            # Initial value for WXTextPages
+            pagenumlist = list()
+            for i in np.arange(self.parent.notebook.GetPageCount()):
+                Page = self.parent.notebook.GetPage(i)
+                pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
+            valstring=misc.parsePagenum2String(pagenumlist)
+            self.WXTextPages.SetValue(valstring)
+        
+
+
+    def OnRadioFile(self, event):
+        # Do not let the user change the spinctrl
+        # setting.
+        self.spinctrl.Enable(False)
+        self.btnbrowse.Enable(True)
+        # Restore the old trace
+        self.trace = self.oldtrace
+        if self.oldfilename is not None:
+            self.textfile.SetLabel(self.oldfilename)
+        if self.trace is None:
+            # Disable button and editable name
+            self.bgname.Enable(False)
+            self.btnimport.Enable(False)
+        # Let us draw
+        self.dropdown.SetSelection(0)
+        self.OnDraw()
+
+
+    def OnRadioHand(self, event):
+        # Let user enter a signal.
+        self.spinctrl.Enable(True)
+        self.btnbrowse.Enable(False)
+        # save the old trace. We might want to switch back to it.
+        if self.trace is not None:
+            self.oldtrace = 1.*self.trace
+            self.oldfilename = self.textfile.GetLabel()
+        self.SpinCtrlChange()
+        # Do not show the filename
+        self.textfile.SetLabel("No file selected.")
+        # Enable button and editable name
+        self.bgname.Enable(True)
+        self.btnimport.Enable(True)
+        if len(self.bgname.GetValue()) == 0:
+            # Enter something as name
+            self.bgname.SetValue("User")
+
+
+    def OnRemove(self, event):
+        strFull = self.WXTextPages.GetValue()
+        PageNumbers = misc.parseString2Pagenum(self, strFull)
+        if PageNumbers is None:
+            # Something went wrong and parseString2Pagenum already displayed
+            # an error message.
+            return
+        # BG number
+        item = self.dropdown.GetSelection()
+        # Apply to corresponding pages
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            j = filter(lambda x: x.isdigit(), Page.counter)
+            if int(j) in PageNumbers:
+                Page.bgselected = None
+                Page.OnAmplitudeCheck("init")
+                Page.PlotAll()
+
+
+    def OnRemoveAll(self, event):
+        N = self.parent.notebook.GetPageCount()
+        for i in np.arange(N):
+            Page = self.parent.notebook.GetPage(i)
+            Page.bgselected = None
+            Page.OnAmplitudeCheck("init")
+            Page.PlotAll()
+
+    def SetPageNumbers(self, pagestring):
+        self.WXTextPages.SetValue(pagestring)
+    
+    def SpinCtrlChange(self, event=None):
+        # Let user see the continuous trace we will generate
+        self.average = self.spinctrl.GetValue()
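+        # A constant two-point trace is sufficient to represent the
+        # user-defined background signal.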
+        self.trace = np.array([[0,self.average],[1,self.average]])
+        self.textmean.SetLabel(str(self.average))
+        self.OnDraw()
+
+
+    def UpdateDropdown(self):
+        self.dropdown.SetItems(self.BGlist)
+
diff --git a/src/tools/batchcontrol.py b/src/tools/batchcontrol.py
new file mode 100644
index 0000000..690c264
--- /dev/null
+++ b/src/tools/batchcontrol.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - batch
+    Stuff that concerns batch processing.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import numpy as np
+import wx
+
+import openfile as opf     # How to treat an opened file
+import models as mdls
+
+# Menu entry name
+MENUINFO = ["B&atch control", "Batch fitting."]
+
+class BatchCtrl(wx.Frame):
+    def __init__(self, parent):
+        # Parent is main frame
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=parent, title="Batch control",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        ## Controls
+        panel = wx.Panel(self)
+        self.panel = panel
+        text1 = wx.StaticText(panel, label="Choose source of parameters:")
+        self.rbtnhere = wx.RadioButton(panel, -1, 'This session', 
+                                        style = wx.RB_GROUP)
+        self.rbtnhere.SetValue(True)
+        self.rbtnthere = wx.RadioButton(panel, -1, 'Other session')
+        self.dropdown = wx.ComboBox(panel, -1, "Current page", (15, 30),
+                         wx.DefaultSize, [], wx.CB_DROPDOWN|wx.CB_READONLY)
+        # Create the dropdownlist
+        self.OnPageChanged()
+        text2 = wx.StaticText(panel, label='This will affect all pages'+
+                                           '\nwith the same model.'+
+                                           '\nApply parameters:')
+        btnapply = wx.Button(panel, wx.ID_ANY, 'Apply to applicable pages')
+        btnfit = wx.Button(panel, wx.ID_ANY, 'Fit applicable pages')
+        # Bindings
+        self.Bind(wx.EVT_BUTTON, self.OnApply, btnapply)
+        self.Bind(wx.EVT_BUTTON, self.OnFit, btnfit)
+        self.Bind(wx.EVT_RADIOBUTTON, self.OnRadioHere, self.rbtnhere)
+        self.Bind(wx.EVT_RADIOBUTTON, self.OnRadioThere, self.rbtnthere)
+        # self.Bind(wx.EVT_COMBOBOX, self.OnSelect, self.dropdown)
+        topSizer = wx.BoxSizer(wx.VERTICAL)
+        topSizer.Add(text1)
+        topSizer.Add(self.rbtnhere)
+        topSizer.Add(self.rbtnthere)
+        topSizer.AddSpacer(5)
+        topSizer.Add(self.dropdown)
+        topSizer.AddSpacer(5)
+        topSizer.Add(text2)
+        topSizer.AddSpacer(5)
+        topSizer.Add(btnapply)
+        topSizer.Add(btnfit)
+        panel.SetSizer(topSizer)
+        topSizer.Fit(self)
+        self.SetMinSize(topSizer.GetMinSizeTuple())
+        # Check if we even have pages.
+        self.OnPageChanged()
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+
+    def OnApply(self, event):
+        # Get the item from the dropdown list
+        item = self.dropdown.GetSelection()
+        if self.rbtnhere.Value:
+            # Get parameters from this session
+            if item <= 0:
+                Page = self.parent.notebook.GetCurrentPage()
+            else:
+                Page = self.parent.notebook.GetPage(item-1)
+            # First apply the parameters of the page
+            Page.apply_parameters()          
+            # Get all parameters
+            Parms = self.parent.PackParameters(Page)
+        else:
+            # Get Parameters from different session
+            Parms = self.YamlParms[item]
+        modelid = Parms[1]
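+        # (Parms[1] holds the model id; cf. OnFit and OnRadioThere below.)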
+        # Set all parameters for all pages
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            OtherPage = self.parent.notebook.GetPage(i)
+            if OtherPage.modelid == modelid and OtherPage.dataexp is not None:
+                self.parent.UnpackParameters(Parms, OtherPage)
+                OtherPage.PlotAll()
+
+
+    def OnClose(self, event=None):
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnFit(self, event):
+        item = self.dropdown.GetSelection()
+        if self.rbtnhere.Value:
+            if item <= 0:
+                Page = self.parent.notebook.GetCurrentPage()
+            else:
+                Page = self.parent.notebook.GetPage(item-1)
+            # Get internal ID
+            modelid = Page.modelid
+        else:
+            # Get external ID
+            modelid = self.YamlParms[item][1]
+        # Fit all pages with right modelid
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            OtherPage = self.parent.notebook.GetPage(i)
+            if (OtherPage.modelid == modelid and
+                OtherPage.dataexpfull is not None):
+                #Fit
+                OtherPage.Fit_function(noplots=True)
+
+
+    def OnPageChanged(self, Page=None):
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+            return
+        else:
+            self.panel.Enable()
+        # We need to update the list of Pages in self.dropdown
+        if self.rbtnhere.Value:
+            DDlist = list()
+            DDlist.append("Current page")
+            for i in np.arange(self.parent.notebook.GetPageCount()):
+                aPage = self.parent.notebook.GetPage(i)
+                DDlist.append(aPage.counter+aPage.model)
+            self.dropdown.SetItems(DDlist)
+            self.dropdown.SetSelection(0)
+
+
+    def OnRadioHere(self, event=None):
+        self.OnPageChanged()
+
+
+    def OnRadioThere(self, event=None):
+        # If user clicks on pages in main program, we do not want the list
+        # to be changed.
+        self.YamlParms, dirname, filename = \
+                      opf.ImportParametersYaml(self.parent, self.parent.dirname)
+        if filename is None:
+            # User did not select any session file
+            self.rbtnhere.SetValue(True)
+        else:
+            DDlist = list()
+            for i in np.arange(len(self.YamlParms)):
+                # Rebuild the list
+                modelid = self.YamlParms[i][1]
+                modelname = mdls.modeldict[modelid][1]
+                DDlist.append(self.YamlParms[i][0]+modelname)
+            self.dropdown.SetItems(DDlist)
+            # Set selection text to first item
+            self.dropdown.SetSelection(0)
diff --git a/src/tools/chooseimport.py b/src/tools/chooseimport.py
new file mode 100644
index 0000000..7cae653
--- /dev/null
+++ b/src/tools/chooseimport.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - chooseimport
+    Displays a window that lets the user choose which types
+    of data (AC1, AC2, CC12, CC21) to import.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import numpy as np
+import wx
+
+import models as mdls
+import doc
+import overlaycurves
+
+
+class ChooseImportTypes(wx.Dialog):
+    """ This class is used for importing single files from the "Current" menu.
+        The model function is defined by the model that is in use.
+    """
+    # This tool is derived from a wx.Dialog.
+    def __init__(self, parent, curvedict):
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # init
+        #super(ChooseImportTypes, self).__init__(parent=parent, 
+        #    title="Choose types", size=(250, 200))
+        wx.Dialog.__init__(self, parent, -1, "Choose models")
+        self.keys = list()
+         ## Content
+        self.panel = wx.Panel(self)
+        self.sizer = wx.BoxSizer(wx.VERTICAL)
+        self.boxes = dict()
+        textinit = wx.StaticText(self.panel, label=doc.chooseimport)
+        self.sizer.Add(textinit)
+        thekeys = curvedict.keys()
+        thekeys.sort()
+        for key in thekeys:
+            label = key + " (" + str(len(curvedict[key])) + " curves)"
+            check = wx.CheckBox(self.panel, label=label)
+            self.boxes[key] = check
+            self.sizer.Add(check)
+            self.Bind(wx.EVT_CHECKBOX, self.OnSetkeys, check)
+        btnok = wx.Button(self.panel, wx.ID_OK, 'OK')
+        # Binds the button to the function - close the tool
+        self.Bind(wx.EVT_BUTTON, self.OnClose, btnok)
+        self.sizer.Add(btnok)
+        self.panel.SetSizer(self.sizer)
+        self.sizer.Fit(self)
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Dialog.SetIcon(self, parent.MainIcon)
+        #self.Show(True)
+        self.SetFocus()
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.EndModal(wx.ID_OK)
+        #self.Destroy()
+
+
+    def OnSetkeys(self, event=None):
+        self.keys = list()
+        for key in self.boxes.keys():
+            if self.boxes[key].Value:
+                self.keys.append(key)
+
+
+class ChooseImportTypesModel(wx.Dialog):
+    """ This class shows a dialog displaying options to choose
+        model function on import of data
+    """
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent, curvedict, correlations, labels=None):
+        """ curvedict - dictionary, contains indexes to correlations and
+                        labels. The keys are different types of curves
+            correlations - list of correlations
+            labels - list of labels for the correlations (e.g. filename+run)
+                     if None, index numbers will be used as labels
+        """
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # init
+        #super(ChooseImportTypesModel, self).__init__(parent=parent, 
+        #    title="Choose types", size=(250, 200))
+        wx.Dialog.__init__(self, parent, -1, "Choose models")
+        self.curvedict = curvedict
+        self.kept_curvedict = curvedict.copy() # Can be edited by user
+        self.correlations = correlations
+        self.labels = labels
+        # List of keys that will be imported by our *parent*
+        self.typekeys = list()
+        # Dictionary of modelids corresponding to indices in curvedict
+        self.modelids = dict()
+        ## Content
+        self.panel = wx.Panel(self)
+        self.sizer = wx.BoxSizer(wx.VERTICAL)
+        self.boxes = dict()
+        labelim = "Select a fitting model for each correlation channel (AC,CC)."
+        textinit = wx.StaticText(self.panel, label=labelim)
+        self.sizer.Add(textinit)
+        curvekeys = curvedict.keys()
+        curvekeys.sort()
+        self.curvekeys = curvekeys
+        # Dropdown model selections:
+        DropdownList = ["No model selected"] # Contains string in model dropdown
+        self.DropdownIndex = [None]          # Contains corresponsing model
+        modelkeys = mdls.modeltypes.keys()
+        modelkeys.sort()
+        for modeltype in modelkeys:
+            for modelid in mdls.modeltypes[modeltype]:
+                DropdownList.append(modeltype+": "+mdls.modeldict[modelid][1])
+                self.DropdownIndex.append(modelid)
+        self.ModelDropdown = dict()
+        dropsizer = wx.FlexGridSizer(rows=len(curvekeys), cols=3, vgap=5, hgap=5)
+        self.Buttons = list()
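+        # Button IDs start at 8000; OnSelectCurves subtracts 8000 again to
+        # recover the index of the corresponding curve type.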
+        i = 8000
+        for key in curvekeys:
+            # Text with keys and number of curves
+            dropsizer.Add( wx.StaticText(self.panel, label=str(key)) )
+            label=" ("+str(len(curvedict[key]))+" curves)"
+            button = wx.Button(self.panel, i, label)
+            i += 1
+            self.Bind(wx.EVT_BUTTON, self.OnSelectCurves, button)
+            self.Buttons.append(button)
+            dropsizer.Add(button)
+            # Model selection dropdown
+            dropdown = wx.ComboBox(self.panel, -1, DropdownList[0], (15,30),
+               wx.DefaultSize, DropdownList, wx.CB_DROPDOWN|wx.CB_READONLY)
+            dropsizer.Add( dropdown )
+            self.ModelDropdown[key] = dropdown
+            self.Bind(wx.EVT_COMBOBOX, self.OnSetkeys, dropdown)
+        self.sizer.Add(dropsizer)
+        btnok = wx.Button(self.panel, wx.ID_OK, 'OK')
+        # Binds the button to the function - close the tool
+        self.Bind(wx.EVT_BUTTON, self.OnClose, btnok)
+        self.sizer.Add(btnok)
+        self.panel.SetSizer(self.sizer)
+        self.sizer.Fit(self)
+        #self.Show(True)
+        self.SetFocus()
+        if parent.MainIcon is not None:
+            wx.Dialog.SetIcon(self, parent.MainIcon)
+
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.keepcurvesindex = list()
+        for key in self.kept_curvedict.keys():
+            self.keepcurvesindex += self.kept_curvedict[key]
+        for i in np.arange(len(self.keepcurvesindex)):
+            self.keepcurvesindex[i] = int(self.keepcurvesindex[i])
+        self.EndModal(wx.ID_OK)
+        
+        #self.Show
+        #self.Destroy()
+
+
+    def OnSelectCurves(self, buttonevent):
+        # Get the type of curves we want to look at
+        index = buttonevent.GetId() - 8000
+        self.buttonindex = index
+        curvedict = dict()
+        key = self.curvekeys[index]
+        # Get correlation curves for corresponding type
+        corrcurves = dict()
+        if self.labels is None:
+            labeldict = None
+        else:
+            labeldict = dict()
+        for i in self.curvedict[key]:
+            corrcurves[str(i)] = self.correlations[int(i)]
+            if self.labels is not None:
+                labeldict[str(i)] = self.labels[int(i)]
+        prev_selected = list()
+        for item in self.kept_curvedict.keys():
+            prev_selected += self.kept_curvedict[item]
+        Selector = overlaycurves.Wrapper_OnImport(self.parent, corrcurves,
+                                                 self.OnSelected, prev_selected,
+                                                 labels=labeldict)
+
+    def OnSelected(self, keep, remove):
+        # Set new button label
+        for i in np.arange(len(keep)):
+            keep[i] = int(keep[i])
+        #button = self.Buttons[self.buttonindex]
+        label = " ("+str(len(keep))+" curves)"
+        #button.SetLabel(label)
+        # Add new content to selected key
+        SelectedKey = self.curvekeys[self.buttonindex]
+        #self.kept_curvedict[SelectedKey] = keep
+        # If there are keys with the same number of correlations,
+        # these are assumed to be AC2, CC12, CC21 etc., so we will remove
+        # items from them accordingly.
+        kept = set(keep).intersection(set(self.curvedict[SelectedKey]))
+        indexes = list()
+        for i in np.arange(len(self.curvedict[SelectedKey])):
+            for number in kept:
+                if number == self.curvedict[SelectedKey][i]:
+                    indexes.append(i)
+        for j in np.arange(len(self.curvekeys)):
+            key = self.curvekeys[j]
+            if len(self.curvedict[key]) == len(self.curvedict[SelectedKey]):
+                newlist = list()
+                for index in indexes:
+                    newlist.append(self.curvedict[key][index])
+                self.kept_curvedict[key] = newlist
+                # Also update buttons
+                button = self.Buttons[j]
+                button.SetLabel(label)
+
+
+    def OnSetkeys(self, event=None):
+        # initiate objects
+        self.typekeys = list()
+        self.modelids = dict()
+        # iterate through all given keys (AC1, AC2, CC12, etc.)
+        for key in self.curvedict.keys():
+            # get the dropdown selection for a given key
+            modelindex = self.ModelDropdown[key].GetSelection()
+            # modelindex is -1 or 0, if no model has been chosen
+            if modelindex > 0:
+                # Append the key to a list of to be imported types
+                self.typekeys.append(key)
+                # Append the modelid to a dictionary that has indexes
+                # belonging to the imported curves in *parent*
+                modelid = self.DropdownIndex[modelindex]
+                for index in self.curvedict[key]:
+                    # Set different model id for the curves
+                    self.modelids[index] = modelid
+        self.typekeys.sort()
+
+        
+
+
+        
diff --git a/src/tools/comment.py b/src/tools/comment.py
new file mode 100755
index 0000000..ab9792e
--- /dev/null
+++ b/src/tools/comment.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - comment
+    Just edit the session's comment.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+
+
+class EditComment(wx.Frame):
+    """ Little Dialog to edit the comment on the session. """
+    def __init__(self, parent):
+        ## Variables
+        # parent is main frame
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=parent, title="Session comment",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        initial_size = (400,300)
+        initial_sizec = (initial_size[0], initial_size[1]-50)
+        self.SetSize(initial_size)
+        self.SetMinSize((400,300))
+        ## Content
+        self.panel = wx.Panel(self)
+        self.control = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE, 
+                        size=initial_sizec, value=self.parent.SessionComment)
+        text = wx.StaticText(self.panel, 
+                   label="Session comments will be saved in the  session file.")
+        # buttons
+        btnclose = wx.Button(self.panel, wx.ID_ANY, 'Close')
+        btnokay = wx.Button(self.panel, wx.ID_ANY, 'OK')
+        self.Bind(wx.EVT_BUTTON, self.OnClose, btnclose)
+        self.Bind(wx.EVT_BUTTON, self.OnOkay, btnokay)
+        #sizers
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        buttonsizer = wx.BoxSizer(wx.HORIZONTAL)
+        buttonsizer.Add(btnclose, 1)
+        buttonsizer.Add(btnokay, 1)
+        self.topSizer.Add(text)
+        self.topSizer.Add(buttonsizer)
+        self.topSizer.Add(self.control)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+        wx.EVT_SIZE(self, self.OnSize)
+
+
+    def OnSize(self, event):
+        size = event.GetSize()
+        sizec = (size[0], size[1]-50)
+        self.panel.SetSize(size)
+        self.control.SetSize(sizec)
+
+
+    def OnClose(self, event=None):
+        self.parent.filemenu.Check(self.parent.menuComm.GetId(), False)
+        self.Destroy()
+
+
+    def OnOkay(self, event):
+        self.parent.SessionComment = self.control.GetValue()
+        self.OnClose()
+
diff --git a/src/tools/datarange.py b/src/tools/datarange.py
new file mode 100644
index 0000000..1b86e67
--- /dev/null
+++ b/src/tools/datarange.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - datarange
+    Let the user choose time domains.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+
+
+# Menu entry name
+MENUINFO = ["&Data range",
+            "Select an interval of lag times to be used for fitting."]
+
+class SelectChannels(wx.Frame):
+    def __init__(self, parent):
+        # parent is main frame
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Data range selection",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        ## Start drawing
+        panel = wx.Panel(self)
+        self.panel = panel
+        # Page
+        self.Page = self.parent.notebook.GetCurrentPage()
+        self.Calc_init(self.Page)
+        text1 = wx.StaticText(panel, label=u"The lag times τ are stored as an "+
+                                           u"array of length ")
+        self.textend = wx.StaticText(panel, label="%d." % self.lentau)
+        text2 = wx.StaticText(panel,
+                              label=u"You may wish to confine this array. "+
+                                    u"This can be done here.")
+        ##Spincontrols:
+        FlexSpinSizer = wx.FlexGridSizer(rows=2, cols=4, vgap=5, hgap=5)
+        FlexSpinSizer.Add(wx.StaticText(panel, label="Channels:"))
+        self.spinstart = wx.SpinCtrl(panel, -1, initial=self.left, 
+                                     min=self.start0, max=self.end0-1)
+        FlexSpinSizer.Add(self.spinstart)
+        FlexSpinSizer.Add(wx.StaticText(panel, label=" - "))
+        self.spinend = wx.SpinCtrl(panel, -1, initial=self.right, 
+                                   min=self.start0+1, max=self.end0)
+        FlexSpinSizer.Add(self.spinend)
+        FlexSpinSizer.Add(wx.StaticText(panel, label="Times [ms]:"))
+        self.TextTimesStart = wx.StaticText(panel, label="None")
+        FlexSpinSizer.Add(self.TextTimesStart)
+        FlexSpinSizer.Add(wx.StaticText(panel, label=" - "))
+        self.TextTimesEnd = wx.StaticText(panel, label="None")
+        FlexSpinSizer.Add(self.TextTimesEnd)
+        # Buttons
+        btnapply = wx.Button(panel, wx.ID_ANY, 'Apply')
+        btnapplyall = wx.Button(panel, wx.ID_ANY, 'Apply to all pages')
+        self.ButtonApply = btnapply
+        self.ButtonApplyAll = btnapplyall
+        self.Bind(wx.EVT_BUTTON, self.OnApply, btnapply)
+        self.Bind(wx.EVT_BUTTON, self.OnApplyAll, btnapplyall)
+        self.Bind(wx.EVT_SPINCTRL, self.OnChangeChannels, self.spinend)
+        self.Bind(wx.EVT_SPINCTRL, self.OnChangeChannels, self.spinstart)
+        # Checkbox
+        self.fixcheck = wx.CheckBox(panel, -1,
+             label="Fix current channel selection for all pages.")
+        self.Bind(wx.EVT_CHECKBOX, self.OnCheckbox, self.fixcheck)
+        # Text
+        channelsel = "Leave this window open for a fixed selection."
+        text3 = wx.StaticText(panel, label=channelsel)
+        # Sizer
+        topSizer = wx.BoxSizer(wx.VERTICAL)
+        buttonsizer = wx.BoxSizer(wx.HORIZONTAL)
+        buttonsizer.Add(btnapply, 1)
+        buttonsizer.Add(btnapplyall, 1)
+        text1sizer = wx.BoxSizer(wx.HORIZONTAL)
+        text1sizer.Add(text1)
+        text1sizer.Add(self.textend)
+        topSizer.Add(text1sizer)
+        topSizer.Add(text2)
+        topSizer.AddSpacer(5)
+        topSizer.Add(FlexSpinSizer)
+        topSizer.Add(self.fixcheck)
+        topSizer.Add(text3)
+        topSizer.AddSpacer(5)
+        topSizer.Add(buttonsizer)
+        panel.SetSizer(topSizer)
+        topSizer.Fit(self)
+        self.SetMinSize(topSizer.GetMinSizeTuple())
+        # Get times.
+        self.OnChangeChannels()
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        # Show window
+        self.Show(True)
+        self.OnPageChanged(self.Page)
+
+
+    def Calc_init(self, parent):
+        ## Variables
+        # Parent should be the fitting panel -
+        # The tab, where the fitting is done.
+        self.Page = parent
+        if self.Page is None:
+            # dummy info
+            taufull = np.arange(100)
+            self.left = self.right = None
+            self.panel.Disable()
+        else:
+            self.left = self.Page.startcrop     # starting position
+            self.right = self.Page.endcrop      # ending position
+            if self.Page.dataexpfull is not None:
+                taufull = self.Page.dataexpfull[:,0]
+            else:
+                # then we only have tau
+                taufull = self.Page.taufull
+        self.lentau = len(taufull)
+        self.start0 = 0                     # left border of interval
+        # The interval starts at 0!
+        self.end0 = self.lentau - 1         # right border of interval 
+        if self.left is None or self.left > self.end0:
+            # This means that either left = right = None
+            # or the dataexp-array is too small
+            self.left = self.start0
+        if self.right is None:
+            # set the maximum possible value
+            self.right = self.end0
+        else:
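+            # endcrop is an exclusive index, but the spin control
+            # displays an inclusive one.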
+            self.right -= 1
+
+
+    def OnApply(self, event=None):
+        self.SetValues()
+        self.Page.PlotAll(event="init")
+
+
+    def OnApplyAll(self, event=None):
+        start = self.spinstart.GetValue()
+        end = self.spinend.GetValue() + 1  # +1: endcrop is exclusive (see SetValues)
+        if start > end:
+            # swap the variables, we are not angry at the user
+            start, end = end, start
+        # Get all the Pages
+        N = self.parent.notebook.GetPageCount()
+        for i in np.arange(N):
+            # Set Page 
+            Page = self.parent.notebook.GetPage(i)
+            # Find out maximal length
+            if Page.dataexpfull is not None:
+                maxlen = len(Page.dataexpfull[:,0])
+            else:
+                # then we only have tau
+                maxlen = len(Page.taufull)
+            # Use the smaller one of both, so we do not get an
+            # index out of bounds error
+            Page.endcrop = min(end, maxlen)
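+            # The multiplication by a boolean resets startcrop to 0 if it
+            # would lie outside the data.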
+            Page.startcrop = start*(start < maxlen - 1)
+            Page.PlotAll()
+        # Page.PlotAll() calls this function. This results in the wrong data
+        # being displayed in an open "View Info" Window. We call it again.
+        self.parent.OnFNBPageChanged()
+
+
+    def OnChangeTimes(self, e=None):
+        """ Called, whenever data range in seconds is changed. This updates
+            the data range in channels in the window.
+            This function might be used in later versions of PyCorrFit.
+        """
+        pass
+
+
+    def OnChangeChannels(self, e=None):
+        """ Called, whenever data range in channels is changed. This updates
+            the data range in seconds in the window.
+        """
+        if self.Page is None:
+            return
+        N = len(self.Page.taufull)
+        start = self.spinstart.Value
+        end = self.spinend.Value
+        # If the initial boundaries are outside of the experimental
+        # data array of length N, change the start and end variables.
+        start = start*(start < N-2)
+        end = min(end, N-1)
+        t1 = 1.*self.Page.taufull[start]
+        t2 = 1.*self.Page.taufull[end]
+        self.TextTimesStart.SetLabel("%.4e" % t1)
+        self.TextTimesEnd.SetLabel("%.4e" % t2)
+        self.OnCheckbox()
+
+
+    def OnCheckbox(self, event=None):
+        """ Set the correct value in the spincontrol, if the checkbox
+            is not checked.
+        """
+        state = self.fixcheck.GetValue()
+        if state:
+            self.OnApplyAll()
+            self.ButtonApply.Disable()
+            self.ButtonApplyAll.Disable()
+        else:
+            self.ButtonApply.Enable()
+            self.ButtonApplyAll.Enable()
+        #self.OnPageChanged(self.Page)
+            
+
+    def OnClose(self, event=None):
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnPageChanged(self, page):
+        # We do not need the *Range* Commands here yet.
+        # We open and close the SelectChannelsFrame every time we
+        # import some data.
+        #
+        # Check if we have a fixed channel selection
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+        else:
+            self.panel.Enable()
+            # There is a page. We may continue.
+            state = self.fixcheck.GetValue()
+            if state:
+                # We do not need to run Calc_init
+                self.Page = page
+                self.SetValues()
+                self.Page.PlotAll(event="init")
+            else:
+                # We will run it
+                self.Calc_init(page)
+                self.spinstart.SetRange(self.start0, self.end0-1)
+                self.spinstart.SetValue(self.left)
+                self.spinend.SetRange(self.start0+1, self.end0)
+                self.spinend.SetValue(self.right)
+                self.textend.SetLabel("%d." % self.lentau)
+                self.OnChangeChannels()
+
+
+
+    def SetValues(self):
+        start = self.spinstart.GetValue()
+        end = self.spinend.GetValue()
+        if start > end:
+            # swap the variables, we are not angry at the user
+            start, end = end, start
+        self.Page.startcrop = start
+        self.Page.endcrop = end + 1 # +1, because arrays are accessed like this
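+        # Example: spinstart=10 and spinend=89 yield startcrop=10 and
+        # endcrop=90, i.e. channels 10 through 89 are kept.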
diff --git a/src/tools/example.py b/src/tools/example.py
new file mode 100644
index 0000000..92d741f
--- /dev/null
+++ b/src/tools/example.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - example
+    This is an example tool. You will need to edit __init__.py inside this
+    folder to activate it.
+    Add the filename (*example*) and class (*Tool*) to either of the lists
+    *ImpA*  or *ImpB* in __init__.py.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+
+
+class Tool(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent):
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Example tool",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Content
+        self.panel = wx.Panel(self)
+        btncopy = wx.Button(self.panel, wx.ID_ANY, 'Example button')
+        # Binds the button to the function - close the tool
+        self.Bind(wx.EVT_BUTTON, self.OnClose, btncopy)
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        self.topSizer.Add(btncopy)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        self.SetMinSize(self.topSizer.GetMinSizeTuple())
+        # Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnPageChanged(self, page):
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        if self.parent.notebook.GetPageCount() == 0:
+            # Do something when there are no pages left.
+            self.panel.Disable()
+            return
+        self.panel.Enable()
+        self.Page = page
+        
+
diff --git a/src/tools/globalfit.py b/src/tools/globalfit.py
new file mode 100644
index 0000000..80ffb5d
--- /dev/null
+++ b/src/tools/globalfit.py
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - globalfit
+    Perform global fitting on pages which share parameters.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+from scipy import optimize as spopt
+
+import misc
+import models as mdls
+
+# Menu entry name
+MENUINFO = ["&Global fitting",
+            "Interconnect parameters from different measurements."]
+
+class GlobalFit(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent):
+        # Define a unique name that identifies this tool
+        # Do not change this value. It is important for the Overlay tool
+        # (selectcurves.py, *Wrapper_Tools*).
+        self.MyName="GLOBALFIT"
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Gobal fitting",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Content
+        self.panel = wx.Panel(self)
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        textinit = """Fitting of multiple data sets with different models.
+Parameter names have to match. Select pages (e.g. 1,3-5,7),
+check parameters on each page and start 'Global fit'. 
+"""
+        self.topSizer.Add(wx.StaticText(self.panel, label=textinit))
+        ## Page selection
+        self.WXTextPages = wx.TextCtrl(self.panel, value="", size=(330,-1))
+        # Set initial value in text control
+        pagenumlist = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
+        valstring=misc.parsePagenum2String(pagenumlist)
+        self.WXTextPages.SetValue(valstring)
+        self.topSizer.Add(self.WXTextPages)
+        ## Weighted fitting
+        # The weighted fit of the current page will be applied to
+        # all other pages.
+        self.weightedfitdrop = wx.ComboBox(self.panel)
+        self.topSizer.Add(self.weightedfitdrop)
+        ## Button
+        btnfit = wx.Button(self.panel, wx.ID_ANY, 'Global fit')
+        # Binds the button to the global fitting function
+        self.Bind(wx.EVT_BUTTON, self.OnFit, btnfit)
+        self.topSizer.Add(btnfit)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        self.SetMinSize(self.topSizer.GetMinSizeTuple())
+        self.OnPageChanged(self.Page)
+        # Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+    
+    def fit_function(self, parms):
+        """
+            *parms*: Parameters to fit, array
+            needs: 
+             self.parmstofit - list (strings) of parameters to fit
+                               (corresponding to *parms*)
+             self.PageData (dict with dict item = self.PageData["PageNumber"]):
+                item["x"]
+                item["data"]
+                item["modelid"]
+                item["values"]
+        """
+        # The list containing arrays to be minimized
+        minimize = list()
+        for key in self.PageData.keys():
+            # Get the function
+            item = self.PageData[key]
+            modelid = item["modelid"]
+            function = mdls.modeldict[modelid][3]
+            values = self.PageData[key]["values"]
+            # Set parameters for each function (Page)
+            for i in np.arange(len(self.parmstofit)):
+                p = self.parmstofit[i]
+                labels = mdls.valuedict[modelid][0]
+                if p in labels:
+                    index = labels.index(p)
+                    values[index] = parms[i]
+            # Check parameters, if there is such a function
+            check_parms = mdls.verification[modelid]
+            values = check_parms(values)
+            # Write parameters back?
+            # self.PageData[key]["values"] = values
+            # Calculate resulting correlation function
+            # corr = function(item.values, item.x)
+            # Subtract data. This is the function we want to minimize
+            minimize.append(
+              (function(values, item["x"]) - item["data"]) / item["dataweights"]
+                           )
+
+        # Flatten the list and make an array out of it.
+        return np.array([item for sublist in minimize for item in sublist])
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+    def OnFit(self, e=None):
+        # process a string like this: "1,2,4-9,10"
+        strFull = self.WXTextPages.GetValue()
+        PageNumbers = misc.parseString2Pagenum(self, strFull)
+        if PageNumbers is None:
+            # Something went wrong and parseString2Pagenum already displayed
+            # an error message.
+            return
+        ## Get the corresponding pages, if they exist:
+        self.PageData = dict()
+        self.parmstofit = list()
+        fitparms = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            j = filter(lambda x: x.isdigit(), Page.counter)
+            if int(j) in PageNumbers:
+                dataset = dict()
+                try:
+                    dataset["x"] = Page.dataexp[:,0]
+                    dataset["data"] = Page.dataexp[:,1]
+                except:
+                    print "No experimental data in page #"+j+"!"
+                else:
+                    dataset["modelid"] = Page.modelid
+                    Page.apply_parameters()
+                    dataset["values"] = Page.active_parms[1]
+                    # Get weights
+                    weighttype = self.weightedfitdrop.GetSelection()
+                    Page.Fitbox[1].SetSelection(weighttype)
+                    weightname = self.weightedfitdrop.GetValue()
+                    setweightname = Page.Fitbox[1].GetValue()
+                    if setweightname.count(weightname) == 0:
+                        print "Page "+Page.counter+" has no fitting type '"+ \
+                              weightname+"'!"
+                    Page.Fit_WeightedFitCheck()
+                    Fitting = Page.Fit_create_instance(noplots=True)
+                    if Fitting.dataweights is None:
+                        dataset["dataweights"] = 1.
+                    else:
+                        dataset["dataweights"] = Fitting.dataweights
+                    self.PageData[int(j)] = dataset
+                    # Get the parameters to fit from that page
+                    labels = Page.active_parms[0]
+                    parms = 1*Page.active_parms[1]
+                    tofit = 1*Page.active_parms[2]
+                    for k in np.arange(len(labels)):
+                        if tofit[k]:
+                            if self.parmstofit.count(labels[k]) == 0:
+                                self.parmstofit.append(labels[k])
+                                fitparms.append(parms[k])
+        fitparms = np.array(fitparms)
+        # Now we can perform the least squares fit
+        if len(fitparms) == 0:
+            return
+        res = spopt.leastsq(self.fit_function, fitparms[:], full_output=1)
+        (popt, pcov, infodict, errmsg, ier) = res
+        #self.parmoptim, self.mesg = spopt.leastsq(self.fit_function, 
+        #                                          fitparms[:])
+        self.parmoptim = res[0]
+        # So we have the optimal parameters.
+        # We would like to give each page a chi**2 and its parameters back:
+        # Create a clean list of PageNumbers
+        # UsedPages = dict.fromkeys(PageNumbers).keys()
+        UsedPages = self.PageData.keys()
+        UsedPages.sort()
+        for key in UsedPages:
+            # Get the Page:
+            for i in np.arange(self.parent.notebook.GetPageCount()):
+                aPage = self.parent.notebook.GetPage(i)
+                j = filter(lambda x: x.isdigit(), aPage.counter)
+                if int(j) == int(key):
+                    Page = aPage
+            Page.GlobalParameterShare = UsedPages
+            # Get the function
+            item = self.PageData[key]
+            modelid = item["modelid"]
+            function = mdls.modeldict[modelid][3]
+            values = 1*Page.active_parms[1]
+            # Set parameters for each Page
+            for i in np.arange(len(self.parmstofit)):
+                p = self.parmstofit[i]
+                labels = mdls.valuedict[modelid][0]
+                if p in labels:
+                    index = labels.index(p)
+                    values[index] = self.parmoptim[i]
+                    Page.active_parms[2][index] = True
+            # Check parameters, if there is such a function
+            check_parms = mdls.verification[modelid]
+            values = check_parms(values)
+            # Write parameters back?
+            Page.active_parms[1] = 1*values
+            # Calculate resulting correlation function
+            # corr = function(item.values, item.x)
+            # Subtract data. This is the function we want to minimize
+            residual = function(values, item["x"]) - item["data"]
+            # Calculate chi**2
+            # Set the parameter error estimates for all pages
+            minimized = self.fit_function(self.parmoptim)
+            degrees_of_freedom = len(minimized) - len(self.parmoptim) - 1
+            self.chi = Page.chi2 = np.sum((minimized)**2) / degrees_of_freedom
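+            # (reduced chi²: weighted sum of squared residuals per degree
+            # of freedom)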
+            try:
+                self.covar = pcov * self.chi
+            except:
+                self.parmoptim_error = None
+            else:
+                if self.covar is not None:
+                    self.parmoptim_error = np.diag(self.covar)
+                else:
+                    self.parmoptim_error = None
+            p_error = self.parmoptim_error
+            if p_error is None:
+                Page.parmoptim_error = None
+            else:
+                Page.parmoptim_error = dict()
+                for i in np.arange(len(p_error)):
+                    Page.parmoptim_error[self.parmstofit[i]] = p_error[i]
+            Page.apply_parameters_reverse()
+            # Because we are plotting the weights, we need to update
+            # the corresponding info in each page:
+            weightid = self.weightedfitdrop.GetSelection()
+            if weightid != 0:
+                # We have weights.
+                # We need the following information for correct plotting.
+                Page.weighted_fit_was_performed = True
+                Page.weights_used_for_fitting = Fitting.dataweights
+                Page.calculate_corr()
+                Page.data4weight = 1.*Page.datacorr
+            Page.PlotAll()
+
+
+    def OnPageChanged(self, page):
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+            return
+        self.panel.Enable()
+        self.Page = page
+        if self.Page is not None:
+            weightlist = self.Page.Fitbox[1].GetItems()
+            # Do not display knot number for spline. May be different for each page.
+            # Remove everything after a "(" in the weightlist string.
+            # This way, e.g. the list does not show the knotnumber, which
+            # we don't use anyhow.
+            # We are doing this for all elements, because other weighting
+            # methods might be implemented in the future.
+            for i in np.arange(len(weightlist)):
+                weightlist[i] = weightlist[i].split("(")[0].strip()
+            self.weightedfitdrop.SetItems(weightlist)
+            try:
+                # if there is no data, this could go wrong
+                self.Page.Fit_create_instance(noplots=True)
+                FitTypeSelection = self.Page.Fitbox[1].GetSelection()
+            except:
+                FitTypeSelection = 0
+            self.weightedfitdrop.SetSelection(FitTypeSelection)
+            ## Knotnumber: we don't want to interfere
+            # The user might want to edit the knotnumbers.
+            # self.FitKnots = Page.FitKnots   # 5 by default
+
+    def SetPageNumbers(self, pagestring):
+        self.WXTextPages.SetValue(pagestring)
diff --git a/src/tools/info.py b/src/tools/info.py
new file mode 100644
index 0000000..451627e
--- /dev/null
+++ b/src/tools/info.py
@@ -0,0 +1,315 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - info
+    Open a text window with lots of information.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+
+import models as mdls
+
+# Menu entry name
+MENUINFO = ["Page &info",
+            "Display some information on the current page."]
+            
+class InfoClass(object):
+    """ This class get's all the Info possible from a Page and
+        makes it available through a dictionary with headings as keys.
+    """
+    def __init__(self, CurPage=None, Pagelist=None ):
+        # A list of all Pages currently available:
+        self.Pagelist = Pagelist
+        # The current page we are looking at:
+        self.CurPage = CurPage
+
+
+    def GetAllInfo(self):
+        """ Get a dictionary with page titles and an InfoDict as value.
+        """
+        MultiInfo = dict()
+        for Page in self.Pagelist:
+            # Page counter includes a whitespace and a ":" which we do not want.
+            MultiInfo[Page.counter[:-2]] = self.GetPageInfo(Page)
+        return MultiInfo
+
+
+    def GetCurInfo(self):
+        """ Get all the information about the current Page.
+            Added for convenience. You may use GetPageInfo.
+        """
+        return self.GetPageInfo(self.CurPage)
+
+
+    def GetCurFancyInfo(self):
+        """ For convenience. """
+        return self.GetFancyInfo(self.CurPage)
+
+
+    def GetFancyInfo(self, Page):
+        """ Get a nice string representation of the Info """
+        InfoDict = self.GetPageInfo(Page)
+        # Version
+        Version = "PyCorrFit v."+InfoDict["version"][0]+"\n"
+        # Title
+        Title = "\n"
+        for item in InfoDict["title"]:
+            Title = Title + item[0]+"\t"+ item[1]+"\n"
+        # Parameters
+        Parameters = "\nParameters:\n"
+        for item in InfoDict["parameters"]:
+            Parameters = Parameters + "  "+item[0]+"\t"+ str(item[1])+"\n"
+        # Supplementary parameters
+        Supplement = "\nSupplementary parameters:\n"
+        try:
+            for item in InfoDict["supplement"]:
+                Supplement = Supplement + "  "+item[0]+"\t"+ str(item[1])+"\n"
+        except KeyError:
+            Supplement = ""
+        # Fitting
+        Fitting = "\nFitting:\n"
+        try:
+            for item in InfoDict["fitting"]:
+                Fitting = Fitting + "  "+item[0]+"\t"+str(item[1])+"\n"
+        except KeyError:
+            Fitting = ""
+        # Background
+        Background = "\nBackground:\n"
+        try:
+            for item in InfoDict["background"]:
+                Background = Background + "  "+item[0]+"\t"+str(item[1])+"\n"
+        except KeyError:
+            Background = ""
+
+        # Function doc string
+        ModelDoc = "\n\nModel doc string:\n       " + InfoDict["modeldoc"][0]
+        # Supplementary variables
+        try:
+            SupDoc = "\n"+8*" "+InfoDict["modelsupdoc"][0]
+        except:
+            SupDoc = ""
+        PageInfo = Version+Title+Parameters+Supplement+Fitting+Background+\
+                   ModelDoc+SupDoc
+        return PageInfo
+
+
+    def GetPageInfo(self, Page):
+        """ Needs a Page and gets all information from it """
+        Page.PlotAll("init")
+        # A dictionary with headings as keys and lists of singlets/tuples as
+        # values. If an entry is a tuple, it might be interesting for a table.
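+        # Illustrative shape (entries are made up):
+        #   InfoDict["title"]      = [["filename/title", "NoName"], ...]
+        #   InfoDict["parameters"] = [["n", 4.0], ...]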
+        InfoDict = dict()
+        # Get model information
+        model = [Page.model, Page.tabtitle.GetValue(), Page.modelid]
+        parms = Page.active_parms[1]
+        fct = Page.active_fct.__name__
+        InfoDict["version"] = [Page.parent.version]
+        Title = list()
+        # The tool statistics relies on the string "filename/title".
+        # Do not change it!
+        if len(model[1]) == 0:
+            # Prevent saving no title
+            model[1] = "NoName"
+        Title.append(["filename/title", model[1] ]) 
+        Title.append(["Model ID", str(model[2]) ]) 
+        Title.append(["Model name", model[0] ]) 
+        Title.append(["Model function", fct ]) 
+        Title.append(["Page number", Page.counter[1:-2] ]) 
+        ## Parameters
+        Parameters = list()
+        # Use this function to determine human readable parameters, if possible
+        Units, Newparameters = mdls.GetHumanReadableParms(model[2], parms)
+        # Add Parameters
+        for i in np.arange(len(parms)):
+            Parameters.append([ Units[i], Newparameters[i] ])
+        InfoDict["parameters"] = Parameters
+        # Add some more information if available
+        # Info is a dictionary or None
+        MoreInfo = mdls.GetMoreInfo(model[2], Page)
+        if MoreInfo is not None:
+            InfoDict["supplement"] = MoreInfo
+            # Try to get the dictionary entry of a model
+            try:
+                # This function should return all important information
+                # that can be calculated from the given parameters.
+                func_info = mdls.supplement[model[2]]
+            except KeyError:
+                # No information available
+                pass
+            else:
+                InfoDict["modelsupdoc"] = [func_info.func_doc]
+        ## Fitting
+        weightedfit = Page.weighted_fit_was_performed
+        weightedfit_type = Page.weighted_fittype
+        fittingbins = Page.weighted_nuvar  # from left and right
+        Fitting = list()
+        if Page.dataexp is not None:
+            # Mode AC vs CC
+            if Page.IsCrossCorrelation is True:
+                Title.append(["Type AC/CC", "Cross-correlation" ]) 
+            else:
+                Title.append(["Type AC/CC", "Autocorrelation" ]) 
+            Fitting.append([ u"\u03c7"+"²", Page.chi2 ])
+            if Page.weighted_fit_was_performed:
+                Chi2type = "reduced "+u"\u03c7"+"²"
+            else:
+                Chi2type = "reduced sum of squares"
+            Fitting.append([ u"\u03c7"+"²-type", Chi2type ])
+            Fitting.append([ "Weighted fit", weightedfit_type ])
+            if len(Page.GlobalParameterShare) != 0:
+                shared = str(Page.GlobalParameterShare[0])
+                for item in Page.GlobalParameterShare[1:]:
+                    shared += ", "+str(item)
+                Fitting.append(["Shared parameters with Pages", shared])
+            if weightedfit is True:
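+                # weighted_nuvar channels on each side plus the center channel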
+                Fitting.append([ "Std. channels", 2*fittingbins+1 ])
+            # Fitting range:
+            t1 = 1.*Page.taufull[Page.startcrop]
+            t2 = 1.*Page.taufull[Page.endcrop-1]
+            Fitting.append([ "Interval start [ms]", "%.4e" % t1 ])
+            Fitting.append([ "Interval end [ms]", "%.4e" % t2 ])
+            # Fitted parameters and errors
+            somuch = sum(Page.active_parms[2])
+            if somuch >= 1:
+                fitted = ""
+                for i in np.arange(len(Page.active_parms[2])):
+                    if Page.active_parms[2][i]:
+                        errorvar = Page.active_parms[0][i] # variable name
+                        fitted=fitted+errorvar+ ", "
+                fitted = fitted.strip().strip(",") # remove trailing comma
+                Fitting.append(["fit par.", fitted])
+                # Fitting error included in v.0.7.3
+                Errors_fit = Page.parmoptim_error
+                if Errors_fit is not None:
+                    errkeys = Errors_fit.keys()
+                    errkeys.sort()
+                    for key in errkeys:
+                        savekey, saveval = \
+                            mdls.GetHumanReadableParameterDict(model[2],
+                                                [key], [Errors_fit[key]])
+                        # The tool statistics relies on the string "Err ".
+                        # Do not change it!
+                        Fitting.append(["Err "+savekey[0], saveval[0]])
+            InfoDict["fitting"] = Fitting
+        ## Normalization
+        if Page.normparm is None:
+            normparmtext = "None"
+        elif Page.normparm < len(Page.active_parms[0]):
+            normparmtext = Page.active_parms[0][Page.normparm]
+        else:
+            # supplementary parameters
+            supnum = Page.normparm - len(Page.active_parms[1])
+            normparmtext =  MoreInfo[supnum][0]
+        Title.append(["Normalization", normparmtext ]) 
+        ## Background
+        bgselected = Page.bgselected # Selected Background
+        Background = list()
+        if bgselected is not None:
+            bgname = Page.parent.Background[bgselected][1]
+            if len(bgname) == 0:
+                # Prevent saving no name
+                bgname = "NoName"
+            bgrate = Page.parent.Background[bgselected][0]
+            Background.append([ "bg name", bgname ])
+            Background.append([ "bg rate [kHz]", bgrate ])
+            InfoDict["background"] = Background
+        ## Function doc string
+        InfoDict["modeldoc"] = [Page.active_fct.func_doc]
+        InfoDict["title"] = Title
+
+        return InfoDict
+
+
+class ShowInfo(wx.Frame):
+    def __init__(self, parent):
+        # parent is main frame
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Info",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page
+        self.Page = self.parent.notebook.GetCurrentPage()
+        # Size
+        initial_size = wx.Size(650,700)
+        initial_sizec = (initial_size[0]-6, initial_size[1]-30)
+        self.SetMinSize(wx.Size(200,200))
+        self.SetSize(initial_size)
+        ## Content
+        self.panel = wx.Panel(self)
+        self.control = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE, 
+                        size=initial_sizec)
+        font1 = wx.Font(10, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Monospace')
+        self.control.SetFont(font1)
+        btncopy = wx.Button(self.panel, wx.ID_CLOSE, 'Copy to clipboard')
+        self.Bind(wx.EVT_BUTTON, self.OnCopy, btncopy)
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        self.topSizer.Add(btncopy)
+        self.topSizer.Add(self.control)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+        wx.EVT_SIZE(self, self.OnSize)
+        self.Content()
+
+
+    def Content(self):
+        # Fill self.control with content.
+        # Parameters and models
+        if self.parent.notebook.GetPageCount() == 0:
+            self.control.SetValue("")
+            self.panel.Disable()
+            return
+        self.panel.Enable()
+        Page = self.Page
+        InfoMan = InfoClass(CurPage=Page)
+        PageInfo = InfoMan.GetCurFancyInfo()
+        self.control.SetValue(PageInfo)
+
+
+    def OnClose(self, event=None):
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnCopy(self, event):
+        if not wx.TheClipboard.IsOpened():
+            clipdata = wx.TextDataObject()
+            clipdata.SetText(self.control.GetValue())
+            wx.TheClipboard.Open()
+            wx.TheClipboard.SetData(clipdata)
+            wx.TheClipboard.Close()
+        else:
+            print "Other application has lock on clipboard."
+
+
+    def OnPageChanged(self, page=None):
+        # When parent changes
+        self.Page = page
+        self.Content()
+
+
+    def OnSize(self, event):
+        size = event.GetSize()
+        sizec = wx.Size(size[0]-5, size[1]-30)
+        self.panel.SetSize(size)
+        self.control.SetSize(sizec)
diff --git a/src/tools/overlaycurves.py b/src/tools/overlaycurves.py
new file mode 100644
index 0000000..71d1469
--- /dev/null
+++ b/src/tools/overlaycurves.py
@@ -0,0 +1,383 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - overlaycurves
+
+    Let the user choose which correlation curves to use.
+    Contains wrappers for file import and tools.
+    
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+from matplotlib import cm
+import numpy as np
+import platform
+import wx
+import wx.lib.plot as plot              # Plotting in wxPython
+
+import edclasses
+import misc
+
+# Menu entry name
+MENUINFO = ["&Overlay curves", "Select experimental curves."]
+
+class Wrapper_OnImport(object):
+    """ Wrapper for import function.
+        parent: wx.Frame
+        curvedict: dictionary with curves
+        onselected: external function that is called with two arguments:
+                    *kept keys* and *unwanted keys* as lists referring to
+                    curvedict.
+        selkeys: preselected values for curves in curvedict
+    """
+    def __init__(self, parent, curvedict, onselected,
+                 selkeys=None, labels=None):
+        self.onselected = onselected
+        self.parent = parent
+        self.Selector = UserSelectCurves(parent, curvedict, wrapper=self,
+                                         selkeys=selkeys, labels=labels)
+        self.Selector.Show()
+        self.Selector.MakeModal(True)
+        self.Selector.Bind(wx.EVT_CLOSE, self.OnClose)
+        
+    def OnClose(self, event=None):
+        self.Selector.MakeModal(False)
+        self.Selector.Destroy()
+
+        
+    def OnResults(self, keyskeep, keysrem):
+        """ Here we will close (or disable?) pages that are not wanted
+            by the user. It is important that we do not close pages that
+            do not contain any experimental data (Page.dataexp is None),
+            because we ignored those pages during import.
+        """
+        self.OnClose()
+        self.onselected(keyskeep,keysrem)
+        
+ 
+
+
+class Wrapper_Tools(object):
+    def __init__(self, parent):
+        """
+        Wrapper for tools menu. Gets curvedict from parent and starts curve
+        selection. See *UserSelectCurves* class.
+        """
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        ## Wrapping
+        curvedict, labels = self.GetCurvedict()
+        self.labels = labels
+        self.Selector = UserSelectCurves(parent, curvedict,
+                                         wrapper=self, labels=labels)
+        # This is necessary for parent to deselect and select the tool
+        # in the tools menu.
+        self.Bind = self.Selector.Bind
+        if self.parent.notebook.GetPageCount() == 0:
+            self.Selector.sp.Disable()
+
+
+    def Disable(self):
+        self.Selector.Disable()
+
+    def Enable(self, par=True):
+        self.Selector.Enable(par)
+    
+    def GetCurvedict(self, e=None):
+        curvedict = dict()
+        labels = dict()
+        N = self.parent.notebook.GetPageCount()
+        for i in np.arange(N):
+            Page = self.parent.notebook.GetPage(i)
+            key = Page.counter
+            if Page.dataexp is not None:
+                curve = 1*Page.dataexp
+                curve[:,1] *= Page.normfactor
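+                # Scale the amplitude by the page's normalization factor
+                # so that the displayed curves are comparable.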
+                curvedict[key] = curve
+                labels[key] = Page.tabtitle.GetValue()
+        return curvedict, labels
+
+        
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Selector.Destroy()
+
+
+    def OnPageChanged(self, page=None):
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        if self.parent.notebook.GetPageCount() == 0:
+            self.Selector.SelectBox.SetItems([])
+            self.Selector.sp.Disable()
+        else:
+            # Sticky behavior cleaned up in 0.7.8
+            curvedict, labels = self.GetCurvedict()
+            self.Selector.curvedict = curvedict
+            self.Selector.labels = labels
+            self.Selector.ProcessDict()
+            self.labels = labels
+            self.Selector.SelectBox.SetItems(self.Selector.curvelabels)
+            for i in np.arange(len(self.Selector.curvekeys)):
+                self.Selector.SelectBox.SetSelection(i)
+            self.Selector.OnUpdatePlot()
+
+
+    def OnResults(self, keyskeep, keysrem):
+        """ Here we will close (or disable?) pages that are not wanted
+            by the user. It is important that we do not close pages that
+            do not contain any experimental data (Page.dataexp is None),
+            because we ignored those pages during import.
+        """
+        if len(keysrem) == 0:
+            self.OnClose()
+            return
+        # warn the user!
+        # First make a list of all pages that need to be removed and then
+        # delete those pages.
+        overtext = "Keep only pages in this list?"
+        textlist = ""
+        for key in keyskeep:
+            textlist += "- "+key+" "+self.labels[key]+"\n"
+        dlg = edclasses.MyScrolledDialog(self.parent,
+                                          overtext, textlist, "Warning")
+        if dlg.ShowModal() == wx.ID_OK:
+            N = self.parent.notebook.GetPageCount()
+            pagerem = list()
+            for i in np.arange(N):
+                Page = self.parent.notebook.GetPage(i)
+                key = Page.counter
+                if keysrem.count(key) == 1:
+                    pagerem.append(Page)
+            for Page in pagerem:
+                j = self.parent.notebook.GetPageIndex(Page)
+                self.parent.notebook.DeletePage(j)
+        dlg.Destroy()
+        self.OnPageChanged()
+
+
+    def OnSelectionChanged(self, keylist):
+        if len(keylist) == 0:
+            return
+        # integer type list with page number
+        pagelist = list()
+        N = self.parent.notebook.GetPageCount()
+        for i in np.arange(N):
+            Page = self.parent.notebook.GetPage(i)
+            key = Page.counter
+            if keylist.count(key) == 1:
+                pagelist.append(int(key.strip("#: ")))
+        # Get open tools
+        toolkeys = self.parent.ToolsOpen.keys()
+        if len(toolkeys) == 0:
+            return
+        # Fill
+        string = misc.parsePagenum2String(pagelist)
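+        # parsePagenum2String condenses the page numbers into a compact
+        # string, presumably e.g. [1, 2, 3, 5] -> "1-3, 5" (see misc.py).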
+        for key in toolkeys:
+            tool = self.parent.ToolsOpen[key]
+            try:
+                tool.SetPageNumbers(string)
+            except:
+                pass
+        
+        
+class UserSelectCurves(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent, curvedict, wrapper=None, selkeys=None,
+                 labels=None):
+        """
+        *curvedict* is a dictionary that contains the curves. Keys serve as
+        identifiers in the curve selection.
+        e.g.
+        curvedict["#1:"] = np.array([[0.0, 1.0], [0.0, 0.971], ...])
+        *parent* is the main frame
+        *wrapper* is the object to which the chosen keys are given back. If
+           it is not None, it must provide a function *OnResults*, accepting
+           a list of keys as an argument.
+        *selkeys* keys in *curvedict* that are preselected.
+        *labels* dictionary with the same keys as *curvedict* - labels of the
+           entries in the list. If None, the keys of *curvedict* will be used.
+        """
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        self.wrapper = wrapper
+        self.curvedict = curvedict
+        self.selkeys = selkeys
+        self.labels = labels    # can be None
+        self.curvelabels = None # filled by self.ProcessDict()
+        if self.selkeys is not None:
+            newselkeys = list()
+            for item in self.selkeys:
+                newselkeys.append(str(item))
+            self.selkeys = newselkeys
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Curve selection",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT,
+                 size=(800,500))
+        ## Pre-process
+        self.ProcessDict()
+        ## Content
+        self.sp = wx.SplitterWindow(self, size=(500,500), style=wx.SP_NOBORDER)
+        self.sp.SetMinimumPaneSize(1)
+        # Top panel
+        panel_top = wx.Panel(self.sp, size=(500,200))
+        self.upperSizer = wx.BoxSizer(wx.VERTICAL)
+        if platform.system().lower() == 'darwin':
+            ctrl = "Apple"
+        else:
+            ctrl = "Ctrl"
+        text = "Select the curves to keep. \n" +\
+               "By holding down the '"+ctrl+"' key, single curves can be \n" +\
+               "selected or deselected. The 'Shift' key can be used \n" +\
+               "to select groups."
+        self.upperSizer.Add(wx.StaticText(panel_top, label=text))
+        # Bottom Panel
+        self.bottom_sp = wx.SplitterWindow(self.sp, size=(500,300), style=wx.SP_NOBORDER)
+        self.bottom_sp.SetMinimumPaneSize(1)
+        sizepanelx = 250
+        panel_bottom = wx.Panel(self.bottom_sp, size=(sizepanelx,300))
+        self.boxSizer = wx.BoxSizer(wx.VERTICAL)
+        # Box selection
+        style = wx.LB_EXTENDED
+        self.SelectBox = wx.ListBox(panel_bottom, size=(sizepanelx,300),
+                                    style=style, choices=self.curvelabels)
+        for i in np.arange(len(self.curvekeys)):
+            self.SelectBox.SetSelection(i)
+        # Deselect keys that are not in self.selkeys
+        if self.selkeys is not None:
+            for i in np.arange(len(self.curvekeys)):
+                if self.selkeys.count(self.curvekeys[i]) == 0:
+                    self.SelectBox.Deselect(i)
+        self.Bind(wx.EVT_LISTBOX, self.OnUpdatePlot, self.SelectBox)
+        self.boxSizer.Add(self.SelectBox)
+        # Button APPLY
+        btnok = wx.Button(panel_bottom, wx.ID_ANY, 'Apply')
+        self.Bind(wx.EVT_BUTTON, self.OnPushResults, btnok)
+        self.boxSizer.Add(btnok)
+        # Button CANCEL
+        btncancel = wx.Button(panel_bottom, wx.ID_ANY, 'Cancel')
+        self.Bind(wx.EVT_BUTTON, self.OnCancel, btncancel)
+        self.boxSizer.Add(btncancel)
+        # Finish off sizers
+        panel_top.SetSizer(self.upperSizer)
+        panel_bottom.SetSizer(self.boxSizer)
+        self.upperSizer.Fit(panel_top)
+        self.boxSizer.Fit(panel_bottom)
+        minsize = np.array(self.boxSizer.GetMinSizeTuple()) +\
+                  np.array(self.upperSizer.GetMinSizeTuple()) +\
+                  np.array((300,30))
+        self.SetMinSize(minsize)
+        #self.SetSize(minsize)
+        #self.SetMaxSize((9999, self.boxSizer.GetMinSizeTuple()[1]))
+        # Canvas
+        self.canvas = plot.PlotCanvas(self.bottom_sp)
+        self.canvas.setLogScale((True, False))  
+        self.canvas.SetEnableZoom(True)
+        # Splitter window
+        self.bottom_sp.SplitVertically(panel_bottom, self.canvas, sizepanelx)
+        sizetoppanel = self.upperSizer.GetMinSizeTuple()[1]
+        self.sp.SplitHorizontally(panel_top, self.bottom_sp, sizetoppanel)
+        self.OnUpdatePlot()
+        # Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+    
+    def ProcessDict(self, e=None):
+        # Define the order of keys used.
+        # We want to sort the keys, such that #10: is not before #1:
+        self.curvekeys = self.curvedict.keys()
+        # Sorting key function applied to each key before sorting:
+        page_num = lambda counter: int(counter.strip().strip(":").strip("#"))
+        try:
+            for item in self.curvekeys:
+                page_num(item)
+        except:
+            fstr = lambda x: x
+        else:
+            fstr = page_num
+        self.curvekeys.sort(key = fstr)
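+        # With numeric page counters this gives a natural sort, e.g.
+        # ["#10:", "#2:"] -> ["#2:", "#10:"] (illustrative).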
+        if self.labels is None:
+            self.curvelabels = self.curvekeys
+        else:
+            # Use given labels instead of curvekeys.
+            self.curvelabels = list()
+            for key in self.curvekeys:
+                self.curvelabels.append("#"+str(key).strip(":# ")+" "+self.labels[key])
+
+
+    def OnCancel(self, e=None):
+        """ Close the tool """
+        self.wrapper.OnClose()
+        
+
+    def OnPushResults(self, e=None):
+        # Get keys from selection
+        keyskeep = list()
+        for i in self.SelectBox.GetSelections():
+            keyskeep.append(self.curvekeys[i])
+        keysrem = list()
+        for key in self.curvekeys:
+            if keyskeep.count(key) == 0:
+                keysrem.append(key)
+        self.wrapper.OnResults(keyskeep, keysrem)
+
+
+    def OnUpdatePlot(self, e=None):
+        """ What should happen when the selection in *self.SelectBox*
+            is changed?
+            This function will also try to call the function
+            *self.parent.OnSelectionChanged* and hand over the list of
+            currently selected curves. This is an addon for 0.7.8
+            where we will control the page selection in the average
+            tool.
+        """
+        # Get selected curves
+        curves = list()
+        legends = list()
+        selection = self.SelectBox.GetSelections()
+        for i in selection:
+            curves.append(self.curvedict[self.curvekeys[i]])
+            legends.append(self.curvekeys[i])
+        # Set color map
+        cmap = cm.get_cmap("gist_rainbow")
+        # Clear Plot
+        self.canvas.Clear()
+        # Draw Plot
+        lines = list()
+        for i in np.arange(len(curves)):
+            color = cmap(1.*i/(len(curves)), bytes=True)
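+            # With bytes=True the colormap returns RGBA integers in 0-255,
+            # which can be passed directly to wx.Colour.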
+            color = wx.Colour(color[0], color[1], color[2])
+            line = plot.PolyLine(curves[i], legend=legends[i], colour=color,
+                                 width=1)
+            lines.append(line)
+        self.canvas.SetEnableLegend(True)
+        if len(curves) != 0:
+            self.canvas.Draw(plot.PlotGraphics(lines, 
+                         xLabel=u'lag time τ [s]', 
+                         yLabel=u'G(τ)'))
+        ## This is an addon for 0.7.8
+        keyskeep = list()
+        for i in self.SelectBox.GetSelections():
+            keyskeep.append(self.curvekeys[i])
+        try:
+            self.wrapper.OnSelectionChanged(keyskeep)
+        except:
+            pass
diff --git a/src/tools/parmrange.py b/src/tools/parmrange.py
new file mode 100644
index 0000000..4cc7ff7
--- /dev/null
+++ b/src/tools/parmrange.py
@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - RangeSelector
+    Select the range in which the parameter should reside for fitting.
+    This is only the frontend.
+    
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+
+import edclasses  # edited floatspin
+import models as mdls
+
+
+class RangeSelector(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, Page):
+        # parent is the main frame of PyCorrFit
+        self.parent = Page.parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Parameter Range",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Content
+        self.panel = wx.Panel(self)
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+
+        self.WXboxsizerlist = list()
+        self.WXparmlist = list()
+        self.OnPageChanged(self.Page)
+        #Icon
+        if self.parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, self.parent.MainIcon)
+        self.Show(True)
+
+
+    def FillPanel(self):
+        """ Fill the panel with parameters from the page
+        """
+        
+        self.parameter_range = np.zeros(self.Page.parameter_range.shape)
+        labels, parmleft = mdls.GetHumanReadableParms(self.Page.modelid,
+                                                 self.Page.parameter_range[:,0])
+        labels, parmright = mdls.GetHumanReadableParms(self.Page.modelid,
+                                                 self.Page.parameter_range[:,1])
+        self.parameter_range[:,0] = np.array(parmleft)
+        self.parameter_range[:,1] = np.array(parmright)
+        # Create one row of controls per parameter:
+        self.WXboxsizer = wx.FlexGridSizer(rows=len(labels), cols=4, vgap=5, hgap=5)
+        for i in range(len(labels)):
+            left = edclasses.FloatSpin(self.panel, digits=7, increment=.1)
+            right = edclasses.FloatSpin(self.panel, digits=7, increment=.1)
+            left.SetValue(self.parameter_range[i][0])
+            right.SetValue(self.parameter_range[i][1])
+            left.Bind(wx.EVT_SPINCTRL, self.OnSetParmRange)
+            right.Bind(wx.EVT_SPINCTRL, self.OnSetParmRange)
+            text = wx.StaticText(self.panel, label=u'< '+labels[i])
+            text2 = wx.StaticText(self.panel, label=u' <')
+            self.WXboxsizer.Add(left)
+            self.WXboxsizer.Add(text)
+            self.WXboxsizer.Add(text2)
+            self.WXboxsizer.Add(right)
+            self.WXparmlist.append([left, [text, text2], right])
+
+        self.topSizer.Add(self.WXboxsizer)
+        self.btnapply = wx.Button(self.panel, wx.ID_ANY, 'Apply')
+        self.Bind(wx.EVT_BUTTON, self.OnSetParmRange, self.btnapply)
+        self.topSizer.Add(self.btnapply)
+        
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.RangeSelector = None
+        self.Destroy()
+
+
+    def OnPageChanged(self, page=None):
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        self.Page = page
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+            return
+        self.panel.Enable()
+        try:
+            self.btnapply.Destroy()
+        except:
+            pass
+        for i in np.arange(len(self.WXparmlist)):
+            self.WXparmlist[i][0].Destroy() #start
+            self.WXparmlist[i][1][0].Destroy() #pname
+            self.WXparmlist[i][1][1].Destroy() #pname
+            self.WXparmlist[i][2].Destroy() #end
+        del self.WXparmlist
+        for i in np.arange(len(self.WXboxsizerlist)):
+            self.WXboxsizer.Remove(0)
+        self.WXboxsizerlist = list()
+        self.WXparmlist = list()
+        self.FillPanel()
+        self.WXboxsizer.Layout()
+        self.topSizer.Layout()
+        self.SetMinSize(self.topSizer.GetMinSizeTuple())
+        self.topSizer.Fit(self)
+
+
+    def OnSetParmRange(self, e):
+        """ Called whenever something is edited in this frame.
+            Writes back parameter ranges to the page
+        """
+        # Read out parameters from all controls
+        for i in range(len(self.WXparmlist)):
+            self.parameter_range[i][0] = self.WXparmlist[i][0].GetValue()
+            self.parameter_range[i][1] = self.WXparmlist[i][2].GetValue()
+            if self.parameter_range[i][0] > self.parameter_range[i][1]:
+                self.parameter_range[i][1] = 1.01*np.abs(self.parameter_range[i][0])
+                self.WXparmlist[i][2].SetValue(self.parameter_range[i][1])
+        # Set parameters
+        l, parm0 = mdls.GetInternalFromHumanReadableParm(self.Page.modelid,
+                                                     self.parameter_range[:,0])
+        l, parm1 = mdls.GetInternalFromHumanReadableParm(self.Page.modelid,
+                                                     self.parameter_range[:,1])
+        self.Page.parameter_range[:,0] = np.array(parm0)
+        self.Page.parameter_range[:,1] = np.array(parm1)
+        #self.Page.PlotAll()
+        
+
diff --git a/src/tools/plotexport.py b/src/tools/plotexport.py
new file mode 100644
index 0000000..bcbe190
--- /dev/null
+++ b/src/tools/plotexport.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - plotexport
+    Let the user create nice plots of our data.
+    --currently not used
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+
+import models as mdls
+
+
+class Tool(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent):
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Example Tool",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Content
+        self.panel = wx.Panel(self)
+        btnexample = wx.Button(self.panel, wx.ID_ANY, 'Example button')
+        # Bind the button to the function - close the tool
+        self.Bind(wx.EVT_BUTTON, self.OnClose, btnexample)
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        self.topSizer.Add(btnexample)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        self.SetMinSize(self.topSizer.GetMinSizeTuple())
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnPageChanged(self, page):
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        self.Page = page
+
diff --git a/src/tools/simulation.py b/src/tools/simulation.py
new file mode 100644
index 0000000..c346695
--- /dev/null
+++ b/src/tools/simulation.py
@@ -0,0 +1,445 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - simulation
+    Enables the user to change plotting parameters and replotting fast.
+    Might be useful for better understanding.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+
+import edclasses  # edited floatspin
+import models as mdls
+
+# Menu entry name
+MENUINFO = ["S&lider simulation",
+            "Fast plotting for different parameters."]
+
+class Slide(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent):
+        # parent is the main frame of PyCorrFit
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Simulation",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        # Starting positions/factors for spinctrls and sliders
+        self.slidemax = 1000
+        self.slidestart = 500
+        self.spinstartfactor = 0.1
+        self.spinendfactor = 1.9
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Content
+        self.panel = wx.Panel(self)
+        self.rbtnB = wx.RadioButton (self.panel, -1, 'Vary A and B', 
+                                        style = wx.RB_GROUP)
+        self.rbtnOp = wx.RadioButton (self.panel, -1, 'Fix relation')
+        self.btnreset = wx.Button(self.panel, wx.ID_ANY, 'Reset')
+        # Set starting variables
+        self.SetStart()
+        # Populate panel
+        dropsizer = wx.FlexGridSizer(rows=2, cols=3, vgap=5, hgap=5)
+        dropsizer.Add( wx.StaticText(self.panel, label="Parameter A"))
+        dropsizer.Add( wx.StaticText(self.panel, label="Operator"))
+        dropsizer.Add( wx.StaticText(self.panel, label="Parameter B"))
+        self.droppA = wx.ComboBox(self.panel, -1, self.labelA, (15, 20),
+                     wx.DefaultSize, self.parmAlist,
+                     wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.droppA.SetSelection(0)
+        self.Bind(wx.EVT_COMBOBOX, self.Ondrop, self.droppA)
+        self.dropop = wx.ComboBox(self.panel, -1, "", (10, 20),
+                     wx.DefaultSize, self.oplist,
+                     wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.dropop.SetSelection(0)
+        self.opfunc = self.opdict[self.oplist[0]]
+        self.Bind(wx.EVT_COMBOBOX, self.Ondrop, self.dropop)
+        self.droppB = wx.ComboBox(self.panel, -1, self.labelB, (15, 30),
+                     wx.DefaultSize, self.parmBlist,
+                     wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.Bind(wx.EVT_COMBOBOX, self.Ondrop, self.droppB)
+        self.droppB.SetSelection(1)
+        dropsizer.Add(self.droppA)
+        dropsizer.Add(self.dropop)
+        dropsizer.Add(self.droppB)
+        textfix = wx.StaticText(self.panel,
+                                label="\nEdit intervals and drag the slider.\n")
+        # Parameter A
+        slidesizer = wx.FlexGridSizer(rows=3, cols=5, vgap=5, hgap=5)
+        self.textstartA = wx.StaticText(self.panel, label=self.labelA)
+        slidesizer.Add(self.textstartA)
+        self.startspinA = edclasses.FloatSpin(self.panel, digits=7,
+                                            increment=.1)
+        slidesizer.Add(self.startspinA)
+        self.sliderA = wx.Slider(self.panel, -1, self.slidestart, 0,
+                                 self.slidemax, wx.DefaultPosition, (250, -1),
+                                 wx.SL_HORIZONTAL)
+        slidesizer.Add(self.sliderA)
+        self.endspinA = edclasses.FloatSpin(self.panel, digits=7,
+                                            increment=.1)
+        slidesizer.Add(self.endspinA)
+        self.textvalueA = wx.StaticText(self.panel, label= "%.5e" % self.valueA)
+        slidesizer.Add(self.textvalueA)
+        # Parameter B
+        self.textstartB = wx.StaticText(self.panel, label=self.labelB)
+        slidesizer.Add(self.textstartB)
+        self.startspinB = edclasses.FloatSpin(self.panel, digits=7,
+                                            increment=.1)
+        slidesizer.Add(self.startspinB)
+        self.sliderB = wx.Slider(self.panel, -1, self.slidestart, 0,
+                                 self.slidemax, wx.DefaultPosition, (250, -1),
+                                 wx.SL_HORIZONTAL)
+        slidesizer.Add(self.sliderB)
+        self.endspinB = edclasses.FloatSpin(self.panel, digits=7,
+                                            increment=.1)
+        slidesizer.Add(self.endspinB)
+        self.textvalueB = wx.StaticText(self.panel, label= "%.5e" % self.valueB)
+        slidesizer.Add(self.textvalueB)
+        # Result of operation
+        self.textstartOp = wx.StaticText(self.panel, label=self.labelOp)
+        slidesizer.Add(self.textstartOp)
+        self.startspinOp = edclasses.FloatSpin(self.panel, digits=7,
+                                            increment=.1)
+        slidesizer.Add(self.startspinOp)
+        self.sliderOp = wx.Slider(self.panel, -1, self.slidestart, 0,
+                                  self.slidemax, wx.DefaultPosition, (250, -1),
+                                  wx.SL_HORIZONTAL)
+        slidesizer.Add(self.sliderOp)
+        self.endspinOp = edclasses.FloatSpin(self.panel, digits=7,
+                                        increment=.1)
+        slidesizer.Add(self.endspinOp)
+        self.textvalueOp = wx.StaticText(self.panel,
+                                         label= "%.5e" % self.valueOp)
+        slidesizer.Add(self.textvalueOp)
+        # Bindings for slider
+        self.Bind(wx.EVT_SLIDER, self.OnSlider, self.sliderA)
+        self.Bind(wx.EVT_SLIDER, self.OnSlider, self.sliderB)
+        self.Bind(wx.EVT_SLIDER, self.OnSlider, self.sliderOp)
+        # Bindings for radiobuttons
+        self.Bind(wx.EVT_RADIOBUTTON, self.OnRadio, self.rbtnB)
+        self.Bind(wx.EVT_RADIOBUTTON, self.OnRadio, self.rbtnOp)
+        self.Bind(wx.EVT_BUTTON, self.OnReset, self.btnreset)
+        # Bindings for spin controls
+        # Our self-made spin controls already have wx.EVT_SPINCTRL bound to
+        # the increment function. We will call that function manually here.
+        self.startspinA.Unbind(wx.EVT_SPINCTRL)
+        self.startspinB.Unbind(wx.EVT_SPINCTRL)
+        self.startspinOp.Unbind(wx.EVT_SPINCTRL)
+        self.endspinA.Unbind(wx.EVT_SPINCTRL)
+        self.endspinB.Unbind(wx.EVT_SPINCTRL)
+        self.endspinOp.Unbind(wx.EVT_SPINCTRL)
+        self.Bind(wx.EVT_SPINCTRL, self.OnSlider, self.startspinA)
+        self.Bind(wx.EVT_SPINCTRL, self.OnSlider, self.startspinB)
+        self.Bind(wx.EVT_SPINCTRL, self.OnSlider, self.startspinOp)
+        self.Bind(wx.EVT_SPINCTRL, self.OnSlider, self.endspinA)
+        self.Bind(wx.EVT_SPINCTRL, self.OnSlider, self.endspinB)
+        self.Bind(wx.EVT_SPINCTRL, self.OnSlider, self.endspinOp)
+        # Set values
+        self.SetValues()
+        ## Sizers
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        self.topSizer.Add(dropsizer)
+        self.topSizer.Add(self.rbtnB)
+        self.topSizer.Add(self.rbtnOp)
+        self.topSizer.Add(self.btnreset)
+        self.topSizer.Add(textfix)
+        self.topSizer.Add(slidesizer)
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        #self.SetMinSize(self.topSizer.GetMinSizeTuple())
+        self.OnRadio()
+        self.OnPageChanged(self.Page, init=True)
+        #Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+
+    def CalcFct(self, A, B, C):
+        if self.rbtnB.Value == True:
+            func = self.opfunc[0]
+            try:
+                C = func(A, B)
+            except ZeroDivisionError:
+                # Leave C unchanged instead of returning None implicitly.
+                pass
+            return B, C
+        else:
+            func = self.opfunc[1]
+            try:
+                B = func(A, C)
+            except ZeroDivisionError:
+                # Leave B unchanged instead of returning None implicitly.
+                pass
+            return B, C
+
+
+    def Increment(self):
+        # Set the correct increment for each spinctrl
+        self.startspinA.increment()
+        self.startspinB.increment()
+        self.startspinOp.increment()
+        self.endspinA.increment()
+        self.endspinB.increment()
+        self.endspinOp.increment()
+
+
+    def FillOpDict(self):
+        # Dictionaries: [Calculate C, Calculate B)
+        self.opdict["A/B"] = [lambda A,B: A/B, lambda A,C: A/C]
+        self.opdict["B/A"] = [lambda A,B: B/A, lambda A,C: C*A]
+        self.opdict["A*B"] = [lambda A,B: A*B, lambda A,C: C/A]
+        self.opdict["A+B"] = [lambda A,B: A+B, lambda A,C: C-A]
+        self.opdict["A-B"] = [lambda A,B: A-B, lambda A,C: A-C]
+        self.opdict["A*exp(B)"] = [lambda A,B: A*np.exp(B),
+                                   lambda A,C: np.log(C/A)]
+        self.opdict["B*exp(A)"] = [lambda A,B: B*np.exp(A),
+                                   lambda A,C: C/np.exp(A)]
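+        # Consistency check (illustrative): for the key "A/B",
+        #   self.opdict["A/B"][0](6.0, 3.0) -> 2.0   (C from A and B)
+        #   self.opdict["A/B"][1](6.0, 2.0) -> 3.0   (B from A and C)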
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def Ondrop(self, event=None):
+        self.labelOp = self.oplist[self.dropop.GetSelection()]
+        self.labelA = self.parmAlist[self.droppA.GetSelection()]
+        self.labelB = self.parmBlist[self.droppB.GetSelection()]
+
+        self.textstartOp.SetLabel(self.labelOp)
+        self.textstartA.SetLabel(label=self.labelA)
+        self.textstartB.SetLabel(self.labelB)
+
+        self.sliderB.SetValue(self.slidestart)
+        self.sliderOp.SetValue(self.slidestart)
+        self.sliderA.SetValue(self.slidestart)
+        self.SetValues()
+        self.OnSize()
+
+
+    def OnPageChanged(self, page=None, init=False):
+        #if init:
+        #    # Get the parameters of the current page.
+        #    self.SavedParms = self.parent.PackParameters(self.Page)
+        # When parent changes
+        # This is a necessary function for PyCorrFit.
+        # This is stuff that should be done when the active page
+        # of the notebook changes.
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+            return
+        try:
+            # wx._core.PyDeadObjectError: The C++ part of the FittingPanel
+            # object has been deleted, attribute access no longer allowed.
+            oldcounter = self.Page.counter
+        except:
+            oldcounter = -1
+        if page is not None:
+            if page.counter != oldcounter:
+                self.Page = page
+                self.SetStart()
+                self.droppA.SetItems(self.parmAlist)
+                self.droppB.SetItems(self.parmBlist)
+                self.droppA.SetSelection(0)
+                self.droppB.SetSelection(1)
+                self.dropop.SetSelection(0)
+                # Set labels
+                self.Ondrop()
+        else:
+            self.Page = page
+        self.panel.Enable()
+
+
+    def OnRadio(self, event=None):
+        if self.rbtnB.Value == True:
+            # Parameter B is variable
+            self.sliderOp.Enable(False)
+            self.startspinOp.Enable(False)
+            self.endspinOp.Enable(False)
+            self.sliderB.Enable(True)
+            self.startspinB.Enable(True)
+            self.endspinB.Enable(True)
+        else:
+            # Operation result is variable
+            self.sliderOp.Enable(True)
+            self.startspinOp.Enable(True)
+            self.endspinOp.Enable(True)
+            self.sliderB.Enable(False)
+            self.startspinB.Enable(False)
+            self.endspinB.Enable(False)
+        self.Ondrop()
+
+
+    def OnReset(self, e=None):
+        self.parent.UnpackParameters(self.SavedParms, self.Page)
+        self.Page.apply_parameters_reverse()
+        #self.OnPageChanged(self.Page)
+        self.SetStart()
+        self.Ondrop()
+
+    def OnSize(self, event=None):
+        # We need this function, because contents of the flexgridsizer
+        # may change in size.
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        self.panel.SetSize(self.GetSize())
+
+
+    def OnSlider(self, event=None):
+        ## Set the slider values
+        idmax = self.sliderA.GetMax()
+        slideA = self.sliderA.GetValue()
+        startA = self.startspinA.GetValue()
+        endA = self.endspinA.GetValue()
+        self.valueA = startA + (endA-startA)*slideA/idmax
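+        # Linear mapping: a slider position s in [0, idmax] yields
+        # start + (end - start) * s / idmax for the respective parameter.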
+        self.textvalueA.SetLabel( "%.5e" % self.valueA)
+        if self.rbtnB.Value == True:
+            slideB = self.sliderB.GetValue()
+            startB = self.startspinB.GetValue()
+            endB = self.endspinB.GetValue()
+            self.valueB = startB + (endB-startB)*slideB/idmax
+        else:
+            # Same thing
+            slideOp = self.sliderOp.GetValue()
+            startOp = self.startspinOp.GetValue()
+            endOp = self.endspinOp.GetValue()
+            self.valueOp = startOp + (endOp-startOp)*slideOp/idmax
+        self.valueB, self.valueOp = self.CalcFct(self.valueA, self.valueB,
+                                                 self.valueOp)
+        self.textvalueB.SetLabel( "%.5e" % self.valueB)
+        self.textvalueOp.SetLabel( "%.5e" % self.valueOp)
+        self.Increment()
+        self.SetResult()
+        self.OnSize()
+
+
+    def SetResult(self, event=None):
+        if self.parent.notebook.GetPageCount() == 0:
+            # Nothing to do
+            return
+        # And Plot
+        idA = self.droppA.GetSelection()
+        idB = self.droppB.GetSelection()
+        # As of version 0.7.5: we want the units to be displayed
+        # human readable - the way they are displayed 
+        # in the Page info tool.
+        # Convert from human readable to internal units
+        # The easiest way is to make a copy of all parameters and
+        # only write back those that have been changed:
+        # 
+        parms_0 = 1.*np.array(mdls.valuedict[self.modelid][1])
+        parms_0[idA] = self.valueA # human readable units
+        parms_0[idB] = self.valueB # human readable units
+        label, parms_i =\
+            mdls.GetInternalFromHumanReadableParm(self.modelid, parms_0)
+        self.Page.active_parms[1][idA] = parms_i[idA]
+        self.Page.active_parms[1][idB] = parms_i[idB]
+        self.Page.apply_parameters_reverse()
+        self.Page.PlotAll()
+
+
+    def SetStart(self):
+        # Sets first and second variable of a page to
+        # Parameters A and B respectively.
+        self.SavedParms = self.parent.PackParameters(self.Page)
+        if self.parent.notebook.GetPageCount() == 0:
+            self.modelid = 6000
+            ParmLabels, ParmValues = \
+                   mdls.GetHumanReadableParms(self.modelid,
+                                              mdls.valuedict[6000][1])
+        else:
+            self.modelid = self.Page.modelid
+            ParmLabels, ParmValues = \
+                   mdls.GetHumanReadableParms(self.modelid,
+                                              self.Page.active_parms[1])
+
+        self.parmAlist = ParmLabels
+        self.parmBlist = ParmLabels
+        # Operators
+        # Calculation of variable A with fixed B
+        self.opdict = dict()
+        self.FillOpDict()
+        self.oplist = self.opdict.keys()
+        self.oplist.sort()
+        self.labelA = self.parmAlist[0]
+        self.labelB = self.parmBlist[1]
+        self.labelOp = self.oplist[0]
+        self.opfunc = self.opdict[self.labelOp]
+        self.valueA = ParmValues[0]
+        self.valueB = ParmValues[1]
+        self.valueB, self.valueOp = self.CalcFct(self.valueA, 
+                                                         self.valueB, 0)
+
+
+    def SetValues(self, event=None):
+        # Set the values for spin and slider
+        # As of version 0.7.5: we want the units to be displayed
+        # human readable - the way they are displayed 
+        # in the Page info tool.
+        #
+        # Parameter A
+        idA = self.droppA.GetSelection()
+        # Parameter B
+        idB = self.droppB.GetSelection()
+        # self.valueB = self.Page.active_parms[1][idB]
+        # self.valueA = self.Page.active_parms[1][idA]
+        if self.parent.notebook.GetPageCount() == 0:
+            self.modelid = 6000
+            ParmLabels, ParmValues = \
+                   mdls.GetHumanReadableParms(self.modelid,
+                                              mdls.valuedict[6000][1])
+        else:
+            self.modelid = self.Page.modelid
+            ParmLabels, ParmValues = \
+                   mdls.GetHumanReadableParms(self.modelid,
+                                              self.Page.active_parms[1])
+        self.valueA = ParmValues[idA]
+        self.valueB = ParmValues[idB]                             
+        # Operator
+        idop = self.dropop.GetSelection()
+        opkey = self.oplist[idop]
+        self.opfunc = self.opdict[opkey]
+        # Parameter A
+        startA = self.valueA*self.spinstartfactor
+        endA = self.valueA*self.spinendfactor
+        self.startspinA.SetValue(startA)
+        self.endspinA.SetValue(endA)
+        # Parameter B
+        startB = self.valueB*self.spinstartfactor
+        endB = self.valueB*self.spinendfactor
+        self.startspinB.SetValue(startB)
+        self.endspinB.SetValue(endB)
+        # Operation result
+        self.valueOp = self.opfunc[0](self.valueA, self.valueB)
+        startOp = self.valueOp*self.spinstartfactor
+        endOp = self.valueOp*self.spinendfactor
+        self.startspinOp.SetValue(startOp)
+        self.endspinOp.SetValue(endOp)
+        # Set text
+        self.textvalueA.SetLabel( "%.5e" % self.valueA)
+        self.textvalueB.SetLabel( "%.5e" % self.valueB)
+        self.textvalueOp.SetLabel( "%.5e" % self.valueOp)
+        self.Increment()
+        self.SetResult()
+
diff --git a/src/tools/statistics.py b/src/tools/statistics.py
new file mode 100644
index 0000000..9bda0f0
--- /dev/null
+++ b/src/tools/statistics.py
@@ -0,0 +1,574 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - statistics
+    Provide the user with tab-separated statistics of their curves.
+    Values are sorted according to the page number.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import wx.lib.plot as plot              # Plotting in wxPython
+import numpy as np
+import os
+
+from info import InfoClass
+import misc
+
+# Menu entry name
+MENUINFO = ["&Statistics view", "Show some session statistics."]
+
+def run_once(f):
+    def wrapper(*args, **kwargs):
+        if not wrapper.has_run:
+            wrapper.has_run = True
+            return f(*args, **kwargs)
+    wrapper.has_run = False
+    return wrapper
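+# Usage sketch (illustrative):
+#     @run_once
+#     def setup():
+#         print "runs only on the first call"
+#     setup(); setup()   # the body executes a single time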
+
+
+class Stat(wx.Frame):
+    # This tool is derived from a wx.frame.
+    def __init__(self, parent):
+        self.MyName="STATISTICS"
+        # parent is the main frame of PyCorrFit
+        self.boxsizerlist = list()
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Statistics",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        self.MyName = "STATISTICS"
+        # List of parameters that are plotted or not
+        self.PlotParms = list(["None", 0])
+        # Page - the currently active page of the notebook.
+        self.Page = self.parent.notebook.GetCurrentPage()
+        # Pagenumbers
+        self.PageNumbers = np.arange(self.parent.notebook.GetPageCount())
+        ## Splitter window. left side: checkboxes
+        ##                  right side: plot with parameters
+        self.sp = wx.SplitterWindow(self, style=wx.SP_3DSASH)
+        # This is necessary to prevent "Unsplit" of the SplitterWindow:
+        self.sp.SetMinimumPaneSize(1)
+        ## Content
+        # We will display a dialog that contains all the settings
+        # - Which model we want statistics on
+        # - What kind of parameters should be printed
+        #   (We will get the parameters from the current page)
+        #   If on another page, the parameter is not available,
+        #   do not make a mess out of it.
+        # Then the user presses a button and sees/saves the table
+        # with all the info.
+        self.panel = wx.Panel(self.sp)
+        # Parameter settings.
+        if self.parent.notebook.GetPageCount() != 0:
+            self.InfoClass = InfoClass(CurPage=self.Page)
+        else:
+            self.panel.Disable()
+        # A dropdown menu for the source Page:
+        text = wx.StaticText(self.panel, 
+                             label="Create a table with all the selected\n"+
+                                   "variables below from pages with the\n"+
+                                   "same model as the current page.")
+        ## Page selection as in average tool
+        Pagetext = wx.StaticText(self.panel, 
+                             label="Curves ")
+        Psize = text.GetSize()[0] - Pagetext.GetSize()[0]
+        self.WXTextPages = wx.TextCtrl(self.panel, value="",
+                                       size=(Psize,-1))
+        # Set number of pages
+        pagenumlist = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
+        valstring=misc.parsePagenum2String(pagenumlist)
+        self.WXTextPages.SetValue(valstring)
+        ## Plot parameter dropdown box
+        self.PlotParms = self.GetListOfPlottableParms()
+        Parmlist = self.PlotParms
+        DDtext = wx.StaticText(self.panel, 
+                             label="Plot parameter ")
+        DDsize = text.GetSize()[0] - DDtext.GetSize()[0]
+        self.WXDropdown = wx.ComboBox(self.panel, -1, "", size=(DDsize,-1),
+                        choices=Parmlist, style=wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.Bind(wx.EVT_COMBOBOX, self.OnDropDown, self.WXDropdown)
+        self.Bind(wx.EVT_TEXT, self.OnDropDown, self.WXTextPages)
+        self.WXDropdown.SetSelection(0)
+        # Create space for parameters
+        self.box = wx.StaticBox(self.panel, label="Export parameters")
+        self.masterboxsizer = wx.StaticBoxSizer(self.box, wx.VERTICAL)
+        self.masterboxsizer.Add(text)
+        self.boxsizer = wx.BoxSizer(wx.HORIZONTAL)
+        self.masterboxsizer.Add(self.boxsizer)
+        self.Checkboxes = list()
+        self.Checklabels = list()
+        if self.parent.notebook.GetPageCount() != 0:
+            self.OnChooseValues()
+        self.btnSave = wx.Button(self.panel, wx.ID_ANY, 'Save')
+        self.Bind(wx.EVT_BUTTON, self.OnSaveTable, self.btnSave)
+        # Add elements to sizer
+        self.topSizer = wx.BoxSizer(wx.VERTICAL)
+        #self.topSizer.Add(text)
+        Psizer = wx.BoxSizer(wx.HORIZONTAL)
+        Psizer.Add(Pagetext)
+        Psizer.Add(self.WXTextPages)
+        DDsizer = wx.BoxSizer(wx.HORIZONTAL)
+        DDsizer.Add(DDtext)
+        DDsizer.Add(self.WXDropdown)
+        self.topSizer.Add(Psizer)
+        self.topSizer.Add(DDsizer)
+        self.topSizer.Add(self.masterboxsizer)
+        self.topSizer.Add(self.btnSave)
+        # Set size of window
+        self.panel.SetSizer(self.topSizer)
+        self.topSizer.Fit(self)
+        (px, py) = self.topSizer.GetMinSizeTuple()
+
+        ## Plotting panel
+        self.canvas = plot.PlotCanvas(self.sp)
+        self.sp.SplitVertically(self.panel, self.canvas, px+5)
+        self.SetMinSize((px+400, py))
+        ## Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+        self.OnDropDown()
+
+
+    def GetListOfAllParameters(self, e=None, return_std_checked=False):
+        """ Returns sorted list of parameters.
+            If return_std_checked is True, then a second list with
+            standart checked parameters is returned.
+        """
+        self.InfoClass.CurPage = self.Page
+        # Now that we know our Page, we may change the available
+        # parameter options.
+        Infodict = self.InfoClass.GetCurInfo()
+        # We want to sort the information and have some prechecked values
+        # in the statistics window afterwards.
+        # New iteration through the information dictionary:
+        keys = Infodict.keys()
+        head = list()
+        body = list()
+        tail = list()
+        # Initialize these here so the merge below cannot fail with a
+        # NameError if the "title" or "parameters" keys are missing.
+        headtitle = list()
+        headparm = list()
+        bodyparm = list()
+
+        for key in keys:
+            # "title" - filename/title first
+            if key == "title":
+                for item in Infodict[key]:
+                    if len(item) == 2:
+                        if item[0] == "filename/title":
+                            headtitle = [item]
+                        else:
+                            tail.append(item)
+            # "title" - filename/title first
+            elif key == "parameters":
+                headparm = list()
+                bodyparm = list()
+                for parm in Infodict[key]:
+                    parminlist = False
+                    try:
+                        for fitp in Infodict["fitting"]:
+                            parmname = parm[0]
+                            errname = "Err "+parmname
+                            if fitp[0] == errname:
+                                headparm.append(parm)
+                                parminlist = True
+                                headparm.append(fitp)
+                    except:
+                        # Maybe there was no fit...
+                        pass
+                    if not parminlist:
+                        bodyparm.append(parm)
+            elif key == "fitting":
+                for fitp in Infodict[key]:
+                    # We added the error data before in the parameter section
+                    if str(fitp[0])[0:4] != "Err ":
+                        tail.append(fitp)
+            elif key == "supplement":
+                body += Infodict[key]
+            # Append all other items
+            elif key == "background":
+                body += Infodict[key]
+            else:
+                for item in Infodict[key]:
+                    if item is not None and len(item) == 2:
+                        tail.append(item)
+        # Bring lists together
+        head = headtitle + headparm
+        body = bodyparm + body
+        
+        Info = head + body + tail
+
+        # List of default checked parameters:
+        checked = np.zeros(len(Info), dtype=np.bool)
+        checked[:len(head)] = True
+        # A list of additional strings that should be checked by default
+        # if they are found somewhere in the data.
+        checklist = ["cpp", "duration", "bg rate"]
+        for i in range(len(Info)):
+            item = Info[i]
+            for checkitem in checklist:
+                if item[0].count(checkitem):
+                    checked[i] = True
+
+        if return_std_checked:
+            return Info, checked
+        else:
+            return Info
+
+        
+    def GetListOfPlottableParms(self, e=None, return_values=False):
+        """ Returns sorted list of parameters that can be plotted.
+            (This means that the values are convertable to floats)
+            If return_values is True, then a second list with
+            the corresponding values is returned.
+        """
+        if self.parent.notebook.GetPageCount() != 0:
+            Info = self.GetListOfAllParameters()
+            parmlist = list()
+            parmvals = list()
+            for item in Info:
+                if item is not None and len(item) == 2:
+                    try:
+                        val = float(item[1])
+                    except (TypeError, ValueError):
+                        pass
+                    else:
+                        # save the key so we can find the parameter later
+                        parmlist.append(item[0])
+                        parmvals.append(val)
+        else:
+            parmlist = ["<No Pages>"]
+            parmvals = [0]
+        if return_values:
+            return parmlist, parmvals
+        else:
+            return parmlist
+
+
+    def GetWantedParameters(self):
+        strFull = self.WXTextPages.GetValue()
+        PageNumbers = misc.parseString2Pagenum(self, strFull)
+        # Get the wanted parameters from the selection.
+        checked = list()
+        for i in np.arange(len(self.Checkboxes)):
+            if self.Checkboxes[i].IsChecked():
+                checked.append(self.Checklabels[i])
+        # Collect all the relevant pages
+        pages = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            if Page.modelid == self.Page.modelid:
+                # Only pages with same modelid
+                if int(Page.counter.strip("#: ")) in PageNumbers:
+                    # Only pages selected in self.WXTextPages
+                    pages.append(Page)
+        self.InfoClass.Pagelist = pages
+        AllInfo = self.InfoClass.GetAllInfo()
+        self.SaveInfo = list()
+        # Some nasty iteration through the dictionaries.
+        # Collect all checked variables.
+        pagekeys = AllInfo.keys()
+        # If a page number is larger than 9, a plain pagekeys.sort()
+        # gives the wrong order, because the keys are strings.
+        # Define a compare function on the numeric part instead.
+        cmp_func = lambda a,b: cmp(int(a.strip().strip("#")),
+                                   int(b.strip().strip("#")))
+        pagekeys.sort(cmp=cmp_func)
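+        # For example, a plain sort yields ["#1", "#10", "#2"], whereas
+        # cmp_func restores the numeric order ["#1", "#2", "#10"].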
+        # Iterate over the checked labels rather than over the available
+        # items, so that values missing on individual pages are covered
+        # as well (checking for "label == subitem[0]" on every page).
+        for Info in pagekeys:
+            pageinfo = list()
+            for label in checked:
+                label_in_there = False
+                for item in AllInfo[Info]:
+                    for subitem in AllInfo[Info][item]:
+                        if subitem is not None and len(subitem) == 2:
+                            if label == subitem[0]:
+                                label_in_there = True
+                                pageinfo.append(subitem)
+                if not label_in_there:
+                    # No data available
+                    pageinfo.append([label, "NaN"])
+            self.SaveInfo.append(pageinfo)
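+        # self.SaveInfo is now a list with one entry per page, each a
+        # list of [label, value] pairs, e.g. (hypothetical values):
+        #     [[["filename/title", "a.fcs"], ["n", 12.3]],
+        #      [["filename/title", "b.fcs"], ["n", "NaN"]]]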
+
+
+    def OnCheckboxChecked(self, e="restore"):
+        """
+            Write boolean data of checked checkboxes to Page variable
+            *StatisticsCheckboxes*. If e=="restore", then we will attempt
+            to get the info back from the page.
+        """
+        # What happens if a checkbox has been checked?
+        # We write the data to the Page (it will not be saved in the session).
+        if e=="restore":
+            checklist = self.Page.StatisticsCheckboxes
+            if checklist is not None:
+                if len(checklist) <= len(self.Checkboxes):
+                    for i in np.arange(len(checklist)):
+                        self.Checkboxes[i].SetValue(checklist[i])
+        else:
+            checklist = list()
+            for cb in self.Checkboxes:
+                checklist.append(cb.GetValue())
+            self.Page.StatisticsCheckboxes = checklist
+
+
+    def OnChooseValues(self, event=None):
+        Info, checked = self.GetListOfAllParameters(return_std_checked=True)
+        # We sort the checkboxes into more than one column if there
+        # are more than *maxitemsincolumn* items.
+        maxitemsincolumn = 25.0
+        Sizernumber = int(np.ceil(len(Info)/maxitemsincolumn))
+        self.boxsizerlist = list()
+        for i in np.arange(Sizernumber):
+            self.boxsizerlist.append(wx.BoxSizer(wx.VERTICAL))
+        for i in range(len(Info)):
+            checkbox = wx.CheckBox(self.panel, label=Info[i][0])
+            # Check the default-checked parameters determined above.
+            checkbox.SetValue(checked[i])
+            # Add checkbox to column sizers
+            sizern = int(np.floor(i/maxitemsincolumn))
+            self.boxsizerlist[sizern].Add(checkbox)
+            self.Checkboxes.append(checkbox)
+            self.Checklabels.append(Info[i][0])
+            self.Bind(wx.EVT_CHECKBOX, self.OnCheckboxChecked, checkbox)
+        # Add sizers to boxsizer
+        for sizer in self.boxsizerlist:
+            self.boxsizer.Add(sizer)
+        self.OnCheckboxChecked("restore")
+        self.AllPlotParms = Info
+
+
+    def OnClose(self, event=None):
+        # This is a necessary function for PyCorrFit.
+        # Do not change it.
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnDropDown(self, e=None):
+        """ Plot the parameter selected in WXDropdown
+            Uses info stored in self.PlotParms and self.InfoClass
+        """
+        if self.parent.notebook.GetPageCount() == 0 or self.Page is None:
+            self.canvas.Clear()
+            return
+        # Get valid pages
+        strFull = self.WXTextPages.GetValue()
+        try:
+            PageNumbers = misc.parseString2Pagenum(self, strFull, nodialog=True)
+        except:
+            PageNumbers = self.PageNumbers
+        else:
+            self.PageNumbers = PageNumbers
+        
+        # Get plot parameters
+        DDselid = self.WXDropdown.GetSelection()
+        label = self.PlotParms[DDselid]
+        # Get potential pages
+        pages = list()
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            if Page.modelid == self.Page.modelid:
+                # Only pages with same modelid
+                if int(Page.counter.strip("#: ")) in PageNumbers:
+                    # Only pages selected in self.WXTextPages
+                    pages.append(Page)
+        plotcurve = list()
+        InfoCl = InfoClass()
+        oldpage = self.Page
+        for page in pages:
+            self.Page = page
+            pllabel, pldata = self.GetListOfPlottableParms(return_values=True)
+            # Get the labels and make a plot of the parameters
+            if len(pllabel)-1 >= DDselid and pllabel[DDselid] == label:
+                x = int(page.counter.strip("#: "))
+                y = pldata[DDselid]
+                plotcurve.append([x,y])
+            else:
+                # try to get the label by searching for the first instance
+                for k in range(len(pllabel)):
+                    if pllabel[k] == label:
+                        x = int(page.counter.strip("#: "))
+                        y = pldata[k]
+                        plotcurve.append([x,y])
+        # Prepare plotting
+        self.canvas.Clear()
+        linesig = plot.PolyMarker(plotcurve, size=1.5, fillstyle=wx.TRANSPARENT,
+                                  marker='circle')
+        plotlist = [linesig]
+        # average line
+
+        try:
+            avg = np.average(np.array(plotcurve)[:,1])
+            maxpage =  np.max(np.array(plotcurve)[:,0])
+        except:
+            maxpage = 0
+        else:
+            plotavg = [[0, avg], [maxpage, avg]]
+            lineclear = plot.PolyLine(plotavg, colour="black",
+                                      style=wx.SHORT_DASH)
+            plotlist.append(lineclear)
+        # Draw
+        self.canvas.Draw(plot.PlotGraphics(plotlist, 
+                             xLabel='page number', 
+                             yLabel=label))
+        
+        # Correctly set x-axis
+        minticks = 2
+        self.canvas.SetXSpec(max(maxpage, minticks))
+        # Zoom out such that we can see the end of all curves
+        try:
+            xcenter = np.average(np.array(plotcurve)[:,0])
+            ycenter = np.average(np.array(plotcurve)[:,1])
+            scale = 1.1
+            self.canvas.Zoom((xcenter,ycenter), (scale, scale))
+        except:
+            pass
+        # Redraw result
+        self.canvas.Redraw()
+                         
+        
+    def OnPageChanged(self, page):
+        # This is a necessary function for PyCorrFit.
+        # It performs the work that needs to be done when the active
+        # page of the notebook changes.
+        # Prevent this function from being run twice at once:
+        oldsize = self.GetSizeTuple()
+        if self.WXTextPages.GetValue() == "":
+            # Set number of pages
+            pagenumlist = list()
+            for i in np.arange(self.parent.notebook.GetPageCount()):
+                Page = self.parent.notebook.GetPage(i)
+                pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
+            valstring=misc.parsePagenum2String(pagenumlist)
+            self.WXTextPages.SetValue(valstring)
+        DDselection = self.WXDropdown.GetValue()
+        self.Page = page
+        self.InfoClass = InfoClass(CurPage=self.Page)
+        self.PlotParms = self.GetListOfPlottableParms()
+        # Make sure the selection stays the same
+        DDselid = 0
+        for i in range(len(self.PlotParms)):
+            if DDselection == self.PlotParms[i]:
+                DDselid = i
+        Parmlist = self.PlotParms
+        self.WXDropdown.SetItems(Parmlist)
+        self.WXDropdown.SetSelection(DDselid)
+        self.panel.Enable()
+        for i in np.arange(len(self.Checkboxes)):
+            self.Checkboxes[i].Destroy()
+            # (The labels in self.Checklabels cannot be destroyed.)
+        del self.Checkboxes
+        for i in np.arange(len(self.boxsizerlist)):
+            self.boxsizer.Remove(0)
+        self.boxsizer.Layout()
+        self.boxsizerlist = list()
+        self.Checkboxes = list()
+        self.Checklabels = list()
+        # Disable if there are no pages left
+        if self.parent.notebook.GetPageCount() == 0:
+            self.panel.Disable()
+            self.canvas.Clear()
+            return
+        self.OnChooseValues()
+        self.boxsizer.Layout()
+        self.topSizer.Fit(self)
+        (ax, ay) = self.GetSizeTuple()
+        (px, py) = self.topSizer.GetMinSizeTuple()
+        self.sp.SetSashPosition(px+5)
+        self.SetSize((np.max([px+400,ax,oldsize[0]]), np.max([py,ay,oldsize[1]])))
+        self.SetMinSize((px+400, py))
+        # Replot
+        self.OnDropDown()
+
+
+    def OnSaveTable(self, event=None):
+        dirname = self.parent.dirname
+        dlg = wx.FileDialog(self.parent, "Choose file to save", dirname, "", 
+              "Text file (*.txt)|*.txt;*.TXT",
+               wx.SAVE|wx.FD_OVERWRITE_PROMPT)
+        # The user cannot do anything until the dialog is closed.
+        if dlg.ShowModal() == wx.ID_OK:
+            filename = dlg.GetPath()
+            if not filename.lower().endswith(".txt"):
+                filename = filename+".txt"
+            dirname = dlg.GetDirectory()
+            openedfile = open(filename, 'wb')
+            # Get the parameter list of all pages with the same
+            # model id as self.Page.
+            # This creates self.SaveInfo:
+            self.GetWantedParameters()
+            # Write header
+            linestring = ""
+            for atuple in self.SaveInfo[0]:
+                linestring += str(atuple[0])+"\t"
+            # remove trailing "\t"
+            openedfile.write(linestring.strip()+"\r\n")
+            # Write data         
+            for item in self.SaveInfo:
+                linestring = ""
+                for btuple in item:
+                    linestring += str(btuple[1])+"\t"
+                openedfile.write(linestring.strip()+"\r\n")
+            openedfile.close()
+        else:
+            dirname = dlg.GetDirectory()
+        # Destroy the dialog in either case to avoid leaking it.
+        dlg.Destroy()
+        # Give parent the current dirname
+        self.parent.dirname = dirname
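+
+    # The saved file is a tab-separated table: one header row with the
+    # labels, then one row of values per page. Hypothetical example
+    # (columns are separated by tabs in the actual file):
+    #
+    #     filename/title   n     Err n   cpp
+    #     data1.fcs        12.3  0.4     18.2
+    #     data2.fcs        11.9  0.5     17.6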
+
+    def SetPageNumbers(self, pagestring):
+        self.WXTextPages.SetValue(pagestring)
+
diff --git a/src/tools/trace.py b/src/tools/trace.py
new file mode 100644
index 0000000..6d797ea
--- /dev/null
+++ b/src/tools/trace.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module tools - trace
+    Show the trace of a file.
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import wx
+import numpy as np
+import wx.lib.plot as plot    
+
+# Menu entry name
+MENUINFO = ["&Trace view", "Show the trace of an opened file."]
+
+class ShowTrace(wx.Frame):
+    def __init__(self, parent):
+        # parent is main frame
+        self.parent = parent
+        # Get the window positioning correctly
+        pos = self.parent.GetPosition()
+        pos = (pos[0]+100, pos[1]+100)
+        wx.Frame.__init__(self, parent=self.parent, title="Trace view",
+                 pos=pos, style=wx.DEFAULT_FRAME_STYLE|wx.FRAME_FLOAT_ON_PARENT)
+        ## MYID
+        # This ID is given by the parent for an instance of this class
+        self.MyID = None
+        # Page
+        self.Page = self.parent.notebook.GetCurrentPage()
+        ## Canvas
+        self.canvas = plot.PlotCanvas(self)
+        self.canvas.SetEnableZoom(True)
+        if self.parent.notebook.GetPageCount() == 0:
+            # We do not need to disable anything here;
+            # there is no user input to handle.
+            pass
+        else:
+            self.OnDraw()
+        initial_size = (350,150)
+        self.SetSize(initial_size)
+        self.SetMinSize(initial_size)
+        ## Icon
+        if parent.MainIcon is not None:
+            wx.Frame.SetIcon(self, parent.MainIcon)
+        self.Show(True)
+
+
+    def OnClose(self, event=None):
+        self.parent.toolmenu.Check(self.MyID, False)
+        self.parent.ToolsOpen.__delitem__(self.MyID)
+        self.Destroy()
+
+
+    def OnDraw(self):
+        if self.Page.trace is not None:
+            self.trace = 1*self.Page.trace
+            # We want to have the trace in [s] here.
+            self.trace[:,0] = self.trace[:,0]/1000
+            line = plot.PolyLine(self.trace, legend='', colour='blue',
+                                 width=1)
+            lines = [line]
+            self.canvas.SetEnableLegend(False)
+        elif self.Page.tracecc is not None:
+            # This means that we have two (CC) traces to plot
+            self.tracea = 1*self.Page.tracecc[0]
+            self.tracea[:,0] = self.tracea[:,0]/1000
+            self.traceb = 1*self.Page.tracecc[1]
+            self.traceb[:,0] = self.traceb[:,0]/1000
+            linea = plot.PolyLine(self.tracea, legend='channel 1', 
+                                  colour='blue', width=1)
+            lineb = plot.PolyLine(self.traceb, legend='channel 2', 
+                                  colour='red', width=1)
+            lines = [linea, lineb]
+            self.canvas.SetEnableLegend(True)
+        else: 
+            self.canvas.Clear()
+            return
+        # Plot lines
+        self.canvas.Draw(plot.PlotGraphics(lines, 
+                                           xLabel='time [s]', 
+                                           yLabel='count rate [kHz]'))
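+
+    # Note on the expected data layout (hypothetical values): a trace
+    # is a 2D array with columns [time in ms, count rate in kHz];
+    # OnDraw converts the time column to seconds for display:
+    #
+    #     trace = np.array([[   0.0, 120.5],
+    #                       [1000.0, 118.7]])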
+
+
+    def OnPageChanged(self, page=None):
+        self.Page = page
+        # When parent changes
+        if self.parent.notebook.GetPageCount() == 0:
+            # Nothing to do
+            try:
+                self.canvas.Clear()
+            except:
+                pass
+            return
+        self.OnDraw()
+
diff --git a/src/usermodel.py b/src/usermodel.py
new file mode 100644
index 0000000..14484d1
--- /dev/null
+++ b/src/usermodel.py
@@ -0,0 +1,305 @@
+# -*- coding: utf-8 -*-
+""" PyCorrFit
+    Paul Müller, Biotec - TU Dresden
+
+    Module: user model
+    Used when the user wants to define their own model functions.
+    We use sympy as the function parser instead of writing our own,
+    which should be safer.
+    We only parse the function with sympy and test it once during
+    import. After that, the function is evaluated using eval()!
+
+    Dimensionless representation:
+    unit of time        : 1 ms
+    unit of inverse time: 10³ /s
+    unit of distance    : 100 nm
+    unit of Diff.coeff  : 10 µm²/s
+    unit of inverse area: 100 /µm²
+    unit of inv. volume : 1000 /µm³
+"""
+
+
+import numpy as np
+import scipy.special as sps
+try:
+    import sympy
+    from sympy.core.function import Function
+    from sympy.core import S
+    from sympy import sympify, I
+    from sympy.functions import im
+except ImportError:
+    print " Warning: module sympy not found!"
+    # Define Function, so PyCorrFit will start, even if sympy is not there.
+    # wixi needs Function.
+    Function = object
+import sys
+import wx
+
+import models as mdls
+
+
+class CorrFunc(object):
+    """
+        Check the input code of a proposed user model function and
+        return a function for fitting via GetFunction.
+    """
+    def __init__(self, labels, values, substitutes, funcstring):
+        self.values = values
+        # Strip the units from the labels to get the variable names,
+        # e.g. "a" --> "a" and "b [ms]" --> "b".
+        self.variables = list()
+        for item in labels:
+            self.variables.append(item.split(" ")[0].strip())
+        self.funcstring = funcstring
+        for key in substitutes.keys():
+            # Don't forget to insert the "(" and ")"'s
+            self.funcstring = self.funcstring.replace(key, 
+                                                       "("+substitutes[key]+")")
+            for otherkey in substitutes.keys():
+                substitutes[otherkey] = substitutes[otherkey].replace(key, 
+                                                       "("+substitutes[key]+")")
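+        # Example of the substitution above (hypothetical values):
+        # with funcstring "1/gA * gB" and substitutes
+        # {"gA": "n*(1+tau/b)", "gB": "a"}, the function string
+        # becomes "1/(n*(1+tau/b)) * (a)".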
+        # Convert the function string to a sympy expression
+        self.simpification = sympify(self.funcstring, sympyfuncdict)
+        self.simstring = str(self.simpification)
+        self.vardict = evalfuncdict
+
+
+    def GetFunction(self):
+        # Define the function that will be calculated later
+        def G(parms, tau):
+            tau = np.atleast_1d(tau)
+            for i in np.arange(len(parms)):
+                self.vardict[self.variables[i]] = float(parms[i])
+            self.vardict["tau"] = tau
+            # The function is called with tau as an array/list;
+            # numpy handles the element-wise evaluation inside eval().
+            g = eval(self.funcstring, self.vardict)
+            ## This would be a safer way to do this, but it is too slow!
+            # Once sympy supports arrays, we can use it:
+            #
+            # g = np.zeros(len(tau))
+            # for i in np.arange(len(tau)):
+            #     self.vardict["tau"] = tau[i]
+            #     g[i] = self.simpification.evalf(subs=self.vardict)
+            return g
+        return G
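+
+    # A minimal usage sketch with hypothetical values (illustration
+    # only, not executed anywhere in this module):
+    #
+    #     cf = CorrFunc(labels=["n", "taudiff [ms]"],
+    #                   values=[1.0, 0.1],
+    #                   substitutes={},
+    #                   funcstring="1./(n*(1.+tau/taudiff))")
+    #     cf.TestFunction()   # raises SyntaxError if unparsable
+    #     G = cf.GetFunction()
+    #     G([1.0, 0.1], np.array([1e-3, 1e-2, 1e-1]))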
+
+
+    def TestFunction(self):
+        """ Test the function for parsibility with the given parameters. """
+        vardict = dict()
+        for i in np.arange(len(self.variables)):
+            vardict[self.variables[i]] = sympify(float(self.values[i]))
+        for tau in np.linspace(0.0001, 10000, 10):
+            vardict["tau"] = tau
+            Number = self.simpification.evalf(subs=vardict)
+            if Number.is_Number is False:
+                raise SyntaxError("Function could not be parsed!")
+
+
+class UserModel(object):
+    """ Class for importing txt files as models into PyCorrFit.
+    """
+    def __init__(self, parent):
+        " Define all important constants and variables. "
+        # Current ID is the last model ID we gave away.
+        # This will be set using self.SetCurrentID
+        self.CurrentID = None
+        # The file to be opened. This is a full path like
+        # os.path.join(dirname, filename)
+        self.filename = None
+        # Imported models
+        # Modelarray = [model1, model2]
+        self.modelarray = []
+        # String that contains the executable code
+        self.modelcode = None
+        # Parent is main PyCorrFit program
+        self.parent = parent
+        # The string that identifies the user model menu
+        self.UserStr="User"
+
+
+    def GetCode(self, filename=None):
+        """ Get the executable code from the file.
+            Optional argument filename may be used. If not self.filename will
+            be used.
+            This automatically sets self.filename
+        """
+        if filename is not None:
+            self.filename = filename
+        openedfile = open(self.filename, 'r')
+        code = openedfile.readlines()
+        # File should start with a comment #.
+        # Remove everything before that comment (BOM).
+        startfile = code[0].find("#")
+        if startfile != -1:
+            code[0] = code[0][startfile:]
+        else:
+            code[0] = "# "+code[0]
+        # Add the model defined by this code.
+        self.AddModel(code)
+        openedfile.close()
+
+
+    def AddModel(self, code):
+        """ *code* is a list with strings
+             each string is one line.
+        """
+        # a = 1
+        # b [ms] = 2.5
+        # gAlt = 1+tau/b
+        # gProd = a*b
+        # G = 1/gA * gB
+        labels = list()
+        values = list()
+        substitutes = dict()
+        for line in code:
+            # We deal with comments and empty lines
+            # We need to check line length first and then we look for
+            # a hash.
+            line = line.strip()
+            if len(line) != 0 and line[0] != "#":
+                var, val = line.split("=")
+                var = var.strip()
+                if var == "G":
+                    # Create a function that calculates G
+                    funcstring = val.strip()
+                    self.FuncClass = CorrFunc(labels, values, substitutes,
+                                              funcstring)
+                    func = self.FuncClass.GetFunction()
+                    doc = code[0].strip()
+                    # Add whitespaces in model string (looks nicer)
+                    for olin in code[1:]:
+                        doc = doc + "\n       "+olin.strip()
+                    func.func_doc = doc
+                elif var[0] == "g":
+                    substitutes[var] = val.strip()
+                else:
+                    # Add value and variable to our lists
+                    labels.append(var)
+                    values.append(float(val))
+        # Active Parameters we are using for the fitting
+        # [0] labels
+        # [1] values
+        # [2] bool values to fit
+        bools = list([False]*len(values))
+        bools[0] = True
+        # Create Modelarray
+        active_parms = [ labels, values, bools ]
+        self.SetCurrentID()
+        Modelname = code[0][1:].strip()
+        definitions = [self.CurrentID, Modelname, Modelname, func]
+        model = dict()
+        model["Parameters"] = active_parms
+        model["Definitions"] = definitions
+        self.modelarray.append(model)
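+
+    # For the example model file shown in AddModel, this produces
+    # roughly (hypothetical ID and name):
+    #     model["Parameters"]  = [["a", "b [ms]"], [1.0, 2.5],
+    #                             [True, False]]
+    #     model["Definitions"] = [7001, "<first comment line>",
+    #                             "<first comment line>", <function G>]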
+
+
+    def ImportModel(self):
+        """ Do everything that is necessarry to import the models into
+            PyCorrFit.
+        """
+        # Set the model ids of the new model(s)
+        # Normally, there is only one model.
+        for i in np.arange(len(self.modelarray)):
+            self.SetCurrentID()
+            self.modelarray[i]["Definitions"][0] = self.CurrentID
+        # We assume that the models have the correct ID for now
+        mdls.AppendNewModel(self.modelarray)
+        # Set variables and models
+        # Is this still necessary? - We are doing this for compatibility!
+        self.parent.value_set = mdls.values
+        self.parent.valuedict = mdls.valuedict
+        self.parent.models = mdls.models
+        self.parent.modeldict = mdls.modeldict
+        self.parent.modeltypes = mdls.modeltypes
+        # Get menu
+        menu = self.parent.modelmenudict[self.UserStr]
+        # Add menu entries
+        for item in self.modelarray:
+            # Get definitions
+            Defs = item["Definitions"]
+            # This is important if we want to save the session with
+            # the imported model.
+            mdls.modeltypes[self.UserStr].append(Defs[0])
+            menuentry = menu.Append(Defs[0], Defs[1], Defs[2])
+            self.parent.Bind(wx.EVT_MENU, self.parent.add_fitting_tab,
+                             menuentry)
+
+
+    def TestFunction(self):
+        """ Convenience function to test self.FuncClass """
+        self.FuncClass.TestFunction()
+
+
+    def SetCurrentID(self):
+        # Find the highest model ID currently in use in mdls.models.
+        # Imported functions get IDs starting above 7000.
+        theID = 7000
+        for model in mdls.models:
+            theID = max(theID, model[0])
+        self.CurrentID = theID + 1
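+
+    # Example (assuming built-in model IDs stay below 7000): with no
+    # user models present, SetCurrentID yields 7001; if a user model
+    # with ID 7001 already exists, it yields 7002.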
+
+
+class wixi(Function):
+    """
+        This is a ghetto solution for using wofz in sympy.
+        It only returns the real part of the function.
+        I am not sure, if the eval's are placed correctly.
+        I only made it work for my needs. This might be wrong!
+        For true use of wofz, I am not using sympy, anyhow.
+    """
+    nargs = 1
+    is_real = True
+    @classmethod
+    def eval(cls, arg):
+        # Returning None keeps wixi(arg) unevaluated symbolically.
+        return None
+    def as_base_exp(self):
+        return self, S.One
+    def _eval_evalf(self, prec):
+        result = sps.wofz(1j*float(self.args[0]))
+        return sympy.numbers.Number(sympy.functions.re(result))
+
+
+def evalwixi(x):
+    """ Complex Error Function (Faddeeva/Voigt).
+        w(i*x) = exp(x**2) * ( 1-erf(x) )
+        This function is called by other functions within this module.
+        We are using the scipy.special.wofz function, which calculates
+        w(z) = exp(-z**2) * ( 1-erf(-iz) )
+        with z = i*x.
+    """
+    z = x*1j
+    result = sps.wofz(z)
+    # We should have a real solution. Make sure nobody complains about
+    # some zero-value imaginary numbers.
+    return np.real_if_close(result)
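+
+# A quick numerical check of the identity above (illustration only):
+#
+#     x = 0.5
+#     lhs = evalwixi(x)
+#     rhs = np.exp(x**2) * sps.erfc(x)   # erfc(x) = 1 - erf(x)
+#     np.allclose(lhs, rhs)              # -> True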
+
+
+
+sympyfuncdict = dict()
+sympyfuncdict["wixi"] = wixi
+
+evalfuncdict = dict()
+evalfuncdict["wixi"] = evalwixi
+evalfuncdict["I"] = 1j
+
+scipyfuncs = ['wofz', 'erf', 'erfc']
+numpyfuncs = ['abs', 'arccos', 'arcsin', 'arctan', 'arctan2', 'ceil', 'cos',
+              'cosh', 'degrees', 'e', 'exp', 'fabs', 'floor', 'fmod', 'frexp',
+              'hypot', 'ldexp', 'log', 'log10', 'modf', 'pi', 'power',
+              'radians', 'sin', 'sinh', 'sqrt', 'tan', 'tanh']
+
+# Use getattr instead of eval to look up the functions by name.
+for func in scipyfuncs:
+    evalfuncdict[func] = getattr(sps, func)
+
+for func in numpyfuncs:
+    evalfuncdict[func] = getattr(np, func)
+
+

-- 
Alioth's /git/debian-med/git-commit-notice on /srv/git.debian.org/git/debian-med/pycorrfit.git


