diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index a5f9526e..596afd49 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.9", "3.10"]
+ python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v3
@@ -26,7 +26,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install C dependencies
run: |
- sudo apt-get install fftw3-dev
+ sudo apt-get install libfftw3-dev
sudo apt-get install liblapack-dev
- name: Install Python dependencies
run: |
@@ -39,7 +39,7 @@ jobs:
scons
- name: Install python package
run: |
- python setup.py install
+ python -m pip install -e .
- name: Test with pytest
run: |
pytest
diff --git a/doc/source/index.rst b/doc/source/index.rst
index dac6295e..bfec89a1 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -2,7 +2,7 @@ Storm-Analysis
==============
This is a repository of code developed in the `Zhuang Lab `_
-and the `Babcock Lab `_ for analysis of STORM
+and the `Babcock Lab `_ for analysis of STORM
movies. The code is available on `github `_.
Some algorithms were developed in other groups and ported to Python. In this
diff --git a/doc/source/install.rst b/doc/source/install.rst
index 5574d2da..afef8d43 100644
--- a/doc/source/install.rst
+++ b/doc/source/install.rst
@@ -13,6 +13,7 @@ C libraries
Python
~~~~~~
+* `scons <https://scons.org/>`_ (to build the C libraries).
* `numpy `_
* `scipy `_
* `matplotlib `_
@@ -87,10 +88,11 @@ The C libraries are built using `SCons `_.
Basic installation ::
- $ git clone https://github.com/ZhuangLab/storm-analysis.git
- $ cd storm-analysis
- $ python setup.py build_c
- $ python setup.py install
+ $ git clone https://github.com/ZhuangLab/storm-analysis.git
+ $ cd storm-analysis
+ $ python -m pip install scons
+ $ scons
+ $ python -m pip install .
You may find that this does not work because ``build_c`` fails. This step is just a
wrapper for SCons, so you may have better luck running the SCons by itself, then using
@@ -99,14 +101,16 @@ wrapper for SCons, so you may have better luck running the SCons by itself, then
Linux / OS-X example ::
$ cd storm-analysis
+ $ python -m pip install scons
$ scons
- $ python setup.py install
+ $ python -m pip install .
Windows (mingw64) example ::
$ cd storm-analysis
- $ scons
- $ python setup.py install
+ $ python -m pip install scons
+ $ scons -Q compiler=mingw
+ $ python -m pip install .
`nuwen `_ is one source for mingw64.
@@ -155,25 +159,7 @@ Install storm-analysis ::
Testing
~~~~~~~
-Test the (source) installation (this will take a few minutes to run).
+Test the (source) installation (this requires ``pytest`` and will take a few minutes to run) ::
-Option 1 ::
-
- $ cd storm-analysis
- $ python setup.py test
-
-Option 2 ::
-
$ cd storm-analysis
$ pytest
-
-.. note:: Due to issues with creating pickle files that are compatible between Python2
- and Python3 all of the tests that involve pickles (Spliner mostly) are skipped
- on Python2.
-
-Also
-----
-
-If you are modifying the code in the storm-analysis project you may find it more convenient
-to add a .pth file to your pythonX.Y/site-packages directory. Then you won't have to
-run ``python setup.py install`` after every change.
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..85ac7ca4
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,46 @@
+[build-system]
+requires = ['setuptools>=61.0']
+build-backend = 'setuptools.build_meta'
+
+[project]
+name = 'storm_analysis'
+version = '2.2'
+dependencies = [
+ 'scons',
+ 'numpy',
+ 'scipy',
+ 'matplotlib',
+ 'pillow',
+ 'tifffile',
+ 'Shapely',
+ 'randomcolor',
+ 'PyWavelets',
+ 'h5py',
+ 'astropy'
+]
+requires-python = '>=3.10'
+authors = [{name = 'Hazen Babcock'}]
+maintainers = []
+description = 'STORM movie analysis code.'
+readme = 'README.md'
+keywords = ['STORM', 'microscopy']
+classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: Mixed'
+]
+
+[tool.pytest.ini_options]
+minversion = '6.0'
+addopts = ['-ra', '-q']
+testpaths = [
+ 'storm_analysis/test'
+]
+
+[tool.setuptools.packages.find]
+include = ['storm_analysis*']
+
+[project.optional-dependencies]
+test = [
+ 'pytest',
+]
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 98b07604..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,5 +0,0 @@
-[aliases]
-test=pytest
-
-[tool:pytest]
-python_files = test*.py
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 32340437..00000000
--- a/setup.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Custom commands following:
-# https://seasonofcode.com/posts/how-to-add-custom-build-steps-and-commands-to-setuppy.html
-#
-
-import distutils.cmd
-import platform
-import os
-import setuptools.command.build_py
-import subprocess
-import sys
-
-from setuptools import setup, find_packages
-from distutils.core import Extension
-
-
-version = "2.2"
-description = "STORM movie analysis code."
-long_description = ""
-
-class SConsCommand(distutils.cmd.Command):
- """
- Custom command to run scons (http://scons.org/) to build the C libraries.
- """
- description = 'run scons to build C libraries'
- user_options = [
- ('scons-exe=', None, 'location of the scons executable'),
- ('compiler=', None, 'which C compiler to use, e.g. "mingw", ..')
- ]
-
- def initialize_options(self):
- self.scons_exe = ''
- self.compiler = ''
-
- def finalize_options(self):
- if self.scons_exe:
- assert os.path.exists(self.scons_exe), ("scon executable " + self.scons_exe + " not found")
-
- def run(self):
- if self.scons_exe:
- command = [self.scons_exe]
- else:
- command = ['scons']
- if self.compiler:
- command.extend(['-Q', 'compiler=' + self.compiler])
-
- self.announce('Running command: ' + str(command))
- try:
- subprocess.check_call(command)
- except OSError:
- print("Failed to build C libraries, is scons installed?")
-
-
-setup(
- name='storm_analysis',
- version=version,
- description=description,
- long_description=long_description,
- author='Hazen Babcock',
- author_email='hbabcock at fas.harvard.edu',
- url='https://github.com/ZhuangLab/storm-analysis',
-
- cmdclass={
- 'build_c' : SConsCommand,
- },
-
- zip_safe=False,
- packages=find_packages(),
-
- package_data={},
- exclude_package_data={},
- include_package_data=True,
-
- requires=[],
-
- install_requires=[
- "numpy",
- "scipy",
- "matplotlib",
- "pillow",
- "tifffile",
- "Shapely",
- "randomcolor",
- "PyWavelets",
- "h5py",
- "astropy"
- ],
- setup_requires=['pytest-runner'],
- tests_require=['pytest'],
-
- license="",
- keywords='storm,microscopy',
- classifiers=[
- 'Development Status :: 4 - Beta',
- 'Intended Audience :: Developers',
- 'License :: Mixed',
- "Programming Language :: C",
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.9',
- 'Programming Language :: Python :: 3.10',
- ],
-
-
-)
diff --git a/storm_analysis/__init__.py b/storm_analysis/__init__.py
index 22dc24ad..96ae5e6b 100644
--- a/storm_analysis/__init__.py
+++ b/storm_analysis/__init__.py
@@ -42,9 +42,9 @@ def configureMatplotlib():
def getData(data_path):
- import pkg_resources
- data = pkg_resources.resource_filename(__name__, data_path)
- return data
+ import importlib.resources
+ data = importlib.resources.files(__name__).joinpath(data_path)
+ return str(data)
def getPath(path):
diff --git a/storm_analysis/admm/admm_3d.py b/storm_analysis/admm/admm_3d.py
index ce6be378..e8102e6e 100644
--- a/storm_analysis/admm/admm_3d.py
+++ b/storm_analysis/admm/admm_3d.py
@@ -2,7 +2,7 @@
"""
Pure Python code for doing ADMM in 3D.
-minimize 1/2*|| Ax - b ||_2^2 + \lambda || x ||_1
+minimize 1/2*|| Ax - b ||_2^2 + lambda || x ||_1
As described in:
Boyd et al., "Distributed Optimization and Statistical Learning
diff --git a/storm_analysis/admm/admm_decon.py b/storm_analysis/admm/admm_decon.py
index 063a13c9..ae971ad6 100644
--- a/storm_analysis/admm/admm_decon.py
+++ b/storm_analysis/admm/admm_decon.py
@@ -2,7 +2,7 @@
"""
Uses ADMM to perform image deconvolution.
-This minimizes 1/2*|| Ax - b ||_2^2 + \lambda || x ||_1.
+This minimizes 1/2*|| Ax - b ||_2^2 + lambda || x ||_1.
Hazen 11/19
"""
diff --git a/storm_analysis/fista/fista_3d.py b/storm_analysis/fista/fista_3d.py
index 20918843..7e211637 100644
--- a/storm_analysis/fista/fista_3d.py
+++ b/storm_analysis/fista/fista_3d.py
@@ -2,7 +2,7 @@
"""
Pure Python code for doing FISTA in 3D.
-minimize || Ax - b ||_2^2 + \lambda || x ||_1
+minimize || Ax - b ||_2^2 + lambda || x ||_1
As described in:
Beck and Teboulle, "A Fast Iterative Shrinkage-Thresholding
diff --git a/storm_analysis/fista/fista_decon.py b/storm_analysis/fista/fista_decon.py
index 7a084773..a5c3716a 100644
--- a/storm_analysis/fista/fista_decon.py
+++ b/storm_analysis/fista/fista_decon.py
@@ -2,7 +2,7 @@
"""
Deconvolve images in 3D using FISTA.
-This minimizes || Ax - b ||_2^2 + \lambda || x ||_1.
+This minimizes || Ax - b ||_2^2 + lambda || x ||_1.
Hazen 11/19
"""
diff --git a/storm_analysis/fista/fista_fft_c.py b/storm_analysis/fista/fista_fft_c.py
index 93de9919..66c38259 100644
--- a/storm_analysis/fista/fista_fft_c.py
+++ b/storm_analysis/fista/fista_fft_c.py
@@ -128,7 +128,7 @@ def newImage(self, image, background):
c_background = numpy.ascontiguousarray(background, dtype = float)
fista_fft.newImage(self.c_fista, c_image, c_background)
- def run(self, f_lamba, iterations):
+ def run(self, f_lambda, iterations):
fista_fft.run(self.c_fista, f_lambda, iterations)
diff --git a/storm_analysis/micrometry/micrometry.py b/storm_analysis/micrometry/micrometry.py
index d59ff6bf..3da51a8b 100644
--- a/storm_analysis/micrometry/micrometry.py
+++ b/storm_analysis/micrometry/micrometry.py
@@ -92,7 +92,7 @@ def makeTreeAndQuadsFromH5File(h5_filename, min_size = None, max_size = None, ma
def plotMatch(kd1, kd2, transform, save_as = None, show = True):
[x2, y2] = applyTransform(kd2, transform)
- fig = pyplot.figure()
+ fig = pyplot.figure(figsize = (10,10))
pyplot.scatter(kd1.data[:,0], kd1.data[:,1], facecolors = 'none', edgecolors = 'red', s = 100)
pyplot.scatter(x2, y2, color = 'green', marker = '+', s = 100)
diff --git a/storm_analysis/simulator/pupil_math.py b/storm_analysis/simulator/pupil_math.py
index 9058807e..88178df0 100644
--- a/storm_analysis/simulator/pupil_math.py
+++ b/storm_analysis/simulator/pupil_math.py
@@ -313,7 +313,7 @@ def pfToPSF(self, pf, z_vals, want_intensity = True, scaling_factor = None):
raise PupilMathException("OTF scaling of a complex valued PSF is not supported!")
psf = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
for i, z in enumerate(z_vals):
psf[i,:,:] = toRealSpace(self.changeFocus(pf, z))
return psf
@@ -432,19 +432,19 @@ def pfToRS(self, pf, z_vals):
[[rs_px_ex, rs_px_ey], [rs_py_ex, rs_py_ey], [rs_pz_ex, rs_pz_ey]]
"""
rs_px_ex = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_px_ey = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_py_ex = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_py_ey = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_pz_ex = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_pz_ey = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
for i, z in enumerate(z_vals):
pf_at_z = self.changeFocus(pf, z)
@@ -525,7 +525,7 @@ def pfToPSF(self, pf, z_vals, want_intensity = True, scaling_factor = None):
if scaling_factor is not None:
raise PupilMathException("OTF scaling of a complex valued PSF is not supported!")
- psf = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]), dtype = numpy.complex_)
+ psf = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]), dtype = numpy.complex128)
for i, z in enumerate(z_vals):
self.pf_c.translateZ(z)
psf[i,:,:] = self.pf_c.getPSF()
@@ -586,19 +586,19 @@ def pfToRS(self, pf, z_vals):
[[rs_px_ex, rs_px_ey], [rs_py_ex, rs_py_ey], [rs_pz_ex, rs_pz_ey]]
"""
rs_px_ex = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_px_ey = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_py_ex = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_py_ey = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_pz_ex = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
rs_pz_ey = numpy.zeros((len(z_vals), pf.shape[0], pf.shape[1]),
- dtype = numpy.complex_)
+ dtype = numpy.complex128)
self.pf_c.setPF(pf)
for i, z in enumerate(z_vals):
diff --git a/storm_analysis/spliner/psf_to_spline.py b/storm_analysis/spliner/psf_to_spline.py
index 99d77b70..82f88b9d 100644
--- a/storm_analysis/spliner/psf_to_spline.py
+++ b/storm_analysis/spliner/psf_to_spline.py
@@ -54,7 +54,7 @@ def psfToSpline(psf_name, spline_name, s_size):
if True:
import tifffile
tiff_name = os.path.splitext(spline_name)[0] + "_sp.tif"
- tifffile.imsave(tiff_name, np_spline.astype(numpy.float32))
+ tifffile.imwrite(tiff_name, np_spline.astype(numpy.float32))
# 3D spline
@@ -99,7 +99,7 @@ def psfToSpline(psf_name, spline_name, s_size):
tiff_name = os.path.splitext(spline_name)[0] + "_sp.tif"
with tifffile.TiffWriter(tiff_name) as tf:
for i in range(s_size):
- tf.save(np_spline[i,:,:].astype(numpy.float32))
+ tf.write(np_spline[i,:,:].astype(numpy.float32))
del psf_data["psf"]
psf_data["spline"] = np_spline
diff --git a/storm_analysis/spliner/spline_to_psf.py b/storm_analysis/spliner/spline_to_psf.py
index 0b511244..ed2988a3 100644
--- a/storm_analysis/spliner/spline_to_psf.py
+++ b/storm_analysis/spliner/spline_to_psf.py
@@ -5,7 +5,7 @@
Hazen 01/16
"""
-
+import pathlib
import pickle
import numpy
@@ -39,7 +39,7 @@ def loadSplineFile(self, spline_file):
Load the spline_file if it has not already been loaded. Otherwise
just return it under the assumption that is a unpickled spline file.
"""
- if isinstance(spline_file, str):
+        if isinstance(spline_file, str) or isinstance(spline_file, pathlib.PurePath):
with open(spline_file, 'rb') as fp:
spline_data = pickle.load(fp)
return spline_data
diff --git a/storm_analysis/test/data/test_spliner_psf.spline b/storm_analysis/test/data/test_spliner_psf.spline
index 62a82417..2d48a884 100644
Binary files a/storm_analysis/test/data/test_spliner_psf.spline and b/storm_analysis/test/data/test_spliner_psf.spline differ
diff --git a/storm_analysis/test/data/test_spliner_psf_2d.spline b/storm_analysis/test/data/test_spliner_psf_2d.spline
index e3ba600c..42e3a9da 100644
Binary files a/storm_analysis/test/data/test_spliner_psf_2d.spline and b/storm_analysis/test/data/test_spliner_psf_2d.spline differ