Skip to content

Commit

Permalink
Merge pull request #502 from sezelt/black-gh-action
Browse files Browse the repository at this point in the history
Add a GitHub action that checks for `black` styling
  • Loading branch information
sezelt authored Aug 29, 2023
2 parents be44354 + aaf8d18 commit d9daf5d
Show file tree
Hide file tree
Showing 83 changed files with 482 additions and 750 deletions.
11 changes: 5 additions & 6 deletions .github/scripts/update_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,14 @@
with open(version_file_path, "r") as f:
lines = f.readlines()

line_split = lines[0].split('.')
patch_number = line_split[2].split('\'')[0]
line_split = lines[0].split(".")
patch_number = line_split[2].split("'")[0]

# Increment patch number
patch_number = str(int(patch_number) + 1)+'\''
patch_number = str(int(patch_number) + 1) + "'"


new_line = line_split[0]+'.'+line_split[1]+'.'+patch_number
new_line = line_split[0] + "." + line_split[1] + "." + patch_number

with open(version_file_path,"w") as f:
with open(version_file_path, "w") as f:
f.write(new_line)

14 changes: 14 additions & 0 deletions .github/workflows/black.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# GitHub Actions workflow: fail CI when any Python file is not formatted
# with `black` (added in PR #502 alongside a repo-wide black pass).
name: Check code style

# Trigger on pushes to `dev` and on pull requests targeting `dev`.
on:
  push:
    branches: [ "dev" ]
  pull_request:
    branches: [ "dev" ]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      # Check out the repository so black can see the source tree.
      - uses: actions/checkout@v3
      # Official black action pinned to the `stable` tag; its default
      # mode is `--check`, so the job fails if any file would be
      # reformatted, without modifying the checkout.
      - uses: psf/black@stable
2 changes: 1 addition & 1 deletion .github/workflows/build-flake.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Python application
name: Check module can be imported

on:
push:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/linter.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: flake8 Lint
name: Check for errors with flake8

on:
push:
Expand Down
35 changes: 16 additions & 19 deletions docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,16 @@

import os
import sys
sys.path.insert(0,os.path.dirname(os.getcwd()))

sys.path.insert(0, os.path.dirname(os.getcwd()))
from py4DSTEM import __version__
from datetime import datetime

# -- Project information -----------------------------------------------------

project = 'py4dstem'
copyright = f'{datetime.today().year}, py4DSTEM Development Team'
author = 'Ben Savitsky & Alex Rakowski'
project = "py4dstem"
copyright = f"{datetime.today().year}, py4DSTEM Development Team"
author = "Ben Savitsky & Alex Rakowski"

# The full version, including alpha/beta/rc tags
# release = '0.14.0'
Expand All @@ -35,9 +36,7 @@
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.intersphinx']
extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinx.ext.intersphinx"]

# Other useful extensions
# sphinx_copybutton
Expand All @@ -49,7 +48,7 @@


# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
Expand All @@ -58,18 +57,18 @@

# Set autodoc defaults
autodoc_default_options = {
'members': True,
'member-order': 'bysource',
'special-members': '__init__'
"members": True,
"member-order": "bysource",
"special-members": "__init__",
}

# Include todo items/lists
todo_include_todos = True

#autodoc_member_order = 'bysource'
# autodoc_member_order = 'bysource'


# intersphinx options
# intersphinx options

# intersphinx_mapping = {
# 'emdfile': ('https://pypi.org/project/emdfile/0.0.4/', None)
Expand All @@ -80,21 +79,19 @@
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['../_static']
html_static_path = ["../_static"]


# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '../_static/py4DSTEM_logo.png'
html_logo = "../_static/py4DSTEM_logo.png"

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '../_static/py4DSTEM_logo_vsmall.ico'


html_favicon = "../_static/py4DSTEM_logo_vsmall.ico"
1 change: 0 additions & 1 deletion py4DSTEM/braggvectors/braggvector_methods.py
Original file line number Diff line number Diff line change
Expand Up @@ -458,7 +458,6 @@ def measure_origin_beamstop(
found_center = np.zeros((R_Nx, R_Ny), dtype=bool)
for rx in range(R_Nx):
for ry in range(R_Ny):

# Get data
pl = braggpeaks_masked[rx, ry]
is_paired = np.zeros(len(pl.data), dtype=bool)
Expand Down
1 change: 0 additions & 1 deletion py4DSTEM/braggvectors/braggvectors.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,6 @@ def _populate_instance(self, group):
# standard output display

def __repr__(self):

space = " " * len(self.__class__.__name__) + " "
string = f"{self.__class__.__name__}( "
string += f"A {self.shape}-shaped array of lists of bragg vectors )"
Expand Down
12 changes: 1 addition & 11 deletions py4DSTEM/braggvectors/diskdetection.py
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,6 @@ def _find_Bragg_disks_single(
_return_cc=False,
_template_space="real",
):

# apply filter function
er = "filter_function must be callable"
if filter_function:
Expand All @@ -334,7 +333,6 @@ def _find_Bragg_disks_single(
if template is None:
cc = DP
else:

# fourier transform the template
assert _template_space in ("real", "fourier")
if _template_space == "real":
Expand Down Expand Up @@ -417,11 +415,9 @@ def _find_Bragg_disks_stack(
maxNumPeaks=100,
_template_space="real",
):

ans = []

for idx in range(dp_stack.shape[0]):

dp = dp_stack[idx, :, :]
peaks = _find_Bragg_disks_single(
dp,
Expand Down Expand Up @@ -466,7 +462,6 @@ def _find_Bragg_disks_CPU(
maxNumPeaks=70,
radial_bksb=False,
):

# Make the BraggVectors instance
braggvectors = BraggVectors(datacube.Rshape, datacube.Qshape)

Expand All @@ -475,14 +470,13 @@ def _find_Bragg_disks_CPU(

# Loop over all diffraction patterns
# Compute and populate BraggVectors data
for (rx, ry) in tqdmnd(
for rx, ry in tqdmnd(
datacube.R_Nx,
datacube.R_Ny,
desc="Finding Bragg Disks",
unit="DP",
unit_scale=True,
):

# Get a diffraction pattern

# without background subtraction
Expand Down Expand Up @@ -538,7 +532,6 @@ def _find_Bragg_disks_CUDA_unbatched(
edgeBoundary=20,
maxNumPeaks=70,
):

# compute
from py4DSTEM.braggvectors.diskdetection_cuda import find_Bragg_disks_CUDA

Expand Down Expand Up @@ -584,7 +577,6 @@ def _find_Bragg_disks_CUDA_batched(
edgeBoundary=20,
maxNumPeaks=70,
):

# compute
from py4DSTEM.braggvectors.diskdetection_cuda import find_Bragg_disks_CUDA

Expand Down Expand Up @@ -633,7 +625,6 @@ def _find_Bragg_disks_ipp(
edgeBoundary=20,
maxNumPeaks=70,
):

# compute
from py4DSTEM.braggvectors.diskdetection_parallel import find_Bragg_disks_ipp

Expand Down Expand Up @@ -684,7 +675,6 @@ def _find_Bragg_disks_dask(
edgeBoundary=20,
maxNumPeaks=70,
):

# compute
from py4DSTEM.braggvectors.diskdetection_parallel import find_Bragg_disks_dask

Expand Down
2 changes: 1 addition & 1 deletion py4DSTEM/braggvectors/diskdetection_aiml.py
Original file line number Diff line number Diff line change
Expand Up @@ -494,7 +494,7 @@ def find_Bragg_disks_aiml_serial(
)

# Loop over all diffraction patterns
for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
datacube.R_Nx,
datacube.R_Ny,
desc="Finding Bragg Disks using AI/ML",
Expand Down
2 changes: 1 addition & 1 deletion py4DSTEM/braggvectors/diskdetection_aiml_cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def find_Bragg_disks_aiml_CUDA(
)

# Loop over all diffraction patterns
for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
datacube.R_Nx,
datacube.R_Ny,
desc="Finding Bragg Disks using AI/ML CUDA",
Expand Down
2 changes: 1 addition & 1 deletion py4DSTEM/braggvectors/diskdetection_cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def find_Bragg_disks_CUDA(

else:
# Loop over all diffraction patterns
for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
datacube.R_Nx,
datacube.R_Ny,
desc="Finding Bragg Disks",
Expand Down
4 changes: 2 additions & 2 deletions py4DSTEM/braggvectors/diskdetection_parallel_new.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ def register_dill_serializer():

#### DASK WRAPPER FUNCTION ####


# Each delayed objected is passed a 4D array, currently implementing only on 2D slices.
# TODO add batching with fancy indexing - needs to run a for loop over the batch of arrays
# TODO add cuda accelerated version
Expand Down Expand Up @@ -138,7 +139,6 @@ def beta_parallel_disk_detection(

if dask_client == None:
if dask_client_params != None:

dask.config.set(
{
"distributed.worker.memory.spill": False,
Expand Down Expand Up @@ -247,7 +247,7 @@ def beta_parallel_disk_detection(
# temp_peaks[0][0]

# operating over a list so we need the size (0->count) and re-create the probe positions (0->rx,0->ry),
for (count, (rx, ry)) in zip(
for count, (rx, ry) in zip(
[i for i in range(dataset.data[..., 0, 0].size)],
np.ndindex(dataset.data.shape[:-2]),
):
Expand Down
8 changes: 4 additions & 4 deletions py4DSTEM/braggvectors/threshold.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def threshold_Braggpeaks(
assert all(
[item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]]
), "pointlistarray must include the coordinates 'qx', 'qy', and 'intensity'."
for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
pointlistarray.shape[0],
pointlistarray.shape[1],
desc="Thresholding Bragg disks",
Expand Down Expand Up @@ -120,7 +120,7 @@ def universal_threshold(
_pointlistarray.name = pointlistarray.name + "_unithresh"

HI_array = np.zeros((_pointlistarray.shape[0], _pointlistarray.shape[1]))
for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
_pointlistarray.shape[0],
_pointlistarray.shape[1],
desc="Thresholding Bragg disks",
Expand All @@ -142,7 +142,7 @@ def universal_threshold(
else:
_thresh = thresh

for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
_pointlistarray.shape[0],
_pointlistarray.shape[1],
desc="Thresholding Bragg disks",
Expand Down Expand Up @@ -205,7 +205,7 @@ def get_pointlistarray_intensities(pointlistarray):
), "pointlistarray coords must include 'intensity'"

first_pass = True
for (Rx, Ry) in tqdmnd(
for Rx, Ry in tqdmnd(
pointlistarray.shape[0],
pointlistarray.shape[1],
desc="Getting disk intensities",
Expand Down
1 change: 0 additions & 1 deletion py4DSTEM/datacube/datacube.py
Original file line number Diff line number Diff line change
Expand Up @@ -896,7 +896,6 @@ def find_Bragg_disks(
)

if isinstance(peaks, Node):

# add metadata
peaks.name = name
peaks.metadata = Metadata(
Expand Down
2 changes: 0 additions & 2 deletions py4DSTEM/datacube/virtualdiffraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,6 @@ def get_virtual_diffraction(

# ...with no center shifting
if shift_center == False:

# ...for the whole pattern
if mask is None:
if method == "mean":
Expand Down Expand Up @@ -187,7 +186,6 @@ def get_virtual_diffraction(

# ...for integer shifts
if not subpixel:

# round shifts -> int
qx_shift = qx_shift.round().astype(int)
qy_shift = qy_shift.round().astype(int)
Expand Down
4 changes: 0 additions & 4 deletions py4DSTEM/datacube/virtualimage.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,10 +201,8 @@ def get_virtual_image(

# no center shifting
if shift_center == False:

# single CPU
if not dask:

# allocate space
if mask.dtype == "complex":
virtual_image = np.zeros(self.Rshape, dtype="complex")
Expand All @@ -220,7 +218,6 @@ def get_virtual_image(

# dask
if dask == True:

# set up a generalized universal function for dask distribution
def _apply_mask_dask(self, mask):
virtual_image = np.sum(
Expand All @@ -240,7 +237,6 @@ def _apply_mask_dask(self, mask):

# with center shifting
else:

# get shifts
assert (
self.calibration.get_origin_shift() is not None
Expand Down
Loading

0 comments on commit d9daf5d

Please sign in to comment.