diff --git a/.codespellrc b/.codespellrc
new file mode 100644
index 0000000000..fcf6e648ca
--- /dev/null
+++ b/.codespellrc
@@ -0,0 +1,12 @@
+[codespell]
+skip = .git,*.pdf,*.svg,external
+# nd,nam - import module short
+# fith - oddness coming from AFNI
+# whos - smth used in matlab things
+# SMAL - Stanford CNI MRS Library
+# Suh - name
+# noo,crasher - field/var name used
+# Reson - short journal name
+# ALS, FWE - neuroimaging specific abbrevs
+# Comision - foreign word used
+ignore-words-list = te,inport,objekt,jist,nd,hel,inout,fith,whos,fot,ue,shs,smal,nam,filetest,suh,noo,reson,als,fwe,crasher,comision
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
new file mode 100644
index 0000000000..5768d7c636
--- /dev/null
+++ b/.github/workflows/codespell.yml
@@ -0,0 +1,19 @@
+---
+name: Codespell
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
+jobs:
+  codespell:
+    name: Check for spelling errors
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Codespell
+        uses: codespell-project/actions-codespell@v1
diff --git a/doc/changelog/0.X.X-changelog.rst b/doc/changelog/0.X.X-changelog.rst
index 0c007cade7..8b779970d4 100644
--- a/doc/changelog/0.X.X-changelog.rst
+++ b/doc/changelog/0.X.X-changelog.rst
@@ -15,7 +15,7 @@
 * ENH: Generate Dockerfiles with neurodocker (https://github.com/nipy/nipype/pull/2202)
 * ENH: FLAIR options for recon-all (https://github.com/nipy/nipype/pull/2279)
 * ENH: Config option for setting maxtasksperchild when multiprocessing (https://github.com/nipy/nipype/pull/2284)
-* FIX: Testing maintainance and improvements (https://github.com/nipy/nipype/pull/2252)
+* FIX: Testing maintenance and improvements (https://github.com/nipy/nipype/pull/2252)
 * ENH: Add elapsed_time and final metric_value to ants.Registration (https://github.com/nipy/nipype/pull/1985)
 * ENH: Improve terminal_output feature (https://github.com/nipy/nipype/pull/2209)
 * ENH: Simple interface to FSL std2imgcoords (https://github.com/nipy/nipype/pull/2209, prev #1398)
@@ -39,7 +39,7 @@
 * ENH: Add cosine-basis high-pass-filter to CompCor, allow skip of initial volumes (https://github.com/nipy/nipype/pull/2107, https://github.com/nipy/nipype/pull/#2122)
 * FIX: Catch more dcm2niix DTI conversions (https://github.com/nipy/nipype/pull/2110)
 * FIX: Retrieve aseg + wmparc stats properly (https://github.com/nipy/nipype/pull/2117)
-* ENH: ANTs MeasureImageSimilarity Inteface (https://github.com/nipy/nipype/pull/2128)
+* ENH: ANTs MeasureImageSimilarity Interface (https://github.com/nipy/nipype/pull/2128)
 * FIX: CompCor filter_basis of correct size, pre-filter column headers (https://github.com/nipy/nipype/pull/2136, https://github.com/nipy/nipype/pull/2138)
 * ENH: FreeSurfer lta_convert and mri_coreg interfaces (https://github.com/nipy/nipype/pull/2140, https://github.com/nipy/nipype/pull/2172)
 * ENH: Speed up S3DataGrabber (https://github.com/nipy/nipype/pull/2143)
@@ -311,16 +311,16 @@ Release 0.11.0 (September 15, 2015)
 Release 0.10.0 (October 10, 2014)
 =================================

-* ENH: New miscelaneous interfaces: SplitROIs (mapper), MergeROIs (reducer)
+* ENH: New miscellaneous interfaces: SplitROIs (mapper), MergeROIs (reducer)
   to enable parallel processing of very large images.
 * ENH: Updated FSL interfaces: BEDPOSTX and XFibres, former interfaces are still
   available with the version suffix: BEDPOSTX4 and XFibres4.
   Added gpu versions of BEDPOSTX: BEDPOSTXGPU, BEDPOSTX5GPU, and BEDPOSTX4GPU
-* ENH: Added experimental support for MIPAV algorithms thorugh JIST plugins
+* ENH: Added experimental support for MIPAV algorithms through JIST plugins
 * ENH: New dipy interfaces: Denoise, Resample
 * ENH: New Freesurfer interfaces: Tkregister2 (for conversion of fsl style matrices
   to freesurfer format), MRIPretess
 * ENH: New FSL interfaces: WarpPoints, WarpPointsToStd, EpiReg, ProbTrackX2,
   WarpUtils, ConvertWarp
-* ENH: New miscelaneous interfaces: AddCSVRow, NormalizeProbabilityMapSet, AddNoise
+* ENH: New miscellaneous interfaces: AddCSVRow, NormalizeProbabilityMapSet, AddNoise
 * ENH: New AFNI interfaces: Eval, Means, SVMTest, SVMTrain
 * ENH: FUGUE interface has been refactored to use the name_template system, 3
   examples added to doctests, some bugs solved.
@@ -510,7 +510,7 @@ Release 0.5 (Mar 10, 2012)
 * API: By default inputs are removed from Node working directory
 * API: InterfaceResult class is now versioned and stores class type not instance
 * API: Added FIRST interface
-* API: Added max_jobs paramter to plugin_args. limits the number of jobs
+* API: Added max_jobs parameter to plugin_args. limits the number of jobs
   executing at any given point in time
 * API: crashdump_dir is now a config execution option
 * API: new config execution options for controlling hash checking, execution and
diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst
index f6a9233f5e..6b3647a86d 100644
--- a/doc/changelog/1.X.X-changelog.rst
+++ b/doc/changelog/1.X.X-changelog.rst
@@ -426,7 +426,7 @@ Python 1.2.3 will be the last version to support Python 3.4.

   * FIX: ANTS LaplacianThickness cmdline opts fixed up (https://github.com/nipy/nipype/pull/2846)
   * FIX: Resolve LinAlgError during SVD (https://github.com/nipy/nipype/pull/2838)
-  * ENH: Add interfaces wrapping DIPY worflows (https://github.com/nipy/nipype/pull/2830)
+  * ENH: Add interfaces wrapping DIPY workflows (https://github.com/nipy/nipype/pull/2830)
   * ENH: Update BIDSDataGrabber for pybids 0.7 (https://github.com/nipy/nipype/pull/2737)
   * ENH: Add FSL `eddy_quad` interface (https://github.com/nipy/nipype/pull/2825)
   * ENH: Support tckgen -select in MRtrix3 v3+ (https://github.com/nipy/nipype/pull/2823)
diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py
index 38f56d6541..8e5c6b150c 100644
--- a/nipype/algorithms/icc.py
+++ b/nipype/algorithms/icc.py
@@ -150,8 +150,8 @@ def ICC_rep_anova(Y, projection_matrix=None):
     SSR = SST - SSC - SSE
     MSR = SSR / dfr

-    # ICC(3,1) = (mean square subjeT - mean square error) /
-    # (mean square subjeT + (k-1)*-mean square error)
+    # ICC(3,1) = (mean square subject - mean square error) /
+    # (mean square subject + (k-1)*-mean square error)
     ICC = (MSR - MSE) / (MSR + dfc * MSE)

     e_var = MSE  # variance of error
diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py
index cd2145b8d5..4b870b53ab 100644
--- a/nipype/interfaces/ants/registration.py
+++ b/nipype/interfaces/ants/registration.py
@@ -865,7 +865,7 @@ class Registration(ANTSCommand):
     --write-composite-transform 0'

     One can use multiple similarity metrics in a single registration stage.The Node below first
-    performs a linear registation using only the Mutual Information ('Mattes')-metric.
+    performs a linear registration using only the Mutual Information ('Mattes')-metric.
     In a second stage, it performs a non-linear registration ('Syn') using both a
     Mutual Information and a local cross-correlation ('CC')-metric. Both metrics are weighted
     equally ('metric_weight' is .5 for both). The Mutual Information- metric uses 32 bins.
diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py
index 9467c4086f..8af76d95b9 100644
--- a/nipype/interfaces/ants/segmentation.py
+++ b/nipype/interfaces/ants/segmentation.py
@@ -1009,10 +1009,10 @@ def _run_interface(self, runtime, correct_return_codes=(0,)):
         runtime = super(BrainExtraction, self)._run_interface(runtime)

         # Still, double-check if it didn't found N4
-        if "we cant find" in runtime.stdout:
+        if "we can't find" in runtime.stdout:
             for line in runtime.stdout.split("\n"):
-                if line.strip().startswith("we cant find"):
-                    tool = line.strip().replace("we cant find the", "").split(" ")[0]
+                if line.strip().startswith("we can't find"):
+                    tool = line.strip().replace("we can't find the", "").split(" ")[0]
                     break

             errmsg = (
diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py
index 5c92c7ec69..01ef126abb 100644
--- a/nipype/interfaces/base/specs.py
+++ b/nipype/interfaces/base/specs.py
@@ -177,7 +177,7 @@ def get_traitsfree(self, **kwargs):
         return out

     def _clean_container(self, objekt, undefinedval=None, skipundefined=False):
-        """Convert a traited obejct into a pure python representation."""
+        """Convert a traited object into a pure python representation."""
         if isinstance(objekt, TraitDictObject) or isinstance(objekt, dict):
             out = {}
             for key, val in list(objekt.items()):
diff --git a/nipype/interfaces/base/tests/test_core.py b/nipype/interfaces/base/tests/test_core.py
index cdfef51193..0c3f5a8dc1 100644
--- a/nipype/interfaces/base/tests/test_core.py
+++ b/nipype/interfaces/base/tests/test_core.py
@@ -288,28 +288,28 @@ class WithoutInput(WithInput):
         _version = "0.6"

     has = WithInput()
-    hasnt = WithoutInput()
+    hasnot = WithoutInput()
     trying_anyway = WithoutInput(foo=3)
     assert has.inputs.foo == 3
-    assert not nib.isdefined(hasnt.inputs.foo)
+    assert not nib.isdefined(hasnot.inputs.foo)
     assert trying_anyway.inputs.foo == 3

     has.run()
-    hasnt.run()
+    hasnot.run()
     with pytest.raises(Exception):
         trying_anyway.run()

     # Still settable
     has.inputs.foo = 4
-    hasnt.inputs.foo = 4
+    hasnot.inputs.foo = 4
     trying_anyway.inputs.foo = 4
     assert has.inputs.foo == 4
-    assert hasnt.inputs.foo == 4
+    assert hasnot.inputs.foo == 4
     assert trying_anyway.inputs.foo == 4

     has.run()
     with pytest.raises(Exception):
-        hasnt.run()
+        hasnot.run()
     with pytest.raises(Exception):
         trying_anyway.run()
diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py
index 991ca89dcf..15449515f4 100644
--- a/nipype/interfaces/cmtk/nx.py
+++ b/nipype/interfaces/cmtk/nx.py
@@ -542,8 +542,8 @@ def _run_interface(self, runtime):
             "Saving extra measure file to %s in Pickle format",
             op.abspath(out_pickled_extra_measures),
         )
-        with open(out_pickled_extra_measures, "w") as fo:
-            pickle.dump(dict_measures, fo)
+        with open(out_pickled_extra_measures, "w") as f:
+            pickle.dump(dict_measures, f)

         iflogger.info("Saving MATLAB measures as %s", matlab)
diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py
index 5c3f621e87..086d0a96c8 100644
--- a/nipype/interfaces/freesurfer/longitudinal.py
+++ b/nipype/interfaces/freesurfer/longitudinal.py
@@ -73,7 +73,7 @@ class RobustTemplateInputSpec(FSTraitedSpecOpenMP):
     )
     initial_timepoint = traits.Int(
         argstr="--inittp %d",
-        desc="use TP# for spacial init (default random), 0: no init",
+        desc="use TP# for special init (default random), 0: no init",
     )
     fixed_timepoint = traits.Bool(
         default_value=False,
diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py
index 50485bac2e..b657ec2fb0 100644
--- a/nipype/interfaces/fsl/model.py
+++ b/nipype/interfaces/fsl/model.py
@@ -2026,7 +2026,7 @@ class ClusterInputSpec(FSLCommandInputSpec):
         argstr="--stdvol=%s", desc="filename for standard-space volume"
     )
     num_maxima = traits.Int(argstr="--num=%d", desc="no of local maxima to report")
-    warpfield_file = File(argstr="--warpvol=%s", desc="file contining warpfield")
+    warpfield_file = File(argstr="--warpvol=%s", desc="file containing warpfield")


 class ClusterOutputSpec(TraitedSpec):
diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py
index 80ef25534a..f550fb1ef7 100644
--- a/nipype/interfaces/fsl/preprocess.py
+++ b/nipype/interfaces/fsl/preprocess.py
@@ -376,12 +376,12 @@ class FAST(FSLCommand):
     Examples
     --------
     >>> from nipype.interfaces import fsl
-    >>> fastr = fsl.FAST()
-    >>> fastr.inputs.in_files = 'structural.nii'
-    >>> fastr.inputs.out_basename = 'fast_'
-    >>> fastr.cmdline
+    >>> fast = fsl.FAST()
+    >>> fast.inputs.in_files = 'structural.nii'
+    >>> fast.inputs.out_basename = 'fast_'
+    >>> fast.cmdline
     'fast -o fast_ -S 1 structural.nii'
-    >>> out = fastr.run()  # doctest: +SKIP
+    >>> out = fast.run()  # doctest: +SKIP

     """
diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py
index ea06482500..7cae487abc 100644
--- a/nipype/interfaces/io.py
+++ b/nipype/interfaces/io.py
@@ -1683,7 +1683,7 @@ class FSSourceOutputSpec(TraitedSpec):
         File(exists=True), desc="Inflated surface meshes", loc="surf"
     )
     pial = OutputMultiPath(
-        File(exists=True), desc="Gray matter/pia mater surface meshes", loc="surf"
+        File(exists=True), desc="Gray matter/pia matter surface meshes", loc="surf"
     )
     area_pial = OutputMultiPath(
         File(exists=True),
diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py
index 4619740fc2..4624d2c780 100644
--- a/nipype/interfaces/mrtrix/tracking.py
+++ b/nipype/interfaces/mrtrix/tracking.py
@@ -388,14 +388,14 @@ class StreamlineTrack(CommandLine):
     -------

     >>> import nipype.interfaces.mrtrix as mrt
-    >>> strack = mrt.StreamlineTrack()
-    >>> strack.inputs.inputmodel = 'SD_PROB'
-    >>> strack.inputs.in_file = 'data.Bfloat'
-    >>> strack.inputs.seed_file = 'seed_mask.nii'
-    >>> strack.inputs.mask_file = 'mask.nii'
-    >>> strack.cmdline
+    >>> streamtrack = mrt.StreamlineTrack()
+    >>> streamtrack.inputs.inputmodel = 'SD_PROB'
+    >>> streamtrack.inputs.in_file = 'data.Bfloat'
+    >>> streamtrack.inputs.seed_file = 'seed_mask.nii'
+    >>> streamtrack.inputs.mask_file = 'mask.nii'
+    >>> streamtrack.cmdline
     'streamtrack -mask mask.nii -seed seed_mask.nii SD_PROB data.Bfloat data_tracked.tck'
-    >>> strack.run()  # doctest: +SKIP
+    >>> streamtrack.run()  # doctest: +SKIP
     """

     _cmd = "streamtrack"
@@ -423,10 +423,10 @@ class DiffusionTensorStreamlineTrack(StreamlineTrack):
     -------

     >>> import nipype.interfaces.mrtrix as mrt
-    >>> dtstrack = mrt.DiffusionTensorStreamlineTrack()
-    >>> dtstrack.inputs.in_file = 'data.Bfloat'
-    >>> dtstrack.inputs.seed_file = 'seed_mask.nii'
-    >>> dtstrack.run()  # doctest: +SKIP
+    >>> dtstreamtrack = mrt.DiffusionTensorStreamlineTrack()
+    >>> dtstreamtrack.inputs.in_file = 'data.Bfloat'
+    >>> dtstreamtrack.inputs.seed_file = 'seed_mask.nii'
+    >>> dtstreamtrack.run()  # doctest: +SKIP
     """

     input_spec = DiffusionTensorStreamlineTrackInputSpec
diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py
index 1afc887b06..c41a0da966 100644
--- a/nipype/interfaces/spm/tests/test_utils.py
+++ b/nipype/interfaces/spm/tests/test_utils.py
@@ -23,7 +23,7 @@ def test_coreg():
     _, tgt, _ = split_filename(target)
     mat = os.path.join(pth, "%s_to_%s.mat" % (mov, tgt))
     invmat = fname_presuffix(mat, prefix="inverse_")
-    scrpt = coreg._make_matlab_command(None)
+    script = coreg._make_matlab_command(None)
     assert coreg.inputs.mat == mat
     assert coreg.inputs.invmat == invmat

@@ -35,11 +35,11 @@ def test_apply_transform():
     assert applymat.inputs.matlab_cmd == "mymatlab"
     applymat.inputs.in_file = moving
     applymat.inputs.mat = mat
-    scrpt = applymat._make_matlab_command(None)
+    script = applymat._make_matlab_command(None)
     expected = "[p n e v] = spm_fileparts(V.fname);"
-    assert expected in scrpt
+    assert expected in script
     expected = "V.mat = transform.M * V.mat;"
-    assert expected in scrpt
+    assert expected in script


 def test_reslice():
diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py
index 45bd53e32c..635c90cef1 100644
--- a/nipype/interfaces/tests/test_io.py
+++ b/nipype/interfaces/tests/test_io.py
@@ -708,10 +708,10 @@ def _mock_get_ssh_client(self):


 def test_ExportFile(tmp_path):
-    testin = tmp_path / "in.txt"
-    testin.write_text("test string", encoding='utf-8')
+    test_in = tmp_path / "in.txt"
+    test_in.write_text("test string", encoding='utf-8')
     i = nio.ExportFile()
-    i.inputs.in_file = str(testin)
+    i.inputs.in_file = str(test_in)
     i.inputs.out_file = str(tmp_path / "out.tsv")
     i.inputs.check_extension = True
     with pytest.raises(RuntimeError):
diff --git a/nipype/utils/profiler.py b/nipype/utils/profiler.py
index 2179b29db6..d83b745df5 100644
--- a/nipype/utils/profiler.py
+++ b/nipype/utils/profiler.py
@@ -44,7 +44,7 @@ def stop(self):

 class ResourceMonitor(threading.Thread):
     """
-    A ``Thread`` to monitor a specific PID with a certain frequence
+    A ``Thread`` to monitor a specific PID with a certain frequency
     to a file
     """

diff --git a/tools/ex2rst b/tools/ex2rst
index 82653f80e5..1b19ce8726 100755
--- a/tools/ex2rst
+++ b/tools/ex2rst
@@ -98,7 +98,7 @@ def exfile2rst(filename):
             proc_line = None
             # handle doc start
             if not indocs:
-                # guarenteed to start with """
+                # guaranteed to start with """
                 if len(cleanline) > 3 \
                    and (cleanline.endswith('"""') \
                    or cleanline.endswith("'''")):
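The CI job added by ``.github/workflows/codespell.yml`` can also be exercised locally before pushing. Below is a minimal sketch, assuming ``codespell`` is installed (e.g. via pip) and that the installed release automatically reads the ``[codespell]`` section of the new ``.codespellrc`` from the working directory, as recent codespell versions do; the ``run_codespell`` helper is illustrative only and not part of this patch::

    import subprocess
    import sys

    def run_codespell(repo_root="."):
        """Run codespell from the repository root, where it finds .codespellrc."""
        # codespell exits with a non-zero status when it reports misspellings,
        # mirroring the check performed by the GitHub Actions workflow above.
        return subprocess.run(["codespell"], cwd=repo_root).returncode

    if __name__ == "__main__":
        sys.exit(run_codespell())

Running this from the repository root applies the same ``skip`` and ``ignore-words-list`` settings as CI, so new spelling fixes can be verified without waiting for the workflow.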