diff --git a/.gitignore b/.gitignore
index c4895be..eb43bb4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,6 +24,7 @@ venv/
docs/build
# Sphinx autogenerated API documentation
docs/source/defdap
+docs/source/userguide/howtouse.ipynb
# Pip install
DefDAP.egg-info/
diff --git a/defdap/base.py b/defdap/base.py
index f4f2401..0f2ee0e 100755
--- a/defdap/base.py
+++ b/defdap/base.py
@@ -45,14 +45,22 @@ class Map(ABC):
"""
def __init__(self, file_name, data_type=None, experiment=None,
increment=None, frame=None, map_name=None):
- """
+ """Initialize a Map object.
Parameters
----------
file_name : str
- Path to EBSD file, including name, excluding extension.
- data_type : str, {'OxfordBinary', 'OxfordText'}
- Format of EBSD data file.
+ Path to data file.
+ data_type : str, optional
+ Format of data file.
+ experiment : defdap.experiment.Experiment, optional
+ Experiment object to associate with this map.
+ increment : defdap.experiment.Increment, optional
+ Increment object to associate with this map.
+ frame : defdap.experiment.Frame, optional
+ Frame object for coordinate transformations.
+ map_name : str, optional
+ Name for this map in the experiment structure.
"""
@@ -273,7 +281,10 @@ def calc_line_profile(self, plot, start_end, **kwargs):
@report_progress("constructing neighbour network")
def build_neighbour_network(self):
- """Construct a list of neighbours
+ """Construct a network graph of neighbouring grains.
+
+ Creates a NetworkX graph where nodes are grains and edges connect
+ neighbouring grains that share a boundary.
"""
## TODO: fix HRDIC NN
@@ -439,11 +450,17 @@ def calc_proxigram(self, num_trials=500):
return trial_distances.min(axis=0)
def _validate_map(self, map_name):
- """Check the name exists and is a map data.
+ """Check that the data name exists and that it contains map data.
Parameters
----------
map_name : str
+ Name of the map data to validate.
+
+ Raises
+ ------
+ ValueError
+ If the data name does not exist or is not map data.
"""
if map_name not in self.data:
@@ -453,19 +470,26 @@ def _validate_map(self, map_name):
raise ValueError(f'`{map_name}` is not a valid map.')
def _validate_component(self, map_name, comp):
- """
+ """Validate component specification for map data.
Parameters
----------
map_name : str
- comp : int or tuple of int or str
+ Name of the map data.
+ comp : int or tuple of int or str, optional
Component of the map data. This is either the
tensor component (tuple of ints) or the name of a calculation
to be applied e.g. 'norm', 'all_euler' or 'IPF_x'.
Returns
-------
- tuple of int or str
+ tuple of int or str or None
+ Validated component specification.
+
+ Raises
+ ------
+ ValueError
+ If component specification is invalid.
"""
order = self.data[map_name, 'order']
@@ -590,14 +614,14 @@ def plot_map(self, map_name, component=None, **kwargs):
return MapPlot.create(self, map_data, **plot_params)
def calc_grain_average(self, map_data, grain_ids=-1):
- """Calculate grain average of any DIC map data.
+ """Calculate grain average of map data.
Parameters
----------
map_data : numpy.ndarray
Array of map data to grain average. This must be cropped!
- grain_ids : list, optional
- grain_ids to perform operation on, set to -1 for all grains.
+ grain_ids : list of int, optional
+ Grain IDs to perform operation on. Set to -1 for all grains.
Returns
-------
@@ -610,14 +634,27 @@ def calc_grain_average(self, map_data, grain_ids=-1):
grain_average_data = np.zeros(len(grain_ids))
- for i, grainId in enumerate(grain_ids):
- grain = self[grainId]
- grainData = grain.grain_data(map_data)
- grain_average_data[i] = grainData.mean()
+ for i, grain_id in enumerate(grain_ids):
+ grain = self[grain_id]
+ grain_data = grain.grain_data(map_data)
+ grain_average_data[i] = grain_data.mean()
return grain_average_data
def grain_data_to_map(self, name):
+ """Convert grain list data to map data.
+
+ Parameters
+ ----------
+ name : str
+ Map name of the grain data to convert.
+
+ Returns
+ -------
+ numpy.ndarray
+ Map data created from grain data.
+
+ """
map_data = np.zeros(self[0].data[name].shape[:-1] + self.shape)
for grain in self:
for i, point in enumerate(grain.data.point):
@@ -664,8 +701,8 @@ def grain_data_to_map_data(self, grain_data, grain_ids=-1, bg=0):
"single value or RGB values per grain.")
grain_map = np.full(mapShape, bg, dtype=grain_data.dtype)
- for grainId, grain_value in zip(grain_ids, grain_data):
- for point in self[grainId].data.point:
+ for grain_id, grain_value in zip(grain_ids, grain_data):
+ for point in self[grain_id].data.point:
grain_map[point[1], point[0]] = grain_value
return grain_map
@@ -673,18 +710,18 @@ def grain_data_to_map_data(self, grain_data, grain_ids=-1, bg=0):
def plot_grain_data_map(
self, map_data=None, grain_data=None, grain_ids=-1, bg=0, **kwargs
):
- """Plot a grain map with grains coloured by given data. The data
- can be provided as a list of values per grain or as a map which
- a grain average will be applied.
+ """Plot a grain map with grains colored by given data. The data
+ can be provided as a list of values per grain or as a map
+ from which grain averages will be calculated.
Parameters
----------
map_data : numpy.ndarray, optional
- Array of map data. This must be cropped! Either mapData or
+ Array of map data. This must be cropped! Either map_data or
grain_data must be supplied.
- grain_data : list or np.array, optional
- Grain values. This an be a single value per grain or RGB
- values. You must supply either mapData or grain_data.
+ grain_data : list or numpy.ndarray, optional
+ Grain values. This can be a single value per grain or RGB
+ values. You must supply either map_data or grain_data.
grain_ids: list of int or int, optional
IDs of grains to plot for. Use -1 for all grains in the map.
bg: int or real, optional
@@ -721,24 +758,29 @@ def plot_grain_data_ipf(
**kwargs
):
"""
- Plot IPF of grain reference (average) orientations with
- points coloured by grain average values from map data.
+ Plot IPF of grain average orientations with points coloured
+ by grain average values from map data.
Parameters
----------
direction : numpy.ndarray
Vector of reference direction for the IPF.
- map_data : numpy.ndarray
- Array of map data. This must be cropped! Either mapData or
+ map_data : numpy.ndarray, optional
+ Array of map data. This must be cropped! Either map_data or
grain_data must be supplied.
- grain_data : list or np.array, optional
- Grain values. This an be a single value per grain or RGB
- values. You must supply either mapData or grain_data.
- grain_ids: list of int or int, optional
+ grain_data : list or numpy.ndarray, optional
+ Grain values. This can be a single value per grain or RGB
+ values. You must supply either map_data or grain_data.
+ grain_ids : list of int, optional
IDs of grains to plot for. Use -1 for all grains in the map.
kwargs : dict, optional
Keyword arguments passed to :func:`defdap.quat.Quat.plot_ipf`
+ Returns
+ -------
+ defdap.plotting.Plot
+ Plot object containing the IPF.
+
"""
# Set default plot parameters then update with any input
plot_params = {}
@@ -781,6 +823,18 @@ class Grain(ABC):
"""
def __init__(self, grain_id, owner_map, group_id):
+ """Initialize a Grain object.
+
+ Parameters
+ ----------
+ grain_id : int
+ Unique identifier for this grain.
+ owner_map : defdap.base.Map
+ The map that contains this grain.
+ group_id : uuid.UUID
+ Group identifier for the datastore.
+
+ """
self.data = Datastore(group_id=group_id)
self.data.add_derivative(
owner_map.data, self.grain_data,
@@ -885,16 +939,17 @@ def plot_outline(self, ax=None, plot_scale_bar=False, **kwargs):
Parameters
----------
- ax : matplotlib.axes.Axes
- axis to plot on, if not provided the current active axis is used.
- plot_scale_bar : bool
- plots the scale bar on the grain if true.
- kwargs : dict
- keyword arguments passed to :func:`defdap.plotting.GrainPlot.add_map`
+ ax : matplotlib.axes.Axes, optional
+ Axis to plot on. If not provided, the current active axis is used.
+ plot_scale_bar : bool, optional
+ If True, plots the scale bar on the grain.
+ kwargs : dict, optional
+ Keyword arguments passed to :func:`defdap.plotting.GrainPlot.add_map`
Returns
-------
defdap.plotting.GrainPlot
+ Plot object containing the grain outline.
"""
plot = plotting.GrainPlot(self, ax=ax)
@@ -926,15 +981,15 @@ def grain_map_data(self, map_data=None, grain_data=None, bg=np.nan):
Parameters
----------
- map_data : numpy.ndarray
- Array of map data. This must be cropped! Either this or
- 'grain_data' must be supplied and 'grain_data' takes precedence.
- grain_data : numpy.ndarray
+ map_data : numpy.ndarray, optional
+ Array of map data. Either this or grain_data must be supplied.
+ grain_data takes precedence.
+ grain_data : numpy.ndarray, optional
Array of data at each point in the grain. Either this or
- 'mapData' must be supplied and 'grain_data' takes precedence.
+ map_data must be supplied. grain_data takes precedence.
bg : various, optional
Value to fill the background with. Must be same dtype as
- input array.
+ input array. Default is :obj:`numpy.nan`.
Returns
-------
@@ -944,7 +999,7 @@ def grain_map_data(self, map_data=None, grain_data=None, bg=np.nan):
"""
if grain_data is None:
if map_data is None:
- raise ValueError("Either 'mapData' or 'grain_data' must "
+ raise ValueError("Either 'map_data' or 'grain_data' must "
"be supplied.")
else:
grain_data = self.grain_data(map_data)
@@ -966,29 +1021,29 @@ def grain_map_data(self, map_data=None, grain_data=None, bg=np.nan):
def grain_map_data_coarse(self, map_data=None, grain_data=None,
kernel_size=2, bg=np.nan):
"""
- Create a coarsened data map of this grain only from the given map
- data. Data is coarsened using a kernel at each pixel in the
- grain using only data in this grain.
+ Create a coarsened data map of this grain only from given map
+ or grain data. Pixel values are averaged within a kernel around each
+ pixel in the grain.
Parameters
----------
- map_data : numpy.ndarray
- Array of map data. This must be cropped! Either this or
- 'grain_data' must be supplied and 'grain_data' takes precedence.
- grain_data : numpy.ndarray
- List of data at each point in the grain. Either this or
- 'mapData' must be supplied and 'grain_data' takes precedence.
+ map_data : numpy.ndarray, optional
+ Array of map data. Either this or grain_data must be supplied.
+ grain_data takes precedence.
+ grain_data : numpy.ndarray, optional
+ Array of data at each point in the grain. Either this or
+ map_data must be supplied. grain_data takes precedence.
kernel_size : int, optional
- Size of kernel as the number of pixels to dilate by i.e 1
- gives a 3x3 kernel.
+ Size of kernel as the number of pixels to dilate by. For example,
+ 1 gives a 3x3 kernel. Default is 2 (5x5 kernel).
bg : various, optional
Value to fill the background with. Must be same dtype as
- input array.
+ input array. Default is :obj:`numpy.nan`.
Returns
-------
numpy.ndarray
- Map of this grains coarsened data.
+ Coarsened data map for this grain.
"""
grain_map_data = self.grain_map_data(map_data=map_data, grain_data=grain_data)
@@ -1032,20 +1087,24 @@ def grain_map_data_coarse(self, map_data=None, grain_data=None,
return grain_map_data_coarse
def plot_grain_data(self, map_data=None, grain_data=None, **kwargs):
- """
- Plot a map of this grain from the given map data.
+ """Plot a map of this grain from the given map or grain data.
Parameters
----------
- map_data : numpy.ndarray
- Array of map data. This must be cropped! Either this or
- 'grain_data' must be supplied and 'grain_data' takes precedence.
- grain_data : numpy.ndarray
- List of data at each point in the grain. Either this or
- 'mapData' must be supplied and 'grain_data' takes precedence.
+ map_data : numpy.ndarray, optional
+ Array of map data. Either this or grain_data must be supplied.
+ grain_data takes precedence.
+ grain_data : numpy.ndarray, optional
+ Array of data at each point in the grain. Either this or
+ map_data must be supplied. grain_data takes precedence.
kwargs : dict, optional
Keyword arguments passed to :func:`defdap.plotting.GrainPlot.create`
+ Returns
+ -------
+ defdap.plotting.GrainPlot
+ Plot object containing the grain data map.
+
"""
# Set default plot parameters then update with any input
plot_params = {}
@@ -1058,11 +1117,17 @@ def plot_grain_data(self, map_data=None, grain_data=None, **kwargs):
return plot
def _validate_list(self, list_name):
- """Check the name exists and is a list data.
+ """Check that data name exists and is valid list data.
Parameters
----------
list_name : str
+ Name of the list data to validate.
+
+ Raises
+ ------
+ ValueError
+ If the data name does not exist or is not list data.
"""
if list_name not in self.data:
@@ -1072,19 +1137,26 @@ def _validate_list(self, list_name):
raise ValueError(f'`{list_name}` is not a valid data.')
def _validate_component(self, map_name, comp):
- """
+ """Validate and normalize component specification for grain data.
Parameters
----------
map_name : str
- comp : int or tuple of int or str
- Component of the map data. This is either the
+ Name of the data.
+ comp : int or tuple of int or str, optional
+ Component of the data. This is either the
tensor component (tuple of ints) or the name of a calculation
to be applied e.g. 'norm', 'all_euler' or 'IPF_x'.
Returns
-------
- tuple of int or str
+ tuple of int or str or None
+ Validated component specification.
+
+ Raises
+ ------
+ ValueError
+ If component specification is invalid.
"""
order = self.data[map_name, 'order']
@@ -1162,12 +1234,12 @@ def plot_map(self, map_name, component=None, **kwargs):
----------
map_name : str
Map data name to plot i.e. e, max_shear, euler_angle, orientation.
- component : int or tuple of int or str
+ component : int or tuple of int or str, optional
Component of the map data to plot. This is either the tensor
component (int or tuple of ints) or the name of a calculation
to be applied e.g. 'norm', 'all_euler' or 'IPF_x'.
- kwargs
- All arguments are passed to :func:`defdap.plotting.MapPlot.create`.
+ kwargs : dict, optional
+ Keyword arguments passed to :func:`defdap.plotting.MapPlot.create`.
Returns
-------
diff --git a/defdap/crystal.py b/defdap/crystal.py
index b38c418..afcce9e 100755
--- a/defdap/crystal.py
+++ b/defdap/crystal.py
@@ -23,18 +23,46 @@
class Phase(object):
+ """
+ Represents a crystallographic phase.
+
+ Stores phase information including crystal structure, lattice parameters,
+ and associated slip systems.
+
+ Attributes
+ ----------
+ name : str
+ Name of the phase.
+ laue_group : int
+ Laue group number.
+ space_group : int
+ Space group number.
+ lattice_params : tuple of float
+ Lattice parameters (a, b, c, alpha, beta, gamma)
+ where lengths are in angstrom and angles are in radians.
+ crystal_structure : defdap.crystal.CrystalStructure
+ Crystal structure object for the phase.
+ slip_systems : list or list of list of SlipSystem, optional
+ Slip systems for the phase.
+ slip_trace_colours : list of str, optional
+ Colours for slip plane traces.
+
+ """
def __init__(self, name, laue_group, space_group, lattice_params):
"""
+ Initialize a Phase object.
+
Parameters
----------
name : str
- Name of the phase
+ Name of the phase.
laue_group : int
- Laue group
+ Laue group number (e.g., 9 for hexagonal, 11 for cubic).
space_group : int
- Space group
- lattice_params : tuple
- Lattice parameters in order (a,b,c,alpha,beta,gamma)
+ Space group number.
+ lattice_params : tuple of float
+ Lattice parameters in order (a, b, c, alpha, beta, gamma)
+ where lengths are in angstrom and angles are in radians.
"""
self.name = name
@@ -71,7 +99,7 @@ def __init__(self, name, laue_group, space_group, lattice_params):
def __str__(self):
text = ("Phase: {:}\n Crystal structure: {:}\n Lattice params: "
- "({:.2f}, {:.2f}, {:.2f}, {:.0f}, {:.0f}, {:.0f})\n"
+ "({:.3f} Å, {:.3f} Å, {:.3f} Å, {:.0f} °, {:.0f} °, {:.0f} °)\n"
" Slip systems: {:}")
return text.format(self.name, self.crystal_structure.name,
*self.lattice_params[:3],
@@ -80,12 +108,14 @@ def __str__(self):
@property
def c_over_a(self):
+ """Crystal c over a ratio for hexagonal crystals."""
if self.crystal_structure is crystalStructures['hexagonal']:
return self.lattice_params[2] / self.lattice_params[0]
return None
def print_slip_systems(self):
- """Print a list of slip planes (with colours) and slip directions.
+ """Print slip plane family and their associated colors.
+ For each slip plane, print corresponding slip directions.
"""
# TODO: this should be moved to static method of the SlipSystem class
@@ -217,8 +247,11 @@ def __init__(self, name, symmetries, vertices, faces):
class SlipSystem(object):
- """Class used for defining and performing operations on a slip system.
+ """
+ Class for defining and performing operations on a slip system.
+ Handles slip system operations including plane and direction transformations
+ for both cubic and hexagonal crystal structures.
"""
def __init__(self, slip_plane, slip_dir, crystal_structure, c_over_a=None):
"""Initialise a slip system object.
@@ -282,42 +315,44 @@ def __str__(self):
return self.slip_plane_label + self.slip_dir_label
def __repr__(self):
- return (f"SlipSystem(slipPlane={self.slip_plane_label}, "
- f"slipDir={self.slip_dir_label}, "
+ return (f"SlipSystem(slip_plane={self.slip_plane_label}, "
+ f"slip_dir={self.slip_dir_label}, "
f"symmetry={self.crystal_structure.name})")
@property
def slip_plane_label(self):
- """Return the slip plane label. For example '(111)'.
+ """Slip plane label.
Returns
-------
str
- Slip plane label.
+ Slip plane label in the format '(hkl)'. For example, '(111)'.
"""
return idc_to_string(self.plane_idc, '()')
@property
def slip_dir_label(self):
- """Returns the slip direction label. For example '[110]'.
+ """Slip direction label.
Returns
-------
str
- Slip direction label.
+ Slip direction label in the format '[uvw]'. For example, '[110]'.
"""
return idc_to_string(self.dir_idc, '[]')
def generate_family(self):
- """Generate the family of slip systems which this system belongs to.
+ """
+ Generate the family of slip systems to which this system belongs by
+ applying symmetry operations to generate equivalent slip systems.
Returns
-------
- list of SlipSystem
- The family of slip systems.
-
+ set of SlipSystem
+ Symmetrically equivalent slip systems.
+
"""
#
symms = self.crystal_structure.symmetries
@@ -365,32 +400,39 @@ def generate_family(self):
@staticmethod
def load(name, crystal_structure, c_over_a=None, group_by='plane'):
"""
- Load in slip systems from file. 3 integers for slip plane
- normal and 3 for slip direction. Returns a list of list of slip
- systems grouped by slip plane.
+ Reads slip system definitions from a text file. File should contain
+ Miller (or Miller-Bravais for hexagonal) indices: 3 (or 4) integers
+ for slip plane normal and 3 (or 4) for slip direction.
Parameters
----------
name : str
- Name of the slip system file (without file extension)
- stored in the defdap install dir or path to a file.
+ Slip system file name (without extension) in defdap's
+ slip_systems directory, or full file path.
crystal_structure : defdap.crystal.CrystalStructure
Crystal structure of the slip systems.
c_over_a : float, optional
- C over a ratio for hexagonal crystals.
+ C over a ratio (required for hexagonal crystals).
group_by : str, optional
- How to group the slip systems, either by slip plane ('plane')
- or slip system family ('family') or don't group (None).
+ Grouping method: 'plane' (default) to group by slip plane,
+ 'family' to group by slip system family, or None for no grouping.
Returns
-------
- list of list of SlipSystem
- A list of list of slip systems grouped slip plane.
+ tuple
+ Slip systems and trace colors.
+
+ - slip_systems : list of list of SlipSystem objects,
+ optionally grouped based on `group_by`.
+ - slip_trace_colours : list of str
+ RGB color codes for each slip plane group.
Raises
------
+ FileNotFoundError
+ If slip system file cannot be found.
IOError
- Raised if not 6/8 integers per line.
+ If file format is invalid (not 6/8 integers per line).
"""
# try and load from package dir first
@@ -442,20 +484,25 @@ def load(name, crystal_structure, c_over_a=None, group_by='plane'):
@staticmethod
def group(slip_systems, group_by):
"""
- Groups slip systems by their slip plane.
+ Groups slip systems by their slip plane or family.
Parameters
----------
slip_systems : list of SlipSystem
- A list of slip systems.
+ List of slip systems to group.
group_by : str
- How to group the slip systems, either by slip plane ('plane')
- or slip system family ('family').
+ Grouping method - either 'plane' to group by slip plane
+ or 'family' to group by slip system equivalence family.
Returns
-------
list of list of SlipSystem
- A list of list of grouped slip systems.
+ Grouped slip systems.
+
+ Raises
+ ------
+ ValueError
+ If `group_by` is not 'plane' or 'family'.
"""
if group_by.lower() == 'plane':
diff --git a/defdap/crystal_utils.py b/defdap/crystal_utils.py
index 18018b5..2af7b31 100644
--- a/defdap/crystal_utils.py
+++ b/defdap/crystal_utils.py
@@ -33,8 +33,28 @@
def create_l_matrix(a, b, c, alpha, beta, gamma, convention=None):
- """ Construct L matrix based on Page 22 of
- Randle and Engle - Introduction to texture analysis"""
+ """ Construct L matrix.
+
+ Parameters
+ ----------
+ a, b, c : float
+ Lattice parameters in angstroms.
+ alpha, beta, gamma : float
+ Interaxial lattice angles in radians.
+ convention : str {'hkl', 'oi', 'tsl'}, optional
+ Orthonormalisation convention. If omitted, uses
+ ``defaults['crystal_ortho_conv']``.
+
+ Returns
+ -------
+ numpy.ndarray
+ A ``(3, 3)`` L matrix for transforming crystal directions.
+
+ References
+ ----------
+ Page 22 of Randle and Engle - Introduction to Texture Analysis
+
+ """
l_matrix = np.zeros((3, 3))
cos_alpha = np.cos(alpha)
@@ -85,9 +105,23 @@ def create_l_matrix(a, b, c, alpha, beta, gamma, convention=None):
def create_q_matrix(l_matrix):
- """ Construct matrix of reciprocal lattice vectors to transform
- plane normals See C. T. Young and J. L. Lytton, J. Appl. Phys.,
- vol. 43, no. 4, pp. 1408–1417, 1972."""
+ """Construct matrix of reciprocal lattice vectors to transform plane normals
+
+ Parameters
+ ----------
+ l_matrix : numpy.ndarray
+ Direct-lattice transform matrix of shape ``(3, 3)``.
+
+ Returns
+ -------
+ numpy.ndarray
+ A ``(3, 3)`` Q matrix whose columns are reciprocal lattice vectors.
+
+ References
+ ----------
+ C. T. Young and J. L. Lytton, J. Appl. Phys., vol. 43, no. 4, pp. 1408–1417, 1972.
+
+ """
a = l_matrix[:, 0]
b = l_matrix[:, 1]
c = l_matrix[:, 2]
@@ -103,6 +137,21 @@ def create_q_matrix(l_matrix):
def check_len(val, length):
+ """Validate that a vector-like object has the expected length.
+
+ Parameters
+ ----------
+ val : collection
+ Value to validate.
+ length : int
+ Required number of elements.
+
+ Raises
+ ------
+ ValueError
+ If ``val`` does not contain exactly ``length`` elements.
+
+ """
if len(val) != length:
raise ValueError(f"Vector must have {length} values.")
@@ -117,9 +166,9 @@ def convert_idc(in_type, *, dir=None, plane=None):
Type of indices provided. If 'm' converts from Miller to
Miller-Bravais, opposite for 'mb'.
dir : tuple of int or equivalent, optional
- Direction to convert. This OR `plane` must me provided.
+ Direction to convert. This OR ``plane`` must be provided.
plane : tuple of int or equivalent, optional
- Plane to convert. This OR `direction` must me provided.
+ Plane to convert. This OR ``dir`` must be provided.
Returns
-------
@@ -180,6 +229,30 @@ def equavlent_indicies(
c_over_a=None,
in_type=None
):
+ """Generate crystallographically equivalent planes or directions.
+
+ Parameters
+ ----------
+ crystal_symm : str
+ Crystal symmetry name, e.g. ``'hexagonal'``.
+ symmetries : iterable
+ Symmetry operators.
+ dir : tuple of int, optional
+ Direction indices. Provide either ``dir`` or ``plane``.
+ plane : tuple of int, optional
+ Plane indices. Provide either ``plane`` or ``dir``.
+ c_over_a : float, optional
+ Hexagonal ``c/a`` ratio.
+ in_type : str {'m', 'mb'}, optional
+ Input index basis. Defaults to ``'mb'`` for hexagonal crystals,
+ otherwise ``'m'``.
+
+ Returns
+ -------
+ list[tuple[int, ...]]
+ Equivalent indices in the requested basis.
+
+ """
if dir is None and plane is None:
raise ValueError("One of either `direction` or `plane` must be "
"provided.")
@@ -252,16 +325,19 @@ def project_to_orth(c_over_a, *, dir=None, plane=None, in_type='mb'):
Parameters
----------
- in_type : str {'m', 'mb'}
- Type of indices provided
+ c_over_a : float
+ Hexagonal lattice ratio ``c/a``.
dir : tuple of int or equivalent, optional
- Direction to convert. This OR `plane` must me provided.
+ Direction to convert. This OR ``plane`` must be provided.
plane : tuple of int or equivalent, optional
- Plane to convert. This OR `direction` must me provided.
+ Plane to convert. This OR ``dir`` must be provided.
+ in_type : str {'m', 'mb'}
+ Type of indices provided.
Returns
-------
-
+ numpy.ndarray
+ Projected direction or plane normal.
"""
if dir is None and plane is None:
@@ -270,7 +346,6 @@ def project_to_orth(c_over_a, *, dir=None, plane=None, in_type='mb'):
if dir is not None and plane is not None:
raise ValueError("One of either `direction` or `plane` must be "
"provided, not both.")
-
if in_type == 'mb':
if dir is None:
check_len(plane, 4)
@@ -322,7 +397,7 @@ def pos_idc(vec):
def reduce_idc(vec):
"""
- Reduce indices to lowest integers
+ Reduce indices to lowest integers.
Parameters
----------
@@ -371,7 +446,7 @@ def safe_int_cast(vec, tol=1e-3):
def idc_to_string(idc, brackets=None, str_type='unicode'):
"""
- String representation of a set of indicies.
+ String representation of a set of indices.
Parameters
----------
@@ -399,11 +474,18 @@ def str_idx(idx, str_type='unicode'):
----------
idx : int
str_type : str {'unicode', 'tex'}
+ Output format. ``'unicode'`` uses combining overbars for negative
+ values, while ``'tex'`` returns TeX math markup.
Returns
-------
str
+ Raises
+ ------
+ ValueError
+ If ``idx`` is not an integer.
+
"""
if not isinstance(idx, (int, np.integer)):
raise ValueError("Index must be an integer.")
diff --git a/defdap/ebsd.py b/defdap/ebsd.py
index 56ee7ee..f1863d9 100755
--- a/defdap/ebsd.py
+++ b/defdap/ebsd.py
@@ -50,31 +50,37 @@ class Map(base.Map):
origin : tuple(int)
Map origin (x, y). Used by linker class where origin is a
homologue point of the maps.
-
data : defdap.utils.Datastore
+ Data storage object containing:
+
Must contain after loading data (maps):
- phase : numpy.ndarray
- 1-based, 0 is non-indexed points
- euler_angle : numpy.ndarray
- stored as (3, y_dim, x_dim) in radians
+
+ - phase : numpy.ndarray
+ 1-based, 0 is non-indexed points
+ - euler_angle : numpy.ndarray
+ stored as (3, y_dim, x_dim) in radians
+
Generated data:
- orientation : numpy.ndarray of defdap.quat.Quat
- Quaterion for each point of map. Shape (y_dim, x_dim).
- grain_boundaries : BoundarySet
- phase_boundaries : BoundarySet
- grains : numpy.ndarray of int
- Map of grains. Grain numbers start at 1 here but everywhere else
- grainID starts at 0. Regions that are smaller than the minimum
- grain size are given value -2. Remnant boundary points are -1.
- KAM : numpy.ndarray
- Kernal average misorientaion map.
- GND : numpy.ndarray
- GND scalar map.
- Nye_tensor : numpy.ndarray
- 3x3 Nye tensor at each point.
+
+ - orientation : numpy.ndarray of defdap.quat.Quat
+ Quaternion for each point of map. Shape (y_dim, x_dim).
+ - grain_boundaries : BoundarySet
+ - phase_boundaries : BoundarySet
+ - grains : numpy.ndarray of int
+ Map of grains. Grain numbers start at 1 here but everywhere else
+ grainID starts at 0. Regions that are smaller than the minimum
+ grain size are given value -2. Remnant boundary points are -1.
+ - KAM : numpy.ndarray
+ Kernel average misorientation map.
+ - GND : numpy.ndarray
+ GND scalar map.
+ - Nye_tensor : numpy.ndarray
+ 3x3 Nye tensor at each point.
+
Derived data:
- grain_data_to_map : numpy.ndarray
- Grain list data to map data from all grains
+
+ - grain_data_to_map : numpy.ndarray
+ Grain list data to map data from all grains
"""
MAPNAME = 'ebsd'
@@ -224,6 +230,14 @@ def c_over_a(self):
@property
def num_phases(self):
+ """Number of phases in the EBSD map.
+
+ Returns
+ -------
+ int or None
+ Number of phases, or ``None`` if no phases are defined.
+
+ """
return len(self.phases) or None
@property
@@ -240,6 +254,7 @@ def primary_phase(self):
@property
def scale(self):
+ """Spatial scale of the map in microns per pixel."""
return self.step_size
@report_progress("rotating EBSD data")
@@ -264,6 +279,23 @@ def rotate_data(self):
yield 1.
def calc_euler_colour(self, map_data, phases=None, bg_colour=None):
+ """Calculate RGB colours using Euler colouring.
+
+ Parameters
+ ----------
+ map_data : numpy.ndarray
+ Euler-angle map data with shape ``(3, y, x)``.
+ phases : list of int, optional
+ Phase IDs to include. If omitted, include all phases.
+ bg_colour : numpy.ndarray, optional
+ Background RGB colour used where phases are excluded.
+
+ Returns
+ -------
+ numpy.ndarray
+ RGB map array with shape ``(y, x, 3)``.
+
+ """
if phases is None:
phases = self.phases
phase_ids = range(len(phases))
@@ -292,6 +324,25 @@ def calc_euler_colour(self, map_data, phases=None, bg_colour=None):
def calc_ipf_colour(self, map_data, direction, phases=None,
bg_colour=None):
+ """Calculate RGB colours from IPF colouring.
+
+ Parameters
+ ----------
+ map_data : numpy.ndarray
+ Orientation data as quaternion objects.
+ direction : numpy.ndarray
+ Sample reference direction for IPF colouring.
+ phases : list of int, optional
+ Phase IDs to include. If omitted, include all phases.
+ bg_colour : numpy.ndarray, optional
+ Background RGB colour used where phases are excluded.
+
+ Returns
+ -------
+ numpy.ndarray
+ RGB map array with shape ``(y, x, 3)``.
+
+ """
if phases is None:
phases = self.phases
phase_ids = range(len(phases))
@@ -580,6 +631,19 @@ def calc_quat_array(self):
return quats
def filter_data(self, misori_tol=5):
+ """Apply a Kuwahara-style quaternion filter.
+
+ Parameters
+ ----------
+ misori_tol : float, optional
+ Misorientation tolerance in degrees.
+
+ Returns
+ -------
+ numpy.ndarray
+ Last processed quadrant quaternion subset.
+
+ """
# Kuwahara filter
print("8 quadrants")
misori_tol *= np.pi / 180
@@ -751,6 +815,7 @@ def find_boundaries(self, misori_tol=10):
@report_progress("constructing neighbour network")
def build_neighbour_network(self):
+ """Construct the grain-neighbour network from boundary pixels."""
# create network
nn = nx.Graph()
nn.add_nodes_from(self.grains)
@@ -1222,7 +1287,7 @@ def __init__(self, grain_id, ebsdMap, group_id):
@property
def crystal_sym(self):
- """Temporary"""
+ """Crystal symmetry name of the grain phase."""
return self.phase.crystal_structure.name
def calc_average_ori(self):
@@ -1285,6 +1350,14 @@ def build_mis_ori_list(self, calc_axis=False):
self.mis_ori_axis_list.append(row)
def calc_grod(self):
+ """Calculate GROD magnitude and axis for all grain points.
+
+ Returns
+ -------
+ tuple[numpy.ndarray, numpy.ndarray]
+ GROD magnitudes and GROD axis vectors.
+
+ """
quat_comps = Quat.calc_sym_eqvs(self.data.orientation, self.crystal_sym)
if self.ref_ori is None:
@@ -1310,7 +1383,22 @@ def calc_grod(self):
return misori, misori_axis
- def calc_ipf_colour(self, grain_data, direction, bg_colour=None):
+ def calc_ipf_colour(self, grain_data, direction):
+ """Calculate grain colours from IPF colouring.
+
+ Parameters
+ ----------
+ grain_data : numpy.ndarray
+ Grain orientation data as quaternions.
+ direction : numpy.ndarray
+ Sample reference direction for IPF colouring.
+
+ Returns
+ -------
+ numpy.ndarray
+ RGB colour array for the grain.
+
+ """
grain_colours = Quat.calc_ipf_colours(
grain_data, direction, self.phase.crystal_structure.name
@@ -1318,7 +1406,20 @@ def calc_ipf_colour(self, grain_data, direction, bg_colour=None):
return grain_colours
- def calc_euler_colour(self, grain_data, bg_colour=None):
+ def calc_euler_colour(self, grain_data):
+ """Calculate grain colours from normalised Euler angles.
+
+ Parameters
+ ----------
+ grain_data : numpy.ndarray
+ Euler-angle data with shape ``(3, n_points)``.
+
+ Returns
+ -------
+ numpy.ndarray
+ RGB colour array for the grain.
+
+ """
if self.phase.crystal_structure.name == 'cubic':
norm = np.array([2 * np.pi, np.pi / 2, np.pi / 2])
@@ -1571,17 +1672,20 @@ def calc_slip_traces(self, slip_systems=None):
class BoundarySet(object):
+ """Container for phase and grain boundary point sets."""
# boundaries : numpy.ndarray
# Map of boundaries. -1 for a boundary, 0 otherwise.
# phaseBoundaries : numpy.ndarray
# Map of phase boundaries. -1 for boundary, 0 otherwise.
def __init__(self, ebsd_map, points_x, points_y):
+ """Initialise a boundary set from horizontal and vertical points."""
self.ebsd_map = ebsd_map
self.points_x = set(points_x)
self.points_y = set(points_y)
@classmethod
def from_image(cls, ebsd_map, image_x, image_y):
+ """Create a boundary set from boolean boundary images."""
return cls(
ebsd_map,
zip(*image_x.transpose().nonzero()),
@@ -1590,6 +1694,7 @@ def from_image(cls, ebsd_map, image_x, image_y):
@classmethod
def from_boundary_segments(cls, b_segs):
+ """Create a boundary set from boundary segments."""
points_x = []
points_y = []
for b_seg in b_segs:
@@ -1600,27 +1705,33 @@ def from_boundary_segments(cls, b_segs):
@property
def points(self):
+ """Combined boundary points from horizontal and vertical sets."""
return self.points_x.union(self.points_y)
def _image(self, points):
+ """Convert a point collection to a boolean map image."""
image = np.zeros(self.ebsd_map.shape, dtype=bool)
image[tuple(zip(*points))[::-1]] = True
return image
@property
def image_x(self):
+ """Boolean image of horizontal boundary points."""
return self._image(self.points_x)
@property
def image_y(self):
+ """Boolean image of vertical boundary points."""
return self._image(self.points_y)
@property
def image(self):
+ """Boolean image of all boundary points."""
return self._image(self.points)
@property
def lines(self):
+ """Line segments representing all boundary points."""
_, _, lines = self.boundary_points_to_lines(
boundary_points_x=self.points_x,
boundary_points_y=self.points_y
@@ -1630,6 +1741,21 @@ def lines(self):
@staticmethod
def boundary_points_to_lines(*, boundary_points_x=None,
boundary_points_y=None):
+ """Convert boundary points to line segments for plotting.
+
+ Parameters
+ ----------
+ boundary_points_x : iterable of tuple, optional
+ Horizontal boundary points.
+ boundary_points_y : iterable of tuple, optional
+ Vertical boundary points.
+
+ Returns
+ -------
+ list or tuple
+ Line-segment collections for provided boundary directions.
+
+ """
boundary_data = {}
if boundary_points_x is not None:
boundary_data['x'] = boundary_points_x
@@ -1660,7 +1786,10 @@ def boundary_points_to_lines(*, boundary_points_x=None,
class BoundarySegment(object):
+ """Boundary segment between two neighbouring grains."""
+
def __init__(self, ebsdMap, grain1, grain2):
+ """Initialise a boundary segment for a grain pair."""
self.ebsdMap = ebsdMap
self.grain1 = grain1
@@ -1688,6 +1817,18 @@ def __len__(self):
return len(self.boundary_points_x) + len(self.boundary_points_y)
def addBoundaryPoint(self, point, kind, owner_grain):
+ """Add a boundary point and its owner grain.
+
+ Parameters
+ ----------
+ point : tuple[int, int]
+ Boundary point coordinates.
+ kind : int
+ Boundary type: ``0`` for horizontal, ``1`` for vertical.
+ owner_grain
+ Grain that owns the point side.
+
+ """
if kind == 0:
self.boundary_points_x.append(point)
self.boundary_point_owners_x.append(owner_grain is self.grain1)
@@ -1744,6 +1885,14 @@ def boundary_lines(self):
return lines
def misorientation(self):
+ """Calculate misorientation angle and axis between neighbouring grains.
+
+ Returns
+ -------
+ tuple[float, numpy.ndarray]
+ Misorientation angle (radians) and unit rotation axis.
+
+ """
mis_ori, minSymm = self.grain1.ref_ori.mis_ori(
self.grain2.ref_ori, self.ebsdMap.crystal_sym, return_quat=2
)
diff --git a/defdap/experiment.py b/defdap/experiment.py
index 5642aa0..9f5254d 100644
--- a/defdap/experiment.py
+++ b/defdap/experiment.py
@@ -19,19 +19,49 @@
class Experiment(object):
+ """Container for map increments and frame transformations."""
+
def __init__(self):
+ """Initialise an empty experiment."""
self.frame_relations = {}
self.increments = []
def __getitem__(self, key):
+ """Return an increment by index."""
return self.increments[key]
def add_increment(self, **kwargs):
+ """Create and append a new increment.
+
+ Parameters
+ ----------
+ **kwargs
+ Metadata stored on the increment.
+
+ Returns
+ -------
+ Increment
+ Newly created increment.
+
+ """
inc = Increment(self, **kwargs)
self.increments.append(inc)
return inc
def iter_over_maps(self, map_name):
+ """Iterate over increments containing a named map.
+
+ Parameters
+ ----------
+ map_name : str
+ Map name to look up.
+
+ Yields
+ ------
+ tuple[int, object]
+ Increment index and map object.
+
+ """
for i, inc in enumerate(self.increments):
map_obj = inc.maps.get(map_name)
if map_obj is None:
@@ -39,9 +69,30 @@ def iter_over_maps(self, map_name):
yield i, map_obj
def link_frames(self, frame_1, frame_2, transform_props):
+ """Store transformation properties between two frames."""
self.frame_relations[(frame_1, frame_2)] = transform_props
def get_frame_transform(self, frame_1, frame_2):
+ """Estimate the transform mapping ``frame_1`` to ``frame_2``.
+
+ Parameters
+ ----------
+ frame_1 : Frame
+ Source frame.
+ frame_2 : Frame
+ Target frame.
+
+ Returns
+ -------
+ skimage.transform._geometric.GeometricTransform
+ Estimated transform object.
+
+ Raises
+ ------
+ ValueError
+ If frames are not linked or relations are inconsistent.
+
+ """
transform_lookup = {
'piecewise_affine': tf.PiecewiseAffineTransform,
'projective': tf.ProjectiveTransform,
@@ -76,7 +127,7 @@ def get_frame_transform(self, frame_1, frame_2):
return transform
def warp_image(self, map_data, frame_1, frame_2, crop=True, **kwargs):
- """Warps a map to the DIC frame.
+ """Warp image data from ``frame_1`` into ``frame_2``.
Parameters
----------
@@ -88,9 +139,9 @@ def warp_image(self, map_data, frame_1, frame_2, crop=True, **kwargs):
All other arguments passed to :func:`skimage.transform.warp`.
Returns
- ----------
+ -------
numpy.ndarray
- Map (i.e. EBSD map data) warped to the DIC frame.
+ Warped map data.
"""
transform = self.get_frame_transform(frame_2, frame_1)
@@ -130,6 +181,25 @@ def warp_lines(self, lines, frame_1, frame_2):
return lines
def warp_points(self, points_img, frame_1, frame_2, **kwargs):
+ """Warp point-image data between frames and return point coordinates.
+
+ Parameters
+ ----------
+ points_img : numpy.ndarray
+ Binary/float image containing points to warp.
+ frame_1 : Frame
+ Source frame.
+ frame_2 : Frame
+ Target frame.
+ **kwargs
+ Additional keyword arguments passed to ``warp_image``.
+
+ Returns
+ -------
+ iterator
+ Iterator of ``(x, y)`` point coordinates in the target frame.
+
+ """
input_shape = np.array(points_img.shape)
points_img = self.warp_image(points_img, frame_1, frame_2, crop=False,
**kwargs)
@@ -154,8 +224,19 @@ def warp_points(self, points_img, frame_1, frame_2, **kwargs):
class Increment(object):
- # def __init__(self, experiment, **kwargs):
+ """A single experiment increment containing one or more maps."""
+
def __init__(self, experiment, **kwargs):
+ """Initialise an increment.
+
+ Parameters
+ ----------
+ experiment : Experiment
+ Parent experiment.
+ **kwargs
+ Increment metadata.
+
+ """
self.maps = {}
# ex: (name, map, frame)
@@ -166,11 +247,15 @@ def __init__(self, experiment, **kwargs):
self.metadata = kwargs
def add_map(self, name, map_obj):
+ """Add a named map object to this increment."""
self.maps[name] = map_obj
class Frame(object):
+ """Frame containing homologous points for map registration."""
+
def __init__(self):
+ """Initialise an empty frame."""
# self.maps = []
self.homog_points = []
diff --git a/defdap/file_readers.py b/defdap/file_readers.py
index 123fd9b..4a57859 100644
--- a/defdap/file_readers.py
+++ b/defdap/file_readers.py
@@ -28,9 +28,7 @@
class EBSDDataLoader(ABC):
- """Class containing methods for loading and checking EBSD data
-
- """
+ """Base class for loading and validating EBSD datasets."""
def __init__(self) -> None:
# required metadata
self.loaded_metadata = {
@@ -87,6 +85,7 @@ def check_metadata(self) -> None:
assert type(phase) is Phase
def check_data(self) -> None:
+ """Validate the shape of required loaded EBSD data arrays."""
shape = self.loaded_metadata['shape']
assert self.loaded_data.phase.shape == shape
@@ -100,13 +99,12 @@ def load(self, file_name: pathlib.Path) -> None:
class OxfordTextLoader(EBSDDataLoader):
def load(self, file_name: pathlib.Path) -> None:
- """ Read an Oxford Instruments .ctf file, which is a HKL single
- orientation file.
+ """Read an Oxford Instruments ``.ctf`` orientation file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
# open data file and read in metadata
@@ -236,12 +234,12 @@ def parse_phase() -> Phase:
class EdaxAngLoader(EBSDDataLoader):
def load(self, file_name: pathlib.Path) -> None:
- """ Read an EDAX .ang file.
+ """Read an EDAX ``.ang`` file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
# open data file and read in metadata
@@ -347,6 +345,24 @@ def load(self, file_name: pathlib.Path) -> None:
@staticmethod
def parse_phase(lines) -> Phase:
+ """Parse phase metadata lines from an EDAX ``.ang`` header.
+
+ Parameters
+ ----------
+ lines : list of str
+ Header lines describing a single phase.
+
+ Returns
+ -------
+ Phase
+ Parsed phase definition.
+
+ Raises
+ ------
+ ValueError
+ If an unsupported crystal symmetry is encountered.
+
+ """
for line in lines:
line = line.split()
@@ -382,8 +398,8 @@ def load(self, file_name: pathlib.Path) -> None:
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
self.load_oxford_cpr(file_name)
@@ -396,8 +412,8 @@ def load_oxford_cpr(self, file_name: pathlib.Path) -> None:
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
comment_char = ';'
@@ -505,12 +521,12 @@ def parse_line(line: str, group_dict: Dict) -> None:
self.data_format = np.dtype(data_format)
def load_oxford_crc(self, file_name: pathlib.Path) -> None:
- """Read binary EBSD data from an Oxford Instruments .crc file
+ """Read binary EBSD data from an Oxford Instruments ``.crc`` file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
shape = self.loaded_metadata['shape']
@@ -577,7 +593,7 @@ def load(self, data_dict: Dict[str, Any]) -> None:
Parameters
----------
- data_dict
+ data_dict : dict
Dictionary with keys:
'step_size'
'phases'
@@ -603,9 +619,7 @@ def load(self, data_dict: Dict[str, Any]) -> None:
class DICDataLoader(ABC):
- """Class containing methods for loading and checking HRDIC data
-
- """
+ """Base class for loading and validating DIC datasets."""
def __init__(self, file_type : str = '') -> None:
self.file_type = file_type
self.loaded_metadata = {
@@ -656,8 +670,7 @@ def checkMetadata(self) -> None:
return
def check_data(self) -> None:
- """ Calculate size of map from loaded data and check it matches
- values from metadata.
+ """Validate DIC coordinate spacing and map shape against metadata.
"""
# check binning
@@ -687,12 +700,12 @@ def load(self, file_name: pathlib.Path) -> None:
class DavisLoader(DICDataLoader):
def load(self, file_name: pathlib.Path) -> None:
- """ Load from Davis .txt file.
+ """Load a DaVis ``.txt`` displacement file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
if not file_name.is_file():
@@ -724,12 +737,12 @@ def load(self, file_name: pathlib.Path) -> None:
@staticmethod
def load_davis_image_data(file_name: pathlib.Path) -> np.ndarray:
- """ A .txt file from DaVis containing a 2D image
+ """Load a DaVis ``.txt`` file containing a 2D image.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
Returns
-------
@@ -748,12 +761,12 @@ def load_davis_image_data(file_name: pathlib.Path) -> np.ndarray:
class OpenPivTextLoader(DICDataLoader):
def load(self, file_name: pathlib.Path) -> None:
- """ Load from Open PIV .txt file.
+ """Load an OpenPIV .txt file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
if not file_name.is_file():
@@ -798,12 +811,12 @@ def load(self, file_name: pathlib.Path) -> None:
class OpenPivBinaryLoader(DICDataLoader):
def load(self, file_name: pathlib.Path) -> None:
- """ Load from Open PIV .npz file.
+ """Load an OpenPIV binary ``.npz`` file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
if not file_name.is_file():
@@ -834,12 +847,12 @@ def load(self, file_name: pathlib.Path) -> None:
class PyValeLoader(DICDataLoader):
def load(self, file_name: pathlib.Path) -> None:
- """ Load from PyVale csv or binary file.
+ """Load a PyVale CSV or binary file.
Parameters
----------
- file_name
- Path to file
+ file_name : pathlib.Path
+ Path to file.
"""
if not file_name.is_file():
@@ -921,16 +934,16 @@ def read_until_string(
Parameters
----------
- file
+ file : TextIO
An open python text file object.
- term_string
+ term_string : str
String to terminate reading.
- comment_char
+ comment_char : str, optional
Character at start of a comment line to ignore.
- line_process
+ line_process : callable, optional
Function to apply to each line when loaded.
- exact
- A line must exactly match `termString` to stop.
+ exact : bool, optional
+ If ``True``, a line must exactly match ``term_string`` to stop.
Returns
-------
diff --git a/defdap/file_writers.py b/defdap/file_writers.py
index d7c85a6..49b0bf8 100644
--- a/defdap/file_writers.py
+++ b/defdap/file_writers.py
@@ -22,6 +22,8 @@
class EBSDDataWriter(object):
+ """Base class for writing EBSD data to supported file formats."""
+
def __init__(self) -> None:
self.metadata = {
'shape': (0, 0),
@@ -37,7 +39,7 @@ def __init__(self) -> None:
self.data_format = None
@staticmethod
- def get_writer(datatype: str) -> "Type[EBSDDataLoader]":
+ def get_writer(datatype: str) -> "Type[EBSDDataWriter]":
if datatype is None:
datatype = "OxfordText"
@@ -49,16 +51,20 @@ def get_writer(datatype: str) -> "Type[EBSDDataLoader]":
class OxfordTextWriter(EBSDDataWriter):
def write(self, file_name: str, file_dir: str = "") -> None:
- """ Write an Oxford Instruments .ctf file, which is a HKL single
- orientation file.
+ """Write an Oxford Instruments ``.ctf`` file.
Parameters
----------
- file_name
+ file_name : str
File name.
- file_dir
+ file_dir : str, optional
Path to file.
+ Raises
+ ------
+ FileExistsError
+ If the destination file already exists.
+
"""
# check output file
diff --git a/defdap/hrdic.py b/defdap/hrdic.py
index edf939c..ca49e10 100755
--- a/defdap/hrdic.py
+++ b/defdap/hrdic.py
@@ -56,7 +56,7 @@ class Map(base.Map):
ydim : int
Size of map along y (from header).
shape : tuple
- Size of map (after cropping, like *Dim).
+ Size of map (after cropping, like ``*Dim``).
corrVal : numpy.ndarray
Correlation value.
ebsd_map : defdap.ebsd.Map
@@ -178,10 +178,12 @@ def __init__(self, *args, **kwargs):
@property
def original_shape(self):
+ """Original map shape before cropping as ``(y, x)``."""
return self.ydim, self.xdim
@property
def crystal_sym(self):
+ """Crystal symmetry of the linked EBSD map."""
return self.ebsd_map.crystal_sym
@report_progress("loading HRDIC data")
@@ -217,7 +219,7 @@ def load_data(self, file_name, data_type=None):
f"sub-window size: {self.binning} x {self.binning} pixels)")
def load_corr_val_data(self, file_name, data_type=None):
- """Load correlation value for DIC data
+ """Load correlation value map for the DIC data.
Parameters
----------
@@ -243,7 +245,7 @@ def load_corr_val_data(self, file_name, data_type=None):
"Dimensions of imported data and dic data do not match"
def retrieve_name(self):
- """Gets the first name assigned to the a map, as a string
+ """Get the first variable name bound to this map instance.
"""
for fi in reversed(inspect.stack()):
@@ -274,7 +276,7 @@ def scale(self):
return self.bse_scale * self.binning
def print_stats_table(self, percentiles, components):
- """Print out a statistics table for a DIC map
+ """Print a statistics table for selected DIC map components.
Parameters
----------
@@ -363,9 +365,15 @@ def crop(self, map_data, binning=None):
Parameters
----------
map_data : numpy.ndarray
- Bap data to crop.
- binning : int
- True if mapData is binned i.e. binned BSE pattern.
+ Map data to crop.
+ binning : int, optional
+ Scale factor applied to crop distances (for binned data).
+
+ Returns
+ -------
+ numpy.ndarray
+ Cropped map data.
+
"""
binning = 1 if binning is None else binning
@@ -407,12 +415,12 @@ def check_ebsd_linked(self):
"""Check if an EBSD map has been linked.
Returns
- ----------
+ -------
bool
Returns True if EBSD map linked.
Raises
- ----------
+ ------
Exception
If EBSD map not linked.
@@ -422,7 +430,7 @@ def check_ebsd_linked(self):
return True
def warp_to_dic_frame(self, map_data, **kwargs):
- """Warps a map to the DIC frame.
+ """Warp map data into the DIC frame.
Parameters
----------
@@ -432,7 +440,7 @@ def warp_to_dic_frame(self, map_data, **kwargs):
All other arguments passed to :func:`defdap.experiment.Experiment.warp_map`.
Returns
- ----------
+ -------
numpy.ndarray
Map (i.e. EBSD map data) warped to the DIC frame.
@@ -501,7 +509,18 @@ def calc_mask(self, mask=None, dilation=0):
return mask
def mask(self, map_data):
- """ Values set to False in mask will be set to nan in map.
+ """Apply the current mask to map data.
+
+ Parameters
+ ----------
+ map_data : numpy.ndarray
+ Data to mask.
+
+ Returns
+ -------
+ numpy.ndarray or numpy.ma.MaskedArray
+ Input data if no mask is set, otherwise masked data.
+
"""
if self.data.mask is None:
return map_data
@@ -518,7 +537,8 @@ def set_pattern(self, img_path, window_size):
Path to image.
window_size : int
Size of pixel in pattern image relative to pixel size of DIC data
- i.e 1 means they are the same size and 2 means the pixels in
+
+            i.e. 1 means they are the same size and 2 means the pixels in
the pattern are half the size of the dic data.
"""
@@ -527,6 +547,22 @@ def set_pattern(self, img_path, window_size):
self.data['pattern', 'binning'] = window_size
def load_pattern(self):
+ """Load and validate the linked pattern image. Set a pattern image with
+ :func:`defdap.hrdic.Map.set_pattern`.
+
+ Returns
+ -------
+ numpy.ndarray
+ Pattern image array.
+
+ Raises
+ ------
+ FileNotFoundError
+ If no pattern path has been configured.
+ ValueError
+ If image dimensions do not match expected binned size.
+
+ """
print('Loading img')
path = self.data.get_metadata('pattern', 'path')
binning = self.data.get_metadata('pattern', 'binning', 1)
@@ -740,15 +776,17 @@ class Grain(base.Grain):
EBSD map that this DIC grain belongs to.
points_list : numpy.ndarray
Start and end points for lines drawn using defdap.inspector.GrainInspector.
- groups_list :
+ groups_list : list
Groups, angles and slip systems detected for
lines drawn using defdap.inspector.GrainInspector.
-
data : defdap.utils.Datastore
Must contain after creating:
+
point : list of tuples
(x, y) in cropped map
+
Generated data:
+ None
Derived data:
Map data to list data from the map the grain is part of
@@ -807,7 +845,7 @@ def calc_slip_traces(self, slip_systems=None):
"""Calculates list of slip trace angles based on EBSD grain orientation.
Parameters
- -------
+ ----------
slip_systems : defdap.crystal.SlipSystem, optional
"""
@@ -826,7 +864,7 @@ def calc_slip_bands(self, grain_map_data, thres=None, min_dist=None):
Minimum angle between bands.
Returns
- ----------
+ -------
list(float)
Detected slip band angles
@@ -875,13 +913,28 @@ def calc_slip_bands(self, grain_map_data, thres=None, min_dist=None):
class BoundarySet(object):
+ """Boundary points and line segments represented in DIC coordinates."""
+
def __init__(self, dic_map, points, lines):
+ """Initialise a boundary set.
+
+ Parameters
+ ----------
+ dic_map : defdap.hrdic.Map
+ Parent DIC map.
+ points : iterable
+ Boundary point coordinates.
+ lines : iterable
+ Boundary line segments.
+
+ """
self.dic_map = dic_map
self.points = set(points)
self.lines = lines
@classmethod
def from_ebsd_boundaries(cls, dic_map, ebsd_boundaries):
+ """Create DIC frame boundaries by warping EBSD boundaries."""
if len(ebsd_boundaries.points) == 0:
return cls(dic_map, [], [])
@@ -896,10 +949,12 @@ def from_ebsd_boundaries(cls, dic_map, ebsd_boundaries):
return cls(dic_map, points, lines)
def _image(self, points):
+ """Convert boundary points to a boolean image."""
image = np.zeros(self.dic_map.shape, dtype=bool)
image[tuple(zip(*points))[::-1]] = True
return image
@property
def image(self):
+ """Boolean image of all boundary points."""
return self._image(self.points)
diff --git a/defdap/inspector.py b/defdap/inspector.py
index 8a77a39..e47408e 100644
--- a/defdap/inspector.py
+++ b/defdap/inspector.py
@@ -26,29 +26,27 @@
class GrainInspector:
- """
- Class containing the interactive grain inspector tool for slip trace analysis
- and relative displacement ratio analysis.
-
- """
+ """Interactive tool for slip-trace and relative displacement analysis."""
def __init__(self,
selected_dic_map: 'hrdic.Map',
vmax: float,
correction_angle: float = 0,
rdr_line_length: int = 3):
- """
+ """Initialise the grain inspector.
Parameters
----------
- selected_dic_map
+ selected_dic_map : hrdic.Map
DIC map to run grain inspector on.
- vmax
+ vmax : float
Maximum effective shear strain in colour scale.
- correction_angle
+ correction_angle : float, optional
Angle (in degrees) to subtract from drawn line angle.
- rdr_line_length
- Length on lines perpendicular to slip trace (can be any odd number above default 3).
+ rdr_line_length : int, optional
+ Length of lines perpendicular to the slip trace used for RDR
+ calculation. Can be any odd number greater than or equal to 3.
+
"""
# Initialise some values
self.grain_id = 0
@@ -117,12 +115,14 @@ def __init__(self,
def goto_grain(self,
event: int,
plot):
- """ Go to a specified grain ID.
+ """Go to a specified grain ID.
Parameters
----------
- event
+ event : int
Grain ID to go to.
+ plot
+ Plot callback argument.
"""
# Go to grain ID specified in event
@@ -135,12 +135,14 @@ def goto_grain(self,
def save_line(self,
event: np.ndarray,
plot):
- """ Save the start point, end point and angle of drawn line into the grain.
+ """Save a drawn line to the selected grain.
Parameters
----------
- event
- Start x, start y, end x, end y point of line passed from drawn line.
+ event : numpy.ndarray
+ Start and end coordinates passed from the drawn line callback.
+ plot
+ Plot callback argument.
"""
@@ -166,16 +168,13 @@ def save_line(self,
self.redraw_line()
def group_lines(self,
- grain: 'defdap.hrdic.Grain' = None):
- """
- Group the lines drawn in the current grain item using a mean shift algorithm,
- save the average angle and then detect the active slip planes.
-
+ grain: 'hrdic.Grain' = None):
+ """Group drawn lines by angle and detect candidate slip planes.
groups_list is a list of line groups: [id, angle, [slip plane id], [angular deviation]
Parameters
----------
- grain
+ grain : hrdic.Grain, optional
Grain for which to group the slip lines.
"""
@@ -221,9 +220,7 @@ def group_lines(self,
def clear_all_lines(self,
event,
plot):
- """ Clear all lines in a given grain.
-
- """
+ """Clear all saved lines and groups for the selected grain."""
self.selected_dic_grain.points_list = []
self.selected_dic_grain.groups_list = []
@@ -232,12 +229,14 @@ def clear_all_lines(self,
def remove_line(self,
event: int,
plot):
- """ Remove single line [runs after submitting a text box].
+ """Remove a single saved line.
Parameters
----------
- event
+ event : int
Line ID to remove.
+ plot
+ Plot callback argument.
"""
# Remove single line
@@ -246,9 +245,7 @@ def remove_line(self,
self.redraw()
def redraw(self):
- """Draw items which need to be redrawn when changing grain ID.
-
- """
+ """Redraw the grain inspector for the currently selected grain."""
# Plot max shear for grain
self.max_shear_axis.clear()
@@ -279,10 +276,7 @@ def redraw(self):
self.redraw_line()
def redraw_line(self):
- """
- Draw items which need to be redrawn when adding a line.
-
- """
+ """Redraw line, group, and slip-trace overlays for the grain."""
# Write lines text and draw lines
title_text = 'List of lines'
lines_text = 'ID x0 y0 x1 y1 Angle Group\n' \
@@ -336,12 +330,14 @@ def redraw_line(self):
def run_rdr_group(self,
event: int,
plot):
- """ Run RDR on a specified group, upon submitting a text box.
+ """Run RDR for a specified line group.
Parameters
----------
- event
+ event : int
Group ID specified from text box.
+ plot
+ Plot callback argument.
"""
# Run RDR for group of lines
@@ -352,9 +348,7 @@ def run_rdr_group(self,
def batch_run_sta(self,
event,
plot):
- """ Run slip trace analysis on all grains which hve slip trace lines drawn.
-
- """
+ """Run slip-trace analysis for all grains with saved lines."""
# Print header
print("Grain\tEul1\tEul2\tEul3\tMaxSF\tGroup\tAngle\tSystem\tDev\tRDR")
@@ -375,16 +369,16 @@ def calc_rdr(self,
grain,
group: int,
show_plot: bool = True):
- """ Calculates the relative displacement ratio for a given grain and group.
+ """Calculate the relative displacement ratio for a grain group.
Parameters
----------
grain
DIC grain to run RDR on.
- group
- group ID to run RDR on.
- show_plot
- if True, show plot window.
+ group : int
+ Group ID to run RDR on.
+ show_plot : bool, optional
+ If ``True``, show the RDR plot window.
"""
@@ -443,25 +437,24 @@ def plot_rdr(self,
x_list: List[List[int]],
y_list: List[List[int]],
lin_reg_result: List):
- """
- Plot rdr figure, including location of perpendicular lines and scatter plot of ucentered vs vcentered.
+ """Plot the RDR calculation summary for a group of lines in a grain.
Parameters
----------
grain
DIC grain to plot.
- group
+ group : int
Group ID to plot.
- u_list
- List of ucentered values.
- v_list
- List of vcentered values.
- x_list
- List of all x values.
- y_list
- List of all y values.
+ u_list : list of float
+ List of centred ``u`` values.
+ v_list : list of float
+ List of centred ``v`` values.
+ x_list : list of list of int
+ Sampled x coordinates.
+ y_list : list of list of int
+ Sampled y coordinates.
lin_reg_result
- Results from linear regression of ucentered vs vcentered
+ Linear regression result for centred ``u`` against centred ``v``.
{slope, intercept, rvalue, pvalue, stderr}.
"""
@@ -590,22 +583,33 @@ def plot_rdr(self,
def update_filename(self,
event: str,
plot):
- """ Update class variable filename, based on text input from textbox handler.
+ """Update the output filename from textbox input.
- event:
- Text in textbox.
+ Parameters
+ ----------
+ event : str
+ Text entered in the textbox.
+ plot
+ Plot callback argument.
"""
self.filename = event
- def save_file(self,
- event,
- plot):
- """ Save a file which contains definitions of slip lines drawn in grains
- [(x0, y0, x1, y1), angle, groupID]
- and groups of lines, defined by an average angle and identified sip plane
- [groupID, angle, [slip plane id(s)], [angular deviation(s)]]
+ def save_file(self, event, plot):
+ """Save drawn line and group definitions to a text file.
+
+ Lines drawn are saved in the following format:
+ [(x0, y0, x1, y1), angle, groupID].
+ Groups of lines are saved in the following format:
+ [groupID, angle, [slip plane id(s)], [angular deviation(s)]].
+
+ Parameters
+ ----------
+ event
+ Plot callback event argument.
+ plot
+ Plot callback argument.
"""
@@ -630,10 +634,14 @@ def save_file(self,
def load_file(self,
event,
plot):
- """ Load a file which contains definitions of slip lines drawn in grains
- [(x0, y0, x1, y1), angle, groupID]
- and groups of lines, defined by an average angle and identified sip plane
- [groupID, angle, [slip plane id(s)], [angular deviation(s)]]
+ """Load drawn line and group definitions from a text file.
+
+ Parameters
+ ----------
+ event
+ Plot callback event argument.
+ plot
+ Plot callback argument.
"""
diff --git a/defdap/plotting.py b/defdap/plotting.py
index d490bfd..cd5536a 100644
--- a/defdap/plotting.py
+++ b/defdap/plotting.py
@@ -36,9 +36,7 @@
class Plot(object):
- """ Class used for creating and manipulating plots.
-
- """
+ """Base class for creating and manipulating plots."""
def __init__(self, ax=None, ax_params={}, fig=None, make_interactive=False,
title=None, **kwargs):
@@ -87,23 +85,34 @@ def check_interactive(self):
raise Exception("Plot must be interactive")
def add_event_handler(self, eventName, eventHandler):
+ """Register an interactive matplotlib event handler.
+
+ Parameters
+ ----------
+ eventName : str
+ Matplotlib event name.
+ eventHandler : callable
+ Callback receiving ``(event, plot)``.
+
+ """
self.check_interactive()
self.fig.canvas.mpl_connect(eventName, lambda e: eventHandler(e, self))
def add_axes(self, loc, proj='2d'):
- """Add axis to current plot
+ """Add an axis to the current plot.
Parameters
----------
- loc
+ loc : tuple
Location of axis.
- proj : str, {2d, 3d}
+ proj : str, {'2d', '3d'}
2D or 3D projection.
Returns
-------
- matplotlib.Axes.axes
+ matplotlib.axes.Axes
+ Created axes object.
"""
if proj == '2d':
@@ -151,7 +160,7 @@ def add_text_box(self, label, submit_handler=None, change_handler=None, loc=(0.8
Returns
-------
- matplotlotlib.widgets.TextBox
+ matplotlib.widgets.TextBox
"""
self.check_interactive()
@@ -192,13 +201,13 @@ def add_arrow(self, start_end, persistent=False, clear_previous=True, label=None
Parameters
----------
- start_end: 4-tuple
+ start_end : tuple[float, float, float, float]
Starting (x, y), Ending (x, y).
- persistent :
+ persistent : bool, optional
If persistent, do not clear arrow with clearPrev.
- clear_previous :
+ clear_previous : bool, optional
Clear all non-persistent arrows.
- label
+ label : str, optional
Label to place near arrow.
"""
@@ -306,7 +315,7 @@ def clear(self):
self.draw()
def draw(self):
- """Draw plot
+ """Draw the plot.
"""
self.fig.canvas.draw()
@@ -346,6 +355,7 @@ def __init__(self, calling_map, fig=None, ax=None, ax_params={},
self.set_empty_state()
def set_empty_state(self):
+ """Reset map-plot layer and annotation state."""
self.img_layers = []
self.highlights_layer_id = None
self.points_layer_ids = []
@@ -425,11 +435,6 @@ def add_grain_boundaries(self, kind="pixel", boundaries=None, colour=None,
of coordinates representing the start and end of each boundary
segment. If not provided the boundaries are loaded from the
calling map.
-
- boundaries : various, defdap.ebsd.BoundarySet
- Boundaries to plot. If not provided the boundaries are loaded from
- the calling map.
-
colour : various
One of:
- Colour of all boundaries as a string (only option pixel kind)
@@ -444,8 +449,8 @@ def add_grain_boundaries(self, kind="pixel", boundaries=None, colour=None,
Returns
-------
- Various :
- matplotlib.image.AxesImage if type is pixel
+ matplotlib.artist.Artist
+ Added image or line collection artist.
"""
if colour is None:
@@ -737,6 +742,7 @@ def __init__(self, calling_grain, fig=None, ax=None, ax_params={},
self.set_empty_state()
def set_empty_state(self):
+ """Reset grain-plot layer state."""
self.img_layers = []
self.ax.set_xticks([])
@@ -969,9 +975,7 @@ def create(
class PolePlot(Plot):
- """ Class for creating an inverse pole figure plot.
-
- """
+ """Plot class for inverse pole figure visualisation."""
def __init__(self, plot_type, crystal_sym, projection=None,
fig=None, ax=None, ax_params={}, make_interactive=False,
@@ -1251,6 +1255,26 @@ def add_legend(
@staticmethod
def _validateProjection(projection_in, validate_default=False):
+ """Validate and resolve a projection specification.
+
+ Parameters
+ ----------
+ projection_in : str or callable or None
+ Projection name/function.
+ validate_default : bool, optional
+ If ``True``, validate only explicit defaults.
+
+ Returns
+ -------
+ callable
+ Projection function.
+
+ Raises
+ ------
+ ValueError
+ If no valid default projection is available.
+
+ """
if validate_default:
default_projection = None
else:
@@ -1351,21 +1375,19 @@ def lambert_project(*args):
class HistPlot(Plot):
- """ Class for creating a histogram.
-
- """
+ """Plot class for histogram visualisation."""
def __init__(self, plot_type="scatter", axes_type="linear", density=True, fig=None,
ax=None, ax_params={}, make_interactive=False, **kwargs):
- """Initialise a histogram plot
+ """Initialise a histogram plot.
Parameters
----------
- plot_type: str, {'scatter', 'bar', 'step'}
- Type of plot to use
+ plot_type : str, {'scatter', 'bar', 'step'}
+ Type of plot to use.
axes_type : str, {'linear', 'logx', 'logy', 'loglog', 'None'}, optional
If 'log' is specified, logarithmic scale is used.
- density :
+ density : bool, optional
If true, histogram is normalised such that the integral sums to 1.
fig : matplotlib.figure.Figure
Matplotlib figure to plot on.
@@ -1411,7 +1433,7 @@ def __init__(self, plot_type="scatter", axes_type="linear", density=True, fig=No
def add_hist(self, hist_data, bins=100, range=None, line='o',
label=None, **kwargs):
- """Add a histogram to the current plot
+ """Add a histogram to the current plot.
Parameters
----------
@@ -1482,14 +1504,14 @@ def create(
ax_params :
Passed to defdap.plotting.Plot as ax_params.
plot : defdap.plotting.HistPlot
- Plot where histgram is created. If none, a new plot is created.
+ Plot where histogram is created. If ``None``, a new plot is created.
make_interactive : bool, optional
If true, make plot interactive.
- plot_type: str, {'scatter', 'bar', 'barfilled', 'step'}
- Type of plot to use
+ plot_type : str, {'scatter', 'bar', 'step'}
+ Type of plot to use.
axes_type : str, {'linear', 'logx', 'logy', 'loglog', 'None'}, optional
If 'log' is specified, logarithmic scale is used.
- density :
+ density : bool, optional
If true, histogram is normalised such that the integral sums to 1.
bins : int
Number of bins to use for histogram.
@@ -1518,9 +1540,7 @@ def create(
class CrystalPlot(Plot):
- """ Class for creating a 3D plot for plotting unit cells.
-
- """
+ """Plot class for 3D unit-cell visualisation."""
def __init__(self, fig=None, ax=None, ax_params={},
make_interactive=False, **kwargs):
@@ -1558,7 +1578,7 @@ def __init__(self, fig=None, ax=None, ax_params={},
)
def add_verts(self, verts, **kwargs):
- """Plots planes, defined by the vertices provided.
+ """Plot planes defined by the provided vertices.
Parameters
----------
diff --git a/defdap/quat.py b/defdap/quat.py
index d3a8d00..c5a9e19 100755
--- a/defdap/quat.py
+++ b/defdap/quat.py
@@ -18,7 +18,12 @@
from defdap import plotting
from defdap import defaults
-from typing import Union, Tuple, List, Optional
+from typing import Union, Tuple, List, Optional, TYPE_CHECKING
+
+if TYPE_CHECKING:
+ import matplotlib.figure
+ import matplotlib.axes
+ import defdap.crystal as crystal
class Quat(object):
@@ -36,9 +41,9 @@ def __init__(self, *args, allow_southern: Optional[bool] = False) -> None:
Parameters
----------
*args
- Variable length argument list.
- allow_southern
- if False, move quat to northern hemisphere.
+ Either four quaternion coefficients or one length-4 iterable.
+ allow_southern : bool, optional
+ If ``False``, move the quaternion to the northern hemisphere.
"""
# construct with array of quat coefficients
@@ -66,11 +71,11 @@ def from_euler_angles(cls, ph1: float, phi: float, ph2: float) -> 'Quat':
Parameters
----------
- ph1
+ ph1 : float
First Euler angle, rotation around Z in radians.
- phi
+ phi : float
Second Euler angle, rotation around new X in radians.
- ph2
+ ph2 : float
Third Euler angle, rotation around new Z in radians.
Returns
@@ -97,9 +102,9 @@ def from_axis_angle(cls, axis: np.ndarray, angle: float) -> 'Quat':
Parameters
----------
- axis
+ axis : numpy.ndarray
Axis that the rotation is applied around.
- angle
+ angle : float
Magnitude of rotation in radians.
Returns
@@ -254,11 +259,11 @@ def __mul__(self, right: 'Quat', allow_southern: bool = False) -> 'Quat':
raise TypeError("{:} - {:}".format(type(self), type(right)))
def dot(self, right: 'Quat') -> float:
- """ Calculate dot product between two quaternions.
+ """Calculate the dot product between two quaternions.
Parameters
----------
- right
+ right : Quat
Right hand quaternion.
Returns
@@ -332,7 +337,7 @@ def transform_vector(
vector: Union[Tuple, List, np.ndarray]
) -> np.ndarray:
"""
- Transforms vector by the quaternion. For passive EBSD quaterions
+ Transforms a vector by the quaternion. For passive EBSD quaternions
this is a transformation from sample space to crystal space.
Perform on conjugate of quaternion for crystal to sample. For a
quaternion representing a passive rotation from CS1 to CS2 and a
@@ -368,27 +373,25 @@ def mis_ori(
sym_group: str,
return_quat: Optional[int] = 0
) -> Tuple[float, 'Quat']:
- """
- Calculate misorientation angle between 2 orientations taking
- into account the symmetries of the crystal structure.
- Angle is 2*arccos(output).
+ """Calculate minimum misorientation between two quaternions,
+ accounting for crystal symmetry. The misorientation angle is
+ ``2 * arccos(m)``, where ``m`` is the minimum misorientation
+ value.
Parameters
----------
- right
+ right : Quat
Orientation to find misorientation to.
- sym_group
+ sym_group : str
Crystal type (cubic, hexagonal).
- return_quat
+ return_quat : int, optional
What to return: 0 for minimum misorientation, 1 for
symmetric equivalent with minimum misorientation, 2 for both.
Returns
-------
- float
- Minimum misorientation.
- defdap.quat.Quat
- Symmetric equivalent orientation with minimum misorientation.
+ float or Quat or tuple[float, Quat]
+ Return type depends on ``return_quat``.
"""
if isinstance(right, type(self)):
@@ -448,19 +451,19 @@ def plot_ipf(
marker_size: Optional[float] = 40,
**kwargs
) -> 'plotting.PolePlot':
- """
- Plot IPF of orientation, with relation to specified sample direction.
+ """Plot the orientation on an inverse pole figure,
+ with relation to the specified sample direction.
Parameters
----------
- direction
+ direction : numpy.ndarray
Sample reference direction for IPF.
- sym_group
+ sym_group : str
Crystal type (cubic, hexagonal).
- projection
- Projection to use. Either string (stereographic or lambert)
- or a function.
- plot
+ projection : str or callable, optional
+ Projection to use. Either a string (``stereographic`` or
+ ``lambert``) or a function.
+ plot : defdap.plotting.Plot, optional
Defdap plot to plot on.
fig
Figure to plot on, if not provided the current
@@ -468,21 +471,26 @@ def plot_ipf(
ax
Axis to plot on, if not provided the current
active axis is used.
- make_interactive
- If true, make the plot interactive.
- plot_colour_bar : bool
- If true, plot a colour bar next to the map.
- clabel : str
+ plot_colour_bar : bool, optional
+ If ``True``, plot a colour bar next to the map.
+ clabel : str, optional
Label for the colour bar.
- marker_colour: str or list of str
+ make_interactive : bool, optional
+ If ``True``, make the plot interactive.
+ marker_colour : str or list of str, optional
Colour of markers (only used for half and half colouring,
otherwise use argument c).
- marker_size
+ marker_size : float, optional
Size of markers (only used for half and half colouring,
otherwise use argument s).
kwargs
All other arguments are passed to :func:`defdap.plotting.PolePlot.add_points`.
+ Returns
+ -------
+ defdap.plotting.PolePlot
+ Pole-figure plot object.
+
"""
plot_params = {'marker': '+'}
plot_params.update(kwargs)
@@ -513,7 +521,7 @@ def plot_ipf(
def plot_unit_cell(
self,
- crystal_structure: 'defdap.crystal.CrystalStructure',
+ crystal_structure: 'crystal.CrystalStructure',
OI: Optional[bool] = True,
plot: Optional['plotting.CrystalPlot'] = None,
fig: Optional['matplotlib.figure.Figure'] = None,
@@ -521,25 +529,30 @@ def plot_unit_cell(
make_interactive: Optional[bool] = False,
**kwargs
) -> 'plotting.CrystalPlot':
- """Plots a unit cell.
+ """Plot a unit cell for the current orientation.
Parameters
----------
crystal_structure
Crystal structure.
- OI
- True if using oxford instruments system.
- plot
+ OI : bool, optional
+ If ``True``, use the Oxford Instruments convention.
+ plot : defdap.plotting.CrystalPlot, optional
Plot object to plot to.
- fig
+ fig : matplotlib.figure.Figure, optional
Figure to plot on, if not provided the current active axis is used.
- ax
+ ax : matplotlib.axes.Axes, optional
Axis to plot on, if not provided the current active axis is used.
- make_interactive
- True to make the plot interactive.
+ make_interactive : bool, optional
+ If ``True``, make the plot interactive.
kwargs
All other arguments are passed to :func:`defdap.plotting.CrystalPlot.add_verts`.
+ Returns
+ -------
+ defdap.plotting.CrystalPlot
+ Crystal plot object.
+
"""
# Set default plot parameters then update with any input
plot_params = {}
@@ -592,11 +605,11 @@ def plot_unit_cell(
@staticmethod
def create_many_quats(eulerArray: np.ndarray) -> np.ndarray:
- """Create a an array of quats from an array of Euler angles.
+ """Create an array of quaternions from Euler angles.
Parameters
----------
- eulerArray
+ eulerArray : numpy.ndarray
Array of Bunge Euler angles of shape 3 x n x ... x m.
Returns
@@ -626,18 +639,19 @@ def create_many_quats(eulerArray: np.ndarray) -> np.ndarray:
@staticmethod
def multiply_many_quats(quats: List['Quat'], right: 'Quat') -> List['Quat']:
- """ Multiply all quats in a list of quats, by a single quat.
+ """Multiply a list of quaternions by a single quaternion.
Parameters
----------
- quats
+ quats : list[Quat]
List of quats to be operated on.
- right
+ right : Quat
Single quaternion to multiply with the list of quats.
Returns
-------
- list(defdap.quat.Quat)
+ list[Quat]
+ Resulting multiplied quaternions.
"""
quat_array = np.array([q.quat_coef for q in quats])
@@ -654,7 +668,7 @@ def multiply_many_quats(quats: List['Quat'], right: 'Quat') -> List['Quat']:
@staticmethod
def extract_quat_comps(quats: np.ndarray) -> np.ndarray:
- """Return a NumPy array of the provided quaternion components
+ """Return quaternion components as a NumPy array.
Input quaternions may be given as a list of Quat objects or any iterable
whose items have 4 components which map to the quaternion.
@@ -662,12 +676,12 @@ def extract_quat_comps(quats: np.ndarray) -> np.ndarray:
Parameters
----------
quats : numpy.ndarray(defdap.quat.Quat)
- A list of Quat objects to return the components of
+ Quaternions to extract components from.
Returns
-------
numpy.ndarray
- Array of quaternion components, shape (4, ..)
+ Array of quaternion components, shape ``(4, ...)``.
"""
quats = np.array(quats)
@@ -689,15 +703,16 @@ def calc_sym_eqvs(
----------
quats : numpy.ndarray(defdap.quat.Quat)
Array of quat objects.
- sym_group
+ sym_group : str
Crystal type (cubic, hexagonal).
- dtype
- Datatype used for calculation, defaults to `float`.
+ dtype : type, optional
+ Datatype used for calculation, defaults to ``float``.
Returns
-------
- quat_comps: numpy.ndarray, shape: (numSym x 4 x numQuats)
- Array containing all symmetrically equivalent quaternion components of input quaternions.
+ numpy.ndarray
+ Symmetrically equivalent quaternion components with shape
+ ``(num_sym, 4, num_quats)``.
"""
syms = Quat.sym_eqv(sym_group)
@@ -770,23 +785,23 @@ def calc_average_ori(
def calcMisOri(
quat_comps: np.ndarray,
ref_ori: 'Quat'
- ) -> Tuple[np.ndarray, 'Quat']:
+ ) -> Tuple[np.ndarray, np.ndarray]:
"""Calculate the misorientation between the quaternions and a reference quaternion.
Parameters
----------
quat_comps
Array containing all symmetrically equivalent quaternion components of given quaternions
- (shape: numSym x 4 x numQuats), can be calculated from quats with :func:`Quat.calc_sym_eqvs` .
+ (shape: numSym x 4 x numQuats), can be calculated from quats with :func:`Quat.calc_sym_eqvs`.
ref_ori
Reference orientation.
Returns
-------
- min_mis_oris : numpy.ndarray, len numQuats
+ min_mis_oris : numpy.ndarray
Minimum misorientation between quats and reference orientation.
- min_quat_comps : defdap.quat.Quat
- Quaternion components describing minimum misorientation between quats and reference orientation.
+ min_quat_comps : numpy.ndarray
+ Quaternion components describing minimum misorientation.
"""
mis_oris = np.empty((quat_comps.shape[0], quat_comps.shape[2]))
@@ -821,7 +836,7 @@ def polar_angles(x: np.ndarray, y: np.ndarray, z: np.ndarray):
Returns
-------
- float, float
+ tuple[numpy.ndarray, numpy.ndarray]
inclination angle and azimuthal angle (around z axis from x
in anticlockwise as per ISO).
@@ -851,11 +866,11 @@ def calc_ipf_colours(
----------
quats : numpy.ndarray(defdap.quat.Quat)
Array of quat objects.
- direction
+ direction : numpy.ndarray
Direction in sample space.
- sym_group
+ sym_group : str
Crystal type (cubic, hexagonal).
- dtype
+ dtype : type, optional
Data type to use for calculation.
Returns
@@ -964,17 +979,17 @@ def calc_fund_dirs(
Parameters
----------
- quats: array_like(defdap.quat.Quat)
+ quats : array_like[Quat]
Array of quat objects.
- direction
+ direction : numpy.ndarray
Direction in sample space.
- sym_group
+ sym_group : str
Crystal type (cubic, hexagonal).
- dtype
+ dtype : type, optional
Data type to use for calculation.
- triangle: str, optional
+ triangle : str, optional
Triangle convention to use for hexagonal symmetry (up, down). If None,
- defaults to the value in `defaults['ipf_triangle_convention']`.
+ defaults to the value in ``defaults['ipf_triangle_convention']``.
Returns
-------
diff --git a/defdap/utils.py b/defdap/utils.py
index 4e4bf6e..b67190c 100644
--- a/defdap/utils.py
+++ b/defdap/utils.py
@@ -23,13 +23,18 @@ def report_progress(message: str = ""):
Parameters
----------
- message
- Message to display (prefixed by 'Starting ', progress percentage
- and then 'Finished '
+ message : str, optional
+ Message to display (prefixed by ``Starting``, progress percentage,
+ and then ``Finished``).
+
+ Returns
+ -------
+ callable
+ Decorator wrapping a generator function that yields progress values.
References
----------
- Inspiration from :
+ Inspiration from:
https://gist.github.com/Garfounkel/20aa1f06234e1eedd419efe93137c004
"""
@@ -79,15 +84,16 @@ class Datastore(object):
Storage for data and metadata, keyed by data name. Each item is
a dict with at least a `data` key, all other items are metadata,
possibly including:
- type : str
- Type of data stored:
- `map` - at least a 2-axis array, trailing axes are spatial
- order : int
- Tensor order of the data
- unit : str
- Measurement unit the data is stored in
- plot_params : dict
- Dictionary of the default parameters used to plot
+
+ type : str
+ Type of data stored:
+ `map` - at least a 2-axis array, trailing axes are spatial
+ order : int
+ Tensor order of the data
+ unit : str
+ Measurement unit the data is stored in
+ plot_params : dict
+ Dictionary of the default parameters used to plot
_generators: dict
Methods to generate derived data, keyed by tuple of data names
that the method produces.
@@ -105,6 +111,14 @@ class Datastore(object):
@staticmethod
def generate_id():
+ """Generate a unique identifier for datastore grouping.
+
+ Returns
+ -------
+ uuid.UUID
+ Generated group identifier.
+
+ """
return uuid4()
def __init__(self, group_id=None, crop_func=None, mask_func=None):
@@ -137,7 +151,7 @@ def __contains__(self, key):
return key in self.keys()
def __getitem__(self, key):
- """Get data or metadata
+ """Get data or metadata.
Parameters
----------
@@ -206,15 +220,11 @@ def __setitem__(self, key, val):
self._store[key][attr] = val
def __getattr__(self, key):
- """Get data
-
- """
+ """Get data for attributes via datastore lookup."""
return self[key]
def __setattr__(self, key, val):
- """Set data of item that already exists.
-
- """
+ """Set known attributes or route unknown ones to datastore items."""
if key in self.__slots__:
super().__setattr__(key, val)
else:
@@ -239,6 +249,19 @@ def keys(self):
return keys
def lookup_derivative_keys(self, derivative):
+ """Return source keys whose metadata matches a derivative definition.
+
+ Parameters
+ ----------
+ derivative : dict
+ Derivative definition created by ``add_derivative``.
+
+ Returns
+ -------
+ list[str]
+ Matching source keys.
+
+ """
root_call = False
if Datastore._been_to is None:
root_call = True
@@ -262,6 +285,26 @@ def lookup_derivative_keys(self, derivative):
return matched_keys
def _get_derived_item(self, key, attr):
+ """Retrieve derived data or metadata for a key.
+
+ Parameters
+ ----------
+ key : str
+ Name of the requested derived item.
+ attr : str
+ Attribute to access, typically ``'data'`` or metadata name.
+
+ Returns
+ -------
+ any
+ Requested derived value.
+
+ Raises
+ ------
+ KeyError
+ If no matching derivative exists for ``key``.
+
+ """
for derivative in self._derivatives:
if key in self.lookup_derivative_keys(derivative):
break
@@ -346,6 +389,23 @@ def add_generator(self, keys, func, metadatas=None, **kwargs):
def add_derivative(self, datastore, derive_func, in_props=None,
out_props=None, pass_ref=False):
+ """Register or update a derived-data relationship.
+
+ Parameters
+ ----------
+ datastore : Datastore
+ Source datastore for derived values.
+ derive_func : callable
+ Function used to derive output values.
+ in_props : dict, optional
+ Metadata filters required on source items.
+ out_props : dict, optional
+ Metadata values exposed on derived items.
+ pass_ref : bool, optional
+ If ``True``, pass source key reference into ``derive_func``
+ instead of source data.
+
+ """
if in_props is None:
in_props = {}
if out_props is None:
@@ -411,7 +471,8 @@ def update(self, other, priority=None):
Parameters
----------
other : defdap.utils.Datastore
- priority : str
+ Datastore providing additional data items.
+ priority : str, optional
Which datastore to keep an item from if the same name exists
in both. Default is to prioritise `other`.
diff --git a/docs/Makefile b/docs/Makefile
index f22e981..043b44c 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -20,11 +20,6 @@ help:
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
docs:
- @echo "Building API documentation to '/source/defdap'"
- sphinx-apidoc -feMT ../defdap -o ./source/defdap
- @echo "Auto-generation of API documentation finished. " \
- "The generated files are in '/source/defdap'"
- @echo " "
@echo "Cleaning build folder"
make clean
@echo " "
diff --git a/docs/source/_static/IPF_down.png b/docs/source/_static/IPF_down.png
new file mode 100644
index 0000000..a0b4438
Binary files /dev/null and b/docs/source/_static/IPF_down.png differ
diff --git a/docs/source/_static/IPF_up.png b/docs/source/_static/IPF_up.png
new file mode 100644
index 0000000..1070c4e
Binary files /dev/null and b/docs/source/_static/IPF_up.png differ
diff --git a/docs/source/_static/IPF_up_down.png b/docs/source/_static/IPF_up_down.png
new file mode 100644
index 0000000..d2eff5c
Binary files /dev/null and b/docs/source/_static/IPF_up_down.png differ
diff --git a/docs/source/_static/custom-icons.js b/docs/source/_static/custom-icons.js
new file mode 100644
index 0000000..0c33d26
--- /dev/null
+++ b/docs/source/_static/custom-icons.js
@@ -0,0 +1,19 @@
+FontAwesome.library.add(
+ /**
+ * Custom icon definitions
+ *
+ * see https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/header-links.html#svg-image-icons
+ */
+ {
+ prefix: "fa-custom",
+ iconName: "pypi",
+ icon: [
+ 17.313,
+ 19.807,
+ [],
+ "e001",
+ // https://simpleicons.org/icons/pypi.svg
+ "m10.383 0.2-3.239 1.1769 3.1883 1.1614 3.239-1.1798zm-3.4152 1.2411-3.2362 1.1769 3.1855 1.1614 3.2369-1.1769zm6.7177 0.00281-3.2947 1.2009v3.8254l3.2947-1.1988zm-3.4145 1.2439-3.2926 1.1981v3.8254l0.17548-0.064132 3.1171-1.1347zm-6.6564 0.018325v3.8247l3.244 1.1805v-3.8254zm10.191 0.20931v2.3137l3.1777-1.1558zm3.2947 1.2425-3.2947 1.1988v3.8254l3.2947-1.1988zm-8.7058 0.45739c0.00929-1.931e-4 0.018327-2.977e-4 0.027485 0 0.25633 0.00851 0.4263 0.20713 0.42638 0.49826 1.953e-4 0.38532-0.29327 0.80469-0.65542 0.93662-0.36226 0.13215-0.65608-0.073306-0.65613-0.4588-6.28e-5 -0.38556 0.2938-0.80504 0.65613-0.93662 0.068422-0.024919 0.13655-0.038114 0.20156-0.039466zm5.2913 0.78369-3.2947 1.1988v3.8247l3.2947-1.1981zm-10.132 1.239-3.2362 1.1769 3.1883 1.1614 3.2362-1.1769zm6.7177 0.00213-3.2926 1.2016v3.8247l3.2926-1.2009zm-3.4124 1.2439-3.2947 1.1988v3.8254l3.2947-1.1988zm-6.6585 0.016195v3.8275l3.244 1.1805v-3.8254zm16.9 0.21143-3.2947 1.1988v3.8247l3.2947-1.1981zm-3.4145 1.2411-3.2926 1.2016v3.8247l3.2926-1.2009zm-3.4145 1.2411-3.2926 1.2016v3.8247l3.2926-1.2009zm-3.4124 1.2432-3.2947 1.1988v3.8254l3.2947-1.1988zm-6.6585 0.019027v3.8247l3.244 1.1805v-3.8254zm13.485 1.4497-3.2947 1.1988v3.8247l3.2947-1.1981zm-3.4145 1.2411-3.2926 1.2016v3.8247l3.2926-1.2009zm2.4018 0.38127c0.0093-1.83e-4 0.01833-3.16e-4 0.02749 0 0.25633 0.0085 0.4263 0.20713 0.42638 0.49826 1.97e-4 0.38532-0.29327 0.80469-0.65542 0.93662-0.36188 0.1316-0.65525-0.07375-0.65542-0.4588-1.95e-4 -0.38532 0.29328-0.80469 0.65542-0.93662 0.06842-0.02494 0.13655-0.03819 0.20156-0.03947zm-5.8142 0.86403-3.244 1.1805v1.4201l3.244 1.1805z",
+ ],
+ }
+);
diff --git a/docs/source/_static/favicon.png b/docs/source/_static/favicon.png
new file mode 100644
index 0000000..3edd1b1
Binary files /dev/null and b/docs/source/_static/favicon.png differ
diff --git a/docs/source/_templates/layout.html b/docs/source/_templates/layout.html
deleted file mode 100644
index c381a6b..0000000
--- a/docs/source/_templates/layout.html
+++ /dev/null
@@ -1,6 +0,0 @@
-{% extends "!layout.html" %}
-
- {% block menu %}
- {{ super() }}
- Index
- {% endblock %}
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 3d79157..ea032e1 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -6,41 +6,58 @@
import shutil
import sys
-# -- Path setup --------------------------------------------------------------
+# -----------------------------------------------------------------------------
+# Path setup
+# -----------------------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../')) # Reference the root directory so autodocs can find the python modules
+# -----------------------------------------------------------------------------
+# Generate 'how to use' page from example notebook
+# -----------------------------------------------------------------------------
+
# Copy the example notebook into the docs source
-shutil.copyfile('../../notebooks/example_notebook.ipynb', 'howtouse.ipynb')
+shutil.copyfile('../../notebooks/example_notebook.ipynb', 'userguide/howtouse.ipynb')
# Open file
-with open('howtouse.ipynb') as f:
+with open('userguide/howtouse.ipynb') as f:
old_text = f.read()
# change %matplotlib to inline
new_text = old_text.replace('%matplotlib tk', r'%matplotlib inline')
# change directory so that paths still work
-new_text = new_text.replace('../tests/data/', r'../../tests/data/')
+new_text = new_text.replace('../tests/data/', r'../../../tests/data/')
# Change title to 'How to use'
-new_text = new_text.replace('DefDAP Example notebook', r'How to use')
+new_text = new_text.replace('DefDAP Example notebook', r'Example notebook')
new_text = new_text.replace('This notebook', r'These pages')
# Write back to notebook
-with open('howtouse.ipynb', "w") as f:
+with open('userguide/howtouse.ipynb', "w") as f:
f.write(new_text)
-# -- Project information -----------------------------------------------------
+nbsphinx_allow_errors = True
+nbsphinx_execute = 'always'
+nbsphinx_kernel_name = 'python3'
+
+nbsphinx_prolog = """
+This page was built from the example_notebook Jupyter notebook available on `Github `_
+
+----
+"""
+
+# -----------------------------------------------------------------------------
+# Project information
+# -----------------------------------------------------------------------------
project = 'DefDAP'
-copyright = '2023, Mechanics of Microstructures Group at The University of Manchester'
+copyright = '2026, Mechanics of Microstructures Group at The University of Manchester'
author = 'Michael D. Atkinson, Rhys Thomas, João Quinta da Fonseca'
-
def get_version():
ver_path = '../../defdap/_version.py'
main_ns = {}
@@ -55,17 +72,16 @@ def get_version():
# The short X.Y version
version = '.'.join(release.split('.')[:2])
-# -- General configuration ---------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
+# -----------------------------------------------------------------------------
+# General configuration
+# -----------------------------------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
+ #'sphinx.ext.apidoc', # Need sphinx.ext.apidoc for this, which needs sphinx 8.2.3, which needs python 3.11
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
@@ -73,23 +89,11 @@ def get_version():
'sphinx.ext.napoleon',
'sphinx.ext.intersphinx',
'sphinx_autodoc_typehints',
- 'sphinx_rtd_theme',
- 'nbsphinx'
+ 'pydata_sphinx_theme',
+ 'nbsphinx',
+ 'sphinx_design'
]
-nbsphinx_allow_errors = True
-nbsphinx_execute = 'always'
-nbsphinx_kernel_name = 'python3'
-
-nbsphinx_prolog = """
-This page was built from the example_notebook Jupyter notebook available on `Github `_
-
-.. image:: https://mybinder.org/badge_logo.svg
- :target: https://mybinder.org/v2/gh/MechMicroMan/DefDAP/master?filepath=example_notebook.ipynb
-
-----
-"""
-
napoleon_use_param = True
# Add any paths that contain templates here, relative to this directory.
@@ -118,37 +122,62 @@ def get_version():
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
+# -----------------------------------------------------------------------------
+# Options for HTML output
+# -----------------------------------------------------------------------------
-# -- Options for HTML output -------------------------------------------------
+html_theme = "pydata_sphinx_theme"
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = "sphinx_rtd_theme"
+html_static_path = ["_static"]
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#
html_theme_options = {
- 'collapse_navigation': False,
- 'sticky_navigation': True,
- 'navigation_depth': 4,
- 'includehidden': True,
- 'titles_only': False
+ "icon_links": [
+ {
+ "name": "GitHub",
+ "url": "https://github.com/MechMicroMan/DefDAP",
+ "icon": "fa-brands fa-square-github",
+ "type": "fontawesome"
+ },
+ {
+ "name": "PyPI",
+ "url": "https://pypi.org/project/defdap",
+ "icon": "fa-custom fa-pypi", # defined in file `_static/custom-icons.js`
+ }
+ ],
+ "use_edit_page_button": True,
}
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_context = {
+ "github_user": "MechMicroMan",
+ "github_repo": "DefDAP",
+ "github_version": "master",
+ "doc_path": "docs/source",
+}
+
+html_js_files = [
+ ("custom-icons.js", {"defer": "defer"}),
+]
+
+html_copy_source = False
+html_favicon = '_static/favicon.png'
-# -- Options for HTMLHelp output ---------------------------------------------
+# -----------------------------------------------------------------------------
+# Options for HTMLHelp output
+# -----------------------------------------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'DefDAPdoc'
+# -----------------------------------------------------------------------------
+# Autodoc
+# -----------------------------------------------------------------------------
+
+autodoc_member_order = 'bysource'
+autodoc_default_options = {
+ 'inherited-members': True,
+ 'exclude_patterns': ['*base*'],
+}
# -- Generate API docs during sphinx-build (for readthedocs) ------------------
@@ -175,11 +204,29 @@ def setup(app):
if ON_RTD:
app.connect('builder-inited', run_apidoc)
-# -- Extension configuration -------------------------------------------------
+# -----------------------------------------------------------------------------
+# Apidoc
+# Need sphinx.ext.apidoc for this, which needs sphinx 8.2.3, which needs python 3.11
+# -----------------------------------------------------------------------------
+
+# apidoc_modules = [
+# {
+# 'path': '../../defdap',
+# 'destination': 'defdap',
+# 'exclude_patterns': ['*base*'],
+# 'separate_modules': True,
+# 'module_first': False,
+# 'automodule_options': {'members', 'show-inheritance', 'undoc-members'}
+# }
+# ]
+
+
+# -----------------------------------------------------------------------------
+# Intersphinx
+# -----------------------------------------------------------------------------
-autodoc_member_order = 'bysource'
intersphinx_mapping = {'python': ('https://docs.python.org/3.7/', None),
'numpy': ('https://numpy.org/doc/stable/', None),
- 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),
- 'matplotlib': ('https://matplotlib.org/', None),
+ 'scipy': ('https://docs.scipy.org/doc/scipy/', None),
+ 'matplotlib': ('https://matplotlib.org/stable/', None),
'skimage': ('https://scikit-image.org/docs/dev/', None)}
\ No newline at end of file
diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst
index 3e11037..c340e8c 100644
--- a/docs/source/contributing.rst
+++ b/docs/source/contributing.rst
@@ -1,47 +1,11 @@
Contributing
-===========================
+=================
We welcome any improvements made to this package, but please follow the guidelines below when making changes.
-Coding style
------------------
+.. toctree::
+ :maxdepth: 1
-In this project we (try) to follow the PEP8 standard for formatting python code with 2 notable exceptions:
-- Function and variable names are in mixed case e.g myFirstFunction
-- The 79 character line length limit is seen more as a guideline than rule. Code is split across lines where possible and to improve readability. Line length should never excede 119 characters. All documentation should be split to lines of less than 73 characters.
-
-Repository Structure
----------------------
-
-The schematic below shows the intended structure of the branches in this repository, based on `Gitflow `_. The `master` branch will be committed to at regular intervals by an administrator and each release will be given a tag with a version number. This is the first branch that is visible and we (will eventually) have controls in place to ensure this branch always works correctly. The `develop` branch contains new features which may not be significant enough to form part of the 'master' branch just yet. The final type of branch is the feature branch (green), such as `feture_1` and `feature_2`. This is where you come in! If you would like to make a new feature or fix a bug, simply make a branch from develop with a reasonable name and make a start on any changes you would like to make. Don't be afraid of breaking anything at this point, there are controls in place to help make sure buggy code does not make it into `develop` or `master`.
-
-.. figure:: ./_static/branches.png
- :scale: 100 %
- :alt: Repository structure
-
- Repository structure
-
-Instructions
------------------
-
-1. Making an 'issue' first is recommended when adding a new feature or fixing a bug, especially if you're not sure how to go about the change. Advice can then be given on the best way forward.
-2. Using your local git client (GitHub Desktop is easy to use), checkout the `develop` branch by selecting it (making sure you 'Pull Origin' after).
-3. Create a new branch with an appropriate name, using underscores where appropriate, for example `new_feature`
-4. Make any changes necessary in your favourite editor (PyCharm is recommended).
-5. Test to make sure the feature works correctly in your Jupyter Notebook.
-6. Commit the change to your new branch using the 'Commit to new_feature' button and include a descriptive title and description for the commit, making sure you click 'Push origin' when this is done.
-7. Make additional commits if necessary.
-8. Raise a pull request.
-
-Additional notes
------------------
-
-- Always make a branch from `develop`, don't try to make a branch from `master` or any feature branch.
-- You will not be able to complete a pull request into `develop` without a review by Mike or Rhys.
-- Try to avoid adding any dependencies to the code, if you do need to, add a comment to your 'issue' with the details.
-
-
-Documentation
------------------
-
-Where possible, update or add documentation at the beginning of the function you are making or changing, adding references if required. This is important so that other people know how to use your code and so that we can validate any methods you use. We are following the `NumPy Docs Style Guide `_, but you can use any of the documentation in the code as an example. Add comments where it is not clear what you have done.
+ contributing/contributing
+ contributing/docstrings
+ contributing/website
\ No newline at end of file
diff --git a/docs/source/contributing/contributing.rst b/docs/source/contributing/contributing.rst
new file mode 100644
index 0000000..04044aa
--- /dev/null
+++ b/docs/source/contributing/contributing.rst
@@ -0,0 +1,29 @@
+Repository Structure
+===========================
+
+The schematic below shows the intended structure of the branches in this repository, based on `Gitflow <https://nvie.com/posts/a-successful-git-branching-model/>`_. The `master` branch will be committed to at regular intervals by an administrator and each release will be given a tag with a version number. This is the first branch that is visible and we (will eventually) have controls in place to ensure this branch always works correctly. The `develop` branch contains new features which may not be significant enough to form part of the 'master' branch just yet. The final type of branch is the feature branch (green), such as `feature_1` and `feature_2`. This is where you come in! If you would like to make a new feature or fix a bug, simply make a branch from develop with a reasonable name and make a start on any changes you would like to make. Don't be afraid of breaking anything at this point, there are controls in place to help make sure buggy code does not make it into `develop` or `master`.
+
+.. figure:: ../_static/branches.png
+ :scale: 100 %
+ :alt: Repository structure
+
+ Repository structure
+
+Instructions
+-----------------
+
+1. Making an 'issue' first is recommended when adding a new feature or fixing a bug, especially if you're not sure how to go about the change. Advice can then be given on the best way forward.
+2. Using your local git client (GitHub Desktop is easy to use), checkout the `develop` branch by selecting it (making sure you 'Pull Origin' after).
+3. Create a new branch with an appropriate name, using underscores where appropriate, for example `new_feature`
+4. Make any changes necessary in your favourite editor (PyCharm is recommended).
+5. Test to make sure the feature works correctly in your Jupyter Notebook.
+6. Commit the change to your new branch using the 'Commit to new_feature' button and include a descriptive title and description for the commit, making sure you click 'Push origin' when this is done.
+7. Make additional commits if necessary.
+8. Raise a pull request.
+
+Additional notes
+-----------------
+
+- Always make a branch from `develop`, don't try to make a branch from `master` or any feature branch.
+- You will not be able to complete a pull request into `develop` without a review by Mike or Rhys.
+- Try to avoid adding any dependencies to the code, if you do need to, add a comment to your 'issue' with the details.
\ No newline at end of file
diff --git a/docs/source/contributing/docstrings.rst b/docs/source/contributing/docstrings.rst
new file mode 100644
index 0000000..963e3c8
--- /dev/null
+++ b/docs/source/contributing/docstrings.rst
@@ -0,0 +1,63 @@
+Docstrings
+===========================
+
+Where possible, update or add documentation at the beginning of the function you are making or changing, adding references if required.
+This is important so that other people know how to use your code and so that we can validate any methods you use.
+We are following the `NumPy Docs Style Guide <https://numpydoc.readthedocs.io/en/latest/format.html>`_,
+but you can use any of the documentation in the code as an example.
+Add comments where it is not clear what you have done.
+
+Example
+---------
+
+.. code-block:: python
+
+ def foo(var1, long_var_name='hi'):
+ r"""A one-line summary that does not use variable names or the
+ function name. Several sentences providing an extended description.
+ Refer to variables using back-ticks, e.g. `var`.
+
+ Parameters
+ ----------
+ var1 : array_like
+ Array_like means all those objects -- lists, nested lists, etc. --
+ that can be converted to an array. We can also refer to
+ variables like `var1`. The type above can either refer to an actual
+ Python type (e.g. ``int``), or describe the type of the variable
+ in more detail, e.g. ``(N,) ndarray`` or ``array_like``.
+ long_var_name : {'hi', 'ho'}, optional
+ Choices in brackets, default first when optional.
+
+ Returns
+ -------
+ type
+ Explanation of anonymous return value of type ``type``.
+ out : type
+ Explanation of `out`.
+
+ Raises
+ ------
+ BadException
+ Because you shouldn't have done that.
+
+ References
+ ----------
+ .. [1] O. McNoleg, "The integration of GIS, remote sensing,
+ expert systems and adaptive co-kriging for environmental habitat
+ modelling of the Highland Haggis using object-oriented, fuzzy-logic
+ and neural-network techniques," Computers & Geosciences, vol. 22,
+ pp. 585-588, 1996.
+
+ Examples
+ --------
+ These are written in doctest format, and should illustrate how to
+ use the function.
+
+ >>> a = [1, 2, 3]
+ >>> print([x + 3 for x in a])
+ [4, 5, 6]
+ >>> print("a\n\nb")
+ a
+ b
+
+ """
diff --git a/docs/source/contributing/website.rst b/docs/source/contributing/website.rst
new file mode 100644
index 0000000..39742d0
--- /dev/null
+++ b/docs/source/contributing/website.rst
@@ -0,0 +1,28 @@
+Documentation
+===========================
+
+The narrative documentation on this website (including this page) is built from source files which can be found in the ``docs/source`` directory.
+It is automatically built to https://defdap.readthedocs.io/en/latest/ when changes are merged to the main branch.
+Documentation for the develop branch is available at https://defdap.readthedocs.io/en/develop/.
+
+Writing documentation
+-----------------------
+
+DefDAP uses Sphinx to build the documentation, which is written in reStructuredText format.
+More details about the format can be found in the Sphinx documentation:
+
+https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html.
+
+Building documentation
+-----------------------
+
+To build the documentation yourself, you will first need to install the dependencies for building the documentation.
+You can do this using pip:
+
+``pip install defdap[docs]``
+
+Then, you can build the documentation using the following command from the ``docs/source`` directory:
+
+``make docs``
+
+The built html documentation will be available in the ``docs/build/html`` directory, and can be opened in a web browser.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 0b8a712..3b7c5ac 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -1,35 +1,79 @@
DefDAP Documentation
-======================
+===================================================
.. figure:: ./_static/logo.png
- :scale: 100 %
+ :scale: 80 %
+ :align: left
:alt: DefDAP
-DefDAP is a python library for correlating EBSD and HRDIC data. It was developed by Michael Atkinson and Rhys Thomas during their PhDs at the Univeristy of Manchester.
-This documentation gives information about the latest version of DefDAP.
+.. |pypi| image:: https://img.shields.io/pypi/v/defdap
+ :target: https://pypi.org/project/defdap
+ :alt: PyPI version
-Citation
-===========
+.. |python| image:: https://img.shields.io/pypi/pyversions/defdap
+ :target: https://pypi.org/project/defdap
+ :alt: Supported python versions
-If this software is used in the preparation of published work please cite:
+.. |docs| image:: https://readthedocs.org/projects/defdap/badge/?version=latest
+ :target: https://defdap.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation status
-`Atkinson, Michael D, Thomas, Rhys, Harte, Allan, Crowther, Peter, & Quinta da Fonseca, João. (2020, May 4). DefDAP: Deformation Data Analysis in Python - v0.92 (Version 0.92). Zenodo. `_
+.. |doi| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3688096.svg
+ :target: https://doi.org/10.5281/zenodo.3688096
+ :alt: DOI
+
+DefDAP is a python library for correlating EBSD and HRDIC data, developed by Michael Atkinson and Rhys Thomas.
+
+
+|pypi| |python| |docs| |doi|
+
+.. grid:: 4
+ :gutter: 3
+
+ .. grid-item-card:: User guide
+ :link: userguide
+ :link-type: doc
+ :text-align: center
+ :shadow: lg
+
+ :octicon:`gear;5em`
-Licenses
-==========
+ .. grid-item-card:: Contributing
+ :link: contributing
+ :link-type: doc
+ :text-align: center
+ :shadow: lg
-This software is distributed under Apache License 2.0. For more details see the :doc:`License` page.
+ :octicon:`workflow;5em`
+
+ .. grid-item-card:: Papers
+ :link: papers
+ :link-type: doc
+ :text-align: center
+ :shadow: lg
+
+ :octicon:`book;5em`
+
+ .. grid-item-card:: API Reference
+ :link: modules
+ :link-type: doc
+ :text-align: center
+ :shadow: lg
+
+ :octicon:`file-code;5em`
+
+If this software is used in the preparation of published work please cite:
+
+`Atkinson, Michael D, Thomas, Rhys, Harte, Allan, Crowther, Peter, & Quinta da Fonseca, João. (2020, May 4). DefDAP: Deformation Data Analysis in Python - v0.92 (Version 0.92). Zenodo. `_
-Contents
-==========
+This software is distributed under Apache License 2.0.
.. toctree::
+ :hidden:
:maxdepth: 1
- installation
- howtouse
+ userguide
contributing
papers
- license
modules
\ No newline at end of file
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
deleted file mode 100644
index 8ffc856..0000000
--- a/docs/source/installation.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-Installation
-===========================
-
-The defdap package is available from the Python Package Index (PyPI) and can be installed by executing the following command: ::
-
- pip install defdap
-
-The prerequisite packages should be installed automatically by pip, but if you want to manually install them, using the conda package manager for example, then run the following command: ::
-
- conda install scipy numpy matplotlib scikit-image scikit-learn pandas networkx
-
-If you are doing development work on the scripts, first clone the repository from GitHub. The package can then be installed in editable mode using pip with flag -e to create a "linked" .egg module, which means the module is loaded from the directory at runtime. This avoids having to reinstall every time changes are made. Run the following command from the root of the cloned repository: ::
-
- pip install -e .
\ No newline at end of file
diff --git a/docs/source/license.rst b/docs/source/license.rst
deleted file mode 100644
index a8206ea..0000000
--- a/docs/source/license.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-License
-===========================
-
-.. include:: ../../LICENSE
- :literal:
\ No newline at end of file
diff --git a/docs/source/modules.rst b/docs/source/modules.rst
index c788edd..2c1b63a 100644
--- a/docs/source/modules.rst
+++ b/docs/source/modules.rst
@@ -1,9 +1,10 @@
-API Documentation
+API reference
=================
Information on specific functions, classes, and methods.
.. toctree::
:glob:
+ :maxdepth: 2
defdap/*
\ No newline at end of file
diff --git a/docs/source/papers.rst b/docs/source/papers.rst
index c454ccd..2820782 100644
--- a/docs/source/papers.rst
+++ b/docs/source/papers.rst
@@ -6,6 +6,10 @@ Here is a list of journal papers which have used the DefDAP Python library.
2026
------
+* `D.Lunt, A.D.Smith, J.M.Donoghue, I.Alakiozidis, R.Thomas, E.J.Pickering, P.Frankel, M.Carrington, B.Poole, C.Hardie, C.Hamelin, A.Harte. A protocol for in-situ high resolution strain mapping at elevated temperature. Materials Characterization. Volume 233, March 2026, 116119. `_
+
+* `C.Liu, T.Sun, D.Hu, R.Thomas, P.Frankel, J.Fonseca, M.Preuss. Mechanistic insight into cooperative slip system activation under cyclic loading in a near-alpha titanium alloy. Acta Materialia. Volume 308, 15 April 2026, 122031. `_
+
* `A.D.Smith, D.Lunt, M.Taylor, A.Davis, R.Thomas, F.Martinez, A.Candeias, A.Gholinia, M.Preuss, J.M.Donoghue. A new approach to SEM in-situ thermomechanical experiments through automation. Ultramicroscopy. Vol. 280, Feb 2026, pp. 114244. `_
2025
diff --git a/docs/source/userguide.rst b/docs/source/userguide.rst
new file mode 100644
index 0000000..c21bca2
--- /dev/null
+++ b/docs/source/userguide.rst
@@ -0,0 +1,16 @@
+User Guide
+=================
+
+This is a narrative guide on how to install DefDAP and how to use the various classes.
+
+.. toctree::
+ :maxdepth: 1
+
+ userguide/installation
+ userguide/hrdic
+ userguide/ebsd
+ userguide/conventions
+ userguide/linking
+ userguide/experiment
+ userguide/inspector
+ userguide/howtouse
\ No newline at end of file
diff --git a/docs/source/userguide/conventions.rst b/docs/source/userguide/conventions.rst
new file mode 100644
index 0000000..66e01f2
--- /dev/null
+++ b/docs/source/userguide/conventions.rst
@@ -0,0 +1,44 @@
+Conventions
+===================================================
+
+Spatial
+----------------------
+
+The origin of plots is in the top left, with x increasing to the right and y increasing downwards.
+
+Orthonormal reference
+----------------------
+
+Oxford Instruments and EDAX use different conventions when attaching an orthonormal frame to a crystal structure.
+This can be set in ``defdap/__init__.py`` by changing the ``crystal_ortho_conv`` argument. The ``hkl``
+convention is x // [10-10] and y // a2 [-12-10], whereas the ``tsl`` convention is x // a1 [2-1-10], y // [01-10]
+
+Pole figure projection
+----------------------
+
+There are two common conventions for the pole figure projection, which can be set in ``defdap/__init__.py``
+by changing the ``pole_projection`` argument. The default is the ``stereographic`` (equal-angle) convention,
+but the ``lambert`` (equal-area) convention is also available.
+
+IPF Triangle
+--------------
+
+The orientation of the hexagonal IPF triangle can be set in ``defdap/__init__.py`` by changing the ``ipf_triangle_convention`` argument.
+The ``up`` and ``down`` conventions look like this:
+
+.. image:: /_static/IPF_up_down.png
+
+Slip systems
+----------------------
+
+Slip system definition files are in the ``defdap/slip_systems`` folder.
+The slip system definition file used for each crystal structure can be chosen in ``defdap/__init__.py``, under the ``slip_system_file`` argument.
+By default, the FCC slip systems are defined in ``cubic_fcc.txt``, containing (111) planes and [011] directions.
+By default, the BCC slip systems are defined in ``cubic_bcc.txt``, containing (110) planes and [111] directions,
+(112) planes and [111] directions and (312) planes and [111] directions.
+By default, the HCP slip systems are defined in ``hexagonal_withca.txt``, containing basal <a>, prismatic <a>, pyramidal <a> and pyramidal <c+a> slip systems.
+
+Slip trace angles
+----------------------
+
+These are calculated with the convention that 0 degrees corresponds to a slip trace pointing upwards, and angles increase anticlockwise.
diff --git a/docs/source/userguide/ebsd.rst b/docs/source/userguide/ebsd.rst
new file mode 100644
index 0000000..245a1fc
--- /dev/null
+++ b/docs/source/userguide/ebsd.rst
@@ -0,0 +1,127 @@
+EBSD Map (`defdap.ebsd.Map`)
+===================================================
+
+The EBSD class in DefDAP provides tools for loading, processing, and analyzing EBSD data.
+
+Supported Data Formats
+----------------------
+
+DefDAP supports loading data from various commercial EBSD vendors:
+
+.. list-table::
+ :header-rows: 1
+ :widths: 18 12 50
+
+ * - ``data_type``
+ - Extension
+ - Description
+ * - ``oxfordbinary``
+ - ``.cpr/.crc``
+ - Oxford Instruments binary files.
+ * - ``oxfordtext``
+ - ``.ctf``
+ - Oxford Instruments text files
+ * - ``edaxang``
+ - ``.ang``
+ - EDAX text file
+
+.. note::
+
+ Oxford Instruments and EDAX use different conventions when attaching an orthonormal frame to a crystal structure.
+ More information in :doc:`../userguide/conventions`.
+
+.. note::
+
+ If you have issues loading one of these files, please make an 'Issue' on GitHub, including a copy of the file.
+ If you are trying to load a new file type then please provide a sample file and we will try to add support for it.
+
+Loading EBSD Data
+------------------
+
+EBSD data can be loaded (for example, from an Oxford Instruments binary file) as follows:
+
+.. code-block:: python
+
+ import defdap.ebsd as ebsd
+
+ ebsd_map = ebsd.Map("path/to/ebsd_data", data_type="oxfordbinary")
+
+Data Structure
+--------------
+
+The EBSD Map stores several key data structures as a :class:`defdap.utils.Datastore` object under the ``data`` attribute.
+You can print a list of all the attributes stored: ``print(ebsd_map.data)``.
+These structures must be present in the imported data:
+
+.. list-table::
+ :header-rows: 1
+ :widths: 8 24
+
+ * - Attribute
+ - Description
+ * - ``phase``
+ - Phase ID map (1-based; 0 for non-indexed points).
+ * - ``euler_angle``
+ - Euler angles stored as (3, y, x) in radians.
+
+These are optional, but often present.
+
+.. list-table::
+ :header-rows: 1
+ :widths: 8 24
+
+ * - Attribute
+ - Description
+ * - ``band_contrast``
+ - Band contrast map from the EBSD scan.
+ * - ``band_slope``
+ - Band slope map from the EBSD scan.
+ * - ``mean_angular_deviation``
+ - Mean angular deviation (MAD) map.
+
+These are generated from the above data:
+
+.. list-table::
+ :header-rows: 1
+ :widths: 8 24
+
+ * - Attribute
+ - Description
+ * - ``orientation``
+ - Quaternion map (generated from ``euler_angle``).
+ * - ``grain_boundaries``
+ - Grain boundary set.
+ * - ``phase_boundaries``
+ - Phase boundary set.
+ * - ``grains``
+ - Grain ID map (1-based in the map).
+ * - ``KAM``
+ - Kernel average misorientation (radians).
+ * - ``GND``
+ - Geometrically necessary dislocation density map.
+ * - ``Nye_tensor``
+ - 3x3 Nye tensor at each point.
+ * - ``proxigram``
+ - Proxigram values for each pixel.
+ * - ``point``
+ - Point locations used for the proxigram calculation.
+ * - ``GROD``
+ - Grain reference orientation deviation map.
+ * - ``GROD_axis``
+ - Grain reference orientation deviation axis map.
+ * - ``grain_data_to_map``
+ - Derived grain list data mapped back to the pixel grid.
+
+Plotting and Visualization
+---------------------------
+
+To plot a band contrast map, with scale bar:
+
+.. code-block:: python
+
+ ebsd_map.plot_map('band_contrast', plot_scale_bar=True)
+
+Further Reading
+---------------
+
+For detailed API documentation, see :doc:`../defdap/defdap.ebsd`.
\ No newline at end of file
diff --git a/docs/source/userguide/experiment.rst b/docs/source/userguide/experiment.rst
new file mode 100644
index 0000000..74fe604
--- /dev/null
+++ b/docs/source/userguide/experiment.rst
@@ -0,0 +1,3 @@
+Experiment (`defdap.experiment.Experiment`)
+===================================================
+
diff --git a/docs/source/userguide/hrdic.rst b/docs/source/userguide/hrdic.rst
new file mode 100644
index 0000000..ae5345d
--- /dev/null
+++ b/docs/source/userguide/hrdic.rst
@@ -0,0 +1,177 @@
+HRDIC Map (`defdap.hrdic.Map`)
+===================================================
+
+The HRDIC class in DefDAP provides tools for loading, processing, and analyzing DIC data.
+
+Supported Data Formats
+----------------------
+
+DefDAP supports loading data from various commercial and open-source software packages:
+
+.. list-table::
+ :header-rows: 1
+ :widths: 18 12 50
+
+ * - ``data_type``
+ - Extension
+ - Description
+ * - ``davis``
+ - ``.txt``
+ - LaVision DaVis text files
+ * - ``openpivbinary``
+ - ``.npz``
+ - OpenPIV binary files
+ * - ``openpivtext``
+ - ``.txt``
+ - OpenPIV text files
+ * - ``pyvale-csv``
+ - ``.csv``
+ - PyVale text files
+ * - ``pyvale-binary``
+ - ``.2ddic``
+ - PyVale binary files
+
+.. note::
+
+ Only files from version 8 of DaVis are currently supported.
+ In DaVis, ensure the decimal point character in exported files is set to dot,
+ by going to Project → Global Options → Export → Decimal Point Character and selecting 'Dot'
+
+Loading HRDIC Data
+------------------
+
+HRDIC data can be loaded (for example, from a LaVision DaVis text file) as follows:
+
+.. code-block:: python
+
+ import defdap.hrdic as hrdic
+
+ dic_map = hrdic.Map("path/to/dic_data.txt", data_type="davis")
+
+Data Structure
+--------------
+
+The HRDIC Map stores several key data structures as a :class:`defdap.utils.Datastore` object under the ``data`` attribute.
+You can print a list of all the attributes stored: ``print(dic_map.data)`` and a more detailed summary is below:
+
+.. list-table::
+ :header-rows: 1
+ :widths: 8 24
+
+ * - Attribute
+ - Description
+ * - ``coordinate``
+ - Pixel coordinate grid for the DIC map.
+ * - ``displacement``
+ - Displacement field arrays (first element is x, second element is y).
+ * - ``e``
+ - Green strain components (e.g. ``Exx`` is element [0][0] ``Eyy`` is element [1][1]).
+ * - ``f``
+ - Deformation gradient components (e.g. ``Fxx`` is element [0][0] ``Fyy`` is element [1][1]).
+ * - ``max_shear``
+ - Maximum shear strain field.
+ * - ``pattern``
+ - Image/pattern data associated with DIC, set this with :meth:`defdap.hrdic.Map.set_pattern`.
+ * - ``mask``
+ - Validity mask for data points, set this with :meth:`defdap.hrdic.Map.generate_mask`.
+
+When linked to a :class:`defdap.ebsd.Map` object, the HRDIC Map also stores the following data structures:
+
+.. list-table::
+ :header-rows: 1
+ :widths: 8 24
+
+ * - Attribute
+ - Description
+ * - ``proxigram``
+ - Distance away from grain boundary for each point in the DIC map.
+ * - ``grains``
+ - Grain map derived from EBSD map.
+ * - ``phase_boundaries``
+ - Phase boundaries derived from EBSD map.
+ * - ``grain_boundaries``
+ - Grain boundaries derived from EBSD map.
+
+Setting and plotting pattern
+-----------------------------
+
+The undeformed pattern image, from which the DIC data was derived can contain microstructural information which will be useful to link the HRDIC to EBSD data later.
+First, scale the image down, ideally a factor of the interrogation window size for the DIC data.
+For example, if the DIC interrogation window size was (16 x 16) pixels, then the pattern image should be scaled down by a factor of 16.
+The path can then be set with :meth:`defdap.hrdic.Map.set_pattern`, where the second argument is the scaling factor of the pattern image relative to the DIC interrogation window size.
+
+.. code-block:: python
+
+ dic_map.set_pattern("pattern_image.bmp", 1)
+
+.. note::
+
+ Ensure this is the same image as the one used to generate the DIC data,
+ otherwise the pattern will not be correctly aligned with the DIC data and the subsequent correlation with EBSD data will be incorrect.
+
+.. note::
+
+ DefDAP calculates the expected size of the pattern image based on the size of the DIC map and the scaling factor,
+ so if the pattern image is not the expected size, an error will be raised.
+
+To plot the pattern image, use the :meth:`defdap.hrdic.Map.plot_map` method with the argument 'pattern'
+
+.. code-block:: python
+
+ dic_map.plot_map("pattern")
+
+Setting scale, crop and mask
+----------------------------
+
+HRDIC data is stored in a pixel-based coordinate system, with no knowledge of the physical resolution of the data.
+The scale of the map can be set so that a scale bar in microns is
+plotted when the map is plotted with the argument ``plot_scale_bar=True``.
+If the original image has a horizontal field width of 30 microns and a horizontal resolution of 2048 pixels, the scale can be set as follows:
+
+.. code-block:: python
+
+ dic_map.set_scale(scale=30/2048)
+
+.. note::
+
+ The sub-window size of the DIC data is automatically taken into account when setting the scale.
+ For the above example, the scale of the DIC data (16 x 16 pixels) would give (30 / 2048) * 16 = 0.2344 microns per DIC pixel,
+ so a scale bar of 10 microns would be plotted as 10 / 0.2344 = 42.7 pixels long.
+
+There are normally some anomalous points near the edges of a DIC map,
+so it is often desirable to crop the map to a region of interest, which can be done using this command:
+
+.. code-block:: python
+
+ # Crop to region of interest
+ dic_map.crop(left=100, right=100, top=100, bottom=100)
+
+Finally, a mask can be generated to identify valid and invalid points in the DIC map, using the :meth:`defdap.hrdic.Map.generate_mask` method.
+The boolean array passed as ``mask`` should have the same shape as the DIC map,
+where ``True`` values indicate invalid points and ``False`` values indicate valid points.
+These are some examples of how to generate a mask based on the DIC data:
+
+.. code-block:: python
+
+ #To remove data points in dic_map where max_shear is above 0.8, use:
+ mask = dic_map.data.max_shear > 0.8
+
+ #To remove data points in dic_map where e11 is above 1 or less than -1, use:
+ mask = (dic_map.data.e[0, 0] > 1) | (dic_map.data.e[0, 0] < -1)
+
+ #To disable masking:
+ mask = None
+
+Plotting and Visualization
+---------------------------
+
+To plot a maximum shear strain map, with scale bar:
+
+.. code-block:: python
+
+ dic_map.plot_map('max_shear', vmin=0, vmax=0.1, plot_scale_bar=True)
+
+Further Reading
+---------------
+
+For detailed API documentation, see :doc:`../defdap/defdap.hrdic`.
\ No newline at end of file
diff --git a/docs/source/userguide/inspector.rst b/docs/source/userguide/inspector.rst
new file mode 100644
index 0000000..2e616ec
--- /dev/null
+++ b/docs/source/userguide/inspector.rst
@@ -0,0 +1,24 @@
+Grain inspector (`defdap.inspector.GrainInspector`)
+===================================================
+
+The inspector class in DefDAP provides GUI tools for interrogating HRDIC maps on a grain-by-grain basis.
+
+Launching the Grain Inspector
+-----------------------------
+
+To launch the grain inspector, use the ``grain_inspector`` method of the HRDIC Map class:
+
+.. code-block:: python
+
+ dic_map.grain_inspector()
+
+Interface Overview
+------------------
+
+Labelled image of interface
+
+
+Data Structure
+--------------
+
+The grain inspector stores data for each grain in
\ No newline at end of file
diff --git a/docs/source/userguide/installation.rst b/docs/source/userguide/installation.rst
new file mode 100644
index 0000000..0e964b8
--- /dev/null
+++ b/docs/source/userguide/installation.rst
@@ -0,0 +1,33 @@
+Installation
+===========================
+
+For most users, only looking to use DefDAP, the easiest installation method is via the Python Package Index (PyPI).
+If you are doing development work on DefDAP, then we recommend you first clone the repository from GitHub, then install the package in editable mode.
+
+.. tab-set::
+
+ .. tab-item:: PyPi
+
+ The latest released version of DefDAP can be installed from PyPI by executing the following command: ::
+
+ pip install defdap
+
+ This will automatically install any required dependencies.
+
+ .. tab-item:: Clone repository
+
+ First, clone the repository from GitHub. You can do this multiple ways:
+
+ - (Recommended for new users) Use the GitHub Desktop GUI (https://desktop.github.com/download/).
+ - If you are comfortable with the Git version control system, you can clone the repository with the command line.
+ - (Not recommended) Download https://github.com/MechMicroMan/DefDAP as a ZIP file, then extract the contents to a folder on your computer.
+
+ Using the first two methods mean that you can easily pull down updates from the main repository in the future
+ and you can also push any changes you make to your own fork of the repository.
+
+ After cloning the repository, the package can then be installed in editable mode using pip with flag -e.
+ This avoids having to reinstall every time changes are made.
+ Run the following command from the root of the cloned repository: ::
+
+ pip install -e .
+
diff --git a/docs/source/userguide/linking.rst b/docs/source/userguide/linking.rst
new file mode 100644
index 0000000..d7e36cb
--- /dev/null
+++ b/docs/source/userguide/linking.rst
@@ -0,0 +1,3 @@
+Linking HRDIC to EBSD
+===================================================
+
diff --git a/pyproject.toml b/pyproject.toml
index 2682908..8f9add4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -55,13 +55,14 @@ testing = [
"pytest_cases",
]
docs = [
- "sphinx==5.0.2",
- "sphinx_rtd_theme==0.5.0",
- "sphinx_autodoc_typehints==1.11.1",
- "nbsphinx==0.9.3",
+ "sphinx==8.1.3", # Max version avaialble from readthedocs - Rhys running 8.2.3 locally
+ "pydata_sphinx_theme==0.16.1",
+ "sphinx_autodoc_typehints==3.0.1", # Max version avaialble from readthedocs - Rhys running 3.6.2 locally
+ "nbsphinx==0.9.8",
"ipykernel",
"pandoc",
"ipympl",
+ "sphinx_design==0.6.1"
]
[tool.setuptools]