# MAMMAL
MAMMAL - Magnetic Anomaly Map Matching Airborne and Land
A Python package for simulating and processing aeromagnetic anomaly survey data. It can be used to create magnetic anomaly maps for Magnetic Navigation solutions (MagNav).
## Install
To install MAMMAL, clone the repository to your machine and open a terminal in the folder containing `setup.py`. Then run the following commands:
```bash
conda install gdal==3.4.3
python setup.py install
```
You will also need to download and install the GeoScraper package. Clone the [GeoScraper repository](https://git.antcenter.net/lbergeron/geoscraper) to your machine, open a terminal in the folder containing `setup.py`, and run the following command:
```bash
python setup.py install
```
### If the osgeo (GDAL) package is not importing correctly on Windows:
1. Download and install GDAL core _and_ Python binding binaries from https://www.gisinternals.com/release.php
2. Find the folder where GDAL was installed (usually `C:\Program Files (x86)\GDAL`)
3. Create a new environment variable named `GDAL` and set its value to the GDAL install folder path
4. Download the GDAL wheel from https://www.lfd.uci.edu/~gohlke/pythonlibs/#gdal for your CPU type _and_ Python version
5. Navigate to the folder the wheel was saved to
6. Open a command terminal and run the following:
```bash
pip install GDAL-X.X.X-cpXX-cpXX-winXXX.whl
```
7. Test the installation by opening a Python/IPython terminal and running:
```Python
import osgeo
```
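If the import succeeds, you can also confirm which GDAL version the bindings were built against (a quick check, assuming a standard install):
```Python
from osgeo import gdal

# Should print the GDAL version the Python bindings were built for
print(gdal.__version__)
```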
### If the rioxarray/rasterio packages are not importing correctly on Windows
If rioxarray raises an error on import, it may be because rasterio was installed incorrectly. If so:
1. Install rasterio manually by downloading the rasterio wheel from https://www.lfd.uci.edu/~gohlke/pythonlibs/#rasterio for your CPU type _and_ Python version
2. Navigate to the folder the wheel was saved to
3. Open a command terminal and run the following:
```bash
pip install rasterio-X.X.X-cpXX-cpXX-winXXX.whl
```
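As with GDAL, a quick import check confirms the fix took effect:
```Python
import rasterio
import rioxarray

# Both imports should now succeed without errors
print(rasterio.__version__)
```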
## Usage
### Parsing Log Files
---
To process a single MFAM Dev Kit log and save as a CSV:
```Python
from MAMMAL.Parse import parseGeometrics as pg
LOG_FNAME = r'dev_kit_log.txt'
CSV_FNAME = r'dev_kit_log.csv'
df = pg.parse_devLog(LOG_FNAME)
print(df)
df.to_csv(CSV_FNAME, index=False)
```
To process an entire acquisition of MFAM Dev Kit logs and save as a CSV:
```Python
from MAMMAL.Parse import parseGeometrics as pg
LOG_PATH = r'dev_kit_acqu_folder_path'
CSV_FNAME = r'dev_kit_log.csv'
df = pg.parse_devACQU(LOG_PATH)
print(df)
df.to_csv(CSV_FNAME, index=False)
```
To process a GSMP sensor log and save as a CSV:
```Python
from MAMMAL.Parse import parseGSMP as pgsmp
LOG_FNAME = r'gsmp_log.txt'
CSV_FNAME = r'gsmp_log.csv'
df = pgsmp.parse_GSMP(LOG_FNAME)
print(df)
df.to_csv(CSV_FNAME, index=False)
```
To process an INTERMAGNET ground reference station log and save as a CSV:
```Python
from MAMMAL.Parse import parseIM as pim
LOG_FNAME = r'intermagnet_log.sec'
CSV_FNAME = r'intermagnet_log.csv'
df = pim.parse_sec(LOG_FNAME)
print(df)
df.to_csv(CSV_FNAME, index=False)
```
To process a Pixhawk flight log and save as a CSV:
```Python
from MAMMAL.Parse import parsePixhawk as pp
LOG_FNAME = r'pix_log.txt'
CSV_FNAME = r'pix_log.csv'
df = pp.parsePix(LOG_FNAME)
print(df)
df.to_csv(CSV_FNAME, index=False)
```
To load a raster map:
```Python
from MAMMAL.Parse import parseRaster as praster
MAP_FNAME = r'map.tiff'
map = praster.parse_raster(MAP_FNAME)
print(map)
```
### Data Processing
---
To find temporal variations after reading-in flight and magnetic reference datasets:
```Python
import numpy as np
import pandas as pd
from MAMMAL import Diurnal
REF_FNAME = r'ref_log.csv'
LOG_FNAME = r'flight_log.csv'
ref_df = pd.read_csv(REF_FNAME, parse_dates=['datetime'])
log_df = pd.read_csv(LOG_FNAME, parse_dates=['datetime'])
timestamps = np.array(log_df.epoch_sec)
_, ref_mag = Diurnal.interp_reference_df(df = ref_df,
timestamps = timestamps,
survey_lon = log_df.LONG.mean(),
subtract_core = True)
```
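The interpolated reference values can then be subtracted from the survey scalar readings to remove the temporal (diurnal) variation. A minimal sketch, assuming `log_df.F` holds the survey scalar measurements in nT:
```Python
# Remove the temporal variation captured by the reference station
f_temporal_corrected = np.array(log_df.F) - ref_mag
```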
To calibrate airborne scalar data:
```Python
import numpy as np
import pandas as pd
from MAMMAL.VehicleCal import magUtilsTL as magtl
LOG_FNAME = r'flight_log.csv'
TL_C = np.array([-1.86687725e+01, 1.33975396e+02, -1.80762945e+02, 1.69023832e-01,
-3.92262356e-03, -1.84382741e-03, 1.71830230e-01, -1.61173781e-04,
1.72575427e-01, -4.31927864e-04, -8.21512835e-05, -4.37609432e-05,
-1.06838978e-04, -1.22444017e-04, -2.76294434e-04, -8.51727772e-05,
3.16374022e-05, -2.77441572e-05])
TL_TERMS = magtl.DEFAULT_TL_TERMS
log_df = pd.read_csv(LOG_FNAME, parse_dates=['datetime'])
f = log_df.F
b_vector = np.hstack([np.array(log_df.X)[:, np.newaxis],
np.array(log_df.Y)[:, np.newaxis],
np.array(log_df.Z)[:, np.newaxis]])
body_effects_scalar = magtl.tlc_compensation(vector = b_vector,
tlc = TL_C,
terms = TL_TERMS)
f_cal = f - body_effects_scalar
f_cal += (f.mean() - f_cal.mean())  # restore the mean field level removed by compensation
```
To level scalar anomaly data:
```Python
import numpy as np
import pandas as pd
from MAMMAL.Utils import ProcessingUtils as pu
# pcaLvl and tieLvl below refer to MAMMAL's PCA and tie-line leveling modules;
# import them from their location in your MAMMAL install
log_df = pd.DataFrame() # Replace with a DataFrame whose df.F column holds the scalar anomaly values
# PCA leveling
lvld_survey_df = pcaLvl.pca_lvl(survey_df = log_df,
num_ptls = 2,
ptl_locs = np.array([0.25, 0.75]))
# Per flight line leveling
lvld_survey_df = tieLvl.tie_lvl(survey_df = log_df,
approach = 'lobf')
# Plane of best fit leveling
lvld_survey_df = tieLvl.tie_lvl(survey_df = log_df,
approach = 'lsq')
```
To interpolate scalar anomaly data:
```Python
import pandas as pd
from MAMMAL.Utils import ProcessingUtils as pu
DX = 5 # meters
DY = 5 # meters
MAX_TERRAIN_MSL = 630 # meters
log_df = pd.DataFrame() # Replace with a DataFrame whose df.F column holds the scalar anomaly values
interp_type = 'RBF'
interp_df = pu.interp_flight_lines(anomaly_df = log_df,
dx = DX,
dy = DY,
max_terrain_msl = MAX_TERRAIN_MSL,
buffer = 0,
interp_type = interp_type,
neighbors = None,
skip_na_mask = True)
```
To create and export a magnetic anomaly map:
```Python
from MAMMAL.Utils import mapUtils as mu
# Replace each argument with the appropriate value for your use-case
# **See export_map doc string for argument details**
map = mu.export_map(out_dir = SURVEY_DIR,
location = map_title,
date = log_df.datetime[0],
lats = interp_lats,
lons = interp_lons,
scalar = interp_scalar_LPF,
heights = interp_heights,
process_df = pd.DataFrame(process_dict),
process_app = PROCESS_APP,
stds = interp_std,
vector = None,
scalar_type = SCALAR_TYPE,
vector_type = VECTOR_TYPE,
scalar_var = np.nan,
vector_var = np.nan,
poc = POC,
flight_path = flight_path,
area_polys = area_polys,
osm_path = None,
level_type = 'No leveling',
tl_coeff_types = TL_COEFF_TYPES,
tl_coeffs = TL_C,
interp_type = interp_type,
final_filt_cut = FINAL_FILT_CUT,
final_filt_order = FINAL_FILT_ORDER)
```
### Map Metadata
---
Magnetic anomaly maps for magnetic navigation (MagNav) must be standardized
in an easy-to-use, common file format with consistent units. This ensures
plug-and-play interoperability between all future MagNav filters and maps generated
by various sources.
The GeoTIFF format is highly versatile, designed to represent a wide range of
geospatial data, and is ubiquitous in the geospatial data processing discipline, with
many mapping tools and software packages already supporting it. For this reason,
all MagNav survey maps should be published as GeoTIFF files with the following
metadata and fields (a minimal example of reading such a map appears after the list):
* Coordinate reference system:
* WGS84
* Orientation of raster bands:
* North up
* Invalid pixel value:
* NaN
* Top level metadata:
* Metadata field name: “Description”
* Standardized value: “MagNav Aeromagnetic Anomaly Map”
* Metadata field name: “ProcessingApp”
* Description of the application name and version used to generate the map file
* Metadata field name: “SurveyDateUTC”
* Approximate UTC date of the survey in an ISO 8601 formatted string
* Metadata field name: “SampleDistM”
* Approximate distance between each magnetic reading along a given flight line in meters
* Metadata field name: “xResolutionM”
* Pixel width in meters
* Metadata field name: “yResolutionM”
* Pixel height in meters
* Metadata field name: “ExtentDD”
* Extent of the map in decimal degrees
* Example: “[-84.0958, 39.7617, -84.0484, 39.7823]”
* Metadata field name: “ScalarType”
* Description of the make/model/type of scalar magnetometer used
* Metadata field name: “VectorType”
* Description of the make/model/type of vector magnetometer used
* Metadata field name: “ScalarSensorVar”
* Survey scalar magnetometer variance in nT
* Metadata field name: “VectorSensorVar”
* Survey vector magnetometer variance in nT
* Metadata field name: “POC”
* Point of contact information for the organization that conducted the survey and produced the map (no standard format for the information in this metadata field)
* Metadata field name: “KML”
* Keyhole Markup Language (KML) document text that specifies the timestamped survey sample locations; flight/tie line average directions, distances, and altitudes for each sub-survey area; and location of roads, power lines, and substations
* The timestamped survey sample locations must be represented by a top-level GxTimeSpan named “FlightPath” with UTC timestamps; WGS84 coordinates; and the altitude mode set to “absolute”.
* The sub-survey areas must be represented by a top-level folder of polygons named “SubSurveyAreas”. Each sub-survey area polygon must have the following description: “FL Dir: (fldir)°, FL Dist: (fldist)m, TL Dir: (tldir)°, TL Dist: (tldist)m, Alt: (alt)m above MSL” where:
* “(fldir)” is replaced with the average flight line direction in degrees off North
* “(fldist)” is replaced with the average flight line distance in meters
* “(tldir)” is replaced with the average tie line direction in degrees off North (if tie lines not present, set to -90)
* “(tldist)” is replaced with the average tie line distance in meters (if tie lines not present, set to 0)
* “(alt)” is replaced with the average altitude in meters above mean sea level (MSL)
* Directions must be within the range [0°, 180°), except for the tie line direction if tie lines are not present (set the value to -90)
* The road locations must be represented by a top-level multigeometry of line strings named “Roads” with WGS84 coordinates and the altitude mode set to “clampToGround”
* The power line locations must be represented by a top-level multigeometry of line strings named “PowerLines” with WGS84 coordinates and the altitude mode set to “clampToGround”
* The substation locations must be represented by a top-level multigeometry of polygons named “Substations” with WGS84 coordinates and the altitude mode set to “clampToGround”
* Metadata field name: “LevelType”
* Description of the algorithm used for map leveling
* Metadata field name: “TLCoeffTypes”
* Ordered list of Tolles-Lawson coefficient types used
* Example: “[Permanent, Induced, Eddy]”
* Metadata field name: “TLCoeffs”
* Ordered list of Tolles-Lawson coefficients used
* Example: “[0.62, 0.70, 0.55, 0.24, 0.49, 0.28, 0.43, 0.57, 0.90, 0.80, 0.84, 0.14, 0.42, 0.58, 0.85, 0.86, 0.80, 0.73]”
* Metadata field name: “CSV”
* Comma-separated values (CSV) document text that includes all pertinent survey data points and data processing steps.
* Minimum required columns include:
* TIMESTAMP: Coordinated Universal Time (UTC) timestamps (s)
* LAT: Latitudes (dd)
* LONG: Longitudes (dd)
* ALT: Altitudes above MSL (m)
* DC_X: Direction cosine X-Components (dimensionless)
* DC_Y: Direction cosine Y-Components (dimensionless)
* DC_Z: Direction cosine Z-Components (dimensionless)
* F: Raw scalar measurements (nT)
* Suggested columns include (may vary depending on exact steps used to produce the original map values):
* F_CAL: Calibrated scalar measurements (nT)
* F_CAL_IGRF: Calibrated scalar measurements without core field (nT)
* F_CAL_IGRF_TEMPORAL: Calibrated scalar measurements without core field or temporal corrections (nT)
* F_CAL_IGRF_TEMPORAL_FILT: Calibrated scalar measurements without core field or temporal corrections after low pass filtering (nT)
* F_CAL_IGRF_TEMPORAL_FILT_LEVEL: Calibrated scalar measurements without core field or temporal corrections after low pass filtering and map leveling (nT)
* Metadata field name: “InterpType”
* Description of the algorithm used for map pixel interpolation
* Metadata field name: “FinalFiltCut”
* Cutoff wavelength of the 2D low pass filter applied to the interpolated scalar pixel values
* Metadata field name: “FinalFiltOrder”
* Order number of the 2D low pass filter applied to the interpolated scalar pixel values
* Band 1:
* NxM raster array of scalar magnetic anomaly values in nT
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “F”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “n/a”
* Band 2:
* NxM raster array of North magnetic anomaly component values in nT (optional - if no data provided, fill band with NaNs)
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “X”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “North”
* Band 3:
* NxM raster array of East magnetic anomaly component values in nT (optional - if no data provided, fill band with NaNs)
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “Y”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “East”
* Band 4
* NxM raster array of downward magnetic anomaly component values in nT (optional - if no data provided, fill band with NaNs)
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “Z”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “Down”
* Band 5
* NxM raster array of pixel altitudes in meters above MSL
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “ALT”
* Metadata field name: “Units”
* Standardized value: “m MSL”
* Metadata field name: “Direction”
* Standardized value: “n/a”
* Band 6
* NxM raster array of pixel interpolation standard deviation values in nT (optional - if no data provided, fill band with NaNs)
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “STD”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “n/a”
* Band 7
* NxM raster array of pixel easterly gradients
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “dX”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “East”
* Band 8
* NxM raster array of pixel northerly gradients
* Band metadata:
* Metadata field name: “Type”
* Standardized value: “dY”
* Metadata field name: “Units”
* Standardized value: “nT”
* Metadata field name: “Direction”
* Standardized value: “North”
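As a quick illustration of how these fields can be accessed, the sketch below reads the scalar anomaly band, the altitude band, and the top-level metadata from a map file with rasterio. The filename is hypothetical, and the snippet only assumes the band layout and metadata names described above:
```Python
import rasterio

MAP_FNAME = r'survey_map.tiff'  # hypothetical example file

with rasterio.open(MAP_FNAME) as src:
    meta = src.tags()         # top-level metadata (Description, SurveyDateUTC, ExtentDD, ...)
    band1_tags = src.tags(1)  # band 1 metadata (Type, Units, Direction)
    scalar = src.read(1)      # band 1: scalar magnetic anomaly values (nT)
    alt = src.read(5)         # band 5: pixel altitudes (m above MSL)

print(meta.get('Description'))
print(meta.get('SurveyDateUTC'))
print(band1_tags)
print(scalar.shape, alt.shape)
```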