Mirror of https://github.com/pim-n/pg-rad, synced 2026-02-02 14:33:09 +01:00

Compare commits: aa76900bd4 ... fix-object (19 commits)
| SHA1 |
|---|
| 52b2eaaeb5 |
| ead96eb723 |
| f5a126b927 |
| c1b827c871 |
| 85f80ace97 |
| a4e965c9d6 |
| 15b7e7e65e |
| db6f859a60 |
| 14e49e63aa |
| 2551f854d6 |
| caec70b39b |
| 21ea25a3d8 |
| d7c670d344 |
| 85f306a469 |
| c459b732bb |
| 4632fe35c9 |
| 7cfbbb8792 |
| 7290e241a2 |
| 38318ad822 |
.github/workflows/ci-docs.yml (2 changed lines, vendored)
@@ -17,7 +17,7 @@ jobs:
           git config user.email 41898282+github-actions[bot]@users.noreply.github.com
       - uses: actions/setup-python@v5
         with:
-          python-version: 3.x
+          python-version: 3.12.9
       - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
       - uses: actions/cache@v4
         with:
.gitignore (3 changed lines, vendored)
@@ -1,3 +1,6 @@
+# Custom
+dev-tools/
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[codz]
README.md (20 changed lines)
@@ -23,13 +23,27 @@ With Python verion `>=3.12.4` and `<3.13`, create a virtual environment and inst
 ```
 python3 -m venv .venv
 source .venv/bin/activate
-(venv) pip install -e .[dev]
 ```
 
+With the virtual environment activated, run:
+
+```
+pip install -e .[dev]
+```
+
 ## Tests
 
-Tests can be run with `pytest` from the root directory of the repository.
+Tests can be run with `pytest` from the root directory of the repository. With the virtual environment activated, run:
 
 ```
-(venv) pytest
+pytest
 ```
+
+## Local viewing of documentation
+
+PG-RAD uses [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) for generating documentation. It can be locally viewed by (in the venv) running:
+```
+mkdocs serve
+```
+
+where you can add the `--livereload` flag to automatically update the documentation as you write to the Markdown files.
demo/demo.ipynb (253 changed lines)

File diff suppressed because one or more lines are too long
docs/javascripts/mathjax.js (18 lines, new file)
@@ -0,0 +1,18 @@
+window.MathJax = {
+  tex: {
+    inlineMath: [['$', '$'], ["\\(", "\\)"]],
+    displayMath: [['$$', '$$'], ["\\[", "\\]"]],
+    processEscapes: true,
+    processEnvironments: true
+  },
+  options: {
+    processHtmlClass: "arithmatex"
+  }
+};
+
+document$.subscribe(() => {
+  MathJax.startup.output.clearCache()
+  MathJax.typesetClear()
+  MathJax.texReset()
+  MathJax.typesetPromise()
+})
docs/pg-rad-in-python.ipynb (272 lines, new file)

File diff suppressed because one or more lines are too long
@@ -1,5 +0,0 @@
----
-title: Using PG-RAD as a module
----
-
-Consult the API documentation in the side bar.
@@ -28,8 +28,16 @@ markdown_extensions:
   - pymdownx.inlinehilite
   - pymdownx.snippets
   - pymdownx.superfences
+  - pymdownx.arithmatex:
+      generic: true
+
+extra_javascript:
+  - javascripts/mathjax.js
+  - https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js
 
 plugins:
+  - mkdocs-jupyter:
+      execute: false
   - mkdocstrings:
       enabled: !ENV [ENABLE_MKDOCSTRINGS, true]
       default_handler: python
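Taken together, the `pymdownx.arithmatex` extension above and the new `docs/javascripts/mathjax.js` hook enable TeX math in the documentation. As an illustration only (this page snippet is hypothetical, not part of the change), the configured delimiters would let a docs Markdown file contain:

```markdown
The dose rate from an unshielded point source falls off with the inverse square of the distance $r$:

$$
\dot{D}(r) = \frac{\Gamma A}{r^2}
$$

where $A$ is the source activity and $\Gamma$ the isotope's dose-rate constant.
```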
@@ -7,7 +7,7 @@ where = ["src"]
 
 [project]
 name = "pg-rad"
-version = "0.2.0"
+version = "0.2.1"
 authors = [
   { name="Pim Nelissen", email="pi0274ne-s@student.lu.se" },
 ]
@@ -29,4 +29,4 @@ Homepage = "https://github.com/pim-n/pg-rad"
 Issues = "https://github.com/pim-n/pg-rad/issues"
 
 [project.optional-dependencies]
-dev = ["pytest", "notebook", "mkdocs-material", "mkdocstrings-python"]
+dev = ["pytest", "mkinit", "notebook", "mkdocs-material", "mkdocstrings-python", "mkdocs-jupyter"]
src/pg_rad/dataloader/__init__.py (8 lines, new file)
@@ -0,0 +1,8 @@
+# do not expose internal logger when running mkinit
+__ignore__ = ["logger"]
+
+from pg_rad.dataloader import dataloader
+
+from pg_rad.dataloader.dataloader import (load_data,)
+
+__all__ = ['dataloader', 'load_data']
@@ -1,12 +1,13 @@
+import logging
+
 import pandas as pd
 
-from pg_rad.logger import setup_logger
 from pg_rad.exceptions import DataLoadError, InvalidCSVError
 
-logger = setup_logger(__name__)
+logger = logging.getLogger(__name__)
 
 def load_data(filename: str) -> pd.DataFrame:
-    logger.debug(f"Attempting to load data from {filename}")
+    logger.debug(f"Attempting to load file: {filename}")
 
     try:
         df = pd.read_csv(filename, delimiter=',')
@@ -23,4 +24,5 @@ def load_data(filename: str) -> pd.DataFrame:
         logger.exception(f"Unexpected error while loading {filename}")
         raise DataLoadError("Unexpected error while loading data") from e
 
+    logger.debug(f"File loaded: {filename}")
     return df
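With the new `pg_rad.exceptions` package and the reworked loader above, calling code can tell parse failures apart from other load failures. A minimal sketch of the caller side (the CSV file name is a placeholder; `load_data`, `DataLoadError`, and `InvalidCSVError` are taken from the diffs in this change):

```python
from pg_rad.dataloader import load_data
from pg_rad.exceptions import DataLoadError, InvalidCSVError

try:
    # load_data reads a comma-delimited file and returns a pandas DataFrame
    df = load_data("measurements.csv")  # hypothetical file name
except InvalidCSVError:
    # raised when the file cannot be read as valid CSV (per the exception's name)
    print("Input is not a valid CSV file.")
except DataLoadError:
    # raised for any other failure while loading, wrapped and re-raised by load_data
    print("Loading the data failed.")
else:
    print(df.head())
```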
src/pg_rad/exceptions/__init__.py (10 lines, new file)
@@ -0,0 +1,10 @@
+# do not expose internal logger when running mkinit
+__ignore__ = ["logger"]
+
+from pg_rad.exceptions import exceptions
+
+from pg_rad.exceptions.exceptions import (ConvergenceError, DataLoadError,
+                                          InvalidCSVError,)
+
+__all__ = ['ConvergenceError', 'DataLoadError', 'InvalidCSVError',
+           'exceptions']
src/pg_rad/isotopes/__init__.py (8 lines, new file)
@@ -0,0 +1,8 @@
+# do not expose internal logger when running mkinit
+__ignore__ = ["logger"]
+
+from pg_rad.isotopes import isotope
+
+from pg_rad.isotopes.isotope import (Isotope,)
+
+__all__ = ['Isotope', 'isotope']
src/pg_rad/landscape/__init__.py (8 lines, new file)
@@ -0,0 +1,8 @@
+# do not expose internal logger when running mkinit
+__ignore__ = ["logger"]
+
+from pg_rad.landscape import landscape
+
+from pg_rad.landscape.landscape import (Landscape, create_landscape_from_path,)
+
+__all__ = ['Landscape', 'create_landscape_from_path', 'landscape']
@@ -1,9 +1,13 @@
+import logging
+
 from matplotlib import pyplot as plt
 from matplotlib.patches import Circle
 import numpy as np
 
 from pg_rad.path import Path
-from pg_rad.sources import PointSource
+from pg_rad.objects import PointSource
 
+logger = logging.getLogger(__name__)
+
 class Landscape:
     """A generic Landscape that can contain a Path and sources.
@@ -31,6 +35,7 @@ class Landscape:
 
         self.path: Path = None
         self.sources: list[PointSource] = []
+        logger.debug("Landscape initialized.")
 
     def plot(self, z = 0):
         """Plot a slice of the world at a height `z`.
@@ -1,17 +0,0 @@
-import logging
-import logging.config
-import pathlib
-
-import yaml
-
-def setup_logger(name):
-    logger = logging.getLogger(name)
-
-    base_dir = pathlib.Path(__file__).resolve().parent
-    config_file = base_dir / "configs" / "logging.yml"
-
-    with open(config_file) as f:
-        config = yaml.safe_load(f)
-
-    logging.config.dictConfig(config)
-    return logger
src/pg_rad/logging/__init__.py (5 lines, new file)
@@ -0,0 +1,5 @@
+from pg_rad.logging import logger
+
+from pg_rad.logging.logger import (setup_logger,)
+
+__all__ = ['logger', 'setup_logger']
src/pg_rad/logging/logger.py (20 lines, new file)
@@ -0,0 +1,20 @@
+import logging
+import pathlib
+
+import yaml
+
+def setup_logger(log_level: str = "WARNING"):
+    levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
+
+    if not log_level in levels:
+        raise ValueError(f"Log level must be one of {levels}.")
+
+    base_dir = pathlib.Path(__file__).resolve().parent
+    config_file = base_dir / "configs" / "logging.yml"
+
+    with open(config_file) as f:
+        config = yaml.safe_load(f)
+
+    config["loggers"]["root"]["level"] = log_level
+
+    logging.config.dictConfig(config)
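The logging rework above replaces the old per-module `setup_logger(__name__)` helper with plain `logging.getLogger(__name__)` calls inside the library, and concentrates configuration in this new opt-in `setup_logger()`. A minimal sketch of how an application script might use it, assuming the bundled `configs/logging.yml` routes the library's loggers to a handler (the script itself is hypothetical; the `setup_logger` signature comes from the new `logger.py` above):

```python
import logging

from pg_rad.logging import setup_logger

# One-time, opt-in configuration at application start-up.
# Valid levels per the new logger.py: DEBUG, INFO, WARNING, ERROR, CRITICAL;
# anything else raises ValueError. The default is WARNING.
setup_logger(log_level="DEBUG")

# After configuration, the library's module-level loggers (created with
# logging.getLogger(__name__)) can emit the debug messages added in this
# change, e.g. "Path created." and "Landscape initialized."
logger = logging.getLogger(__name__)
logger.info("pg_rad logging configured.")
```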
src/pg_rad/objects/__init__.py (13 lines, new file)
@@ -0,0 +1,13 @@
+# do not expose internal logger when running mkinit
+__ignore__ = ["logger"]
+
+from pg_rad.objects import detectors
+from pg_rad.objects import objects
+from pg_rad.objects import sources
+
+from pg_rad.objects.detectors import (Detector,)
+from pg_rad.objects.objects import (Object,)
+from pg_rad.objects.sources import (PointSource,)
+
+__all__ = ['Detector', 'Object', 'PointSource', 'detectors', 'objects',
+           'sources']
@@ -1,7 +1,7 @@
 import math
 from typing import Self
 
-class Object:
+class BaseObject:
     def __init__(
         self,
         x: float,
@@ -1,7 +1,11 @@
-from pg_rad.objects import Object
-from pg_rad.isotope import Isotope
+import logging
 
-class PointSource(Object):
+from .objects import BaseObject
+from pg_rad.isotopes import Isotope
+
+logger = logging.getLogger(__name__)
+
+class PointSource(BaseObject):
     _id_counter = 1
     def __init__(
         self,
@@ -39,5 +43,7 @@ class PointSource(Object):
         self.isotope = isotope
         self.color = color
 
+        logger.debug(f"Source created: {self.name}")
+
     def __repr__(self):
         return f"PointSource(name={self.name}, pos={(self.x, self.y, self.z)}, isotope={self.isotope.name}, A={self.activity} MBq)"
src/pg_rad/path/__init__.py (9 lines, new file)
@@ -0,0 +1,9 @@
+# do not expose internal logger when running mkinit
+__ignore__ = ["logger"]
+
+from pg_rad.path import path
+
+from pg_rad.path.path import (Path, PathSegment, path_from_RT90,
+                              simplify_path,)
+
+__all__ = ['Path', 'PathSegment', 'path', 'path_from_RT90', 'simplify_path']
@@ -1,4 +1,5 @@
 from collections.abc import Sequence
+import logging
 import math
 
 from matplotlib import pyplot as plt
@@ -7,9 +8,8 @@ import pandas as pd
 import piecewise_regression
 
 from pg_rad.exceptions import ConvergenceError
-from pg_rad.logger import setup_logger
 
-logger = setup_logger(__name__)
+logger = logging.getLogger(__name__)
 
 class PathSegment:
     def __init__(self, a: tuple[float, float], b: tuple[float, float]):
@@ -73,6 +73,8 @@ class Path:
 
         self.z = z
 
+        logger.debug("Path created.")
+
     def get_length(self) -> float:
         return sum([s.length for s in self.segments])
 
@@ -136,7 +138,7 @@ def simplify_path(
     pw_res = pw_fit.get_results()
 
     if pw_res == None:
-        logger.error("Piecewise regression failed to converge.")
+        logger.warning("Piecewise regression failed to converge.")
         raise ConvergenceError("Piecewise regression failed to converge.")
 
     est = pw_res['estimates']
@@ -184,4 +186,5 @@ def path_from_RT90(
     coord_pairs = list(zip(east_arr, north_arr))
 
     path = Path(coord_pairs, **kwargs)
+    logger.debug("Loaded path from provided RT90 coordinates.")
     return path
@@ -1,28 +1,27 @@
 import numpy as np
 import pytest
 
-from pg_rad.objects import Source
+from pg_rad.sources import PointSource
 
 @pytest.fixture
 def test_sources():
     pos_a = np.random.rand(3)
     pos_b = np.random.rand(3)
 
-    a = Source(*tuple(pos_a), strength = None)
-    b = Source(*tuple(pos_b), strength = None)
+    a = PointSource(*tuple(pos_a), strength = None)
+    b = PointSource(*tuple(pos_b), strength = None)
 
     return pos_a, pos_b, a, b
 
 def test_if_distances_equal(test_sources):
-    """_Verify whether from object A to object B is the same as B to A._"""
+    """Verify whether from PointSource A to PointSource B is the same as B to A."""
 
     _, _, a, b = test_sources
 
     assert a.distance_to(b) == b.distance_to(a)
 
 def test_distance_calculation(test_sources):
-    """_Verify whether distance between two static objects (e.g. sources)
-    is calculated correctly._"""
+    """Verify whether distance between two PointSources is calculated correctly."""
 
     pos_a, pos_b, a, b = test_sources
 