mirror of https://github.com/macaodha/batdetect2.git
synced 2025-06-29 22:51:58 +02:00

Decouple config loading from preprocess function

parent 17cf958cd3
commit 335a05d51a
@@ -32,7 +32,7 @@ results will be combined into a dictionary with the following keys:
       for each detection. The CNN features are the output of the CNN before
       the final classification layer. You can use these features to train
       your own classifier, or to do other processing on the detections.
-      They are in the same order as the detections in
+      They are in the same order as the detections in
       `results['pred_dict']['annotation']`. Will only be returned if the
       `cnn_feats` parameter in the config is set to `True`.
     - `spec_slices`: Optional. A list of `numpy` arrays containing the spectrogram
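As an aside, the docstring above suggests pairing the returned CNN features with the predicted annotations to train a downstream classifier. A minimal sketch of that idea; the helper names (`api.get_config`, `api.process_file`), the config flag, and the exact result layout are assumptions drawn from the docstring, not verified against this commit:

    import numpy as np

    from batdetect2 import api

    # Hedged sketch: request CNN features alongside the detections.
    config = api.get_config(cnn_feats=True)
    results = api.process_file("example_recording.wav", config=config)  # placeholder path

    # Features are returned in the same order as the detections, so they can be
    # paired with the predicted annotations and fed to your own classifier.
    annotations = results["pred_dict"]["annotation"]
    features = results["cnn_feats"]
    for ann, feat in zip(annotations, features):
        print(ann.get("class"), np.asarray(feat).shape)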
@@ -96,6 +96,7 @@ If you wish to use a custom model or change the default parameters, please
 consult the API documentation in the code.

 """

 import warnings
 from typing import List, Optional, Tuple

@@ -410,7 +411,9 @@ def print_summary(results: RunResults) -> None:
         Detection result.
     """
     print("Results for " + results["pred_dict"]["id"])
-    print("{} calls detected\n".format(len(results["pred_dict"]["annotation"])))
+    print(
+        "{} calls detected\n".format(len(results["pred_dict"]["annotation"]))
+    )

     print("time\tprob\tlfreq\tspecies_name")
     for ann in results["pred_dict"]["annotation"]:
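For context, a minimal usage sketch of this helper, assuming the module-level `api.process_file` entry point from the same file (the audio path is a placeholder):

    from batdetect2 import api

    # Run detection on a single recording and print the summary table shown above.
    results = api.process_file("example_recording.wav")
    api.print_summary(results)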
@@ -65,7 +65,6 @@ def generate_heatmaps(

     # Get the position of the sound event
     time, frequency = geometry.get_geometry_point(geom, position=position)
-    print(time, frequency)

     # Set 1.0 at the position of the sound event in the detection heatmap
     detection_heatmap = arrays.set_value_at_pos(
@@ -116,6 +116,9 @@ def preprocess_single_annotation(
     if path.is_file() and not replace:
         return

+    if path.is_file() and replace:
+        path.unlink()
+
     sample = generate_train_example(
         clip_annotation,
         class_mapper,
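The added branch means an existing training example is deleted and regenerated when `replace` is set, rather than silently skipped. A standalone sketch of the same skip-or-replace pattern; the names below are illustrative, not from the codebase:

    from pathlib import Path

    def write_output(path: Path, payload: bytes, replace: bool = False) -> None:
        # Keep the existing file and do no work unless asked to replace it.
        if path.is_file() and not replace:
            return

        # Remove the stale file so the new output is written from scratch.
        if path.is_file() and replace:
            path.unlink()

        path.write_bytes(payload)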
@@ -133,21 +136,18 @@ def preprocess_annotations(
     target_sigma: float = TARGET_SIGMA,
     filename_fn: FilenameFn = _get_filename,
     replace: bool = False,
-    config_file: Optional[PathLike] = None,
+    config: Optional[PreprocessingConfig] = None,
     max_workers: Optional[int] = None,
-    **kwargs,
 ) -> None:
     """Preprocess annotations and save to disk."""
     output_dir = Path(output_dir)

+    if config is None:
+        config = PreprocessingConfig()
+
     if not output_dir.is_dir():
         output_dir.mkdir(parents=True)

-    if config_file is not None:
-        config = load_config(config_file, **kwargs)
-    else:
-        config = PreprocessingConfig(**kwargs)
-
     with Pool(max_workers) as pool:
         list(
             tqdm(
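With config loading decoupled, the caller builds (or loads) the `PreprocessingConfig` itself and hands the finished object to `preprocess_annotations`, which only falls back to the defaults when no config is given. A hedged calling sketch; the import path, the first positional argument, and the config construction are assumptions, not taken from this diff:

    from batdetect2.train.preprocess import (  # assumed module path
        PreprocessingConfig,
        preprocess_annotations,
    )

    # clip_annotations stands in for the annotations to convert into training
    # examples; building that list is out of scope here.
    config = PreprocessingConfig()  # or load it from a file yourself, up front

    preprocess_annotations(
        clip_annotations,
        output_dir="train_data",
        config=config,      # pass the finished object; no config_file or **kwargs
        replace=True,       # regenerate examples that already exist on disk
        max_workers=4,
    )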