From 6ed54a5fdf950210aea81f7351f65f644254f11d Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Fri, 31 May 2024 20:47:10 +0200
Subject: [PATCH 01/20] Re-add Processed folder
---
scripts/birdnet_analysis.py | 42 +++++++++++++++++++------------------
1 file changed, 22 insertions(+), 20 deletions(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index e34aaf116..232637f1c 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -5,6 +5,7 @@
import signal
import sys
import threading
+import shutil
from queue import Queue
from subprocess import CalledProcessError
@@ -20,13 +21,11 @@
log = logging.getLogger(__name__)
-
def sig_handler(sig_num, curr_stack_frame):
global shutdown
log.info('Caught shutdown signal %d', sig_num)
shutdown = True
-
def main():
write_settings()
load_global_model()
@@ -37,12 +36,12 @@ def main():
backlog = get_wav_files()
report_queue = Queue()
- thread = threading.Thread(target=handle_reporting_queue, args=(report_queue, ))
+ thread = threading.Thread(target=handle_reporting_queue, args=(report_queue, conf))
thread.start()
log.info('backlog is %d', len(backlog))
for file_name in backlog:
- process_file(file_name, report_queue)
+ process_file(file_name, report_queue, conf)
if shutdown:
break
log.info('backlog done')
@@ -66,21 +65,17 @@ def main():
file_path = os.path.join(path, file_name)
if file_path in backlog:
- # if we're very lucky, the first event could be for the file in the backlog that finished
- # while running get_wav_files()
backlog = []
continue
- process_file(file_path, report_queue)
+ process_file(file_path, report_queue, conf)
empty_count = 0
- # we're all done
report_queue.put(None)
thread.join()
report_queue.join()
-
-def process_file(file_name, report_queue):
+def process_file(file_name, report_queue, conf):
try:
if os.path.getsize(file_name) == 0:
os.remove(file_name)
@@ -90,20 +85,14 @@ def process_file(file_name, report_queue):
analyzing.write(file_name)
file = ParseFileName(file_name)
detections = run_analysis(file)
- # we join() to make sure te reporting queue does not get behind
- if not report_queue.empty():
- log.warning('reporting queue not yet empty')
- report_queue.join()
report_queue.put((file, detections))
except BaseException as e:
stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
log.exception(f'Unexpected error: {stderr}', exc_info=e)
-
-def handle_reporting_queue(queue):
+def handle_reporting_queue(queue, conf):
while True:
msg = queue.get()
- # check for signal that we are done
if msg is None:
break
@@ -118,17 +107,31 @@ def handle_reporting_queue(queue):
apprise(file, detections)
bird_weather(file, detections)
heartbeat()
- os.remove(file.file_name)
+
+ # Move the file to the 'Processed' folder
+ processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
+ if not os.path.exists(processed_dir):
+ os.makedirs(processed_dir)
+ shutil.move(file.file_name, processed_dir)
+
+ # Maintain the file count in the 'Processed' folder
+ maintain_file_count(processed_dir)
+
except BaseException as e:
stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
log.exception(f'Unexpected error: {stderr}', exc_info=e)
queue.task_done()
- # mark the 'None' signal as processed
queue.task_done()
log.info('handle_reporting_queue done')
+def maintain_file_count(directory, max_files=15):
+ files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
+ files.sort(key=lambda x: os.path.getmtime(x))
+
+ while len(files) > max_files:
+ os.remove(files.pop(0))
def setup_logging():
logger = logging.getLogger()
@@ -140,7 +143,6 @@ def setup_logging():
global log
log = logging.getLogger('birdnet_analysis')
-
if __name__ == '__main__':
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
From 6afc8c724057f9906aa28b4eecb2a7e4ef665e7c Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Fri, 31 May 2024 21:29:05 +0200
Subject: [PATCH 02/20] Update birdnet_analysis.py
---
scripts/birdnet_analysis.py | 225 ++++++++++++------------------------
1 file changed, 76 insertions(+), 149 deletions(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index 232637f1c..e3961e994 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -1,152 +1,79 @@
-import logging
import os
-import os.path
-import re
-import signal
import sys
-import threading
-import shutil
-from queue import Queue
-from subprocess import CalledProcessError
-import inotify.adapters
-from inotify.constants import IN_CLOSE_WRITE
-
-from server import load_global_model, run_analysis
-from utils.helpers import get_settings, ParseFileName, get_wav_files, write_settings, ANALYZING_NOW
-from utils.reporting import extract_detection, summary, write_to_file, write_to_db, apprise, bird_weather, heartbeat, \
- update_json_file
-
-shutdown = False
-
-log = logging.getLogger(__name__)
-
-def sig_handler(sig_num, curr_stack_frame):
- global shutdown
- log.info('Caught shutdown signal %d', sig_num)
- shutdown = True
-
-def main():
- write_settings()
- load_global_model()
- conf = get_settings()
- i = inotify.adapters.Inotify()
- i.add_watch(os.path.join(conf['RECS_DIR'], 'StreamData'), mask=IN_CLOSE_WRITE)
-
- backlog = get_wav_files()
-
- report_queue = Queue()
- thread = threading.Thread(target=handle_reporting_queue, args=(report_queue, conf))
- thread.start()
-
- log.info('backlog is %d', len(backlog))
- for file_name in backlog:
- process_file(file_name, report_queue, conf)
- if shutdown:
- break
- log.info('backlog done')
-
- empty_count = 0
- for event in i.event_gen():
- if shutdown:
- break
-
- if event is None:
- if empty_count > (conf.getint('RECORDING_LENGTH') * 2):
- log.error('no more notifications: restarting...')
- break
- empty_count += 1
- continue
-
- (_, type_names, path, file_name) = event
- if re.search('.wav$', file_name) is None:
- continue
- log.debug("PATH=[%s] FILENAME=[%s] EVENT_TYPES=%s", path, file_name, type_names)
-
- file_path = os.path.join(path, file_name)
- if file_path in backlog:
- backlog = []
- continue
-
- process_file(file_path, report_queue, conf)
- empty_count = 0
-
- report_queue.put(None)
- thread.join()
- report_queue.join()
-
-def process_file(file_name, report_queue, conf):
- try:
- if os.path.getsize(file_name) == 0:
- os.remove(file_name)
- return
- log.info('Analyzing %s', file_name)
- with open(ANALYZING_NOW, 'w') as analyzing:
- analyzing.write(file_name)
- file = ParseFileName(file_name)
- detections = run_analysis(file)
- report_queue.put((file, detections))
- except BaseException as e:
- stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
- log.exception(f'Unexpected error: {stderr}', exc_info=e)
-
-def handle_reporting_queue(queue, conf):
- while True:
- msg = queue.get()
- if msg is None:
- break
-
- file, detections = msg
- try:
- update_json_file(file, detections)
- for detection in detections:
- detection.file_name_extr = extract_detection(file, detection)
- log.info('%s;%s', summary(file, detection), os.path.basename(detection.file_name_extr))
- write_to_file(file, detection)
- write_to_db(file, detection)
- apprise(file, detections)
- bird_weather(file, detections)
- heartbeat()
-
- # Move the file to the 'Processed' folder
- processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
- if not os.path.exists(processed_dir):
- os.makedirs(processed_dir)
- shutil.move(file.file_name, processed_dir)
-
- # Maintain the file count in the 'Processed' folder
- maintain_file_count(processed_dir)
-
- except BaseException as e:
- stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
- log.exception(f'Unexpected error: {stderr}', exc_info=e)
-
- queue.task_done()
-
- queue.task_done()
- log.info('handle_reporting_queue done')
-
-def maintain_file_count(directory, max_files=15):
- files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
- files.sort(key=lambda x: os.path.getmtime(x))
-
- while len(files) > max_files:
- os.remove(files.pop(0))
-
-def setup_logging():
- logger = logging.getLogger()
- formatter = logging.Formatter("[%(name)s][%(levelname)s] %(message)s")
- handler = logging.StreamHandler(stream=sys.stdout)
- handler.setFormatter(formatter)
- logger.addHandler(handler)
- logger.setLevel(logging.INFO)
- global log
- log = logging.getLogger('birdnet_analysis')
-
-if __name__ == '__main__':
- signal.signal(signal.SIGINT, sig_handler)
- signal.signal(signal.SIGTERM, sig_handler)
-
- setup_logging()
-
- main()
+def read_file(file_path):
+ with open(file_path, 'r') as file:
+ return file.readlines()
+
+def write_file(file_path, content):
+ with open(file_path, 'w') as file:
+ file.writelines(content)
+
+def process_code(lines, max_files):
+ processed_lines = []
+ for line in lines:
+ # Add the import statement for shutil if it's not already there
+ if 'import os.path' in line and 'import shutil' not in lines:
+ processed_lines.append('import shutil\n')
+ processed_lines.append(line)
+
+ # Modify the process_file function to include the conf parameter
+ if 'def process_file(file_name, report_queue):' in line:
+ processed_lines[-1] = 'def process_file(file_name, report_queue, conf):\n'
+
+ # Modify the handle_reporting_queue function to include the conf parameter and the new logic
+ if 'def handle_reporting_queue(queue):' in line:
+ processed_lines[-1] = 'def handle_reporting_queue(queue, conf):\n'
+ if 'os.remove(file.file_name)' in line:
+ indent = ' ' * (len(line) - len(line.lstrip()))
+ processed_lines.append(f"{indent}# Move the file to the 'Processed' folder\n")
+ processed_lines.append(f"{indent}processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')\n")
+ processed_lines.append(f"{indent}if not os.path.exists(processed_dir):\n")
+ processed_lines.append(f"{indent} os.makedirs(processed_dir)\n")
+ processed_lines.append(f"{indent}shutil.move(file.file_name, processed_dir)\n")
+ processed_lines.append(f"{indent}\n")
+ processed_lines.append(f"{indent}# Maintain the file count in the 'Processed' folder\n")
+ processed_lines.append(f"{indent}maintain_file_count(processed_dir, max_files)\n")
+ processed_lines.append(f"{indent}\n")
+ continue # Skip the original line that removes the file
+
+ # Add the new maintain_file_count function at the end of the file
+ if 'if __name__' in line and not any('def maintain_file_count' in l for l in processed_lines):
+ processed_lines.append('\n')
+ processed_lines.append('def maintain_file_count(directory, max_files):\n')
+ processed_lines.append(' files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith(\'.wav\')]\n')
+ processed_lines.append(' files.sort(key=lambda x: os.path.getmtime(x))\n')
+ processed_lines.append('\n')
+ processed_lines.append(' while len(files) > max_files:\n')
+ processed_lines.append(' os.remove(files.pop(0))\n')
+ processed_lines.append('\n')
+
+ return processed_lines
+
+# Hardcoded path to the birdnet_analysis.py file
+file_path = os.path.expanduser('~/BirdNET-Pi/scripts/birdnet_analysis.py')
+
+# Get the maximum number of processed files from the environment variable
+max_files = int(os.environ.get('Processed_Files', '15'))
+
+# If the environment variable is set to 0, exit the script
+if max_files == 0:
+ print("The 'Processed_Files' environment variable is set to 0. Exiting without making changes.")
+ sys.exit(0)
+
+# Read the original code
+original_lines = read_file(file_path)
+
+# Process the code
+modified_lines = process_code(original_lines, max_files)
+
+# Write the modified code back to the same file
+write_file(file_path, modified_lines)
+
+# Ensure the 'Processed' directory exists and is owned by the current user
+processed_dir = os.path.join(os.path.dirname(file_path), 'Processed')
+if not os.path.exists(processed_dir):
+ os.makedirs(processed_dir)
+os.chown(processed_dir, os.getuid(), os.getgid())
+
+print(f"The code has been modified and saved to {file_path}")
From c6477d988e634cc8936b8b12112364b6d6f244e4 Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sat, 1 Jun 2024 10:02:15 +0200
Subject: [PATCH 03/20] Update birdnet_analysis.py
---
scripts/birdnet_analysis.py | 225 ++++++++++++++++++++++++------------
1 file changed, 149 insertions(+), 76 deletions(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index e3961e994..232637f1c 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -1,79 +1,152 @@
+import logging
import os
+import os.path
+import re
+import signal
import sys
+import threading
+import shutil
+from queue import Queue
+from subprocess import CalledProcessError
-def read_file(file_path):
- with open(file_path, 'r') as file:
- return file.readlines()
-
-def write_file(file_path, content):
- with open(file_path, 'w') as file:
- file.writelines(content)
-
-def process_code(lines, max_files):
- processed_lines = []
- for line in lines:
- # Add the import statement for shutil if it's not already there
- if 'import os.path' in line and 'import shutil' not in lines:
- processed_lines.append('import shutil\n')
- processed_lines.append(line)
-
- # Modify the process_file function to include the conf parameter
- if 'def process_file(file_name, report_queue):' in line:
- processed_lines[-1] = 'def process_file(file_name, report_queue, conf):\n'
-
- # Modify the handle_reporting_queue function to include the conf parameter and the new logic
- if 'def handle_reporting_queue(queue):' in line:
- processed_lines[-1] = 'def handle_reporting_queue(queue, conf):\n'
- if 'os.remove(file.file_name)' in line:
- indent = ' ' * (len(line) - len(line.lstrip()))
- processed_lines.append(f"{indent}# Move the file to the 'Processed' folder\n")
- processed_lines.append(f"{indent}processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')\n")
- processed_lines.append(f"{indent}if not os.path.exists(processed_dir):\n")
- processed_lines.append(f"{indent} os.makedirs(processed_dir)\n")
- processed_lines.append(f"{indent}shutil.move(file.file_name, processed_dir)\n")
- processed_lines.append(f"{indent}\n")
- processed_lines.append(f"{indent}# Maintain the file count in the 'Processed' folder\n")
- processed_lines.append(f"{indent}maintain_file_count(processed_dir, max_files)\n")
- processed_lines.append(f"{indent}\n")
- continue # Skip the original line that removes the file
-
- # Add the new maintain_file_count function at the end of the file
- if 'if __name__' in line and not any('def maintain_file_count' in l for l in processed_lines):
- processed_lines.append('\n')
- processed_lines.append('def maintain_file_count(directory, max_files):\n')
- processed_lines.append(' files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith(\'.wav\')]\n')
- processed_lines.append(' files.sort(key=lambda x: os.path.getmtime(x))\n')
- processed_lines.append('\n')
- processed_lines.append(' while len(files) > max_files:\n')
- processed_lines.append(' os.remove(files.pop(0))\n')
- processed_lines.append('\n')
-
- return processed_lines
-
-# Hardcoded path to the birdnet_analysis.py file
-file_path = os.path.expanduser('~/BirdNET-Pi/scripts/birdnet_analysis.py')
-
-# Get the maximum number of processed files from the environment variable
-max_files = int(os.environ.get('Processed_Files', '15'))
-
-# If the environment variable is set to 0, exit the script
-if max_files == 0:
- print("The 'Processed_Files' environment variable is set to 0. Exiting without making changes.")
- sys.exit(0)
-
-# Read the original code
-original_lines = read_file(file_path)
-
-# Process the code
-modified_lines = process_code(original_lines, max_files)
-
-# Write the modified code back to the same file
-write_file(file_path, modified_lines)
-
-# Ensure the 'Processed' directory exists and is owned by the current user
-processed_dir = os.path.join(os.path.dirname(file_path), 'Processed')
-if not os.path.exists(processed_dir):
- os.makedirs(processed_dir)
-os.chown(processed_dir, os.getuid(), os.getgid())
-
-print(f"The code has been modified and saved to {file_path}")
+import inotify.adapters
+from inotify.constants import IN_CLOSE_WRITE
+
+from server import load_global_model, run_analysis
+from utils.helpers import get_settings, ParseFileName, get_wav_files, write_settings, ANALYZING_NOW
+from utils.reporting import extract_detection, summary, write_to_file, write_to_db, apprise, bird_weather, heartbeat, \
+ update_json_file
+
+shutdown = False
+
+log = logging.getLogger(__name__)
+
+def sig_handler(sig_num, curr_stack_frame):
+ global shutdown
+ log.info('Caught shutdown signal %d', sig_num)
+ shutdown = True
+
+def main():
+ write_settings()
+ load_global_model()
+ conf = get_settings()
+ i = inotify.adapters.Inotify()
+ i.add_watch(os.path.join(conf['RECS_DIR'], 'StreamData'), mask=IN_CLOSE_WRITE)
+
+ backlog = get_wav_files()
+
+ report_queue = Queue()
+ thread = threading.Thread(target=handle_reporting_queue, args=(report_queue, conf))
+ thread.start()
+
+ log.info('backlog is %d', len(backlog))
+ for file_name in backlog:
+ process_file(file_name, report_queue, conf)
+ if shutdown:
+ break
+ log.info('backlog done')
+
+ empty_count = 0
+ for event in i.event_gen():
+ if shutdown:
+ break
+
+ if event is None:
+ if empty_count > (conf.getint('RECORDING_LENGTH') * 2):
+ log.error('no more notifications: restarting...')
+ break
+ empty_count += 1
+ continue
+
+ (_, type_names, path, file_name) = event
+ if re.search('.wav$', file_name) is None:
+ continue
+ log.debug("PATH=[%s] FILENAME=[%s] EVENT_TYPES=%s", path, file_name, type_names)
+
+ file_path = os.path.join(path, file_name)
+ if file_path in backlog:
+ backlog = []
+ continue
+
+ process_file(file_path, report_queue, conf)
+ empty_count = 0
+
+ report_queue.put(None)
+ thread.join()
+ report_queue.join()
+
+def process_file(file_name, report_queue, conf):
+ try:
+ if os.path.getsize(file_name) == 0:
+ os.remove(file_name)
+ return
+ log.info('Analyzing %s', file_name)
+ with open(ANALYZING_NOW, 'w') as analyzing:
+ analyzing.write(file_name)
+ file = ParseFileName(file_name)
+ detections = run_analysis(file)
+ report_queue.put((file, detections))
+ except BaseException as e:
+ stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
+ log.exception(f'Unexpected error: {stderr}', exc_info=e)
+
+def handle_reporting_queue(queue, conf):
+ while True:
+ msg = queue.get()
+ if msg is None:
+ break
+
+ file, detections = msg
+ try:
+ update_json_file(file, detections)
+ for detection in detections:
+ detection.file_name_extr = extract_detection(file, detection)
+ log.info('%s;%s', summary(file, detection), os.path.basename(detection.file_name_extr))
+ write_to_file(file, detection)
+ write_to_db(file, detection)
+ apprise(file, detections)
+ bird_weather(file, detections)
+ heartbeat()
+
+ # Move the file to the 'Processed' folder
+ processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
+ if not os.path.exists(processed_dir):
+ os.makedirs(processed_dir)
+ shutil.move(file.file_name, processed_dir)
+
+ # Maintain the file count in the 'Processed' folder
+ maintain_file_count(processed_dir)
+
+ except BaseException as e:
+ stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
+ log.exception(f'Unexpected error: {stderr}', exc_info=e)
+
+ queue.task_done()
+
+ queue.task_done()
+ log.info('handle_reporting_queue done')
+
+def maintain_file_count(directory, max_files=15):
+ files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
+ files.sort(key=lambda x: os.path.getmtime(x))
+
+ while len(files) > max_files:
+ os.remove(files.pop(0))
+
+def setup_logging():
+ logger = logging.getLogger()
+ formatter = logging.Formatter("[%(name)s][%(levelname)s] %(message)s")
+ handler = logging.StreamHandler(stream=sys.stdout)
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+ logger.setLevel(logging.INFO)
+ global log
+ log = logging.getLogger('birdnet_analysis')
+
+if __name__ == '__main__':
+ signal.signal(signal.SIGINT, sig_handler)
+ signal.signal(signal.SIGTERM, sig_handler)
+
+ setup_logging()
+
+ main()
From 08214c471a1c86ed6f932604b7e68695bbc06450 Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sat, 1 Jun 2024 10:05:26 +0200
Subject: [PATCH 04/20] Update birdnet_analysis.py
---
scripts/birdnet_analysis.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index 232637f1c..8f84dfdea 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -112,6 +112,7 @@ def handle_reporting_queue(queue, conf):
processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
if not os.path.exists(processed_dir):
os.makedirs(processed_dir)
+ os.chown(processed_dir, os.getuid(), os.getgid())
shutil.move(file.file_name, processed_dir)
# Maintain the file count in the 'Processed' folder
@@ -126,7 +127,9 @@ def handle_reporting_queue(queue, conf):
queue.task_done()
log.info('handle_reporting_queue done')
-def maintain_file_count(directory, max_files=15):
+def maintain_file_count(directory, max_files=None):
+ if max_files is None:
+ max_files = int(os.getenv('Processed_Buffer', '0'))
files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
files.sort(key=lambda x: os.path.getmtime(x))
From 24cfa7aeea9489e3a54776ab8ce1e824a961982e Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sat, 1 Jun 2024 10:15:50 +0200
Subject: [PATCH 05/20] Set default to 15
---
scripts/birdnet_analysis.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index 8f84dfdea..0c6d88726 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -129,7 +129,7 @@ def handle_reporting_queue(queue, conf):
def maintain_file_count(directory, max_files=None):
if max_files is None:
- max_files = int(os.getenv('Processed_Buffer', '0'))
+ max_files = int(os.getenv('Processed_Buffer', '15'))
files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
files.sort(key=lambda x: os.path.getmtime(x))
From ef22d8fd417d6f4475fbf4c8ebbc863681b49986 Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sat, 1 Jun 2024 11:47:01 +0200
Subject: [PATCH 06/20] Update birdnet_analysis.py
---
scripts/birdnet_analysis.py | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index 0c6d88726..232637f1c 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -112,7 +112,6 @@ def handle_reporting_queue(queue, conf):
processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
if not os.path.exists(processed_dir):
os.makedirs(processed_dir)
- os.chown(processed_dir, os.getuid(), os.getgid())
shutil.move(file.file_name, processed_dir)
# Maintain the file count in the 'Processed' folder
@@ -127,9 +126,7 @@ def handle_reporting_queue(queue, conf):
queue.task_done()
log.info('handle_reporting_queue done')
-def maintain_file_count(directory, max_files=None):
- if max_files is None:
- max_files = int(os.getenv('Processed_Buffer', '15'))
+def maintain_file_count(directory, max_files=15):
files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
files.sort(key=lambda x: os.path.getmtime(x))
From f3bd34918e4b5205b2e8eb0bf27ac27c726f89ce Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sat, 1 Jun 2024 20:45:07 +0200
Subject: [PATCH 07/20] Update birdnet_analysis.py
---
scripts/birdnet_analysis.py | 53 +++++++++++++++++++++----------------
1 file changed, 30 insertions(+), 23 deletions(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index 232637f1c..a7aefe51c 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -5,15 +5,17 @@
import signal
import sys
import threading
-import shutil
from queue import Queue
from subprocess import CalledProcessError
+import glob
+import time
+import pwd
import inotify.adapters
from inotify.constants import IN_CLOSE_WRITE
from server import load_global_model, run_analysis
-from utils.helpers import get_settings, ParseFileName, get_wav_files, write_settings, ANALYZING_NOW
+from utils.helpers import get_settings, ParseFileName, get_wav_files, ANALYZING_NOW
from utils.reporting import extract_detection, summary, write_to_file, write_to_db, apprise, bird_weather, heartbeat, \
update_json_file
@@ -27,7 +29,6 @@ def sig_handler(sig_num, curr_stack_frame):
shutdown = True
def main():
- write_settings()
load_global_model()
conf = get_settings()
i = inotify.adapters.Inotify()
@@ -36,12 +37,12 @@ def main():
backlog = get_wav_files()
report_queue = Queue()
- thread = threading.Thread(target=handle_reporting_queue, args=(report_queue, conf))
+ thread = threading.Thread(target=handle_reporting_queue, args=(report_queue, ))
thread.start()
log.info('backlog is %d', len(backlog))
for file_name in backlog:
- process_file(file_name, report_queue, conf)
+ process_file(file_name, report_queue)
if shutdown:
break
log.info('backlog done')
@@ -65,17 +66,20 @@ def main():
file_path = os.path.join(path, file_name)
if file_path in backlog:
+ # if we're very lucky, the first event could be for the file in the backlog that finished
+ # while running get_wav_files()
backlog = []
continue
- process_file(file_path, report_queue, conf)
+ process_file(file_path, report_queue)
empty_count = 0
+ # we're all done
report_queue.put(None)
thread.join()
report_queue.join()
-def process_file(file_name, report_queue, conf):
+def process_file(file_name, report_queue):
try:
if os.path.getsize(file_name) == 0:
os.remove(file_name)
@@ -85,14 +89,19 @@ def process_file(file_name, report_queue, conf):
analyzing.write(file_name)
file = ParseFileName(file_name)
detections = run_analysis(file)
+ # we join() to make sure the reporting queue does not get behind
+ if not report_queue.empty():
+ log.warning('reporting queue not yet empty')
+ report_queue.join()
report_queue.put((file, detections))
except BaseException as e:
stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
log.exception(f'Unexpected error: {stderr}', exc_info=e)
-def handle_reporting_queue(queue, conf):
+def handle_reporting_queue(queue):
while True:
msg = queue.get()
+ # check for signal that we are done
if msg is None:
break
@@ -107,30 +116,28 @@ def handle_reporting_queue(queue, conf):
apprise(file, detections)
bird_weather(file, detections)
heartbeat()
-
- # Move the file to the 'Processed' folder
- processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
- if not os.path.exists(processed_dir):
- os.makedirs(processed_dir)
- shutil.move(file.file_name, processed_dir)
-
- # Maintain the file count in the 'Processed' folder
- maintain_file_count(processed_dir)
-
+ move_to_processed(file.file_name)
except BaseException as e:
stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
log.exception(f'Unexpected error: {stderr}', exc_info=e)
queue.task_done()
+ # mark the 'None' signal as processed
queue.task_done()
log.info('handle_reporting_queue done')
-def maintain_file_count(directory, max_files=15):
- files = [os.path.join(directory, f) for f in os.listdir(directory) if f.endswith('.wav')]
- files.sort(key=lambda x: os.path.getmtime(x))
-
- while len(files) > max_files:
+def move_to_processed(file_name):
+ conf = get_settings()
+ processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
+ os.makedirs(processed_dir, exist_ok=True)
+ user_id = pwd.getpwnam(os.getenv('USER')).pw_uid
+ os.chown(processed_dir, user_id, user_id)
+ os.rename(file_name, os.path.join(processed_dir, os.path.basename(file_name)))
+ files = glob.glob(os.path.join(processed_dir, '*'))
+ files.sort(key=os.path.getmtime)
+ buffer_size = int(os.getenv('Processed_Buffer', 30))
+ while len(files) > buffer_size:
os.remove(files.pop(0))
def setup_logging():
From ed61913fd5c24dd88470d581ff6d65a2fea87446 Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sun, 2 Jun 2024 09:14:02 +0200
Subject: [PATCH 08/20] Use the birdnet.conf PROCESSED_SIZE
---
scripts/birdnet_analysis.py | 23 +++++++++++++----------
1 file changed, 13 insertions(+), 10 deletions(-)
diff --git a/scripts/birdnet_analysis.py b/scripts/birdnet_analysis.py
index a7aefe51c..4547f6d40 100644
--- a/scripts/birdnet_analysis.py
+++ b/scripts/birdnet_analysis.py
@@ -9,7 +9,6 @@
from subprocess import CalledProcessError
import glob
import time
-import pwd
import inotify.adapters
from inotify.constants import IN_CLOSE_WRITE
@@ -23,11 +22,13 @@
log = logging.getLogger(__name__)
+
def sig_handler(sig_num, curr_stack_frame):
global shutdown
log.info('Caught shutdown signal %d', sig_num)
shutdown = True
+
def main():
load_global_model()
conf = get_settings()
@@ -79,6 +80,7 @@ def main():
thread.join()
report_queue.join()
+
def process_file(file_name, report_queue):
try:
if os.path.getsize(file_name) == 0:
@@ -98,6 +100,7 @@ def process_file(file_name, report_queue):
stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
log.exception(f'Unexpected error: {stderr}', exc_info=e)
+
def handle_reporting_queue(queue):
while True:
msg = queue.get()
@@ -116,7 +119,11 @@ def handle_reporting_queue(queue):
apprise(file, detections)
bird_weather(file, detections)
heartbeat()
- move_to_processed(file.file_name)
+ processed_size = get_settings().getint('PROCESSED_SIZE')
+ if processed_size > 0:
+ move_to_processed(file.file_name, processed_size)
+ else:
+ os.remove(file.file_name)
except BaseException as e:
stderr = e.stderr.decode('utf-8') if isinstance(e, CalledProcessError) else ""
log.exception(f'Unexpected error: {stderr}', exc_info=e)
@@ -127,17 +134,12 @@ def handle_reporting_queue(queue):
queue.task_done()
log.info('handle_reporting_queue done')
-def move_to_processed(file_name):
- conf = get_settings()
- processed_dir = os.path.join(conf['RECS_DIR'], 'Processed')
- os.makedirs(processed_dir, exist_ok=True)
- user_id = pwd.getpwnam(os.getenv('USER')).pw_uid
- os.chown(processed_dir, user_id, user_id)
+def move_to_processed(file_name, processed_size):
+ processed_dir = os.path.join(get_settings()['RECS_DIR'], 'Processed')
os.rename(file_name, os.path.join(processed_dir, os.path.basename(file_name)))
files = glob.glob(os.path.join(processed_dir, '*'))
files.sort(key=os.path.getmtime)
- buffer_size = int(os.getenv('Processed_Buffer', 30))
- while len(files) > buffer_size:
+ while len(files) > processed_size:
os.remove(files.pop(0))
def setup_logging():
@@ -150,6 +152,7 @@ def setup_logging():
global log
log = logging.getLogger('birdnet_analysis')
+
if __name__ == '__main__':
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
From d1b5e3f31bd9047e03a5bd388b28f1b3ea76788a Mon Sep 17 00:00:00 2001
From: Alexandre <44178713+alexbelgium@users.noreply.github.com>
Date: Sun, 2 Jun 2024 09:39:30 +0200
Subject: [PATCH 09/20] Add PROCESSED_SIZE to config.php
---
scripts/config.php | 19 +++++++++++++------
1 file changed, 13 insertions(+), 6 deletions(-)
diff --git a/scripts/config.php b/scripts/config.php
index 6425d529d..0ea015ca2 100644
--- a/scripts/config.php
+++ b/scripts/config.php
@@ -63,6 +63,7 @@ function syslog_shell_exec($cmd, $sudo_user = null) {
$flickr_filter_email = $_GET["flickr_filter_email"];
$language = $_GET["language"];
$info_site = $_GET["info_site"];
+ $processed_size = $_GET["processed_size"];
$timezone = $_GET["timezone"];
$model = $_GET["model"];
$sf_thresh = $_GET["sf_thresh"];
@@ -156,6 +157,7 @@ function() {
$contents = preg_replace("/FLICKR_API_KEY=.*/", "FLICKR_API_KEY=$flickr_api_key", $contents);
$contents = preg_replace("/DATABASE_LANG=.*/", "DATABASE_LANG=$language", $contents);
$contents = preg_replace("/INFO_SITE=.*/", "INFO_SITE=$info_site", $contents);
+ $contents = preg_replace("/PROCESSED_SIZE=.*/", "PROCESSED_SIZE=$processed_size", $contents);
$contents = preg_replace("/FLICKR_FILTER_EMAIL=.*/", "FLICKR_FILTER_EMAIL=$flickr_filter_email", $contents);
$contents = preg_replace("/APPRISE_MINIMUM_SECONDS_BETWEEN_NOTIFICATIONS_PER_SPECIES=.*/", "APPRISE_MINIMUM_SECONDS_BETWEEN_NOTIFICATIONS_PER_SPECIES=$minimum_time_limit", $contents);
$contents = preg_replace("/MODEL=.*/", "MODEL=$model", $contents);
@@ -607,6 +609,16 @@ function runProcess() {
+
+ Processed folder management+ + + |
+ Processed is the directory where the formerly 'Analyzed' files are moved after extractions, mostly for troubleshooting purposes. + This value defines the maximum amount of files that are kept before replacement with new files. + |
Additional Info@@ -628,11 +640,6 @@ function runProcess() { } ?> - -allaboutbirds.org default
- |