윤영준 05-27
hourly update code
@812c5224a78f116521dfbf4e2473b90acda3f2b2
 
Hourly_db_schedular.py (added)
+++ Hourly_db_schedular.py
@@ -0,0 +1,86 @@
+import pandas as pd
+from datetime import datetime, timedelta
+import psycopg2
+import json
+
+from flask import Flask
+from flask_restx import Api
+from apscheduler.schedulers.background import BackgroundScheduler
+
+with open('config_files/MAIN_DB_ENDPOINT.json', 'r') as file:
+    db_config = json.load(file)
+
+app = Flask(__name__)
+print("ITS API Updater START")
+
+api = Api(app,
+          version='0.1',
+          title="monitoring",
+          description="API Server",
+          terms_url="/",
+          contact="",
+          )
+
+def get_hourly_datetime_range():
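+    # Returns (window_start, window_end) for the hour to aggregate; currently not called by fetch_and_update below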
+    current_time = datetime.now()
+    current_hour_start = current_time.replace(minute=0, second=0, microsecond=0)
+    next_hour_start = current_hour_start + timedelta(hours=1)
+
+    # If the current time is past 5 minutes into the hour
+    if current_time.minute > 5:
+        return current_hour_start, next_hour_start
+    else:
+        previous_hour_start = current_hour_start - timedelta(hours=1)
+        return previous_hour_start, current_hour_start
+
+
+def fetch_and_update():
+    conn = psycopg2.connect(**db_config)
+
+    try:
+        # Transaction block: commits on success, rolls back on error
+        with conn:
+            with conn.cursor() as cursor:
+                # Work inside the project schema
+                cursor.execute("SET search_path TO ai_camera_v0_1;")
+
+                # Aggregate the last full hour (the job fires at minute 5)
+                now = datetime.now()
+                previous_hour = (now - timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
+                current_hour = now.replace(minute=0, second=0, microsecond=0)
+
+                # Count alert-triggering flooding detections per equipment
+                fetch_sql = """
+                SELECT eqpmn_id, COUNT(*) AS flooding_cnt
+                FROM flooding_detect_event
+                WHERE ocrn_dt >= %s AND ocrn_dt < %s AND norm_to_alert_flag = 'True'
+                GROUP BY eqpmn_id;
+                """
+                cursor.execute(fetch_sql, (previous_hour, current_hour))
+                rows = cursor.fetchall()
+                df = pd.DataFrame(rows, columns=['eqpmn_id', 'flooding_cnt'])
+
+                # Insert hourly aggregates into flooding_anals_event_data_hr
+                insert_sql = """
+                INSERT INTO flooding_anals_event_data_hr (clct_dt, eqpmn_id, flooding_cnt)
+                VALUES (%s, %s, %s);
+                """
+                for _, row in df.iterrows():
+                    cursor.execute(insert_sql, (previous_hour, row['eqpmn_id'], row['flooding_cnt']))
+    finally:
+        conn.close()
+
+# Scheduler configuration
+scheduler = BackgroundScheduler()
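+# Cron trigger with minute=5 runs fetch_and_update once per hour, at HH:05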
+scheduler.add_job(func=fetch_and_update, trigger='cron', minute=5)
+scheduler.start()
+
+@app.route('/')
+def home():
+    return "Flooding analysis service running."
+
+if __name__ == '__main__':
+    app.run(debug=True, use_reloader=False)
+
config_files/MAIN_DB_ENDPOINT.json
--- config_files/MAIN_DB_ENDPOINT.json
+++ config_files/MAIN_DB_ENDPOINT.json
@@ -3,17 +3,5 @@
   "port" : "5423",
   "id" : "takensoft",
   "password" : "tts96314728!@",
-  "table_name" : "flooding_detect_event",
-  "columns" : [
-    "ocrn_dt",
-    "eqpmn_id",
-    "flooding_result",
-    "flooding_per",
-    "image",
-    "image_seg",
-    "eqpmn_lat",
-    "eqpmn_lon",
-    "flooding_y",
-    "flooding_x"
-  ]
+  "schema_name" : "ai_camera_v0_1",
 }
(No newline at end of file)
postprocess_draft.py
--- postprocess_draft.py
+++ postprocess_draft.py
@@ -2,11 +2,13 @@
 from flask import Flask, request, jsonify
 from flask_restx import Api, Resource, fields
 import os
+import psycopg2
 from datetime import datetime
 from yoloseg.inference_ import Inference, overlay_mask
 import cv2
 import time
 import base64
+import json
 import requests
 import typing
 from requests_toolbelt import MultipartEncoder
@@ -30,6 +32,9 @@
 
 # Namespace definition
 ns = api.namespace('postprocess', description='Postprocessing of inference results')
+
+with open('config_files/MAIN_DB_ENDPOINT.json', 'r') as file:
+    db_config = json.load(file)
 
 class StreamSources():
     def __init__(self, buffer_size, normal_send_interval, failure_mode_thres, failure_mode_check_past_n, normal_mode_thres, normal_mode_check_past_n):
@@ -147,7 +152,7 @@
         if flag_send_event:
             self.send_event(source)
 
-        # alert only alarms once
+        # alert alarms only once
         if self.sources[source]["failure_to_normal_mode_change_alert"]:
             self.sources[source]["failure_to_normal_mode_change_alert"] = False
 
@@ -155,6 +160,22 @@
             self.sources[source]["normal_to_failure_mode_change_alert"] = False
 
     def send_event(self, source):
+        try:
+            conn = psycopg2.connect(**db_config)
+            cursor = conn.cursor()
+
+            # NOTE: placeholder query; the event upload statement is not built or executed yet
+            upload_data_sql_query = """
+                INSERT INTO
+            """
+
+            cursor.close()
+            conn.close()
+        except psycopg2.Error as e:
+            print(f"DB error while sending event: {e}")
+        except Exception as e:
+            print(e)
+
         self.sources[source]["last_send_before"] = 0
         print(f"EVENT : SENDING {source}!!")
         pass