윤영준 2023-10-22
made manufacturing data DB interaction
@90051294973383887aef7d165f91be451cc12e99
action.py
--- action.py
+++ action.py
@@ -1,10 +1,12 @@
-import sched
 import psycopg2
 from flask_restx import Resource, Api, Namespace, fields
 from flask import request
 from flask import Flask, render_template, request, jsonify, Response
+from flask_restful import reqparse
 from statsmodels.tsa.statespace.sarimax import SARIMAX
 from datetime import datetime, timedelta
+from scipy import stats
+from scipy.stats import pointbiserialr
 import pandas as pd
 import numpy as np
 import pickle
@@ -12,7 +14,7 @@
 
 Action = Namespace(
     name="Action",
-    description="노드 분석을 위해 사용하는 api.",
+    description="다양한 분석과 DB 조회 기능",
 )
 
 db_config = {
@@ -134,7 +136,7 @@
 @Action.route('/fetch_sensor')
 class FetchSensorData(Resource):
     @Action.doc(responses={200: 'Success', 500: 'Failed'})
-    def get(self):
+    def post(self):
         conn_params = db_config  # Define or fetch your connection parameters here
         query = "SELECT * FROM weather_data ORDER BY time DESC LIMIT 600"
         try:
@@ -148,4 +150,131 @@
 
                 return df.to_dict(orient='list'), 200
         except Exception as e:
-            return {"message": str(e)}, 500
\ No newline at end of file
+            return {"message": str(e)}, 500
+
+def get_manufacturing_data():
+    # Connect to the database
+    connection = psycopg2.connect(**db_config)
+
+    # Query the relevant data
+    df = pd.read_sql_query("SELECT * FROM Welding_Jobs ORDER BY welding_job_number ASC;", connection)
+    connection.close()
+
+    return df
+
+
+@Action.route('/correlation')
+class Correlation(Resource):
+    @Action.doc(responses={200: 'Success'})
+    @Action.doc(responses={500: 'Register Failed'})
+    def post(self):
+        try:
+            df_failure = get_manufacturing_data()
+
+            correlation_manufacturing_abhumidity = pointbiserialr(df_failure["absolute_humidity"],
+                                                                  df_failure['defect_status'])
+            correlation_manufacturing_rehumidity = pointbiserialr(df_failure["relative_humidity"],
+                                                                  df_failure['defect_status'])
+            correlation_manufacturing_temp = pointbiserialr(df_failure["temperature"], df_failure['defect_status'])
+
+            # Each pointbiserialr result is a (correlation, p-value) pair of numpy
+            # floats; cast to plain floats so the response is JSON-serializable.
+            correlations = {
+                'Absolute Humidity': [float(v) for v in correlation_manufacturing_abhumidity],
+                'Relative Humidity': [float(v) for v in correlation_manufacturing_rehumidity],
+                'Temperature': [float(v) for v in correlation_manufacturing_temp]
+            }
+
+            return {"status": "success", "correlations": correlations}, 200
+
+        except Exception as e:
+            return {"status": "failure", "message": str(e)}, 500
+
+
+@Action.route('/anova')
+class AnovaAnalysis(Resource):
+    @Action.doc(responses={200: 'Success'})
+    @Action.doc(responses={500: 'Analysis Failed'})
+    def post(self):
+        try:
+            df_failure = get_manufacturing_data()
+
+            F_statistic, pVal = stats.f_oneway(df_failure[df_failure['defect_status'] == 0].loc[:,
+                                               ['relative_humidity', 'temperature', 'absolute_humidity']],
+                                               df_failure[df_failure['defect_status'] == 1].loc[:,
+                                               ['relative_humidity', 'temperature', 'absolute_humidity']])
+
+            results = {
+                'F_statistic': F_statistic.tolist(),
+                'pVal': pVal.tolist()
+            }
+
+            return {"status": "success", "results": results}, 200
+
+        except Exception as e:
+            return {"status": "failure", "message": str(e)}, 500
+
+
+parser = Action.model('공정정보 업로드', {
+    'mold_name': fields.String(required=True, description='Mold name'),
+    'work_start_time': fields.DateTime(required=True, description='Start time of work'),
+    'defect_status': fields.String(required=True, description='Defect status')
+})
+
+@Action.route('/upload_manufacturing_data')
+class UploadData(Resource):
+
+    @Action.doc(responses={200: 'Success', 500: 'Analysis Failed'})
+    @Action.expect(parser)
+    def post(self):
+        try:
+            # Extract data from POST request
+            data = request.json
+
+            # Connect to the database
+            connection = psycopg2.connect(**db_config)
+            cursor = connection.cursor()
+
+            # Query the latest weather data
+            weather_query = """
+                SELECT temperature, relative_humidity, absolute_humidity 
+                FROM weather_data 
+                ORDER BY time DESC 
+                LIMIT 1;
+            """
+            cursor.execute(weather_query)
+            weather_data = cursor.fetchone()
+
+            # If no weather data is found, return an error message
+            if not weather_data:
+                return {"status": "failure", "message": "No weather data found"}, 500
+
+            # Extract the latest welding job number
+            job_number_query = """
+                SELECT welding_job_number 
+                FROM Welding_Jobs 
+                ORDER BY welding_job_number DESC 
+                LIMIT 1;
+            """
+            cursor.execute(job_number_query)
+            last_job = cursor.fetchone()
+            # Start numbering from 1 when the table is still empty
+            latest_job_number = last_job[0] + 1 if last_job else 1
+
+            # Construct the SQL query
+            query = """
+                INSERT INTO Welding_Jobs (welding_job_number, mold_name, work_start_time, defect_status, temperature, relative_humidity, absolute_humidity)
+                VALUES (%s, %s, %s, %s, %s, %s, %s);
+            """
+
+            # Execute the insert query
+            cursor.execute(query, (latest_job_number, data['mold_name'], data['work_start_time'], data['defect_status'], weather_data[0], weather_data[1], weather_data[2]))
+            connection.commit()
+
+            cursor.close()
+            connection.close()
+
+            return {"status": "success", "message": "Data uploaded successfully"}, 200
+
+        except Exception as e:
+            return {"status": "failure", "message": str(e)}, 500
+
+
+if __name__ == "__main__":
+    get_manufacturing_data()
\ No newline at end of file
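
For reference, a minimal client-side sketch of how the new endpoints might be exercised. This is not part of the commit: the base URL, port, and route prefix are assumptions and depend on how the Action namespace is registered on the Api object, so adjust them to the actual deployment.

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server, Action namespace mounted at the root

# Point-biserial correlation of each weather feature against defect_status
print(requests.post(f"{BASE_URL}/correlation").json())

# Column-wise one-way ANOVA between defective and non-defective welding jobs
print(requests.post(f"{BASE_URL}/anova").json())

# Register a new welding job; the server attaches the latest weather_data row itself
payload = {
    "mold_name": "MOLD-A",                     # hypothetical example values
    "work_start_time": "2023-10-22 09:00:00",
    "defect_status": "0",
}
print(requests.post(f"{BASE_URL}/upload_manufacturing_data", json=payload).json())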
 
database/DBupload_TH.py (added)
+++ database/DBupload_TH.py
@@ -0,0 +1,48 @@
+import pandas as pd
+import psycopg2
+import numpy as np
+
+def read_data_from_csv(filepath):
+    """Read data from CSV and return a DataFrame with required columns."""
+    df = pd.read_csv(filepath)
+    selected_df = df[['관측시각', '기온', '상대습도']]
+    selected_df['관측시각'] = pd.to_datetime(selected_df['관측시각'], format='%Y%m%d%H%M')
+    return selected_df
+
+def buck_equation(temperature): # temp in Celsius
+    saturation_vapor_pressure = 0.61121 * np.exp((18.678 - temperature / 234.5) * (temperature / (257.14 + temperature)))
+    return saturation_vapor_pressure * 1000 # kPa -> Pa
+
+def calculate_absolute_humidity(relative_humidity, temperature):
+    relative_humidity = np.array(relative_humidity)
+    temperature = np.array(temperature)
+    saturation_vapor_pressure = buck_equation(temperature)
+    # 461.5 J/(kg*K) is the specific gas constant of water vapor
+    return saturation_vapor_pressure * relative_humidity * 0.01 /(461.5 * (temperature + 273.15)) # kg/m^3 (x1000 for g/m^3)
+
+def upload_to_postgresql(dataframe, conn_params):
+    """Upload data from DataFrame to PostgreSQL."""
+    dataframe['absolute_humidity'] = calculate_absolute_humidity(dataframe['상대습도'], dataframe['기온'])
+    with psycopg2.connect(**conn_params) as conn:
+        cur = conn.cursor()
+        for _, row in dataframe.iterrows():
+            cur.execute(
+                "INSERT INTO weather_data (time, temperature, relative_humidity, absolute_humidity) VALUES (%s, %s, %s, %s)",
+                (row['관측시각'], row['기온'], row['상대습도'], row['absolute_humidity'])
+            )
+        conn.commit()
+
+
+if __name__ == "__main__":
+    filepath = input("Enter the path to the CSV file: ")
+    db_config = {
+        'dbname': 'welding',
+        'user': 'postgres',
+        'password': 'ts4430!@',
+        'host': 'localhost',  # e.g., 'localhost'
+        'port': '5432',  # e.g., '5432'
+    }
+
+    # Read data, calculate absolute humidity and upload to PostgreSQL
+    data = read_data_from_csv(filepath)
+    upload_to_postgresql(data, db_config)
\ No newline at end of file
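
A quick standalone check of the humidity math above; it is only a sanity check, but it shows the function returns kg/m^3 rather than the g/m^3 the original comment claimed.

import numpy as np

def buck_equation(temperature):  # saturation vapor pressure in Pa, temperature in Celsius
    return 0.61121 * np.exp((18.678 - temperature / 234.5) * (temperature / (257.14 + temperature))) * 1000

def absolute_humidity(relative_humidity, temperature):  # kg/m^3
    return buck_equation(temperature) * relative_humidity * 0.01 / (461.5 * (temperature + 273.15))

# At 25 degC and 60 % relative humidity the Buck equation gives ~3.17 kPa of
# saturation vapor pressure, so absolute humidity comes out around 0.0138 kg/m^3
# (~13.8 g/m^3), in line with standard psychrometric tables.
print(absolute_humidity(60, 25.0))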
 
database/DBupload_manufacturing_records.py (added)
+++ database/DBupload_manufacturing_records.py
@@ -0,0 +1,38 @@
+import psycopg2
+import csv
+import pandas as pd
+
+# Step 1: Parse the CSV data
+df = pd.read_csv("/home/juni/문서/대옹_모니터링/%EB%8C%80%EC%9B%85%ED%95%98%EC%9D%B4%ED%85%8D-%EB%AA%A8%EB%8B%88%ED%84%B0%EB%A7%81-%EC%86%8C%ED%94%84%ED%8A%B8%EC%9B%A8%EC%96%B4/file/workHistory.csv")  # hard-coded path to the exported work-history CSV
+
+df = df.iloc[:,1:]
+
+db_config = {
+    'dbname': 'welding',
+    'user': 'postgres',
+    'password': 'ts4430!@',
+    'host': 'localhost',  # e.g., 'localhost'
+    'port': '5432',  # e.g., '5432'
+}
+
+conn = psycopg2.connect(**db_config)
+cursor = conn.cursor()
+
+insert_sql = """
+INSERT INTO Welding_Jobs (Welding_Job_Number, Mold_Name, Work_Start_Time, Defect_Status, Temperature, Relative_Humidity, Absolute_Humidity)
+VALUES (%s, %s, %s, %s, %s, %s, %s)
+"""
+for index, row in df.iterrows():
+    cursor.execute(insert_sql, (
+        row['용접 작업번호'],
+        row['금형 이름'],
+        row['작업 시작 시간'],
+        row['불량 여부'],
+        row['기온'],
+        row['상대습도'],
+        row['절대습도']
+    ))
+
+conn.commit()
+cursor.close()
+conn.close()
\ No newline at end of file
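
Neither upload script creates its target tables, so the DDL below is only an assumption inferred from the INSERT statements; the column types are guesses and should be aligned with the real schema of the 'welding' database before use.

import psycopg2

# Hypothetical schema; not taken from the repository.
DDL = """
CREATE TABLE IF NOT EXISTS weather_data (
    time               TIMESTAMP,
    temperature        DOUBLE PRECISION,
    relative_humidity  DOUBLE PRECISION,
    absolute_humidity  DOUBLE PRECISION
);
CREATE TABLE IF NOT EXISTS Welding_Jobs (
    welding_job_number INTEGER,
    mold_name          TEXT,
    work_start_time    TIMESTAMP,
    defect_status      INTEGER,  -- treated as 0/1 by the correlation and ANOVA endpoints
    temperature        DOUBLE PRECISION,
    relative_humidity  DOUBLE PRECISION,
    absolute_humidity  DOUBLE PRECISION
);
"""

if __name__ == "__main__":
    with psycopg2.connect(dbname="welding", user="postgres", password="<password>",
                          host="localhost", port="5432") as conn:
        with conn.cursor() as cur:
            cur.execute(DDL)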
tools/algo/ANOVA.py
--- tools/algo/ANOVA.py
+++ tools/algo/ANOVA.py
@@ -1,11 +1,8 @@
 import numpy as np
 import pandas as pd
-from statsmodels.stats.anova import anova_lm
-from statsmodels.formula.api import ols
 from scipy.stats import stats
 from tools.algo.humidity import absolute_humidity
 from tools.algo.interpolation import interpolate_value
-from datetime import datetime
 import plotly.express as px
 
 if __name__ == "__main__":
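
tools/algo/ANOVA.py itself only loses a few unused imports in this commit, but it shares the scipy-based ANOVA pattern with the /anova endpoint above. As a reference, a minimal self-contained sketch of that pattern on toy data (not from the real Welding_Jobs table): f_oneway on 2-D inputs returns one F statistic and one p-value per column.

import pandas as pd
from scipy import stats

# Toy stand-in for the Welding_Jobs table
df = pd.DataFrame({
    "defect_status":     [0, 0, 0, 1, 1, 1],
    "relative_humidity": [40.0, 42.0, 41.0, 55.0, 57.0, 56.0],
    "temperature":       [21.0, 22.0, 21.5, 24.0, 25.0, 24.5],
    "absolute_humidity": [0.007, 0.008, 0.0075, 0.011, 0.012, 0.0115],
})

cols = ["relative_humidity", "temperature", "absolute_humidity"]
good = df[df["defect_status"] == 0][cols]
bad  = df[df["defect_status"] == 1][cols]

F, p = stats.f_oneway(good, bad)   # arrays of length 3, one value per column
print(dict(zip(cols, F.tolist())), dict(zip(cols, p.tolist())))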