--- action.py
+++ action.py
... | ... | @@ -1,10 +1,12 @@ |
1 |
-import sched |
|
2 | 1 |
import psycopg2 |
3 | 2 |
from flask_restx import Resource, Api, Namespace, fields |
4 | 3 |
from flask import request |
5 | 4 |
from flask import Flask, render_template, request, jsonify, Response |
5 |
+from flask_restful import reqparse |
|
6 | 6 |
from statsmodels.tsa.statespace.sarimax import SARIMAX |
7 | 7 |
from datetime import datetime, timedelta |
8 |
from scipy import stats
from scipy.stats import pointbiserialr
|
8 | 10 |
import pandas as pd |
9 | 11 |
import numpy as np |
10 | 12 |
import pickle |
... | ... | @@ -12,7 +14,7 @@ |
12 | 14 |
|
13 | 15 |
Action = Namespace( |
14 | 16 |
name="Action", |
15 |
- description="노드 분석을 위해 사용하는 api.", |
|
17 |
+ description="다양한 분석과 DB 조회 기능", |
|
16 | 18 |
) |
17 | 19 |
|
18 | 20 |
db_config = { |
... | ... | @@ -134,7 +136,7 @@ |
134 | 136 |
@Action.route('/fetch_sensor') |
135 | 137 |
class FetchSensorData(Resource): |
136 | 138 |
@Action.doc(responses={200: 'Success', 500: 'Failed'}) |
137 |
- def get(self): |
|
139 |
+ def post(self): |
|
138 | 140 |
conn_params = db_config # Define or fetch your connection parameters here |
139 | 141 |
query = "SELECT * FROM weather_data ORDER BY time DESC LIMIT 600" |
140 | 142 |
try: |
... | ... | @@ -148,4 +150,131 @@ |
148 | 150 |
|
149 | 151 |
return df.to_dict(orient='list'), 200 |
150 | 152 |
except Exception as e: |
151 |
- return {"message": str(e)}, 500(파일 끝에 줄바꿈 문자 없음) |
|
153 |
+ return {"message": str(e)}, 500 |
|
154 |
+ |
|
155 |
def get_manufacturing_data():
    """Fetch all welding-job records from the database.

    Returns:
        pandas.DataFrame: rows of the ``Welding_Jobs`` table ordered by
        ``welding_job_number`` ascending.
    """
    connection = psycopg2.connect(**db_config)
    try:
        # try/finally guarantees the connection is released even if the
        # query raises (the original leaked the connection on error).
        return pd.read_sql_query(
            "SELECT * FROM Welding_Jobs ORDER BY welding_job_number ASC;",
            connection,
        )
    finally:
        connection.close()
|
164 |
+ |
|
165 |
+ |
|
166 |
@Action.route('/correlation')
class Correlation(Resource):
    """Point-biserial correlation between weather features and defect status."""

    @Action.doc(responses={200: 'Success'})
    @Action.doc(responses={500: 'Register Failed'})
    def post(self):
        """Compute correlations of each weather feature against defect_status.

        Returns a JSON object mapping feature labels to
        ``{"correlation": r, "p_value": p}`` with HTTP 200, or a failure
        message with HTTP 500.
        """
        try:
            df_failure = get_manufacturing_data()

            features = {
                'Absolute Humidity': 'absolute_humidity',
                'Relative Humidity': 'relative_humidity',
                'Temperature': 'temperature',
            }

            correlations = {}
            for label, column in features.items():
                # pointbiserialr returns a result object (numpy scalars) that
                # flask cannot JSON-serialize; unpack into plain floats.
                r, p_value = pointbiserialr(df_failure[column],
                                            df_failure['defect_status'])
                correlations[label] = {
                    'correlation': float(r),
                    'p_value': float(p_value),
                }

            return {"status": "success", "correlations": correlations}, 200

        except Exception as e:
            return {"status": "failure", "message": str(e)}, 500
|
190 |
+ |
|
191 |
+ |
|
192 |
@Action.route('/anova')
class AnovaAnalysis(Resource):
    """One-way ANOVA comparing weather features across defect groups."""

    @Action.doc(responses={200: 'Success'})
    @Action.doc(responses={500: 'Analysis Failed'})
    def post(self):
        """Run f_oneway on the defect==0 vs defect==1 groups.

        Returns per-column F statistics and p-values with HTTP 200, or a
        failure message with HTTP 500.
        """
        # Local import avoids relying on the deprecated scipy.stats.stats
        # alias module pulled in at file level.
        from scipy.stats import f_oneway
        try:
            df_failure = get_manufacturing_data()

            feature_cols = ['relative_humidity', 'temperature', 'absolute_humidity']
            passed = df_failure[df_failure['defect_status'] == 0].loc[:, feature_cols]
            failed = df_failure[df_failure['defect_status'] == 1].loc[:, feature_cols]

            # f_oneway on 2-D inputs yields one F statistic / p-value per column.
            F_statistic, pVal = f_oneway(passed, failed)

            results = {
                'F_statistic': F_statistic.tolist(),
                'pVal': pVal.tolist()
            }

            return {"status": "success", "results": results}, 200

        except Exception as e:
            return {"status": "failure", "message": str(e)}, 500
|
214 |
+ |
|
215 |
+ |
|
216 |
# Request-body schema for /upload_manufacturing_data.
# NOTE(review): despite the name, this is a flask_restx model used with
# @Action.expect — not a flask_restful reqparse parser.
parser = Action.model('공정정보 업로드', {
    'mold_name': fields.String(required=True, description='Mold name'),
    'work_start_time': fields.DateTime(required=True, description='Start time of work'),
    'defect_status': fields.String(required=True, description='Defect status')
})
|
221 |
+ |
|
222 |
@Action.route('/upload_manufacturing_data')
class UploadData(Resource):
    """Insert a new welding-job record stamped with the latest weather reading."""

    @Action.doc(responses={200: 'Success', 500: 'Analysis Failed'})
    @Action.expect(parser)
    def post(self):
        """Create a Welding_Jobs row from the request body plus latest weather.

        Expects JSON with mold_name, work_start_time and defect_status.
        Returns a success message with HTTP 200, or a failure message with
        HTTP 500 (including when no weather data exists).
        """
        try:
            # Extract data from POST request
            data = request.json

            connection = psycopg2.connect(**db_config)
            try:
                cursor = connection.cursor()
                try:
                    # Latest weather observation to attach to the job record.
                    cursor.execute("""
                        SELECT temperature, relative_humidity, absolute_humidity
                        FROM weather_data
                        ORDER BY time DESC
                        LIMIT 1;
                    """)
                    weather_data = cursor.fetchone()

                    if not weather_data:
                        return {"status": "failure", "message": "No weather data found"}, 500

                    # COALESCE handles an empty Welding_Jobs table — the
                    # original crashed on fetchone()[0] when no row existed.
                    # NOTE(review): read-then-insert is still racy under
                    # concurrent uploads; a SERIAL/sequence column would be
                    # the robust fix.
                    cursor.execute(
                        "SELECT COALESCE(MAX(welding_job_number), 0) + 1 FROM Welding_Jobs;"
                    )
                    next_job_number = cursor.fetchone()[0]

                    cursor.execute(
                        """
                        INSERT INTO Welding_Jobs (welding_job_number, mold_name, work_start_time, defect_status, temperature, relative_humidity, absolute_humidity)
                        VALUES (%s, %s, %s, %s, %s, %s, %s);
                        """,
                        (next_job_number, data['mold_name'], data['work_start_time'],
                         data['defect_status'], weather_data[0], weather_data[1],
                         weather_data[2]),
                    )
                    connection.commit()
                finally:
                    cursor.close()
            finally:
                # Always release the connection — the original leaked both the
                # cursor and the connection whenever an exception fired.
                connection.close()

            return {"status": "success", "message": "Data uploaded successfully"}, 200

        except Exception as e:
            return {"status": "failure", "message": str(e)}, 500
|
277 |
+ |
|
278 |
+ |
|
279 |
# Manual smoke test: running this module directly only exercises the DB
# query helper; the returned DataFrame is discarded.
if __name__ == "__main__":
    get_manufacturing_data()
+++ database/DBupload_TH.py
... | ... | @@ -0,0 +1,48 @@ |
1 | +import pandas as pd | |
2 | +import psycopg2 | |
3 | +import numpy as np | |
4 | + | |
def read_data_from_csv(filepath):
    """Read a weather CSV and return the time/temperature/humidity columns.

    Args:
        filepath: path to a CSV containing '관측시각', '기온' and '상대습도'
            columns, where '관측시각' is formatted as %Y%m%d%H%M.

    Returns:
        pandas.DataFrame with those three columns and '관측시각' parsed to
        datetime.
    """
    df = pd.read_csv(filepath)
    # .copy() gives an independent frame so the assignment below does not
    # write into a view of `df` (SettingWithCopyWarning in the original).
    selected_df = df[['관측시각', '기온', '상대습도']].copy()
    selected_df['관측시각'] = pd.to_datetime(selected_df['관측시각'], format='%Y%m%d%H%M')
    return selected_df
11 | + | |
def buck_equation(temperature):  # temp in Celsius
    """Saturation vapor pressure in Pa via the Arden Buck equation."""
    exponent = (18.678 - temperature / 234.5) * (temperature / (257.14 + temperature))
    pressure_kpa = 0.61121 * np.exp(exponent)
    # Buck's formula yields kPa; scale to Pa for downstream use.
    return pressure_kpa * 1000
15 | + | |
def calculate_absolute_humidity(relative_humidity, temperature):
    """Absolute humidity from relative humidity (%) and temperature (°C).

    NOTE(review): with vapor pressure in Pa and the 461.5 J/(kg·K) specific
    gas constant of water vapor, this evaluates to kg/m^3, not the g/m^3 the
    original comment claimed — confirm intended units against the DB schema.
    """
    rh = np.array(relative_humidity)
    temp_c = np.array(temperature)
    # Actual vapor pressure: saturation pressure scaled by RH (percent -> fraction).
    vapor_pressure = buck_equation(temp_c) * rh * 0.01
    # Ideal-gas density of water vapor: rho = e / (R_v * T[K]).
    return vapor_pressure / (461.5 * (temp_c + 273.15))
22 | + | |
def upload_to_postgresql(dataframe, conn_params):
    """Upload weather rows from *dataframe* to the weather_data table.

    Side effect: adds an 'absolute_humidity' column to *dataframe*.

    Args:
        dataframe: frame with '관측시각', '기온' and '상대습도' columns.
        conn_params: psycopg2 connection keyword arguments.
    """
    dataframe['absolute_humidity'] = calculate_absolute_humidity(dataframe['상대습도'], dataframe['기온'])
    with psycopg2.connect(**conn_params) as conn:
        # Cursor context manager closes the cursor (the original left it
        # open); executemany batches the inserts instead of one round-trip
        # per row.
        rows = [
            (row['관측시각'], row['기온'], row['상대습도'], row['absolute_humidity'])
            for _, row in dataframe.iterrows()
        ]
        with conn.cursor() as cur:
            cur.executemany(
                "INSERT INTO weather_data (time, temperature, relative_humidity, absolute_humidity) VALUES (%s, %s, %s, %s)",
                rows,
            )
        conn.commit()
34 | + | |
35 | + | |
if __name__ == "__main__":
    # The value is passed straight to read_data_from_csv, which calls
    # pd.read_csv on it — so it must be a single CSV file path. The original
    # prompt misleadingly asked for a directory.
    csv_path = input("Enter the CSV file path: ")
    # NOTE(review): credentials are hard-coded; move them to environment
    # variables or a config file before sharing/deploying.
    db_config = {
        'dbname': 'welding',
        'user': 'postgres',
        'password': 'ts4430!@',
        'host': 'localhost',  # e.g., 'localhost'
        'port': '5432',  # e.g., '5432'
    }

    # Read data, calculate absolute humidity and upload to PostgreSQL
    data = read_data_from_csv(csv_path)
    upload_to_postgresql(data, db_config)
+++ database/DBupload_manufacturing_records.py
... | ... | @@ -0,0 +1,38 @@ |
import psycopg2
import csv
import pandas as pd

# Step 1: Parse the CSV work-history export.
df = pd.read_csv("/home/juni/문서/대옹_모니터링/%EB%8C%80%EC%9B%85%ED%95%98%EC%9D%B4%ED%85%8D-%EB%AA%A8%EB%8B%88%ED%84%B0%EB%A7%81-%EC%86%8C%ED%94%84%ED%8A%B8%EC%9B%A8%EC%96%B4/file/workHistory.csv")  # Truncated for brevity, paste the entire CSV data here.

# Drop the leading index column produced by the export.
df = df.iloc[:, 1:]

# NOTE(review): hard-coded credentials — move to environment variables or a
# config file before sharing/deploying.
db_config = {
    'dbname': 'welding',
    'user': 'postgres',
    'password': 'ts4430!@',
    'host': 'localhost',  # e.g., 'localhost'
    'port': '5432',  # e.g., '5432'
}

insert_sql = """
INSERT INTO Welding_Jobs (Welding_Job_Number, Mold_Name, Work_Start_Time, Defect_Status, Temperature, Relative_Humidity, Absolute_Humidity)
VALUES (%s, %s, %s, %s, %s, %s, %s)
"""

# Build the parameter tuples up front, then insert them in one batch.
rows = [
    (
        row['용접 작업번호'],
        row['금형 이름'],
        row['작업 시작 시간'],
        row['불량 여부'],
        row['기온'],
        row['상대습도'],
        row['절대습도'],
    )
    for _, row in df.iterrows()
]

# try/finally closes the connection even if an insert fails (the original
# leaked both the cursor and the connection on any exception).
conn = psycopg2.connect(**db_config)
try:
    with conn.cursor() as cursor:
        cursor.executemany(insert_sql, rows)
    conn.commit()
finally:
    conn.close()
--- tools/algo/ANOVA.py
+++ tools/algo/ANOVA.py
... | ... | @@ -1,11 +1,8 @@ |
1 | 1 |
import numpy as np |
2 | 2 |
import pandas as pd |
3 |
-from statsmodels.stats.anova import anova_lm |
|
4 |
-from statsmodels.formula.api import ols |
|
5 | 3 |
from scipy.stats import stats |
6 | 4 |
from tools.algo.humidity import absolute_humidity |
7 | 5 |
from tools.algo.interpolation import interpolate_value |
8 |
-from datetime import datetime |
|
9 | 6 |
import plotly.express as px |
10 | 7 |
|
11 | 8 |
if __name__ == "__main__": |
Add a comment
Delete comment
Once you delete this comment, you won't be able to recover it. Are you sure you want to delete this comment?