

main
@54572a7b36f22e6cc5a59e747a0f11b1f56551b6
+++ main_server/85CF_HD_20211018_039146.jpg
Binary file is not shown
+++ main_server/README.MD
@@ -0,0 +1,0 @@
+++ main_server/__pycache__/action.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/action.cpython-311.pyc
Binary file is not shown
+++ main_server/__pycache__/app.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/auth.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/auth.cpython-311.pyc
Binary file is not shown
+++ main_server/__pycache__/config.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/fog - 복사본.pkl
Binary file is not shown
+++ main_server/__pycache__/model_trip.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/predict - 복사본.py
@@ -0,0 +1,30 @@
+import pickle
+from sklearn.metrics import *
+from model import darkchannel, sobel
+import pandas as pd
+import joblib
+import numpy as np
+
+
+# image path
+
+def predict(X_test):
+    svclassifier_from_pickle = joblib.load('fog.pkl')
+
+    Y_pred = svclassifier_from_pickle.predict(X_test)
+    if Y_pred[0] == 0:  # predict() returns an array; a single sample yields one label
+        print("fog")
+    else:
+        print("normal")
+
+    #print("accuracy : ", accuracy_score(Y_test,Y_pred))
+    #print(confusion_matrix(Y_test,Y_pred))
+    #print(classification_report(Y_test,Y_pred))
+
+img = "C:/Users/lim/Desktop/data/85CF_HD_20211018_038932.jpg"
+
+alpha_map, result = darkchannel(img)
+edge = sobel(img)
+value = np.array([[result, edge]])
+predict(value)
+
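A minimal sketch of reusing this classifier on several images at once, assuming the repo's `model` module provides `darkchannel` and `sobel` as used above (dark-channel and edge statistics for an image path) and that `fog.pkl` is a scikit-learn estimator saved with joblib:

import glob
import joblib
import numpy as np
from model import darkchannel, sobel  # assumed helpers from this repo

def predict_batch(image_paths, model_path="fog.pkl"):
    """Classify each image as fog (label 0) or normal, returning a list of strings."""
    clf = joblib.load(model_path)          # load the pickled classifier once
    features = []
    for path in image_paths:
        _, dark = darkchannel(path)        # dark-channel statistic
        edge = sobel(path)                 # edge-strength statistic
        features.append([dark, edge])
    preds = clf.predict(np.array(features))  # shape (n_images, 2)
    return ["fog" if p == 0 else "normal" for p in preds]

# e.g. labels = predict_batch(glob.glob("images/*.jpg"))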
+++ main_server/__pycache__/todo.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/trip.cpython-310.pyc
Binary file is not shown
+++ main_server/__pycache__/trip.cpython-311.pyc
Binary file is not shown
+++ main_server/action.py
@@ -0,0 +1,67 @@
+from flask_restx import Resource, Api, Namespace, fields
+from flask import Flask, render_template, request
+from werkzeug.utils import secure_filename
+import os
+from database.database import DB
+from PIL import Image
+from datetime import datetime, timedelta
+
+paths = os.getcwd()
+
+Action = Namespace(
+    name="Action",
+    description="API used for node analysis.",
+)
+
+
[email protected]('/image_summit')
+class ImageSubmit(Resource):
+    @Action.doc(responses={200: 'Success'})
+    @Action.doc(responses={500: 'Register Failed'})
+    def post(self):
+        # Flask-RESTX dispatches by HTTP method, so no explicit method check is needed.
+        f = request.files['file']
+        f.save(secure_filename(f.filename))
+        return {
+            'save': 'done'  # return as a plain string
+        }, 200
+
+
+
[email protected]('/image_anal')
+class ImageAnal(Resource):
+    @Action.doc(responses={200: 'Success'})
+    @Action.doc(responses={500: 'Register Failed'})
+    def post(self):
+        db = DB()
+        dir = os.getcwd()
+        filename = request.json['filename']
+        file_type = request.json['file_type']
+        lat = float(request.json['gps_x'])
+        lon = float(request.json['gps_y'])
+        user_id = 'test'
+        action_success = True
+        action_id = 'test'
+        db.db_add_action(action_id, lat, lon, user_id, action_success)
+        return {
+            'node': (lat, lon),
+            'rain': 'rain',
+        }, 200
+
+
[email protected]('/action_display')
+class ActionDisplay(Resource):
+    @Action.doc(responses={200: 'Success'})
+    @Action.doc(responses={500: 'Register Failed'})
+    def post(self):
+        db = DB()
+        now = datetime.now()
+        # db_display_action expects a datetime and builds a +/- 2 hour window from it
+        value = db.db_display_action(now)
+        return {
+            'report': list(value)
+        }, 200
+
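A usage sketch for these endpoints with the `requests` package, assuming the server from app.py is running locally on port 8080; the file name and GPS values are placeholders:

import requests

BASE = "http://localhost:8080"

# /action/image_summit stores the uploaded file on the server
with open("85CF_HD_20211018_039146.jpg", "rb") as f:
    r = requests.post(f"{BASE}/action/image_summit", files={"file": f})
print(r.json())  # {'save': 'done'}

# /action/image_anal records an action row and echoes the node coordinates
payload = {
    "filename": "85CF_HD_20211018_039146.jpg",
    "file_type": "jpg",
    "gps_x": "35.871",   # latitude (placeholder)
    "gps_y": "128.601",  # longitude (placeholder)
}
r = requests.post(f"{BASE}/action/image_anal", json=payload)
print(r.json())  # {'node': [lat, lon], 'rain': 'rain'}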
+++ main_server/app.py
@@ -0,0 +1,30 @@
+from flask import Flask
+from flask_restx import Api
+from auth import Auth
+from action import Action
+
+
+
+
+app = Flask(__name__)
+
+
+print("Api Start")
+api = Api(app,
+          version='0.1',
+          title="trafficagent",
+          description="API Server",
+          terms_url="/",
+          contact="[email protected]",
+          license="MIT")
+
+
+
+api.add_namespace(Auth, '/auth')
+print("Api Add Auth")
+
+api.add_namespace(Action, '/action')
+
+if __name__ == "__main__":
+    print("Flask Start")  # app.run() blocks, so log before starting the server
+    app.run(debug=False, host='0.0.0.0', port=8080)
\ No newline at end of file
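For a quick check that both namespaces are wired up, Flask's test client can be used without starting the server; note that the /auth and /action handlers still open the PostgreSQL connection configured in database.py, so that instance must be reachable for the second call:

from app import app

client = app.test_client()

# Flask-RESTX serves the generated Swagger spec at the API root.
spec = client.get("/swagger.json")
print(spec.status_code, sorted(spec.get_json()["paths"]))

# This call opens a DB connection, so PostgreSQL must be running.
resp = client.post("/auth/id", json={"id": "new_user"})
print(resp.status_code, resp.get_json())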
+++ main_server/auth.py
@@ -0,0 +1,114 @@
+import hashlib
+from flask import request, jsonify, render_template, redirect, url_for
+from flask_restx import Resource, Api, Namespace, fields
+from database.database import DB
+import datetime
+import jwt
+
+
+
+
+
+users = {}
+
+Auth = Namespace(
+    name="Auth",
+    description="API for user authentication",
+)
+
+
+user_fields = Auth.model('User', {  # create the model object
+    'id': fields.String(description='a User Name', required=True, example="id")
+})
+
+
+user_fields_auth = Auth.inherit('User Auth', user_fields, {
+    'password': fields.String(description='Password', required=True)
+
+})
+
+user_fields_register = Auth.inherit('User register', user_fields, {
+    'password': fields.String(description='Password', required=True),
+    'email': fields.String(description='email', required=True),
+    'user_sex': fields.String(description='sex', required=True),
+    'phone': fields.String(description='phone', required=True)
+
+})
+
+
+
[email protected]('/id')
+class AuthCheck(Resource):
+    @Auth.doc(responses={200: 'Success'})
+    @Auth.doc(responses={500: 'Register Failed'})
+    def post(self):
+        db = DB()
+        id = request.json['id']
+        value = db.db_check_id(id)
+        if value is not None:
+            return {
+                "message": "This ID is already in use"
+            }, 500
+        else:
+            return {
+                'message': 'This ID is available'  # return as a plain string
+            }, 200
+
+
+
+
[email protected]('/register')
+class AuthRegister(Resource):
+    @Auth.expect(user_fields_register)
+    @Auth.doc(responses={200: 'Success'})
+    @Auth.doc(responses={500: 'Register Failed'})
+    def post(self):
+        db = DB()
+        id = request.json['id']
+        password = request.json['password']
+        user_email = request.json['email']
+        sex = request.json['user_sex']
+        phone = request.json['phone']
+        pw_hash = hashlib.sha256(password.encode('utf-8')).hexdigest()
+        value = db.db_check_id(id)  # reject the request if the id already exists
+        if value is not None:
+            return {
+                "message": "Register Failed"
+            }, 500
+        else:
+            db.db_add_id(id, pw_hash, user_email, sex, phone)
+            return {
+                'Authorization': id  # return as a plain string
+            }, 200
+
[email protected]('/login')
+class AuthLogin(Resource):
+    @Auth.expect(user_fields_auth)
+    @Auth.doc(responses={200: 'Success'})
+    @Auth.doc(responses={404: 'User Not Found'})
+    @Auth.doc(responses={500: 'Auth Failed'})
+    def post(self):
+        db = DB()
+        id = request.json['id']
+        password = request.json['password']
+        pw_hash = hashlib.sha256(password.encode('utf-8')).hexdigest()
+        result = db.db_login(id, pw_hash)
+        if result is not None:
+            payload = {
+                'id': id,
+                'exp': datetime.datetime.utcnow() + datetime.timedelta(seconds=70)
+            }
+            token = jwt.encode(payload, "secret", algorithm='HS256')
+            return jsonify({'result': 'success', 'token': token})
+        else:
+            return jsonify({'result': 'fail', 'msg': 'ID or password does not match.'})
+
+
[email protected]('/secession')
+class AuthSecession(Resource):
+    def post(self):
+        db = DB()
+        id = request.json['token']
+        payload = jwt.decode(id, "secret", algorithms=['HS256'])
+        db.db_delete_id(payload['id'])
+        return {'secession': 'success'}
+
+
+
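An end-to-end sketch of the register/login flow, assuming the server is running on port 8080; the credentials are placeholders, and the token is decoded with the same hard-coded HS256 key the server uses:

import requests
import jwt

BASE = "http://localhost:8080"

# register (the id must not exist yet)
requests.post(f"{BASE}/auth/register", json={
    "id": "demo", "password": "hunter2",
    "email": "[email protected]", "user_sex": "M", "phone": "010-0000-0000",
})

# login returns a JWT that expires after 70 seconds
token = requests.post(f"{BASE}/auth/login",
                      json={"id": "demo", "password": "hunter2"}).json()["token"]

# the payload carries the user id and the expiry timestamp
print(jwt.decode(token, "secret", algorithms=["HS256"]))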
+++ main_server/database/__pycache__/database.cpython-310.pyc
Binary file is not shown
+++ main_server/database/__pycache__/database.cpython-311.pyc
Binary file is not shown
+++ main_server/database/database.py
@@ -0,0 +1,93 @@
+import psycopg2  # PostgreSQL driver
+import time
+from datetime import datetime, timedelta
+
+
+class DB():
+    def __init__(self):
+        self.conn = psycopg2.connect(
+            host='localhost',
+            dbname='postgres',
+            user='postgres',
+            password='ts4430!@',
+            port='5432'
+        )  # connect to the database
+        self.conn.autocommit = True
+
+    def db_check_id(self, id):
+        cur = self.conn.cursor()  # create a cursor
+
+        cur.execute(f'''
+            SELECT user_id
+            FROM rds.user_id
+            WHERE user_id = '{id}';
+        ''')
+        result = cur.fetchone()
+        cur.close()
+
+        return result
+
+    def db_login(self, id, pw):
+        cur = self.conn.cursor()  # create a cursor
+
+        cur.execute(f'''
+            SELECT user_id, user_pw, user_email, user_sex, user_phone, user_time_stamp
+            FROM rds.user_id
+            WHERE user_id = '{id}' and user_pw = '{pw}';
+        ''')
+        result = cur.fetchone()
+
+
+        cur.close()
+
+        return result
+
+    def db_add_id(self, user_id, user_pw, user_email, user_sex, user_phone):
+        cur = self.conn.cursor()  # create a cursor
+        now = time.localtime()
+        d = time.strftime('%Y-%m-%d %X', now)
+        cur.execute(f'''
+            insert into rds.user_id (user_id,user_pw,user_email,user_sex,user_phone,user_time_stamp)
+            values ('{user_id}','{user_pw}','{user_email}','{user_sex}','{user_phone}','{d}')
+        ''')
+        cur.close()
+
+    def db_delete_id(self, user_id):
+        cur = self.conn.cursor()  # create a cursor
+        cur.execute(f'''
+            delete
+            from rds.user_id ui
+            where user_id = '{user_id}'
+        ''')
+        cur.close()
+
+
+
+    def db_add_action(self, action_id, lat, lon, user_id, action_success):
+        cur = self.conn.cursor()  # create a cursor
+        now = datetime.now()
+        d = now.strftime('%Y-%m-%d %X')
+        cur.execute(f'''
+            insert into rds.action (action_id,lat,lon,action_time_stamp,user_id,action_success)
+            values ('{action_id}','{lat}','{lon}','{d}','{user_id}','{action_success}')
+        ''')
+        cur.close()
+
+
+    def db_display_action(self, timestamp):
+        cur = self.conn.cursor()  # create a cursor
+        now = timestamp  # expects a datetime object
+        d_plus = now + timedelta(hours=2)
+        d_plus = "'" + d_plus.strftime('%Y-%m-%d %X') + "'"
+        d_minus = now - timedelta(hours=2)
+        d_minus = "'" + d_minus.strftime('%Y-%m-%d %X') + "'"
+        cur.execute(f'''
+            select * from rds.pothole
+            where timestamp between {d_minus} and {d_plus};
+        ''')
+        result = cur.fetchall()
+        cur.close()
+        return result
+
+
+
+
+
\ No newline at end of file
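The queries above interpolate request values directly into the SQL text via f-strings. A hypothetical variant of DB.db_login using psycopg2 placeholders, where the driver handles the quoting, would look like this:

import psycopg2

def db_login_param(conn, user_id, pw_hash):
    # Same lookup as DB.db_login, but the values are passed as bound parameters,
    # so a crafted user_id or password cannot alter the SQL statement.
    cur = conn.cursor()
    cur.execute(
        """
        SELECT user_id, user_pw, user_email, user_sex, user_phone, user_time_stamp
        FROM rds.user_id
        WHERE user_id = %s AND user_pw = %s;
        """,
        (user_id, pw_hash),
    )
    row = cur.fetchone()
    cur.close()
    return row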
+++ main_server/fog_1.jpg
Binary file is not shown
+++ main_server/mrousavy1404057804125804675.jpg
Binary file is not shown
+++ main_server/mrousavy1609405171016471384.jpg
Binary file is not shown
+++ main_server/mrousavy6478677259507416977.jpg
Binary file is not shown
+++ main_server/mrousavy9178771130024154858.jpg
Binary file is not shown
+++ main_server/requirements.txt
@@ -0,0 +1,113 @@
+absl-py==1.3.0
+aniso8601==9.0.1
+asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1660605382950/work
+astunparse==1.6.3
+attrs==22.1.0
+backcall @ file:///home/conda/feedstock_root/build_artifacts/backcall_1592338393461/work
+backports.functools-lru-cache @ file:///home/conda/feedstock_root/build_artifacts/backports.functools_lru_cache_1618230623929/work
+cachetools==5.2.0
+certifi==2022.9.24
+charset-normalizer==2.1.1
+click==8.1.3
+colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1655412516417/work
+contourpy==1.0.6
+cycler==0.11.0
+debugpy @ file:///D:/bld/debugpy_1660619096890/work
+decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1641555617451/work
+dgl==0.9.1
+entrypoints @ file:///home/conda/feedstock_root/build_artifacts/entrypoints_1643888246732/work
+executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1664126393503/work
+Flask==2.1.3
+flask-restx==0.5.1
+flatbuffers==22.9.24
+fonttools==4.38.0
+gast==0.4.0
+geojson==2.5.0
+google-auth==2.13.0
+google-auth-oauthlib==0.4.6
+google-pasta==0.2.0
+grpcio==1.50.0
+h5py==3.7.0
+haversine==2.7.0
+idna==3.4
+imageio==2.22.2
+ipykernel @ file:///D:/bld/ipykernel_1664214869204/work
+ipython @ file:///D:/bld/ipython_1662481701382/work
+itsdangerous==2.1.2
+jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1659959867326/work
+Jinja2==3.1.2
+joblib==1.2.0
+jsonschema==4.16.0
+jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1661522530937/work
+jupyter_core @ file:///D:/bld/jupyter_core_1658332495289/work
+keras==2.10.0
+Keras-Preprocessing==1.1.2
+kiwisolver==1.4.4
+libclang==14.0.6
+Markdown==3.4.1
+MarkupSafe==2.1.1
+matplotlib==3.6.0
+matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1660814786464/work
+nest-asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1664684991461/work
+networkx==2.8.7
+numpy==1.23.3
+oauthlib==3.2.2
+opencv-python==4.6.0.66
+opt-einsum==3.3.0
+packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1637239678211/work
+pandas==1.5.0
+parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1638334955874/work
+pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1602536217715/work
+Pillow==9.2.0
+platformdirs==2.5.2
+prompt-toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1662384672173/work
+protobuf==3.19.6
+psutil @ file:///C:/Windows/Temp/abs_b2c2fd7f-9fd5-4756-95ea-8aed74d0039flsd9qufz/croots/recipe/psutil_1656431277748/work
+psycopg2==2.9.4
+pure-eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1642875951954/work
+pyasn1==0.4.8
+pyasn1-modules==0.2.8
+Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1660666458521/work
+PyJWT==2.6.0
+pyopencl==2022.2.4
+pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1652235407899/work
+pyrsistent==0.18.1
+pyshp==2.3.1
+python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1626286286081/work
+pytools==2022.1.12
+pytz==2022.4
+PyWavelets==1.4.1
+pywin32==303
+PyYAML==6.0
+pyzmq @ file:///C:/ci/pyzmq_1657616000714/work
+requests==2.28.1
+requests-oauthlib==1.3.1
+rsa==4.9
+scikit-image==0.19.3
+scikit-learn==1.1.2
+scipy==1.9.2
+seaborn==0.12.1
+six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
+sklearn==0.0
+stack-data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1664126450622/work
+tensorboard==2.10.1
+tensorboard-data-server==0.6.1
+tensorboard-plugin-wit==1.8.1
+tensorflow==2.10.0
+tensorflow-estimator==2.10.0
+tensorflow-io-gcs-filesystem==0.27.0
+termcolor==2.0.1
+threadpoolctl==3.1.0
+tifffile==2022.10.10
+torch==1.12.1+cu116
+torchaudio==0.12.1+cu116
+torchvision==0.13.1+cu116
+tornado @ file:///D:/bld/tornado_1656937966227/work
+tqdm==4.64.1
+traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1663005918942/work
+typing_extensions==4.4.0
+urllib3==1.26.12
+wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1600965781394/work
+Werkzeug==2.1.2
+wincertstore==0.2
+wrapt==1.14.1
+++ main_server/subfuction/generate.py
@@ -0,0 +1,57 @@
+from haversine import haversine
+import networkx as nx
+import geojson
+
+
+
+with open("D:/takensoft/project2/data/기타 가공/데이터/osm.geojson", encoding='utf-8') as f:
+    gj = geojson.load(f)
+
+
+def swith_xy(tuples):
+    x, y = tuples
+    return (y, x)
+
+G = nx.Graph()
+
+total_data_num = gj['features']
+for j in range(len(total_data_num)):
+    features = gj['features'][j]
+    lines = features['geometry']['coordinates'][0]
+    print(j)
+
+    for i in range(len(lines) - 1):
+        G.add_edge(swith_xy(lines[i]), swith_xy(lines[i + 1]),
+                   fclass=features['properties']['fclass'],
+                   oneway=features['properties']['oneway'])
+
+sg = (G.subgraph(c) for c in nx.connected_components(G))  # fallback when the subgraph helper is unavailable
+sg = list(sg)[0]
+
+for n0, n1 in G.edges():
+    dist = haversine(n0, n1, unit='m')
+    G.edges[n0, n1]["dist"] = dist
+
+df = nx.to_pandas_edgelist(G)
+
+li_source = list(df['source'])
+li_source_x = []
+li_source_y = []
+
+for i in li_source:
+    li_source_x.append(str(i[0]))
+    li_source_y.append(str(i[1]))
+df['source_x'] = li_source_x
+df['source_y'] = li_source_y
+
+li_target = list(df['target'])
+li_target_x = []
+li_target_y = []
+
+for i in li_target:
+    li_target_x.append(str(i[0]))
+    li_target_y.append(str(i[1]))
+df['target_x'] = li_target_x
+df['target_y'] = li_target_y
+df = df.drop(['source', 'target'], axis=1)
+df = df.reset_index()
+df.to_csv('node.csv', encoding='euc-kr')
+
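As a follow-up in the same session, the per-edge haversine distance stored in the "dist" attribute can drive a shortest-path query on the connected component kept above; the endpoints here are arbitrary example picks:

import networkx as nx

nodes = list(sg.nodes())            # sg is a single connected component, so a path exists
start, goal = nodes[0], nodes[-1]   # arbitrary example endpoints
route = nx.shortest_path(sg, start, goal, weight="dist")
length_m = nx.shortest_path_length(sg, start, goal, weight="dist")
print(len(route), "nodes,", round(length_m, 1), "m")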
+++ main_server/subfuction/save_pickle.py
@@ -0,0 +1,35 @@
+import torch
+import numpy as np
+import networkx as nx
+from database.database import DB
+import pandas as pd
+
+import math
+from itertools import tee
+import shapefile
+import os
+
+def pairwise(iterable):
+    """Return successive overlapping pairs from an iterable:
+    s -> (s0,s1), (s1,s2), (s2, s3), ..."""
+    a, b = tee(iterable)
+    next(b, None)
+    return zip(a, b)
+
+def swith_xy(tuples):
+    x, y = tuples
+    return (y, x)
+
+
+
+
+
+db = DB()
+# db_get_node() is expected to return rows of
+# (index, source_x, source_y, target_x, target_y, distance)
+df = pd.DataFrame(db.db_get_node())
+df.columns = ['index', 'source_x', 'source_y', 'target_x', 'target_y', 'distance']
+G = nx.Graph()
+for j in range(len(df)):
+    G.add_edge((df['source_x'][j], df['source_y'][j]),
+               (df['target_x'][j], df['target_y'][j]),
+               length=df['distance'][j])
+nx.write_gpickle(G, 'OSM_gpickle.gpickle')
+
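A small sketch of reading the pickled graph back for later use; nx.read_gpickle is the counterpart of nx.write_gpickle and is available in the networkx 2.x release pinned in requirements.txt:

import networkx as nx

G = nx.read_gpickle('OSM_gpickle.gpickle')
print(G.number_of_nodes(), "nodes,", G.number_of_edges(), "edges")

# each edge carries the 'length' attribute written by save_pickle.py
u, v, data = next(iter(G.edges(data=True)))
print(u, v, data['length'])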