nastasiasnk committed
Commit eda0b1c
1 Parent(s): e8a1fdb

Create imports_utils

Files changed (1)
  1. imports_utils +250 -0
imports_utils ADDED
@@ -0,0 +1,250 @@
+ !pip install requests
+ !pip install specklepy
+
+ import sys
+
+ # delete the repo folder if it already exists, then clone it
+ !rm -rf RECODE_speckle_utils
+ !git clone https://github.com/SerjoschDuering/RECODE_speckle_utils
+ sys.path.append('/content/RECODE_speckle_utils')
+
+ # import from the cloned repo
+ import speckle_utils
+ import data_utils
+
+ # import other libraries
+ from specklepy.api.client import SpeckleClient
+ from specklepy.api.credentials import get_default_account, get_local_accounts
+ from specklepy.transports.server import ServerTransport
+ from specklepy.api import operations
+ from specklepy.objects.geometry import Polyline, Point
+ from specklepy.objects import Base
+
+ import numpy as np
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+ import math
+ import matplotlib
+ from google.colab import files
+ from google.colab import userdata  # Colab secrets; API tokens are read from here below
+
+ import json
+
+ !pip install notion-client
+
+ from notion_client import Client as client_notion
+
+ # query full database
+ def fetch_all_database_pages(client, database_id):
+     """
+     Fetches all pages from a specified Notion database.
+
+     :param client: Initialized Notion client.
+     :param database_id: The ID of the Notion database to query.
+     :return: A list containing all pages from the database.
+     """
+     start_cursor = None
+     all_pages = []
+
+     while True:
+         # only pass start_cursor once the API has returned one
+         query_kwargs = {"database_id": database_id}
+         if start_cursor:
+             query_kwargs["start_cursor"] = start_cursor
+
+         response = client.databases.query(**query_kwargs)
+
+         all_pages.extend(response['results'])
+
+         # Check if there's more data to fetch
+         if response['has_more']:
+             start_cursor = response['next_cursor']
+         else:
+             break
+
+     return all_pages
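+
+ # Minimal usage sketch (the database id below is a hypothetical placeholder, not a real one):
+ #   pages = fetch_all_database_pages(notion, "00000000000000000000000000000000")
+ #   print(len(pages))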
+
+
+ def get_property_value(page, property_name):
+     """
+     Extracts the value from a specific property in a Notion page based on its type.
+     :param page: The Notion page data as retrieved from the API.
+     :param property_name: The name of the property whose value is to be fetched.
+     :return: The value or values contained in the specified property, depending on type.
+     """
+     # Check if the property exists in the page
+     if property_name not in page['properties']:
+         return None  # or raise an error if you prefer
+
+     property_data = page['properties'][property_name]
+     prop_type = property_data['type']
+
+     # Handle 'title' and 'rich_text' types
+     if prop_type in ['title', 'rich_text']:
+         return ''.join(text_block['text']['content'] for text_block in property_data[prop_type])
+
+     # Handle 'number' type
+     elif prop_type == 'number':
+         return property_data[prop_type]
+
+     # Handle 'select' type
+     elif prop_type == 'select':
+         return property_data[prop_type]['name'] if property_data[prop_type] else None
+
+     # Handle 'multi_select' type
+     elif prop_type == 'multi_select':
+         return [option['name'] for option in property_data[prop_type]]
+
+     # Handle 'date' type
+     elif prop_type == 'date':
+         if property_data[prop_type]['end']:
+             return (property_data[prop_type]['start'], property_data[prop_type]['end'])
+         else:
+             return property_data[prop_type]['start']
+
+     # Handle 'relation' type
+     elif prop_type == 'relation':
+         return [relation['id'] for relation in property_data[prop_type]]
+
+     # Handle 'people' type
+     elif prop_type == 'people':
+         return [person['name'] for person in property_data[prop_type] if 'name' in person]
+
+     # Add more handlers as needed for other property types
+     else:
+         # Return None or raise an error for unsupported property types
+         return None
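+
+ # Illustrative behaviour sketch (property names and values below are hypothetical, not taken
+ # from the actual databases): 'title'/'rich_text' returns the joined text, 'number' the raw
+ # number, 'multi_select' a list of option names, 'relation' a list of related page ids, e.g.
+ #   get_property_value(page, "LANDUSE")     # -> "residential"
+ #   get_property_value(page, "MAX POINTS")  # -> 5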
+
+
+ def get_page_by_id(notion_db_pages, page_id):
+     """Return the page with the given id from a list of fetched Notion pages (None if absent)."""
+     for pg in notion_db_pages:
+         if pg["id"] == page_id:
+             return pg
+
+
+ # Speckle and Notion clients; both tokens are read from Colab secrets via userdata
+ CLIENT = SpeckleClient(host="https://speckle.xyz/")
+ CLIENT.authenticate_with_token(token=userdata.get('speckle_token'))
+
+ notion = client_notion(auth=userdata.get('notion_token'))
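+ # Assumption: 'speckle_token' and 'notion_token' have been added as Colab secrets for this
+ # notebook; userdata.get() will error if a secret is missing or access is not granted.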
+
+ # Speckle stream holding the project data
+ stream_id = "ebcfc50abe"
+
+ # MAIN DISTANCE MATRIX
+ branch_name_dm = "graph_geometry/distance_matrix"
+ commit_id_dm = "cfde6f4ba4"  # ebcfc50abe/commits/cfde6f4ba4
+ dm_activityNodes = "activity_node+distance_matrix_ped_mm_noEntr"
+ dm_transportStops = "an_stations+distance_matrix_ped_mm_art_noEntr"
+
+ # LAND USE ATTRIBUTES
+ branch_name_lu = "graph_geometry/activity_nodes_with_land_use"
+ commit_id_lu = "13ae6cdd30"
+
+ # LIVABILITY DOMAINS ATTRIBUTES (Notion database ids)
+ notion_lu_domains = "407c2fce664f4dde8940bb416780a86d"
+ notion_domain_attributes = "01401b78420f4296a2449f587d4ed9c9"
+
+ # pull both Notion databases in full
+ lu_attributes = fetch_all_database_pages(notion, notion_lu_domains)
+ domain_attributes = fetch_all_database_pages(notion, notion_domain_attributes)
+
+
+ # map each land use to its livability subdomain and collect the unique subdomains
+ lu_domain_mapper = {}
+ subdomains_unique = []
+
+ for page in lu_attributes:
+     value_landuse = get_property_value(page, "LANDUSE")
+     value_subdomain = get_property_value(page, "SUBDOMAIN_LIVEABILITY")
+     if value_subdomain and value_landuse:
+         lu_domain_mapper[value_landuse] = value_subdomain
+     if value_subdomain != "":
+         subdomains_unique.append(value_subdomain)
+
+ subdomains_unique = list(set(subdomains_unique))
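+
+ # Resulting shape (values are hypothetical placeholders, not entries from the actual database):
+ #   lu_domain_mapper  -> {"lu_residential": "Housing", "lu_park": "Recreation", ...}
+ #   subdomains_unique -> ["Housing", "Recreation", ...]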
+
+
+ # per-subdomain attributes (sqm per employee, thresholds, max livability points, domain)
+ attribute_mapper = {}
+ domains_unique = []
+
+ for page in domain_attributes:
+     subdomain = get_property_value(page, "SUBDOMAIN_UNIQUE")
+     sqm_per_employee = get_property_value(page, "SQM PER EMPL")
+     thresholds = get_property_value(page, "MANHATTAN THRESHOLD")
+     max_points = get_property_value(page, "LIVABILITY MAX POINT")
+     domain = get_property_value(page, "DOMAIN")
+     if thresholds:  # domain != "Transportation" and
+         attribute_mapper[subdomain] = {
+             'sqmPerEmpl': [sqm_per_employee if sqm_per_employee != "" else 0],
+             'thresholds': thresholds,
+             'max_points': max_points,
+             'domain': [domain if domain != "" else 0]
+         }
+     if domain != "":
+         domains_unique.append(domain)
+
+ domains_unique = list(set(domains_unique))
+
+ # leftover list-based format; if run, it would overwrite the last subdomain's dict entry above
+ # attribute_mapper[subdomain] = [sqm_per_employee if sqm_per_employee != "" else 0, thresholds, max_points, domain if domain != "" else 0]
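+
+ # Resulting shape per subdomain (illustrative values only, not from the actual database):
+ #   attribute_mapper["Housing"] -> {'sqmPerEmpl': [35], 'thresholds': 800,
+ #                                   'max_points': 5, 'domain': ['Livability']}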
+
+
+ # load the distance-matrix commit from the Speckle stream
+ # (getSpeckleStream comes from the cloned RECODE_speckle_utils repo)
+ stream_distance_matrice = speckle_utils.getSpeckleStream(stream_id,
+                                                          branch_name_dm,
+                                                          CLIENT,
+                                                          commit_id=commit_id_dm)
+
+
+ # navigate to the list of Speckle objects of interest and parse every distance matrix
+ distance_matrices = {}
+ for distM in stream_distance_matrice["@Data"]['@{0}']:
+     for kk in distM.__dict__.keys():
+         try:
+             if kk.split("+")[1].startswith("distance_matrix"):
+                 distance_matrix_dict = json.loads(distM[kk])
+                 origin_ids = distance_matrix_dict["origin_uuid"]
+                 destination_ids = distance_matrix_dict["destination_uuid"]
+                 distance_matrix = distance_matrix_dict["matrix"]
+
+                 # convert the distance matrix to a DataFrame labelled by origin/destination uuids
+                 df_distances = pd.DataFrame(distance_matrix, index=origin_ids, columns=destination_ids)
+                 #distance_matrices[kk] = dist_m_csv[kk]
+                 distance_matrices[kk] = df_distances
+         except Exception:
+             # attribute is not a distance matrix (or is malformed); skip it
+             pass
+
+ df_dm_transport = distance_matrices[dm_transportStops]
+
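+ # Sanity-check sketch (assumes the "an_stations+..." key defined above exists in the stream):
+ # rows are origin uuids and columns are destination uuids, so a single distance reads as
+ #   df_dm_transport.loc[some_origin_uuid, some_destination_uuid]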