2
2
Module to define methods that fetch data to store in the oss metric
3
3
entity objects.
4
4
"""
5
+ import os
5
6
import json
6
7
from metricsLib .metrics_definitions import SIMPLE_METRICS , ORG_METRICS , ADVANCED_METRICS
7
8
from metricsLib .metrics_definitions import PERIODIC_METRICS , RESOURCE_METRICS
9
+ from metricsLib .oss_metric_entities import GithubOrg , Repository
10
+ from metricsLib .constants import PATH_TO_METADATA
8
11
12
def parse_tracked_repos_file():
    """
    Load projects_tracked.json and pull out the tracked orgs and repos.

    Returns:
        Tuple (org login list, repo url mapping) taken straight from
        the tracking file.
    """
    # TODO: Create a read repos-to-include.txt
    tracking_path = os.path.join(PATH_TO_METADATA, "projects_tracked.json")
    with open(tracking_path, "r", encoding="utf-8") as tracking_fp:
        tracked = json.load(tracking_fp)

    # "orgs" holds org logins; "Open Source Projects" maps each owner to the
    # specific repositories being tracked, e.g. ['dsacms.github.io'].
    return tracked["orgs"], tracked["Open Source Projects"]
30
+
31
def parse_repos_and_orgs_into_objects(org_name_list, repo_name_list):
    """
    Parse lists of strings into oss metric entities and return lists of
    the corresponding oss metric entity objects.

    Arguments:
        org_name_list: list of logins for github orgs
        repo_name_list: mapping of owner login -> list of urls for git
            repositories belonging to that owner

    Returns:
        Tuple of lists of oss metric entity objects (orgs, repos)
    """
    orgs = [GithubOrg(org) for org in org_name_list]

    repos = []
    for owner, urls in repo_name_list.items():
        # Search for the org matching this owner (case-insensitive) so each
        # repo can be linked to its repo group; None when no org matches.
        org_id = next(
            (x.repo_group_id for x in orgs if x.login.lower() == owner.lower()), None)

        for repo_url in urls:
            repos.append(Repository(repo_url, org_id))

    return orgs, repos
9
57
10
58
def get_all_data (all_orgs , all_repos ):
11
59
"""
@@ -95,8 +143,56 @@ def fetch_all_new_metric_data(all_orgs, all_repos):
95
143
print (f"Fetching metrics for org { org .name } id #{ org .repo_group_id } " )
96
144
for metric in ORG_METRICS :
97
145
org .apply_metric_and_store_data (metric )
146
+ print (metric .name )
98
147
add_info_to_org_from_list_of_repos (all_repos , org )
99
148
149
def read_current_metric_data(repos, orgs):
    """
    Read current metrics and load previous metrics that
    were saved in .old files.

    Arguments:
        orgs: orgs to read data for.
        repos: repos to read data for.
    """
    for org in orgs:
        path = org.get_path_to_json_data()

        # Previous metrics are stored beside the current ones as {path}.old
        with open(f"{path}.old", "r", encoding="utf-8") as file:
            org.previous_metric_data.update(json.load(file))

        # Current metric data lives at the entity's json path.
        with open(path, "r", encoding="utf-8") as file:
            org.metric_data.update(json.load(file))

    for repo in repos:
        path = repo.get_path_to_json_data()

        with open(f"{path}.old", "r", encoding="utf-8") as file:
            repo.previous_metric_data.update(json.load(file))

        with open(path, "r", encoding="utf-8") as file:
            repo.metric_data.update(json.load(file))
195
+
100
196
101
197
def read_previous_metric_data (repos , orgs ):
102
198
"""
@@ -116,7 +212,8 @@ def read_previous_metric_data(repos, orgs):
116
212
org .previous_metric_data .update (prev_data )
117
213
except FileNotFoundError :
118
214
print ("Could not find previous data for records for org" +
119
- f"{ org .login } " )
215
+ f"{ org .login } " )
216
+
120
217
121
218
for repo in repos :
122
219
try :
@@ -131,20 +228,45 @@ def read_previous_metric_data(repos, orgs):
131
228
def write_metric_data_json_to_file(orgs, repos):
    """
    Write all metric data to json files.

    Keep old metrics as a .old file.

    Arguments:
        orgs: orgs to write to file
        repos: repos to write to file
    """
    for org in orgs:
        path = org.get_path_to_json_data()

        # Stash the previous metrics beside the current ones as {path}.old
        with open(f"{path}.old", "w+", encoding="utf-8") as file:
            file.write(json.dumps(org.previous_metric_data, indent=4))

        # Merge current metrics over previous ones. Copy first so the org's
        # previous_metric_data dict is not mutated in place by the merge.
        org_dict = dict(org.previous_metric_data)
        org_dict.update(org.metric_data)

        with open(path, "w+", encoding="utf-8") as file:
            file.write(json.dumps(org_dict, indent=4))

    for repo in repos:
        path = repo.get_path_to_json_data()

        with open(f"{path}.old", "w+", encoding="utf-8") as file:
            file.write(json.dumps(repo.previous_metric_data, indent=4))

        # Same copy-before-merge as above to leave previous_metric_data intact.
        repo_dict = dict(repo.previous_metric_data)
        repo_dict.update(repo.metric_data)

        with open(path, "w+", encoding="utf-8") as file:
            file.write(json.dumps(repo_dict, indent=4))
0 commit comments