#!/usr/bin/python3
# -*- coding:UTF-8 -*-

# =====================================================================
# @module: Get all blx commits and save to csv file
# @author: Li Dongqing (dongqing.li@amlogic.com)
# @License: Copyright (c) 2019 Amlogic, Inc. All rights reserved.
# @Changes:
#          1. 2022.07.20 v0.1 create for stable branch update.
# =====================================================================

# Import modules
try:
    import sys
    import os
    import re
    import json
    import time
    import argparse
    import subprocess
    import IPython
    import openpyxl
    from git.repo import Repo
    from openpyxl.styles import Font, Alignment
    from collections import OrderedDict
except Exception as e:
    print(e)
    exit('Please install the missing modules, e.g.: pip3 install GitPython openpyxl ipython')

# json example: bl3.4.5-20220711-pre-ver.json
# {
#     "source_gits" : [
#         {"blType" : "bl2_sc2",    "gitPath" : "bl2/core_sc2", "lastCommit" : "feebe5301418c038a06d45d0216c780ae9ea0033"},
#         {"blType" : "bl2_s4",     "gitPath" : "bl2/core_s4",  "lastCommit" : "817779a738e99b81081a31035ed784840cace44c"},
#         {"blType" : "bl2_ree",    "gitPath" : "bl2/ree",      "lastCommit" : "09d5c246638e95e2598a264b36da7f6ede7f6ea8"},
#         {"blType" : "bl2_tee",    "gitPath" : "bl2/tee",      "lastCommit" : "115e9fc38721c3564435f582875fc02908787b53"},
#         {"blType" : "bl30_aocpu", "gitPath" : "bl30/src_ao",  "lastCommit" : "8c0b17692bd51f9c3311f8c51cd28bdf808a27a3"},
#         {"blType" : "bl31_1.3",   "gitPath" : "bl31_1.3/src", "lastCommit" : "cf6108ce548d7ad3bfe268dedf801358664b6ead"},
#         {"blType" : "bl32_3.8",   "gitPath" : "bl32_3.8/src", "lastCommit" : "c5ef42f2ce59304e2a4df7cf2dcbb12ab7ccefd1"},
#         {"blType" : "bl33",       "gitPath" : "bl33/v2019",   "lastCommit" : "f03ed9bc121114e9f31f1ee924d3adc176f13faa"},
#         {"blType" : "fip",        "gitPath" : "fip",          "lastCommit" : "d60ea7c9adfe8e537d0a11d2c2ce8e8097de5035"}
#     ]
# }

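# Example invocation (illustrative only; the script name and output dir are assumptions):
#   python3 get_blx_commits.py -j bl3.4.5-20220711-pre-ver.json -o output -v
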
# bootloader trunk branch and remote info
pdPrefix = "https://jira.amlogic.com/browse/"
scgitPrefix = "https://scgit.amlogic.com/plugins/gitiles/"
blSrcGits = [
    {"blType" : "bl2_sc2",    "gitBranch" : "projects/sc2",         "gitRemote" : "firmware", "upStream" : "bootloader/amlogic-advanced-bootloader/core/+/"},
    {"blType" : "bl2_s4",     "gitBranch" : "projects/s4",          "gitRemote" : "firmware", "upStream" : "bootloader/amlogic-advanced-bootloader/core/+/"},
    {"blType" : "bl2_ree",    "gitBranch" : "projects/amlogic-dev", "gitRemote" : "firmware", "upStream" : "bootloader/amlogic-advanced-bootloader/ree/+/"},
    {"blType" : "bl2_tee",    "gitBranch" : "projects/amlogic-dev", "gitRemote" : "firmware", "upStream" : "bootloader/amlogic-advanced-bootloader/tee/+/"},
    {"blType" : "bl30_aocpu", "gitBranch" : "projects/amlogic-dev", "gitRemote" : "firmware", "upStream" : "firmware/aocpu/+/"},
    {"blType" : "bl31_1.3",   "gitBranch" : "amlogic-dev-1.3",      "gitRemote" : "firmware", "upStream" : "ARM-software/arm-trusted-firmware/+/"},
    {"blType" : "bl32_3.8",   "gitBranch" : "amlogic-dev-3.8.0",    "gitRemote" : "firmware", "upStream" : "OP-TEE/optee_os/+/"},
    {"blType" : "bl33",       "gitBranch" : "amlogic-dev-2019",     "gitRemote" : "uboot",    "upStream" : "uboot/+/"},
    {"blType" : "fip",        "gitBranch" : "amlogic-dev",          "gitRemote" : "fip",      "upStream" : "amlogic/tools/fip/+/"}
]

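# e.g. a fip commit is linked as scgitPrefix + upStream + <full hash>, i.e. (illustrative):
#   https://scgit.amlogic.com/plugins/gitiles/amlogic/tools/fip/+/d60ea7c9adfe8e537d0a11d2c2ce8e8097de5035
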
# the local csv file columns
csv_file_column = [
    {"ID" : "A", "WIDTH" : 12, "NAME" : "Index"},
    {"ID" : "B", "WIDTH" : 45, "NAME" : "Trunk Commit"},
    {"ID" : "C", "WIDTH" : 30, "NAME" : "Trunk CL Link"},
    {"ID" : "D", "WIDTH" : 20, "NAME" : "Is Force Patch?"},
    {"ID" : "E", "WIDTH" : 20, "NAME" : "Is Secure Patch?"},
    {"ID" : "F", "WIDTH" : 20, "NAME" : "Reviewer"},
    {"ID" : "G", "WIDTH" : 16, "NAME" : "Related to other CL?"},
    {"ID" : "H", "WIDTH" : 20, "NAME" : "QA Test Cases"},
    {"ID" : "I", "WIDTH" : 16, "NAME" : "QA Verify Result"},
    {"ID" : "J", "WIDTH" : 20, "NAME" : "New CL"}
]

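# Note: this script only fills the first three columns (Index, Trunk Commit, Trunk CL Link);
# the remaining columns are created empty, presumably to be filled in during review.
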
# check out and update the local branch from its remote
def git_src_update(gitPath, gitRemote, gitBranch):
    local_path = os.path.join(gitPath)
    repo = Repo(local_path)

    # drop any stale temporary 'test' branch
    try:
        repo.git.branch('-D', 'test')
    except:
        pass

    # park on a temporary 'test' branch so the target branch can be deleted
    try:
        repo.git.checkout('-b', 'test')
    except:
        try:
            repo.git.checkout('test')
        except:
            pass

    try:
        repo.git.branch('-D', gitBranch)
    except:
        pass

    # re-create the target branch tracking the remote
    try:
        #repo.git.clean('-d', '-fx')
        repo.git.checkout('-t', 'remotes/' + gitRemote + '/' + gitBranch)
    except:
        try:
            repo.git.checkout(gitBranch)
        except:
            exit('Error: check out branch (%s / %s) failed!' % (gitRemote, gitBranch))

    try:
        repo.git.branch('-D', 'test')
    except:
        pass

    try:
        repo.git.fetch('--all')
    except:
        pass

    try:
        repo.git.reset('--hard', gitRemote + '/' + gitBranch)
    except:
        exit('Error: git reset branch (%s / %s) failed!' % (gitRemote, gitBranch))

    try:
        #repo.git.pull(gitRemote, gitBranch)
        repo.git.pull()
    except:
        exit('Error: git pull branch (%s) failed!' % gitBranch)

# find the index of bltype in a blSrcGits-style list, or -1 if no entry matches
def get_bltype_branch_id(bltype, stream_list):
    for i in range(len(stream_list)):
        stream_dic = stream_list[i]

        if str(bltype) == str(stream_dic['blType']):
            print(' > Match the local bltype ID = ', i+1)
            return i

    return -1

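# e.g. get_bltype_branch_id("bl33", blSrcGits) returns 7, the index of the "bl33" entry above.
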
# Parse the json array, then collect and export the commit info of each repo
def git_commits_to_src_link():
    print('\n[TRUNK LIST]:')

    # walk through each repo
    for i in range(len(trunk_list)):
        print(' > [%d] gitPath: %-12s lastCommit: %s' % (i+1, trunk_list[i]['gitPath'], trunk_list[i]['lastCommit']))

        if len(trunk_list[i]['lastCommit']) == 0:
            print(' > lastCommit is NULL !')
            continue

        try:
            os.chdir(topdir + trunk_list[i]['gitPath'])
        except:
            exit('Error: NO such git path: %s' % trunk_list[i]['gitPath'])

        # compare the trunk list with the local blSrcGits and find the matching id
        index = get_bltype_branch_id(trunk_list[i]['blType'], blSrcGits)
        if index < 0:
            exit('Error: no matching blType found: %s' % trunk_list[i]['blType'])

        # update the target branch
        git_src_update(os.getcwd(), blSrcGits[index]['gitRemote'], blSrcGits[index]['gitBranch'])

        # run git log with a custom format and produce the commit list
        commit_list = git_cmt_parse(os.getcwd(), trunk_list[i]['lastCommit'], 'HEAD', 'TRUE')

        git_cmt_2_csv(csvfile, trunk_list[i]['blType'], commit_list, blSrcGits[index], i)

# Open the json file and parse the last commit of each repo
def parse_json_file():
    global trunk_list

    with open(jsonfile, 'r') as load_f:
        try:
            json_array = json.load(load_f, object_pairs_hook=OrderedDict)
        except:
            exit('Error: Incorrect json format!')

    trunk_list = []
    for item in json_array['source_gits']:
        try:
            store_details = {"blType":None, "gitPath":None, "lastCommit":None}
            store_details['blType'] = item['blType']
            store_details['gitPath'] = item['gitPath']
            store_details['lastCommit'] = item['lastCommit']
            trunk_list.append(store_details)
        except:
            exit('Error: get trunk last commit failed.\n')

    return trunk_list

# return a str instance (decode bytes with UTF-8 if needed)
def to_str(bytes_or_str):
    if isinstance(bytes_or_str, bytes):
        value = bytes_or_str.decode('utf-8')

    else:
        value = bytes_or_str

    return value

# run a shell cmd and return its stdout as bytes
def bash_command(cmd):
    process = subprocess.Popen(cmd,
                               shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)

    #stdout, stderr = process.communicate()

    return process.stdout.read()

# get the commit list info between lastCommit and headCommit
def git_cmt_parse(gitPath, lastCommit, headCommit, isSrc):
    local_path = os.path.join(gitPath)

    repo = Repo(local_path)

    # run git log with a pseudo-json --pretty format
    commit_log = repo.git.log('--pretty={"summary":"%s","commit":"%h","hash":"%H","author":"%ae","date":"%cd","pd":""}',
                              '--reverse', lastCommit + '...' + headCommit)

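    # Each line of commit_log should look like this (illustrative values only):
    # {"summary":"fix: example change","commit":"abc1234","hash":"abc1234...","author":"someone@amlogic.com","date":"Mon Jul 25 10:38:31 2022 +0800","pd":""}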
    try:
        log_list = commit_log.split("\n")
        #if debug_enable:
        #    print(' > %s'%(log_list))
    except:
        pass

    # replace the inner double quotes of "Merge ..." / "Revert ..." summaries so each line stays parseable
    for i in range(len(log_list)):
        try:
            log_list[i] = str(re.sub(r'Merge "', r'Merge <', str(log_list[i])))
            log_list[i] = str(re.sub(r'" into', r'> into', str(log_list[i])))
            log_list[i] = str(re.sub(r'Revert "', r'Revert <', str(log_list[i])))
            log_list[i] = str(re.sub(r'"",', r'>",', str(log_list[i])))
            log_list[i] = str(re.sub(r' "', r' <', str(log_list[i])))
            log_list[i] = str(re.sub(r'" ', r'> ', str(log_list[i])))

            if debug_enable:
                print(' > [%d] %s' % (i, log_list[i]))
        except:
            pass

    # eval each cleaned line into a dict
    try:
        real_log_list = [eval(str(item)) for item in log_list]
    except:
        real_log_list = []
        if debug_enable:
            print(' > eval(str(item)) ERROR!')
            print(' > %s' % (log_list))

    # overwrite real_log_list[j]['pd'] with the Jira number (PD#) taken from the full commit message
    for j in range(len(real_log_list)):
        try:
            cmd = 'git log ' + real_log_list[j]['commit'] + ' -1 | grep PD# | head -n 1'
            res = to_str(bash_command(cmd)).replace('\n', '')

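            # e.g. a commit message line "PD#SWPL-12345" (illustrative id) yields pd = "SWPL-12345"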
            if res:
                real_log_list[j]['pd'] = res.split("PD#")[1]

            else:
                real_log_list[j]['pd'] = 'NULL'

            if debug_enable:
                print(' > [%d] overwrite PD# = %s' % (j, real_log_list[j]['pd']))
        except:
            pass

    try:
        print(' > Commit list max number of rows = ', len(real_log_list))
    except:
        pass

    return real_log_list

# save the commit info to the csv (xlsx) file
def git_cmt_2_csv(csvfile, blType, commit_list, stream_dic, sheet_index):
    global alignment

    # Load the workbook object
    wb = openpyxl.load_workbook(csvfile)

    # Create a sheet named after blType
    title = re.sub(r'[?|$|.|!|/|*]', r'_', blType)
    sheet = wb.create_sheet(title, sheet_index)

    sheet.title = title

    # Set the active sheet
    wb.active = sheet_index

    try:
        # Set alignment
        alignment = Alignment(horizontal="left", vertical="center", wrap_text=True)

        for i in range(len(commit_list) + 1):

            for j in range(len(csv_file_column)):
                sheet.cell(row = i + 1, column = j + 1).alignment = alignment
    except:
        pass

    # set cell(1,1): value and font
    font = Font(size=11, bold=True)
    sheet.cell(row = 1, column = 1).value = blType + " " + csv_file_column[0]['NAME']
    sheet.cell(row = 1, column = 1).font = font

    # set row 1: value and font
    for i in range(1, len(csv_file_column)):
        sheet.cell(row = 1, column = i + 1).value = csv_file_column[i]['NAME']
        sheet.cell(row = 1, column = i + 1).font = font

    # set row 1: height
    # sheet.row_dimensions[1].height = 30

    # set rows 2-n: trunk commits
    for i in range(len(commit_list)):
        try:
            # column 1: ID index
            sheet.cell(row = i + 2, column = 1).value = i + 1

            # column 2: Trunk Commit
            if commit_list[i]['pd'] == 'NULL':
                jira_pd = '\n'

            else:
                jira_pd = pdPrefix + commit_list[i]['pd'] + '\n'

            sheet.cell(row = i + 2, column = 2).value = \
                jira_pd + commit_list[i]['summary'] + '\n'\
                'Commit: ' + commit_list[i]['commit'] + '\n'\
                'Author: ' + commit_list[i]['author'] + '\n'\
                'Date: ' + commit_list[i]['date']

            # column 3: Trunk CL Link
            sheet.cell(row = i + 2, column = 3).value = scgitPrefix + \
                stream_dic['upStream'] + \
                commit_list[i]['hash']
        except:
            pass

    # set the width of columns A-J
    for i in range(len(csv_file_column)):
        sheet.column_dimensions[csv_file_column[i]['ID']].width = csv_file_column[i]['WIDTH']

    # save the file
    wb.save(csvfile)

# Create the output .xlsx file that stores the commit lists
def create_csv_file(outdir, inputfile):
    global csvfile

    # build the output file's full name
    localTime = time.strftime("_%Y%m%d_%H%M%S", time.localtime())

    base_name = os.path.basename(inputfile)
    file_name = os.path.splitext(base_name)[0]

    csvfile = str(topdir) + str(outdir) + "/" + file_name + str(localTime) + ".xlsx"
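    # e.g. with -j bl3.4.5-20220711-pre-ver.json and -o output this becomes (illustrative):
    #   <topdir>output/bl3.4.5-20220711-pre-ver_20220725_103831.xlsx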

    # create the workbook, overwriting any existing file
    wb = openpyxl.Workbook()

    wb.save(csvfile)

# parse argv
def getOptions(args=sys.argv[1:]):
    global jsonfile
    global debug_enable

    # create the argument parser
    parser = argparse.ArgumentParser(description="Get all blx commits and save to csv file.")

    # add the supported arguments
    parser.add_argument("-j", "--jsoncfg", required=True, help="Your input json file.")
    parser.add_argument("-o", "--outDir", required=True, help="Your output .xlsx dir.")
    parser.add_argument("-v", "--verbose", required=False, help="Increase output verbosity.", action="store_true")
    parser.add_argument("-p", "--push", required=False, help="Push csv file to confluence.", action="store_true")

    # parse argv
    options = parser.parse_args(args)

    # check whether debug is on
    if options.verbose:
        debug_enable = 1
    else:
        debug_enable = 0

    # check whether the outdir is available
    jsonfile = str(options.jsoncfg)
    if not os.path.isdir(options.outDir):
        exit('Error: No such directory!')

    # create the new output file
    create_csv_file(options.outDir, options.jsoncfg)
    print('\n[CONFIG INFO]:')
    print(' > Run in toplevel : ', topdir)
    print(' > Input json file : ', options.jsoncfg)
    print(' > Output xlsx dir : ', options.outDir)
    print(' > Output xlsx name: ', csvfile)

# Get the top-level source directory (assumed to be the one that contains the fip/ sub-dir)
def get_top_dir():
    global topdir

    pwd = os.getcwd()
    dirName = 'fip'

    if not os.path.exists(dirName):
        topdir = pwd + "/../"
    else:
        topdir = pwd + "/"

    return topdir

# Record the summary info and save it into the output file
def record_in_summary_sheet():
    wb = openpyxl.load_workbook(csvfile)

    default_ws = 'Sheet'
    # find the default sheet and make it active
    for s in range(len(wb.sheetnames)):
        if wb.sheetnames[s] == default_ws:
            break

    try:
        wb.active = s
        sheet = wb[default_ws]
        sheet.title = 'summary'
    except:
        exit('Error: NOT found sheet: %s' % default_ws)

    # set column width
    try:
        sheet.column_dimensions['A'].width = 10
        sheet.column_dimensions['B'].width = 20
        sheet.column_dimensions['C'].width = 15
    except:
        pass

    # row 1: summary head
    sheet.cell(row = 1, column = 1).value = 'ID'
    sheet.cell(row = 1, column = 2).value = 'title'
    sheet.cell(row = 1, column = 3).value = 'max_row'

    # row n: summary info
    for j in range(0, len(summary_list)):
        try:
            sheet.cell(row = j+2, column = 1).value = summary_list[j]['ID']
            sheet.cell(row = j+2, column = 2).value = summary_list[j]['title']
            sheet.cell(row = j+2, column = 3).value = summary_list[j]['max_row']

            if debug_enable:
                print('[%d] id:%d, title:%s, max_row:%2d' % (j+1,
                      summary_list[j]['ID'],
                      summary_list[j]['title'],
                      summary_list[j]['max_row']))
        except:
            pass

    # Set alignment
    for i in range(0, len(summary_list) + 1):

        for j in range(0, 3):
            try:
                sheet.cell(row = i + 1, column = j + 1).alignment = alignment
            except:
                pass

    wb.save(csvfile)
    return

# Print the summary of the output file, such as the max row count of each sheet
def summary_for_csv_sheets():
    global summary_list

    wb = openpyxl.load_workbook(csvfile)

    summary_list = []

    print('\n[SUMMARY SHEETS]:')
    print('=========================================')

    for i in range(0, len(wb.sheetnames)-1):
        try:
            sheet = wb[wb.sheetnames[i]]
            print('[%d] Sheet: %-12s Max_Row: %2d' % (i+1, sheet.title, sheet.max_row-1))
            summary_details = {"ID":0, "title":None, "max_row":0}
            summary_details['ID'] = i+1
            summary_details['title'] = sheet.title
            summary_details['max_row'] = sheet.max_row-1
            summary_list.append(summary_details)
        except:
            pass

    print('=========================================\n')

    #print('summary_list: ', summary_list)

    wb.save(csvfile)
    return

# Main func
if __name__ == "__main__":
    # Set colored stderr tracebacks
    from IPython.core.ultratb import ColorTB
    sys.excepthook = ColorTB()

    # Get the repo top dir
    get_top_dir()

    # Parse argv (input / output options)
    getOptions(sys.argv[1:])

    # Parse the json file to get the last commit of each repo
    parse_json_file()

    # Get the commit lists and source links
    git_commits_to_src_link()

    # Output the csv summary
    summary_for_csv_sheets()

    # Record it in the summary sheet
    record_in_summary_sheet()

    print('OUTPUT csv: ', os.path.basename(csvfile))
    exit('RUN OK !')