#!/usr/bin/python
# script to generate the json for anchor and test based on metrics

import csv, sys, os, shutil, ast, json, hashlib, pprint, platform, time, datetime, glob, re


def creation_date(path_to_file):
    """
    Try to get the date that a file was created, falling back to when it was
    last modified if that isn't possible.
    See http://stackoverflow.com/a/39501288/1709587 for explanation.
    """
    if platform.system() == 'Windows':
        return os.path.getctime(path_to_file)
    else:
        stat = os.stat(path_to_file)
        try:
            return stat.st_birthtime
        except AttributeError:
            # We're probably on Linux. No easy way to get creation dates here,
            # so we'll settle for when its content was last modified.
            return stat.st_mtime


reference_rows = []
anchor_rows = []
crosscheck_rows = []
verification_rows = []

refseqcsv = 'reference-sequence.csv'
anchorcsv = 'streams.csv'
crosscheckcsv = 'cross-check.csv'
verificationcsv = 'verification.csv'
verification1csv = 'verification1.csv'

referenceSequences = '../ReferenceSequences/'
metricsDirectory = 'Metrics/'
configDirectory = 'CFG/'
rootOnlineDirectory = 'https://dash-large-files.akamaized.net/WAVE/3GPP/5GVideo/'
bitstreamDir = 'Bitstreams/'
referenceSequencesDir = 'ReferenceSequences/'
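# Column layout assumed by the index arithmetic below. This note is inferred from this
# script alone, not from a documented spec; check it against the actual CSV headers:
#   reference-sequence.csv : [0] reference sequence key, [3] sequence name used to locate
#                            ../ReferenceSequences/<name>/<name>.json
#   streams.csv            : [0] anchor/stream key, [2] reference sequence key,
#                            [5] list of rate points, parsed with ast.literal_eval
#   cross-check.csv        : [0] report type (bitstream/reconstruction/metrics),
#                            [1] anchor key, [3] date, [4] status, [5] info,
#                            [6] orig-info, [7] company, [8] e-mail, [9] tdoc
#   verification.csv       : two header rows, then one row per anchor (see the layout
#                            note ahead of the verification dump further below)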
# for all scenarios
scenarios = [ f.path for f in os.scandir("./") if f.is_dir() ]
for s in scenarios:
    # testfolder = [ "Scenario-5-Gaming/265" ]
    testfolder = [ ff.path for ff in os.scandir(s) if ff.is_dir() ]
    rfile = s + '/' + refseqcsv
    # does the reference sequence csv exist?
    if os.path.isfile(rfile):
        # for test models
        for test in testfolder:
            adir = test + '/'
            adir = adir.replace('.\\', '')
            adir = adir.replace('\\', '/')
            afile = adir + anchorcsv
            vfile = adir + verificationcsv
            cfile = adir + crosscheckcsv
            # do all files exist
            if os.path.isfile(afile) and os.path.isfile(vfile) and os.path.isfile(cfile):
                # open reference sequence, anchor, cross-check and verification
                with open(rfile) as csv_reference, open(afile) as csv_anchor, open(vfile) as csv_verification, open(cfile) as csv_crosscheck:
                    # read reference sequence file
                    reference = csv.reader(csv_reference, delimiter=',')
                    reference_header = next(reference)
                    for row in reference:
                        reference_rows.append(row)
                    # read anchor file
                    anchor = csv.reader(csv_anchor, delimiter=',')
                    anchor_header = next(anchor)
                    for row in anchor:
                        anchor_rows.append(row)
                    # read verification file (two header rows)
                    verification = csv.reader(csv_verification, delimiter=',')
                    verification_header1 = next(verification)
                    verification_header2 = next(verification)
                    for row in verification:
                        verification_rows.append(row)
                    # read cross-check file
                    crosscheck = csv.reader(csv_crosscheck, delimiter=',')
                    crosscheck_header = next(crosscheck)
                    for row in crosscheck:
                        crosscheck_rows.append(row)

                    # For each anchor tuple, get the metrics
                    for i in range(anchor.line_num-1):
                        # find correct reference sequence
                        for j in range(reference.line_num-1):
                            if anchor_rows[i][2] == reference_rows[j][0]:
                                sequence = reference_rows[j][3]
                        # read sequence file
                        sequencefile1 = sequence + '.json'
                        sequencefile = referenceSequences + sequence + '/' + sequencefile1
                        with open(sequencefile) as json_file:
                            data = json.load(json_file)
                        # open the metrics file
                        file = adir + metricsDirectory + anchor_rows[i][0] + '.csv'
                        if os.path.isfile(file):
                            with open(file) as csv_metrics:
                                metrics = csv.reader(csv_metrics, delimiter=',')
                                metrics_header = next(metrics)
                                metrics_rows = []
                                for row in metrics:
                                    metrics_rows.append(row)
                                values = ast.literal_eval(anchor_rows[i][5])
                                for k in range(metrics.line_num-1):
                                    for q in values:
                                        if metrics_rows[k][0] == str(q):
                                            anchorfile = adir + anchor_rows[i][0] + '/' + anchor_rows[i][0] + '-' + repr(q) + '.json'
                                            with open(anchorfile, 'r') as json_file:
                                                anchordata = json.load(json_file)
                                            # print(anchorfile)
                                            # Assign JSON output
                                            BitstreamJSON = anchordata['Bitstream']
                                            GenerationJSON = anchordata['Generation']
                                            ReconstructionJSON = anchordata['Reconstruction']
                                            MetricsJSON = anchordata['Metrics']
                                            copyrightJSON = anchordata['copyRight']
                                            ContactJSON = anchordata['Contact']
                                            # VerificationJSON = {'bitstream','metrics','reconstruction'}
                                            key = BitstreamJSON['key']
                                            # Evaluate and add reports
                                            VBitstreamJSON = {'status': "missing"}
                                            VMetricsJSON = {'status': "missing"}
                                            VReconstructionJSON = {'status': "missing"}
                                            VerificationJSON = {
                                                'bitstream': VBitstreamJSON,
                                                'metrics': VMetricsJSON,
                                                'reconstruction': VReconstructionJSON
                                            }
                                            for v in range(crosscheck.line_num-1):
                                                if crosscheck_rows[v][1] == key:
                                                    ReportJSON = {
                                                        'type': crosscheck_rows[v][0],
                                                        'status': crosscheck_rows[v][4],
                                                        'orig-date': crosscheck_rows[v][3],
                                                        'info': crosscheck_rows[v][5],
                                                        'orig-info': crosscheck_rows[v][6],
                                                        'company': crosscheck_rows[v][7],
                                                        'e-mail': crosscheck_rows[v][8],
                                                        'tdoc': crosscheck_rows[v][9]
                                                    }
                                                    if crosscheck_rows[v][0] not in ('bitstream', 'metrics', 'reconstruction'):
                                                        print("Wrong key", crosscheck_rows[v][0], cfile)
                                                    # combine the reported statuses per report type
                                                    if VerificationJSON[crosscheck_rows[v][0]]['status'] == "missing" and crosscheck_rows[v][4] == "successful":
                                                        VerificationJSON[crosscheck_rows[v][0]]['status'] = 'successful'
                                                    if VerificationJSON[crosscheck_rows[v][0]]['status'] == "missing" and crosscheck_rows[v][4] == "failed":
                                                        VerificationJSON[crosscheck_rows[v][0]]['status'] = 'failed'
                                                    if VerificationJSON[crosscheck_rows[v][0]]['status'] == "failed" and crosscheck_rows[v][4] == "successful":
                                                        VerificationJSON[crosscheck_rows[v][0]]['status'] = 'ambiguous'
                                                    if VerificationJSON[crosscheck_rows[v][0]]['status'] == "successful" and crosscheck_rows[v][4] == "failed":
                                                        VerificationJSON[crosscheck_rows[v][0]]['status'] = 'ambiguous'
                                                    if 'Reports' in VerificationJSON[crosscheck_rows[v][0]]:
                                                        VerificationJSON[crosscheck_rows[v][0]]['Reports'].append(ReportJSON)
                                                    else:
                                                        VerificationJSON[crosscheck_rows[v][0]]['Reports'] = [ReportJSON]
                                                    # Update the verification matrix
                                                    for w in range(verification.line_num-3):
                                                        if verification_rows[w][0] == anchor_rows[i][0]:
                                                            # find cell depending on status and q value
                                                            offset = ['bitstream', 'reconstruction', 'metrics'].index(crosscheck_rows[v][0])
                                                            position = (values.index(q) + 1) * 3 + 1 + offset
                                                            if VerificationJSON[crosscheck_rows[v][0]]['status'] == "missing":
                                                                verification_rows[w][position] = ''
                                                            elif VerificationJSON[crosscheck_rows[v][0]]['status'] == "failed":
                                                                verification_rows[w][position] = 'F'
                                                            elif VerificationJSON[crosscheck_rows[v][0]]['status'] == "successful":
                                                                verification_rows[w][position] = 'S'
                                                            else:
                                                                print("unknown value", verification_rows[w][0], VerificationJSON[crosscheck_rows[v][0]]['status'])
                                            AnchorJSON = {
                                                'Bitstream': BitstreamJSON,
                                                'Generation': GenerationJSON,
                                                'Reconstruction': ReconstructionJSON,
                                                'Metrics': MetricsJSON,
                                                'Verification': VerificationJSON,
                                                'copyRight': data['copyRight'],
                                                'Contact': ContactJSON
                                            }
                                            with open(anchorfile, 'w') as json_file:
                                                json.dump(AnchorJSON, json_file, sort_keys=False, indent=2)
                        else:
                            print('Metrics file', file, 'does not exist')
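                    # Layout note (an assumption inferred from the index arithmetic above and
                    # below, not from a documented spec): in each verification.csv row, column 0
                    # is the anchor key, columns 1-3 hold the overall bitstream/reconstruction/
                    # metrics summary, and each rate point then gets its own triplet starting at
                    # column 4, so 'position = (values.index(q) + 1) * 3 + 1 + offset' puts,
                    # e.g., the second rate point's metrics cell at (1+1)*3+1+2 = column 9.
                    # The summary pass below reduces those triplets to 'S' (all points
                    # successful), 'F' (any failure) or 'P' (presumably partial, i.e. some
                    # points still unverified).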
                    # verification dump
                    for v in range(verification.line_num-3):
                        # do a summary
                        for z in range(4, len(verification_rows[v])):
                            if verification_rows[v][z] == 'F':
                                verification_rows[v][(z+2)%3+1] = 'F'
                            if ((z+2)%3) == 0:
                                # Bitstream
                                if verification_rows[v][(z+2)%3+1] == '' and verification_rows[v][z] == 'S':
                                    verification_rows[v][(z+2)%3+1] = 'S'
                            else:
                                # Reconstruction
                                # Metrics
                                # print((z+2)%3+1, verification_rows[v][(z+2)%3+1], verification_rows[v][z])
                                if verification_rows[v][(z+2)%3+1] == '' and verification_rows[v][z] == 'S':
                                    verification_rows[v][(z+2)%3+1] = 'S'
                                if verification_rows[v][(z+2)%3+1] == 'S' and verification_rows[v][z] == '':
                                    verification_rows[v][(z+2)%3+1] = 'P'
                        print(verification_rows[v])
                    # print(verification)

                    # clear the variables
                    reference_header = None
                    reference_rows = []
                    anchor_header = None
                    anchor_rows = []
                    crosscheck_header = None
                    crosscheck_rows = []
                # all files are closed
                # Open Verification file and write data
                with open(vfile, mode='w', newline='') as outfile:
                    out_writer = csv.writer(outfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
                    out_writer.writerow(verification_header1)
                    out_writer.writerow(verification_header2)
                    for v in range(verification.line_num-3):
                        out_writer.writerow(verification_rows[v])
                verification_rows = []
            else:
                print('Some files missing', afile, vfile, cfile)
    else:
        print('Reference Sequences', rfile, 'does not exist')
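# For reference, the per-anchor JSON rewritten above ends up with the shape sketched below.
# The field names are taken from the code; the values shown are illustrative placeholders
# only. Note that 'copyRight' is filled from the reference-sequence JSON (data['copyRight']),
# not from the anchor's own 'copyRight' entry, which is read but left unused.
#
# {
#   "Bitstream":      { "key": "...", ... },
#   "Generation":     { ... },
#   "Reconstruction": { ... },
#   "Metrics":        { ... },
#   "Verification": {
#     "bitstream":      { "status": "successful",
#                         "Reports": [ { "type": "bitstream", "status": "successful",
#                                        "orig-date": "...", "info": "...", "orig-info": "...",
#                                        "company": "...", "e-mail": "...", "tdoc": "..." } ] },
#     "metrics":        { "status": "missing" },
#     "reconstruction": { "status": "failed", "Reports": [ ... ] }
#   },
#   "copyRight": "...",
#   "Contact":   { ... }
# }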