#!/usr/bin/python
# Script to generate the JSON for anchors and tests based on metrics.
import ast
import csv
import hashlib
import json
import os
import platform
import time


def creation_date(path_to_file):
    """
    Try to get the date that a file was created, falling back to when it
    was last modified if that isn't possible.
    See http://stackoverflow.com/a/39501288/1709587 for explanation.
    """
    if platform.system() == 'Windows':
        return os.path.getctime(path_to_file)
    else:
        stat = os.stat(path_to_file)
        try:
            return stat.st_birthtime
        except AttributeError:
            # We're probably on Linux. No easy way to get creation dates
            # here, so we'll settle for when its content was last modified.
            return stat.st_mtime


reference_rows = []
anchor_rows = []
refseqcsv = 'reference-sequence.csv'
anchorcsv = 'streams.csv'
referenceSequences = '../ReferenceSequences/'
metricsDirectory = 'Metrics/'
configDirectory = 'CFG/'
rootOnlineDirectory = 'https://dash-large-files.akamaized.net/WAVE/3GPP/5GVideo/'
bitstreamDir = 'Bitstreams/'
referenceSequencesDir = 'ReferenceSequences/'

# For all scenarios. Note that the test folders are currently hard-coded, so
# the same list is processed for every scenario directory found here.
scenarios = [f.path for f in os.scandir("./") if f.is_dir()]
for s in scenarios:
    # testfolder = [ff.path for ff in os.scandir(s) if ff.is_dir()]
    testfolder = ["Scenario-1-FHD/265", "Scenario-2-4K/265",
                  "Scenario-3-Screen/265", "Scenario-4-Sharing/265",
                  "Scenario-5-Gaming/265"]
    rfile = s + '/' + refseqcsv
    # Does the reference-sequence list exist?
    if os.path.isfile(rfile):
        # For each test model
        for test in testfolder:
            adir = test + '/'
            adir = adir.replace('.\\', '')
            adir = adir.replace('\\', '/')
            afile = adir + anchorcsv
            # Does the anchor list exist?
            if os.path.isfile(afile):
                # Open the reference-sequence and anchor lists.
                # reference-sequence.csv columns, as used below: [0] sequence
                # key, [1] sequence name, [3] subdirectory, [5] HDR flag
                # ("1" means HDR).
                with open(rfile) as csv_reference, open(afile) as csv_anchor:
                    reference = csv.reader(csv_reference, delimiter=',')
                    reference_header = next(reference)
                    for row in reference:
                        reference_rows.append(row)
                    # Get the anchors. streams.csv columns, as used below:
                    # [0] anchor key, [2] reference-sequence key,
                    # [3] encoder/decoder id, [4] configuration name,
                    # [5] list of variant QPs as a Python literal.
                    anchor_rows = []
                    anchor = csv.reader(csv_anchor, delimiter=',')
                    anchor_header = next(anchor)
                    for row in anchor:
                        anchor_rows.append(row)
                    # For each anchor tuple, get the metrics.
                    for i in range(len(anchor_rows)):
                        # Determine the codec from the configuration name.
                        print(anchor_rows[i])
                        codec = anchor_rows[i][4][3:5]
                        # Find the matching reference sequence.
                        for j in range(len(reference_rows)):
                            if anchor_rows[i][2] == reference_rows[j][0]:
                                sequence = reference_rows[j][1]
                                # Read the sequence description file.
                                sequencefile1 = sequence + '.json'
                                sequencefile = referenceSequences + reference_rows[j][3] + '/' + sequencefile1
                                with open(sequencefile) as json_file:
                                    data = json.load(json_file)
                                # Open the metrics file.
                                metricsfile = adir + metricsDirectory + anchor_rows[i][0] + '.csv'
                                if os.path.isfile(metricsfile):
                                    with open(metricsfile) as csv_metrics:
                                        metrics = csv.reader(csv_metrics, delimiter=',')
                                        metrics_header = next(metrics)
                                        metrics_rows = []
                                        for row in metrics:
                                            metrics_rows.append(row)
                                        values = ast.literal_eval(anchor_rows[i][5])
                                        for k in range(len(metrics_rows)):
                                            for q in values:
                                                if metrics_rows[k][0] == str(q):
                                                    if reference_rows[j][5] == "1":
                                                        # HDR metrics
                                                        parameter = metrics_rows[k][0]
                                                        bitrate = metrics_rows[k][1]
                                                        y_psnr = metrics_rows[k][2]
                                                        u_psnr = metrics_rows[k][3]
                                                        v_psnr = metrics_rows[k][4]
                                                        psnr = metrics_rows[k][5]
                                                        y_wpsnr = metrics_rows[k][6]
                                                        u_wpsnr = metrics_rows[k][7]
                                                        v_wpsnr = metrics_rows[k][8]
                                                        wpsnr = metrics_rows[k][9]
                                                        psnrl100 = metrics_rows[k][10]
                                                        de100 = metrics_rows[k][11]
                                                        bitrate_log = metrics_rows[k][12]
                                                        encode_time = metrics_rows[k][13]
                                                        decode_time = metrics_rows[k][14]
                                                        recon_md5 = metrics_rows[k][15]
                                                        bitstream_md5 = metrics_rows[k][16]
                                                    else:
                                                        # SDR metrics
                                                        parameter = metrics_rows[k][0]
                                                        bitrate = metrics_rows[k][1]
                                                        y_psnr = metrics_rows[k][2]
                                                        u_psnr = metrics_rows[k][3]
                                                        v_psnr = metrics_rows[k][4]
                                                        psnr = metrics_rows[k][5]
                                                        ms_ssim = metrics_rows[k][6]
                                                        vmaf = metrics_rows[k][7]
                                                        bitrate_log = metrics_rows[k][8]
                                                        encode_time = metrics_rows[k][9]
                                                        decode_time = metrics_rows[k][10]
                                                        recon_md5 = metrics_rows[k][11]
                                                        bitstream_md5 = metrics_rows[k][12]
                                                    # Get size and MD5 of the bitstream.
                                                    # AV1 bitstreams use the .obu extension, all others .bin.
                                                    # If the file is missing, fall back to an earlier anchor
                                                    # row that shares the same bitstream (duplicate anchors).
                                                    if codec == 'AV':
                                                        filename1 = anchor_rows[i][0] + '-' + repr(q) + '.obu'
                                                        filename = adir + anchor_rows[i][0] + '/' + filename1
                                                        if not os.path.isfile(filename):
                                                            filename1 = anchor_rows[i - 4][0] + '-' + repr(q) + '.obu'
                                                            filename = adir + anchor_rows[i - 4][0] + '/' + filename1
                                                            print("Alternate Anchor", anchor_rows[i][0], filename)
                                                    else:
                                                        filename1 = anchor_rows[i][0] + '-' + repr(q) + '.bin'
                                                        filename = adir + anchor_rows[i][0] + '/' + filename1
                                                        if not os.path.isfile(filename):
                                                            if codec == 'HM':
                                                                filename1 = anchor_rows[i - 4][0] + '-' + repr(q) + '.bin'
                                                                filename = adir + anchor_rows[i - 4][0] + '/' + filename1
                                                            elif anchor_rows[i][0] == "S3-T35-ETM":
                                                                filename1 = anchor_rows[i - 18][0] + '-' + repr(q) + '.bin'
                                                                filename = adir + anchor_rows[i - 18][0] + '/' + filename1
                                                            else:
                                                                filename1 = anchor_rows[i - 2][0] + '-' + repr(q) + '.bin'
                                                                filename = adir + anchor_rows[i - 2][0] + '/' + filename1
                                                            print("Alternate Anchor", anchor_rows[i][0], filename)
                                                    if os.path.isfile(filename):
                                                        date = time.ctime(creation_date(filename))
                                                        size = os.path.getsize(filename)
                                                        md5_hash = hashlib.md5()
                                                        with open(filename, "rb") as f:
                                                            # Read and update the hash in chunks of 4K.
                                                            for byte_block in iter(lambda: f.read(4096), b""):
                                                                md5_hash.update(byte_block)
                                                        md5 = md5_hash.hexdigest()
                                                    else:
                                                        print(filename, "does not exist")
                                                        date = 0
                                                        size = 0
                                                        md5 = 0
                                                    # Other values.
                                                    key = anchor_rows[i][0] + '-' + repr(q)
                                                    # Check the log files (relative to the working directory).
                                                    encoderlogfile = 'encoder_' + key + '.log'
                                                    if not os.path.isfile(encoderlogfile):
                                                        encoderlogfile = ''
                                                    decoderlogfile = 'decoder_' + key + '.log'
                                                    if not os.path.isfile(decoderlogfile):
                                                        decoderlogfile = ''
                                                    # Assemble the JSON output.
                                                    BitstreamJSON = {
                                                        'URI': rootOnlineDirectory + bitstreamDir + filename,
                                                        'md5': md5,
                                                        'size': size,
                                                        'key': key,
                                                        'date': date
                                                    }
                                                    if reference_rows[j][5] == "1":
                                                        # HDR metrics
                                                        MetricsJSON = {
                                                            'Bitrate': bitrate,
                                                            'YPSNR': y_psnr,
                                                            'UPSNR': u_psnr,
                                                            'VPSNR': v_psnr,
                                                            'PSNR': psnr,
                                                            'YWPSNR': y_wpsnr,
                                                            'UWPSNR': u_wpsnr,
                                                            'VWPSNR': v_wpsnr,
                                                            'WPSNR': wpsnr,
                                                            'PSNRL100': psnrl100,
                                                            'DE100': de100,
                                                            'BitrateLog': bitrate_log,
                                                            'EncodeTime': encode_time,
                                                            'DecodeTime': decode_time
                                                        }
                                                    else:
                                                        # SDR metrics
                                                        MetricsJSON = {
                                                            'Bitrate': bitrate,
                                                            'YPSNR': y_psnr,
                                                            'UPSNR': u_psnr,
                                                            'VPSNR': v_psnr,
                                                            'PSNR': psnr,
                                                            'MS_SSIM': ms_ssim,
                                                            'VMAF': vmaf,
                                                            'BitrateLog': bitrate_log,
                                                            'EncodeTime': encode_time,
                                                            'DecodeTime': decode_time
                                                        }
                                                    GenerationJSON = {
                                                        'sequence': rootOnlineDirectory + referenceSequencesDir + sequence + '/' + sequencefile1,
                                                        'key': anchor_rows[i][2],
                                                        'encoder': anchor_rows[i][3],
                                                        'config-file': anchor_rows[i][4] + '.cfg',
                                                        'variant': '-qp ' + str(q),
                                                        'log-file': encoderlogfile,
                                                    }
                                                    ReconstructionJSON = {
                                                        'decoder': anchor_rows[i][3],
                                                        'log-file': decoderlogfile,
                                                        'md5': recon_md5
                                                    }
                                                    ContactJSON = {
                                                        'Company': '3GPP',
                                                        'e-mail': '3GPP_TSG_SA_WG4_VIDEO@LIST.ETSI.ORG'
                                                    }
                                                    # Evaluate and add the verification reports.
                                                    VBitstreamJSON = {'status': "missing"}
                                                    VMetricsJSON = {'status': "missing"}
                                                    VReconstructionJSON = {'status': "missing"}
                                                    VerificationJSON = {
                                                        'bitstream': VBitstreamJSON,
                                                        'metrics': VMetricsJSON,
                                                        'reconstruction': VReconstructionJSON
                                                    }
                                                    AnchorJSON = {
                                                        'Bitstream': BitstreamJSON,
                                                        'Generation': GenerationJSON,
                                                        'Reconstruction': ReconstructionJSON,
                                                        'Metrics': MetricsJSON,
                                                        'Verification': VerificationJSON,
                                                        'copyRight': data['copyRight'],
                                                        'Contact': ContactJSON
                                                    }
                                                    outname = adir + anchor_rows[i][0] + '/' + anchor_rows[i][0] + '-' + repr(q) + '.json'
                                                    # print(outname)
                                                    with open(outname, 'w') as outfile:
                                                        json.dump(AnchorJSON, outfile, sort_keys=False, indent=2)
                                else:
                                    print('Metrics file', metricsfile, 'does not exist')
                # Clear the per-test variables.
                reference_header = None
                reference_rows = []
                anchor_header = None
                anchor_rows = []
            else:
                print('Anchor file', afile, 'does not exist')
    else:
        print('Reference sequence file', rfile, 'does not exist')
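
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generation flow above): re-check the
# MD5 recorded in an emitted JSON file against the bitstream on disk. It only
# relies on the 'Bitstream' structure written by this script; the paths in
# the usage example below are hypothetical.
# ---------------------------------------------------------------------------
def verify_anchor_json(jsonpath, bitstreampath):
    """Return True if the bitstream's MD5 matches the emitted 'Bitstream' record."""
    with open(jsonpath) as f:
        anchor = json.load(f)
    md5_hash = hashlib.md5()
    with open(bitstreampath, "rb") as f:
        # Same chunked hashing as used when generating the record.
        for byte_block in iter(lambda: f.read(4096), b""):
            md5_hash.update(byte_block)
    return md5_hash.hexdigest() == anchor['Bitstream']['md5']

# Example usage (hypothetical paths):
# verify_anchor_json('Scenario-1-FHD/265/S1-T01-ETM/S1-T01-ETM-22.json',
#                    'Scenario-1-FHD/265/S1-T01-ETM/S1-T01-ETM-22.bin')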